test(sdk): Restored the ParallelFor compiler test data (#4103)

* SDK - Tests - Restored the ParallelFor compiler test data

Fixes https://github.com/kubeflow/pipelines/issues/4102

* Removed the pipeline-sdk-type annotations

* Fixed the test_artifact_passing_using_volume test data
Authored by Alexey Volkov on 2020-06-29 01:30:14 -07:00, committed by GitHub
parent 2268ddddba
commit d24eb78371
2 changed files with 106 additions and 204 deletions
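
The two files below are compiler golden data: the workflow YAML that the KFP SDK compiler emits for a test pipeline, checked in so the compiler tests can compare their output against it. As a rough sketch of how such golden data gets refreshed (assuming the kfp v1 compiler API; the repository's own test harness and file paths may differ), a hypothetical helper could look like this:

# Hypothetical helper, not from this repo: recompile a test pipeline and
# overwrite its checked-in golden YAML. Assumes the kfp v1 SDK.
from kfp import compiler

def refresh_golden_data(pipeline_func, golden_path: str):
    # Compile the pipeline function and write the Argo workflow YAML to golden_path.
    compiler.Compiler().compile(pipeline_func, golden_path)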

Changed file: artifact_passing_using_volume compiler test data (compiled workflow YAML)

@@ -17,20 +17,20 @@ spec:
template: consumer
arguments:
parameters:
- name: processor-output_1
value: '{{tasks.processor.outputs.parameters.processor-output_1}}'
- name: processor-output_2-subpath
value: '{{tasks.processor.outputs.parameters.processor-output_2-subpath}}'
- name: processor-Output-1
value: '{{tasks.processor.outputs.parameters.processor-Output-1}}'
- name: processor-Output-2-subpath
value: '{{tasks.processor.outputs.parameters.processor-Output-2-subpath}}'
dependencies:
- processor
- name: processor
template: processor
arguments:
parameters:
- name: producer-output_1
value: '{{tasks.producer.outputs.parameters.producer-output_1}}'
- name: producer-output_2-subpath
value: '{{tasks.producer.outputs.parameters.producer-output_2-subpath}}'
- name: producer-Output-1
value: '{{tasks.producer.outputs.parameters.producer-Output-1}}'
- name: producer-Output-2-subpath
value: '{{tasks.producer.outputs.parameters.producer-Output-2-subpath}}'
dependencies:
- producer
- name: producer
@@ -41,8 +41,8 @@ spec:
pipelines.kubeflow.org/component_spec: '{"inputs": [{"name": "Input parameter"}, {"name": "Input artifact"}], "name": "Consumer"}'
inputs:
parameters:
- name: processor-output_1
- name: processor-output_2-subpath
- name: processor-Output-1
- name: processor-Output-2-subpath
container:
image: alpine
command:
@@ -52,30 +52,30 @@ spec:
echo "Input parameter = $0"
echo "Input artifact = " && cat "$1"
args:
- '{{inputs.parameters.processor-output_1}}'
- '{{inputs.parameters.processor-Output-1}}'
- /tmp/inputs/Input_artifact/data
volumeMounts:
- name: data-storage
mountPath: /tmp/inputs/Input_artifact
readOnly: true
subPath: '{{inputs.parameters.processor-output_2-subpath}}'
subPath: '{{inputs.parameters.processor-Output-2-subpath}}'
- name: processor
metadata:
annotations:
pipelines.kubeflow.org/component_spec: '{"inputs": [{"name": "Input parameter"}, {"name": "Input artifact"}], "name": "Processor", "outputs": [{"name": "Output 1"}, {"name": "Output 2"}]}'
inputs:
parameters:
- name: producer-output_1
- name: producer-output_2-subpath
- name: producer-Output-1
- name: producer-Output-2-subpath
outputs:
parameters:
- name: processor-output_1
- name: processor-Output-1
valueFrom:
path: /tmp/outputs/Output_1/data
- name: processor-output_1-subpath
value: artifact_data/{{workflow.uid}}_{{pod.name}}/processor-output_1
- name: processor-output_2-subpath
value: artifact_data/{{workflow.uid}}_{{pod.name}}/processor-output_2
- name: processor-Output-1-subpath
value: artifact_data/{{workflow.uid}}_{{pod.name}}/processor-Output-1
- name: processor-Output-2-subpath
value: artifact_data/{{workflow.uid}}_{{pod.name}}/processor-Output-2
container:
image: alpine
command:
@@ -87,7 +87,7 @@ spec:
echo "$0" > "$2"
cp "$1" "$3"
args:
- '{{inputs.parameters.producer-output_1}}'
- '{{inputs.parameters.producer-Output-1}}'
- /tmp/inputs/Input_artifact/data
- /tmp/outputs/Output_1/data
- /tmp/outputs/Output_2/data
@@ -95,26 +95,26 @@ spec:
- mountPath: /tmp/inputs/Input_artifact
name: data-storage
readOnly: true
subPath: '{{inputs.parameters.producer-output_2-subpath}}'
subPath: '{{inputs.parameters.producer-Output-2-subpath}}'
- mountPath: /tmp/outputs/Output_1
name: data-storage
subPath: artifact_data/{{workflow.uid}}_{{pod.name}}/processor-output_1
subPath: artifact_data/{{workflow.uid}}_{{pod.name}}/processor-Output-1
- mountPath: /tmp/outputs/Output_2
name: data-storage
subPath: artifact_data/{{workflow.uid}}_{{pod.name}}/processor-output_2
subPath: artifact_data/{{workflow.uid}}_{{pod.name}}/processor-Output-2
- name: producer
metadata:
annotations:
pipelines.kubeflow.org/component_spec: '{"name": "Producer", "outputs": [{"name": "Output 1"}, {"name": "Output 2"}]}'
outputs:
parameters:
- name: producer-output_1
- name: producer-Output-1
valueFrom:
path: /tmp/outputs/Output_1/data
- name: producer-output_1-subpath
value: artifact_data/{{workflow.uid}}_{{pod.name}}/producer-output_1
- name: producer-output_2-subpath
value: artifact_data/{{workflow.uid}}_{{pod.name}}/producer-output_2
- name: producer-Output-1-subpath
value: artifact_data/{{workflow.uid}}_{{pod.name}}/producer-Output-1
- name: producer-Output-2-subpath
value: artifact_data/{{workflow.uid}}_{{pod.name}}/producer-Output-2
container:
image: alpine
command:
@@ -131,10 +131,10 @@ spec:
volumeMounts:
- mountPath: /tmp/outputs/Output_1
name: data-storage
subPath: artifact_data/{{workflow.uid}}_{{pod.name}}/producer-output_1
subPath: artifact_data/{{workflow.uid}}_{{pod.name}}/producer-Output-1
- mountPath: /tmp/outputs/Output_2
name: data-storage
subPath: artifact_data/{{workflow.uid}}_{{pod.name}}/producer-output_2
subPath: artifact_data/{{workflow.uid}}_{{pod.name}}/producer-Output-2
volumes:
- name: data-storage
persistentVolumeClaim:
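
The renames above follow the components' declared output names ("Output 1", "Output 2" in the component_spec annotations), which the compiler now sanitizes to processor-Output-1, producer-Output-2-subpath, and so on. For context, volume-based artifact passing of this kind is switched on through the pipeline conf; below is a minimal sketch, assuming the kfp v1 data_passing_methods.KubernetesVolume helper (the argument names and the claim name are assumptions, not taken from this test):

# Sketch only: route artifact passing through a PVC-backed volume instead of
# the artifact store. Assumes kfp v1; KubernetesVolume argument names are assumed.
from kubernetes import client as k8s_client
from kfp import dsl
from kfp.dsl import data_passing_methods

pipeline_conf = dsl.PipelineConf()
pipeline_conf.data_passing_method = data_passing_methods.KubernetesVolume(
    volume=k8s_client.V1Volume(
        name='data-storage',  # matches the data-storage volume in the YAML above
        persistent_volume_claim=k8s_client.V1PersistentVolumeClaimVolumeSource(
            claim_name='data-volume',  # placeholder claim name
        ),
    ),
    path_prefix='artifact_data/',  # matches the subPath prefix in the YAML above
)
# The conf is then handed to the compiler, e.g.:
# kfp.compiler.Compiler().compile(pipeline_func, 'output.yaml', pipeline_conf=pipeline_conf)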

Changed file: ParallelFor compiler test data (compiled workflow YAML)

@@ -14,7 +14,7 @@ spec:
container:
args:
- "--param1"
- "{{inputs.parameters.produce-list-of-strings-output}}"
- "{{inputs.parameters.produce-list-of-strings-Output}}"
command:
- python3
- "-u"
@@ -27,27 +27,13 @@ spec:
_parser = argparse.ArgumentParser(prog='Consume', description='')
_parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS)
_parsed_args = vars(_parser.parse_args())
_output_files = _parsed_args.pop("_output_paths", [])
_outputs = consume(**_parsed_args)
_output_serializers = [
]
import os
for idx, output_file in enumerate(_output_files):
try:
os.makedirs(os.path.dirname(output_file))
except OSError:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
image: "tensorflow/tensorflow:1.13.2-py3"
image: "python:3.7"
inputs:
parameters:
-
name: produce-list-of-strings-output
name: produce-list-of-strings-Output
metadata:
annotations:
pipelines.kubeflow.org/component_spec: "{\"inputs\": [{\"name\": \"param1\"}], \"name\": \"Consume\"}"
@@ -56,7 +42,7 @@ spec:
container:
args:
- "--param1"
- "{{inputs.parameters.produce-list-of-strings-output-loop-item}}"
- "{{inputs.parameters.produce-list-of-strings-Output-loop-item}}"
command:
- python3
- "-u"
@@ -69,27 +55,13 @@ spec:
_parser = argparse.ArgumentParser(prog='Consume', description='')
_parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS)
_parsed_args = vars(_parser.parse_args())
_output_files = _parsed_args.pop("_output_paths", [])
_outputs = consume(**_parsed_args)
_output_serializers = [
]
import os
for idx, output_file in enumerate(_output_files):
try:
os.makedirs(os.path.dirname(output_file))
except OSError:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
image: "tensorflow/tensorflow:1.13.2-py3"
image: "python:3.7"
inputs:
parameters:
-
name: produce-list-of-strings-output-loop-item
name: produce-list-of-strings-Output-loop-item
metadata:
annotations:
pipelines.kubeflow.org/component_spec: "{\"inputs\": [{\"name\": \"param1\"}], \"name\": \"Consume\"}"
@@ -98,7 +70,7 @@ spec:
container:
args:
- "--param1"
- "{{inputs.parameters.produce-str-output}}"
- "{{inputs.parameters.produce-str-Output}}"
command:
- python3
- "-u"
@@ -111,27 +83,13 @@ spec:
_parser = argparse.ArgumentParser(prog='Consume', description='')
_parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS)
_parsed_args = vars(_parser.parse_args())
_output_files = _parsed_args.pop("_output_paths", [])
_outputs = consume(**_parsed_args)
_output_serializers = [
]
import os
for idx, output_file in enumerate(_output_files):
try:
os.makedirs(os.path.dirname(output_file))
except OSError:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
image: "tensorflow/tensorflow:1.13.2-py3"
image: "python:3.7"
inputs:
parameters:
-
name: produce-str-output
name: produce-str-Output
metadata:
annotations:
pipelines.kubeflow.org/component_spec: "{\"inputs\": [{\"name\": \"param1\"}], \"name\": \"Consume\"}"
@@ -140,7 +98,7 @@ spec:
container:
args:
- "--param1"
- "{{inputs.parameters.produce-list-of-ints-output}}"
- "{{inputs.parameters.produce-list-of-ints-Output}}"
command:
- python3
- "-u"
@@ -153,27 +111,13 @@ spec:
_parser = argparse.ArgumentParser(prog='Consume', description='')
_parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS)
_parsed_args = vars(_parser.parse_args())
_output_files = _parsed_args.pop("_output_paths", [])
_outputs = consume(**_parsed_args)
_output_serializers = [
]
import os
for idx, output_file in enumerate(_output_files):
try:
os.makedirs(os.path.dirname(output_file))
except OSError:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
image: "tensorflow/tensorflow:1.13.2-py3"
image: "python:3.7"
inputs:
parameters:
-
name: produce-list-of-ints-output
name: produce-list-of-ints-Output
metadata:
annotations:
pipelines.kubeflow.org/component_spec: "{\"inputs\": [{\"name\": \"param1\"}], \"name\": \"Consume\"}"
@@ -182,7 +126,7 @@ spec:
container:
args:
- "--param1"
- "{{inputs.parameters.produce-list-of-ints-output-loop-item}}"
- "{{inputs.parameters.produce-list-of-ints-Output-loop-item}}"
command:
- python3
- "-u"
@@ -195,27 +139,13 @@ spec:
_parser = argparse.ArgumentParser(prog='Consume', description='')
_parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS)
_parsed_args = vars(_parser.parse_args())
_output_files = _parsed_args.pop("_output_paths", [])
_outputs = consume(**_parsed_args)
_output_serializers = [
]
import os
for idx, output_file in enumerate(_output_files):
try:
os.makedirs(os.path.dirname(output_file))
except OSError:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
image: "tensorflow/tensorflow:1.13.2-py3"
image: "python:3.7"
inputs:
parameters:
-
name: produce-list-of-ints-output-loop-item
name: produce-list-of-ints-Output-loop-item
metadata:
annotations:
pipelines.kubeflow.org/component_spec: "{\"inputs\": [{\"name\": \"param1\"}], \"name\": \"Consume\"}"
@@ -224,7 +154,7 @@ spec:
container:
args:
- "--param1"
- "{{inputs.parameters.produce-list-of-dicts-output}}"
- "{{inputs.parameters.produce-list-of-dicts-Output}}"
command:
- python3
- "-u"
@@ -237,27 +167,13 @@ spec:
_parser = argparse.ArgumentParser(prog='Consume', description='')
_parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS)
_parsed_args = vars(_parser.parse_args())
_output_files = _parsed_args.pop("_output_paths", [])
_outputs = consume(**_parsed_args)
_output_serializers = [
]
import os
for idx, output_file in enumerate(_output_files):
try:
os.makedirs(os.path.dirname(output_file))
except OSError:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
image: "tensorflow/tensorflow:1.13.2-py3"
image: "python:3.7"
inputs:
parameters:
-
name: produce-list-of-dicts-output
name: produce-list-of-dicts-Output
metadata:
annotations:
pipelines.kubeflow.org/component_spec: "{\"inputs\": [{\"name\": \"param1\"}], \"name\": \"Consume\"}"
@@ -266,7 +182,7 @@ spec:
container:
args:
- "--param1"
- "{{inputs.parameters.produce-list-of-dicts-output-loop-item-subvar-aaa}}"
- "{{inputs.parameters.produce-list-of-dicts-Output-loop-item-subvar-aaa}}"
command:
- python3
- "-u"
@@ -279,27 +195,13 @@ spec:
_parser = argparse.ArgumentParser(prog='Consume', description='')
_parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS)
_parsed_args = vars(_parser.parse_args())
_output_files = _parsed_args.pop("_output_paths", [])
_outputs = consume(**_parsed_args)
_output_serializers = [
]
import os
for idx, output_file in enumerate(_output_files):
try:
os.makedirs(os.path.dirname(output_file))
except OSError:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
image: "tensorflow/tensorflow:1.13.2-py3"
image: "python:3.7"
inputs:
parameters:
-
name: produce-list-of-dicts-output-loop-item-subvar-aaa
name: produce-list-of-dicts-Output-loop-item-subvar-aaa
metadata:
annotations:
pipelines.kubeflow.org/component_spec: "{\"inputs\": [{\"name\": \"param1\"}], \"name\": \"Consume\"}"
@@ -311,34 +213,34 @@ spec:
arguments:
parameters:
-
name: produce-list-of-strings-output
value: "{{inputs.parameters.produce-list-of-strings-output}}"
name: produce-list-of-strings-Output
value: "{{inputs.parameters.produce-list-of-strings-Output}}"
name: consume
template: consume
-
arguments:
parameters:
-
name: produce-list-of-strings-output-loop-item
value: "{{inputs.parameters.produce-list-of-strings-output-loop-item}}"
name: produce-list-of-strings-Output-loop-item
value: "{{inputs.parameters.produce-list-of-strings-Output-loop-item}}"
name: consume-2
template: consume-2
-
arguments:
parameters:
-
name: produce-str-output
value: "{{inputs.parameters.produce-str-output}}"
name: produce-str-Output
value: "{{inputs.parameters.produce-str-Output}}"
name: consume-3
template: consume-3
inputs:
parameters:
-
name: produce-list-of-strings-output
name: produce-list-of-strings-Output
-
name: produce-list-of-strings-output-loop-item
name: produce-list-of-strings-Output-loop-item
-
name: produce-str-output
name: produce-str-Output
name: for-loop-for-loop-00000001-1
-
dag:
@@ -347,24 +249,24 @@ spec:
arguments:
parameters:
-
name: produce-list-of-ints-output
value: "{{inputs.parameters.produce-list-of-ints-output}}"
name: produce-list-of-ints-Output
value: "{{inputs.parameters.produce-list-of-ints-Output}}"
name: consume-4
template: consume-4
-
arguments:
parameters:
-
name: produce-list-of-ints-output-loop-item
value: "{{inputs.parameters.produce-list-of-ints-output-loop-item}}"
name: produce-list-of-ints-Output-loop-item
value: "{{inputs.parameters.produce-list-of-ints-Output-loop-item}}"
name: consume-5
template: consume-5
inputs:
parameters:
-
name: produce-list-of-ints-output
name: produce-list-of-ints-Output
-
name: produce-list-of-ints-output-loop-item
name: produce-list-of-ints-Output-loop-item
name: for-loop-for-loop-00000002-2
-
dag:
@@ -373,24 +275,24 @@ spec:
arguments:
parameters:
-
name: produce-list-of-dicts-output
value: "{{inputs.parameters.produce-list-of-dicts-output}}"
name: produce-list-of-dicts-Output
value: "{{inputs.parameters.produce-list-of-dicts-Output}}"
name: consume-6
template: consume-6
-
arguments:
parameters:
-
name: produce-list-of-dicts-output-loop-item-subvar-aaa
value: "{{inputs.parameters.produce-list-of-dicts-output-loop-item-subvar-aaa}}"
name: produce-list-of-dicts-Output-loop-item-subvar-aaa
value: "{{inputs.parameters.produce-list-of-dicts-Output-loop-item-subvar-aaa}}"
name: consume-7
template: consume-7
inputs:
parameters:
-
name: produce-list-of-dicts-output
name: produce-list-of-dicts-Output
-
name: produce-list-of-dicts-output-loop-item-subvar-aaa
name: produce-list-of-dicts-Output-loop-item-subvar-aaa
name: for-loop-for-loop-00000003-3
-
dag:
@@ -399,48 +301,48 @@ spec:
arguments:
parameters:
-
name: produce-list-of-strings-output
value: "{{tasks.produce-list-of-strings.outputs.parameters.produce-list-of-strings-output}}"
name: produce-list-of-strings-Output
value: "{{tasks.produce-list-of-strings.outputs.parameters.produce-list-of-strings-Output}}"
-
name: produce-list-of-strings-output-loop-item
name: produce-list-of-strings-Output-loop-item
value: "{{item}}"
-
name: produce-str-output
value: "{{tasks.produce-str.outputs.parameters.produce-str-output}}"
name: produce-str-Output
value: "{{tasks.produce-str.outputs.parameters.produce-str-Output}}"
dependencies:
- produce-list-of-strings
- produce-str
name: for-loop-for-loop-00000001-1
template: for-loop-for-loop-00000001-1
withParam: "{{tasks.produce-list-of-strings.outputs.parameters.produce-list-of-strings-output}}"
withParam: "{{tasks.produce-list-of-strings.outputs.parameters.produce-list-of-strings-Output}}"
-
arguments:
parameters:
-
name: produce-list-of-ints-output
value: "{{tasks.produce-list-of-ints.outputs.parameters.produce-list-of-ints-output}}"
name: produce-list-of-ints-Output
value: "{{tasks.produce-list-of-ints.outputs.parameters.produce-list-of-ints-Output}}"
-
name: produce-list-of-ints-output-loop-item
name: produce-list-of-ints-Output-loop-item
value: "{{item}}"
dependencies:
- produce-list-of-ints
name: for-loop-for-loop-00000002-2
template: for-loop-for-loop-00000002-2
withParam: "{{tasks.produce-list-of-ints.outputs.parameters.produce-list-of-ints-output}}"
withParam: "{{tasks.produce-list-of-ints.outputs.parameters.produce-list-of-ints-Output}}"
-
arguments:
parameters:
-
name: produce-list-of-dicts-output
value: "{{tasks.produce-list-of-dicts.outputs.parameters.produce-list-of-dicts-output}}"
name: produce-list-of-dicts-Output
value: "{{tasks.produce-list-of-dicts.outputs.parameters.produce-list-of-dicts-Output}}"
-
name: produce-list-of-dicts-output-loop-item-subvar-aaa
name: produce-list-of-dicts-Output-loop-item-subvar-aaa
value: "{{item.aaa}}"
dependencies:
- produce-list-of-dicts
name: for-loop-for-loop-00000003-3
template: for-loop-for-loop-00000003-3
withParam: "{{tasks.produce-list-of-dicts.outputs.parameters.produce-list-of-dicts-output}}"
withParam: "{{tasks.produce-list-of-dicts.outputs.parameters.produce-list-of-dicts-Output}}"
-
name: produce-list-of-dicts
template: produce-list-of-dicts
@@ -464,7 +366,7 @@ spec:
- "-u"
- "-c"
- |
def produce_list_of_dicts() :
def produce_list_of_dicts():
return ([{"aaa": "aaa1", "bbb": "bbb1"}, {"aaa": "aaa2", "bbb": "bbb2"}],)
def _serialize_json(obj) -> str:
@@ -476,7 +378,7 @@ spec:
return obj.to_struct()
else:
raise TypeError("Object of type '%s' is not JSON serializable and does not have .to_struct() method." % obj.__class__.__name__)
return json.dumps(obj, default=default_serializer)
return json.dumps(obj, default=default_serializer, sort_keys=True)
import argparse
_parser = argparse.ArgumentParser(prog='Produce list of dicts', description='')
@@ -501,7 +403,7 @@ spec:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
image: "tensorflow/tensorflow:1.13.2-py3"
image: "python:3.7"
metadata:
annotations:
pipelines.kubeflow.org/component_spec: "{\"name\": \"Produce list of dicts\", \"outputs\": [{\"name\": \"Output\", \"type\": \"JsonArray\"}]}"
@@ -509,11 +411,11 @@ spec:
outputs:
artifacts:
-
name: produce-list-of-dicts-output
name: produce-list-of-dicts-Output
path: /tmp/outputs/Output/data
parameters:
-
name: produce-list-of-dicts-output
name: produce-list-of-dicts-Output
valueFrom:
path: /tmp/outputs/Output/data
-
@@ -526,7 +428,7 @@ spec:
- "-u"
- "-c"
- |
def produce_list_of_ints() :
def produce_list_of_ints():
return ([1234567890, 987654321],)
def _serialize_json(obj) -> str:
@@ -538,7 +440,7 @@ spec:
return obj.to_struct()
else:
raise TypeError("Object of type '%s' is not JSON serializable and does not have .to_struct() method." % obj.__class__.__name__)
return json.dumps(obj, default=default_serializer)
return json.dumps(obj, default=default_serializer, sort_keys=True)
import argparse
_parser = argparse.ArgumentParser(prog='Produce list of ints', description='')
@@ -563,7 +465,7 @@ spec:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
image: "tensorflow/tensorflow:1.13.2-py3"
image: "python:3.7"
metadata:
annotations:
pipelines.kubeflow.org/component_spec: "{\"name\": \"Produce list of ints\", \"outputs\": [{\"name\": \"Output\", \"type\": \"JsonArray\"}]}"
@@ -571,11 +473,11 @@ spec:
outputs:
artifacts:
-
name: produce-list-of-ints-output
name: produce-list-of-ints-Output
path: /tmp/outputs/Output/data
parameters:
-
name: produce-list-of-ints-output
name: produce-list-of-ints-Output
valueFrom:
path: /tmp/outputs/Output/data
-
@@ -588,7 +490,7 @@ spec:
- "-u"
- "-c"
- |
def produce_list_of_strings() :
def produce_list_of_strings():
return (["a", "z"],)
def _serialize_json(obj) -> str:
@@ -600,7 +502,7 @@ spec:
return obj.to_struct()
else:
raise TypeError("Object of type '%s' is not JSON serializable and does not have .to_struct() method." % obj.__class__.__name__)
return json.dumps(obj, default=default_serializer)
return json.dumps(obj, default=default_serializer, sort_keys=True)
import argparse
_parser = argparse.ArgumentParser(prog='Produce list of strings', description='')
@@ -625,7 +527,7 @@ spec:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
image: "tensorflow/tensorflow:1.13.2-py3"
image: "python:3.7"
metadata:
annotations:
pipelines.kubeflow.org/component_spec: "{\"name\": \"Produce list of strings\", \"outputs\": [{\"name\": \"Output\", \"type\": \"JsonArray\"}]}"
@@ -633,11 +535,11 @@ spec:
outputs:
artifacts:
-
name: produce-list-of-strings-output
name: produce-list-of-strings-Output
path: /tmp/outputs/Output/data
parameters:
-
name: produce-list-of-strings-output
name: produce-list-of-strings-Output
valueFrom:
path: /tmp/outputs/Output/data
-
@@ -650,7 +552,7 @@ spec:
- "-u"
- "-c"
- |
def produce_str() :
def produce_str():
return "Hello"
def _serialize_str(str_value: str) -> str:
@@ -681,7 +583,7 @@ spec:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
image: "tensorflow/tensorflow:1.13.2-py3"
image: "python:3.7"
metadata:
annotations:
pipelines.kubeflow.org/component_spec: "{\"name\": \"Produce str\", \"outputs\": [{\"name\": \"Output\", \"type\": \"String\"}]}"
@@ -689,10 +591,10 @@ spec:
outputs:
artifacts:
-
name: produce-str-output
name: produce-str-Output
path: /tmp/outputs/Output/data
parameters:
-
name: produce-str-output
name: produce-str-Output
valueFrom:
path: /tmp/outputs/Output/data
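
For reference, the workflow above is the compiled form of a pipeline roughly like the sketch below: lightweight producer components whose single outputs feed dsl.ParallelFor loops, with consumers reading the whole output, the loop item, and (for the dict case) the item's aaa sub-field. This is a reconstruction from the compiled YAML, assuming the kfp v1 SDK; the exact test source may differ.

# Sketch reconstructed from the compiled YAML above; assumes kfp v1
# (create_component_from_func, dsl.ParallelFor). Not the verbatim test source.
from kfp import dsl, compiler
from kfp.components import create_component_from_func

def produce_str() -> str:
    return "Hello"

def produce_list_of_strings() -> list:
    return ["a", "z"]

def produce_list_of_ints() -> list:
    return [1234567890, 987654321]

def produce_list_of_dicts() -> list:
    return [{"aaa": "aaa1", "bbb": "bbb1"}, {"aaa": "aaa2", "bbb": "bbb2"}]

def consume(param1):
    print(param1)

# base_image='python:3.7' matches the image in the restored test data.
produce_str_op = create_component_from_func(produce_str, base_image='python:3.7')
produce_list_of_strings_op = create_component_from_func(produce_list_of_strings, base_image='python:3.7')
produce_list_of_ints_op = create_component_from_func(produce_list_of_ints, base_image='python:3.7')
produce_list_of_dicts_op = create_component_from_func(produce_list_of_dicts, base_image='python:3.7')
consume_op = create_component_from_func(consume, base_image='python:3.7')

@dsl.pipeline(name='parallelfor-item-argument-resolving')  # placeholder pipeline name
def parallelfor_pipeline():
    produce_str_task = produce_str_op()
    produce_list_of_strings_task = produce_list_of_strings_op()
    produce_list_of_ints_task = produce_list_of_ints_op()
    produce_list_of_dicts_task = produce_list_of_dicts_op()

    with dsl.ParallelFor(produce_list_of_strings_task.output) as item:
        consume_op(produce_list_of_strings_task.output)  # whole list -> consume
        consume_op(item)                                 # loop item  -> consume-2
        consume_op(produce_str_task.output)              # plain str  -> consume-3

    with dsl.ParallelFor(produce_list_of_ints_task.output) as item:
        consume_op(produce_list_of_ints_task.output)     # whole list -> consume-4
        consume_op(item)                                 # loop item  -> consume-5

    with dsl.ParallelFor(produce_list_of_dicts_task.output) as item:
        consume_op(produce_list_of_dicts_task.output)    # whole list -> consume-6
        consume_op(item.aaa)                             # sub-field  -> consume-7

if __name__ == '__main__':
    compiler.Compiler().compile(parallelfor_pipeline, __file__ + '.yaml')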