chore: Rollback telemetry related changes (4088)

* Revert "fix length (#3934)"

This reverts commit 7fbb7cae

* Revert "[SDK] Add first party component label (#3861)"

This reverts commit 1e2b9d4e

* Revert "[SDK] Add pod labels for telemetry purpose. (#3578)"

This reverts commit aa8da64b
This commit is contained in:
Jiaxiao Zheng 2020-06-27 15:46:14 -07:00 committed by GitHub
parent c52a73e52c
commit b099c6f5d3
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
37 changed files with 192 additions and 556 deletions

View File

@ -12,43 +12,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import re
from typing import Callable, Dict, Optional, Text
from ..dsl._container_op import BaseOp, ContainerOp
# Pod label indicating the SDK type from which the pipeline is
# generated. By default it's set to kfp.
_SDK_ENV_LABEL = 'pipelines.kubeflow.org/pipeline-sdk-type'
_SDK_ENV_DEFAULT = 'kfp'
# Common prefix of KFP OOB components url paths.
_OOB_COMPONENT_PATH_PREFIX = 'https://raw.githubusercontent.com/kubeflow/'\
'pipelines'
# Key for component origin path pod label.
COMPONENT_PATH_LABEL_KEY = 'pipelines.kubeflow.org/component_origin_path'
# Key for component spec digest pod label.
COMPONENT_DIGEST_LABEL_KEY = 'pipelines.kubeflow.org/component_digest'
def get_default_telemetry_labels() -> Dict[Text, Text]:
    """Returns the default pod labels attached for telemetry purposes.

    Currently this is only the SDK-type label, marking pods as produced by
    the ``kfp`` SDK.
    """
    return {_SDK_ENV_LABEL: _SDK_ENV_DEFAULT}
def add_pod_env(op: BaseOp) -> BaseOp:
    """Adds KFP pod-identity environment variables to a ContainerOp.

    When the op carries the pod label ``add-pod-env: 'true'``, two env vars
    are injected through the Kubernetes downward API:

    - ``KFP_POD_NAME``: the pod's ``metadata.name``
    - ``KFP_NAMESPACE``: the pod's ``metadata.namespace``

    Args:
      op: The operator to (possibly) augment.

    Returns:
      The same op, mutated in place when applicable.
    """
    # NOTE(review): the source chunk contained two interleaved diff variants
    # of this condition; they are semantically identical, so the concise
    # dict.get form is kept here.
    if isinstance(op, ContainerOp) and op.pod_labels and \
            op.pod_labels.get('add-pod-env', None) == 'true':
        # Imported lazily so the kubernetes client is only required when
        # this feature is actually used.
        from kubernetes import client as k8s_client
        op.container.add_env_variable(
            k8s_client.V1EnvVar(
                name='KFP_POD_NAME',
                value_from=k8s_client.V1EnvVarSource(
                    field_ref=k8s_client.V1ObjectFieldSelector(
                        field_path='metadata.name'
                    )
                )
            )
        ).add_env_variable(
            k8s_client.V1EnvVar(
                name='KFP_NAMESPACE',
                value_from=k8s_client.V1EnvVarSource(
                    field_ref=k8s_client.V1ObjectFieldSelector(
                        field_path='metadata.namespace'
                    )
                )
            )
        )
    return op
def add_pod_labels(labels: Optional[Dict[Text, Text]] = None) -> Callable:
    """Returns an op transformer that attaches the given pod labels.

    Args:
      labels: Mapping of label key to label value to append to each task's
        pod labels. ``None`` is treated as "no labels to add".

    Returns:
      A transformer taking a task (ContainerOp-like object) and returning it
      with the labels appended.
    """
    # Guard: with the original code the default labels=None crashed on
    # labels.items(); treat None as an empty mapping instead.
    labels = labels or {}

    def _add_pod_labels(task):
        for key, value in labels.items():
            # Only append, never overwrite an existing label.
            # This is needed to bypass TFX pipelines/components.
            if key not in task.pod_labels:
                task.add_pod_label(key, value)
        return task

    return _add_pod_labels
def _remove_suffix(string: Text, suffix: Text) -> Text:
"""Removes the suffix from a string."""
if suffix and string.endswith(suffix):
return string[:-len(suffix)]
else:
return string
def add_name_for_oob_components() -> Callable:
    """Returns a transformer that labels tasks built from KFP OOB components.

    The transformer inspects a task's ``_component_ref`` (when present) and,
    for components loaded from the KFP GitHub repository, attaches pod labels
    recording the component's origin path and component-spec digest.
    """
    def _add_name_for_oob_components(task):
        # Detect the component origin uri in component_ref if it exists, and
        # attach the OOB component name as a pod label.
        component_ref = getattr(task, '_component_ref', None)
        if not component_ref:
            return task
        if component_ref.url:
            origin_path = _remove_suffix(
                component_ref.url, 'component.yaml').rstrip('/')
            # Only include KFP OOB components; anything else is left alone.
            if not origin_path.startswith(_OOB_COMPONENT_PATH_PREFIX):
                return task
            origin_path = origin_path.split('/', 7)[-1]
            # Clean the label to comply with the k8s label convention.
            origin_path = re.sub('[^-a-z0-9A-Z_.]', '.', origin_path)
            task.add_pod_label(
                COMPONENT_PATH_LABEL_KEY, origin_path[-63:].strip('-_.'))
        if component_ref.digest:
            # k8s label values max out at 63 characters, so the digest is
            # truncated to its first 63 characters.
            task.add_pod_label(
                COMPONENT_DIGEST_LABEL_KEY, component_ref.digest[:63])
        return task

    return _add_name_for_oob_components
return op

View File

@ -27,7 +27,7 @@ from kfp.dsl import _for_loop
from .. import dsl
from ._k8s_helper import convert_k8s_obj_to_json, sanitize_k8s_name
from ._op_to_template import _op_to_template
from ._default_transformers import add_pod_env, add_pod_labels, add_name_for_oob_components, get_default_telemetry_labels
from ._default_transformers import add_pod_env
from ..components.structures import InputSpec
from ..components._yaml_utils import dump_yaml
@ -768,8 +768,7 @@ class Compiler(object):
pipeline_description: Text=None,
params_list: List[dsl.PipelineParam]=None,
pipeline_conf: dsl.PipelineConf = None,
allow_telemetry: bool = True,
) -> Dict[Text, Any]:
) -> Dict[Text, Any]:
""" Internal implementation of create_workflow."""
params_list = params_list or []
@ -829,14 +828,6 @@ class Compiler(object):
default=param.value) for param in params_list]
op_transformers = [add_pod_env]
# By default adds telemetry instruments. Users can opt out toggling
# allow_telemetry.
# Also, TFX pipelines will be bypassed for pipeline compiled by tfx>0.21.4.
if allow_telemetry:
pod_labels = get_default_telemetry_labels()
op_transformers.append(add_pod_labels(pod_labels))
op_transformers.append(add_name_for_oob_components())
op_transformers.extend(pipeline_conf.op_transformers)
workflow = self._create_pipeline_workflow(
@ -898,14 +889,7 @@ class Compiler(object):
"""Compile the given pipeline function into workflow."""
return self._create_workflow(pipeline_func=pipeline_func, pipeline_conf=pipeline_conf)
def compile(
self,
pipeline_func,
package_path,
type_check=True,
pipeline_conf: dsl.PipelineConf = None,
allow_telemetry: bool = True,
):
def compile(self, pipeline_func, package_path, type_check=True, pipeline_conf: dsl.PipelineConf = None):
"""Compile the given pipeline function into workflow yaml.
Args:
@ -913,9 +897,6 @@ class Compiler(object):
package_path: the output workflow tar.gz file path. for example, "~/a.tar.gz"
type_check: whether to enable the type check or not, default: False.
pipeline_conf: PipelineConf instance. Can specify op transforms, image pull secrets and other pipeline-level configuration options. Overrides any configuration that may be set by the pipeline.
allow_telemetry: If set to true, two pod labels will be attached to k8s
pods spawned by this pipeline: 1) pipeline SDK style, 2) pipeline random
ID.
"""
import kfp
type_check_old_value = kfp.TYPE_CHECK
@ -924,8 +905,7 @@ class Compiler(object):
self._create_and_write_workflow(
pipeline_func=pipeline_func,
pipeline_conf=pipeline_conf,
package_path=package_path,
allow_telemetry=allow_telemetry)
package_path=package_path)
finally:
kfp.TYPE_CHECK = type_check_old_value
@ -972,8 +952,7 @@ class Compiler(object):
pipeline_description: Text=None,
params_list: List[dsl.PipelineParam]=None,
pipeline_conf: dsl.PipelineConf=None,
package_path: Text=None,
allow_telemetry: bool=True
package_path: Text=None
) -> None:
"""Compile the given pipeline function and dump it to specified file format."""
workflow = self._create_workflow(
@ -981,8 +960,7 @@ class Compiler(object):
pipeline_name,
pipeline_description,
params_list,
pipeline_conf,
allow_telemetry)
pipeline_conf)
self._write_workflow(workflow, package_path)
_validate_workflow(workflow)

View File

@ -23,16 +23,6 @@ import sys
import tempfile
from deprecated.sphinx import deprecated
def _str2bool(v):
if isinstance(v, bool):
return v
if v.lower() in ('yes', 'true', 't', 'y', '1'):
return True
elif v.lower() in ('no', 'false', 'f', 'n', '0'):
return False
else:
raise argparse.ArgumentTypeError('Boolean value expected.')
def parse_arguments():
"""Parse command line arguments."""
@ -57,16 +47,12 @@ def parse_arguments():
parser.add_argument('--disable-type-check',
action='store_true',
help='disable the type check, default is enabled.')
parser.add_argument('--disable-telemetry',
action='store_true',
help='disable adding telemetry labels, default is enabled.')
args = parser.parse_args()
return args
def _compile_pipeline_function(
pipeline_funcs, function_name, output_path, type_check, allow_telemetry):
def _compile_pipeline_function(pipeline_funcs, function_name, output_path, type_check):
if len(pipeline_funcs) == 0:
raise ValueError('A function with @dsl.pipeline decorator is required in the py file.')
@ -82,8 +68,7 @@ def _compile_pipeline_function(
else:
pipeline_func = pipeline_funcs[0]
kfp.compiler.Compiler().compile(
pipeline_func, output_path, type_check, allow_telemetry=allow_telemetry)
kfp.compiler.Compiler().compile(pipeline_func, output_path, type_check)
class PipelineCollectorContext():
@ -105,31 +90,26 @@ class PipelineCollectorContext():
Please switch to compiling pipeline files or functions.
If you use this feature please create an issue in https://github.com/kubeflow/pipelines/issues .'''
)
def compile_package(
package_path, namespace, function_name, output_path, type_check, allow_telemetry):
def compile_package(package_path, namespace, function_name, output_path, type_check):
tmpdir = tempfile.mkdtemp()
sys.path.insert(0, tmpdir)
try:
subprocess.check_call(['python3', '-m', 'pip', 'install', package_path, '-t', tmpdir])
with PipelineCollectorContext() as pipeline_funcs:
__import__(namespace)
_compile_pipeline_function(
pipeline_funcs, function_name, output_path, type_check,
allow_telemetry=allow_telemetry)
_compile_pipeline_function(pipeline_funcs, function_name, output_path, type_check)
finally:
del sys.path[0]
shutil.rmtree(tmpdir)
def compile_pyfile(pyfile, function_name, output_path, type_check, allow_telemetry):
def compile_pyfile(pyfile, function_name, output_path, type_check):
sys.path.insert(0, os.path.dirname(pyfile))
try:
filename = os.path.basename(pyfile)
with PipelineCollectorContext() as pipeline_funcs:
__import__(os.path.splitext(filename)[0])
_compile_pipeline_function(
pipeline_funcs, function_name, output_path, type_check,
allow_telemetry=allow_telemetry)
_compile_pipeline_function(pipeline_funcs, function_name, output_path, type_check)
finally:
del sys.path[0]
@ -140,22 +120,9 @@ def main():
(args.py is not None and args.package is not None)):
raise ValueError('Either --py or --package is needed but not both.')
if args.py:
compile_pyfile(
args.py,
args.function,
args.output,
not args.disable_type_check,
not args.disable_telemetry
)
compile_pyfile(args.py, args.function, args.output, not args.disable_type_check)
else:
if args.namespace is None:
raise ValueError('--namespace is required for compiling packages.')
compile_package(
args.package,
args.namespace,
args.function,
args.output,
not args.disable_type_check,
not args.disable_telemetry
)
compile_package(args.package, args.namespace, args.function, args.output, not args.disable_type_check)

View File

@ -26,8 +26,6 @@ import tempfile
import unittest
import yaml
from kfp import components
from kfp.compiler._default_transformers import COMPONENT_DIGEST_LABEL_KEY, COMPONENT_PATH_LABEL_KEY
from kfp.dsl._component import component
from kfp.dsl import ContainerOp, pipeline
from kfp.dsl.types import Integer, InconsistentTypeException
@ -42,11 +40,6 @@ def some_op():
command=['sleep 1'],
)
_TEST_GCS_DOWNLOAD_COMPONENT_URL = 'https://raw.githubusercontent.com/kubeflow/'\
'pipelines/2dac60c400ad8767b452649d08f328df'\
'af230f96/components/google-cloud/storage/'\
'download/component.yaml'
class TestCompiler(unittest.TestCase):
# Define the places of samples covered by unit tests.
@ -718,27 +711,6 @@ implementation:
container = template.get('container', None)
if container:
self.assertEqual(template['retryStrategy']['limit'], 5)
def test_oob_component_label(self):
gcs_download_op = components.load_component_from_url(
_TEST_GCS_DOWNLOAD_COMPONENT_URL)
@dsl.pipeline(name='some_pipeline')
def some_pipeline():
_download_task = gcs_download_op('gs://some_bucket/some_dir/some_file')
workflow_dict = compiler.Compiler()._compile(some_pipeline)
found_download_task = False
for template in workflow_dict['spec']['templates']:
if template.get('container', None):
found_download_task = True
self.assertEqual(
template['metadata']['labels'][COMPONENT_PATH_LABEL_KEY],
'google-cloud.storage.download')
self.assertIsNotNone(
template['metadata']['labels'].get(COMPONENT_DIGEST_LABEL_KEY))
self.assertTrue(found_download_task, 'download task not found in workflow.')
def test_image_pull_policy(self):
def some_op():

View File

@ -28,7 +28,6 @@ spec:
image: library/bash
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
add-pod-env: 'true'
name: echo
- dag:

View File

@ -57,7 +57,6 @@ spec:
- -c
image: python:3.5-jessie
name: exiting
metadata: {'labels': {'pipelines.kubeflow.org/pipeline-sdk-type': 'kfp'}}
- container:
args:
- python -c "from collections import Counter; words = Counter('{{inputs.parameters.message}}'.split());
@ -73,9 +72,6 @@ spec:
parameters:
- name: message
name: get-frequent
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
artifacts:
- name: get-frequent-word
@ -102,8 +98,6 @@ spec:
- name: get-frequent-word
- name: outputpath
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
annotations:
tf-version.cloud-tpus.google.com: "1.12"
name: save

View File

@ -58,9 +58,6 @@ spec:
- -c
image: python:3.5-jessie
name: exiting
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
- container:
args:
- python -c "from collections import Counter; words = Counter('{{inputs.parameters.message}}'.split());
@ -76,9 +73,6 @@ spec:
parameters:
- name: message
name: get-frequent
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
artifacts:
- name: get-frequent-word
@ -105,8 +99,6 @@ spec:
- name: get-frequent-word
- name: outputpath
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
annotations:
tf-version.cloud-tpus.google.com: "1.12"
name: save

View File

@ -75,9 +75,6 @@ spec:
- -c
image: python:alpine3.6
name: flip
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
artifacts:
- name: flip-output
@ -95,9 +92,6 @@ spec:
- -c
image: python:alpine3.6
name: flip-again
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
artifacts:
- name: flip-again-output
@ -135,9 +129,6 @@ spec:
parameters:
- name: flip-again-output
name: print1
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
- container:
command:
- echo
@ -147,6 +138,3 @@ spec:
parameters:
- name: flip-again-output
name: print2
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp

View File

@ -36,9 +36,6 @@ spec:
parameters:
- name: url
name: download
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
artifacts:
- name: download-downloaded
@ -90,9 +87,6 @@ spec:
parameters:
- name: download-downloaded
name: get-frequent
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
artifacts:
- name: get-frequent-word
@ -114,6 +108,3 @@ spec:
- name: get-frequent-word
- name: outputpath
name: save
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp

View File

@ -56,9 +56,6 @@ spec:
parameters:
- name: url
name: download
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
artifacts:
- name: download-downloaded
@ -78,6 +75,3 @@ spec:
parameters:
- name: download-downloaded
name: echo
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp

View File

@ -25,9 +25,6 @@ spec:
parameters:
- name: message
name: get-frequent
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
artifacts:
- name: get-frequent-word

View File

@ -22,9 +22,6 @@ spec:
raw:
data: Constant artifact value
name: component-with-inline-input-artifact
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
- container:
command:
- cat
@ -37,9 +34,6 @@ spec:
raw:
data: Constant artifact value
name: component-with-input-artifact
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
- container:
command:
- cat
@ -52,9 +46,6 @@ spec:
raw:
data: hard-coded artifact value
name: component-with-input-artifact-2
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
- container:
command:
- cat
@ -67,9 +58,6 @@ spec:
raw:
data: Text from a file with hard-coded artifact value
name: component-with-input-artifact-3
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
- dag:
tasks:
- name: component-with-inline-input-artifact

View File

@ -44,8 +44,6 @@
- "name": |-
produce-list-data_list-loop-item
"metadata":
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
"annotations":
"pipelines.kubeflow.org/component_spec": |-
{"inputs": [{"name": "data"}], "name": "Consume data"}
@ -106,8 +104,6 @@
"image": |-
busybox
"metadata":
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
"annotations":
"pipelines.kubeflow.org/component_spec": |-
{"name": "Produce list", "outputs": [{"name": "data_list"}]}

View File

@ -14,7 +14,7 @@ spec:
container:
args:
- "--param1"
- "{{inputs.parameters.produce-list-of-strings-Output}}"
- "{{inputs.parameters.produce-list-of-strings-output}}"
command:
- python3
- "-u"
@ -27,24 +27,36 @@ spec:
_parser = argparse.ArgumentParser(prog='Consume', description='')
_parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS)
_parsed_args = vars(_parser.parse_args())
_output_files = _parsed_args.pop("_output_paths", [])
_outputs = consume(**_parsed_args)
image: "python:3.7"
_output_serializers = [
]
import os
for idx, output_file in enumerate(_output_files):
try:
os.makedirs(os.path.dirname(output_file))
except OSError:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
image: "tensorflow/tensorflow:1.13.2-py3"
inputs:
parameters:
-
name: produce-list-of-strings-Output
name: produce-list-of-strings-output
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
annotations:
annotations:
pipelines.kubeflow.org/component_spec: "{\"inputs\": [{\"name\": \"param1\"}], \"name\": \"Consume\"}"
name: consume
-
container:
args:
- "--param1"
- "{{inputs.parameters.produce-list-of-strings-Output-loop-item}}"
- "{{inputs.parameters.produce-list-of-strings-output-loop-item}}"
command:
- python3
- "-u"
@ -57,16 +69,28 @@ spec:
_parser = argparse.ArgumentParser(prog='Consume', description='')
_parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS)
_parsed_args = vars(_parser.parse_args())
_output_files = _parsed_args.pop("_output_paths", [])
_outputs = consume(**_parsed_args)
image: "python:3.7"
_output_serializers = [
]
import os
for idx, output_file in enumerate(_output_files):
try:
os.makedirs(os.path.dirname(output_file))
except OSError:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
image: "tensorflow/tensorflow:1.13.2-py3"
inputs:
parameters:
-
name: produce-list-of-strings-Output-loop-item
name: produce-list-of-strings-output-loop-item
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
annotations:
pipelines.kubeflow.org/component_spec: "{\"inputs\": [{\"name\": \"param1\"}], \"name\": \"Consume\"}"
name: consume-2
@ -74,7 +98,7 @@ spec:
container:
args:
- "--param1"
- "{{inputs.parameters.produce-str-Output}}"
- "{{inputs.parameters.produce-str-output}}"
command:
- python3
- "-u"
@ -87,16 +111,28 @@ spec:
_parser = argparse.ArgumentParser(prog='Consume', description='')
_parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS)
_parsed_args = vars(_parser.parse_args())
_output_files = _parsed_args.pop("_output_paths", [])
_outputs = consume(**_parsed_args)
image: "python:3.7"
_output_serializers = [
]
import os
for idx, output_file in enumerate(_output_files):
try:
os.makedirs(os.path.dirname(output_file))
except OSError:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
image: "tensorflow/tensorflow:1.13.2-py3"
inputs:
parameters:
-
name: produce-str-Output
name: produce-str-output
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
annotations:
pipelines.kubeflow.org/component_spec: "{\"inputs\": [{\"name\": \"param1\"}], \"name\": \"Consume\"}"
name: consume-3
@ -104,7 +140,7 @@ spec:
container:
args:
- "--param1"
- "{{inputs.parameters.produce-list-of-ints-Output}}"
- "{{inputs.parameters.produce-list-of-ints-output}}"
command:
- python3
- "-u"
@ -117,16 +153,28 @@ spec:
_parser = argparse.ArgumentParser(prog='Consume', description='')
_parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS)
_parsed_args = vars(_parser.parse_args())
_output_files = _parsed_args.pop("_output_paths", [])
_outputs = consume(**_parsed_args)
image: "python:3.7"
_output_serializers = [
]
import os
for idx, output_file in enumerate(_output_files):
try:
os.makedirs(os.path.dirname(output_file))
except OSError:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
image: "tensorflow/tensorflow:1.13.2-py3"
inputs:
parameters:
-
name: produce-list-of-ints-Output
name: produce-list-of-ints-output
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
annotations:
pipelines.kubeflow.org/component_spec: "{\"inputs\": [{\"name\": \"param1\"}], \"name\": \"Consume\"}"
name: consume-4
@ -134,7 +182,7 @@ spec:
container:
args:
- "--param1"
- "{{inputs.parameters.produce-list-of-ints-Output-loop-item}}"
- "{{inputs.parameters.produce-list-of-ints-output-loop-item}}"
command:
- python3
- "-u"
@ -147,16 +195,28 @@ spec:
_parser = argparse.ArgumentParser(prog='Consume', description='')
_parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS)
_parsed_args = vars(_parser.parse_args())
_output_files = _parsed_args.pop("_output_paths", [])
_outputs = consume(**_parsed_args)
image: "python:3.7"
_output_serializers = [
]
import os
for idx, output_file in enumerate(_output_files):
try:
os.makedirs(os.path.dirname(output_file))
except OSError:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
image: "tensorflow/tensorflow:1.13.2-py3"
inputs:
parameters:
-
name: produce-list-of-ints-Output-loop-item
name: produce-list-of-ints-output-loop-item
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
annotations:
pipelines.kubeflow.org/component_spec: "{\"inputs\": [{\"name\": \"param1\"}], \"name\": \"Consume\"}"
name: consume-5
@ -164,7 +224,7 @@ spec:
container:
args:
- "--param1"
- "{{inputs.parameters.produce-list-of-dicts-Output}}"
- "{{inputs.parameters.produce-list-of-dicts-output}}"
command:
- python3
- "-u"
@ -177,16 +237,28 @@ spec:
_parser = argparse.ArgumentParser(prog='Consume', description='')
_parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS)
_parsed_args = vars(_parser.parse_args())
_output_files = _parsed_args.pop("_output_paths", [])
_outputs = consume(**_parsed_args)
image: "python:3.7"
_output_serializers = [
]
import os
for idx, output_file in enumerate(_output_files):
try:
os.makedirs(os.path.dirname(output_file))
except OSError:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
image: "tensorflow/tensorflow:1.13.2-py3"
inputs:
parameters:
-
name: produce-list-of-dicts-Output
name: produce-list-of-dicts-output
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
annotations:
pipelines.kubeflow.org/component_spec: "{\"inputs\": [{\"name\": \"param1\"}], \"name\": \"Consume\"}"
name: consume-6
@ -194,7 +266,7 @@ spec:
container:
args:
- "--param1"
- "{{inputs.parameters.produce-list-of-dicts-Output-loop-item-subvar-aaa}}"
- "{{inputs.parameters.produce-list-of-dicts-output-loop-item-subvar-aaa}}"
command:
- python3
- "-u"
@ -207,16 +279,28 @@ spec:
_parser = argparse.ArgumentParser(prog='Consume', description='')
_parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS)
_parsed_args = vars(_parser.parse_args())
_output_files = _parsed_args.pop("_output_paths", [])
_outputs = consume(**_parsed_args)
image: "python:3.7"
_output_serializers = [
]
import os
for idx, output_file in enumerate(_output_files):
try:
os.makedirs(os.path.dirname(output_file))
except OSError:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
image: "tensorflow/tensorflow:1.13.2-py3"
inputs:
parameters:
-
name: produce-list-of-dicts-Output-loop-item-subvar-aaa
name: produce-list-of-dicts-output-loop-item-subvar-aaa
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
annotations:
pipelines.kubeflow.org/component_spec: "{\"inputs\": [{\"name\": \"param1\"}], \"name\": \"Consume\"}"
name: consume-7
@ -227,34 +311,34 @@ spec:
arguments:
parameters:
-
name: produce-list-of-strings-Output
value: "{{inputs.parameters.produce-list-of-strings-Output}}"
name: produce-list-of-strings-output
value: "{{inputs.parameters.produce-list-of-strings-output}}"
name: consume
template: consume
-
arguments:
parameters:
-
name: produce-list-of-strings-Output-loop-item
value: "{{inputs.parameters.produce-list-of-strings-Output-loop-item}}"
name: produce-list-of-strings-output-loop-item
value: "{{inputs.parameters.produce-list-of-strings-output-loop-item}}"
name: consume-2
template: consume-2
-
arguments:
parameters:
-
name: produce-str-Output
value: "{{inputs.parameters.produce-str-Output}}"
name: produce-str-output
value: "{{inputs.parameters.produce-str-output}}"
name: consume-3
template: consume-3
inputs:
parameters:
-
name: produce-list-of-strings-Output
name: produce-list-of-strings-output
-
name: produce-list-of-strings-Output-loop-item
name: produce-list-of-strings-output-loop-item
-
name: produce-str-Output
name: produce-str-output
name: for-loop-for-loop-00000001-1
-
dag:
@ -263,24 +347,24 @@ spec:
arguments:
parameters:
-
name: produce-list-of-ints-Output
value: "{{inputs.parameters.produce-list-of-ints-Output}}"
name: produce-list-of-ints-output
value: "{{inputs.parameters.produce-list-of-ints-output}}"
name: consume-4
template: consume-4
-
arguments:
parameters:
-
name: produce-list-of-ints-Output-loop-item
value: "{{inputs.parameters.produce-list-of-ints-Output-loop-item}}"
name: produce-list-of-ints-output-loop-item
value: "{{inputs.parameters.produce-list-of-ints-output-loop-item}}"
name: consume-5
template: consume-5
inputs:
parameters:
-
name: produce-list-of-ints-Output
name: produce-list-of-ints-output
-
name: produce-list-of-ints-Output-loop-item
name: produce-list-of-ints-output-loop-item
name: for-loop-for-loop-00000002-2
-
dag:
@ -289,24 +373,24 @@ spec:
arguments:
parameters:
-
name: produce-list-of-dicts-Output
value: "{{inputs.parameters.produce-list-of-dicts-Output}}"
name: produce-list-of-dicts-output
value: "{{inputs.parameters.produce-list-of-dicts-output}}"
name: consume-6
template: consume-6
-
arguments:
parameters:
-
name: produce-list-of-dicts-Output-loop-item-subvar-aaa
value: "{{inputs.parameters.produce-list-of-dicts-Output-loop-item-subvar-aaa}}"
name: produce-list-of-dicts-output-loop-item-subvar-aaa
value: "{{inputs.parameters.produce-list-of-dicts-output-loop-item-subvar-aaa}}"
name: consume-7
template: consume-7
inputs:
parameters:
-
name: produce-list-of-dicts-Output
name: produce-list-of-dicts-output
-
name: produce-list-of-dicts-Output-loop-item-subvar-aaa
name: produce-list-of-dicts-output-loop-item-subvar-aaa
name: for-loop-for-loop-00000003-3
-
dag:
@ -315,48 +399,48 @@ spec:
arguments:
parameters:
-
name: produce-list-of-strings-Output
value: "{{tasks.produce-list-of-strings.outputs.parameters.produce-list-of-strings-Output}}"
name: produce-list-of-strings-output
value: "{{tasks.produce-list-of-strings.outputs.parameters.produce-list-of-strings-output}}"
-
name: produce-list-of-strings-Output-loop-item
name: produce-list-of-strings-output-loop-item
value: "{{item}}"
-
name: produce-str-Output
value: "{{tasks.produce-str.outputs.parameters.produce-str-Output}}"
name: produce-str-output
value: "{{tasks.produce-str.outputs.parameters.produce-str-output}}"
dependencies:
- produce-list-of-strings
- produce-str
name: for-loop-for-loop-00000001-1
template: for-loop-for-loop-00000001-1
withParam: "{{tasks.produce-list-of-strings.outputs.parameters.produce-list-of-strings-Output}}"
withParam: "{{tasks.produce-list-of-strings.outputs.parameters.produce-list-of-strings-output}}"
-
arguments:
parameters:
-
name: produce-list-of-ints-Output
value: "{{tasks.produce-list-of-ints.outputs.parameters.produce-list-of-ints-Output}}"
name: produce-list-of-ints-output
value: "{{tasks.produce-list-of-ints.outputs.parameters.produce-list-of-ints-output}}"
-
name: produce-list-of-ints-Output-loop-item
name: produce-list-of-ints-output-loop-item
value: "{{item}}"
dependencies:
- produce-list-of-ints
name: for-loop-for-loop-00000002-2
template: for-loop-for-loop-00000002-2
withParam: "{{tasks.produce-list-of-ints.outputs.parameters.produce-list-of-ints-Output}}"
withParam: "{{tasks.produce-list-of-ints.outputs.parameters.produce-list-of-ints-output}}"
-
arguments:
parameters:
-
name: produce-list-of-dicts-Output
value: "{{tasks.produce-list-of-dicts.outputs.parameters.produce-list-of-dicts-Output}}"
name: produce-list-of-dicts-output
value: "{{tasks.produce-list-of-dicts.outputs.parameters.produce-list-of-dicts-output}}"
-
name: produce-list-of-dicts-Output-loop-item-subvar-aaa
name: produce-list-of-dicts-output-loop-item-subvar-aaa
value: "{{item.aaa}}"
dependencies:
- produce-list-of-dicts
name: for-loop-for-loop-00000003-3
template: for-loop-for-loop-00000003-3
withParam: "{{tasks.produce-list-of-dicts.outputs.parameters.produce-list-of-dicts-Output}}"
withParam: "{{tasks.produce-list-of-dicts.outputs.parameters.produce-list-of-dicts-output}}"
-
name: produce-list-of-dicts
template: produce-list-of-dicts
@ -380,7 +464,7 @@ spec:
- "-u"
- "-c"
- |
def produce_list_of_dicts():
def produce_list_of_dicts() :
return ([{"aaa": "aaa1", "bbb": "bbb1"}, {"aaa": "aaa2", "bbb": "bbb2"}],)
def _serialize_json(obj) -> str:
@ -392,7 +476,7 @@ spec:
return obj.to_struct()
else:
raise TypeError("Object of type '%s' is not JSON serializable and does not have .to_struct() method." % obj.__class__.__name__)
return json.dumps(obj, default=default_serializer, sort_keys=True)
return json.dumps(obj, default=default_serializer)
import argparse
_parser = argparse.ArgumentParser(prog='Produce list of dicts', description='')
@ -417,21 +501,19 @@ spec:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
image: "python:3.7"
image: "tensorflow/tensorflow:1.13.2-py3"
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
annotations:
pipelines.kubeflow.org/component_spec: "{\"name\": \"Produce list of dicts\", \"outputs\": [{\"name\": \"Output\", \"type\": \"JsonArray\"}]}"
name: produce-list-of-dicts
outputs:
artifacts:
-
name: produce-list-of-dicts-Output
name: produce-list-of-dicts-output
path: /tmp/outputs/Output/data
parameters:
-
name: produce-list-of-dicts-Output
name: produce-list-of-dicts-output
valueFrom:
path: /tmp/outputs/Output/data
-
@ -444,7 +526,7 @@ spec:
- "-u"
- "-c"
- |
def produce_list_of_ints():
def produce_list_of_ints() :
return ([1234567890, 987654321],)
def _serialize_json(obj) -> str:
@ -456,7 +538,7 @@ spec:
return obj.to_struct()
else:
raise TypeError("Object of type '%s' is not JSON serializable and does not have .to_struct() method." % obj.__class__.__name__)
return json.dumps(obj, default=default_serializer, sort_keys=True)
return json.dumps(obj, default=default_serializer)
import argparse
_parser = argparse.ArgumentParser(prog='Produce list of ints', description='')
@ -481,21 +563,19 @@ spec:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
image: "python:3.7"
image: "tensorflow/tensorflow:1.13.2-py3"
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
annotations:
pipelines.kubeflow.org/component_spec: "{\"name\": \"Produce list of ints\", \"outputs\": [{\"name\": \"Output\", \"type\": \"JsonArray\"}]}"
name: produce-list-of-ints
outputs:
artifacts:
-
name: produce-list-of-ints-Output
name: produce-list-of-ints-output
path: /tmp/outputs/Output/data
parameters:
-
name: produce-list-of-ints-Output
name: produce-list-of-ints-output
valueFrom:
path: /tmp/outputs/Output/data
-
@ -508,7 +588,7 @@ spec:
- "-u"
- "-c"
- |
def produce_list_of_strings():
def produce_list_of_strings() :
return (["a", "z"],)
def _serialize_json(obj) -> str:
@ -520,7 +600,7 @@ spec:
return obj.to_struct()
else:
raise TypeError("Object of type '%s' is not JSON serializable and does not have .to_struct() method." % obj.__class__.__name__)
return json.dumps(obj, default=default_serializer, sort_keys=True)
return json.dumps(obj, default=default_serializer)
import argparse
_parser = argparse.ArgumentParser(prog='Produce list of strings', description='')
@ -545,21 +625,19 @@ spec:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
image: "python:3.7"
image: "tensorflow/tensorflow:1.13.2-py3"
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
annotations:
pipelines.kubeflow.org/component_spec: "{\"name\": \"Produce list of strings\", \"outputs\": [{\"name\": \"Output\", \"type\": \"JsonArray\"}]}"
name: produce-list-of-strings
outputs:
artifacts:
-
name: produce-list-of-strings-Output
name: produce-list-of-strings-output
path: /tmp/outputs/Output/data
parameters:
-
name: produce-list-of-strings-Output
name: produce-list-of-strings-output
valueFrom:
path: /tmp/outputs/Output/data
-
@ -572,7 +650,7 @@ spec:
- "-u"
- "-c"
- |
def produce_str():
def produce_str() :
return "Hello"
def _serialize_str(str_value: str) -> str:
@ -603,20 +681,18 @@ spec:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
image: "python:3.7"
image: "tensorflow/tensorflow:1.13.2-py3"
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
annotations:
pipelines.kubeflow.org/component_spec: "{\"name\": \"Produce str\", \"outputs\": [{\"name\": \"Output\", \"type\": \"String\"}]}"
name: produce-str
outputs:
artifacts:
-
name: produce-str-Output
name: produce-str-output
path: /tmp/outputs/Output/data
parameters:
-
name: produce-str-Output
name: produce-str-output
valueFrom:
path: /tmp/outputs/Output/data

View File

@ -21,7 +21,6 @@ spec:
- name: param
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
param: '{{inputs.parameters.param}}'
name: cop
- dag:

View File

@ -23,17 +23,11 @@ spec:
parameters:
- name: create-volume-name
name: cop
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
volumes:
- name: create-volume
persistentVolumeClaim:
claimName: '{{inputs.parameters.create-volume-name}}'
- name: create-volume
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
parameters:
- name: create-volume-manifest

View File

@ -29,9 +29,6 @@ spec:
value: '10'
templates:
- name: download
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
inputs:
parameters:
- name: sleep_ms
@ -57,9 +54,6 @@ spec:
args:
- -text="hello world"
- name: echo
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
inputs:
parameters:
- name: download-downloaded

View File

@ -32,9 +32,6 @@ spec:
limits:
nvidia.com/gpu: 1
name: flip
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
artifacts:
- name: flip-output

View File

@ -32,9 +32,6 @@ spec:
- -c
image: python:alpine3.6
name: flip
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
artifacts:
- name: flip-output
@ -52,9 +49,6 @@ spec:
- -c
image: python:alpine3.6
name: flip-2
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
artifacts:
- name: flip-2-output
@ -68,9 +62,6 @@ spec:
- -c
image: python:alpine3.6
name: flip-3
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
artifacts:
- name: flip-3-output
@ -138,9 +129,6 @@ spec:
parameters:
- name: flip-output
name: print
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
- container:
command:
- echo
@ -150,6 +138,3 @@ spec:
parameters:
- name: flip-output
name: print-2
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp

View File

@ -46,9 +46,6 @@ spec:
- -c
image: python:alpine3.6
name: flip
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
artifacts:
- name: flip-output
@ -66,9 +63,6 @@ spec:
- -c
image: python:alpine3.6
name: flip-2
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
artifacts:
- name: flip-2-output
@ -82,9 +76,6 @@ spec:
- -c
image: python:alpine3.6
name: flip-3
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
artifacts:
- name: flip-3-output
@ -140,9 +131,6 @@ spec:
parameters:
- name: flip-output
name: print
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
- container:
command:
- echo
@ -152,6 +140,3 @@ spec:
parameters:
- name: flip-output
name: print-2
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp

View File

@ -27,9 +27,6 @@ spec:
parameters:
- name: create-my-secret-name
name: cop
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
volumes:
- name: my-secret
secret:
@ -39,9 +36,6 @@ spec:
- name: password
- name: username
name: create-my-secret
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
parameters:
- name: create-my-secret-manifest

View File

@ -43,13 +43,7 @@ spec:
command:
- sh
- "-c"
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
- name: echo
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
inputs:
parameters:
- name: download-downloaded

View File

@ -30,9 +30,6 @@ spec:
- -c
image: python:alpine3.6
name: random-failure
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
- container:
args:
- import random; import sys; exit_code = random.choice([0,1]); print(exit_code);
@ -42,6 +39,3 @@ spec:
- -c
image: python:alpine3.6
name: random-failure-2
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp

View File

@ -40,9 +40,6 @@ spec:
- mountPath: /secret/gcp-credentials
name: gcp-credentials
name: download
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
artifacts:
- name: download-downloaded
@ -66,9 +63,6 @@ spec:
parameters:
- name: download-downloaded
name: echo
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
- dag:
tasks:
- name: download

View File

@ -16,9 +16,6 @@ spec:
parameters:
- name: create-volume-1-name
name: create-snapshot-1
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
parameters:
- name: create-snapshot-1-manifest
@ -40,9 +37,6 @@ spec:
parameters:
- name: create-volume-2-name
name: create-snapshot-2
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
parameters:
- name: create-snapshot-2-manifest
@ -64,9 +58,6 @@ spec:
parameters:
- name: rok_url
name: create-volume-1
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
parameters:
- name: create-volume-1-manifest
@ -89,9 +80,6 @@ spec:
- name: create-snapshot-1-name
- name: create-snapshot-1-size
name: create-volume-2
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
parameters:
- name: create-volume-2-manifest
@ -114,9 +102,6 @@ spec:
- name: create-snapshot-2-name
- name: create-snapshot-2-size
name: create-volume-3
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
parameters:
- name: create-volume-3-manifest
@ -148,9 +133,6 @@ spec:
parameters:
- name: create-volume-1-name
name: step1-concat
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
volumes:
- name: create-volume-1
persistentVolumeClaim:
@ -168,9 +150,6 @@ spec:
parameters:
- name: create-volume-2-name
name: step2-gunzip
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
volumes:
- name: create-volume-2
persistentVolumeClaim:
@ -187,9 +166,6 @@ spec:
parameters:
- name: create-volume-3-name
name: step3-output
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
volumes:
- name: create-volume-3
persistentVolumeClaim:

View File

@ -13,9 +13,6 @@ spec:
serviceAccountName: pipeline-runner
templates:
- name: create-volume
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
parameters:
- name: create-volume-manifest
@ -47,9 +44,6 @@ spec:
- name: create-volume-name
- name: url
name: step1-ingest
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
volumes:
- name: create-volume
persistentVolumeClaim:
@ -58,9 +52,6 @@ spec:
parameters:
- name: create-volume-name
name: step1-snap
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
parameters:
- name: step1-snap-manifest
@ -92,9 +83,6 @@ spec:
parameters:
- name: create-volume-name
name: step2-gunzip
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
volumes:
- name: create-volume
persistentVolumeClaim:
@ -103,9 +91,6 @@ spec:
parameters:
- name: create-volume-name
name: step2-snap
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
parameters:
- name: step2-snap-manifest
@ -137,9 +122,6 @@ spec:
parameters:
- name: create-volume-name
name: step3-copy
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
volumes:
- name: create-volume
persistentVolumeClaim:
@ -148,9 +130,6 @@ spec:
parameters:
- name: create-volume-name
name: step3-snap
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
parameters:
- name: step3-snap-manifest
@ -181,9 +160,6 @@ spec:
parameters:
- name: create-volume-name
name: step4-output
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
volumes:
- name: create-volume
persistentVolumeClaim:

View File

@ -26,9 +26,6 @@ spec:
parameters:
- name: create-pvc-name
name: cop
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
volumes:
- name: create-pvc
persistentVolumeClaim:
@ -37,9 +34,6 @@ spec:
parameters:
- name: size
name: create-pvc
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
parameters:
- name: create-pvc-manifest

View File

@ -12,9 +12,6 @@ spec:
serviceAccountName: pipeline-runner
templates:
- name: create-pvc
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
parameters:
- name: create-pvc-manifest
@ -45,9 +42,6 @@ spec:
parameters:
- name: create-pvc-name
name: step1
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
volumes:
- name: create-pvc
persistentVolumeClaim:
@ -66,9 +60,6 @@ spec:
parameters:
- name: create-pvc-name
name: step2
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
volumes:
- name: create-pvc
persistentVolumeClaim:
@ -87,9 +78,6 @@ spec:
parameters:
- name: create-pvc-name
name: step3
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
volumes:
- name: create-pvc
persistentVolumeClaim:

View File

@ -12,9 +12,6 @@ spec:
serviceAccountName: pipeline-runner
templates:
- name: create-pvc
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
parameters:
- name: create-pvc-manifest
@ -45,9 +42,6 @@ spec:
parameters:
- name: create-pvc-name
name: step1
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
volumes:
- name: create-pvc
persistentVolumeClaim:
@ -66,9 +60,6 @@ spec:
parameters:
- name: create-pvc-name
name: step2
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
volumes:
- name: create-pvc
persistentVolumeClaim:
@ -87,9 +78,6 @@ spec:
parameters:
- name: create-pvc-name
name: step3
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
volumes:
- name: create-pvc
persistentVolumeClaim:

View File

@ -12,9 +12,6 @@ spec:
serviceAccountName: pipeline-runner
templates:
- name: mypvc
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
parameters:
- name: mypvc-manifest
@ -45,9 +42,6 @@ spec:
parameters:
- name: mypvc-name
name: step1
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
volumes:
- name: mypvc
persistentVolumeClaim:
@ -66,9 +60,6 @@ spec:
parameters:
- name: mypvc-name
name: step2
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
volumes:
- name: mypvc
persistentVolumeClaim:
@ -86,9 +77,6 @@ spec:
parameters:
- name: mypvc-name
name: step3
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
volumes:
- name: mypvc
persistentVolumeClaim:

View File

@ -47,9 +47,6 @@ spec:
- name: loop-item-param-00000001-subvar-a
- name: my_pipe_param
name: my-in-coop1
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
- container:
args:
- echo op2 {{inputs.parameters.loop-item-param-00000001-subvar-b}}
@ -61,9 +58,6 @@ spec:
parameters:
- name: loop-item-param-00000001-subvar-b
name: my-in-coop2
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
- container:
args:
- echo {{inputs.parameters.my_pipe_param}}
@ -75,9 +69,6 @@ spec:
parameters:
- name: my_pipe_param
name: my-out-cop
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
- dag:
tasks:
- arguments:

View File

@ -79,9 +79,6 @@ spec:
- name: loop-item-param-00000001-subvar-a
- name: my_pipe_param
name: my-in-coop1
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
- container:
args:
- echo op2 {{inputs.parameters.loop-item-param-00000001-subvar-b}}
@ -93,9 +90,6 @@ spec:
parameters:
- name: loop-item-param-00000001-subvar-b
name: my-in-coop2
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
- container:
args:
- echo op1 {{inputs.parameters.loop-item-param-00000001-subvar-a}} {{inputs.parameters.loop-item-param-00000002}}
@ -110,9 +104,6 @@ spec:
- name: loop-item-param-00000002
- name: my_pipe_param
name: my-inner-inner-coop
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
- container:
args:
- echo {{inputs.parameters.my_pipe_param}}
@ -124,9 +115,6 @@ spec:
parameters:
- name: my_pipe_param
name: my-out-cop
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
- dag:
tasks:
- arguments:

View File

@ -36,9 +36,6 @@ spec:
parameters:
- name: loopidy_doop-loop-item
name: my-in-cop1
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
- container:
args:
- python -c "import json; import sys; json.dump([i for i in range(20, 31)],
@ -48,9 +45,6 @@ spec:
- -c
image: python:alpine3.6
name: my-out-cop0
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
artifacts:
- name: my-out-cop0-out
@ -70,9 +64,6 @@ spec:
parameters:
- name: my-out-cop0-out
name: my-out-cop2
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
- dag:
tasks:
- arguments:

View File

@ -36,9 +36,6 @@ spec:
parameters:
- name: loopidy_doop-loop-item-subvar-a
name: my-in-cop1
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
- container:
args:
- python -c "import json; import sys; json.dump([i for i in range(20, 31)],
@ -48,9 +45,6 @@ spec:
- -c
image: python:alpine3.6
name: my-out-cop0
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
artifacts:
- name: my-out-cop0-out
@ -70,9 +64,6 @@ spec:
parameters:
- name: my-out-cop0-out
name: my-out-cop2
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
- dag:
tasks:
- arguments:

View File

@ -33,9 +33,6 @@ spec:
parameters:
- name: my-out-cop0-out-loop-item
name: my-in-cop1
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
- container:
args:
- python -c "import json; import sys; json.dump([i for i in range(20, 31)],
@ -45,9 +42,6 @@ spec:
- -c
image: python:alpine3.6
name: my-out-cop0
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
artifacts:
- name: my-out-cop0-out
@ -67,9 +61,6 @@ spec:
parameters:
- name: my-out-cop0-out
name: my-out-cop2
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
- dag:
tasks:
- arguments:

View File

@ -33,9 +33,6 @@ spec:
parameters:
- name: my-out-cop0-out-loop-item-subvar-a
name: my-in-cop1
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
- container:
args:
- 'python -c "import json; import sys; json.dump([{''a'': 1, ''b'': 2}, {''a'':
@ -45,9 +42,6 @@ spec:
- -c
image: python:alpine3.6
name: my-out-cop0
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
outputs:
artifacts:
- name: my-out-cop0-out
@ -67,9 +61,6 @@ spec:
parameters:
- name: my-out-cop0-out
name: my-out-cop2
metadata:
labels:
pipelines.kubeflow.org/pipeline-sdk-type: kfp
- dag:
tasks:
- arguments: