chore(sdk): undo creation of kfp-dsl namespace package (#9874)
* revert to c8204d0
* reapply #9742
* modify #9791
* reapply #9800
* reapply #9827
* revert parts of #9738
* reapply parts of #9785
* remove duplicated dsl-test code
* reapply parts of #9791
* correct version
parent e88e7a6638
commit c9e54798cd

@@ -21,7 +21,7 @@
# ./build.sh [output_file]

target_archive_file=$1
target_archive_file=${1:-kfp.tar.gz}

pushd "$(dirname "$0")"
dist_dir=$(mktemp -d)

@@ -1299,8 +1299,7 @@ def ignore_kfp_version_helper(spec: Dict[str, Any]) -> Dict[str, Any]:
            pipeline_spec['deploymentSpec']['executors'][
                executor] = yaml.safe_load(
                    re.sub(
                        r"'(kfp(-dsl)?)==(\d+).(\d+).(\d+)(-[a-z]+.\d+)?'",
                        'kfp',
                        r"'kfp==(\d+).(\d+).(\d+)(-[a-z]+.\d+)?'", 'kfp',
                        yaml.dump(
                            pipeline_spec['deploymentSpec']['executors']
                            [executor],
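
Editor's note: a minimal, illustrative sketch (not part of the diff) of what this substitution does — it strips pinned SDK versions from the dumped executor spec so compiled-output comparisons ignore them; the old pattern additionally matched the now-removed kfp-dsl package name:

    import re
    import yaml

    executor = {'command': ["pip install 'kfp==2.0.1'"]}  # hypothetical executor spec
    normalized = yaml.safe_load(
        re.sub(r"'kfp==(\d+).(\d+).(\d+)(-[a-z]+.\d+)?'", 'kfp',
               yaml.dump(executor)))
    assert normalized == {'command': ['pip install kfp']}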

@@ -1748,7 +1748,6 @@ def _validate_dag_output_types(
        output_spec,
        error_message_prefix,
        checks_input=False,
        raise_on_error=kfp.TYPE_CHECK,
    )

@@ -74,8 +74,7 @@ def ignore_kfp_version_helper(spec: Dict[str, Any]) -> Dict[str, Any]:
            pipeline_spec['deploymentSpec']['executors'][
                executor] = yaml.safe_load(
                    re.sub(
                        r"'(kfp(-dsl)?)==(\d+).(\d+).(\d+)(-[a-z]+.\d+)?'",
                        'kfp',
                        r"'kfp==(\d+).(\d+).(\d+)(-[a-z]+.\d+)?'", 'kfp',
                        yaml.dump(
                            pipeline_spec['deploymentSpec']['executors']
                            [executor],

@@ -30,7 +30,6 @@ from kfp.components.load_yaml_utilities import load_component_from_url
# keep this for backward compatibility with user code "from kfp.components import placeholders" and similar
from kfp.dsl import base_component  # noqa: keep unused import
from kfp.dsl import placeholders  # noqa: keep unused import
#
from kfp.dsl.base_component import BaseComponent
from kfp.dsl.container_component_class import ContainerComponent
from kfp.dsl.python_component import PythonComponent

@@ -13,15 +13,11 @@
# limitations under the License.
"""Functions for loading components from compiled YAML."""

import hashlib
from typing import Optional, Tuple, Union
import warnings
from typing import Optional, Tuple

from kfp.dsl import structures
from kfp.dsl import v1_structures
from kfp.dsl import yaml_component
import requests
import yaml


def load_component_from_text(text: str) -> yaml_component.YamlComponent:

@@ -34,7 +30,7 @@ def load_component_from_text(text: str) -> yaml_component.YamlComponent:
        Component loaded from YAML.
    """
    return yaml_component.YamlComponent(
        component_spec=_load_component_spec_from_yaml_documents(text),
        component_spec=structures.ComponentSpec.from_yaml_documents(text),
        component_yaml=text)

@@ -90,97 +86,3 @@ def load_component_from_url(
    resp.raise_for_status()

    return load_component_from_text(resp.content.decode('utf-8'))


def _load_documents_from_yaml(component_yaml: str) -> Tuple[dict, dict]:
    """Loads up to two YAML documents from a YAML string.

    First document must always be present. If second document is
    present, it is returned as a dict, else an empty dict.
    """
    documents = list(yaml.safe_load_all(component_yaml))
    num_docs = len(documents)
    if num_docs == 1:
        pipeline_spec_dict = documents[0]
        platform_spec_dict = {}
    elif num_docs == 2:
        pipeline_spec_dict = documents[0]
        platform_spec_dict = documents[1]
    else:
        raise ValueError(
            f'Expected one or two YAML documents in the IR YAML file. Got: {num_docs}.'
        )
    return pipeline_spec_dict, platform_spec_dict


def _load_component_spec_from_yaml_documents(
        component_yaml: str) -> structures.ComponentSpec:
    """Loads V1 or V2 component YAML into a ComponentSpec.

    Args:
        component_yaml: PipelineSpec and optionally PlatformSpec YAML documents as a single string.

    Returns:
        ComponentSpec: The ComponentSpec object.
    """

    def extract_description(component_yaml: str) -> Union[str, None]:
        heading = '# Description: '
        multi_line_description_prefix = '# '
        index_of_heading = 2
        if heading in component_yaml:
            description = component_yaml.splitlines()[index_of_heading]

            # Multi line
            comments = component_yaml.splitlines()
            index = index_of_heading + 1
            while comments[index][:len(multi_line_description_prefix
                                      )] == multi_line_description_prefix:
                description += '\n' + comments[index][
                    len(multi_line_description_prefix) + 1:]
                index += 1

            return description[len(heading):]
        else:
            return None

    pipeline_spec_dict, platform_spec_dict = _load_documents_from_yaml(
        component_yaml)

    is_v1 = 'implementation' in set(pipeline_spec_dict.keys())
    if is_v1:
        v1_component = load_v1_component_spec_from_component_text(
            component_yaml)
        return structures.ComponentSpec.from_v1_component_spec(v1_component)
    else:
        component_spec = structures.ComponentSpec.from_ir_dicts(
            pipeline_spec_dict, platform_spec_dict)
        if not component_spec.description:
            component_spec.description = extract_description(
                component_yaml=component_yaml)
        return component_spec


def load_v1_component_spec_from_component_text(
        text) -> v1_structures.ComponentSpec:
    component_dict = yaml.safe_load(text)
    component_spec = v1_structures.ComponentSpec.from_dict(component_dict)

    if isinstance(component_spec.implementation,
                  v1_structures.ContainerImplementation) and (
                      component_spec.implementation.container.command is None):
        warnings.warn(
            'Container component must specify command to be compatible with KFP '
            'v2 compatible mode and emissary executor, which will be the default'
            ' executor for KFP v2. '
            'https://www.kubeflow.org/docs/components/pipelines/installation/choose-executor/',
            category=FutureWarning,
        )

    # Calculating hash digest for the component
    data = text if isinstance(text, bytes) else text.encode('utf-8')
    data = data.replace(b'\r\n', b'\n')  # Normalizing line endings
    digest = hashlib.sha256(data).hexdigest()
    component_spec._digest = digest

    return component_spec
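
Editor's note: an illustrative sketch (not part of the diff) of the one-document vs. two-document split that the removed _load_documents_from_yaml helper implemented, and which structures.ComponentSpec.from_yaml_documents now handles internally:

    import yaml

    # pipeline spec plus optional platform spec, separated by '---'
    docs = list(yaml.safe_load_all('key1: value1\n---\nkey2: value2\n'))
    assert docs == [{'key1': 'value1'}, {'key2': 'value2'}]
    assert list(yaml.safe_load_all('key1: value1\n')) == [{'key1': 'value1'}]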

@@ -19,7 +19,6 @@ import textwrap
import unittest

from kfp import components
from kfp.components import load_yaml_utilities
from kfp.dsl import structures

SAMPLE_YAML = textwrap.dedent("""\

@@ -125,47 +124,5 @@ class LoadYamlTests(unittest.TestCase):
                         'python:3.7')


class TestLoadDocumentsFromYAML(unittest.TestCase):

    def test_no_documents(self):
        with self.assertRaisesRegex(
                ValueError,
                r'Expected one or two YAML documents in the IR YAML file\. Got\: 0\.'
        ):
            load_yaml_utilities._load_documents_from_yaml('')

    def test_one_document(self):
        doc1, doc2 = load_yaml_utilities._load_documents_from_yaml(
            textwrap.dedent("""\
                key1: value1
                """))
        self.assertEqual(doc1, {'key1': 'value1'})
        self.assertEqual(doc2, {})

    def test_two_documents(self):
        doc1, doc2 = load_yaml_utilities._load_documents_from_yaml(
            textwrap.dedent("""\
                key1: value1
                ---
                key2: value2
                """))
        self.assertEqual(doc1, {'key1': 'value1'})
        self.assertEqual(doc2, {'key2': 'value2'})

    def test_three_documents(self):
        with self.assertRaisesRegex(
                ValueError,
                r'Expected one or two YAML documents in the IR YAML file\. Got\: 3\.'
        ):
            load_yaml_utilities._load_documents_from_yaml(
                textwrap.dedent("""\
                    key3: value3
                    ---
                    key3: value3
                    ---
                    key3: value3
                    """))


if __name__ == '__main__':
    unittest.main()

@@ -0,0 +1,249 @@
"""The `kfp.dsl` module contains domain-specific language objects used to
compose pipelines."""
# Copyright 2020 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

__all__ = [
    'component',
    'container_component',
    'pipeline',
    'importer',
    'ContainerSpec',
    'Condition',
    'ExitHandler',
    'ParallelFor',
    'Collected',
    'Input',
    'Output',
    'InputPath',
    'OutputPath',
    'IfPresentPlaceholder',
    'ConcatPlaceholder',
    'PipelineTaskFinalStatus',
    'PIPELINE_JOB_NAME_PLACEHOLDER',
    'PIPELINE_JOB_RESOURCE_NAME_PLACEHOLDER',
    'PIPELINE_JOB_ID_PLACEHOLDER',
    'PIPELINE_TASK_NAME_PLACEHOLDER',
    'PIPELINE_TASK_ID_PLACEHOLDER',
    'PIPELINE_ROOT_PLACEHOLDER',
    'PIPELINE_JOB_CREATE_TIME_UTC_PLACEHOLDER',
    'PIPELINE_JOB_SCHEDULE_TIME_UTC_PLACEHOLDER',
    'Artifact',
    'ClassificationMetrics',
    'Dataset',
    'HTML',
    'Markdown',
    'Metrics',
    'Model',
    'SlicedClassificationMetrics',
    'PipelineTask',
]

try:
    from typing import Annotated
except ImportError:
    from typing_extensions import Annotated

from typing import TypeVar

from kfp.dsl.component_decorator import component
from kfp.dsl.container_component_decorator import container_component
from kfp.dsl.for_loop import Collected
from kfp.dsl.importer_node import importer
from kfp.dsl.pipeline_context import pipeline
from kfp.dsl.pipeline_task import PipelineTask
from kfp.dsl.placeholders import ConcatPlaceholder
from kfp.dsl.placeholders import IfPresentPlaceholder
from kfp.dsl.structures import ContainerSpec
from kfp.dsl.task_final_status import PipelineTaskFinalStatus
from kfp.dsl.tasks_group import Condition
from kfp.dsl.tasks_group import ExitHandler
from kfp.dsl.tasks_group import ParallelFor
from kfp.dsl.types.artifact_types import Artifact
from kfp.dsl.types.artifact_types import ClassificationMetrics
from kfp.dsl.types.artifact_types import Dataset
from kfp.dsl.types.artifact_types import HTML
from kfp.dsl.types.artifact_types import Markdown
from kfp.dsl.types.artifact_types import Metrics
from kfp.dsl.types.artifact_types import Model
from kfp.dsl.types.artifact_types import SlicedClassificationMetrics
from kfp.dsl.types.type_annotations import InputAnnotation
from kfp.dsl.types.type_annotations import InputPath
from kfp.dsl.types.type_annotations import OutputAnnotation
from kfp.dsl.types.type_annotations import OutputPath

# hack: constants and custom type generics have to be defined here to be captured by autodoc and autodocsumm used in ./docs/conf.py

PIPELINE_JOB_NAME_PLACEHOLDER = '{{$.pipeline_job_name}}'
"""A placeholder used to obtain a pipeline job name within a task at pipeline runtime.

Example:
  ::

    @dsl.pipeline
    def my_pipeline():
        print_op(
            msg='Job name:',
            value=dsl.PIPELINE_JOB_NAME_PLACEHOLDER,
        )
"""

PIPELINE_JOB_RESOURCE_NAME_PLACEHOLDER = '{{$.pipeline_job_resource_name}}'
"""A placeholder used to obtain a pipeline job resource name within a task at pipeline runtime.

Example:
  ::

    @dsl.pipeline
    def my_pipeline():
        print_op(
            msg='Job resource name:',
            value=dsl.PIPELINE_JOB_RESOURCE_NAME_PLACEHOLDER,
        )
"""

PIPELINE_JOB_ID_PLACEHOLDER = '{{$.pipeline_job_uuid}}'
"""A placeholder used to obtain a pipeline job ID within a task at pipeline runtime.

Example:
  ::

    @dsl.pipeline
    def my_pipeline():
        print_op(
            msg='Job ID:',
            value=dsl.PIPELINE_JOB_ID_PLACEHOLDER,
        )
"""

PIPELINE_TASK_NAME_PLACEHOLDER = '{{$.pipeline_task_name}}'
"""A placeholder used to obtain a task name within a task at pipeline runtime.

Example:
  ::

    @dsl.pipeline
    def my_pipeline():
        print_op(
            msg='Task name:',
            value=dsl.PIPELINE_TASK_NAME_PLACEHOLDER,
        )
"""

PIPELINE_TASK_ID_PLACEHOLDER = '{{$.pipeline_task_uuid}}'
"""A placeholder used to obtain a task ID within a task at pipeline runtime.

Example:
  ::

    @dsl.pipeline
    def my_pipeline():
        print_op(
            msg='Task ID:',
            value=dsl.PIPELINE_TASK_ID_PLACEHOLDER,
        )
"""

PIPELINE_ROOT_PLACEHOLDER = '{{$.pipeline_root}}'
"""A placeholder used to obtain the pipeline root.

Example:
  ::

    @dsl.pipeline
    def my_pipeline():
        store_model(
            tmp_dir=dsl.PIPELINE_ROOT_PLACEHOLDER+'/tmp',
        )
"""

PIPELINE_JOB_CREATE_TIME_UTC_PLACEHOLDER = '{{$.pipeline_job_create_time_utc}}'
"""A placeholder used to obtain the time that a pipeline job was created.

Example:
  ::

    @dsl.pipeline
    def my_pipeline():
        print_op(
            msg='Job created at:',
            value=dsl.PIPELINE_JOB_CREATE_TIME_UTC_PLACEHOLDER,
        )
"""

PIPELINE_JOB_SCHEDULE_TIME_UTC_PLACEHOLDER = '{{$.pipeline_job_schedule_time_utc}}'
"""A placeholder used to obtain the time for which a pipeline job is scheduled.

Example:
  ::

    @dsl.pipeline
    def my_pipeline():
        print_op(
            msg='Job scheduled at:',
            value=dsl.PIPELINE_JOB_SCHEDULE_TIME_UTC_PLACEHOLDER,
        )
"""

T = TypeVar('T')
Input = Annotated[T, InputAnnotation]
"""Type generic used to represent an input artifact of type ``T``, where ``T`` is an artifact class.

Use ``Input[Artifact]`` or ``Output[Artifact]`` to indicate whether the enclosed artifact is a component input or output.

Args:
    T: The type of the input artifact.

Example:
  ::

    @dsl.component
    def artifact_producer(model: Output[Artifact]):
        with open(model.path, 'w') as f:
            f.write('my model')

    @dsl.component
    def artifact_consumer(model: Input[Artifact]):
        print(model)

    @dsl.pipeline
    def my_pipeline():
        producer_task = artifact_producer()
        artifact_consumer(model=producer_task.output)
"""

Output = Annotated[T, OutputAnnotation]
"""A type generic used to represent an output artifact of type ``T``, where ``T`` is an artifact class. The argument typed with this annotation is provided at runtime by the executing backend and does not need to be passed as an input by the pipeline author (see example).

Use ``Input[Artifact]`` or ``Output[Artifact]`` to indicate whether the enclosed artifact is a component input or output.

Args:
    T: The type of the output artifact.

Example:
  ::

    @dsl.component
    def artifact_producer(model: Output[Artifact]):
        with open(model.path, 'w') as f:
            f.write('my model')

    @dsl.component
    def artifact_consumer(model: Input[Artifact]):
        print(model)

    @dsl.pipeline
    def my_pipeline():
        producer_task = artifact_producer()
        artifact_consumer(model=producer_task.output)
"""
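
Editor's note: a small, illustrative sketch (not part of the diff) of how the Input/Output generics above work — ``Annotated`` attaches the I/O marker class as metadata that the compiler can read back; the InputAnnotation class here is a stand-in for kfp.dsl.types.type_annotations.InputAnnotation:

    from typing import TypeVar
    try:
        from typing import Annotated
    except ImportError:
        from typing_extensions import Annotated

    class InputAnnotation:  # stand-in marker class
        pass

    T = TypeVar('T')
    Input = Annotated[T, InputAnnotation]

    print(Input[int].__metadata__)  # (<class '__main__.InputAnnotation'>,)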

@@ -0,0 +1,149 @@
# Copyright 2021 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base class for KFP components."""

import abc
from typing import List

from kfp.dsl import pipeline_task
from kfp.dsl import structures
from kfp.dsl.types import type_utils
from kfp.pipeline_spec import pipeline_spec_pb2


class BaseComponent(abc.ABC):
    """Base class for a component.

    **Note:** ``BaseComponent`` is not intended to be used to construct components directly. Use ``@kfp.dsl.component`` or ``kfp.components.load_component_from_*()`` instead.

    Attributes:
        name: Name of the component.
        component_spec: Component definition.
    """

    def __init__(self, component_spec: structures.ComponentSpec):
        """Init function for BaseComponent.

        Args:
            component_spec: The component definition.
        """
        self.component_spec = component_spec
        self.name = component_spec.name
        self.description = component_spec.description or None

        # Arguments typed as PipelineTaskFinalStatus are special arguments that
        # do not count as user inputs. Instead, they are reserved for the
        # (backend) system to pass a value.
        self._component_inputs = {
            input_name for input_name, input_spec in (
                self.component_spec.inputs or {}).items()
            if not type_utils.is_task_final_status_type(input_spec.type)
        }

    def _prevent_using_output_lists_of_artifacts(self):
        """This method should be called at the end of __init__ for
        PythonComponent and ContainerComponent subclasses to temporarily block
        outputting lists of artifacts from a component."""
        # TODO: remove when output lists of artifacts from primitive components is supported
        for output_name, output_spec in (self.component_spec.outputs or
                                         {}).items():
            if output_spec.is_artifact_list:
                raise ValueError(
                    f'Output lists of artifacts are only supported for pipelines. Got output list of artifacts for output parameter {output_name!r} of component {self.name!r}.'
                )

    def __call__(self, *args, **kwargs) -> pipeline_task.PipelineTask:
        """Creates a PipelineTask object.

        The arguments are generated on the fly based on component input
        definitions.
        """
        task_inputs = {}

        if args:
            raise TypeError(
                'Components must be instantiated using keyword arguments. Positional '
                f'parameters are not allowed (found {len(args)} such parameters for '
                f'component "{self.name}").')

        for k, v in kwargs.items():
            if k not in self._component_inputs:
                raise TypeError(
                    f'{self.name}() got an unexpected keyword argument "{k}".')
            task_inputs[k] = v

        # Skip optional inputs and arguments typed as PipelineTaskFinalStatus.
        missing_arguments = [
            arg for arg in self.required_inputs if arg not in kwargs
        ]
        if missing_arguments:
            argument_or_arguments = 'argument' if len(
                missing_arguments) == 1 else 'arguments'
            arguments = ', '.join(
                arg_name.replace('-', '_') for arg_name in missing_arguments)

            raise TypeError(
                f'{self.name}() missing {len(missing_arguments)} required '
                f'{argument_or_arguments}: {arguments}.')

        return pipeline_task.PipelineTask(
            component_spec=self.component_spec,
            args=task_inputs,
        )

    @property
    def pipeline_spec(self) -> pipeline_spec_pb2.PipelineSpec:
        """Returns the pipeline spec of the component."""
        with BlockPipelineTaskRegistration():
            return self.component_spec.to_pipeline_spec()

    @property
    def platform_spec(self) -> pipeline_spec_pb2.PlatformSpec:
        """Returns the PlatformSpec of the component.

        Useful when the component is a GraphComponent, else will be
        empty per component_spec.platform_spec default.
        """
        return self.component_spec.platform_spec

    @abc.abstractmethod
    def execute(self, **kwargs):
        """Executes the component locally if implemented by the inheriting
        subclass."""

    @property
    def required_inputs(self) -> List[str]:
        return [
            input_name for input_name, input_spec in (
                self.component_spec.inputs or {}).items()
            if not input_spec.optional
        ]


class BlockPipelineTaskRegistration:
    """Temporarily stop registering tasks to the default pipeline.

    Handles special, uncommon functions that decorate and mutate a
    component, possibly by using the component's .pipeline_spec
    attribute. This is exhibited in the version of
    google_cloud_pipeline_components compatible with KFP SDK v2.
    """

    # TODO: this handles the special case of a compiled component (when compiled inside a pipeline), which should not have any concept of a default pipeline. Perhaps there is a way to unify component/pipeline compilation concepts to remove this workaround?

    def __enter__(self):
        self.task_handler, pipeline_task.PipelineTask._register_task_handler = pipeline_task.PipelineTask._register_task_handler, pipeline_task._register_task_handler

    def __exit__(self, *args):
        pipeline_task.PipelineTask._register_task_handler = self.task_handler
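
Editor's note: an illustrative sketch (not part of the diff) of the calling convention BaseComponent.__call__ enforces, assuming the kfp SDK as assembled by this commit:

    from kfp import dsl

    @dsl.component
    def add(a: int, b: int) -> int:
        return a + b

    @dsl.pipeline(name='demo')
    def demo():
        add(a=1, b=2)  # OK: keyword arguments only
        # add(1, 2)    # would raise TypeError: positional parameters are not allowed
        # add(a=1)     # would raise TypeError: missing 1 required argument: b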

@@ -0,0 +1,127 @@
# Copyright 2021-2022 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import functools
from typing import Callable, List, Optional
import warnings

from kfp.dsl import component_factory


def component(func: Optional[Callable] = None,
              *,
              base_image: Optional[str] = None,
              target_image: Optional[str] = None,
              packages_to_install: Optional[List[str]] = None,
              pip_index_urls: Optional[List[str]] = None,
              output_component_file: Optional[str] = None,
              install_kfp_package: bool = True,
              kfp_package_path: Optional[str] = None):
    """Decorator for Python-function based components.

    A KFP component can either be a lightweight component or a containerized
    component.

    If ``target_image`` is not specified, this function creates a lightweight
    component. A lightweight component is a self-contained Python function that
    includes all necessary imports and dependencies. In lightweight components,
    ``packages_to_install`` will be used to install dependencies at runtime. The
    parameters ``install_kfp_package`` and ``kfp_package_path`` can be used to control
    how and from where KFP should be installed when the lightweight component is executed.

    If ``target_image`` is specified, this function creates a component definition
    based around the ``target_image``. The assumption is that the function in ``func``
    will be packaged by KFP into this ``target_image``. You can use the KFP CLI's ``build``
    command to package the function into ``target_image``.

    Args:
        func: Python function from which to create a component. The function
            should have type annotations for all its arguments, indicating how
            each argument is intended to be used (e.g. as an input/output artifact,
            a plain parameter, or a path to a file).
        base_image: Image to use when executing the Python function. It should
            contain a default Python interpreter that is compatible with KFP.
        target_image: Image to use when creating containerized components.
        packages_to_install: List of packages to install before
            executing the Python function. These will always be installed at component runtime.
        pip_index_urls: Python Package Index base URLs from which to
            install ``packages_to_install``. Defaults to installing from only PyPI
            (``'https://pypi.org/simple'``). For more information, see `pip install docs <https://pip.pypa.io/en/stable/cli/pip_install/#cmdoption-0>`_.
        output_component_file: If specified, this function will write a
            shareable/loadable version of the component spec into this file.

            **Warning:** This compilation approach is deprecated.
        install_kfp_package: Specifies if the KFP SDK should add the ``kfp`` Python package to
            ``packages_to_install``. Lightweight Python functions always require
            an installation of KFP in ``base_image`` to work. If you specify
            a ``base_image`` that already contains KFP, you can set this to ``False``.
            This flag is ignored when ``target_image`` is specified, which implies
            a choice to build a containerized component. Containerized components
            will always install KFP as part of the build process.
        kfp_package_path: Specifies the location from which to install KFP. By
            default, this will try to install from PyPI using the same version
            as that used when this component was created. Component authors can
            choose to override this to point to a GitHub pull request or
            other pip-compatible package server.

    Returns:
        A component task factory that can be used in pipeline definitions.

    Example:
      ::

        from kfp import dsl

        @dsl.component
        def my_function_one(input: str, output: Output[Model]):
            ...

        @dsl.component(
            base_image='python:3.9',
            output_component_file='my_function.yaml'
        )
        def my_function_two(input: Input[Model]):
            ...

        @dsl.pipeline(name='my-pipeline', pipeline_root='...')
        def pipeline():
            my_function_one_task = my_function_one(input=...)
            my_function_two_task = my_function_two(input=my_function_one_task.outputs)
    """
    if output_component_file is not None:
        warnings.warn(
            'output_component_file parameter is deprecated and will eventually be removed. Please use `Compiler().compile()` to compile a component instead.',
            DeprecationWarning,
            stacklevel=2)

    if func is None:
        return functools.partial(
            component,
            base_image=base_image,
            target_image=target_image,
            packages_to_install=packages_to_install,
            pip_index_urls=pip_index_urls,
            output_component_file=output_component_file,
            install_kfp_package=install_kfp_package,
            kfp_package_path=kfp_package_path)

    return component_factory.create_component_from_func(
        func,
        base_image=base_image,
        target_image=target_image,
        packages_to_install=packages_to_install,
        pip_index_urls=pip_index_urls,
        output_component_file=output_component_file,
        install_kfp_package=install_kfp_package,
        kfp_package_path=kfp_package_path)
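
Editor's note: a generic, illustrative sketch (not part of the diff) of the functools.partial dispatch used above, which lets one decorator support both bare ``@component`` and parameterized ``@component(...)`` usage:

    import functools

    def deco(func=None, *, tag=None):
        if func is None:                      # called as @deco(tag=...)
            return functools.partial(deco, tag=tag)
        func.tag = tag                        # called as bare @deco
        return func

    @deco
    def a(): ...

    @deco(tag='gpu')
    def b(): ...

    assert a.tag is None and b.tag == 'gpu'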

@@ -17,8 +17,8 @@ import tempfile
from typing import Dict, List, NamedTuple
import unittest

from kfp.components import load_yaml_utilities
from kfp.dsl import python_component
from kfp.dsl import structures
from kfp.dsl.component_decorator import component

@@ -104,8 +104,7 @@ class TestComponentDecorator(unittest.TestCase):
        with open(filepath, 'r') as f:
            yaml_text = f.read()

        component_spec = load_yaml_utilities._load_component_spec_from_yaml_documents(
            yaml_text)
        component_spec = structures.ComponentSpec.from_yaml_documents(yaml_text)
        self.assertEqual(component_spec.name, comp.component_spec.name)

    def test_output_named_tuple_with_dict(self):

@@ -0,0 +1,639 @@
# Copyright 2021-2022 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import dataclasses
import inspect
import itertools
import pathlib
import re
import textwrap
from typing import Callable, List, Mapping, Optional, Tuple, Type, Union
import warnings

import docstring_parser
from kfp.dsl import container_component_artifact_channel
from kfp.dsl import container_component_class
from kfp.dsl import graph_component
from kfp.dsl import placeholders
from kfp.dsl import python_component
from kfp.dsl import structures
from kfp.dsl import task_final_status
from kfp.dsl.types import artifact_types
from kfp.dsl.types import custom_artifact_types
from kfp.dsl.types import type_annotations
from kfp.dsl.types import type_utils

_DEFAULT_BASE_IMAGE = 'python:3.7'


@dataclasses.dataclass
class ComponentInfo():
    """A dataclass capturing registered components.

    This will likely be subsumed/augmented with BaseComponent.
    """
    name: str
    function_name: str
    func: Callable
    target_image: str
    module_path: pathlib.Path
    component_spec: structures.ComponentSpec
    output_component_file: Optional[str] = None
    base_image: str = _DEFAULT_BASE_IMAGE
    packages_to_install: Optional[List[str]] = None
    pip_index_urls: Optional[List[str]] = None


# A map from function_name to components. This is always populated when a
# module containing KFP components is loaded. Primarily used by KFP CLI
# component builder to package components in a file into containers.
REGISTERED_MODULES = None


def _python_function_name_to_component_name(name):
    name_with_spaces = re.sub(' +', ' ', name.replace('_', ' ')).strip(' ')
    return name_with_spaces[0].upper() + name_with_spaces[1:]


def make_index_url_options(pip_index_urls: Optional[List[str]]) -> str:
    """Generates index url options for the pip install command based on the
    provided pip_index_urls.

    Args:
        pip_index_urls: Optional list of pip index urls.

    Returns:
        - An empty string if pip_index_urls is empty/None.
        - '--index-url url --trusted-host url ' if pip_index_urls contains one
          url.
        - The above followed by '--extra-index-url url --trusted-host url '
          for each subsequent url, if pip_index_urls contains more than one
          url.

    Note: if pip_index_urls is not empty, the returned string will
    contain a trailing space.
    """
    if not pip_index_urls:
        return ''

    index_url = pip_index_urls[0]
    extra_index_urls = pip_index_urls[1:]

    options = [f'--index-url {index_url} --trusted-host {index_url}']
    options.extend(
        f'--extra-index-url {extra_index_url} --trusted-host {extra_index_url}'
        for extra_index_url in extra_index_urls)

    return ' '.join(options) + ' '
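
# Editor's note (illustrative, not in the original file): with two URLs,
#   make_index_url_options(['https://pypi.org/simple', 'https://mirror.example/simple'])
# returns the single string
#   '--index-url https://pypi.org/simple --trusted-host https://pypi.org/simple '
#   '--extra-index-url https://mirror.example/simple --trusted-host https://mirror.example/simple '
# where mirror.example stands in for any hypothetical extra index.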


_install_python_packages_script_template = '''
if ! [ -x "$(command -v pip)" ]; then
    python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip
fi

PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet \
    --no-warn-script-location {index_url_options}{concat_package_list} && "$0" "$@"
'''


def _get_packages_to_install_command(
        package_list: Optional[List[str]] = None,
        pip_index_urls: Optional[List[str]] = None) -> List[str]:

    if not package_list:
        return []

    concat_package_list = ' '.join(
        [repr(str(package)) for package in package_list])
    index_url_options = make_index_url_options(pip_index_urls)
    install_python_packages_script = _install_python_packages_script_template.format(
        index_url_options=index_url_options,
        concat_package_list=concat_package_list)
    return ['sh', '-c', install_python_packages_script]


def _get_default_kfp_package_path() -> str:
    import kfp
    return f'kfp=={kfp.__version__}'


def _get_function_source_definition(func: Callable) -> str:
    func_code = inspect.getsource(func)

    # Function might be defined in some indented scope (e.g. in another
    # function). We need to handle this and properly dedent the function source
    # code
    func_code = textwrap.dedent(func_code)
    func_code_lines = func_code.split('\n')

    # Removing possible decorators (can be multiline) until the function
    # definition is found
    func_code_lines = itertools.dropwhile(lambda x: not x.startswith('def'),
                                          func_code_lines)

    if not func_code_lines:
        raise ValueError(
            f'Failed to dedent and clean up the source of function "{func.__name__}". It is probably not properly indented.'
        )

    return '\n'.join(func_code_lines)


def _maybe_make_unique(name: str, names: List[str]):
    if name not in names:
        return name

    for i in range(2, 100):
        unique_name = f'{name}_{i}'
        if unique_name not in names:
            return unique_name

    raise RuntimeError(f'Too many arguments with the name {name}')
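
# Editor's note (illustrative, not in the original file):
#   _maybe_make_unique('x', [])           -> 'x'
#   _maybe_make_unique('x', ['x'])        -> 'x_2'
#   _maybe_make_unique('x', ['x', 'x_2']) -> 'x_3'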


def extract_component_interface(
    func: Callable,
    containerized: bool = False,
    description: Optional[str] = None,
    name: Optional[str] = None,
) -> structures.ComponentSpec:
    single_output_name_const = 'Output'

    signature = inspect.signature(func)
    parameters = list(signature.parameters.values())

    original_docstring = inspect.getdoc(func)
    parsed_docstring = docstring_parser.parse(original_docstring)

    inputs = {}
    outputs = {}

    input_names = set()
    output_names = set()
    for parameter in parameters:
        parameter_type = type_annotations.maybe_strip_optional_from_annotation(
            parameter.annotation)
        passing_style = None
        io_name = parameter.name
        is_artifact_list = False

        if type_annotations.is_Input_Output_artifact_annotation(parameter_type):
            # passing_style is either type_annotations.InputAnnotation or
            # type_annotations.OutputAnnotation.
            passing_style = type_annotations.get_io_artifact_annotation(
                parameter_type)

            # parameter_type is a type like typing_extensions.Annotated[kfp.dsl.types.artifact_types.Artifact, <class 'kfp.dsl.types.type_annotations.OutputAnnotation'>] OR typing_extensions.Annotated[typing.List[kfp.dsl.types.artifact_types.Artifact], <class 'kfp.dsl.types.type_annotations.OutputAnnotation'>]

            is_artifact_list = type_annotations.is_list_of_artifacts(
                parameter_type.__origin__)

            parameter_type = type_annotations.get_io_artifact_class(
                parameter_type)
            if not type_annotations.is_artifact_class(parameter_type):
                raise ValueError(
                    f'Input[T] and Output[T] are only supported when T is an artifact or list of artifacts. Found `{io_name} with type {parameter_type}`'
                )

            if parameter.default is not inspect.Parameter.empty:
                if passing_style in [
                        type_annotations.OutputAnnotation,
                        type_annotations.OutputPath,
                ]:
                    raise ValueError(
                        'Default values for Output artifacts are not supported.'
                    )
                elif parameter.default is not None:
                    raise ValueError(
                        f'Optional Input artifacts may only have default value None. Got: {parameter.default}.'
                    )

        elif isinstance(
                parameter_type,
                (type_annotations.InputPath, type_annotations.OutputPath)):
            passing_style = type(parameter_type)
            parameter_type = parameter_type.type
            if parameter.default is not inspect.Parameter.empty and not (
                    passing_style == type_annotations.InputPath and
                    parameter.default is None):
                raise ValueError(
                    'Path inputs only support default values of None. Default'
                    ' values for outputs are not supported.')

        type_struct = type_utils._annotation_to_type_struct(parameter_type)
        if type_struct is None:
            raise TypeError(
                f'Missing type annotation for argument: {parameter.name}')

        if passing_style in [
                type_annotations.OutputAnnotation, type_annotations.OutputPath
        ]:
            if io_name == single_output_name_const:
                raise ValueError(
                    f'"{single_output_name_const}" is an invalid parameter name.'
                )
            io_name = _maybe_make_unique(io_name, output_names)
            output_names.add(io_name)
            if type_annotations.is_artifact_class(parameter_type):
                schema_version = parameter_type.schema_version
                output_spec = structures.OutputSpec(
                    type=type_utils.create_bundled_artifact_type(
                        type_struct, schema_version),
                    is_artifact_list=is_artifact_list)
            else:
                output_spec = structures.OutputSpec(type=type_struct)
            outputs[io_name] = output_spec
        else:
            io_name = _maybe_make_unique(io_name, input_names)
            input_names.add(io_name)
            type_ = type_utils.create_bundled_artifact_type(
                type_struct, parameter_type.schema_version
            ) if type_annotations.is_artifact_class(
                parameter_type) else type_struct
            default = None if parameter.default == inspect.Parameter.empty or type_annotations.is_artifact_class(
                parameter_type) else parameter.default
            optional = parameter.default is not inspect.Parameter.empty or type_utils.is_task_final_status_type(
                type_struct)
            input_spec = structures.InputSpec(
                type=type_,
                default=default,
                optional=optional,
                is_artifact_list=is_artifact_list,
            )

            inputs[io_name] = input_spec

    # Analyzing the return type annotations.
    return_ann = signature.return_annotation
    if not containerized:
        if hasattr(return_ann, '_fields'):  # NamedTuple
            # Getting field type annotations.
            # __annotations__ does not exist in python 3.5 and earlier
            # _field_types does not exist in python 3.9 and later
            field_annotations = getattr(return_ann, '__annotations__',
                                        None) or getattr(
                                            return_ann, '_field_types', None)
            for field_name in return_ann._fields:
                output_name = _maybe_make_unique(field_name, output_names)
                output_names.add(output_name)
                type_var = field_annotations.get(field_name)
                if type_annotations.is_list_of_artifacts(type_var):
                    artifact_cls = type_var.__args__[0]
                    output_spec = structures.OutputSpec(
                        type=type_utils.create_bundled_artifact_type(
                            artifact_cls.schema_title,
                            artifact_cls.schema_version),
                        is_artifact_list=True)
                elif type_annotations.is_artifact_class(type_var):
                    output_spec = structures.OutputSpec(
                        type=type_utils.create_bundled_artifact_type(
                            type_var.schema_title, type_var.schema_version))
                else:
                    type_struct = type_utils._annotation_to_type_struct(
                        type_var)
                    output_spec = structures.OutputSpec(type=type_struct)
                outputs[output_name] = output_spec
        # Deprecated dict-based way of declaring multiple outputs. Was only used by
        # the @component decorator
        elif isinstance(return_ann, dict):
            warnings.warn(
                'The ability to specify multiple outputs using the dict syntax'
                ' has been deprecated. It will be removed soon after release'
                ' 0.1.32. Please use typing.NamedTuple to declare multiple'
                ' outputs.')
            for output_name, output_type_annotation in return_ann.items():
                output_type_struct = type_utils._annotation_to_type_struct(
                    output_type_annotation)
                output_spec = structures.OutputSpec(type=output_type_struct)
                outputs[output_name] = output_spec
        elif signature.return_annotation is not None and signature.return_annotation != inspect.Parameter.empty:
            output_name = _maybe_make_unique(single_output_name_const,
                                             output_names)
            # Fixes exotic, but possible collision:
            # `def func(output_path: OutputPath()) -> str: ...`
            output_names.add(output_name)
            return_ann = signature.return_annotation
            if type_annotations.is_list_of_artifacts(return_ann):
                artifact_cls = return_ann.__args__[0]
                output_spec = structures.OutputSpec(
                    type=type_utils.create_bundled_artifact_type(
                        artifact_cls.schema_title, artifact_cls.schema_version),
                    is_artifact_list=True)
            elif type_annotations.is_artifact_class(return_ann):
                output_spec = structures.OutputSpec(
                    type=type_utils.create_bundled_artifact_type(
                        return_ann.schema_title, return_ann.schema_version),
                    is_artifact_list=False)
            else:
                type_struct = type_utils._annotation_to_type_struct(return_ann)
                output_spec = structures.OutputSpec(type=type_struct)

            outputs[output_name] = output_spec
    elif return_ann != inspect.Parameter.empty and return_ann != structures.ContainerSpec:
        raise TypeError(
            'Return annotation should be either ContainerSpec or omitted for container components.'
        )

    component_name = name or _python_function_name_to_component_name(
        func.__name__)

    def assign_descriptions(
        inputs_or_outputs: Mapping[str, Union[structures.InputSpec,
                                              structures.OutputSpec]],
        docstring_params: List[docstring_parser.DocstringParam],
    ) -> None:
        """Assigns descriptions to InputSpec or OutputSpec for each component
        input/output found in the parsed docstring parameters."""
        docstring_inputs = {param.arg_name: param for param in docstring_params}
        for name, spec in inputs_or_outputs.items():
            if name in docstring_inputs:
                spec.description = docstring_inputs[name].description

    def parse_docstring_with_return_as_args(
            docstring: Union[str,
                             None]) -> Union[docstring_parser.Docstring, None]:
        """Modifies docstring so that a return section can be treated as an
        args section, then parses the docstring."""
        if docstring is None:
            return None

        # Returns and Return are the only two keywords docstring_parser uses for returns
        # use newline to avoid replacements that aren't in the return section header
        return_keywords = ['Returns:\n', 'Returns\n', 'Return:\n', 'Return\n']
        for keyword in return_keywords:
            if keyword in docstring:
                modified_docstring = docstring.replace(keyword.strip(), 'Args:')
                return docstring_parser.parse(modified_docstring)

        return None

    assign_descriptions(inputs, parsed_docstring.params)

    modified_parsed_docstring = parse_docstring_with_return_as_args(
        original_docstring)
    if modified_parsed_docstring is not None:
        assign_descriptions(outputs, modified_parsed_docstring.params)

    description = get_pipeline_description(
        decorator_description=description,
        docstring=parsed_docstring,
    )

    return structures.ComponentSpec(
        name=component_name,
        description=description,
        inputs=inputs or None,
        outputs=outputs or None,
        implementation=structures.Implementation(),
    )


def _get_command_and_args_for_lightweight_component(
        func: Callable) -> Tuple[List[str], List[str]]:
    imports_source = [
        'import kfp',
        'from kfp import dsl',
        'from kfp.dsl import *',
        'from typing import *',
    ] + custom_artifact_types.get_custom_artifact_type_import_statements(func)

    func_source = _get_function_source_definition(func)
    source = textwrap.dedent('''
        {imports_source}

        {func_source}\n''').format(
        imports_source='\n'.join(imports_source), func_source=func_source)
    command = [
        'sh',
        '-ec',
        textwrap.dedent('''\
            program_path=$(mktemp -d)
            printf "%s" "$0" > "$program_path/ephemeral_component.py"
            python3 -m kfp.dsl.executor_main \
                --component_module_path \
                "$program_path/ephemeral_component.py" \
                "$@"
        '''),
        source,
    ]

    args = [
        '--executor_input',
        placeholders.ExecutorInputPlaceholder(),
        '--function_to_execute',
        func.__name__,
    ]

    return command, args


def _get_command_and_args_for_containerized_component(
        function_name: str) -> Tuple[List[str], List[str]]:
    command = [
        'python3',
        '-m',
        'kfp.dsl.executor_main',
    ]

    args = [
        '--executor_input',
        placeholders.ExecutorInputPlaceholder()._to_string(),
        '--function_to_execute',
        function_name,
    ]
    return command, args


def create_component_from_func(
    func: Callable,
    base_image: Optional[str] = None,
    target_image: Optional[str] = None,
    packages_to_install: Optional[List[str]] = None,
    pip_index_urls: Optional[List[str]] = None,
    output_component_file: Optional[str] = None,
    install_kfp_package: bool = True,
    kfp_package_path: Optional[str] = None,
) -> python_component.PythonComponent:
    """Implementation for the @component decorator.

    The decorator is defined under component_decorator.py. See the
    decorator for the canonical documentation for this function.
    """
    packages_to_install = packages_to_install or []

    if install_kfp_package and target_image is None:
        if kfp_package_path is None:
            kfp_package_path = _get_default_kfp_package_path()
        packages_to_install.append(kfp_package_path)

    packages_to_install_command = _get_packages_to_install_command(
        package_list=packages_to_install, pip_index_urls=pip_index_urls)

    command = []
    args = []
    if base_image is None:
        base_image = _DEFAULT_BASE_IMAGE

    component_image = base_image

    if target_image:
        component_image = target_image
        command, args = _get_command_and_args_for_containerized_component(
            function_name=func.__name__,)
    else:
        command, args = _get_command_and_args_for_lightweight_component(
            func=func)

    component_spec = extract_component_interface(func)
    component_spec.implementation = structures.Implementation(
        container=structures.ContainerSpecImplementation(
            image=component_image,
            command=packages_to_install_command + command,
            args=args,
        ))

    module_path = pathlib.Path(inspect.getsourcefile(func))
    module_path.resolve()

    component_name = _python_function_name_to_component_name(func.__name__)
    component_info = ComponentInfo(
        name=component_name,
        function_name=func.__name__,
        func=func,
        target_image=target_image,
        module_path=module_path,
        component_spec=component_spec,
        output_component_file=output_component_file,
        base_image=base_image,
        packages_to_install=packages_to_install,
        pip_index_urls=pip_index_urls)

    if REGISTERED_MODULES is not None:
        REGISTERED_MODULES[component_name] = component_info

    if output_component_file:
        component_spec.save_to_component_yaml(output_component_file)

    return python_component.PythonComponent(
        component_spec=component_spec, python_func=func)


def make_input_for_parameterized_container_component_function(
    name: str, annotation: Union[Type[List[artifact_types.Artifact]],
                                 Type[artifact_types.Artifact]]
) -> Union[placeholders.Placeholder, container_component_artifact_channel
           .ContainerComponentArtifactChannel]:
    if type_annotations.is_input_artifact(annotation):

        if type_annotations.is_list_of_artifacts(annotation.__origin__):
            return placeholders.InputListOfArtifactsPlaceholder(name)
        else:
            return container_component_artifact_channel.ContainerComponentArtifactChannel(
                io_type='input', var_name=name)

    elif type_annotations.is_output_artifact(annotation):

        if type_annotations.is_list_of_artifacts(annotation.__origin__):
            return placeholders.OutputListOfArtifactsPlaceholder(name)
        else:
            return container_component_artifact_channel.ContainerComponentArtifactChannel(
                io_type='output', var_name=name)

    elif isinstance(
            annotation,
            (type_annotations.OutputAnnotation, type_annotations.OutputPath)):
        return placeholders.OutputParameterPlaceholder(name)

    else:
        placeholder = placeholders.InputValuePlaceholder(name)
        # small hack to encode the runtime value's type for a custom json.dumps function
        if (annotation == task_final_status.PipelineTaskFinalStatus or
                type_utils.is_task_final_status_type(annotation)):
            placeholder._ir_type = 'STRUCT'
        else:
            placeholder._ir_type = type_utils.get_parameter_type_name(
                annotation)
        return placeholder


def create_container_component_from_func(
        func: Callable) -> container_component_class.ContainerComponent:
    """Implementation for the @container_component decorator.

    The decorator is defined under container_component_decorator.py. See
    the decorator for the canonical documentation for this function.
    """

    component_spec = extract_component_interface(func, containerized=True)
    signature = inspect.signature(func)
    parameters = list(signature.parameters.values())
    arg_list = []
    for parameter in parameters:
        parameter_type = type_annotations.maybe_strip_optional_from_annotation(
            parameter.annotation)
        arg_list.append(
            make_input_for_parameterized_container_component_function(
                parameter.name, parameter_type))

    container_spec = func(*arg_list)
    container_spec_implementation = structures.ContainerSpecImplementation.from_container_spec(
        container_spec)
    component_spec.implementation = structures.Implementation(
        container_spec_implementation)
    component_spec._validate_placeholders()
    return container_component_class.ContainerComponent(component_spec, func)


def create_graph_component_from_func(
    func: Callable,
    name: Optional[str] = None,
    description: Optional[str] = None,
    display_name: Optional[str] = None,
) -> graph_component.GraphComponent:
    """Implementation for the @pipeline decorator.

    The decorator is defined under pipeline_context.py. See the
    decorator for the canonical documentation for this function.
    """

    component_spec = extract_component_interface(
        func,
        description=description,
        name=name,
    )
    return graph_component.GraphComponent(
        component_spec=component_spec,
        pipeline_func=func,
        display_name=display_name,
    )


def get_pipeline_description(
    decorator_description: Union[str, None],
    docstring: docstring_parser.Docstring,
) -> Union[str, None]:
    """Obtains the correct pipeline description from the pipeline decorator's
    description argument and the parsed docstring.

    Gives precedence to the decorator argument.
    """
    if decorator_description:
        return decorator_description

    short_description = docstring.short_description
    long_description = docstring.long_description
    docstring_description = short_description + '\n' + long_description if (
        short_description and long_description) else short_description
    return docstring_description.strip() if docstring_description else None
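
Editor's note: an illustrative check (not part of the diff) of get_pipeline_description's precedence rule, assuming the module above is importable and docstring_parser is installed:

    import docstring_parser

    doc = docstring_parser.parse('Short line.\n\nLonger description.')
    assert get_pipeline_description(None, doc) == 'Short line.\nLonger description.'
    assert get_pipeline_description('Override.', doc) == 'Override.'   # decorator wins
    assert get_pipeline_description(None, docstring_parser.parse('')) is None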

@@ -0,0 +1,29 @@
# Copyright 2021 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Constants."""

# Unit constants for k8s size string.
_E = 10**18  # Exa
_EI = 1 << 60  # Exa: power-of-two approximate
_P = 10**15  # Peta
_PI = 1 << 50  # Peta: power-of-two approximate
# noinspection PyShadowingBuiltins
_T = 10**12  # Tera
_TI = 1 << 40  # Tera: power-of-two approximate
_G = 10**9  # Giga
_GI = 1 << 30  # Giga: power-of-two approximate
_M = 10**6  # Mega
_MI = 1 << 20  # Mega: power-of-two approximate
_K = 10**3  # Kilo
_KI = 1 << 10  # Kilo: power-of-two approximate
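
Editor's note: a quick illustration (not part of the diff) of the decimal vs. power-of-two unit pairs above, assuming the constants are in scope — this is the difference between k8s size strings such as '1G' and '1Gi':

    assert _G == 10**9 == 1_000_000_000
    assert _GI == (1 << 30) == 1_073_741_824  # '1Gi' is about 7.4% larger than '1G'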
@ -0,0 +1,46 @@
# Copyright 2022 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


class ContainerComponentArtifactChannel:
    """A class for passing placeholders into a container_component-decorated
    function."""

    def __init__(self, io_type: str, var_name: str):
        self._io_type = io_type
        self._var_name = var_name

    def __getattr__(self, _name: str) -> 'placeholders.Placeholder':
        # avoid circular imports
        from kfp.dsl import placeholders

        attr_to_placeholder_dict = {
            'uri': {
                'input': placeholders.InputUriPlaceholder,
                'output': placeholders.OutputUriPlaceholder,
            },
            'path': {
                'input': placeholders.InputPathPlaceholder,
                'output': placeholders.OutputPathPlaceholder,
            },
            'metadata': {
                'input': placeholders.InputMetadataPlaceholder,
                'output': placeholders.OutputMetadataPlaceholder,
            },
        }
        if _name not in ['uri', 'path', 'metadata']:
            raise AttributeError(f'Cannot access artifact attribute "{_name}".')
        return attr_to_placeholder_dict[_name][self._io_type](self._var_name)
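A minimal sketch (assuming kfp is importable) of the attribute-to-placeholder mapping above; only `uri`, `path` and `metadata` resolve:

channel = ContainerComponentArtifactChannel(io_type='input', var_name='dataset')
uri_placeholder = channel.uri    # -> placeholders.InputUriPlaceholder('dataset')
path_placeholder = channel.path  # -> placeholders.InputPathPlaceholder('dataset')
try:
    channel.size                 # any other attribute raises
except AttributeError as e:
    print(e)                     # Cannot access artifact attribute "size".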
@ -0,0 +1,40 @@
# Copyright 2022 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Container-based component."""

from typing import Callable

from kfp.dsl import base_component
from kfp.dsl import structures


class ContainerComponent(base_component.BaseComponent):
    """Component defined via pre-built container.

    Attributes:
        pipeline_func: The function that becomes the implementation of this component.
    """

    def __init__(self, component_spec: structures.ComponentSpec,
                 pipeline_func: Callable) -> None:
        super().__init__(component_spec=component_spec)
        self.pipeline_func = pipeline_func

        self._prevent_using_output_lists_of_artifacts()

    def execute(self, **kwargs):
        # `ContainerComponent` also inherits from `BaseComponent`. As its name
        # suggests, this class backs (custom) container components. Local
        # execution via `docker run` is not implemented here.
        raise NotImplementedError
@ -0,0 +1,53 @@
# Copyright 2022 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import Callable

from kfp.dsl import component_factory
from kfp.dsl import container_component_class


def container_component(
        func: Callable) -> container_component_class.ContainerComponent:
    """Decorator for container-based components in KFP v2.

    Args:
        func: The python function to create a component from. The function
            should have type annotations for all its arguments, indicating how
            it is intended to be used (e.g. as an input/output Artifact object,
            a plain parameter, or a path to a file).

    Example:
      ::

        from kfp.dsl import (container_component, ContainerSpec, Dataset,
                             InputPath, Model, Output, OutputPath)

        @container_component
        def my_component(
            dataset_path: InputPath(Dataset),
            model: Output[Model],
            num_epochs: int,
            output_parameter: OutputPath(str),
        ):
            return ContainerSpec(
                image='gcr.io/my-image',
                command=['sh', 'my_component.sh'],
                args=[
                    '--dataset_path', dataset_path,
                    '--model_path', model.path,
                    '--output_parameter_path', output_parameter,
                ]
            )
    """
    return component_factory.create_container_component_from_func(func)
@ -0,0 +1,368 @@
# Copyright 2021 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
import json
import os
from typing import Any, Callable, Dict, List, Optional, Union

from kfp.dsl import python_component
from kfp.dsl import task_final_status
from kfp.dsl.types import artifact_types
from kfp.dsl.types import type_annotations


class Executor:
    """Executor executes v2-based Python function components."""

    def __init__(self, executor_input: Dict,
                 function_to_execute: Union[Callable,
                                            python_component.PythonComponent]):
        if hasattr(function_to_execute, 'python_func'):
            self._func = function_to_execute.python_func
        else:
            self._func = function_to_execute

        self._input = executor_input
        self._input_artifacts: Dict[str,
                                    Union[artifact_types.Artifact,
                                          List[artifact_types.Artifact]]] = {}
        self._output_artifacts: Dict[str, artifact_types.Artifact] = {}

        for name, artifacts in self._input.get('inputs',
                                               {}).get('artifacts', {}).items():
            list_of_artifact_proto_structs = artifacts.get('artifacts')
            if list_of_artifact_proto_structs:
                annotation = self._func.__annotations__[name]
                # InputPath has no attribute __origin__ and also should be
                # handled as a single artifact
                if type_annotations.is_Input_Output_artifact_annotation(
                        annotation) and type_annotations.is_list_of_artifacts(
                            annotation.__origin__):
                    self._input_artifacts[name] = [
                        self.make_artifact(
                            msg,
                            name,
                            self._func,
                        ) for msg in list_of_artifact_proto_structs
                    ]
                else:
                    self._input_artifacts[name] = self.make_artifact(
                        list_of_artifact_proto_structs[0],
                        name,
                        self._func,
                    )

        for name, artifacts in self._input.get('outputs',
                                               {}).get('artifacts', {}).items():
            list_of_artifact_proto_structs = artifacts.get('artifacts')
            if list_of_artifact_proto_structs:
                output_artifact = self.make_artifact(
                    list_of_artifact_proto_structs[0],
                    name,
                    self._func,
                )
                self._output_artifacts[name] = output_artifact
                self.makedirs_recursively(output_artifact.path)

        self._return_annotation = inspect.signature(
            self._func).return_annotation
        self._executor_output = {}

    def make_artifact(
        self,
        runtime_artifact: Dict,
        name: str,
        func: Callable,
    ) -> Any:
        annotation = func.__annotations__.get(name)
        if isinstance(annotation, type_annotations.InputPath):
            schema_title, _ = annotation.type.split('@')
            if schema_title in artifact_types._SCHEMA_TITLE_TO_TYPE:
                artifact_cls = artifact_types._SCHEMA_TITLE_TO_TYPE[
                    schema_title]
            else:
                raise TypeError(
                    f'Invalid type argument to {type_annotations.InputPath.__name__}: {annotation.type}'
                )
        else:
            artifact_cls = annotation
        return create_artifact_instance(
            runtime_artifact, artifact_cls=artifact_cls)

    def makedirs_recursively(self, path: str) -> None:
        os.makedirs(os.path.dirname(path), exist_ok=True)

    def _get_input_artifact(self, name: str):
        return self._input_artifacts.get(name)

    def _get_output_artifact(self, name: str):
        return self._output_artifacts.get(name)

    def _get_input_parameter_value(self, parameter_name: str):
        parameter_values = self._input.get('inputs',
                                           {}).get('parameterValues', None)

        if parameter_values is not None:
            return parameter_values.get(parameter_name, None)

        return None

    def _get_output_parameter_path(self, parameter_name: str):
        parameter = self._input.get('outputs',
                                    {}).get('parameters',
                                            {}).get(parameter_name, None)
        if parameter is None:
            return None

        path = parameter.get('outputFile', None)
        if path:
            os.makedirs(os.path.dirname(path), exist_ok=True)
        return path

    def _get_output_artifact_path(self, artifact_name: str):
        output_artifact = self._output_artifacts.get(artifact_name)
        if not output_artifact:
            raise ValueError(
                f'Failed to get output artifact path for artifact name {artifact_name}'
            )
        return output_artifact.path

    def _get_input_artifact_path(self, artifact_name: str):
        input_artifact = self._input_artifacts.get(artifact_name)
        if not input_artifact:
            raise ValueError(
                f'Failed to get input artifact path for artifact name {artifact_name}'
            )
        return input_artifact.path

    def _write_output_parameter_value(self, name: str,
                                      value: Union[str, int, float, bool, dict,
                                                   list, Dict, List]):
        if isinstance(value, (float, int)):
            output = str(value)
        elif isinstance(value, str):
            # value is already a string.
            output = value
        elif isinstance(value, (bool, list, dict)):
            output = json.dumps(value)
        else:
            raise ValueError(
                f'Unable to serialize unknown type `{type(value)}` for parameter input with value `{value}`'
            )

        if not self._executor_output.get('parameterValues'):
            self._executor_output['parameterValues'] = {}

        # The native value is stored; the serialized form above only validates
        # that the value is of a supported type.
        self._executor_output['parameterValues'][name] = value

    def _write_output_artifact_payload(self, name: str, value: Any):
        path = self._get_output_artifact_path(name)
        with open(path, 'w') as f:
            f.write(str(value))

    # TODO: extract to a util
    @classmethod
    def _get_short_type_name(cls, type_name: str) -> str:
        """Extracts the short form type name.

        This method is used for looking up serializer for a given type.

        For example:
          typing.List -> List
          typing.List[int] -> List
          typing.Dict[str, str] -> Dict
          List -> List
          str -> str

        Args:
            type_name: The original type name.

        Returns:
            The short form type name or the original name if pattern doesn't match.
        """
        import re
        match = re.match(r'(typing\.)?(?P<type>\w+)(?:\[.+\])?', type_name)
        return match.group('type') if match else type_name

    # TODO: merge with type_utils.is_parameter_type
    @classmethod
    def _is_parameter(cls, annotation: Any) -> bool:
        if type(annotation) == type:
            return annotation in [str, int, float, bool, dict, list]

        # Annotation could be, for instance `typing.Dict[str, str]`, etc.
        return cls._get_short_type_name(str(annotation)) in ['Dict', 'List']

    @classmethod
    def _is_artifact(cls, annotation: Any) -> bool:
        if type(annotation) == type:
            return type_annotations.is_artifact_class(annotation)
        return False

    @classmethod
    def _is_named_tuple(cls, annotation: Any) -> bool:
        if type(annotation) == type:
            return issubclass(annotation, tuple) and hasattr(
                annotation, '_fields') and hasattr(annotation,
                                                   '__annotations__')
        return False

    def _handle_single_return_value(self, output_name: str,
                                    annotation_type: Any, return_value: Any):
        if self._is_parameter(annotation_type):
            origin_type = getattr(annotation_type, '__origin__',
                                  None) or annotation_type
            # relax float-typed return to allow both int and float.
            if origin_type == float:
                accepted_types = (int, float)
            # TODO: relax str-typed return to allow all primitive types?
            else:
                accepted_types = origin_type
            if not isinstance(return_value, accepted_types):
                raise ValueError(
                    f'Function `{self._func.__name__}` returned value of type {type(return_value)}; want type {origin_type}'
                )
            self._write_output_parameter_value(output_name, return_value)
        elif self._is_artifact(annotation_type):
            self._write_output_artifact_payload(output_name, return_value)
        else:
            raise RuntimeError(
                f'Unknown return type: {annotation_type}. Must be one of the supported data types: https://www.kubeflow.org/docs/components/pipelines/v2/data-types/'
            )

    def _write_executor_output(self, func_output: Optional[Any] = None):
        if self._output_artifacts:
            self._executor_output['artifacts'] = {}

        for name, artifact in self._output_artifacts.items():
            runtime_artifact = {
                'name': artifact.name,
                'uri': artifact.uri,
                'metadata': artifact.metadata,
            }
            artifacts_list = {'artifacts': [runtime_artifact]}

            self._executor_output['artifacts'][name] = artifacts_list

        if func_output is not None:
            if self._is_parameter(self._return_annotation) or self._is_artifact(
                    self._return_annotation):
                # Note: single output is named `Output` in component.yaml.
                self._handle_single_return_value('Output',
                                                 self._return_annotation,
                                                 func_output)
            elif self._is_named_tuple(self._return_annotation):
                if len(self._return_annotation._fields) != len(func_output):
                    raise RuntimeError(
                        f'Expected {len(self._return_annotation._fields)} return values from function `{self._func.__name__}`, got {len(func_output)}'
                    )
                for i in range(len(self._return_annotation._fields)):
                    field = self._return_annotation._fields[i]
                    field_type = self._return_annotation.__annotations__[field]
                    if type(func_output) == tuple:
                        field_value = func_output[i]
                    else:
                        field_value = getattr(func_output, field)
                    self._handle_single_return_value(field, field_type,
                                                     field_value)
            else:
                raise RuntimeError(
                    f'Unknown return type: {self._return_annotation}. Must be one of `str`, `int`, `float`, a subclass of `Artifact`, or a NamedTuple collection of these types.'
                )

        # This check ensures that only one worker (in a mirrored, distributed
        # training/compute strategy) attempts to write to the same executor
        # output file at the same time using gcsfuse, which enforces
        # immutability of files.
        write_file = True

        CLUSTER_SPEC_ENV_VAR_NAME = 'CLUSTER_SPEC'
        cluster_spec_string = os.environ.get(CLUSTER_SPEC_ENV_VAR_NAME)
        if cluster_spec_string:
            cluster_spec = json.loads(cluster_spec_string)
            CHIEF_NODE_LABELS = {'workerpool0', 'chief', 'master'}
            write_file = cluster_spec['task']['type'] in CHIEF_NODE_LABELS

        if write_file:
            executor_output_path = self._input['outputs']['outputFile']
            os.makedirs(os.path.dirname(executor_output_path), exist_ok=True)
            with open(executor_output_path, 'w') as f:
                f.write(json.dumps(self._executor_output))

    def execute(self):
        annotations = inspect.getfullargspec(self._func).annotations

        # Function arguments.
        func_kwargs = {}

        for k, v in annotations.items():
            if k == 'return':
                continue

            # Annotations for parameter types could be written as, for example,
            # `Optional[str]`. In this case, we need to strip off the part
            # `Optional[]` to get the actual parameter type.
            v = type_annotations.maybe_strip_optional_from_annotation(v)

            if v == task_final_status.PipelineTaskFinalStatus:
                value = self._get_input_parameter_value(k)
                func_kwargs[k] = task_final_status.PipelineTaskFinalStatus(
                    state=value.get('state'),
                    pipeline_job_resource_name=value.get(
                        'pipelineJobResourceName'),
                    pipeline_task_name=value.get('pipelineTaskName'),
                    error_code=value.get('error').get('code', None),
                    error_message=value.get('error').get('message', None),
                )

            elif self._is_parameter(v):
                value = self._get_input_parameter_value(k)
                if value is not None:
                    func_kwargs[k] = value

            elif type_annotations.is_Input_Output_artifact_annotation(v):
                if type_annotations.is_input_artifact(v):
                    func_kwargs[k] = self._get_input_artifact(k)
                if type_annotations.is_output_artifact(v):
                    func_kwargs[k] = self._get_output_artifact(k)

            elif isinstance(v, type_annotations.OutputPath):
                if self._is_parameter(v.type):
                    func_kwargs[k] = self._get_output_parameter_path(k)
                else:
                    func_kwargs[k] = self._get_output_artifact_path(k)

            elif isinstance(v, type_annotations.InputPath):
                func_kwargs[k] = self._get_input_artifact_path(k)

        result = self._func(**func_kwargs)
        self._write_executor_output(result)


def create_artifact_instance(
    runtime_artifact: Dict,
    artifact_cls=artifact_types.Artifact,
) -> artifact_types.Artifact:
    """Creates an artifact class instance from a runtime artifact
    dictionary."""
    schema_title = runtime_artifact.get('type', {}).get('schemaTitle', '')

    artifact_cls = artifact_types._SCHEMA_TITLE_TO_TYPE.get(
        schema_title, artifact_cls)
    return artifact_cls._from_executor_fields(
        uri=runtime_artifact.get('uri', ''),
        name=runtime_artifact.get('name', ''),
        metadata=runtime_artifact.get('metadata', {}),
    ) if hasattr(artifact_cls, '_from_executor_fields') else artifact_cls(
        uri=runtime_artifact.get('uri', ''),
        name=runtime_artifact.get('name', ''),
        metadata=runtime_artifact.get('metadata', {}),
    )
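A minimal sketch (payload and output path invented for illustration) of driving Executor with a hand-built ExecutorInput:

def add(a: int, b: int) -> int:
    return a + b

executor_input = {
    'inputs': {'parameterValues': {'a': 1, 'b': 2}},
    'outputs': {'outputFile': '/tmp/kfp_outputs/output_metadata.json'},
}
Executor(executor_input=executor_input, function_to_execute=add).execute()
# The output file now contains {"parameterValues": {"Output": 3}}: the single
# return value is written under the reserved name `Output`.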
@ -0,0 +1,105 @@
# Copyright 2021 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import json
import logging
import os
import sys

from kfp.dsl import executor as component_executor
from kfp.dsl import kfp_config
from kfp.dsl import utils


def _setup_logging():
    logging_format = '[KFP Executor %(asctime)s %(levelname)s]: %(message)s'
    logging.basicConfig(
        stream=sys.stdout, format=logging_format, level=logging.INFO)


def executor_main():
    _setup_logging()
    parser = argparse.ArgumentParser(description='KFP Component Executor.')

    parser.add_argument(
        '--component_module_path',
        type=str,
        help='Path to a module containing the KFP component.')

    parser.add_argument(
        '--function_to_execute',
        type=str,
        required=True,
        help='The name of the component function in '
        '--component_module_path file that is to be executed.')

    parser.add_argument(
        '--executor_input',
        type=str,
        help='JSON-serialized ExecutorInput from the orchestrator. '
        'This should contain inputs and placeholders for outputs.')

    args, _ = parser.parse_known_args()

    func_name = args.function_to_execute
    module_path = None
    module_directory = None
    module_name = None

    if args.component_module_path is not None:
        logging.info(
            f'Looking for component `{func_name}` in --component_module_path `{args.component_module_path}`'
        )
        module_path = args.component_module_path
        module_directory = os.path.dirname(args.component_module_path)
        module_name = os.path.basename(args.component_module_path)[:-len('.py')]
    else:
        # Look for module directory using kfp_config.ini
        logging.info(
            f'--component_module_path is not specified. Looking for component `{func_name}` in config file `kfp_config.ini` instead'
        )
        config = kfp_config.KFPConfig()
        components = config.get_components()
        if not components:
            raise RuntimeError('No components found in `kfp_config.ini`')
        try:
            module_path = components[func_name]
        except KeyError:
            raise RuntimeError(
                f'Could not find component `{func_name}` in `kfp_config.ini`. Found the following components instead:\n{components}'
            )

        module_directory = str(module_path.parent)
        module_name = str(module_path.name)[:-len('.py')]

    logging.info(
        f'Loading KFP component "{func_name}" from {module_path} (directory "{module_directory}" and module name "{module_name}")'
    )

    module = utils.load_module(
        module_name=module_name, module_directory=module_directory)

    executor_input = json.loads(args.executor_input)
    function_to_execute = getattr(module, func_name)

    logging.info(f'Got executor_input:\n{json.dumps(executor_input, indent=4)}')

    executor = component_executor.Executor(
        executor_input=executor_input, function_to_execute=function_to_execute)

    executor.execute()


if __name__ == '__main__':
    executor_main()
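A hypothetical invocation (module path and payload invented, and assuming the module is importable as kfp.dsl.executor_main) showing the flags the entrypoint parses:

import subprocess
import sys

subprocess.run([
    sys.executable, '-m', 'kfp.dsl.executor_main',
    '--component_module_path', 'my_dir/my_component.py',
    '--function_to_execute', 'my_component',
    '--executor_input',
    '{"inputs": {"parameterValues": {"a": 1}}, '
    '"outputs": {"outputFile": "/tmp/kfp/out.json"}}',
], check=True)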
File diff suppressed because it is too large
@ -0,0 +1,315 @@
# Copyright 2021 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes and methods that support arguments for ParallelFor."""

import re
from typing import Any, Dict, List, Optional, Union

from kfp.dsl import pipeline_channel

ItemList = List[Union[int, float, str, Dict[str, Any]]]


def _get_loop_item_type(type_name: str) -> Optional[str]:
    """Extracts the loop item type.

    This method is used to extract the item type from a collection type.
    For example:

        List[str] -> str
        typing.List[int] -> int
        typing.Sequence[str] -> str
        List -> None
        str -> None

    Args:
        type_name: The collection type name, like `List`, `Sequence`, etc.

    Returns:
        The collection item type or None if no match found.
    """
    match = re.match(r'(typing\.)?(?:\w+)(?:\[(?P<item_type>.+)\])', type_name)
    return match['item_type'].strip() if match else None


def _get_subvar_type(type_name: str) -> Optional[str]:
    """Extracts the subvar type.

    This method is used to extract the value type from a dictionary type.
    For example:

        Dict[str, int] -> int
        typing.Mapping[str, float] -> float

    Args:
        type_name: The dictionary type.

    Returns:
        The dictionary value type or None if no match found.
    """
    match = re.match(
        r'(typing\.)?(?:\w+)(?:\[\s*(?:\w+)\s*,\s*(?P<value_type>.+)\])',
        type_name)
    return match['value_type'].strip() if match else None


class LoopArgument(pipeline_channel.PipelineParameterChannel):
    """Represents an argument that is looped over in a ParallelFor loop.

    The class shouldn't be instantiated by the end user, rather it is
    created automatically by a ParallelFor ops group.

    To create a LoopArgument instance, use one of its factory methods::

        LoopArgument.from_pipeline_channel(...)
        LoopArgument.from_raw_items(...)


    Attributes:
        items_or_pipeline_channel: The raw items or the PipelineChannel object
            this LoopArgument is associated to.
    """
    LOOP_ITEM_NAME_BASE = 'loop-item'
    LOOP_ITEM_PARAM_NAME_BASE = 'loop-item-param'

    def __init__(
        self,
        items: Union[ItemList, pipeline_channel.PipelineChannel],
        name_code: Optional[str] = None,
        name_override: Optional[str] = None,
        **kwargs,
    ):
        """Initializes a LoopArgument object.

        Args:
            items: List of items to loop over. If a list of dicts, then all
                dicts must have the same keys and every key must be a legal
                Python variable name.
            name_code: A unique code used to identify these loop arguments.
                Should match the code for the ParallelFor ops_group which created
                these LoopArguments. This prevents parameter name collisions.
            name_override: The override name for PipelineChannel.
            **kwargs: Any other keyword arguments passed down to PipelineChannel.
        """
        if (name_code is None) == (name_override is None):
            raise ValueError(
                'Expect one and only one of `name_code` and `name_override` to '
                'be specified.')

        if name_override is None:
            super().__init__(name=self._make_name(name_code), **kwargs)
        else:
            super().__init__(name=name_override, **kwargs)

        if not isinstance(items,
                          (list, tuple, pipeline_channel.PipelineChannel)):
            raise TypeError(
                f'Expected list, tuple, or PipelineChannel, got {items}.')

        if isinstance(items, tuple):
            items = list(items)

        self.items_or_pipeline_channel = items
        self.is_with_items_loop_argument = not isinstance(
            items, pipeline_channel.PipelineChannel)
        self._referenced_subvars: Dict[str, LoopArgumentVariable] = {}

        if isinstance(items, list) and isinstance(items[0], dict):
            subvar_names = set(items[0].keys())
            # then this block creates loop_arg.variable_a and loop_arg.variable_b
            for subvar_name in subvar_names:
                loop_arg_var = LoopArgumentVariable(
                    loop_argument=self,
                    subvar_name=subvar_name,
                )
                self._referenced_subvars[subvar_name] = loop_arg_var
                setattr(self, subvar_name, loop_arg_var)

    def __getattr__(self, name: str):
        # this is being overridden so that we can access subvariables of the
        # LoopArgument (i.e.: item.a) without knowing the subvariable names ahead
        # of time.

        return self._referenced_subvars.setdefault(
            name, LoopArgumentVariable(
                loop_argument=self,
                subvar_name=name,
            ))

    def _make_name(self, code: str):
        """Makes a name for this loop argument from a unique code."""
        return f'{self.LOOP_ITEM_PARAM_NAME_BASE}-{code}'

    @classmethod
    def from_pipeline_channel(
        cls,
        channel: pipeline_channel.PipelineChannel,
    ) -> 'LoopArgument':
        """Creates a LoopArgument object from a PipelineChannel object."""
        return LoopArgument(
            items=channel,
            name_override=channel.name + '-' + cls.LOOP_ITEM_NAME_BASE,
            task_name=channel.task_name,
            channel_type=_get_loop_item_type(channel.channel_type) or 'String',
        )

    @classmethod
    def from_raw_items(
        cls,
        raw_items: ItemList,
        name_code: str,
    ) -> 'LoopArgument':
        """Creates a LoopArgument object from a raw item list."""
        if len(raw_items) == 0:
            raise ValueError('Got an empty item list for loop argument.')

        return LoopArgument(
            items=raw_items,
            name_code=name_code,
            channel_type=type(raw_items[0]).__name__,
        )

    @classmethod
    def name_is_loop_argument(cls, name: str) -> bool:
        """Returns True if the given channel name looks like a loop argument.

        Either it came from a withItems loop item or withParams loop
        item.
        """
        return ('-' + cls.LOOP_ITEM_NAME_BASE) in name \
            or (cls.LOOP_ITEM_PARAM_NAME_BASE + '-') in name


class LoopArgumentVariable(pipeline_channel.PipelineChannel):
    """Represents a subvariable for a loop argument.

    This is used for cases where we're looping over maps, each of which contains
    several variables. If the user ran:

        with dsl.ParallelFor([{'a': 1, 'b': 2}, {'a': 3, 'b': 4}]) as item:
            ...

    Then there's one LoopArgumentVariable for 'a' and another for 'b'.

    Attributes:
        loop_argument: The original LoopArgument object this subvariable is
            attached to.
        subvar_name: The subvariable name.
    """
    SUBVAR_NAME_DELIMITER = '-subvar-'
    LEGAL_SUBVAR_NAME_REGEX = re.compile(r'^[a-zA-Z_][0-9a-zA-Z_]*$')

    def __init__(
        self,
        loop_argument: LoopArgument,
        subvar_name: str,
    ):
        """Initializes a LoopArgumentVariable instance.

        Args:
            loop_argument: The LoopArgument object this subvariable is
                attached to.
            subvar_name: The name of this subvariable, which is the name of the
                dict key that spawned this subvariable.

        Raises:
            ValueError: If the subvar name is illegal.
        """
        if not self._subvar_name_is_legal(subvar_name):
            raise ValueError(
                f'Tried to create subvariable named {subvar_name}, but that is '
                'not a legal Python variable name.')

        self.subvar_name = subvar_name
        self.loop_argument = loop_argument

        super().__init__(
            name=self._get_name_override(
                loop_arg_name=loop_argument.name,
                subvar_name=subvar_name,
            ),
            task_name=loop_argument.task_name,
            channel_type=_get_subvar_type(loop_argument.channel_type) or
            'String',
        )

    @property
    def items_or_pipeline_channel(
            self) -> Union[ItemList, pipeline_channel.PipelineChannel]:
        """Returns the loop argument items."""
        return self.loop_argument.items_or_pipeline_channel

    @property
    def is_with_items_loop_argument(self) -> bool:
        """Whether the loop argument originated from raw items."""
        return self.loop_argument.is_with_items_loop_argument

    def _subvar_name_is_legal(self, proposed_variable_name: str) -> bool:
        """Returns True if the subvar name is legal."""
        return re.match(self.LEGAL_SUBVAR_NAME_REGEX,
                        proposed_variable_name) is not None

    def _get_name_override(self, loop_arg_name: str, subvar_name: str) -> str:
        """Gets the name.

        Args:
            loop_arg_name: The name of the loop argument parameter that this
                LoopArgumentVariable is attached to.
            subvar_name: The name of this subvariable.

        Returns:
            The name of this loop arg variable.
        """
        return f'{loop_arg_name}{self.SUBVAR_NAME_DELIMITER}{subvar_name}'


class Collected(pipeline_channel.PipelineChannel):
    """For collecting into a list the output from a task in dsl.ParallelFor
    loops.

    Args:
        output: The output of an upstream task within a dsl.ParallelFor loop.

    Example:
      ::

        @dsl.pipeline
        def math_pipeline() -> int:
            with dsl.ParallelFor([1, 2, 3]) as x:
                t = double(num=x)

            return add(nums=dsl.Collected(t.output)).output
    """

    def __init__(
        self,
        output: pipeline_channel.PipelineChannel,
    ) -> None:
        self.output = output
        if isinstance(output, pipeline_channel.PipelineArtifactChannel):
            channel_type = output.channel_type
            self.is_artifact_channel = True
            # we know all dsl.Collected instances are lists, so set to true
            # for type checking, which occurs before dsl.Collected is updated to
            # its "correct" channel during compilation
            self.is_artifact_list = True
        else:
            channel_type = 'LIST'
            self.is_artifact_channel = False

        super().__init__(
            output.name,
            channel_type=channel_type,
            task_name=output.task_name,
        )
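A minimal sketch (assuming kfp is installed) of how dict items spawn subvariables; the names shown follow the name bases defined above:

loop_arg = LoopArgument.from_raw_items(
    raw_items=[{'a': 1, 'b': 2}, {'a': 3, 'b': 4}], name_code='1')
loop_arg.a  # LoopArgumentVariable named 'loop-item-param-1-subvar-a'

_get_loop_item_type('List[str]')      # -> 'str'
_get_subvar_type('Dict[str, float]')  # -> 'float'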
@ -0,0 +1,91 @@
# Copyright 2022 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Pipeline as a component (aka graph component)."""

import inspect
from typing import Callable, Optional
import uuid

from kfp.compiler import pipeline_spec_builder as builder
from kfp.dsl import base_component
from kfp.dsl import pipeline_channel
from kfp.dsl import pipeline_context
from kfp.dsl import structures
from kfp.pipeline_spec import pipeline_spec_pb2


class GraphComponent(base_component.BaseComponent):
    """A component defined via @dsl.pipeline decorator.

    Attributes:
        pipeline_func: The function that becomes the implementation of this component.
    """

    def __init__(
        self,
        component_spec: structures.ComponentSpec,
        pipeline_func: Callable,
        display_name: Optional[str] = None,
    ):
        super().__init__(component_spec=component_spec)
        self.pipeline_func = pipeline_func

        args_list = []
        signature = inspect.signature(pipeline_func)

        for arg_name in signature.parameters:
            input_spec = component_spec.inputs[arg_name]
            args_list.append(
                pipeline_channel.create_pipeline_channel(
                    name=arg_name,
                    channel_type=input_spec.type,
                    is_artifact_list=input_spec.is_artifact_list,
                ))

        with pipeline_context.Pipeline(
                self.component_spec.name) as dsl_pipeline:
            pipeline_outputs = pipeline_func(*args_list)

        if not dsl_pipeline.tasks:
            raise ValueError('Task is missing from pipeline.')

        # Making the pipeline group name unique to prevent name clashes with
        # templates
        pipeline_group = dsl_pipeline.groups[0]
        pipeline_group.name = uuid.uuid4().hex

        pipeline_spec, platform_spec = builder.create_pipeline_spec(
            pipeline=dsl_pipeline,
            component_spec=self.component_spec,
            pipeline_outputs=pipeline_outputs,
        )

        pipeline_root = getattr(pipeline_func, 'pipeline_root', None)
        if pipeline_root is not None:
            pipeline_spec.default_pipeline_root = pipeline_root
        if display_name is not None:
            pipeline_spec.pipeline_info.display_name = display_name
        if component_spec.description is not None:
            pipeline_spec.pipeline_info.description = component_spec.description

        self.component_spec.implementation.graph = pipeline_spec
        self.component_spec.platform_spec = platform_spec

    @property
    def pipeline_spec(self) -> pipeline_spec_pb2.PipelineSpec:
        """Returns the pipeline spec of the component."""
        return self.component_spec.implementation.graph

    def execute(self, **kwargs):
        raise RuntimeError('Graph component has no local execution mode.')
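A small sketch of the task check above: because GraphComponent runs the pipeline function at decoration time, an empty pipeline fails immediately:

from kfp import dsl

@dsl.pipeline(name='empty')
def empty_pipeline():
    pass
# -> ValueError: Task is missing from pipeline.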
@ -0,0 +1,30 @@
# Copyright 2021 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Importer-based component."""

from kfp.dsl import base_component
from kfp.dsl import structures


class ImporterComponent(base_component.BaseComponent):
    """Component defined via dsl.importer."""

    def __init__(
        self,
        component_spec: structures.ComponentSpec,
    ):
        super().__init__(component_spec=component_spec)

    def execute(self, **kwargs):
        raise NotImplementedError
@ -0,0 +1,145 @@
# Copyright 2020-2022 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility function for building Importer Node spec."""

from typing import Any, Dict, Mapping, Optional, Type, Union

from kfp.dsl import importer_component
from kfp.dsl import pipeline_channel
from kfp.dsl import pipeline_task
from kfp.dsl import placeholders
from kfp.dsl import structures
from kfp.dsl import utils
from kfp.dsl.types import artifact_types
from kfp.dsl.types import type_utils

URI_KEY = 'uri'
OUTPUT_KEY = 'artifact'
METADATA_KEY = 'metadata'


def importer(
    artifact_uri: Union[pipeline_channel.PipelineParameterChannel, str],
    artifact_class: Type[artifact_types.Artifact],
    reimport: bool = False,
    metadata: Optional[Mapping[str, Any]] = None,
) -> pipeline_task.PipelineTask:
    """Imports an existing artifact for use in a downstream component.

    Args:
        artifact_uri: The URI of the artifact to import.
        artifact_class: The artifact class being imported.
        reimport: Whether to reimport the artifact.
        metadata: Properties of the artifact.

    Returns:
        A task with the artifact accessible via its ``.output`` attribute.

    Examples::

        @dsl.pipeline(name='pipeline-with-importer')
        def pipeline_with_importer():

            importer1 = importer(
                artifact_uri='gs://ml-pipeline-playground/shakespeare1.txt',
                artifact_class=Dataset,
                reimport=False)
            train(dataset=importer1.output)
    """

    component_inputs: Dict[str, structures.InputSpec] = {}
    call_inputs: Dict[str, Any] = {}

    def traverse_dict_and_create_metadata_inputs(d: Any) -> Any:
        if isinstance(d, pipeline_channel.PipelineParameterChannel):
            reversed_call_inputs = {
                pipeline_param_chan: name
                for name, pipeline_param_chan in call_inputs.items()
            }

            # minimizes importer spec interface by not creating new
            # inputspec/parameters if the same input is used multiple places
            # in metadata
            unique_name = reversed_call_inputs.get(
                d,
                utils.make_name_unique_by_adding_index(
                    METADATA_KEY,
                    list(call_inputs),
                    '-',
                ),
            )

            call_inputs[unique_name] = d
            component_inputs[unique_name] = structures.InputSpec(
                type=d.channel_type)

            return placeholders.InputValuePlaceholder(
                input_name=unique_name)._to_string()

        elif isinstance(d, dict):
            # use a loop instead of a dict comprehension to ensure compiled
            # results are identical across Python versions
            res = {}
            for k, v in d.items():
                new_k = traverse_dict_and_create_metadata_inputs(k)
                new_v = traverse_dict_and_create_metadata_inputs(v)
                res[new_k] = new_v
            return res

        elif isinstance(d, list):
            return [traverse_dict_and_create_metadata_inputs(el) for el in d]

        elif isinstance(d, str):
            # extract pipeline channels from f-strings, if any
            pipeline_channels = pipeline_channel.extract_pipeline_channels_from_any(
                d)

            # pass the channel back into the recursive function to create the
            # placeholder, component inputs, and call inputs, then replace the
            # channel with the placeholder
            for channel in pipeline_channels:
                input_placeholder = traverse_dict_and_create_metadata_inputs(
                    channel)
                d = d.replace(channel.pattern, input_placeholder)
            return d

        else:
            return d

    metadata_with_placeholders = traverse_dict_and_create_metadata_inputs(
        metadata)

    component_spec = structures.ComponentSpec(
        name='importer',
        implementation=structures.Implementation(
            importer=structures.ImporterSpec(
                artifact_uri=placeholders.InputValuePlaceholder(
                    URI_KEY)._to_string(),
                schema_title=type_utils.create_bundled_artifact_type(
                    artifact_class.schema_title, artifact_class.schema_version),
                schema_version=artifact_class.schema_version,
                reimport=reimport,
                metadata=metadata_with_placeholders)),
        inputs={
            URI_KEY: structures.InputSpec(type='String'),
            **component_inputs
        },
        outputs={
            OUTPUT_KEY:
                structures.OutputSpec(
                    type=type_utils.create_bundled_artifact_type(
                        artifact_class.schema_title,
                        artifact_class.schema_version))
        },
    )
    importer = importer_component.ImporterComponent(
        component_spec=component_spec)
    return importer(uri=artifact_uri, **call_inputs)
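A sketch (pipeline and URI invented) of the metadata traversal above: a pipeline parameter used inside metadata is replaced by an input placeholder and wired in as a call input, roughly `{{$.inputs.parameters['metadata']}}`:

from kfp import dsl

@dsl.pipeline
def my_pipeline(run_label: str):
    ds = dsl.importer(
        artifact_uri='gs://bucket/data.csv',
        artifact_class=dsl.Dataset,
        metadata={'label': run_label})  # run_label becomes a placeholder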
@ -0,0 +1,106 @@
# Copyright 2021 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import configparser
import pathlib
from typing import Dict, Optional
import warnings

_KFP_CONFIG_FILE = 'kfp_config.ini'

_COMPONENTS_SECTION = 'Components'


class KFPConfig:
    """Class for managing KFP component configuration.

    The configuration is an .ini file named `kfp_config.ini` that can be parsed
    by Python's native configparser module. Currently, this class supports a
    single `Components` section, which lists components as key-value pairs. The
    key is the component name (i.e. the function name), and the value is the
    path to the file containing this function. The path is usually relative
    from the location of the configuration file, but absolute paths should also
    work.

    At runtime, the KFP v2 Executor, defined in executor_main.py, will look
    for this configuration file in its current working directory. If found,
    it will load its contents, and use this to find the file containing the
    component to execute.

    Example of the file's contents:

        [Components]
        my_component_1 = my_dir_1/my_component_1.py
        my_component_2 = my_dir_2/my_component_2.py
        ...
    """

    def __init__(self, config_directory: Optional[pathlib.Path] = None):
        """Creates a KFPConfig object.

        Loads the config from an existing `kfp_config.ini` file if found.

        Args:
            config_directory: Looks for a file named `kfp_config.ini` in this
                directory. Defaults to the current directory.
        """
        self._config_parser = configparser.ConfigParser()
        # Preserve case for keys.
        self._config_parser.optionxform = lambda x: x

        if config_directory is None:
            self._config_filepath = pathlib.Path(_KFP_CONFIG_FILE)
        else:
            self._config_filepath = config_directory / _KFP_CONFIG_FILE

        try:
            with open(str(self._config_filepath), 'r') as f:
                self._config_parser.read_file(f)
        except IOError:
            warnings.warn('No existing KFP Config file found')

        if not self._config_parser.has_section(_COMPONENTS_SECTION):
            self._config_parser.add_section(_COMPONENTS_SECTION)

        self._components = {}

    def add_component(self, function_name: str, path: pathlib.Path):
        """Adds a KFP component.

        Args:
            function_name: The name of the component function.
            path: A path to the file containing the component.
        """
        self._components[function_name] = str(path)

    def save(self):
        """Writes out a KFP config file."""
        # Always write out components in alphabetical order for determinism,
        # especially in tests.
        for function_name in sorted(self._components.keys()):
            self._config_parser[_COMPONENTS_SECTION][
                function_name] = self._components[function_name]

        with open(str(self._config_filepath), 'w') as f:
            self._config_parser.write(f)

    def get_components(self) -> Dict[str, pathlib.Path]:
        """Returns a dictionary of known KFP components.

        Returns:
            A dictionary from component name (function name) to a pathlib.Path
            pointing to the Python file with this component's definition.
        """
        return {
            function_name: pathlib.Path(module_path) for function_name,
            module_path in self._config_parser[_COMPONENTS_SECTION].items()
        }
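A minimal round-trip sketch (directory and component names invented) of the API above:

import pathlib

config = KFPConfig(config_directory=pathlib.Path('/tmp'))
config.add_component('my_component', pathlib.Path('my_dir/my_component.py'))
config.save()  # writes /tmp/kfp_config.ini

KFPConfig(pathlib.Path('/tmp')).get_components()
# {'my_component': PosixPath('my_dir/my_component.py')}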
@ -0,0 +1,379 @@
# Copyright 2021 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Definition of PipelineChannel."""

import abc
import contextlib
import dataclasses
import json
import re
from typing import Dict, List, Optional, Union

from kfp.dsl.types import type_utils


@dataclasses.dataclass
class ConditionOperator:
    """Represents a condition expression to be used in dsl.Condition().

    Attributes:
        operator: The operator of the condition.
        left_operand: The left operand.
        right_operand: The right operand.
    """
    operator: str
    left_operand: Union['PipelineParameterChannel', type_utils.PARAMETER_TYPES]
    right_operand: Union['PipelineParameterChannel', type_utils.PARAMETER_TYPES]


# The string template used to generate the placeholder of a PipelineChannel.
_PIPELINE_CHANNEL_PLACEHOLDER_TEMPLATE = (
    '{{channel:task=%s;name=%s;type=%s;}}')
# The regex for parsing PipelineChannel placeholders from a string.
_PIPELINE_CHANNEL_PLACEHOLDER_REGEX = (
    r'{{channel:task=([\w\s_-]*);name=([\w\s_-]+);type=([\w\s{}":_-]*);}}')


class PipelineChannel(abc.ABC):
    """Represents a future value that is passed between pipeline components.

    A PipelineChannel object can be used as a pipeline function argument so that
    it will be a pipeline artifact or parameter that shows up in ML Pipelines
    system UI. It can also represent an intermediate value passed between
    components.

    Attributes:
        name: The name of the pipeline channel.
        channel_type: The type of the pipeline channel.
        task_name: The name of the task that produces the pipeline channel.
            None means it is not produced by any task, so if None, either user
            constructs it directly (for providing an immediate value), or it is
            a pipeline function argument.
        pattern: The serialized string regex pattern this pipeline channel
            created from.
    """

    @abc.abstractmethod
    def __init__(
        self,
        name: str,
        channel_type: Union[str, Dict],
        task_name: Optional[str] = None,
    ):
        """Initializes a PipelineChannel instance.

        Args:
            name: The name of the pipeline channel. The name will be sanitized
                to be k8s compatible.
            channel_type: The type of the pipeline channel.
            task_name: Optional; The name of the task that produces the pipeline
                channel. If provided, the task name will be sanitized to be k8s
                compatible.

        Raises:
            ValueError: If name or task_name contains invalid characters.
            ValueError: If both task_name and value are set.
        """
        valid_name_regex = r'^[A-Za-z][A-Za-z0-9\s_-]*$'
        if not re.match(valid_name_regex, name):
            raise ValueError(
                f'Only letters, numbers, spaces, "_", and "-" are allowed in the name. Must begin with a letter. Got name: {name}'
            )

        self.name = name
        self.channel_type = channel_type
        # ensure value is None even if empty string or empty list/dict
        # so that serialization and deserialization remain consistent
        # (i.e. None => '' => None)
        self.task_name = task_name or None
        from kfp.dsl import pipeline_context

        default_pipeline = pipeline_context.Pipeline.get_default_pipeline()
        if self.task_name is not None and default_pipeline is not None and default_pipeline.tasks:
            self.task = pipeline_context.Pipeline.get_default_pipeline().tasks[
                self.task_name]
        else:
            self.task = None

    @property
    def full_name(self) -> str:
        """Unique name for the PipelineChannel."""
        return f'{self.task_name}-{self.name}' if self.task_name else self.name

    @property
    def pattern(self) -> str:
        """Unique pattern for the PipelineChannel."""
        return str(self)

    def __str__(self) -> str:
        """String representation of the PipelineChannel.

        The string representation is a string identifier so we can mix
        the PipelineChannel inline with other strings such as arguments.
        For example, we can support: ['echo %s' % param] as the
        container command and later a compiler can replace the
        placeholder '{{channel:task=%s;name=%s;type=%s;}}' with
        its own parameter identifier.
        """
        task_name = self.task_name or ''
        name = self.name
        channel_type = self.channel_type or ''
        if isinstance(channel_type, dict):
            channel_type = json.dumps(channel_type)
        return _PIPELINE_CHANNEL_PLACEHOLDER_TEMPLATE % (task_name, name,
                                                         channel_type)

    def __repr__(self) -> str:
        """Representation of the PipelineChannel.

        We make repr return the placeholder string so that if someone
        uses str()-based serialization of complex objects containing
        `PipelineChannel`, it works properly. (e.g. str([1, 2, 3,
        kfp.pipeline_channel.PipelineParameterChannel("aaa"), 4, 5, 6,]))
        """
        return str(self)

    def __hash__(self) -> int:
        """Returns the hash of a PipelineChannel."""
        return hash(self.pattern)

    def __eq__(self, other):
        return ConditionOperator('==', self, other)

    def __ne__(self, other):
        return ConditionOperator('!=', self, other)

    def __lt__(self, other):
        return ConditionOperator('<', self, other)

    def __le__(self, other):
        return ConditionOperator('<=', self, other)

    def __gt__(self, other):
        return ConditionOperator('>', self, other)

    def __ge__(self, other):
        return ConditionOperator('>=', self, other)
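A small sketch of the comparison overloads above: they build ConditionOperator records for dsl.Condition to serialize rather than evaluating eagerly (channel name and type invented):

channel = PipelineParameterChannel(name='threshold', channel_type='Integer')
cond = channel >= 10
assert isinstance(cond, ConditionOperator)
# cond.operator, cond.left_operand, cond.right_operand == ('>=', channel, 10)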
|
||||
class PipelineParameterChannel(PipelineChannel):
|
||||
"""Represents a pipeline parameter channel.
|
||||
|
||||
Attributes:
|
||||
name: The name of the pipeline channel.
|
||||
channel_type: The type of the pipeline channel.
|
||||
task_name: The name of the task that produces the pipeline channel.
|
||||
None means it is not produced by any task, so if None, either user
|
||||
constructs it directly (for providing an immediate value), or it is a
|
||||
pipeline function argument.
|
||||
pattern: The serialized string regex pattern this pipeline channel created
|
||||
from.
|
||||
value: The actual value of the pipeline channel. If provided, the
|
||||
pipeline channel is "resolved" immediately.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
name: str,
|
||||
channel_type: Union[str, Dict],
|
||||
task_name: Optional[str] = None,
|
||||
value: Optional[type_utils.PARAMETER_TYPES] = None,
|
||||
):
|
||||
"""Initializes a PipelineArtifactChannel instance.
|
||||
|
||||
Args:
|
||||
name: The name of the pipeline channel.
|
||||
channel_type: The type of the pipeline channel.
|
||||
task_name: Optional; The name of the task that produces the pipeline
|
||||
channel.
|
||||
value: Optional; The actual value of the pipeline channel.
|
||||
|
||||
Raises:
|
||||
ValueError: If name or task_name contains invalid characters.
|
||||
ValueError: If both task_name and value are set.
|
||||
TypeError: If the channel type is not a parameter type.
|
||||
"""
|
||||
if task_name and value:
|
||||
raise ValueError('task_name and value cannot be both set.')
|
||||
|
||||
if not type_utils.is_parameter_type(channel_type):
|
||||
raise TypeError(f'{channel_type} is not a parameter type.')
|
||||
|
||||
self.value = value
|
||||
|
||||
super(PipelineParameterChannel, self).__init__(
|
||||
name=name,
|
||||
channel_type=channel_type,
|
||||
task_name=task_name,
|
||||
)
|
||||
|
||||
|
||||
class PipelineArtifactChannel(PipelineChannel):
|
||||
"""Represents a pipeline artifact channel.
|
||||
|
||||
Attributes:
|
||||
name: The name of the pipeline channel.
|
||||
channel_type: The type of the pipeline channel.
|
||||
task_name: The name of the task that produces the pipeline channel.
|
||||
A pipeline artifact channel is always produced by some task.
|
||||
pattern: The serialized string regex pattern this pipeline channel created
|
||||
from.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
name: str,
|
||||
channel_type: Union[str, Dict],
|
||||
task_name: Optional[str],
|
||||
is_artifact_list: bool,
|
||||
):
|
||||
"""Initializes a PipelineArtifactChannel instance.
|
||||
|
||||
Args:
|
||||
name: The name of the pipeline channel.
|
||||
channel_type: The type of the pipeline channel.
|
||||
task_name: Optional; the name of the task that produces the pipeline
|
||||
channel.
|
||||
|
||||
Raises:
|
||||
ValueError: If name or task_name contains invalid characters.
|
||||
TypeError: If the channel type is not an artifact type.
|
||||
"""
|
||||
if type_utils.is_parameter_type(channel_type):
|
||||
raise TypeError(f'{channel_type} is not an artifact type.')
|
||||
|
||||
self.is_artifact_list = is_artifact_list
|
||||
|
||||
super(PipelineArtifactChannel, self).__init__(
|
||||
name=name,
|
||||
channel_type=channel_type,
|
||||
task_name=task_name,
|
||||
)
|
||||
|
||||
|
||||
def create_pipeline_channel(
|
||||
name: str,
|
||||
channel_type: Union[str, Dict],
|
||||
task_name: Optional[str] = None,
|
||||
value: Optional[type_utils.PARAMETER_TYPES] = None,
|
||||
is_artifact_list: bool = False,
|
||||
) -> PipelineChannel:
|
||||
"""Creates a PipelineChannel object.
|
||||
|
||||
Args:
|
||||
name: The name of the channel.
|
||||
channel_type: The type of the channel, which decides whether it is an
|
||||
PipelineParameterChannel or PipelineArtifactChannel
|
||||
task_name: Optional; the task that produced the channel.
|
||||
value: Optional; the realized value for a channel.
|
||||
|
||||
Returns:
|
||||
A PipelineParameterChannel or PipelineArtifactChannel object.
|
||||
"""
|
||||
if type_utils.is_parameter_type(channel_type):
|
||||
return PipelineParameterChannel(
|
||||
name=name,
|
||||
channel_type=channel_type,
|
||||
task_name=task_name,
|
||||
value=value,
|
||||
)
|
||||
else:
|
||||
return PipelineArtifactChannel(
|
||||
name=name,
|
||||
channel_type=channel_type,
|
||||
task_name=task_name,
|
||||
is_artifact_list=is_artifact_list,
|
||||
)
|
||||
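# Illustrative sketch (not part of the commit): how the factory above
# dispatches on the channel type, and how a channel serializes into a
# placeholder via __str__. Assumes 'String' is a parameter type and
# 'system.Dataset@0.0.1' an artifact type, per kfp.dsl.types.type_utils.
param_chan = create_pipeline_channel(name='msg', channel_type='String')
assert isinstance(param_chan, PipelineParameterChannel)
artifact_chan = create_pipeline_channel(
    name='data', channel_type='system.Dataset@0.0.1', task_name='producer')
assert isinstance(artifact_chan, PipelineArtifactChannel)
# Mixing a channel into a string embeds its placeholder, e.g.:
cmd = 'echo %s' % param_chan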


def extract_pipeline_channels_from_string(
        payload: str) -> List[PipelineChannel]:
    """Extracts a list of PipelineChannel instances from the payload string.

    Note: this function removes all duplicate matches.

    Args:
        payload: A string that may contain serialized PipelineChannels.

    Returns:
        A list of PipelineChannels found in the payload.
    """
    matches = re.findall(_PIPELINE_CHANNEL_PLACEHOLDER_REGEX, payload)
    unique_channels = set()
    for match in matches:
        task_name, name, channel_type = match

        # channel_type could be either a string (e.g. "Integer") or a
        # dictionary (e.g. {"custom_type": {"custom_property": "some_value"}}).
        # Try loading it into a dictionary; if that fails, channel_type is a
        # string.
        with contextlib.suppress(json.JSONDecodeError):
            channel_type = json.loads(channel_type)

        if type_utils.is_parameter_type(channel_type):
            pipeline_channel = PipelineParameterChannel(
                name=name,
                channel_type=channel_type,
                task_name=task_name,
            )
        else:
            pipeline_channel = PipelineArtifactChannel(
                name=name,
                channel_type=channel_type,
                task_name=task_name,
                # There is currently no support for getting the index from a
                # list of artifacts (e.g., my_datasets[0].uri), so this is
                # always False until accessing a single artifact element is
                # supported.
                is_artifact_list=False,
            )
        unique_channels.add(pipeline_channel)

    return list(unique_channels)
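# Illustrative sketch: round-tripping a channel through its string form.
# str(chan) embeds the placeholder, and the extractor recovers an
# equivalent channel from the surrounding text.
chan = PipelineParameterChannel(name='lr', channel_type='Double')
payload = f'--learning-rate {chan}'
recovered = extract_pipeline_channels_from_string(payload)
assert len(recovered) == 1 and recovered[0].name == 'lr'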


def extract_pipeline_channels_from_any(
    payload: Union[PipelineChannel, str, list, tuple, dict]
) -> List[PipelineChannel]:
    """Recursively extracts PipelineChannels from any object or list of
    objects.

    Args:
        payload: An object that contains serialized PipelineChannels or k8s
            definition objects.

    Returns:
        A list of PipelineChannels found in the payload.
    """
    if not payload:
        return []

    if isinstance(payload, PipelineChannel):
        return [payload]

    if isinstance(payload, str):
        return list(set(extract_pipeline_channels_from_string(payload)))

    if isinstance(payload, (list, tuple)):
        pipeline_channels = []
        for item in payload:
            pipeline_channels += extract_pipeline_channels_from_any(item)
        return list(set(pipeline_channels))

    if isinstance(payload, dict):
        pipeline_channels = []
        for key, value in payload.items():
            pipeline_channels += extract_pipeline_channels_from_any(key)
            pipeline_channels += extract_pipeline_channels_from_any(value)
        return list(set(pipeline_channels))

    # TODO(chensun): extract PipelineChannel from v2 container spec?

    return []


@@ -0,0 +1,203 @@
# Copyright 2021-2022 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Definition for Pipeline."""

import functools
from typing import Callable, Optional

from kfp.dsl import component_factory
from kfp.dsl import pipeline_task
from kfp.dsl import tasks_group
from kfp.dsl import utils


def pipeline(func: Optional[Callable] = None,
             *,
             name: Optional[str] = None,
             description: Optional[str] = None,
             pipeline_root: Optional[str] = None,
             display_name: Optional[str] = None) -> Callable:
    """Decorator used to construct a pipeline.

    Example
      ::

        @pipeline(
          name='my-pipeline',
          description='My ML Pipeline.',
          pipeline_root='gs://my-bucket/my-output-path'
        )
        def my_pipeline(a: str, b: int):
          ...

    Args:
        func: The Python function that defines a pipeline.
        name: The pipeline name. Defaults to a sanitized version of the
            decorated function name.
        description: A human-readable description of the pipeline.
        pipeline_root: The root directory from which to read input and output
            parameters and artifacts.
        display_name: A human-readable name for the pipeline.
    """
    if func is None:
        return functools.partial(
            pipeline,
            name=name,
            description=description,
            pipeline_root=pipeline_root,
            display_name=display_name,
        )

    if pipeline_root:
        func.pipeline_root = pipeline_root

    return component_factory.create_graph_component_from_func(
        func,
        name=name,
        description=description,
        display_name=display_name,
    )
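# Illustrative sketch of the optional-call decorator pattern used above:
# `functools.partial` lets the same function serve both `@pipeline` and
# `@pipeline(name=...)`. The `decorate` helper below is hypothetical, shown
# only to isolate the pattern.
import functools


def decorate(func=None, *, name=None):
    if func is None:
        # Called as @decorate(name=...): return a partial waiting for func.
        return functools.partial(decorate, name=name)
    func.display_name = name or func.__name__
    return func


@decorate
def a():
    ...


@decorate(name='custom')
def b():
    ...


assert a.display_name == 'a' and b.display_name == 'custom'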


class Pipeline:
    """A pipeline contains a list of tasks.

    This class is not supposed to be used by pipeline authors, since pipeline
    authors can use pipeline functions (decorated with @pipeline) to reference
    their pipelines.

    This class is useful for implementing a compiler. For example, the compiler
    can use the following to get the pipeline object and its tasks:

    Example:
      ::

        with Pipeline() as p:
            pipeline_func(*args_list)

        traverse(p.tasks)

    Attributes:
        name: The name of the pipeline.
        tasks: The map from unique task names to the tasks in the pipeline.
        groups: The stack of TasksGroups; the first element is the root group.
    """

    # _default_pipeline is set when the compiler runs "with Pipeline()"
    _default_pipeline = None

    @staticmethod
    def get_default_pipeline():
        """Gets the default pipeline."""
        return Pipeline._default_pipeline

    def __init__(self, name: str):
        """Creates a new instance of Pipeline.

        Args:
            name: The name of the pipeline.
        """
        self.name = name
        self.tasks = {}
        # Add the root group.
        self.groups = [
            tasks_group.TasksGroup(
                group_type=tasks_group.TasksGroupType.PIPELINE,
                name=name,
                is_root=True)
        ]
        self._group_id = 0

    def __enter__(self):

        if Pipeline._default_pipeline:
            raise Exception('Nested pipelines are not allowed.')

        Pipeline._default_pipeline = self

        def register_task_and_generate_id(task: pipeline_task.PipelineTask):
            return self.add_task(
                task=task,
                add_to_group=not getattr(task, 'is_exit_handler', False))

        self._old_register_task_handler = (
            pipeline_task.PipelineTask._register_task_handler)
        pipeline_task.PipelineTask._register_task_handler = (
            register_task_and_generate_id)
        return self

    def __exit__(self, *unused_args):

        Pipeline._default_pipeline = None
        pipeline_task.PipelineTask._register_task_handler = (
            self._old_register_task_handler)

    def add_task(
        self,
        task: pipeline_task.PipelineTask,
        add_to_group: bool,
    ) -> str:
        """Adds a new task.

        Args:
            task: A PipelineTask instance.
            add_to_group: Whether to add the task to the current group.
                Expected to be True for all tasks except the exit handler.

        Returns:
            A unique task name.
        """
        # Sanitize the task name.
        # Technically this could be delayed to the compilation stage, but
        # string serialization of PipelineChannels makes unsanitized names
        # problematic.
        task_name = utils.maybe_rename_for_k8s(task.component_spec.name)
        # If there is an existing task with this name, generate a new name.
        task_name = utils.make_name_unique_by_adding_index(
            task_name, list(self.tasks.keys()), '-')
        if task_name == '':
            task_name = utils.make_name_unique_by_adding_index(
                'task', list(self.tasks.keys()), '-')

        self.tasks[task_name] = task
        if add_to_group:
            task.parent_task_group = self.groups[-1]
            self.groups[-1].tasks.append(task)

        return task_name

    def push_tasks_group(self, group: 'tasks_group.TasksGroup'):
        """Pushes a TasksGroup into the stack.

        Args:
            group: A TasksGroup. Typically it is one of ExitHandler, Condition,
                and ParallelFor.
        """
        self.groups[-1].groups.append(group)
        self.groups.append(group)

    def pop_tasks_group(self):
        """Removes the current TasksGroup from the stack."""
        del self.groups[-1]

    def remove_task_from_groups(self, task: pipeline_task.PipelineTask):
        """Removes a task from the pipeline.

        This is useful for excluding the exit handler from the pipeline.
        """
        for group in self.groups:
            group.remove_task_recursive(task)

    def get_next_group_id(self) -> str:
        """Gets the next id for a new group."""
        self._group_id += 1
        return str(self._group_id)
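# Illustrative sketch: the context manager above installs itself as the
# default pipeline so that task registration is routed through add_task()
# while the `with` block is active, then restores the previous handler.
p = Pipeline('demo')
assert Pipeline.get_default_pipeline() is None
with p:
    assert Pipeline.get_default_pipeline() is p
assert Pipeline.get_default_pipeline() is None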


@@ -0,0 +1,685 @@
# Copyright 2021-2022 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Pipeline task class and operations."""

import copy
import inspect
import itertools
import re
from typing import Any, Dict, List, Mapping, Optional, Union
import warnings

from kfp.dsl import constants
from kfp.dsl import pipeline_channel
from kfp.dsl import placeholders
from kfp.dsl import structures
from kfp.dsl import utils
from kfp.dsl.types import type_utils
from kfp.pipeline_spec import pipeline_spec_pb2

_register_task_handler = lambda task: utils.maybe_rename_for_k8s(
    task.component_spec.name)


class PipelineTask:
    """Represents a pipeline task (instantiated component).

    **Note:** ``PipelineTask`` should not be constructed by pipeline authors directly, but instead obtained via an instantiated component (see example).

    Replaces ``ContainerOp`` from ``kfp`` v1. Holds operations available on a task object, such as
    ``.after()``, ``.set_memory_limit()``, ``.enable_caching()``, etc.

    Args:
        component_spec: The component definition.
        args: The dictionary of arguments on which the component was called to instantiate this task.

    Example:
      ::

        @dsl.component
        def identity(message: str) -> str:
            return message

        @dsl.pipeline(name='my_pipeline')
        def my_pipeline():
            # task is an instance of PipelineTask
            task = identity(message='my string')
    """

    # Fallback behavior for compiling a component. This should be overridden
    # by the pipeline's `register_task_and_generate_id` if compiling a
    # pipeline (more than one component).
    _register_task_handler = _register_task_handler

    def __init__(
        self,
        component_spec: structures.ComponentSpec,
        args: Mapping[str, Any],
    ):
        """Initializes a PipelineTask instance."""
        # import within __init__ to avoid circular import
        from kfp.dsl.tasks_group import TasksGroup

        self.parent_task_group: Union[None, TasksGroup] = None
        args = args or {}

        for input_name, argument_value in args.items():

            if input_name not in component_spec.inputs:
                raise ValueError(
                    f'Component {component_spec.name!r} got an unexpected input:'
                    f' {input_name!r}.')

            input_spec = component_spec.inputs[input_name]

            type_utils.verify_type_compatibility(
                given_value=argument_value,
                expected_spec=input_spec,
                error_message_prefix=(
                    f'Incompatible argument passed to the input '
                    f'{input_name!r} of component {component_spec.name!r}: '),
            )

        self.component_spec = component_spec

        self._task_spec = structures.TaskSpec(
            name=self._register_task_handler(),
            inputs=dict(args.items()),
            dependent_tasks=[],
            component_ref=component_spec.name,
            enable_caching=True)
        self._run_after: List[str] = []

        self.importer_spec = None
        self.container_spec = None
        self.pipeline_spec = None
        self._ignore_upstream_failure_tag = False
        # platform_config for this primitive task; empty if the task is for a
        # graph component
        self.platform_config = {}

        def validate_placeholder_types(
                component_spec: structures.ComponentSpec) -> None:
            inputs_dict = component_spec.inputs or {}
            outputs_dict = component_spec.outputs or {}
            for arg in itertools.chain(
                (component_spec.implementation.container.command or []),
                (component_spec.implementation.container.args or [])):
                check_primitive_placeholder_is_used_for_correct_io_type(
                    inputs_dict, outputs_dict, arg)

        if component_spec.implementation.container is not None:
            validate_placeholder_types(component_spec)
            self.container_spec = self._extract_container_spec_and_convert_placeholders(
                component_spec=component_spec)
        elif component_spec.implementation.importer is not None:
            self.importer_spec = component_spec.implementation.importer
            self.importer_spec.artifact_uri = args['uri']
        else:
            self.pipeline_spec = self.component_spec.implementation.graph

        self._outputs = {
            output_name: pipeline_channel.create_pipeline_channel(
                name=output_name,
                channel_type=output_spec.type,
                task_name=self._task_spec.name,
                is_artifact_list=output_spec.is_artifact_list,
            ) for output_name, output_spec in (
                component_spec.outputs or {}).items()
        }

        self._inputs = args

        self._channel_inputs = [
            value for _, value in args.items()
            if isinstance(value, pipeline_channel.PipelineChannel)
        ] + pipeline_channel.extract_pipeline_channels_from_any([
            value for _, value in args.items()
            if not isinstance(value, pipeline_channel.PipelineChannel)
        ])

    @property
    def platform_spec(self) -> pipeline_spec_pb2.PlatformSpec:
        """PlatformSpec for all tasks in the pipeline this task represents.

        Only for use on tasks created from GraphComponents.
        """
        if self.pipeline_spec:
            return self.component_spec.platform_spec

        # The platform spec of a primitive task can only be created at
        # compile time, since the executor label is not known until then.
        raise ValueError(
            f'Can only access {".platform_spec"!r} property on tasks created from pipelines. Use {".platform_config"!r} for tasks created from primitive components.'
        )

    @property
    def name(self) -> str:
        """The name of the task.

        Unique within its parent group.
        """
        return self._task_spec.name

    @property
    def inputs(
        self
    ) -> List[Union[type_utils.PARAMETER_TYPES,
                    pipeline_channel.PipelineChannel]]:
        """The list of actual inputs passed to the task."""
        return self._inputs

    @property
    def channel_inputs(self) -> List[pipeline_channel.PipelineChannel]:
        """The list of all channel inputs passed to the task.

        :meta private:
        """
        return self._channel_inputs

    @property
    def output(self) -> pipeline_channel.PipelineChannel:
        """The single output of the task.

        Used when a task has exactly one output parameter.
        """
        if len(self._outputs) != 1:
            raise AttributeError(
                'The task has multiple outputs. Please reference the output by its name.'
            )
        return list(self._outputs.values())[0]

    @property
    def outputs(self) -> Mapping[str, pipeline_channel.PipelineChannel]:
        """The dictionary of outputs of the task.

        Used when a task has more than one output or uses an
        ``OutputPath`` or ``Output[Artifact]`` type annotation.
        """
        return self._outputs

    @property
    def dependent_tasks(self) -> List[str]:
        """A list of the dependent task names."""
        return self._task_spec.dependent_tasks

    def _extract_container_spec_and_convert_placeholders(
        self, component_spec: structures.ComponentSpec
    ) -> structures.ContainerSpecImplementation:
        """Extracts a ContainerSpec from a ComponentSpec and converts
        placeholder objects to strings.

        Args:
            component_spec: The component definition.
        """
        container_spec = copy.deepcopy(component_spec.implementation.container)
        if container_spec is None:
            raise ValueError(
                '_extract_container_spec_and_convert_placeholders used incorrectly. ComponentSpec.implementation.container is None.'
            )
        container_spec.command = [
            placeholders.convert_command_line_element_to_string(e)
            for e in container_spec.command or []
        ]
        container_spec.args = [
            placeholders.convert_command_line_element_to_string(e)
            for e in container_spec.args or []
        ]
        return container_spec

    def set_caching_options(self, enable_caching: bool) -> 'PipelineTask':
        """Sets caching options for the task.

        Args:
            enable_caching: Whether to enable caching.

        Returns:
            Self return to allow chained setting calls.
        """
        self._task_spec.enable_caching = enable_caching
        return self

    def _ensure_container_spec_exists(self) -> None:
        """Ensures that the task has a container spec."""
        caller_method_name = inspect.stack()[1][3]

        if self.container_spec is None:
            raise ValueError(
                f'{caller_method_name} can only be used on single-step components, not pipelines used as components, or special components like importers.'
            )

    def _validate_cpu_request_limit(self, cpu: str) -> float:
        """Validates a cpu request/limit string and converts it to its numeric
        value.

        Args:
            cpu: CPU requests or limits. This string should be a number or a
                number followed by an "m" to indicate millicores (1/1000). For
                more information, see `Specify a CPU Request and a CPU Limit
                <https://kubernetes.io/docs/tasks/configure-pod-container/assign-cpu-resource/#specify-a-cpu-request-and-a-cpu-limit>`_.

        Raises:
            ValueError: If the cpu request/limit string value is invalid.

        Returns:
            The numeric value (float) of the cpu request/limit.
        """
        if re.match(r'([0-9]*[.])?[0-9]+m?$', cpu) is None:
            raise ValueError(
                'Invalid cpu string. Should be float or integer, or integer'
                ' followed by "m".')

        return float(cpu[:-1]) / 1000 if cpu.endswith('m') else float(cpu)
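    # Illustrative sketch of the millicore conversion above, restated as a
    # standalone function for clarity (not the method itself):
    #
    #   def cpu_to_float(cpu: str) -> float:
    #       # '500m' -> 0.5 cores; '2' -> 2.0 cores
    #       return float(cpu[:-1]) / 1000 if cpu.endswith('m') else float(cpu)
    #
    #   assert cpu_to_float('500m') == 0.5
    #   assert cpu_to_float('2') == 2.0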

    def set_cpu_request(self, cpu: str) -> 'PipelineTask':
        """Sets CPU request (minimum) for the task.

        Args:
            cpu: Minimum CPU requests required. This string should be a number
                or a number followed by an "m" to indicate millicores (1/1000).
                For more information, see `Specify a CPU Request and a CPU Limit
                <https://kubernetes.io/docs/tasks/configure-pod-container/assign-cpu-resource/#specify-a-cpu-request-and-a-cpu-limit>`_.

        Returns:
            Self return to allow chained setting calls.
        """
        self._ensure_container_spec_exists()

        cpu = self._validate_cpu_request_limit(cpu)

        if self.container_spec.resources is not None:
            self.container_spec.resources.cpu_request = cpu
        else:
            self.container_spec.resources = structures.ResourceSpec(
                cpu_request=cpu)

        return self

    def set_cpu_limit(self, cpu: str) -> 'PipelineTask':
        """Sets CPU limit (maximum) for the task.

        Args:
            cpu: Maximum CPU requests allowed. This string should be a number
                or a number followed by an "m" to indicate millicores (1/1000).
                For more information, see `Specify a CPU Request and a CPU Limit
                <https://kubernetes.io/docs/tasks/configure-pod-container/assign-cpu-resource/#specify-a-cpu-request-and-a-cpu-limit>`_.

        Returns:
            Self return to allow chained setting calls.
        """
        self._ensure_container_spec_exists()

        cpu = self._validate_cpu_request_limit(cpu)

        if self.container_spec.resources is not None:
            self.container_spec.resources.cpu_limit = cpu
        else:
            self.container_spec.resources = structures.ResourceSpec(
                cpu_limit=cpu)

        return self

    def set_accelerator_limit(self, limit: int) -> 'PipelineTask':
        """Sets accelerator limit (maximum) for the task. Only applies if the
        accelerator type is also set via .set_accelerator_type().

        Args:
            limit: Maximum number of accelerators allowed.

        Returns:
            Self return to allow chained setting calls.
        """
        self._ensure_container_spec_exists()

        if isinstance(limit, str):
            if re.match(r'[1-9]\d*$', limit) is None:
                raise ValueError(f'{"limit"!r} must be a positive integer.')
            limit = int(limit)

        if self.container_spec.resources is not None:
            self.container_spec.resources.accelerator_count = limit
        else:
            self.container_spec.resources = structures.ResourceSpec(
                accelerator_count=limit)

        return self

    def set_gpu_limit(self, gpu: str) -> 'PipelineTask':
        """Sets GPU limit (maximum) for the task. Only applies if the
        accelerator type is also set via .set_accelerator_type().

        Args:
            gpu: The maximum GPU requests allowed. This string should be a
                positive integer number of GPUs.

        Returns:
            Self return to allow chained setting calls.

        :meta private:
        """
        warnings.warn(
            f'{self.set_gpu_limit.__name__!r} is deprecated. Please use {self.set_accelerator_limit.__name__!r} instead.',
            category=DeprecationWarning)
        return self.set_accelerator_limit(gpu)

    def _validate_memory_request_limit(self, memory: str) -> float:
        """Validates a memory request/limit string and converts it to its
        numeric value.

        Args:
            memory: Memory requests or limits. This string should be a number
                or a number followed by one of "E", "Ei", "P", "Pi", "T", "Ti",
                "G", "Gi", "M", "Mi", "K", or "Ki".

        Raises:
            ValueError: If the memory request/limit string value is invalid.

        Returns:
            The numeric value (float) of the memory request/limit.
        """
        if re.match(r'^[0-9]+(E|Ei|P|Pi|T|Ti|G|Gi|M|Mi|K|Ki){0,1}$',
                    memory) is None:
            raise ValueError(
                'Invalid memory string. Should be a number or a number '
                'followed by one of "E", "Ei", "P", "Pi", "T", "Ti", "G", '
                '"Gi", "M", "Mi", "K", "Ki".')

        if memory.endswith('E'):
            memory = float(memory[:-1]) * constants._E / constants._G
        elif memory.endswith('Ei'):
            memory = float(memory[:-2]) * constants._EI / constants._G
        elif memory.endswith('P'):
            memory = float(memory[:-1]) * constants._P / constants._G
        elif memory.endswith('Pi'):
            memory = float(memory[:-2]) * constants._PI / constants._G
        elif memory.endswith('T'):
            memory = float(memory[:-1]) * constants._T / constants._G
        elif memory.endswith('Ti'):
            memory = float(memory[:-2]) * constants._TI / constants._G
        elif memory.endswith('G'):
            memory = float(memory[:-1])
        elif memory.endswith('Gi'):
            memory = float(memory[:-2]) * constants._GI / constants._G
        elif memory.endswith('M'):
            memory = float(memory[:-1]) * constants._M / constants._G
        elif memory.endswith('Mi'):
            memory = float(memory[:-2]) * constants._MI / constants._G
        elif memory.endswith('K'):
            memory = float(memory[:-1]) * constants._K / constants._G
        elif memory.endswith('Ki'):
            memory = float(memory[:-2]) * constants._KI / constants._G
        else:
            # By default, interpret as a plain integer in the unit of bytes.
            memory = float(memory) / constants._G

        return memory
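    # Illustrative check of the normalization above: every unit is converted
    # to decimal gigabytes. Assuming kfp.dsl.constants uses the usual values
    # (_MI = 1024**2, _G = 10**9), '512Mi' normalizes to ~0.54:
    #
    #   _MI, _G = 1024 ** 2, 10 ** 9
    #   assert 512 * _MI / _G == 0.536870912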

    def set_memory_request(self, memory: str) -> 'PipelineTask':
        """Sets memory request (minimum) for the task.

        Args:
            memory: The minimum memory requests required. This string should be
                a number or a number followed by one of "E", "Ei", "P", "Pi",
                "T", "Ti", "G", "Gi", "M", "Mi", "K", or "Ki".

        Returns:
            Self return to allow chained setting calls.
        """
        self._ensure_container_spec_exists()

        memory = self._validate_memory_request_limit(memory)

        if self.container_spec.resources is not None:
            self.container_spec.resources.memory_request = memory
        else:
            self.container_spec.resources = structures.ResourceSpec(
                memory_request=memory)

        return self

    def set_memory_limit(self, memory: str) -> 'PipelineTask':
        """Sets memory limit (maximum) for the task.

        Args:
            memory: The maximum memory requests allowed. This string should be
                a number or a number followed by one of "E", "Ei", "P", "Pi",
                "T", "Ti", "G", "Gi", "M", "Mi", "K", or "Ki".

        Returns:
            Self return to allow chained setting calls.
        """
        self._ensure_container_spec_exists()

        memory = self._validate_memory_request_limit(memory)

        if self.container_spec.resources is not None:
            self.container_spec.resources.memory_limit = memory
        else:
            self.container_spec.resources = structures.ResourceSpec(
                memory_limit=memory)

        return self

    def set_retry(self,
                  num_retries: int,
                  backoff_duration: Optional[str] = None,
                  backoff_factor: Optional[float] = None,
                  backoff_max_duration: Optional[str] = None) -> 'PipelineTask':
        """Sets task retry parameters.

        Args:
            num_retries: Number of times to retry on failure.
            backoff_duration: Number of seconds to wait before triggering a retry. Defaults to ``'0s'`` (immediate retry).
            backoff_factor: Exponential backoff factor applied to ``backoff_duration``. For example, if ``backoff_duration="60"`` (60 seconds) and ``backoff_factor=2``, the first retry will happen after 60 seconds, then again after 120, 240, and so on. Defaults to ``2.0``.
            backoff_max_duration: Maximum duration during which the task will be retried. Maximum duration is 1 hour (3600s). Defaults to ``'3600s'``.

        Returns:
            Self return to allow chained setting calls.
        """
        self._task_spec.retry_policy = structures.RetryPolicy(
            max_retry_count=num_retries,
            backoff_duration=backoff_duration,
            backoff_factor=backoff_factor,
            backoff_max_duration=backoff_max_duration,
        )
        return self
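    # Usage sketch: since every setter above returns `self`, resource and
    # retry configuration can be chained fluently, e.g. (with a hypothetical
    # `train_op` component):
    #
    #   task = train_op(data=dataset)
    #   task.set_cpu_limit('2').set_memory_limit('4G').set_retry(num_retries=3)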

    def add_node_selector_constraint(self, accelerator: str) -> 'PipelineTask':
        """Sets the accelerator type to use when executing this task.

        Args:
            accelerator: The name of the accelerator, such as ``'NVIDIA_TESLA_K80'``, ``'TPU_V3'``, ``'nvidia.com/gpu'`` or ``'cloud-tpus.google.com/v3'``.

        Returns:
            Self return to allow chained setting calls.
        """
        warnings.warn(
            f'{self.add_node_selector_constraint.__name__!r} is deprecated. Please use {self.set_accelerator_type.__name__!r} instead.',
            category=DeprecationWarning)
        return self.set_accelerator_type(accelerator)

    def set_accelerator_type(self, accelerator: str) -> 'PipelineTask':
        """Sets the accelerator type to use when executing this task.

        Args:
            accelerator: The name of the accelerator, such as ``'NVIDIA_TESLA_K80'``, ``'TPU_V3'``, ``'nvidia.com/gpu'`` or ``'cloud-tpus.google.com/v3'``.

        Returns:
            Self return to allow chained setting calls.
        """
        self._ensure_container_spec_exists()

        if self.container_spec.resources is not None:
            self.container_spec.resources.accelerator_type = accelerator
            if self.container_spec.resources.accelerator_count is None:
                self.container_spec.resources.accelerator_count = 1
        else:
            self.container_spec.resources = structures.ResourceSpec(
                accelerator_count=1, accelerator_type=accelerator)

        return self

    def set_display_name(self, name: str) -> 'PipelineTask':
        """Sets the display name for the task.

        Args:
            name: Display name.

        Returns:
            Self return to allow chained setting calls.
        """
        self._task_spec.display_name = name
        return self

    def set_env_variable(self, name: str, value: str) -> 'PipelineTask':
        """Sets an environment variable for the task.

        Args:
            name: Environment variable name.
            value: Environment variable value.

        Returns:
            Self return to allow chained setting calls.
        """
        self._ensure_container_spec_exists()

        if self.container_spec.env is not None:
            self.container_spec.env[name] = value
        else:
            self.container_spec.env = {name: value}
        return self

    def after(self, *tasks) -> 'PipelineTask':
        """Specifies an explicit dependency on other tasks by requiring this
        task to be executed after the given tasks have completed.

        Args:
            *tasks: Tasks after which this task should be executed.

        Returns:
            Self return to allow chained setting calls.

        Example:
          ::

            @dsl.pipeline(name='my-pipeline')
            def my_pipeline():
                task1 = my_component(text='1st task')
                task2 = my_component(text='2nd task').after(task1)
        """
        for task in tasks:
            self._run_after.append(task.name)
            self._task_spec.dependent_tasks.append(task.name)
        return self

    def ignore_upstream_failure(self) -> 'PipelineTask':
        """If called, the pipeline task will run when any specified upstream
        tasks complete, even if unsuccessful.

        This method effectively turns the caller task into an exit task
        if the caller task has upstream dependencies.

        If the task has no upstream tasks, either via data exchange or an explicit dependency via .after(), this method has no effect.

        Returns:
            Self return to allow chained setting calls.

        Example:
          ::

            @dsl.pipeline()
            def my_pipeline(text: str = 'message'):
                task = fail_op(message=text)
                clean_up_task = print_op(
                    message=task.output).ignore_upstream_failure()
        """

        for input_spec_name, input_spec in (self.component_spec.inputs or
                                            {}).items():
            argument_value = self._inputs[input_spec_name]
            if (isinstance(argument_value, pipeline_channel.PipelineChannel)
               ) and (not input_spec.optional) and (argument_value.task_name
                                                    is not None):
                raise ValueError(
                    f'Tasks can only use .ignore_upstream_failure() if all input parameters that accept arguments created by an upstream task have a default value, in case the upstream task fails to produce its output. The argument {input_spec_name!r} of task {self.name!r} is an output of an upstream task {argument_value.task_name!r}, but {input_spec_name!r} has no default value.'
                )

        self._ignore_upstream_failure_tag = True

        return self


# TODO: this function should ideally be in the function kfp.dsl.structures.check_placeholder_references_valid_io_name, which does something similar, but this causes the exception to be raised at component definition time, rather than compile time. This would break tests that load v1 component YAML, even though that YAML is invalid.
def check_primitive_placeholder_is_used_for_correct_io_type(
    inputs_dict: Dict[str, structures.InputSpec],
    outputs_dict: Dict[str, structures.OutputSpec],
    arg: Union[placeholders.CommandLineElement, Any],
):
    """Validates that input/output placeholders refer to an input/output with
    an appropriate type for the placeholder. This should only apply to
    components loaded from v1 component YAML, where the YAML is authored
    directly. For v2 YAML, this is encapsulated in the DSL logic, which does
    not permit writing incorrect placeholders.

    Args:
        inputs_dict: The existing input names.
        outputs_dict: The existing output names.
        arg: The command line element, which may be a placeholder.
    """

    if isinstance(arg, placeholders.InputValuePlaceholder):
        input_name = arg.input_name
        if not type_utils.is_parameter_type(inputs_dict[input_name].type):
            raise TypeError(
                f'Input "{input_name}" with type '
                f'"{inputs_dict[input_name].type}" cannot be paired with '
                'InputValuePlaceholder.')

    elif isinstance(
            arg,
        (placeholders.InputUriPlaceholder, placeholders.InputPathPlaceholder)):
        input_name = arg.input_name
        if type_utils.is_parameter_type(inputs_dict[input_name].type):
            raise TypeError(
                f'Input "{input_name}" with type '
                f'"{inputs_dict[input_name].type}" cannot be paired with '
                f'{arg.__class__.__name__}.')

    elif isinstance(arg, placeholders.OutputUriPlaceholder):
        output_name = arg.output_name
        if type_utils.is_parameter_type(outputs_dict[output_name].type):
            raise TypeError(
                f'Output "{output_name}" with type '
                f'"{outputs_dict[output_name].type}" cannot be paired with '
                f'{arg.__class__.__name__}.')

    elif isinstance(arg, placeholders.IfPresentPlaceholder):
        all_normalized_args: List[placeholders.CommandLineElement] = []
        if arg.then is None:
            pass
        elif isinstance(arg.then, list):
            all_normalized_args.extend(arg.then)
        else:
            all_normalized_args.append(arg.then)

        if arg.else_ is None:
            pass
        elif isinstance(arg.else_, list):
            all_normalized_args.extend(arg.else_)
        else:
            all_normalized_args.append(arg.else_)

        for arg in all_normalized_args:
            check_primitive_placeholder_is_used_for_correct_io_type(
                inputs_dict, outputs_dict, arg)
    elif isinstance(arg, placeholders.ConcatPlaceholder):
        for arg in arg.items:
            check_primitive_placeholder_is_used_for_correct_io_type(
                inputs_dict, outputs_dict, arg)
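# Illustrative sketch of the pairing rules enforced above; assumes
# structures.InputSpec accepts a `type` keyword, as used elsewhere in the SDK.
_demo_inputs = {'msg': structures.InputSpec(type='String')}
check_primitive_placeholder_is_used_for_correct_io_type(
    _demo_inputs, {}, placeholders.InputValuePlaceholder('msg'))  # parameter: OK
try:
    check_primitive_placeholder_is_used_for_correct_io_type(
        _demo_inputs, {}, placeholders.InputUriPlaceholder('msg'))
except TypeError:
    pass  # expected: a parameter-typed input cannot take a URI placeholder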
@@ -18,7 +18,6 @@ import unittest

 from absl.testing import parameterized
 from kfp import dsl
-from kfp.components import load_yaml_utilities
 from kfp.dsl import pipeline_task
 from kfp.dsl import placeholders
 from kfp.dsl import structures

@@ -113,8 +112,8 @@ class PipelineTaskTest(parameterized.TestCase):
         )

         task = pipeline_task.PipelineTask(
-            component_spec=load_yaml_utilities
-            ._load_component_spec_from_yaml_documents(V2_YAML),
+            component_spec=structures.ComponentSpec.from_yaml_documents(
+                V2_YAML),
             args={'input1': 'value'},
         )
         self.assertEqual(task._task_spec, expected_task_spec)

@@ -126,8 +125,8 @@ class PipelineTaskTest(parameterized.TestCase):
                 ValueError,
                 "Component 'component1' got an unexpected input: 'input0'."):
             task = pipeline_task.PipelineTask(
-                component_spec=load_yaml_utilities
-                ._load_component_spec_from_yaml_documents(V2_YAML),
+                component_spec=structures.ComponentSpec.from_yaml_documents(
+                    V2_YAML),
                 args={
                     'input1': 'value',
                     'input0': 'abc',

@@ -136,8 +135,8 @@ class PipelineTaskTest(parameterized.TestCase):

     def test_set_caching_options(self):
         task = pipeline_task.PipelineTask(
-            component_spec=load_yaml_utilities
-            ._load_component_spec_from_yaml_documents(V2_YAML),
+            component_spec=structures.ComponentSpec.from_yaml_documents(
+                V2_YAML),
             args={'input1': 'value'},
         )
         task.set_caching_options(False)

@@ -164,8 +163,8 @@ class PipelineTaskTest(parameterized.TestCase):
     def test_set_valid_cpu_request_limit(self, cpu: str,
                                          expected_cpu_number: float):
         task = pipeline_task.PipelineTask(
-            component_spec=load_yaml_utilities
-            ._load_component_spec_from_yaml_documents(V2_YAML),
+            component_spec=structures.ComponentSpec.from_yaml_documents(
+                V2_YAML),
             args={'input1': 'value'},
         )
         task.set_cpu_request(cpu)

@@ -183,8 +182,8 @@ class PipelineTaskTest(parameterized.TestCase):
     def test_set_valid_gpu_limit(self, gpu_limit: str,
                                  expected_gpu_number: int):
         task = pipeline_task.PipelineTask(
-            component_spec=load_yaml_utilities
-            ._load_component_spec_from_yaml_documents(V2_YAML),
+            component_spec=structures.ComponentSpec.from_yaml_documents(
+                V2_YAML),
             args={'input1': 'value'},
         )
         with self.assertWarnsRegex(

@@ -197,8 +196,8 @@ class PipelineTaskTest(parameterized.TestCase):

     def test_add_valid_node_selector_constraint(self):
         task = pipeline_task.PipelineTask(
-            component_spec=load_yaml_utilities
-            ._load_component_spec_from_yaml_documents(V2_YAML),
+            component_spec=structures.ComponentSpec.from_yaml_documents(
+                V2_YAML),
             args={'input1': 'value'},
         )
         with self.assertWarnsRegex(

@@ -221,8 +220,8 @@ class PipelineTaskTest(parameterized.TestCase):
     )
     def test_set_accelerator_limit(self, limit, expected):
         task = pipeline_task.PipelineTask(
-            component_spec=load_yaml_utilities
-            ._load_component_spec_from_yaml_documents(V2_YAML),
+            component_spec=structures.ComponentSpec.from_yaml_documents(
+                V2_YAML),
             args={'input1': 'value'},
         )

@@ -286,8 +285,8 @@ class PipelineTaskTest(parameterized.TestCase):
     )
     def test_set_memory_limit(self, memory: str, expected_memory_number: int):
         task = pipeline_task.PipelineTask(
-            component_spec=load_yaml_utilities
-            ._load_component_spec_from_yaml_documents(V2_YAML),
+            component_spec=structures.ComponentSpec.from_yaml_documents(
+                V2_YAML),
             args={'input1': 'value'},
         )
         task.set_memory_request(memory)

@@ -299,8 +298,8 @@ class PipelineTaskTest(parameterized.TestCase):

     def test_set_accelerator_type_with_type_only(self):
         task = pipeline_task.PipelineTask(
-            component_spec=load_yaml_utilities
-            ._load_component_spec_from_yaml_documents(V2_YAML),
+            component_spec=structures.ComponentSpec.from_yaml_documents(
+                V2_YAML),
             args={'input1': 'value'},
         )
         task.set_accelerator_type('NVIDIA_TESLA_K80')

@@ -311,8 +310,8 @@ class PipelineTaskTest(parameterized.TestCase):

     def test_set_accelerator_type_with_accelerator_count(self):
         task = pipeline_task.PipelineTask(
-            component_spec=load_yaml_utilities
-            ._load_component_spec_from_yaml_documents(V2_YAML),
+            component_spec=structures.ComponentSpec.from_yaml_documents(
+                V2_YAML),
             args={'input1': 'value'},
         )
         task.set_accelerator_limit('5').set_accelerator_type('TPU_V3')

@@ -323,8 +322,8 @@ class PipelineTaskTest(parameterized.TestCase):

     def test_set_env_variable(self):
         task = pipeline_task.PipelineTask(
-            component_spec=load_yaml_utilities
-            ._load_component_spec_from_yaml_documents(V2_YAML),
+            component_spec=structures.ComponentSpec.from_yaml_documents(
+                V2_YAML),
             args={'input1': 'value'},
         )
         task.set_env_variable('env_name', 'env_value')

@@ -332,8 +331,8 @@ class PipelineTaskTest(parameterized.TestCase):

     def test_set_display_name(self):
         task = pipeline_task.PipelineTask(
-            component_spec=load_yaml_utilities
-            ._load_component_spec_from_yaml_documents(V2_YAML),
+            component_spec=structures.ComponentSpec.from_yaml_documents(
+                V2_YAML),
             args={'input1': 'value'},
         )
         task.set_display_name('test_name')
@@ -0,0 +1,458 @@
# Copyright 2022 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains data structures and functions for handling input and output
placeholders."""

import abc
import json
from typing import Any, Dict, List, Optional, Union

from kfp.dsl import utils
from kfp.dsl.types import type_utils


class Placeholder(abc.ABC):

    @abc.abstractmethod
    def _to_string(self) -> str:
        raise NotImplementedError

    def __str__(self) -> str:
        """Enables use of placeholders in f-strings.

        To be overridden by container placeholders ConcatPlaceholder and
        IfPresentPlaceholder, which cannot be used in an f-string.
        """
        return self._to_string()

    def __eq__(self, other: Any) -> bool:
        """Used for comparing placeholders in tests."""
        return isinstance(other,
                          self.__class__) and self.__dict__ == other.__dict__


class ExecutorInputPlaceholder(Placeholder):

    def _to_string(self) -> str:
        return '{{$}}'


class InputValuePlaceholder(Placeholder):

    def __init__(self, input_name: str) -> None:
        self.input_name = input_name

    def _to_string(self) -> str:
        return f"{{{{$.inputs.parameters['{self.input_name}']}}}}"


class InputListOfArtifactsPlaceholder(Placeholder):

    def __init__(self, input_name: str) -> None:
        self.input_name = input_name

    def _to_string(self) -> str:
        return f"{{{{$.inputs.artifacts['{self.input_name}']}}}}"

    def __getattribute__(self, name: str) -> Any:
        if name in {'name', 'uri', 'metadata', 'path'}:
            raise AttributeError(
                f'Cannot access an attribute on a list of artifacts in a Custom Container Component. Found reference to attribute {name!r} on {self.input_name!r}. Please pass the whole list of artifacts only.'
            )
        else:
            return object.__getattribute__(self, name)

    def __getitem__(self, k: int) -> None:
        raise KeyError(
            f'Cannot access individual artifacts in a list of artifacts. Found access to element {k} on {self.input_name!r}. Please pass the whole list of artifacts only.'
        )
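# Illustrative: the guards above fail fast when a user tries to address a
# single artifact, or an artifact attribute, on a list-of-artifacts input.
artifacts_in = InputListOfArtifactsPlaceholder('datasets')
try:
    artifacts_in[0]
except KeyError:
    pass  # expected: individual elements cannot be accessed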


class OutputListOfArtifactsPlaceholder(Placeholder):

    def __init__(self, output_name: str) -> None:
        self.output_name = output_name

    def _to_string(self) -> str:
        return f"{{{{$.outputs.artifacts['{self.output_name}']}}}}"

    def __getattribute__(self, name: str) -> Any:
        if name in {'name', 'uri', 'metadata', 'path'}:
            raise AttributeError(
                f'Cannot access an attribute on a list of artifacts in a Custom Container Component. Found reference to attribute {name!r} on {self.output_name!r}. Please pass the whole list of artifacts only.'
            )
        else:
            return object.__getattribute__(self, name)

    def __getitem__(self, k: int) -> None:
        raise KeyError(
            f'Cannot access individual artifacts in a list of artifacts. Found access to element {k} on {self.output_name!r}. Please pass the whole list of artifacts only.'
        )


class InputPathPlaceholder(Placeholder):

    def __init__(self, input_name: str) -> None:
        self.input_name = input_name

    def _to_string(self) -> str:
        return f"{{{{$.inputs.artifacts['{self.input_name}'].path}}}}"


class InputUriPlaceholder(Placeholder):

    def __init__(self, input_name: str) -> None:
        self.input_name = input_name

    def _to_string(self) -> str:
        return f"{{{{$.inputs.artifacts['{self.input_name}'].uri}}}}"


class InputMetadataPlaceholder(Placeholder):

    def __init__(self, input_name: str) -> None:
        self.input_name = input_name

    def _to_string(self) -> str:
        return f"{{{{$.inputs.artifacts['{self.input_name}'].metadata}}}}"

    def __getitem__(self, key: str) -> str:
        return f"{{{{$.inputs.artifacts['{self.input_name}'].metadata['{key}']}}}}"


class OutputParameterPlaceholder(Placeholder):

    def __init__(self, output_name: str) -> None:
        self.output_name = output_name

    def _to_string(self) -> str:
        return f"{{{{$.outputs.parameters['{self.output_name}'].output_file}}}}"


class OutputPathPlaceholder(Placeholder):

    def __init__(self, output_name: str) -> None:
        self.output_name = output_name

    def _to_string(self) -> str:
        return f"{{{{$.outputs.artifacts['{self.output_name}'].path}}}}"


class OutputUriPlaceholder(Placeholder):

    def __init__(self, output_name: str) -> None:
        self.output_name = output_name

    def _to_string(self) -> str:
        return f"{{{{$.outputs.artifacts['{self.output_name}'].uri}}}}"


class OutputMetadataPlaceholder(Placeholder):

    def __init__(self, output_name: str) -> None:
        self.output_name = output_name

    def _to_string(self) -> str:
        return f"{{{{$.outputs.artifacts['{self.output_name}'].metadata}}}}"

    def __getitem__(self, key: str) -> str:
        return f"{{{{$.outputs.artifacts['{self.output_name}'].metadata['{key}']}}}}"


class ConcatPlaceholder(Placeholder):
    """Placeholder for concatenating multiple strings. May contain other
    placeholders.

    Args:
        items: Elements to concatenate.

    Examples:
      ::

        @container_component
        def container_with_concat_placeholder(text1: str, text2: Output[Dataset],
                                              output_path: OutputPath(str)):
            return ContainerSpec(
                image='python:3.7',
                command=[
                    'my_program',
                    ConcatPlaceholder(['prefix-', text1, text2.uri])
                ],
                args=['--output_path', output_path]
            )
    """

    def __init__(self, items: List['CommandLineElement']) -> None:
        for item in items:
            if isinstance(item, IfPresentPlaceholder):
                item._validate_then_and_else_are_only_single_element()
        self.items = items

    def _to_dict(self) -> Dict[str, Any]:
        return {
            'Concat': [
                convert_command_line_element_to_string_or_struct(item)
                for item in self.items
            ]
        }

    def _to_string(self) -> str:
        return json.dumps(self._to_dict())

    def __str__(self) -> str:
        raise ValueError(
            f'Cannot use {self.__class__.__name__} in an f-string.')


class IfPresentPlaceholder(Placeholder):
    """Placeholder for handling cases where an input may or may not be passed.
    May contain other placeholders.

    Args:
        input_name: Name of the input/output.
        then: If the input/output specified in name is present, the command-line argument will be replaced at run-time by the value of then.
        else_: If the input/output specified in name is not present, the command-line argument will be replaced at run-time by the value of else_.

    Examples:
      ::

        @container_component
        def container_with_if_placeholder(output_path: OutputPath(str),
                                          dataset: Output[Dataset],
                                          optional_input: str = 'default'):
            return ContainerSpec(
                image='python:3.7',
                command=[
                    'my_program',
                    IfPresentPlaceholder(
                        input_name='optional_input',
                        then=[optional_input],
                        else_=['no_input']), '--dataset',
                    IfPresentPlaceholder(
                        input_name='optional_input', then=[dataset.uri], else_=['no_dataset'])
                ],
                args=['--output_path', output_path]
            )
    """

    def __init__(
        self,
        input_name: str,
        then: Union['CommandLineElement', List['CommandLineElement']],
        else_: Optional[Union['CommandLineElement',
                              List['CommandLineElement']]] = None,
    ) -> None:
        self.input_name = input_name
        self.then = then
        self.else_ = else_

    def _validate_then_and_else_are_only_single_element(self) -> None:
        """Recursively validates that then and else contain only a single
        element.

        This method should only be called by a ConcatPlaceholder, which
        cannot have an IfPresentPlaceholder with a list in either 'then'
        or 'else_'.
        """

        # the illegal state
        if isinstance(self.then, list) or isinstance(self.else_, list):
            raise ValueError(
                f'Cannot use {IfPresentPlaceholder.__name__} within {ConcatPlaceholder.__name__} when `then` and `else_` arguments to {IfPresentPlaceholder.__name__} are lists. Please use a single element for `then` and `else_` only.'
            )

        # check that there is no illegal state found recursively
        if isinstance(self.then, ConcatPlaceholder):
            for item in self.then.items:
                if isinstance(item, IfPresentPlaceholder):
                    item._validate_then_and_else_are_only_single_element()
        elif isinstance(self.then, IfPresentPlaceholder):
            self.then._validate_then_and_else_are_only_single_element()

        if isinstance(self.else_, ConcatPlaceholder):
            for item in self.else_.items:
                if isinstance(item, IfPresentPlaceholder):
                    item._validate_then_and_else_are_only_single_element()
        elif isinstance(self.else_, IfPresentPlaceholder):
            self.else_._validate_then_and_else_are_only_single_element()

    def _to_dict(self) -> Dict[str, Any]:
        struct = {
            'IfPresent': {
                'InputName':
                    self.input_name,
                'Then': [
                    convert_command_line_element_to_string_or_struct(e)
                    for e in self.then
                ] if isinstance(self.then, list) else
                        convert_command_line_element_to_string_or_struct(
                            self.then)
            }
        }
        if self.else_:
            struct['IfPresent']['Else'] = [
                convert_command_line_element_to_string_or_struct(e)
                for e in self.else_
            ] if isinstance(
                self.else_,
                list) else convert_command_line_element_to_string_or_struct(
                    self.else_)
        return struct

    def _to_string(self) -> str:
        return json.dumps(self._to_dict())

    def __str__(self) -> str:
        raise ValueError(
            f'Cannot use {self.__class__.__name__} in an f-string.')


_CONTAINER_PLACEHOLDERS = (IfPresentPlaceholder, ConcatPlaceholder)
PRIMITIVE_INPUT_PLACEHOLDERS = (InputValuePlaceholder, InputPathPlaceholder,
                                InputUriPlaceholder, InputMetadataPlaceholder,
                                InputListOfArtifactsPlaceholder)
PRIMITIVE_OUTPUT_PLACEHOLDERS = (OutputParameterPlaceholder,
                                 OutputPathPlaceholder, OutputUriPlaceholder,
                                 OutputMetadataPlaceholder,
                                 OutputListOfArtifactsPlaceholder)

CommandLineElement = Union[str, Placeholder]


def convert_command_line_element_to_string(
        element: Union[str, Placeholder]) -> str:
    return element._to_string() if isinstance(element, Placeholder) else element


def convert_command_line_element_to_string_or_struct(
        element: Union[Placeholder, Any]) -> Any:
    if isinstance(element, Placeholder):
        return element._to_dict() if isinstance(
            element, _CONTAINER_PLACEHOLDERS) else element._to_string()

    return element
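# Illustrative checks of the serialization rules defined above: primitive
# placeholders become placeholder strings, container placeholders become
# structs, and plain strings pass through untouched.
assert convert_command_line_element_to_string('--flag') == '--flag'
assert convert_command_line_element_to_string(
    OutputUriPlaceholder('model')) == "{{$.outputs.artifacts['model'].uri}}"
assert ConcatPlaceholder(['prefix-',
                          InputValuePlaceholder('text1')])._to_dict() == {
                              'Concat': [
                                  'prefix-',
                                  "{{$.inputs.parameters['text1']}}"
                              ]
                          }
assert IfPresentPlaceholder(
    input_name='opt',
    then=['--opt', InputValuePlaceholder('opt')])._to_dict() == {
        'IfPresent': {
            'InputName': 'opt',
            'Then': ['--opt', "{{$.inputs.parameters['opt']}}"],
        }
    }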
||||
def maybe_convert_v1_yaml_placeholder_to_v2_placeholder(
|
||||
arg: Dict[str, Any],
|
||||
component_dict: Dict[str, Any]) -> Union[CommandLineElement, Any]:
|
||||
if isinstance(arg, str):
|
||||
return arg
|
||||
|
||||
if not isinstance(arg, dict):
|
||||
raise ValueError
|
||||
|
||||
has_one_entry = len(arg) == 1
|
||||
|
||||
if not has_one_entry:
|
||||
raise ValueError(
|
||||
f'Got unexpected dictionary {arg}. Expected a dictionary with one entry.'
|
||||
)
|
||||
|
||||
first_key = list(arg.keys())[0]
|
||||
first_value = list(arg.values())[0]
|
||||
if first_key == 'inputValue':
|
||||
return InputValuePlaceholder(
|
||||
input_name=utils.sanitize_input_name(first_value))
|
||||
|
||||
elif first_key == 'inputPath':
|
||||
return InputPathPlaceholder(
|
||||
input_name=utils.sanitize_input_name(first_value))
|
||||
|
||||
elif first_key == 'inputUri':
|
||||
return InputUriPlaceholder(
|
||||
input_name=utils.sanitize_input_name(first_value))
|
||||
|
||||
elif first_key == 'outputPath':
|
||||
outputs = component_dict['outputs']
|
||||
for output in outputs:
|
||||
if output['name'] == first_value:
|
||||
type_ = output.get('type')
|
||||
is_parameter = type_utils.is_parameter_type(type_)
|
||||
if is_parameter:
|
||||
return OutputParameterPlaceholder(
|
||||
output_name=utils.sanitize_input_name(first_value))
|
||||
else:
|
||||
return OutputPathPlaceholder(
|
||||
output_name=utils.sanitize_input_name(first_value))
|
||||
raise ValueError(
|
||||
f'{first_value} not found in component outputs. Could not process placeholders. Component spec: {component_dict}.'
|
||||
)
|
||||
|
||||
elif first_key == 'outputUri':
|
||||
return OutputUriPlaceholder(
|
||||
output_name=utils.sanitize_input_name(first_value))
|
||||
|
||||
    elif first_key == 'ifPresent':
        structure_kwargs = arg['ifPresent']
        structure_kwargs['input_name'] = structure_kwargs.pop('inputName')
        structure_kwargs['else_'] = structure_kwargs.pop('else')
        structure_kwargs['then'] = [
            maybe_convert_v1_yaml_placeholder_to_v2_placeholder(
                e, component_dict=component_dict)
            for e in structure_kwargs['then']
        ]
        structure_kwargs['else_'] = [
            maybe_convert_v1_yaml_placeholder_to_v2_placeholder(
                e, component_dict=component_dict)
            for e in structure_kwargs['else_']
        ]
        return IfPresentPlaceholder(**structure_kwargs)

    elif first_key == 'concat':
        return ConcatPlaceholder(items=[
            maybe_convert_v1_yaml_placeholder_to_v2_placeholder(
                e, component_dict=component_dict) for e in arg['concat']
        ])

    elif first_key == 'executorInput':
        return ExecutorInputPlaceholder()

    elif 'if' in arg:
        if_ = arg['if']
        input_name = utils.sanitize_input_name(if_['cond']['isPresent'])
        then = if_['then']
        else_ = if_.get('else')

        if isinstance(then, list):
            then = [
                maybe_convert_v1_yaml_placeholder_to_v2_placeholder(
                    val, component_dict=component_dict) for val in then
            ]
        else:
            then = maybe_convert_v1_yaml_placeholder_to_v2_placeholder(
                then, component_dict=component_dict)
        if else_ is None:
            pass
        elif isinstance(else_, list):
            else_ = [
                maybe_convert_v1_yaml_placeholder_to_v2_placeholder(
                    val, component_dict=component_dict) for val in else_
            ]
        else:
            else_ = maybe_convert_v1_yaml_placeholder_to_v2_placeholder(
                else_, component_dict=component_dict)
        return IfPresentPlaceholder(
            input_name=input_name, then=then, else_=else_)

    elif 'concat' in arg:
        return ConcatPlaceholder(items=[
            maybe_convert_v1_yaml_placeholder_to_v2_placeholder(
                val, component_dict=component_dict) for val in arg['concat']
        ])
    else:
        raise TypeError(f'Unexpected argument {arg} of type {type(arg)}.')
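
As a quick illustration of the conversion above (a sketch; the input dict
mirrors the v1 component YAML placeholder structure this function handles)::

    # {'inputValue': 'Input 1'} becomes an InputValuePlaceholder whose
    # input_name has been sanitized by utils.sanitize_input_name.
    placeholder = maybe_convert_v1_yaml_placeholder_to_v2_placeholder(
        {'inputValue': 'Input 1'}, component_dict={})
    assert placeholder.input_name == 'input_1'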

@@ -0,0 +1,44 @@
# Copyright 2021 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Python function-based component."""

from typing import Callable

from kfp.dsl import base_component
from kfp.dsl import structures


class PythonComponent(base_component.BaseComponent):
    """A component defined via Python function.

    **Note:** ``PythonComponent`` is not intended to be used to construct components directly. Use ``@kfp.dsl.component`` instead.

    Args:
        component_spec: Component definition.
        python_func: Python function that becomes the implementation of this component.
    """

    def __init__(
        self,
        component_spec: structures.ComponentSpec,
        python_func: Callable,
    ):
        super().__init__(component_spec=component_spec)
        self.python_func = python_func

        self._prevent_using_output_lists_of_artifacts()

    def execute(self, **kwargs):
        """Executes the Python function that defines the component."""
        return self.python_func(**kwargs)
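
A minimal sketch of how this class surfaces in practice (the component body
is illustrative, but ``@dsl.component`` returning a ``PythonComponent`` is
the contract stated in the docstring above)::

    from kfp import dsl
    from kfp.dsl.python_component import PythonComponent

    @dsl.component
    def add(a: int, b: int) -> int:
        return a + b

    # `add` is a PythonComponent wrapping the decorated function;
    # execute() forwards keyword arguments straight to python_func.
    assert isinstance(add, PythonComponent)
    assert add.execute(a=1, b=2) == 3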

(File diff suppressed because it is too large)

@@ -22,7 +22,6 @@ from absl.testing import parameterized
 from kfp import compiler
 from kfp import components
 from kfp import dsl
-from kfp.components import load_yaml_utilities
 from kfp.dsl import component_factory
 from kfp.dsl import placeholders
 from kfp.dsl import structures

@@ -264,7 +263,7 @@ class StructuresTest(parameterized.TestCase):
         # test that it can be read back correctly
         with open(output_path, 'r') as f:
             contents = f.read()
-        new_component_spec = load_yaml_utilities._load_component_spec_from_yaml_documents(
+        new_component_spec = structures.ComponentSpec.from_yaml_documents(
             contents)

         self.assertEqual(original_component_spec, new_component_spec)

@@ -319,7 +318,7 @@ schemaVersion: 2.1.0
 sdkVersion: kfp-2.0.0-alpha.2
 """)

-        generated_spec = load_yaml_utilities._load_component_spec_from_yaml_documents(
+        generated_spec = structures.ComponentSpec.from_yaml_documents(
             component_yaml_v2)

         expected_spec = structures.ComponentSpec(

@@ -360,8 +359,7 @@ sdkVersion: kfp-2.0.0-alpha.2
     )
     def test_component_spec_placeholder_load_from_v2_component_yaml(
             self, yaml, expected_component):
-        generated_spec = load_yaml_utilities._load_component_spec_from_yaml_documents(
-            yaml)
+        generated_spec = structures.ComponentSpec.from_yaml_documents(yaml)
         self.assertEqual(generated_spec, expected_component)

     def test_component_spec_load_from_v1_component_yaml(self):

@@ -390,7 +388,7 @@ sdkVersion: kfp-2.0.0-alpha.2
       - {outputPath: Output 2}
 """)

-        generated_spec = load_yaml_utilities._load_component_spec_from_yaml_documents(
+        generated_spec = structures.ComponentSpec.from_yaml_documents(
             component_yaml_v1)

         expected_spec = structures.ComponentSpec(

@@ -641,7 +639,7 @@ V1_YAML = textwrap.dedent("""\
 class TestReadInComponent(parameterized.TestCase):

     def test_read_v1(self):
-        component_spec = load_yaml_utilities._load_component_spec_from_yaml_documents(
+        component_spec = structures.ComponentSpec.from_yaml_documents(
             V1_YAML_IF_PLACEHOLDER)
         self.assertEqual(component_spec.name, 'component-if')
         self.assertEqual(component_spec.implementation.container.image,

@@ -696,7 +694,7 @@ root:
           parameterType: STRING
 schemaVersion: 2.1.0
 sdkVersion: kfp-2.0.0-alpha.2""")
-        loaded_component_spec = load_yaml_utilities._load_component_spec_from_yaml_documents(
+        loaded_component_spec = structures.ComponentSpec.from_yaml_documents(
             compiled_yaml)
         component_spec = structures.ComponentSpec(
             name='component1',

@@ -764,7 +762,7 @@ root:
           parameterType: STRING
 schemaVersion: 2.1.0
 sdkVersion: kfp-2.0.0-alpha.2""")
-        loaded_component_spec = load_yaml_utilities._load_component_spec_from_yaml_documents(
+        loaded_component_spec = structures.ComponentSpec.from_yaml_documents(
             compiled_yaml)
         component_spec = structures.ComponentSpec(
             name='if',

@@ -835,7 +833,7 @@ root:
           parameterType: STRING
 schemaVersion: 2.1.0
 sdkVersion: kfp-2.0.0-alpha.2""")
-        loaded_component_spec = load_yaml_utilities._load_component_spec_from_yaml_documents(
+        loaded_component_spec = structures.ComponentSpec.from_yaml_documents(
             compiled_yaml)
         component_spec = structures.ComponentSpec(
             name='concat',

@@ -1115,5 +1113,47 @@ implementation:
         self.assertEqual(outputs['output4'].type, 'Dict')


+class TestLoadDocumentsFromYAML(unittest.TestCase):
+
+    def test_no_documents(self):
+        with self.assertRaisesRegex(
+                ValueError,
+                r'Expected one or two YAML documents in the IR YAML file\. Got\: 0\.'
+        ):
+            structures.load_documents_from_yaml('')
+
+    def test_one_document(self):
+        doc1, doc2 = structures.load_documents_from_yaml(
+            textwrap.dedent("""\
+                key1: value1
+                """))
+        self.assertEqual(doc1, {'key1': 'value1'})
+        self.assertEqual(doc2, {})
+
+    def test_two_documents(self):
+        doc1, doc2 = structures.load_documents_from_yaml(
+            textwrap.dedent("""\
+                key1: value1
+                ---
+                key2: value2
+                """))
+        self.assertEqual(doc1, {'key1': 'value1'})
+        self.assertEqual(doc2, {'key2': 'value2'})
+
+    def test_three_documents(self):
+        with self.assertRaisesRegex(
+                ValueError,
+                r'Expected one or two YAML documents in the IR YAML file\. Got\: 3\.'
+        ):
+            structures.load_documents_from_yaml(
+                textwrap.dedent("""\
+                    key3: value3
+                    ---
+                    key3: value3
+                    ---
+                    key3: value3
+                    """))
+
+
 if __name__ == '__main__':
     unittest.main()

@@ -0,0 +1,55 @@
# Copyright 2022 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Definition for PipelineTaskFinalStatus."""

import dataclasses
from typing import Optional


@dataclasses.dataclass
class PipelineTaskFinalStatus:
    """A final status of a pipeline task. Annotate a component parameter with
    this class to obtain a handle to a task's status (see example).

    This is the Python representation of the proto message `PipelineTaskFinalStatus <https://github.com/kubeflow/pipelines/blob/d8b9439ef92b88da3420df9e8c67db0f1e89d4ef/api/v2alpha1/pipeline_spec.proto#L929-L951>`_.

    Examples:
      ::

        @dsl.component
        def task_status(user_input: str, status: PipelineTaskFinalStatus):
            print('Pipeline status: ', status.state)
            print('Job resource name: ', status.pipeline_job_resource_name)
            print('Pipeline task name: ', status.pipeline_task_name)
            print('Error code: ', status.error_code)
            print('Error message: ', status.error_message)

        @dsl.pipeline(name='my_pipeline')
        def my_pipeline():
            task = task_status(user_input='my_input')
    """
    state: str
    """Final state of the task. The value could be one of ``'SUCCEEDED'``, ``'FAILED'`` or ``'CANCELLED'``."""

    pipeline_job_resource_name: str
    """Pipeline job resource name, in the format of ``projects/{project}/locations/{location}/pipelineJobs/{pipeline_job}``."""

    pipeline_task_name: str
    """Name of the task that produced this status."""

    error_code: Optional[int]
    """The `google.rpc.Code <https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto>`_ in case of error. If state is ``'SUCCEEDED'``, this is ``None``."""

    error_message: Optional[str]
    """In case of error, the detailed error message. If state is ``'SUCCEEDED'``, this is ``None``."""

@@ -0,0 +1,230 @@
# Copyright 2021 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Definition for TasksGroup."""

import enum
from typing import Optional, Union

from kfp.dsl import for_loop
from kfp.dsl import pipeline_channel
from kfp.dsl import pipeline_context
from kfp.dsl import pipeline_task


class TasksGroupType(str, enum.Enum):
    """Types of TasksGroup."""
    PIPELINE = 'pipeline'
    CONDITION = 'condition'
    FOR_LOOP = 'for-loop'
    EXIT_HANDLER = 'exit-handler'


class TasksGroup:
    """Represents a logical group of tasks and groups of TasksGroups.

    This class is the base class for groups of tasks, such as tasks
    sharing an exit handler, a condition branch, or a loop. This class
    is not supposed to be used by pipeline authors. It is useful for
    implementing a compiler.

    Attributes:
        group_type: The type of the TasksGroup.
        tasks: A list of all PipelineTasks in this group.
        groups: A list of TasksGroups in this group.
        display_name: The optional user-given name of the group.
        dependencies: A list of tasks or groups this group depends on.
        is_root: Whether the TasksGroup is the root group.
    """

    def __init__(
        self,
        group_type: TasksGroupType,
        name: Optional[str] = None,
        is_root: bool = False,
    ):
        """Create a new instance of TasksGroup.

        Args:
            group_type: The type of the group.
            name: The name of the group. Used as display name in UI.
        """
        self.group_type = group_type
        self.tasks = []
        self.groups = []
        self.display_name = name
        self.dependencies = []
        self.is_root = is_root

    def __enter__(self):
        if not pipeline_context.Pipeline.get_default_pipeline():
            raise ValueError('Default pipeline not defined.')

        self._make_name_unique()

        pipeline_context.Pipeline.get_default_pipeline().push_tasks_group(self)
        return self

    def __exit__(self, *unused_args):
        pipeline_context.Pipeline.get_default_pipeline().pop_tasks_group()

    def _make_name_unique(self):
        """Generates a unique TasksGroup name in the pipeline."""
        if not pipeline_context.Pipeline.get_default_pipeline():
            raise ValueError('Default pipeline not defined.')

        group_id = pipeline_context.Pipeline.get_default_pipeline(
        ).get_next_group_id()
        self.name = f'{self.group_type.value}-{group_id}'
        self.name = self.name.replace('_', '-')

    def remove_task_recursive(self, task: pipeline_task.PipelineTask):
        """Removes a task from the group recursively."""
        if self.tasks and task in self.tasks:
            self.tasks.remove(task)
        for group in self.groups or []:
            group.remove_task_recursive(task)


class ExitHandler(TasksGroup):
    """A class for setting an exit handler task that is invoked upon exiting a
    group of other tasks.

    Args:
        exit_task: The task that is invoked after exiting a group of other tasks.
        name: The name of the exit handler group.

    Example:
      ::

        exit_task = ExitComponent(...)
        with ExitHandler(exit_task):
            task1 = my_component1(...)
            task2 = my_component2(...)
    """

    def __init__(
        self,
        exit_task: pipeline_task.PipelineTask,
        name: Optional[str] = None,
    ):
        """Initializes an ExitHandler task group."""
        super().__init__(
            group_type=TasksGroupType.EXIT_HANDLER,
            name=name,
            is_root=False,
        )

        if exit_task.dependent_tasks:
            raise ValueError('exit_task cannot depend on any other tasks.')

        # Remove exit_task from any group
        pipeline_context.Pipeline.get_default_pipeline(
        ).remove_task_from_groups(exit_task)

        # Set is_exit_handler since the compiler might be using this attribute.
        exit_task.is_exit_handler = True

        self.exit_task = exit_task


class Condition(TasksGroup):
    """A class for creating conditional control flow within a pipeline
    definition.

    Args:
        condition: A comparative expression that evaluates to True or False. At least one of the operands must be an output from an upstream task or a pipeline parameter.
        name: The name of the condition group.

    Example:
      ::

        task1 = my_component1(...)
        with Condition(task1.output == 'pizza', 'pizza-condition'):
            task2 = my_component2(...)
    """

    def __init__(
        self,
        condition: pipeline_channel.ConditionOperator,
        name: Optional[str] = None,
    ):
        """Initializes a conditional task group."""
        super().__init__(
            group_type=TasksGroupType.CONDITION,
            name=name,
            is_root=False,
        )
        self.condition = condition


class ParallelFor(TasksGroup):
    """A class for creating parallelized for loop control flow over a static
    set of items within a pipeline definition.

    Args:
        items: The items to loop over. It can be either a constant Python list or a list output from an upstream task.
        name: The name of the for loop group.
        parallelism: The maximum number of concurrent iterations that can be scheduled for execution. A value of 0 represents unconstrained parallelism (default is unconstrained).

    Example:
      ::

        with dsl.ParallelFor(
            items=[{'a': 1, 'b': 10}, {'a': 2, 'b': 20}],
            parallelism=1
        ) as item:
            task1 = my_component(..., number=item.a)
            task2 = my_component(..., number=item.b)

    In the example, the group of tasks containing ``task1`` and ``task2`` would
    be executed twice, once with case ``args=[{'a': 1, 'b': 10}]`` and once with
    case ``args=[{'a': 2, 'b': 20}]``. The ``parallelism=1`` setting causes only
    1 execution to be scheduled at a time.
    """

    def __init__(
        self,
        items: Union[for_loop.ItemList, pipeline_channel.PipelineChannel],
        name: Optional[str] = None,
        parallelism: Optional[int] = None,
    ):
        """Initializes a for loop task group."""
        parallelism = parallelism or 0
        if parallelism < 0:
            raise ValueError(
                f'ParallelFor parallelism must be >= 0. Got: {parallelism}.')

        super().__init__(
            group_type=TasksGroupType.FOR_LOOP,
            name=name,
            is_root=False,
        )

        if isinstance(items, pipeline_channel.PipelineChannel):
            self.loop_argument = for_loop.LoopArgument.from_pipeline_channel(
                items)
            self.items_is_pipeline_channel = True
        else:
            self.loop_argument = for_loop.LoopArgument.from_raw_items(
                raw_items=items,
                name_code=pipeline_context.Pipeline.get_default_pipeline()
                .get_next_group_id(),
            )
            self.items_is_pipeline_channel = False

        self.parallelism_limit = parallelism

    def __enter__(self) -> for_loop.LoopArgument:
        super().__enter__()
        return self.loop_argument

test/presubmit-test-kfp-dsl-runtime-code.sh → sdk/python/kfp/dsl/types/__init__.py (Executable file → Normal file)
@@ -1,5 +1,4 @@
-#!/bin/bash -ex
-# Copyright 2023 Kubeflow Pipelines contributors
+# Copyright 2021 The Kubeflow Authors
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.

@@ -12,13 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-source_root=$(pwd)
-
-pip install --upgrade pip
-pip install -e $source_root/sdk/python/kfp-dsl
-pip install pyyaml
-pip install $(grep 'absl-py==' sdk/python/requirements-dev.txt)
-pip install $(grep 'pytest==' sdk/python/requirements-dev.txt)
-
-pytest sdk/python/kfp-dsl

@@ -0,0 +1,472 @@
# Copyright 2021 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes for input/output Artifacts in KFP SDK."""

from typing import Dict, List, Optional, Type

_GCS_LOCAL_MOUNT_PREFIX = '/gcs/'
_MINIO_LOCAL_MOUNT_PREFIX = '/minio/'
_S3_LOCAL_MOUNT_PREFIX = '/s3/'


class Artifact:
    """Represents a generic machine learning artifact.

    This class and all artifact classes store the name, uri, and metadata for a machine learning artifact. Use this artifact type when an artifact does not fit into another more specific artifact type (e.g., ``Model``, ``Dataset``).

    Args:
        name: Name of the artifact.
        uri: The artifact's location on disk or cloud storage.
        metadata: Arbitrary key-value pairs about the artifact.

    Example:
      ::

        from kfp import dsl
        from kfp.dsl import Output, Artifact, Input


        @dsl.component
        def create_artifact(
            data: str,
            output_artifact: Output[Artifact],
        ):
            with open(output_artifact.path, 'w') as f:
                f.write(data)


        @dsl.component
        def use_artifact(input_artifact: Input[Artifact]):
            with open(input_artifact.path) as input_file:
                artifact_contents = input_file.read()
                print(artifact_contents)


        @dsl.pipeline(name='my-pipeline', pipeline_root='gs://my/storage')
        def my_pipeline():
            create_task = create_artifact(data='my data')
            use_artifact(input_artifact=create_task.outputs['output_artifact'])

    Note: Other artifacts are used similarly to the usage of ``Artifact`` in the example above (within ``Input[]`` and ``Output[]``).
    """
    schema_title = 'system.Artifact'
    schema_version = '0.0.1'

    def __init__(self,
                 name: Optional[str] = None,
                 uri: Optional[str] = None,
                 metadata: Optional[Dict] = None) -> None:
        """Initializes the Artifact with the given name, URI and metadata."""
        self.uri = uri or ''
        self.name = name or ''
        self.metadata = metadata or {}

    @property
    def path(self) -> str:
        return self._get_path()

    @path.setter
    def path(self, path: str) -> None:
        self._set_path(path)

    def _get_path(self) -> Optional[str]:
        if self.uri.startswith('gs://'):
            return _GCS_LOCAL_MOUNT_PREFIX + self.uri[len('gs://'):]
        elif self.uri.startswith('minio://'):
            return _MINIO_LOCAL_MOUNT_PREFIX + self.uri[len('minio://'):]
        elif self.uri.startswith('s3://'):
            return _S3_LOCAL_MOUNT_PREFIX + self.uri[len('s3://'):]
        return None

    def _set_path(self, path: str) -> None:
        if path.startswith(_GCS_LOCAL_MOUNT_PREFIX):
            path = 'gs://' + path[len(_GCS_LOCAL_MOUNT_PREFIX):]
        elif path.startswith(_MINIO_LOCAL_MOUNT_PREFIX):
            path = 'minio://' + path[len(_MINIO_LOCAL_MOUNT_PREFIX):]
        elif path.startswith(_S3_LOCAL_MOUNT_PREFIX):
            path = 's3://' + path[len(_S3_LOCAL_MOUNT_PREFIX):]
        self.uri = path
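
To make the prefix mapping above concrete (a sketch using the mount-prefix
constants defined at the top of this module)::

    artifact = Artifact(name='my_artifact', uri='gs://my-bucket/my-obj')
    assert artifact.path == '/gcs/my-bucket/my-obj'

    # Setting .path translates back to the corresponding URI scheme.
    artifact.path = '/minio/mlpipeline/artifacts/obj'
    assert artifact.uri == 'minio://mlpipeline/artifacts/obj'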


class Model(Artifact):
    """An artifact representing a machine learning model.

    Args:
        name: Name of the model.
        uri: The model's location on disk or cloud storage.
        metadata: Arbitrary key-value pairs about the model.
    """
    schema_title = 'system.Model'

    @property
    def framework(self) -> str:
        return self._get_framework()

    def _get_framework(self) -> str:
        return self.metadata.get('framework', '')

    @framework.setter
    def framework(self, framework: str) -> None:
        self._set_framework(framework)

    def _set_framework(self, framework: str) -> None:
        self.metadata['framework'] = framework


class Dataset(Artifact):
    """An artifact representing a machine learning dataset.

    Args:
        name: Name of the dataset.
        uri: The dataset's location on disk or cloud storage.
        metadata: Arbitrary key-value pairs about the dataset.
    """
    schema_title = 'system.Dataset'


class Metrics(Artifact):
    """An artifact for storing key-value scalar metrics.

    Args:
        name: Name of the metrics artifact.
        uri: The metrics artifact's location on disk or cloud storage.
        metadata: Key-value scalar metrics.
    """
    schema_title = 'system.Metrics'

    def log_metric(self, metric: str, value: float) -> None:
        """Sets a custom scalar metric in the artifact's metadata.

        Args:
            metric: The metric key.
            value: The metric value.
        """
        self.metadata[metric] = value
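
Inside a component body, logging scalar metrics is a one-liner (a sketch; the
metric names are arbitrary, and ``dsl``/``Output`` are imported as in the
``Artifact`` example above)::

    @dsl.component
    def eval_model(metrics: Output[Metrics]):
        metrics.log_metric('accuracy', 0.92)
        metrics.log_metric('f1', 0.88)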


class ClassificationMetrics(Artifact):
    """An artifact for storing classification metrics.

    Args:
        name: Name of the metrics artifact.
        uri: The metrics artifact's location on disk or cloud storage.
        metadata: The key-value scalar metrics.
    """
    schema_title = 'system.ClassificationMetrics'

    def log_roc_data_point(self, fpr: float, tpr: float,
                           threshold: float) -> None:
        """Logs a single data point in the ROC curve to metadata.

        Args:
            fpr: False positive rate value of the data point.
            tpr: True positive rate value of the data point.
            threshold: Threshold value for the data point.
        """

        roc_reading = {
            'confidenceThreshold': threshold,
            'recall': tpr,
            'falsePositiveRate': fpr
        }
        if 'confidenceMetrics' not in self.metadata.keys():
            self.metadata['confidenceMetrics'] = []

        self.metadata['confidenceMetrics'].append(roc_reading)

    def log_roc_curve(self, fpr: List[float], tpr: List[float],
                      threshold: List[float]) -> None:
        """Logs an ROC curve to metadata.

        Args:
            fpr: List of false positive rate values.
            tpr: List of true positive rate values.
            threshold: List of threshold values.

        Raises:
            ValueError: If the lists ``fpr``, ``tpr`` and ``threshold`` are not the same length.
        """
        if len(fpr) != len(tpr) or len(fpr) != len(threshold) or len(
                tpr) != len(threshold):
            raise ValueError(
                f'Length of fpr, tpr and threshold must be the same. Got lengths {len(fpr)}, {len(tpr)} and {len(threshold)} respectively.'
            )

        for i in range(len(fpr)):
            self.log_roc_data_point(
                fpr=fpr[i], tpr=tpr[i], threshold=threshold[i])

    def set_confusion_matrix_categories(self, categories: List[str]) -> None:
        """Stores confusion matrix categories to metadata.

        Args:
            categories: List of strings specifying the categories.
        """

        self._categories = []
        annotation_specs = []
        for category in categories:
            annotation_spec = {'displayName': category}
            self._categories.append(category)
            annotation_specs.append(annotation_spec)

        self._matrix = []
        for row in range(len(self._categories)):
            self._matrix.append({'row': [0] * len(self._categories)})

        self._confusion_matrix = {
            'annotationSpecs': annotation_specs,
            'rows': self._matrix
        }

        self.metadata['confusionMatrix'] = self._confusion_matrix

    def log_confusion_matrix_row(self, row_category: str,
                                 row: List[float]) -> None:
        """Logs a confusion matrix row to metadata.

        Args:
            row_category: Category to which the row belongs.
            row: List of integers specifying the values for the row.

        Raises:
            ValueError: If ``row_category`` is not in the list of categories
                set in ``set_confusion_matrix_categories``.
        """
        if row_category not in self._categories:
            raise ValueError(
                f'Invalid category: {row_category} passed. Expected one of: {self._categories}'
            )

        if len(row) != len(self._categories):
            raise ValueError(
                f'Invalid row. Expected size: {len(self._categories)} got: {len(row)}'
            )

        self._matrix[self._categories.index(row_category)] = {'row': row}
        self.metadata['confusionMatrix'] = self._confusion_matrix

    def log_confusion_matrix_cell(self, row_category: str, col_category: str,
                                  value: int) -> None:
        """Logs a cell in the confusion matrix to metadata.

        Args:
            row_category: String representing the name of the row category.
            col_category: String representing the name of the column category.
            value: Value of the cell.

        Raises:
            ValueError: If ``row_category`` or ``col_category`` is not in the list of
                categories set in ``set_confusion_matrix_categories``.
        """
        if row_category not in self._categories:
            raise ValueError(
                f'Invalid category: {row_category} passed. Expected one of: {self._categories}'
            )

        if col_category not in self._categories:
            raise ValueError(
                f'Invalid category: {col_category} passed. Expected one of: {self._categories}'
            )

        self._matrix[self._categories.index(row_category)]['row'][
            self._categories.index(col_category)] = value
        self.metadata['confusionMatrix'] = self._confusion_matrix

    def log_confusion_matrix(self, categories: List[str],
                             matrix: List[List[int]]) -> None:
        """Logs a confusion matrix to metadata.

        Args:
            categories: List of the category names.
            matrix: Complete confusion matrix.

        Raises:
            ValueError: If the length of ``categories`` does not match number of rows or columns of ``matrix``.
        """
        self.set_confusion_matrix_categories(categories)

        if len(matrix) != len(categories):
            raise ValueError(
                f'Invalid matrix: {matrix} passed for categories: {categories}')

        for index in range(len(categories)):
            if len(matrix[index]) != len(categories):
                raise ValueError(
                    f'Invalid matrix: {matrix} passed for categories: {categories}'
                )

            self.log_confusion_matrix_row(categories[index], matrix[index])

        self.metadata['confusionMatrix'] = self._confusion_matrix
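
A short sketch of the confusion-matrix helpers above (category names and
values are made up)::

    cm = ClassificationMetrics()
    cm.log_confusion_matrix(
        categories=['cat', 'dog'],
        matrix=[[10, 2],
                [3, 12]])

    # Equivalent incremental form:
    cm2 = ClassificationMetrics()
    cm2.set_confusion_matrix_categories(['cat', 'dog'])
    cm2.log_confusion_matrix_row('cat', [10, 2])
    cm2.log_confusion_matrix_row('dog', [3, 12])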


class SlicedClassificationMetrics(Artifact):
    """An artifact for storing sliced classification metrics.

    Similar to ``ClassificationMetrics``, tasks using this class are expected
    to use its log methods to log metrics, with the difference that each log
    method takes a slice label that associates the reading with a per-slice
    ``ClassificationMetrics`` instance.

    Args:
        name: Name of the metrics artifact.
        uri: The metrics artifact's location on disk or cloud storage.
        metadata: Arbitrary key-value pairs about the metrics artifact.
    """

    schema_title = 'system.SlicedClassificationMetrics'

    def _upsert_classification_metrics_for_slice(self, slice: str) -> None:
        """Upserts the classification metrics instance for a slice."""
        # lazily initialize the per-slice metrics store
        if not hasattr(self, '_sliced_metrics'):
            self._sliced_metrics = {}
        if slice not in self._sliced_metrics:
            self._sliced_metrics[slice] = ClassificationMetrics()

    def _update_metadata(self, slice: str) -> None:
        """Updates metadata to adhere to the metrics schema."""
        self.metadata = {'evaluationSlices': []}
        for slice in self._sliced_metrics.keys():
            slice_metrics = {
                'slice':
                    slice,
                'sliceClassificationMetrics':
                    self._sliced_metrics[slice].metadata
            }
            self.metadata['evaluationSlices'].append(slice_metrics)

    def log_roc_reading(self, slice: str, threshold: float, tpr: float,
                        fpr: float) -> None:
        """Logs a single data point in the ROC curve of a slice to metadata.

        Args:
            slice: String representing slice label.
            threshold: Threshold value for the data point.
            tpr: True positive rate value of the data point.
            fpr: False positive rate value of the data point.
        """

        self._upsert_classification_metrics_for_slice(slice)
        # delegate to ClassificationMetrics.log_roc_data_point
        self._sliced_metrics[slice].log_roc_data_point(
            fpr=fpr, tpr=tpr, threshold=threshold)
        self._update_metadata(slice)

    def load_roc_readings(self, slice: str,
                          readings: List[List[float]]) -> None:
        """Bulk loads ROC curve readings for a slice.

        Args:
            slice: String representing slice label.
            readings: A 2-dimensional list providing ROC curve data points. The expected order of the data points is: threshold, true positive rate, false positive rate.
        """
        self._upsert_classification_metrics_for_slice(slice)
        for threshold, tpr, fpr in readings:
            self._sliced_metrics[slice].log_roc_data_point(
                fpr=fpr, tpr=tpr, threshold=threshold)
        self._update_metadata(slice)

    def set_confusion_matrix_categories(self, slice: str,
                                        categories: List[str]) -> None:
        """Logs confusion matrix categories for a slice to metadata.

        Categories are stored in the internal ``metrics_utils.ConfusionMatrix``
        instance of the slice.

        Args:
            slice: String representing slice label.
            categories: List of strings specifying the categories.
        """
        self._upsert_classification_metrics_for_slice(slice)
        self._sliced_metrics[slice].set_confusion_matrix_categories(categories)
        self._update_metadata(slice)

    def log_confusion_matrix_row(self, slice: str, row_category: str,
                                 row: List[int]) -> None:
        """Logs a confusion matrix row for a slice to metadata.

        Row is updated on the internal ``metrics_utils.ConfusionMatrix``
        instance of the slice.

        Args:
            slice: String representing slice label.
            row_category: Category to which the row belongs.
            row: List of integers specifying the values for the row.
        """
        self._upsert_classification_metrics_for_slice(slice)
        self._sliced_metrics[slice].log_confusion_matrix_row(row_category, row)
        self._update_metadata(slice)

    def log_confusion_matrix_cell(self, slice: str, row_category: str,
                                  col_category: str, value: int) -> None:
        """Logs a confusion matrix cell for a slice to metadata.

        Cell is updated on the internal ``metrics_utils.ConfusionMatrix``
        instance of the slice.

        Args:
            slice: String representing slice label.
            row_category: String representing the name of the row category.
            col_category: String representing the name of the column category.
            value: Value of the cell.
        """
        self._upsert_classification_metrics_for_slice(slice)
        self._sliced_metrics[slice].log_confusion_matrix_cell(
            row_category, col_category, value)
        self._update_metadata(slice)

    def load_confusion_matrix(self, slice: str, categories: List[str],
                              matrix: List[List[int]]) -> None:
        """Bulk loads the whole confusion matrix for a slice.

        Args:
            slice: String representing slice label.
            categories: List of the category names.
            matrix: Complete confusion matrix.
        """
        self._upsert_classification_metrics_for_slice(slice)
        # delegate to ClassificationMetrics.log_confusion_matrix
        self._sliced_metrics[slice].log_confusion_matrix(categories, matrix)
        self._update_metadata(slice)
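
Usage mirrors ``ClassificationMetrics``, with a slice label prepended to each
call (a sketch; the slice and category names are arbitrary)::

    sliced = SlicedClassificationMetrics()
    sliced.log_roc_reading('slice_a', threshold=0.5, tpr=0.82, fpr=0.07)
    sliced.set_confusion_matrix_categories('slice_a', ['cat', 'dog'])
    sliced.log_confusion_matrix_row('slice_a', 'cat', [10, 2])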


class HTML(Artifact):
    """An artifact representing an HTML file.

    Args:
        name: Name of the HTML file.
        uri: The HTML file's location on disk or cloud storage.
        metadata: Arbitrary key-value pairs about the HTML file.
    """
    schema_title = 'system.HTML'


class Markdown(Artifact):
    """An artifact representing a markdown file.

    Args:
        name: Name of the markdown file.
        uri: The markdown file's location on disk or cloud storage.
        metadata: Arbitrary key-value pairs about the markdown file.
    """
    schema_title = 'system.Markdown'


_SCHEMA_TITLE_TO_TYPE: Dict[str, Type[Artifact]] = {
    x.schema_title: x for x in [
        Artifact,
        Model,
        Dataset,
        Metrics,
        ClassificationMetrics,
        SlicedClassificationMetrics,
        HTML,
        Markdown,
    ]
}

@@ -0,0 +1,191 @@
# Copyright 2022 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import ast
import inspect
from typing import Callable, Dict, List, Union

from kfp.dsl import component_factory
from kfp.dsl.types import type_annotations
from kfp.dsl.types import type_utils

RETURN_PREFIX = 'return-'


def get_custom_artifact_type_import_statements(func: Callable) -> List[str]:
    """Gets a list of custom artifact type import statements from a lightweight
    Python component function."""
    artifact_imports = get_custom_artifact_import_items_from_function(func)
    imports_source = []
    for obj_str in artifact_imports:
        if '.' in obj_str:
            path, name = obj_str.rsplit('.', 1)
            imports_source.append(f'from {path} import {name}')
        else:
            imports_source.append(f'import {obj_str}')
    return imports_source
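
The item-to-statement mapping is mechanical; for example (hypothetical items)::

    'aiplatform'              -> 'import aiplatform'
    'my_pkg.types.MyArtifact' -> 'from my_pkg.types import MyArtifact'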


def get_param_to_custom_artifact_class(func: Callable) -> Dict[str, type]:
    """Gets a map of parameter names to custom artifact classes.

    Return key is 'return-' for normal returns and 'return-<field>' for
    typing.NamedTuple returns.
    """
    param_to_artifact_cls: Dict[str, type] = {}
    kfp_artifact_classes = set(type_utils._ARTIFACT_CLASSES_MAPPING.values())

    signature = inspect.signature(func)
    for name, param in signature.parameters.items():
        annotation = param.annotation
        if type_annotations.is_Input_Output_artifact_annotation(annotation):
            artifact_class = type_annotations.get_io_artifact_class(annotation)
            if artifact_class not in kfp_artifact_classes:
                param_to_artifact_cls[name] = artifact_class
        elif type_annotations.is_artifact_class(annotation):
            if annotation not in kfp_artifact_classes:
                param_to_artifact_cls[name] = annotation

    return_annotation = signature.return_annotation

    if return_annotation is inspect.Signature.empty:
        pass

    elif type_utils.is_typed_named_tuple_annotation(return_annotation):
        for name, annotation in return_annotation.__annotations__.items():
            if type_annotations.is_artifact_class(
                    annotation) and annotation not in kfp_artifact_classes:
                param_to_artifact_cls[f'{RETURN_PREFIX}{name}'] = annotation

    elif type_annotations.is_artifact_class(
            return_annotation
    ) and return_annotation not in kfp_artifact_classes:
        param_to_artifact_cls[RETURN_PREFIX] = return_annotation

    return param_to_artifact_cls


def get_full_qualname_for_artifact(obj: type) -> str:
    """Gets the fully qualified name for an object. For example, for class Foo
    in module bar.baz, this function returns bar.baz.Foo.

    Note: typing.get_type_hints purports to do the same thing, but it behaves
    differently when executed within the scope of a test, so preferring this
    approach instead.

    Args:
        obj: The class or module for which to get the fully qualified name.

    Returns:
        The fully qualified name for the class.
    """
    module = obj.__module__
    name = obj.__qualname__
    if module is not None:
        name = module + '.' + name
    return name


def get_symbol_import_path(artifact_class_base_symbol: str,
                           qualname: str) -> str:
    """Gets the fully qualified name of the symbol that must be imported for
    the custom artifact type annotation to be referenced successfully.

    Args:
        artifact_class_base_symbol: The base symbol from which the artifact class is referenced (e.g., aiplatform for aiplatform.VertexDataset).
        qualname: The fully qualified type annotation name as a string.

    Returns:
        The fully qualified names of the module or type to import.
    """
    split_qualname = qualname.split('.')
    if artifact_class_base_symbol in split_qualname:
        name_to_import = '.'.join(
            split_qualname[:split_qualname.index(artifact_class_base_symbol) +
                           1])
    else:
        raise TypeError(
            f"Module or type name aliases are not supported. You appear to be using an alias in your type annotation: '{qualname}'. This may be due to use of an 'as' statement in an import statement or a reassignment of a module or type to a new name. Reference the module and/or type using the name as defined in the source from which the module or type is imported."
        )
    return name_to_import
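
Concretely (a sketch with a hypothetical custom artifact type): if the
annotation is written as ``aiplatform.VertexDataset`` while the class's
qualified name is ``google.cloud.aiplatform.VertexDataset``, then::

    get_symbol_import_path(
        artifact_class_base_symbol='aiplatform',
        qualname='google.cloud.aiplatform.VertexDataset')
    # -> 'google.cloud.aiplatform'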


def traverse_ast_node_values_to_get_id(obj: Union[ast.Slice, None]) -> str:
    while not hasattr(obj, 'id'):
        obj = getattr(obj, 'value')
    return obj.id


def get_custom_artifact_base_symbol_for_parameter(func: Callable,
                                                  arg_name: str) -> str:
    """Gets the symbol required for the custom artifact type annotation to be
    referenced correctly."""
    module_node = ast.parse(
        component_factory._get_function_source_definition(func))
    args = module_node.body[0].args.args
    args = {arg.arg: arg for arg in args}
    annotation = args[arg_name].annotation
    return traverse_ast_node_values_to_get_id(annotation.slice)


def get_custom_artifact_base_symbol_for_return(func: Callable,
                                               return_name: str) -> str:
    """Gets the symbol required for the custom artifact type return annotation
    to be referenced correctly."""
    module_node = ast.parse(
        component_factory._get_function_source_definition(func))
    return_ann = module_node.body[0].returns

    if return_name == RETURN_PREFIX:
        if isinstance(return_ann, (ast.Name, ast.Attribute)):
            return traverse_ast_node_values_to_get_id(return_ann)
    elif isinstance(return_ann, ast.Call):
        func = return_ann.func
        # handles NamedTuple and typing.NamedTuple
        if (isinstance(func, ast.Attribute) and func.value.id == 'typing' and
                func.attr == 'NamedTuple') or (isinstance(func, ast.Name) and
                                               func.id == 'NamedTuple'):
            nt_field_list = return_ann.args[1].elts
            for el in nt_field_list:
                if f'{RETURN_PREFIX}{el.elts[0].s}' == return_name:
                    return traverse_ast_node_values_to_get_id(el.elts[1])

    raise TypeError(f"Unexpected type annotation '{return_ann}' for {func}.")


def get_custom_artifact_import_items_from_function(func: Callable) -> List[str]:
    """Gets the fully qualified name of the symbol that must be imported for
    the custom artifact type annotation to be referenced successfully from a
    component function."""

    param_to_ann_obj = get_param_to_custom_artifact_class(func)
    import_items = []
    for param_name, artifact_class in param_to_ann_obj.items():

        base_symbol = get_custom_artifact_base_symbol_for_return(
            func, param_name
        ) if param_name.startswith(
            RETURN_PREFIX) else get_custom_artifact_base_symbol_for_parameter(
                func, param_name)
        artifact_qualname = get_full_qualname_for_artifact(artifact_class)
        symbol_import_path = get_symbol_import_path(base_symbol,
                                                    artifact_qualname)

        # could use a set here, but we want deterministic import ordering
        # in compilation
        if symbol_import_path not in import_items:
            import_items.append(symbol_import_path)

    return import_items

@@ -0,0 +1,245 @@
# Copyright 2021 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes for input/output type annotations in KFP SDK.

These are only compatible with v2 Pipelines.
"""

import re
from typing import List, Type, TypeVar, Union

from kfp.dsl.types import artifact_types
from kfp.dsl.types import type_annotations
from kfp.dsl.types import type_utils


class OutputPath:
    """Type annotation used in component definitions for indicating a parameter
    is a path to an output. The path parameter typed with this annotation can
    be treated as a locally accessible filepath within the component body.

    The argument typed with this annotation is provided at runtime by the executing backend and does not need to be passed as an input by the pipeline author (see example).

    Args:
        type: The type of the value written to the output path.

    Example:
      ::

        @dsl.component
        def create_parameter(
            message: str,
            output_parameter_path: OutputPath(str),
        ):
            with open(output_parameter_path, 'w') as f:
                f.write(message)


        @dsl.component
        def consume_parameter(message: str):
            print(message)


        @dsl.pipeline(name='my-pipeline', pipeline_root='gs://my-bucket')
        def my_pipeline(message: str = 'default message'):
            create_param_op = create_parameter(message=message)
            consume_parameter(message=create_param_op.outputs['output_parameter_path'])
    """

    def __init__(self, type=None):
        self.type = construct_type_for_inputpath_or_outputpath(type)

    def __eq__(self, other):
        return isinstance(other, OutputPath) and self.type == other.type


class InputPath:
    """Type annotation used in component definitions for indicating a parameter
    is a path to an input.

    Example:
      ::

        @dsl.component
        def create_dataset(dataset_path: OutputPath('Dataset'),):
            import json
            dataset = {'my_dataset': [[1, 2, 3], [4, 5, 6]]}
            with open(dataset_path, 'w') as f:
                json.dump(dataset, f)


        @dsl.component
        def consume_dataset(dataset: InputPath('Dataset')):
            print(dataset)


        @dsl.pipeline(name='my-pipeline', pipeline_root='gs://my-bucket')
        def my_pipeline():
            create_dataset_op = create_dataset()
            consume_dataset(dataset=create_dataset_op.outputs['dataset_path'])
    """

    def __init__(self, type=None):
        self.type = construct_type_for_inputpath_or_outputpath(type)

    def __eq__(self, other):
        return isinstance(other, InputPath) and self.type == other.type


def construct_type_for_inputpath_or_outputpath(
        type_: Union[str, Type, None]) -> Union[str, None]:
    if type_annotations.is_artifact_class(type_):
        return type_utils.create_bundled_artifact_type(type_.schema_title,
                                                       type_.schema_version)
    elif isinstance(
            type_,
            str) and type_.lower() in type_utils._ARTIFACT_CLASSES_MAPPING:
        # v1 artifact backward compat, e.g. dsl.OutputPath('Dataset')
        return type_utils.create_bundled_artifact_type(
            type_utils._ARTIFACT_CLASSES_MAPPING[type_.lower()].schema_title)
    elif type_utils.get_parameter_type(type_):
        return type_
    else:
        # v1 unknown type, e.g. dsl.OutputPath('MyCustomType')
        return type_utils.create_bundled_artifact_type(
            artifact_types.Artifact.schema_title)
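
The branches above map roughly as follows (a sketch; the bundled-type string
``'<schema_title>@<schema_version>'`` is produced by
``type_utils.create_bundled_artifact_type``)::

    construct_type_for_inputpath_or_outputpath(artifact_types.Dataset)
    # -> 'system.Dataset@0.0.1'
    construct_type_for_inputpath_or_outputpath('Dataset')  # v1 compat
    # -> 'system.Dataset@0.0.1'
    construct_type_for_inputpath_or_outputpath(str)
    # -> str (parameter types pass through unchanged)
    construct_type_for_inputpath_or_outputpath('MyCustomType')
    # -> 'system.Artifact@0.0.1' (unknown v1 types fall back to Artifact)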


class InputAnnotation:
    """Marker type for input artifacts."""


class OutputAnnotation:
    """Marker type for output artifacts."""


def is_Input_Output_artifact_annotation(typ) -> bool:
    if not hasattr(typ, '__metadata__'):
        return False

    if typ.__metadata__[0] not in [InputAnnotation, OutputAnnotation]:
        return False

    return True


def is_input_artifact(typ) -> bool:
    """Returns True if typ is of type Input[T]."""
    if not is_Input_Output_artifact_annotation(typ):
        return False

    return typ.__metadata__[0] == InputAnnotation


def is_output_artifact(typ) -> bool:
    """Returns True if typ is of type Output[T]."""
    if not is_Input_Output_artifact_annotation(typ):
        return False

    return typ.__metadata__[0] == OutputAnnotation


def get_io_artifact_class(typ):
    from kfp.dsl import Input
    from kfp.dsl import Output
    if not is_Input_Output_artifact_annotation(typ):
        return None
    if typ == Input or typ == Output:
        return None

    # extract the inner type from a list of artifacts
    inner = typ.__args__[0]
    if hasattr(inner, '__origin__') and inner.__origin__ == list:
        return inner.__args__[0]

    return inner


def get_io_artifact_annotation(typ):
    if not is_Input_Output_artifact_annotation(typ):
        return None

    return typ.__metadata__[0]


T = TypeVar('T')


def maybe_strip_optional_from_annotation(annotation: T) -> T:
    """Strips 'Optional' from 'Optional[<type>]' if applicable.

    For example::
        Optional[str] -> str
        str -> str
        List[int] -> List[int]

    Args:
        annotation: The original type annotation, which may or may not have
            `Optional`.

    Returns:
        The type inside Optional[] if Optional exists, otherwise the original type.
    """
    if getattr(annotation, '__origin__',
               None) is Union and annotation.__args__[1] is type(None):
        return annotation.__args__[0]
    return annotation


def maybe_strip_optional_from_annotation_string(annotation: str) -> str:
    # slicing rather than lstrip: lstrip('Optional[') strips *characters*, not
    # the prefix, and would mangle inner types such as 'Optional[int]'
    if annotation.startswith('Optional[') and annotation.endswith(']'):
        return annotation[len('Optional['):-1]
    return annotation
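
For example::

    maybe_strip_optional_from_annotation_string('Optional[Dict[str, str]]')
    # -> 'Dict[str, str]'
    maybe_strip_optional_from_annotation_string('str')
    # -> 'str'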


def get_short_type_name(type_name: str) -> str:
    """Extracts the short form type name.

    This method is used for looking up serializer for a given type.

    For example::
        typing.List -> List
        typing.List[int] -> List
        typing.Dict[str, str] -> Dict
        List -> List
        str -> str

    Args:
        type_name: The original type name.

    Returns:
        The short form type name or the original name if pattern doesn't match.
    """
    match = re.match(r'(typing\.)?(?P<type>\w+)(?:\[.+\])?', type_name)
    return match['type'] if match else type_name


def is_artifact_class(artifact_class_or_instance: Type) -> bool:
    # we do not yet support non-pre-registered custom artifact types with instance_schema attribute
    return hasattr(artifact_class_or_instance, 'schema_title') and hasattr(
        artifact_class_or_instance, 'schema_version')


def is_list_of_artifacts(
    type_var: Union[Type[List[artifact_types.Artifact]],
                    Type[artifact_types.Artifact]]
) -> bool:
    # the type annotation for this function's `type_var` parameter may not actually be a subclass of the KFP SDK's Artifact class for custom artifact types
    is_list_or_list_generic = getattr(type_var, '__origin__', None) == list
    # in >= python3.9, List won't have .__args__ if it's used as `-> List` with no inner type argument
    contains_artifact = hasattr(
        type_var, '__args__') and type_annotations.is_artifact_class(
            type_var.__args__[0])
    return is_list_or_list_generic and contains_artifact

@@ -0,0 +1,543 @@
# Copyright 2020-2022 The Kubeflow Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Utilities for component I/O type mapping."""
|
||||
|
||||
from distutils import util
|
||||
import inspect
|
||||
import json
|
||||
from typing import Any, Callable, Dict, Optional, Type, Union
|
||||
import warnings
|
||||
|
||||
import kfp
|
||||
from kfp.dsl import structures
|
||||
from kfp.dsl import task_final_status
|
||||
from kfp.dsl.types import artifact_types
|
||||
from kfp.dsl.types import type_annotations
|
||||
|
||||
DEFAULT_ARTIFACT_SCHEMA_VERSION = '0.0.1'
|
||||
PARAMETER_TYPES = Union[str, int, float, bool, dict, list]
|
||||
|
||||
# ComponentSpec I/O types to DSL ontology artifact classes mapping.
|
||||
_ARTIFACT_CLASSES_MAPPING = {
|
||||
'artifact': artifact_types.Artifact,
|
||||
'model': artifact_types.Model,
|
||||
'dataset': artifact_types.Dataset,
|
||||
'metrics': artifact_types.Metrics,
|
||||
'classificationmetrics': artifact_types.ClassificationMetrics,
|
||||
'slicedclassificationmetrics': artifact_types.SlicedClassificationMetrics,
|
||||
'html': artifact_types.HTML,
|
||||
'markdown': artifact_types.Markdown,
|
||||
}
|
||||
|
||||
_GOOGLE_TYPES_PATTERN = r'^google.[A-Za-z]+$'
|
||||
_GOOGLE_TYPES_VERSION = DEFAULT_ARTIFACT_SCHEMA_VERSION
|
||||
|
||||
# ComponentSpec I/O types to (IR) PipelineTaskSpec I/O types mapping.
|
||||
# The keys are normalized (lowercased). These are types viewed as Parameters.
|
||||
# The values are the corresponding IR parameter primitive types.
|
||||
|
||||
# pipeline_spec_pb2.ParameterType enum values
|
||||
NUMBER_DOUBLE = 1
|
||||
NUMBER_INTEGER = 2
|
||||
STRING = 3
|
||||
BOOLEAN = 4
|
||||
LIST = 5
|
||||
STRUCT = 6
|
||||
PARAMETER_TYPES_MAPPING = {
|
||||
'integer': NUMBER_INTEGER,
|
||||
'int': NUMBER_INTEGER,
|
||||
'double': NUMBER_DOUBLE,
|
||||
'float': NUMBER_DOUBLE,
|
||||
'string': STRING,
|
||||
'str': STRING,
|
||||
'text': STRING,
|
||||
'bool': BOOLEAN,
|
||||
'boolean': BOOLEAN,
|
||||
'dict': STRUCT,
|
||||
'list': LIST,
|
||||
'jsonobject': STRUCT,
|
||||
'jsonarray': LIST,
|
||||
}
|
||||
|
||||
|
||||
def bool_cast_fn(default: Union[str, bool]) -> bool:
|
||||
if isinstance(default, str):
|
||||
default = util.strtobool(default) == 1
|
||||
return default
|
||||
|
||||
|
||||
def try_loading_json(default: str) -> Union[dict, list, str]:
|
||||
try:
|
||||
return json.loads(default)
|
||||
except json.JSONDecodeError:
|
||||
return default
|
||||
|
||||
|
||||
_V1_DEFAULT_DESERIALIZER_MAPPING: Dict[str, Callable] = {
|
||||
'integer': int,
|
||||
'int': int,
|
||||
'double': float,
|
||||
'float': float,
|
||||
'string': str,
|
||||
'str': str,
|
||||
'text': str,
|
||||
'bool': bool_cast_fn,
|
||||
'boolean': bool_cast_fn,
|
||||
'dict': try_loading_json,
|
||||
'list': try_loading_json,
|
||||
'jsonobject': try_loading_json,
|
||||
'jsonarray': try_loading_json,
|
||||
}
|
||||
|
||||
|
||||
def deserialize_v1_component_yaml_default(type_: str, default: Any) -> Any:
|
||||
"""Deserializes v1 default values to correct in-memory types.
|
||||
|
||||
Typecasts for primitive types. Tries to load JSON for arrays and
|
||||
structs.
|
||||
"""
|
||||
if default is None:
|
||||
return default
|
||||
if isinstance(type_, str):
|
||||
cast_fn = _V1_DEFAULT_DESERIALIZER_MAPPING.get(type_.lower(),
|
||||
lambda x: x)
|
||||
return cast_fn(default)
|
||||
return default
|
||||
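# Editor's note: an illustrative, hypothetical sketch of the v1 default
# deserialization above; expected values follow from
# _V1_DEFAULT_DESERIALIZER_MAPPING.
def _sketch_v1_default_deserialization():
    assert deserialize_v1_component_yaml_default('Integer', '42') == 42
    assert deserialize_v1_component_yaml_default('Boolean', 'True') is True
    assert deserialize_v1_component_yaml_default('JsonArray', '[1, 2]') == [1, 2]
    # unknown type names pass the default value through unchanged
    assert deserialize_v1_component_yaml_default('CustomType', 'x') == 'x'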
|
||||
|
||||
def is_task_final_status_type(type_name: Optional[Union[str, dict]]) -> bool:
|
||||
"""Check if a ComponentSpec I/O type is PipelineTaskFinalStatus.
|
||||
|
||||
Args:
|
||||
type_name: type name of the ComponentSpec I/O type.
|
||||
|
||||
Returns:
|
||||
True if the type name is 'PipelineTaskFinalStatus'.
|
||||
"""
|
||||
return isinstance(type_name, str) and (
|
||||
type_name == task_final_status.PipelineTaskFinalStatus.__name__)
|
||||
|
||||
|
||||
def is_parameter_type(type_name: Optional[Union[str, dict]]) -> bool:
|
||||
"""Check if a ComponentSpec I/O type is considered as a parameter type.
|
||||
|
||||
Args:
|
||||
type_name: type name of the ComponentSpec I/O type.
|
||||
|
||||
Returns:
|
||||
True if the type name maps to a parameter type else False.
|
||||
"""
|
||||
if isinstance(type_name, str):
|
||||
type_name = type_annotations.get_short_type_name(type_name)
|
||||
elif isinstance(type_name, dict):
|
||||
type_name = list(type_name.keys())[0]
|
||||
else:
|
||||
return False
|
||||
|
||||
return type_name.lower(
|
||||
) in PARAMETER_TYPES_MAPPING or is_task_final_status_type(type_name)
|
||||
|
||||
|
||||
def bundled_artifact_to_artifact_proto(
|
||||
bundled_artifact_str: str) -> 'pipeline_spec_pb2.ArtifactTypeSchema':
|
||||
"""Gets the IR ArtifactTypeSchema proto for a bundled artifact in form
|
||||
`<namespace>.<Name>@x.x.x` (e.g., system.Artifact@0.0.1)."""
|
||||
bundled_artifact_str, schema_version = bundled_artifact_str.split('@')
|
||||
|
||||
from kfp.pipeline_spec import pipeline_spec_pb2
|
||||
|
||||
return pipeline_spec_pb2.ArtifactTypeSchema(
|
||||
schema_title=bundled_artifact_str,
|
||||
schema_version=schema_version,
|
||||
)
|
||||
|
||||
|
||||
def get_parameter_type(
|
||||
param_type: Optional[Union[Type, str, dict]]
|
||||
) -> 'pipeline_spec_pb2.ParameterType':
|
||||
"""Get the IR I/O parameter type for the given ComponentSpec I/O type.
|
||||
|
||||
Args:
|
||||
param_type: type of the ComponentSpec I/O type. Can be a primitive Python
|
||||
builtin type or a type name.
|
||||
|
||||
Returns:
|
||||
The enum value of the mapped IR I/O primitive type.
|
||||
|
||||
Raises:
|
||||
AttributeError: if type_name is not a string type.
|
||||
"""
|
||||
# Special handling for PipelineTaskFinalStatus, treat it as Dict type.
|
||||
if is_task_final_status_type(param_type):
|
||||
param_type = 'dict'
|
||||
if type(param_type) == type:
|
||||
type_name = param_type.__name__
|
||||
elif isinstance(param_type, dict):
|
||||
type_name = list(param_type.keys())[0]
|
||||
else:
|
||||
type_name = type_annotations.get_short_type_name(str(param_type))
|
||||
return PARAMETER_TYPES_MAPPING.get(type_name.lower())
|
||||
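# Editor's note: a short, hypothetical sketch of the parameter-type lookup
# above; the expected values are the module-level ParameterType enum aliases.
def _sketch_get_parameter_type():
    assert get_parameter_type(str) == STRING
    assert get_parameter_type('Integer') == NUMBER_INTEGER
    assert get_parameter_type({'JsonObject': {}}) == STRUCT
    # PipelineTaskFinalStatus is treated as a Dict/STRUCT parameter
    assert get_parameter_type('PipelineTaskFinalStatus') == STRUCT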
|
||||
|
||||
def get_parameter_type_name(
|
||||
param_type: Optional[Union[Type, str, dict]]) -> str:
|
||||
"""Gets the parameter type name."""
|
||||
|
||||
from kfp.pipeline_spec import pipeline_spec_pb2
|
||||
|
||||
return pipeline_spec_pb2.ParameterType.ParameterTypeEnum.Name(
|
||||
get_parameter_type(param_type))
|
||||
|
||||
|
||||
class InconsistentTypeException(Exception):
|
||||
"""InconsistencyTypeException is raised when two types are not
|
||||
consistent."""
|
||||
|
||||
|
||||
class InconsistentTypeWarning(Warning):
|
||||
"""InconsistentTypeWarning is issued when two types are not consistent."""
|
||||
|
||||
|
||||
def _get_type_string_from_component_argument(
|
||||
argument_value: Union['pipeline_channel.PipelineChannel', str, bool, int,
|
||||
float, dict, list]
|
||||
) -> str:
|
||||
# avoid circular imports
|
||||
from kfp.dsl import pipeline_channel
|
||||
if isinstance(argument_value, pipeline_channel.PipelineChannel):
|
||||
return argument_value.channel_type
|
||||
|
||||
# argument is a constant
|
||||
argument_type = type(argument_value)
|
||||
if argument_type in _TYPE_TO_TYPE_NAME:
|
||||
return _TYPE_TO_TYPE_NAME[argument_type]
|
||||
|
||||
raise ValueError(
|
||||
f'Constant argument inputs must be one of type {list(_TYPE_TO_TYPE_NAME.values())}. Got: {argument_value!r} of type {type(argument_value)!r}.'
|
||||
)
|
||||
|
||||
|
||||
def verify_type_compatibility(
|
||||
given_value: Union['pipeline_channel.PipelineChannel', str, bool, int,
|
||||
float, dict, list],
|
||||
expected_spec: Union[structures.InputSpec, structures.OutputSpec],
|
||||
error_message_prefix: str,
|
||||
checks_input: bool = True,
|
||||
raise_on_error: bool = True,
|
||||
) -> bool:
|
||||
"""Verifies the given argument type is compatible with the expected type.
|
||||
|
||||
Args:
|
||||
given_value: The channel or constant provided as an argument.
|
||||
expected_spec: The InputSpec or OutputSpec that describes the expected type of given_value.
|
||||
error_message_prefix: The prefix for the error message.
|
||||
checks_input: True if checks an argument (given_value) against a component/pipeline input type (expected_spec). False if checks a component output (argument_value) against the pipeline output type (expected_spec).
|
||||
raise_on_error: Whether to raise on a type compatibility error; callers should pass kfp.TYPE_CHECK.
|
||||
|
||||
Returns:
|
||||
True if the types are compatible, False otherwise.
|
||||
|
||||
Raises:
|
||||
InconsistentTypeException: if the types are incompatible and raise_on_error=True.
|
||||
"""
|
||||
# extract and normalize types
|
||||
expected_type = expected_spec.type
|
||||
given_type = _get_type_string_from_component_argument(given_value)
|
||||
|
||||
given_is_param = is_parameter_type(str(given_type))
|
||||
if given_is_param:
|
||||
given_type = get_parameter_type_name(given_type)
|
||||
given_is_artifact_list = False
|
||||
else:
|
||||
given_is_artifact_list = given_value.is_artifact_list
|
||||
|
||||
expected_is_param = is_parameter_type(expected_type)
|
||||
if expected_is_param:
|
||||
expected_type = get_parameter_type_name(expected_type)
|
||||
expected_is_artifact_list = False
|
||||
else:
|
||||
expected_is_artifact_list = expected_spec.is_artifact_list
|
||||
|
||||
# compare the normalized types
|
||||
if given_is_param != expected_is_param:
|
||||
types_are_compatible = False
|
||||
elif given_is_param and expected_is_param:
|
||||
types_are_compatible = check_parameter_type_compatibility(
|
||||
given_type, expected_type)
|
||||
else:
|
||||
types_are_compatible = check_artifact_type_compatibility(
|
||||
given_type=given_type,
|
||||
given_is_artifact_list=given_is_artifact_list,
|
||||
expected_type=expected_type,
|
||||
expected_is_artifact_list=expected_is_artifact_list)
|
||||
|
||||
# maybe raise, maybe warn, return bool
|
||||
if not types_are_compatible:
|
||||
# update the types for lists of artifacts for error message
|
||||
given_type = f'List[{given_type}]' if given_is_artifact_list else given_type
|
||||
expected_type = f'List[{expected_type}]' if expected_is_artifact_list else expected_type
|
||||
if checks_input:
|
||||
error_message_suffix = f'Argument type {given_type!r} is incompatible with the input type {expected_type!r}'
|
||||
else:
|
||||
error_message_suffix = f'Output of type {given_type!r} cannot be surfaced as pipeline output type {expected_type!r}'
|
||||
error_text = error_message_prefix + error_message_suffix
|
||||
if raise_on_error:
|
||||
raise InconsistentTypeException(error_text)
|
||||
else:
|
||||
warnings.warn(InconsistentTypeWarning(error_text))
|
||||
|
||||
return types_are_compatible
|
||||
|
||||
|
||||
def check_artifact_type_compatibility(given_type: str,
|
||||
given_is_artifact_list: bool,
|
||||
expected_type: str,
|
||||
expected_is_artifact_list: bool) -> bool:
|
||||
given_schema_title, given_schema_version = given_type.split('@')
|
||||
expected_schema_title, expected_schema_version = expected_type.split('@')
|
||||
same_list_of_artifacts_status = expected_is_artifact_list == given_is_artifact_list
|
||||
if not same_list_of_artifacts_status:
|
||||
return False
|
||||
elif artifact_types.Artifact.schema_title in {
|
||||
given_schema_title, expected_schema_title
|
||||
}:
|
||||
return True
|
||||
else:
|
||||
schema_title_compatible = given_schema_title == expected_schema_title
|
||||
schema_version_compatible = given_schema_version.split(
|
||||
'.')[0] == expected_schema_version.split('.')[0]
|
||||
|
||||
return schema_title_compatible and schema_version_compatible
|
||||
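# Editor's note: an illustrative sketch of the artifact compatibility rules
# above. Assumes artifact_types.Artifact.schema_title == 'system.Artifact',
# which makes system.Artifact compatible with any artifact type; otherwise
# schema titles must match and major schema versions must agree.
def _sketch_artifact_compatibility():
    assert check_artifact_type_compatibility(
        'system.Dataset@0.0.1', False, 'system.Artifact@0.0.1', False)
    # same title, same major version: compatible
    assert check_artifact_type_compatibility(
        'system.Model@0.2.0', False, 'system.Model@0.5.1', False)
    # single artifact vs. list of artifacts: incompatible
    assert not check_artifact_type_compatibility(
        'system.Model@0.0.1', False, 'system.Model@0.0.1', True)
    # different major schema versions: incompatible
    assert not check_artifact_type_compatibility(
        'system.Model@1.0.0', False, 'system.Model@0.0.1', False)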
|
||||
|
||||
def check_parameter_type_compatibility(given_type: str,
|
||||
expected_type: str) -> bool:
|
||||
if isinstance(given_type, str) and isinstance(expected_type, str):
|
||||
return given_type == expected_type
|
||||
else:
|
||||
return check_v1_struct_parameter_type_compatibility(
|
||||
given_type, expected_type)
|
||||
|
||||
|
||||
def check_v1_struct_parameter_type_compatibility(
|
||||
given_type: Union[str, dict],
|
||||
expected_type: Union[str, dict],
|
||||
) -> bool:
|
||||
if isinstance(given_type, str):
|
||||
given_type = {given_type: {}}
|
||||
if isinstance(expected_type, str):
|
||||
expected_type = {expected_type: {}}
|
||||
return _check_dict_types(given_type, expected_type)
|
||||
|
||||
|
||||
def _check_dict_types(
|
||||
given_type: dict,
|
||||
expected_type: dict,
|
||||
) -> bool:
|
||||
given_type_name, _ = list(given_type.items())[0]
|
||||
expected_type_name, _ = list(expected_type.items())[0]
|
||||
if given_type_name == '' or expected_type_name == '':
|
||||
# An empty type name matches any type
|
||||
return True
|
||||
if given_type_name != expected_type_name:
|
||||
print('type name ' + str(given_type_name) +
|
||||
' is different from expected: ' + str(expected_type_name))
|
||||
return False
|
||||
type_name = given_type_name
|
||||
for type_property in given_type[type_name]:
|
||||
if type_property not in expected_type[type_name]:
|
||||
print(type_name + ' has a property ' + str(type_property) +
|
||||
' that the expected type does not.')
|
||||
return False
|
||||
if given_type[type_name][type_property] != expected_type[type_name][
|
||||
type_property]:
|
||||
print(type_name + ' has a property ' + str(type_property) +
|
||||
' with value: ' + str(given_type[type_name][type_property]) +
|
||||
' while the expected value is ' + str(expected_type[type_name][type_property]))
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
_TYPE_TO_TYPE_NAME = {
|
||||
str: 'String',
|
||||
int: 'Integer',
|
||||
float: 'Float',
|
||||
bool: 'Boolean',
|
||||
list: 'List',
|
||||
dict: 'Dict',
|
||||
}
|
||||
|
||||
|
||||
def get_canonical_type_name_for_type(typ: Type) -> Optional[str]:
|
||||
"""Find the canonical type name for a given type.
|
||||
|
||||
Args:
|
||||
typ: The type to search for.
|
||||
|
||||
Returns:
|
||||
The canonical name of the type found.
|
||||
"""
|
||||
return _TYPE_TO_TYPE_NAME.get(typ, None)
|
||||
|
||||
|
||||
class TypeCheckManager:
|
||||
"""Context manager to set a type check mode within context, then restore
|
||||
mode to original value upon exiting the context."""
|
||||
|
||||
def __init__(self, enable: bool) -> None:
|
||||
"""TypeCheckManager constructor.
|
||||
|
||||
Args:
|
||||
enable: Type check mode used within context.
|
||||
"""
|
||||
self._enable = enable
|
||||
|
||||
def __enter__(self) -> 'TypeCheckManager':
|
||||
"""Set type check mode to self._enable.
|
||||
|
||||
Returns:
|
||||
TypeCheckManager: Returns itself.
|
||||
"""
|
||||
self._prev = kfp.TYPE_CHECK
|
||||
kfp.TYPE_CHECK = self._enable
|
||||
return self
|
||||
|
||||
def __exit__(self, *unused_args) -> None:
|
||||
"""Restore type check mode to its previous state."""
|
||||
kfp.TYPE_CHECK = self._prev
|
||||
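# Editor's note: a minimal usage sketch of TypeCheckManager. Within the
# `with` block the global kfp.TYPE_CHECK flag takes the requested value;
# the previous mode is restored on exit.
def _sketch_type_check_manager():
    original = kfp.TYPE_CHECK
    with TypeCheckManager(enable=False):
        assert kfp.TYPE_CHECK is False  # type checking disabled here
    assert kfp.TYPE_CHECK == original  # previous mode restored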
|
||||
|
||||
# for reading in IR back to in-memory data structures
|
||||
IR_TYPE_TO_IN_MEMORY_SPEC_TYPE = {
|
||||
'STRING': 'String',
|
||||
'NUMBER_INTEGER': 'Integer',
|
||||
'NUMBER_DOUBLE': 'Float',
|
||||
'LIST': 'List',
|
||||
'STRUCT': 'Dict',
|
||||
'BOOLEAN': 'Boolean',
|
||||
'TASK_FINAL_STATUS': task_final_status.PipelineTaskFinalStatus.__name__,
|
||||
}
|
||||
|
||||
IR_TYPE_TO_COMMENT_TYPE_STRING = {
|
||||
'STRING': str.__name__,
|
||||
'NUMBER_INTEGER': int.__name__,
|
||||
'NUMBER_DOUBLE': float.__name__,
|
||||
'LIST': list.__name__,
|
||||
'STRUCT': dict.__name__,
|
||||
'BOOLEAN': bool.__name__,
|
||||
'TASK_FINAL_STATUS': task_final_status.PipelineTaskFinalStatus.__name__,
|
||||
}
|
||||
|
||||
IN_MEMORY_SPEC_TYPE_TO_IR_TYPE = {
|
||||
v: k for k, v in IR_TYPE_TO_IN_MEMORY_SPEC_TYPE.items()
|
||||
}
|
||||
|
||||
|
||||
def get_canonical_name_for_outer_generic(type_name: Any) -> str:
|
||||
"""Maps a complex/nested type name back to a canonical type.
|
||||
|
||||
E.g.
|
||||
get_canonical_name_for_outer_generic('typing.List[str]')
|
||||
'List'
|
||||
|
||||
get_canonical_name_for_outer_generic('typing.Dict[typing.List[str], str]')
|
||||
'Dict'
|
||||
|
||||
Args:
|
||||
type_name (Any): The type. Returns input if not a string.
|
||||
|
||||
Returns:
|
||||
str: The canonical type.
|
||||
"""
|
||||
if not isinstance(type_name, str):
|
||||
return type_name
|
||||
|
||||
if type_name.startswith('typing.'):
|
||||
# slice off the exact 'typing.' prefix; str.lstrip strips a character set
type_name = type_name[len('typing.'):]
|
||||
|
||||
if type_name.lower().startswith('list') or type_name.lower().startswith(
|
||||
'dict'):
|
||||
return type_name.split('[')[0]
|
||||
|
||||
else:
|
||||
return type_name
|
||||
|
||||
|
||||
def create_bundled_artifact_type(schema_title: str,
|
||||
schema_version: Optional[str] = None) -> str:
|
||||
if not isinstance(schema_title, str):
|
||||
raise ValueError(f'schema_title must be a string. Got: {schema_title!r} of type {type(schema_title)!r}.')
|
||||
return schema_title + '@' + (
|
||||
schema_version or DEFAULT_ARTIFACT_SCHEMA_VERSION)
|
||||
|
||||
|
||||
def validate_schema_version(schema_version: str) -> None:
|
||||
split_schema_version = schema_version.split('.')
|
||||
if len(split_schema_version) != 3:
|
||||
raise TypeError(
|
||||
f'Artifact schema_version must use three-part semantic versioning. Got: {schema_version}'
|
||||
)
|
||||
|
||||
|
||||
def validate_schema_title(schema_title: str) -> None:
|
||||
split_schema_title = schema_title.split('.')
|
||||
if len(split_schema_title) != 2:
|
||||
raise TypeError(
|
||||
f'Artifact schema_title must have both a namespace and a name, separated by a `.`. Got: {schema_title}'
|
||||
)
|
||||
namespace, _ = split_schema_title
|
||||
if namespace not in {'system', 'google'}:
|
||||
raise TypeError(
|
||||
f'Artifact schema_title must belong to `system` or `google` namespace. Got: {schema_title}'
|
||||
)
|
||||
|
||||
|
||||
def validate_bundled_artifact_type(type_: str) -> None:
|
||||
split_type = type_.split('@')
|
||||
# two parts and neither are empty strings
|
||||
if len(split_type) != 2 or not all(split_type):
|
||||
raise TypeError(
|
||||
f'Artifacts must have both a schema_title and a schema_version, separated by `@`. Got: {type_}'
|
||||
)
|
||||
schema_title, schema_version = split_type
|
||||
validate_schema_title(schema_title)
|
||||
validate_schema_version(schema_version)
|
||||
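# Editor's note: a hypothetical sketch of the bundled artifact type helpers
# above, showing the `<namespace>.<Name>@<semver>` format they enforce.
def _sketch_bundled_artifact_types():
    assert create_bundled_artifact_type('system.Model') == 'system.Model@0.0.1'
    assert create_bundled_artifact_type('system.Model', '1.2.3') == 'system.Model@1.2.3'
    validate_bundled_artifact_type('system.Model@0.0.1')  # passes silently
    try:
        validate_bundled_artifact_type('Model@0.0.1')  # missing namespace
    except TypeError:
        pass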
|
||||
|
||||
def _annotation_to_type_struct(annotation):
|
||||
if not annotation or annotation == inspect.Parameter.empty:
|
||||
return None
|
||||
if hasattr(annotation, 'to_dict'):
|
||||
annotation = annotation.to_dict()
|
||||
if isinstance(annotation, dict):
|
||||
return annotation
|
||||
if isinstance(annotation, type):
|
||||
type_struct = get_canonical_type_name_for_type(annotation)
|
||||
if type_struct:
|
||||
return type_struct
|
||||
elif type_annotations.is_artifact_class(annotation):
|
||||
schema_title = annotation.schema_title
|
||||
else:
|
||||
schema_title = str(annotation.__name__)
|
||||
elif hasattr(annotation, '__forward_arg__'):
|
||||
schema_title = str(annotation.__forward_arg__)
|
||||
else:
|
||||
schema_title = str(annotation)
|
||||
type_struct = get_canonical_type_name_for_type(schema_title)
|
||||
return type_struct or schema_title
|
||||
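# Editor's note: a short, hypothetical sketch of _annotation_to_type_struct.
# Builtins map to canonical names; artifact classes map to their
# schema_title; anything unrecognized falls through as its string form.
def _sketch_annotation_to_type_struct():
    assert _annotation_to_type_struct(str) == 'String'
    assert _annotation_to_type_struct(dict) == 'Dict'
    assert _annotation_to_type_struct(None) is None
    # e.g. _annotation_to_type_struct(artifact_types.Model) -> 'system.Model'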
|
||||
|
||||
def is_typed_named_tuple_annotation(annotation: Any) -> bool:
|
||||
return hasattr(annotation, '_fields') and hasattr(annotation,
|
||||
'__annotations__')
|
||||
|
|
@ -727,7 +727,6 @@ class TestTypeChecking(parameterized.TestCase):
|
|||
given_value=argument_value,
|
||||
expected_spec=parameter_input_spec,
|
||||
error_message_prefix='',
|
||||
raise_on_error=kfp.TYPE_CHECK,
|
||||
))
|
||||
else:
|
||||
with self.assertRaises(InconsistentTypeException):
|
||||
|
|
@ -735,7 +734,6 @@ class TestTypeChecking(parameterized.TestCase):
|
|||
given_value=argument_value,
|
||||
expected_spec=parameter_input_spec,
|
||||
error_message_prefix='',
|
||||
raise_on_error=kfp.TYPE_CHECK,
|
||||
)
|
||||
|
||||
def test_list_of_artifacts_across_compilation_valid(self):
|
||||
|
|
@ -0,0 +1,128 @@
|
|||
# Copyright 2021 The Kubeflow Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Definitions of utils methods."""
|
||||
|
||||
import importlib
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import types
|
||||
from typing import List
|
||||
|
||||
_COMPONENT_NAME_PREFIX = 'comp-'
|
||||
_EXECUTOR_LABEL_PREFIX = 'exec-'
|
||||
|
||||
|
||||
def load_module(module_name: str, module_directory: str) -> types.ModuleType:
|
||||
"""Dynamically imports the Python module with the given name and package
|
||||
path.
|
||||
|
||||
E.g., Assuming there is a file called `my_module.py` under
|
||||
`/some/directory`, we can use::
|
||||
|
||||
load_module('my_module', '/some/directory')
|
||||
|
||||
to effectively `import my_module`.
|
||||
|
||||
Args:
|
||||
module_name: The name of the module.
|
||||
module_directory: The directory in which the module file resides.
|
||||
"""
|
||||
module_spec = importlib.util.spec_from_file_location(
|
||||
name=module_name,
|
||||
location=os.path.join(module_directory, f'{module_name}.py'))
|
||||
module = importlib.util.module_from_spec(module_spec)
|
||||
sys.modules[module_spec.name] = module
|
||||
sys.path.insert(0, str(module_directory))
|
||||
module_spec.loader.exec_module(module)
|
||||
return module
|
||||
|
||||
|
||||
def maybe_rename_for_k8s(name: str) -> str:
|
||||
"""Cleans and converts a name to be k8s compatible.
|
||||
|
||||
Args:
|
||||
name: The original name.
|
||||
|
||||
Returns:
|
||||
A sanitized name.
|
||||
"""
|
||||
return re.sub('-+', '-', re.sub('[^-0-9a-z]+', '-',
|
||||
name.lower())).lstrip('-').rstrip('-')
|
||||
|
||||
|
||||
def sanitize_input_name(name: str) -> str:
|
||||
"""Sanitizes input name."""
|
||||
return re.sub('[^_0-9a-z]+', '_', name.lower()).lstrip('_').rstrip('_')
|
||||
|
||||
|
||||
def sanitize_component_name(name: str) -> str:
|
||||
"""Sanitizes component name."""
|
||||
return _COMPONENT_NAME_PREFIX + maybe_rename_for_k8s(name)
|
||||
|
||||
|
||||
def sanitize_task_name(name: str) -> str:
|
||||
"""Sanitizes task name."""
|
||||
return maybe_rename_for_k8s(name)
|
||||
|
||||
|
||||
def sanitize_executor_label(label: str) -> str:
|
||||
"""Sanitizes executor label."""
|
||||
return _EXECUTOR_LABEL_PREFIX + maybe_rename_for_k8s(label)
|
||||
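# Editor's note: illustrative results for the sanitizers above, following
# directly from their regular expressions.
def _sketch_sanitizers():
    assert maybe_rename_for_k8s('My Pipeline!') == 'my-pipeline'
    assert sanitize_input_name('My Input') == 'my_input'
    assert sanitize_component_name('Train Model') == 'comp-train-model'
    assert sanitize_executor_label('Train Model') == 'exec-train-model'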
|
||||
|
||||
def make_name_unique_by_adding_index(
|
||||
name: str,
|
||||
collection: List[str],
|
||||
delimiter: str,
|
||||
) -> str:
|
||||
"""Makes a unique name by adding index.
|
||||
|
||||
The index starts from 2 and increases by 1 until a unique name is found.
|
||||
|
||||
Args:
|
||||
name: The original name.
|
||||
collection: The collection of existing names.
|
||||
delimiter: The delimiter to connect the original name and an index.
|
||||
|
||||
Returns:
|
||||
A unique name composed of name+delimiter+next index
|
||||
"""
|
||||
unique_name = name
|
||||
if unique_name in collection:
|
||||
for i in range(2, sys.maxsize**10):
|
||||
unique_name = name + delimiter + str(i)
|
||||
if unique_name not in collection:
|
||||
break
|
||||
return unique_name
|
||||
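# Editor's note: a quick, hypothetical sketch of the unique-name helper above.
def _sketch_unique_names():
    assert make_name_unique_by_adding_index('task', [], '-') == 'task'
    assert make_name_unique_by_adding_index('task', ['task'], '-') == 'task-2'
    assert make_name_unique_by_adding_index('task', ['task', 'task-2'], '-') == 'task-3'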
|
||||
|
||||
def validate_pipeline_name(name: str) -> None:
|
||||
"""Validate pipeline name.
|
||||
|
||||
A valid pipeline name should match ^[a-z0-9][a-z0-9-]{0,127}$.
|
||||
|
||||
Args:
|
||||
name: The pipeline name.
|
||||
|
||||
Raises:
|
||||
ValueError if the pipeline name doesn't conform to the regular expression.
|
||||
"""
|
||||
pattern = re.compile(r'^[a-z0-9][a-z0-9-]{0,127}$')
|
||||
if not pattern.match(name):
|
||||
raise ValueError(
|
||||
'Invalid pipeline name: %s.\n'
|
||||
'Please specify a pipeline name that matches the regular '
|
||||
'expression "^[a-z0-9][a-z0-9-]{0,127}$" using '
|
||||
'`dsl.pipeline(name=...)` decorator.' % name)
|
||||
|
|
@ -0,0 +1,44 @@
|
|||
# Copyright 2018-2022 The Kubeflow Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import hashlib
|
||||
import warnings
|
||||
|
||||
from kfp.dsl import v1_structures
|
||||
import yaml
|
||||
|
||||
|
||||
def _load_component_spec_from_component_text(
|
||||
text) -> v1_structures.ComponentSpec:
|
||||
component_dict = yaml.safe_load(text)
|
||||
component_spec = v1_structures.ComponentSpec.from_dict(component_dict)
|
||||
|
||||
if isinstance(component_spec.implementation,
|
||||
v1_structures.ContainerImplementation) and (
|
||||
component_spec.implementation.container.command is None):
|
||||
warnings.warn(
|
||||
'Container component must specify command to be compatible with KFP '
|
||||
'v2 compatible mode and emissary executor, which will be the default'
|
||||
' executor for KFP v2. '
|
||||
'https://www.kubeflow.org/docs/components/pipelines/installation/choose-executor/',
|
||||
category=FutureWarning,
|
||||
)
|
||||
|
||||
# Calculating hash digest for the component
|
||||
data = text if isinstance(text, bytes) else text.encode('utf-8')
|
||||
data = data.replace(b'\r\n', b'\n') # Normalizing line endings
|
||||
digest = hashlib.sha256(data).hexdigest()
|
||||
component_spec._digest = digest
|
||||
|
||||
return component_spec
|
||||
|
|
@ -0,0 +1,379 @@
|
|||
# Copyright 2018-2022 The Kubeflow Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from collections import abc
|
||||
from collections import OrderedDict
|
||||
import inspect
|
||||
from typing import (Any, cast, Dict, get_type_hints, List, Mapping,
|
||||
MutableMapping, MutableSequence, Sequence, Type, TypeVar,
|
||||
Union)
|
||||
|
||||
T = TypeVar('T')
|
||||
|
||||
|
||||
def verify_object_against_type(x: Any, typ: Type[T]) -> T:
|
||||
"""Verifies that the object is compatible to the specified type (types from
|
||||
the typing package can be used)."""
|
||||
#TODO: Merge with parse_object_from_struct_based_on_type which has almost the same code
|
||||
if typ is type(None):
|
||||
if x is None:
|
||||
return x
|
||||
else:
|
||||
raise TypeError(f'Error: Object "{x}" is not None.')
|
||||
|
||||
if typ is Any or type(typ) is TypeVar:
|
||||
return x
|
||||
|
||||
try: #isinstance can fail for generics
|
||||
if isinstance(x, typ):
|
||||
return cast(typ, x)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if hasattr(typ, '__origin__'): #Handling generic types
|
||||
if typ.__origin__ is Union: #Optional == Union
|
||||
exception_map = {}
|
||||
possible_types = typ.__args__
|
||||
if type(
|
||||
None
|
||||
) in possible_types and x is None: #Shortcut for Optional[] tests. Can be removed, but the exceptions will be more noisy.
|
||||
return x
|
||||
for possible_type in possible_types:
|
||||
try:
|
||||
verify_object_against_type(x, possible_type)
|
||||
return x
|
||||
except Exception as ex:
|
||||
exception_map[possible_type] = ex
|
||||
#exception_lines = ['Exception for type {}: {}.'.format(t, e) for t, e in exception_map.items()]
|
||||
exception_lines = [str(e) for e in exception_map.values()]
|
||||
exception_lines.append(
|
||||
f'Error: Object "{x}" is incompatible with type "{typ}".')
|
||||
raise TypeError('\n'.join(exception_lines))
|
||||
|
||||
#not Union => not None
|
||||
if x is None:
|
||||
raise TypeError(
|
||||
f'Error: None object is incompatible with type {typ}')
|
||||
|
||||
generic_type = typ.__origin__
|
||||
if generic_type in [
|
||||
list, List, abc.Sequence, abc.MutableSequence, Sequence,
|
||||
MutableSequence
|
||||
] and type(x) is not str: #! str is also Sequence
|
||||
if not isinstance(x, generic_type):
|
||||
raise TypeError(
|
||||
f'Error: Object "{x}" is incompatible with type "{typ}"')
|
||||
|
||||
# In Python 3.9 typ.__args__ does not exist when the generic type does not have subscripts
|
||||
type_args = typ.__args__ if getattr(
|
||||
typ, '__args__', None) is not None else (Any, Any)
|
||||
inner_type = type_args[0]
|
||||
for item in x:
|
||||
verify_object_against_type(item, inner_type)
|
||||
return x
|
||||
|
||||
elif generic_type in [
|
||||
dict, Dict, abc.Mapping, abc.MutableMapping, Mapping,
|
||||
MutableMapping, OrderedDict
|
||||
]:
|
||||
if not isinstance(x, generic_type):
|
||||
raise TypeError(
|
||||
f'Error: Object "{x}" is incompatible with type "{typ}"')
|
||||
|
||||
# In Python 3.9 typ.__args__ does not exist when the generic type does not have subscripts
|
||||
type_args = typ.__args__ if getattr(
|
||||
typ, '__args__', None) is not None else (Any, Any)
|
||||
inner_key_type = type_args[0]
|
||||
inner_value_type = type_args[1]
|
||||
for k, v in x.items():
|
||||
verify_object_against_type(k, inner_key_type)
|
||||
verify_object_against_type(v, inner_value_type)
|
||||
return x
|
||||
|
||||
else:
|
||||
raise TypeError(
|
||||
f'Error: Unsupported generic type "{typ}". type.__origin__ or type.__extra__ == "{generic_type}"'
|
||||
)
|
||||
|
||||
raise TypeError(f'Error: Object "{x}" is incompatible with type "{typ}"')
|
||||
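# Editor's note: an illustrative sketch of verify_object_against_type. On
# success the object itself is returned; on a mismatch a TypeError is raised
# (note that str is deliberately not accepted where a Sequence is expected).
def _sketch_verify_object_against_type():
    from typing import Dict, List, Optional
    assert verify_object_against_type([1, 2], List[int]) == [1, 2]
    assert verify_object_against_type({'a': 1}, Dict[str, int]) == {'a': 1}
    assert verify_object_against_type(None, Optional[str]) is None
    try:
        verify_object_against_type('x', List[int])
    except TypeError:
        pass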
|
||||
|
||||
def parse_object_from_struct_based_on_type(struct: Any, typ: Type[T]) -> T:
|
||||
"""Constructs an object from structure (usually dict) based on type.
|
||||
|
||||
Supports list and dict types from the typing package plus Optional[]
|
||||
and Union[] types. If some type is a class that has .from_dict class
|
||||
method, that method is used for object construction.
|
||||
"""
|
||||
if typ is type(None):
|
||||
if struct is None:
|
||||
return None
|
||||
else:
|
||||
raise TypeError(f'Error: Structure "{struct}" is not None.')
|
||||
|
||||
if typ is Any or type(typ) is TypeVar:
|
||||
return struct
|
||||
|
||||
try: #isinstance can fail for generics
|
||||
#if (isinstance(struct, typ)
|
||||
# and not (typ is Sequence and type(struct) is str) #! str is also Sequence
|
||||
# and not (typ is int and type(struct) is bool) #! bool is int
|
||||
#):
|
||||
if type(struct) is typ:
|
||||
return struct
|
||||
except Exception:
|
||||
pass
|
||||
if hasattr(typ, 'from_dict'):
|
||||
try: #More informative errors
|
||||
return typ.from_dict(struct)
|
||||
except Exception as ex:
|
||||
raise TypeError(
|
||||
f'Error: {typ.__name__}.from_dict(struct={struct}) failed with exception:\n{str(ex)}'
|
||||
)
|
||||
if hasattr(typ, '__origin__'): #Handling generic types
|
||||
if typ.__origin__ is Union: #Optional == Union
|
||||
results = {}
|
||||
exception_map = {}
|
||||
# In Python 3.9 typ.__args__ does not exist when the generic type does not have subscripts
|
||||
# Union without subscripts seems useless, but semantically it should be the same as Any.
|
||||
possible_types = list(getattr(typ, '__args__', [Any]))
|
||||
#if type(None) in possible_types and struct is None: #Shortcut for Optional[] tests. Can be removed, but the exceptions will be more noisy.
|
||||
# return None
|
||||
|
||||
for possible_type in possible_types:
|
||||
try:
|
||||
obj = parse_object_from_struct_based_on_type(
|
||||
struct, possible_type)
|
||||
results[possible_type] = obj
|
||||
except Exception as ex:
|
||||
if isinstance(ex, TypeError):
|
||||
exception_map[possible_type] = ex
|
||||
else:
|
||||
exception_map[
|
||||
possible_type] = f'Unexpected exception when trying to convert structure "{struct}" to type "{typ}": {type(ex)}: {ex}'
|
||||
|
||||
#Single successful parsing.
|
||||
if len(results) == 1:
|
||||
return list(results.values())[0]
|
||||
|
||||
if len(results) > 1:
|
||||
raise TypeError(
|
||||
f'Error: Structure "{struct}" is ambiguous. It can be parsed to multiple types: {list(results.keys())}.'
|
||||
)
|
||||
|
||||
exception_lines = [str(e) for e in exception_map.values()]
|
||||
exception_lines.append(
|
||||
f'Error: Structure "{struct}" is incompatible with type "{typ}" - none of the types in Union are compatible.'
|
||||
)
|
||||
raise TypeError('\n'.join(exception_lines))
|
||||
#not Union => not None
|
||||
if struct is None:
|
||||
raise TypeError(
|
||||
f'Error: None structure is incompatible with type {typ}')
|
||||
|
||||
generic_type = typ.__origin__
|
||||
if generic_type in [
|
||||
list, List, abc.Sequence, abc.MutableSequence, Sequence,
|
||||
MutableSequence
|
||||
] and type(struct) is not str: #! str is also Sequence
|
||||
if not isinstance(struct, generic_type):
|
||||
raise TypeError(
|
||||
f'Error: Structure "{struct}" is incompatible with type "{typ}" - it does not have list type.'
|
||||
)
|
||||
|
||||
# In Python 3.9 typ.__args__ does not exist when the generic type does not have subscripts
|
||||
type_args = typ.__args__ if getattr(
|
||||
typ, '__args__', None) is not None else (Any, Any)
|
||||
inner_type = type_args[0]
|
||||
return [
|
||||
parse_object_from_struct_based_on_type(item, inner_type)
|
||||
for item in struct
|
||||
]
|
||||
|
||||
elif generic_type in [
|
||||
dict, Dict, abc.Mapping, abc.MutableMapping, Mapping,
|
||||
MutableMapping, OrderedDict
|
||||
]:
|
||||
if not isinstance(struct, generic_type):
|
||||
raise TypeError(
|
||||
f'Error: Structure "{struct}" is incompatible with type "{typ}" - it does not have dict type.'
|
||||
)
|
||||
|
||||
# In Python 3.9 typ.__args__ does not exist when the generic type does not have subscripts
|
||||
type_args = typ.__args__ if getattr(
|
||||
typ, '__args__', None) is not None else (Any, Any)
|
||||
inner_key_type = type_args[0]
|
||||
inner_value_type = type_args[1]
|
||||
return {
|
||||
parse_object_from_struct_based_on_type(k, inner_key_type):
|
||||
parse_object_from_struct_based_on_type(v, inner_value_type)
|
||||
for k, v in struct.items()
|
||||
}
|
||||
|
||||
else:
|
||||
raise TypeError(
|
||||
f'Error: Unsupported generic type "{typ}". type.__origin__ or type.__extra__ == "{generic_type}"'
|
||||
)
|
||||
|
||||
raise TypeError(
|
||||
f'Error: Structure "{struct}" is incompatible with type "{typ}". Structure is not the instance of the type, the type does not have .from_dict method and is not generic.'
|
||||
)
|
||||
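# Editor's note: a brief, hypothetical sketch of
# parse_object_from_struct_based_on_type using only typing-module types
# (no .from_dict classes involved).
def _sketch_parse_from_struct():
    from typing import Dict, List, Optional
    assert parse_object_from_struct_based_on_type([1, 2], List[int]) == [1, 2]
    assert parse_object_from_struct_based_on_type({'a': 1}, Dict[str, int]) == {'a': 1}
    assert parse_object_from_struct_based_on_type(None, Optional[int]) is None
    try:
        parse_object_from_struct_based_on_type('nope', Dict[str, str])
    except TypeError:
        pass  # the structure does not have dict type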
|
||||
|
||||
def convert_object_to_struct(obj, serialized_names: Mapping[str, str] = {}):
|
||||
"""Converts an object to structure (usually a dict).
|
||||
|
||||
Serializes all properties that do not start with underscores. If the
|
||||
type of some property is a class that has .to_dict class method,
|
||||
that method is used for conversion. Used by the ModelBase class.
|
||||
"""
|
||||
signature = inspect.signature(obj.__init__) #Needed for default values
|
||||
result = {}
|
||||
for python_name in signature.parameters: #TODO: Make it possible to specify the field ordering regardless of the presence of default values
|
||||
value = getattr(obj, python_name)
|
||||
if python_name.startswith('_'):
|
||||
continue
|
||||
attr_name = serialized_names.get(python_name, python_name)
|
||||
if hasattr(value, 'to_dict'):
|
||||
result[attr_name] = value.to_dict()
|
||||
elif isinstance(value, list):
|
||||
result[attr_name] = [
|
||||
(x.to_dict() if hasattr(x, 'to_dict') else x) for x in value
|
||||
]
|
||||
elif isinstance(value, dict):
|
||||
result[attr_name] = {
|
||||
k: (v.to_dict() if hasattr(v, 'to_dict') else v)
|
||||
for k, v in value.items()
|
||||
}
|
||||
else:
|
||||
param = signature.parameters.get(python_name, None)
|
||||
if param is None or param.default == inspect.Parameter.empty or value != param.default:
|
||||
result[attr_name] = value
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def parse_object_from_struct_based_on_class_init(
|
||||
cls: Type[T],
|
||||
struct: Mapping,
|
||||
serialized_names: Mapping[str, str] = {}) -> T:
|
||||
"""Constructs an object of specified class from structure (usually dict)
|
||||
using the class.__init__ method. Converts all constructor arguments to
|
||||
appropriate types based on the __init__ type hints. Used by the ModelBase
|
||||
class.
|
||||
|
||||
Arguments:
|
||||
|
||||
serialized_names: specifies the mapping between __init__ parameter names and the structure key names for cases where these names are different (due to language syntax clashes or style differences).
|
||||
"""
|
||||
parameter_types = get_type_hints(
|
||||
cls.__init__) #Properly resolves forward references
|
||||
|
||||
serialized_names_to_pythonic = {v: k for k, v in serialized_names.items()}
|
||||
#If a pythonic name has a different original name, we forbid the pythonic name in the structure. Otherwise, this function would accept "python-styled" structures that should be invalid
|
||||
forbidden_struct_keys = set(
|
||||
serialized_names_to_pythonic.values()).difference(
|
||||
serialized_names_to_pythonic.keys())
|
||||
args = {}
|
||||
for original_name, value in struct.items():
|
||||
if original_name in forbidden_struct_keys:
|
||||
raise ValueError(
|
||||
f'Use "{serialized_names[original_name]}" key instead of pythonic key "{original_name}" in the structure: {struct}.'
|
||||
)
|
||||
python_name = serialized_names_to_pythonic.get(original_name,
|
||||
original_name)
|
||||
param_type = parameter_types.get(python_name, None)
|
||||
if param_type is not None:
|
||||
args[python_name] = parse_object_from_struct_based_on_type(
|
||||
value, param_type)
|
||||
else:
|
||||
args[python_name] = value
|
||||
|
||||
return cls(**args)
|
||||
|
||||
|
||||
class ModelBase:
|
||||
"""Base class for types that can be converted to JSON-like dict structures
|
||||
or constructed from such structures. The object fields, their types and
|
||||
default values are taken from the __init__ method arguments. Override the
|
||||
_serialized_names mapping to control the key names of the serialized
|
||||
structures.
|
||||
|
||||
The derived class objects will have the .from_dict and .to_dict methods for conversion to or from structure. The base class constructor accepts the arguments map, checks the argument types and sets the object field values.
|
||||
|
||||
Example derived class:
|
||||
|
||||
class TaskSpec(ModelBase):
|
||||
_serialized_names = {
|
||||
'component_ref': 'componentRef',
|
||||
'is_enabled': 'isEnabled',
|
||||
}
|
||||
|
||||
def __init__(self,
|
||||
component_ref: ComponentReference,
|
||||
arguments: Optional[Mapping[str, ArgumentType]] = None,
|
||||
is_enabled: Optional[Union[ArgumentType, EqualsPredicate, NotEqualsPredicate]] = None, #Optional property with default value
|
||||
):
|
||||
super().__init__(locals()) #Calling the ModelBase constructor to check the argument types and set the object field values.
|
||||
|
||||
task_spec = TaskSpec.from_dict({'componentRef': {...}, 'isEnabled': {'and': {...}}}) # = instance of TaskSpec
|
||||
task_struct = task_spec.to_dict() #= {'componentRef': {...}, 'isEnabled': {'and': {...}}}
|
||||
"""
|
||||
_serialized_names = {}
|
||||
|
||||
def __init__(self, args):
|
||||
parameter_types = get_type_hints(self.__class__.__init__)
|
||||
field_values = {
|
||||
k: v
|
||||
for k, v in args.items()
|
||||
if k != 'self' and not k.startswith('_')
|
||||
}
|
||||
for k, v in field_values.items():
|
||||
parameter_type = parameter_types.get(k, None)
|
||||
if parameter_type is not None:
|
||||
try:
|
||||
verify_object_against_type(v, parameter_type)
|
||||
except Exception as e:
|
||||
raise TypeError(
|
||||
f'Argument for {k} is not compatible with type "{parameter_type}". Exception: {e}'
|
||||
)
|
||||
self.__dict__.update(field_values)
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls: Type[T], struct: Mapping) -> T:
|
||||
return parse_object_from_struct_based_on_class_init(
|
||||
cls, struct, serialized_names=cls._serialized_names)
|
||||
|
||||
def to_dict(self) -> Dict[str, Any]:
|
||||
return convert_object_to_struct(
|
||||
self, serialized_names=self._serialized_names)
|
||||
|
||||
def _get_field_names(self):
|
||||
return list(inspect.signature(self.__init__).parameters)
|
||||
|
||||
def __repr__(self):
|
||||
return self.__class__.__name__ + '(' + ', '.join(
|
||||
param + '=' + repr(getattr(self, param))
|
||||
for param in self._get_field_names()) + ')'
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.__class__ == other.__class__ and {
|
||||
k: getattr(self, k) for k in self._get_field_names()
|
||||
} == {k: getattr(other, k) for k in other._get_field_names()}
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def __hash__(self):
|
||||
return hash(repr(self))
|
||||
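# Editor's note: a self-contained sketch of the ModelBase machinery above,
# using a hypothetical two-field model. Field types come from the __init__
# type hints; _serialized_names renames keys on (de)serialization.
class _SketchSpec(ModelBase):
    _serialized_names = {'display_name': 'displayName'}

    def __init__(self, display_name: str, retries: int = 0):
        super().__init__(locals())


def _sketch_model_base():
    spec = _SketchSpec.from_dict({'displayName': 'demo', 'retries': 3})
    assert spec.display_name == 'demo' and spec.retries == 3
    assert spec.to_dict() == {'displayName': 'demo', 'retries': 3}
    # note: fields left at their default values are omitted by to_dict()
    assert _SketchSpec(display_name='demo').to_dict() == {'displayName': 'demo'}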
|
|
@ -0,0 +1,851 @@
|
|||
# Copyright 2018-2022 The Kubeflow Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from collections import OrderedDict
|
||||
from typing import Any, Dict, List, Mapping, Optional, Union
|
||||
|
||||
from kfp.dsl.v1_modelbase import ModelBase
|
||||
import yaml
|
||||
|
||||
PrimitiveTypes = Union[str, int, float, bool]
|
||||
PrimitiveTypesIncludingNone = Optional[PrimitiveTypes]
|
||||
|
||||
TypeSpecType = Union[str, Dict, List]
|
||||
|
||||
|
||||
class InputSpec(ModelBase):
|
||||
"""Describes the component input specification."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
name: str,
|
||||
type: Optional[TypeSpecType] = None,
|
||||
description: Optional[str] = None,
|
||||
default: Optional[PrimitiveTypes] = None,
|
||||
optional: Optional[bool] = False,
|
||||
annotations: Optional[Dict[str, Any]] = None,
|
||||
):
|
||||
super().__init__(locals())
|
||||
|
||||
|
||||
class OutputSpec(ModelBase):
|
||||
"""Describes the component output specification."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
name: str,
|
||||
type: Optional[TypeSpecType] = None,
|
||||
description: Optional[str] = None,
|
||||
annotations: Optional[Dict[str, Any]] = None,
|
||||
):
|
||||
super().__init__(locals())
|
||||
|
||||
|
||||
class InputValuePlaceholder(ModelBase): #Non-standard attr names
|
||||
"""Represents the command-line argument placeholder that will be replaced
|
||||
at run-time by the input argument value."""
|
||||
_serialized_names = {
|
||||
'input_name': 'inputValue',
|
||||
}
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
input_name: str,
|
||||
):
|
||||
super().__init__(locals())
|
||||
|
||||
|
||||
class InputPathPlaceholder(ModelBase): #Non-standard attr names
|
||||
"""Represents the command-line argument placeholder that will be replaced
|
||||
at run-time by a local file path pointing to a file containing the input
|
||||
argument value."""
|
||||
_serialized_names = {
|
||||
'input_name': 'inputPath',
|
||||
}
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
input_name: str,
|
||||
):
|
||||
super().__init__(locals())
|
||||
|
||||
|
||||
class OutputPathPlaceholder(ModelBase): #Non-standard attr names
|
||||
"""Represents the command-line argument placeholder that will be replaced
|
||||
at run-time by a local file path pointing to a file where the program
|
||||
should write its output data."""
|
||||
_serialized_names = {
|
||||
'output_name': 'outputPath',
|
||||
}
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
output_name: str,
|
||||
):
|
||||
super().__init__(locals())
|
||||
|
||||
|
||||
class InputUriPlaceholder(ModelBase): # Non-standard attr names
|
||||
"""Represents a placeholder for the URI of an input artifact.
|
||||
|
||||
Represents the command-line argument placeholder that will be
|
||||
replaced at run-time by the URI of the input artifact argument.
|
||||
"""
|
||||
_serialized_names = {
|
||||
'input_name': 'inputUri',
|
||||
}
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
input_name: str,
|
||||
):
|
||||
super().__init__(locals())
|
||||
|
||||
|
||||
class OutputUriPlaceholder(ModelBase): # Non-standard attr names
|
||||
"""Represents a placeholder for the URI of an output artifact.
|
||||
|
||||
Represents the command-line argument placeholder that will be
|
||||
replaced at run-time by the URI of the output artifact where the
|
||||
program should write its output data.
|
||||
"""
|
||||
_serialized_names = {
|
||||
'output_name': 'outputUri',
|
||||
}
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
output_name: str,
|
||||
):
|
||||
super().__init__(locals())
|
||||
|
||||
|
||||
class InputMetadataPlaceholder(ModelBase): # Non-standard attr names
|
||||
"""Represents the file path to an input artifact metadata.
|
||||
|
||||
During runtime, this command-line argument placeholder will be
|
||||
replaced by the path where the metadata file associated with this
|
||||
artifact has been written to. Currently only supported in v2
|
||||
components.
|
||||
"""
|
||||
_serialized_names = {
|
||||
'input_name': 'inputMetadata',
|
||||
}
|
||||
|
||||
def __init__(self, input_name: str):
|
||||
super().__init__(locals())
|
||||
|
||||
|
||||
class InputOutputPortNamePlaceholder(ModelBase): # Non-standard attr names
|
||||
"""Represents the output port name of an input artifact.
|
||||
|
||||
During compile time, this command-line argument placeholder will be
|
||||
replaced by the actual output port name used by the producer task.
|
||||
Currently only supported in v2 components.
|
||||
"""
|
||||
_serialized_names = {
|
||||
'input_name': 'inputOutputPortName',
|
||||
}
|
||||
|
||||
def __init__(self, input_name: str):
|
||||
super().__init__(locals())
|
||||
|
||||
|
||||
class OutputMetadataPlaceholder(ModelBase): # Non-standard attr names
|
||||
"""Represents the output metadata JSON file location of this task.
|
||||
|
||||
This file will encode the metadata information produced by this task:
|
||||
- Artifacts metadata, but not the content of the artifact, and
|
||||
- output parameters.
|
||||
|
||||
Only supported in v2 components.
|
||||
"""
|
||||
_serialized_names = {
|
||||
'output_metadata': 'outputMetadata',
|
||||
}
|
||||
|
||||
def __init__(self, output_metadata: type(None) = None):
|
||||
if output_metadata:
|
||||
raise RuntimeError(
|
||||
'Output metadata placeholder cannot be associated with key')
|
||||
super().__init__(locals())
|
||||
|
||||
def to_dict(self) -> Mapping[str, Any]:
|
||||
# Override parent implementation. Otherwise it always returns {}.
|
||||
return {'outputMetadata': None}
|
||||
|
||||
|
||||
class ExecutorInputPlaceholder(ModelBase): # Non-standard attr names
|
||||
"""Represents the serialized ExecutorInput message at runtime.
|
||||
|
||||
This placeholder will be replaced by a serialized
|
||||
[ExecutorInput](https://github.com/kubeflow/pipelines/blob/61f9c2c328d245d89c9d9b8c923f24dbbd08cdc9/api/v2alpha1/pipeline_spec.proto#L730)
|
||||
proto message at runtime, which includes parameters of the task, artifact
|
||||
URIs and metadata.
|
||||
"""
|
||||
_serialized_names = {
|
||||
'executor_input': 'executorInput',
|
||||
}
|
||||
|
||||
def __init__(self, executor_input: type(None) = None):
|
||||
if executor_input:
|
||||
raise RuntimeError(
|
||||
f'Executor input placeholder cannot be associated with input key. Got {executor_input}'
|
||||
)
|
||||
super().__init__(locals())
|
||||
|
||||
def to_dict(self) -> Mapping[str, Any]:
|
||||
# Override parent implementation. Otherwise it always returns {}.
|
||||
return {'executorInput': None}
|
||||
|
||||
|
||||
CommandlineArgumentType = Union[str, InputValuePlaceholder,
|
||||
InputPathPlaceholder, OutputPathPlaceholder,
|
||||
InputUriPlaceholder, OutputUriPlaceholder,
|
||||
InputMetadataPlaceholder,
|
||||
InputOutputPortNamePlaceholder,
|
||||
OutputMetadataPlaceholder,
|
||||
ExecutorInputPlaceholder, 'ConcatPlaceholder',
|
||||
'IfPlaceholder',]
|
||||
|
||||
|
||||
class ConcatPlaceholder(ModelBase): #Non-standard attr names
|
||||
"""Represents the command-line argument placeholder that will be replaced
|
||||
at run-time by the concatenated values of its items."""
|
||||
_serialized_names = {
|
||||
'items': 'concat',
|
||||
}
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
items: List[CommandlineArgumentType],
|
||||
):
|
||||
super().__init__(locals())
|
||||
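# Editor's note: an illustrative sketch of placeholder serialization. The
# _serialized_names mappings above produce the v1 component YAML keys.
def _sketch_placeholder_serialization():
    assert InputValuePlaceholder('x').to_dict() == {'inputValue': 'x'}
    assert OutputPathPlaceholder('out').to_dict() == {'outputPath': 'out'}
    concat = ConcatPlaceholder(items=['--x=', InputValuePlaceholder('x')])
    assert concat.to_dict() == {'concat': ['--x=', {'inputValue': 'x'}]}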
|
||||
|
||||
class IsPresentPlaceholder(ModelBase): #Non-standard attr names
|
||||
"""Represents the command-line argument placeholder that will be replaced
|
||||
at run-time by a boolean value specifying whether the caller has passed an
|
||||
argument for the specified optional input."""
|
||||
_serialized_names = {
|
||||
'input_name': 'isPresent',
|
||||
}
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
input_name: str,
|
||||
):
|
||||
super().__init__(locals())
|
||||
|
||||
|
||||
IfConditionArgumentType = Union[bool, str, IsPresentPlaceholder,
|
||||
InputValuePlaceholder]
|
||||
|
||||
|
||||
class IfPlaceholderStructure(ModelBase): #Non-standard attr names
|
||||
"""Used by IfPlaceholder - the command-line argument placeholder that will be replaced at run-time by the expanded value of either "then_value" or "else_value" depending on the submission-time resolved value of the "cond" predicate."""
|
||||
_serialized_names = {
|
||||
'condition': 'cond',
|
||||
'then_value': 'then',
|
||||
'else_value': 'else',
|
||||
}
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
condition: IfConditionArgumentType,
|
||||
then_value: Union[CommandlineArgumentType,
|
||||
List[CommandlineArgumentType]],
|
||||
else_value: Optional[Union[CommandlineArgumentType,
|
||||
List[CommandlineArgumentType]]] = None,
|
||||
):
|
||||
super().__init__(locals())
|
||||
|
||||
|
||||
class IfPlaceholder(ModelBase): #Non-standard attr names
|
||||
"""Represents the command-line argument placeholder that will be replaced
|
||||
at run-time by the expanded value of either "then_value" or "else_value"
|
||||
depending on the submission-time resolved value of the "cond" predicate."""
|
||||
_serialized_names = {
|
||||
'if_structure': 'if',
|
||||
}
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
if_structure: IfPlaceholderStructure,
|
||||
):
|
||||
super().__init__(locals())
|
||||
|
||||
|
||||
class ContainerSpec(ModelBase):
|
||||
"""Describes the container component implementation."""
|
||||
_serialized_names = {
|
||||
'file_outputs':
|
||||
'fileOutputs', #TODO: rename to something like legacy_unconfigurable_output_paths
|
||||
}
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
image: str,
|
||||
command: Optional[List[CommandlineArgumentType]] = None,
|
||||
args: Optional[List[CommandlineArgumentType]] = None,
|
||||
env: Optional[Mapping[str, str]] = None,
|
||||
file_outputs:
|
||||
Optional[Mapping[
|
||||
str,
|
||||
str]] = None, #TODO: rename to something like legacy_unconfigurable_output_paths
|
||||
):
|
||||
super().__init__(locals())
|
||||
|
||||
|
||||
class ContainerImplementation(ModelBase):
|
||||
"""Represents the container component implementation."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
container: ContainerSpec,
|
||||
):
|
||||
super().__init__(locals())
|
||||
|
||||
|
||||
ImplementationType = Union[ContainerImplementation, 'GraphImplementation']
|
||||
|
||||
|
||||
class MetadataSpec(ModelBase):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
annotations: Optional[Dict[str, str]] = None,
|
||||
labels: Optional[Dict[str, str]] = None,
|
||||
):
|
||||
super().__init__(locals())
|
||||
|
||||
|
||||
class ComponentSpec(ModelBase):
|
||||
"""Component specification.
|
||||
|
||||
Describes the metadata (name, description, annotations and labels),
|
||||
the interface (inputs and outputs) and the implementation of the
|
||||
component.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
name: Optional[str] = None, #? Move to metadata?
|
||||
        description: Optional[str] = None,  #? Move to metadata?
        metadata: Optional[MetadataSpec] = None,
        inputs: Optional[List[InputSpec]] = None,
        outputs: Optional[List[OutputSpec]] = None,
        implementation: Optional[ImplementationType] = None,
        version: Optional[str] = 'google.com/cloud/pipelines/component/v1',
        #tags: Optional[Set[str]] = None,
    ):
        super().__init__(locals())
        self._post_init()

    def _post_init(self):
        # Checking input names for uniqueness
        self._inputs_dict = {}
        if self.inputs:
            for input in self.inputs:
                if input.name in self._inputs_dict:
                    raise ValueError(f'Non-unique input name "{input.name}"')
                self._inputs_dict[input.name] = input

        # Checking output names for uniqueness
        self._outputs_dict = {}
        if self.outputs:
            for output in self.outputs:
                if output.name in self._outputs_dict:
                    raise ValueError(f'Non-unique output name "{output.name}"')
                self._outputs_dict[output.name] = output

        if isinstance(self.implementation, ContainerImplementation):
            container = self.implementation.container

            if container.file_outputs:
                for output_name, path in container.file_outputs.items():
                    if output_name not in self._outputs_dict:
                        raise TypeError(
                            'Unconfigurable output entry "{}" references non-existing output.'
                            .format({output_name: path}))

            def verify_arg(arg):
                if arg is None:
                    pass
                elif isinstance(
                        arg, (str, int, float, bool, OutputMetadataPlaceholder,
                              ExecutorInputPlaceholder)):
                    pass
                elif isinstance(arg, list):
                    for arg2 in arg:
                        verify_arg(arg2)
                elif isinstance(
                        arg,
                    (InputUriPlaceholder, InputValuePlaceholder,
                     InputPathPlaceholder, IsPresentPlaceholder,
                     InputMetadataPlaceholder, InputOutputPortNamePlaceholder)):
                    if arg.input_name not in self._inputs_dict:
                        raise TypeError(
                            f'Argument "{arg}" references non-existing input.')
                elif isinstance(arg,
                                (OutputUriPlaceholder, OutputPathPlaceholder)):
                    if arg.output_name not in self._outputs_dict:
                        raise TypeError(
                            f'Argument "{arg}" references non-existing output.')
                elif isinstance(arg, ConcatPlaceholder):
                    for arg2 in arg.items:
                        verify_arg(arg2)
                elif isinstance(arg, IfPlaceholder):
                    verify_arg(arg.if_structure.condition)
                    verify_arg(arg.if_structure.then_value)
                    verify_arg(arg.if_structure.else_value)
                else:
                    raise TypeError(f'Unexpected argument "{arg}"')

            verify_arg(container.command)
            verify_arg(container.args)

        if isinstance(self.implementation, GraphImplementation):
            graph = self.implementation.graph

            if graph.output_values is not None:
                for output_name, argument in graph.output_values.items():
                    if output_name not in self._outputs_dict:
                        raise TypeError(
                            'Graph output argument entry "{}" references non-existing output.'
                            .format({output_name: argument}))

            if graph.tasks is not None:
                for task in graph.tasks.values():
                    if task.arguments is not None:
                        for argument in task.arguments.values():
                            if isinstance(
                                    argument, GraphInputArgument
                            ) and argument.graph_input.input_name not in self._inputs_dict:
                                raise TypeError(
                                    f'Argument "{argument}" references non-existing input.'
                                )

    def save(self, file_path: str):
        """Saves the component definition to file.

        It can be shared online and later loaded using the
        load_component function.
        """
        component_yaml = yaml.dump(self.to_dict(), sort_keys=True)
        with open(file_path, 'w') as f:
            f.write(component_yaml)

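
The _post_init hook above gives this spec eager validation: duplicate port names and placeholder references to unknown ports fail at construction time rather than at serialization. A minimal sketch of that behavior (hedged: the kfp.dsl.v1_structures import path and the name parameter, declared above this excerpt, are assumptions based on how the module is used elsewhere in this commit):

    from kfp.dsl.v1_structures import ComponentSpec, InputSpec

    try:
        ComponentSpec(
            name='My op',
            inputs=[InputSpec(name='x'), InputSpec(name='x')],  # duplicate name
        )
    except ValueError as e:
        print(e)  # Non-unique input name "x"
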
class ComponentReference(ModelBase):
    """Component reference.

    Contains information that can be used to locate and load a component
    by name, digest or URL
    """

    def __init__(
        self,
        name: Optional[str] = None,
        digest: Optional[str] = None,
        tag: Optional[str] = None,
        url: Optional[str] = None,
        spec: Optional[ComponentSpec] = None,
    ):
        super().__init__(locals())
        self._post_init()

    def _post_init(self) -> None:
        if not any([self.name, self.digest, self.tag, self.url, self.spec]):
            raise TypeError('Need at least one argument.')


class GraphInputReference(ModelBase):
    """References the input of the graph (the scope is a single graph)."""
    _serialized_names = {
        'input_name': 'inputName',
    }

    def __init__(
        self,
        input_name: str,
        type: Optional[TypeSpecType] = None,  # Can be used to override the reference data type
    ):
        super().__init__(locals())

    def as_argument(self) -> 'GraphInputArgument':
        return GraphInputArgument(graph_input=self)

    def with_type(self, type_spec: TypeSpecType) -> 'GraphInputReference':
        return GraphInputReference(
            input_name=self.input_name,
            type=type_spec,
        )

    def without_type(self) -> 'GraphInputReference':
        return self.with_type(None)


class GraphInputArgument(ModelBase):
    """Represents the component argument value that comes from the graph
    component input."""
    _serialized_names = {
        'graph_input': 'graphInput',
    }

    def __init__(
        self,
        graph_input: GraphInputReference,
    ):
        super().__init__(locals())


class TaskOutputReference(ModelBase):
    """References the output of some task (the scope is a single graph)."""
    _serialized_names = {
        'task_id': 'taskId',
        'output_name': 'outputName',
    }

    def __init__(
        self,
        output_name: str,
        task_id: Optional[str] = None,  # Used for linking to the upstream task in serialized component file.
        task: Optional['TaskSpec'] = None,  # Used for linking to the upstream task in runtime since Task does not have an ID until inserted into a graph.
        type: Optional[TypeSpecType] = None,  # Can be used to override the reference data type
    ):
        super().__init__(locals())
        if self.task_id is None and self.task is None:
            raise TypeError('task_id and task cannot be None at the same time.')

    def with_type(self, type_spec: TypeSpecType) -> 'TaskOutputReference':
        return TaskOutputReference(
            output_name=self.output_name,
            task_id=self.task_id,
            task=self.task,
            type=type_spec,
        )

    def without_type(self) -> 'TaskOutputReference':
        return self.with_type(None)


class TaskOutputArgument(ModelBase):  # Has additional constructor for convenience
    """Represents the component argument value that comes from the output of
    another task."""
    _serialized_names = {
        'task_output': 'taskOutput',
    }

    def __init__(
        self,
        task_output: TaskOutputReference,
    ):
        super().__init__(locals())

    @staticmethod
    def construct(
        task_id: str,
        output_name: str,
    ) -> 'TaskOutputArgument':
        return TaskOutputArgument(
            TaskOutputReference(
                task_id=task_id,
                output_name=output_name,
            ))

    def with_type(self, type_spec: TypeSpecType) -> 'TaskOutputArgument':
        return TaskOutputArgument(
            task_output=self.task_output.with_type(type_spec),)

    def without_type(self) -> 'TaskOutputArgument':
        return self.with_type(None)


ArgumentType = Union[PrimitiveTypes, GraphInputArgument, TaskOutputArgument]

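
A quick sketch of how these reference/argument pairs compose (assuming this module is importable as kfp.dsl.v1_structures, as the loader code in this commit suggests):

    from kfp.dsl.v1_structures import GraphInputReference, TaskOutputArgument

    # Wrap a graph input as an argument, overriding its declared type.
    graph_arg = GraphInputReference(input_name='region').with_type('String').as_argument()

    # Reference the 'model' output of an upstream task by its serialized ID.
    task_arg = TaskOutputArgument.construct(task_id='train', output_name='model')
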
class TwoOperands(ModelBase):

    def __init__(
        self,
        op1: ArgumentType,
        op2: ArgumentType,
    ):
        super().__init__(locals())


class BinaryPredicate(ModelBase):  # abstract base type

    def __init__(self, operands: TwoOperands):
        super().__init__(locals())


class EqualsPredicate(BinaryPredicate):
    """Represents the "equals" comparison predicate."""
    _serialized_names = {'operands': '=='}


class NotEqualsPredicate(BinaryPredicate):
    """Represents the "not equals" comparison predicate."""
    _serialized_names = {'operands': '!='}


class GreaterThanPredicate(BinaryPredicate):
    """Represents the "greater than" comparison predicate."""
    _serialized_names = {'operands': '>'}


class GreaterThanOrEqualPredicate(BinaryPredicate):
    """Represents the "greater than or equal" comparison predicate."""
    _serialized_names = {'operands': '>='}


class LessThenPredicate(BinaryPredicate):
    """Represents the "less than" comparison predicate."""
    _serialized_names = {'operands': '<'}


class LessThenOrEqualPredicate(BinaryPredicate):
    """Represents the "less than or equal" comparison predicate."""
    _serialized_names = {'operands': '<='}


PredicateType = Union[ArgumentType, EqualsPredicate, NotEqualsPredicate,
                      GreaterThanPredicate, GreaterThanOrEqualPredicate,
                      LessThenPredicate, LessThenOrEqualPredicate,
                      'NotPredicate', 'AndPredicate', 'OrPredicate',]


class TwoBooleanOperands(ModelBase):

    def __init__(
        self,
        op1: PredicateType,
        op2: PredicateType,
    ):
        super().__init__(locals())


class NotPredicate(ModelBase):
    """Represents the "not" logical operation."""
    _serialized_names = {'operand': 'not'}

    def __init__(self, operand: PredicateType):
        super().__init__(locals())


class AndPredicate(ModelBase):
    """Represents the "and" logical operation."""
    _serialized_names = {'operands': 'and'}

    def __init__(self, operands: TwoBooleanOperands):
        super().__init__(locals())


class OrPredicate(ModelBase):
    """Represents the "or" logical operation."""
    _serialized_names = {'operands': 'or'}

    def __init__(self, operands: TwoBooleanOperands):
        super().__init__(locals())

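
Each predicate serializes under its operator symbol ('==', '>', 'and', ...), so a condition is a small tree of TwoOperands/TwoBooleanOperands nodes. A hedged sketch (import path assumed as above):

    from kfp.dsl.v1_structures import (AndPredicate, EqualsPredicate,
                                       GreaterThanPredicate, TwoBooleanOperands,
                                       TwoOperands)

    # (1.0 > 0.9) and ('deploy' == 'true')
    condition = AndPredicate(
        TwoBooleanOperands(
            op1=GreaterThanPredicate(TwoOperands(op1=1.0, op2=0.9)),
            op2=EqualsPredicate(TwoOperands(op1='deploy', op2='true')),
        ))
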
class RetryStrategySpec(ModelBase):
    _serialized_names = {
        'max_retries': 'maxRetries',
    }

    def __init__(
        self,
        max_retries: int,
    ):
        super().__init__(locals())


class CachingStrategySpec(ModelBase):
    _serialized_names = {
        'max_cache_staleness': 'maxCacheStaleness',
    }

    def __init__(
        self,
        max_cache_staleness: Optional[str] = None,  # RFC3339 compliant duration: P30DT1H22M3S
    ):
        super().__init__(locals())


class ExecutionOptionsSpec(ModelBase):
    _serialized_names = {
        'retry_strategy': 'retryStrategy',
        'caching_strategy': 'cachingStrategy',
    }

    def __init__(
        self,
        retry_strategy: Optional[RetryStrategySpec] = None,
        caching_strategy: Optional[CachingStrategySpec] = None,
    ):
        super().__init__(locals())

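
ExecutionOptionsSpec bundles the two per-task knobs defined above; note that maxCacheStaleness expects an RFC3339 duration string. For example (import path assumed as above):

    from kfp.dsl.v1_structures import (CachingStrategySpec, ExecutionOptionsSpec,
                                       RetryStrategySpec)

    options = ExecutionOptionsSpec(
        retry_strategy=RetryStrategySpec(max_retries=3),
        caching_strategy=CachingStrategySpec(max_cache_staleness='P30D'),
    )
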
class TaskSpec(ModelBase):
    """Task specification.

    Task is a "configured" component - a component supplied with arguments and other applied configuration changes.
    """
    _serialized_names = {
        'component_ref': 'componentRef',
        'is_enabled': 'isEnabled',
        'execution_options': 'executionOptions'
    }

    def __init__(
        self,
        component_ref: ComponentReference,
        arguments: Optional[Mapping[str, ArgumentType]] = None,
        is_enabled: Optional[PredicateType] = None,
        execution_options: Optional[ExecutionOptionsSpec] = None,
        annotations: Optional[Dict[str, Any]] = None,
    ):
        super().__init__(locals())
        # TODO: If component_ref is resolved to component spec, then check that the arguments correspond to the inputs

    def _init_outputs(self):
        # Adding output references to the task
        if self.component_ref.spec is None:
            return
        task_outputs = OrderedDict()
        for output in self.component_ref.spec.outputs or []:
            task_output_ref = TaskOutputReference(
                output_name=output.name,
                task=self,
                type=output.type,  # TODO: Resolve type expressions. E.g. type: {TypeOf: Input 1}
            )
            task_output_arg = TaskOutputArgument(task_output=task_output_ref)
            task_outputs[output.name] = task_output_arg

        self.outputs = task_outputs
        if len(task_outputs) == 1:
            self.output = list(task_outputs.values())[0]

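
A task, then, is just a component reference plus concrete arguments and options. A sketch with illustrative names (import path assumed as above):

    from kfp.dsl.v1_structures import (ComponentReference, GraphInputReference,
                                       TaskSpec)

    task = TaskSpec(
        component_ref=ComponentReference(name='trainer'),
        arguments={
            'region': GraphInputReference(input_name='region').as_argument(),
        },
    )
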
class GraphSpec(ModelBase):
    """Describes the graph component implementation.

    It represents a graph of component tasks connected to the upstream
    sources of data using the argument specifications. It also describes
    the sources of graph output values.
    """
    _serialized_names = {
        'output_values': 'outputValues',
    }

    def __init__(
        self,
        tasks: Mapping[str, TaskSpec],
        output_values: Mapping[str, ArgumentType] = None,
    ):
        super().__init__(locals())
        self._post_init()

    def _post_init(self):
        # Checking task output references and preparing the dependency table
        task_dependencies = {}
        for task_id, task in self.tasks.items():
            dependencies = set()
            task_dependencies[task_id] = dependencies
            if task.arguments is not None:
                for argument in task.arguments.values():
                    if isinstance(argument, TaskOutputArgument):
                        dependencies.add(argument.task_output.task_id)
                        if argument.task_output.task_id not in self.tasks:
                            raise TypeError(
                                f'Argument "{argument}" references non-existing task.'
                            )

        # Topologically sorting tasks to detect cycles
        task_dependents = {k: set() for k in task_dependencies.keys()}
        for task_id, dependencies in task_dependencies.items():
            for dependency in dependencies:
                task_dependents[dependency].add(task_id)
        task_number_of_remaining_dependencies = {
            k: len(v) for k, v in task_dependencies.items()
        }
        sorted_tasks = OrderedDict()

        def process_task(task_id):
            if task_number_of_remaining_dependencies[
                    task_id] == 0 and task_id not in sorted_tasks:
                sorted_tasks[task_id] = self.tasks[task_id]
                for dependent_task in task_dependents[task_id]:
                    task_number_of_remaining_dependencies[
                        dependent_task] = task_number_of_remaining_dependencies[
                            dependent_task] - 1
                    process_task(dependent_task)

        for task_id in task_dependencies.keys():
            process_task(task_id)
        if len(sorted_tasks) != len(task_dependencies):
            tasks_with_unsatisfied_dependencies = {
                k: v
                for k, v in task_number_of_remaining_dependencies.items()
                if v > 0
            }
            task_with_minimal_number_of_unsatisfied_dependencies = min(
                tasks_with_unsatisfied_dependencies.keys(),
                key=lambda task_id: tasks_with_unsatisfied_dependencies[task_id]
            )
            raise ValueError(
                f'Task "{task_with_minimal_number_of_unsatisfied_dependencies}" has cyclical dependency.'
            )

        self._toposorted_tasks = sorted_tasks

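
The cycle check in GraphSpec._post_init is a Kahn-style topological sort: count unresolved dependencies per task, emit every task whose count reaches zero, and flag a cycle if anything is left over. The same idea in isolation, as a self-contained sketch:

    from collections import OrderedDict

    def toposort(deps):
        """deps: task_id -> set of upstream task_ids it depends on."""
        dependents = {k: set() for k in deps}
        for task_id, upstream in deps.items():
            for dep in upstream:
                dependents[dep].add(task_id)
        remaining = {k: len(v) for k, v in deps.items()}
        order = OrderedDict()

        def visit(task_id):
            # Emit a task once all of its dependencies have been emitted.
            if remaining[task_id] == 0 and task_id not in order:
                order[task_id] = True
                for dependent in dependents[task_id]:
                    remaining[dependent] -= 1
                    visit(dependent)

        for task_id in deps:
            visit(task_id)
        if len(order) != len(deps):
            raise ValueError('cyclical dependency')
        return list(order)

    print(toposort({'a': set(), 'b': {'a'}, 'c': {'b'}}))  # ['a', 'b', 'c']
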
class GraphImplementation(ModelBase):
    """Represents the graph component implementation."""

    def __init__(
        self,
        graph: GraphSpec,
    ):
        super().__init__(locals())


class PipelineRunSpec(ModelBase):
    """The object that can be sent to the backend to start a new Run."""
    _serialized_names = {
        'root_task': 'rootTask',
        #'on_exit_task': 'onExitTask',
    }

    def __init__(
        self,
        root_task: TaskSpec,
        #on_exit_task: Optional[TaskSpec] = None,
    ):
        super().__init__(locals())

@ -0,0 +1,54 @@

# Copyright 2021-2022 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Component loaded from YAML."""

from google.protobuf import json_format
from kfp.dsl import base_component
from kfp.dsl import structures
from kfp.pipeline_spec import pipeline_spec_pb2


class YamlComponent(base_component.BaseComponent):
    """A component loaded from a YAML file.

    **Note:** ``YamlComponent`` is not intended to be used to construct components directly. Use ``kfp.components.load_component_from_*()`` instead.

    Attributes:
        component_spec: Component definition.
        component_yaml: The YAML string that this component is loaded from.
    """

    def __init__(
        self,
        component_spec: structures.ComponentSpec,
        component_yaml: str,
    ):
        super().__init__(component_spec=component_spec)
        self.component_yaml = component_yaml

    @property
    def pipeline_spec(self) -> pipeline_spec_pb2.PipelineSpec:
        """Returns the pipeline spec of the component."""
        component_dict = structures.load_documents_from_yaml(
            self.component_yaml)[0]
        is_v1 = 'implementation' in set(component_dict.keys())
        if is_v1:
            return self.component_spec.to_pipeline_spec()
        else:
            return json_format.ParseDict(component_dict,
                                         pipeline_spec_pb2.PipelineSpec())

    def execute(self, *args, **kwargs):
        """Not implemented."""
        raise NotImplementedError

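
The pipeline_spec property above is what lets one loader serve both formats: v1 component YAML (detected by its top-level implementation key) is compiled via to_pipeline_spec(), while a v2 document is already PipelineSpec JSON and is parsed directly. A usage sketch:

    from kfp import components

    component = components.load_component_from_text('''
    name: Print msg
    inputs:
    - {name: msg, type: String}
    implementation:
      container:
        image: alpine
        command: [echo, {inputValue: msg}]
    ''')
    spec = component.pipeline_spec  # v1 path: compiled by to_pipeline_spec()
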
@ -10,7 +10,6 @@ google-api-core>=1.31.5,<3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0
google-auth>=1.6.1,<3
# https://github.com/googleapis/python-storage/blob/main/CHANGELOG.md#221-2022-03-15
google-cloud-storage>=2.2.1,<3
kfp-dsl==2.1.2
# pin kfp-pipeline-spec to an exact version, since this is the contract between a given KFP SDK version and the BE. we don't want an old version of the SDK to write new fields and have the BE reject the unsupported field (even if the new field is backward compatible from a proto perspective)
kfp-pipeline-spec==0.2.2
# Update the upper version whenever a new major version of the
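
Every hunk that follows makes the same mechanical change to the compiler test goldens: the runtime pip install line in each executor goes back to installing kfp instead of the short-lived kfp-dsl package. The substitution pattern, as a sketch:

    import re

    line = "pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'"
    print(re.sub(r"'kfp-dsl==(\d+\.\d+\.\d+)'", r"'kfp==\1'", line))
    # pip install --quiet --no-warn-script-location 'kfp==2.0.1'
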
@ -32,7 +32,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -48,7 +48,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -19,7 +19,7 @@ deploymentSpec:
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location --index-url\
\ https://pypi.org/simple --trusted-host https://pypi.org/simple 'yapf'\
\ 'kfp-dsl==2.0.1' && \"$0\" \"$@\"\n"
\ 'kfp==2.0.1' && \"$0\" \"$@\"\n"
- sh
- -ec
- 'program_path=$(mktemp -d)

@ -24,7 +24,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -32,7 +32,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -23,7 +23,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -29,7 +29,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -25,7 +25,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -23,7 +23,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -27,7 +27,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -56,7 +56,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -29,7 +29,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -19,7 +19,7 @@ deploymentSpec:
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location --index-url\
\ https://pypi.org/simple --trusted-host https://pypi.org/simple 'yapf'\
\ 'kfp-dsl==2.0.1' && \"$0\" \"$@\"\n"
\ 'kfp==2.0.1' && \"$0\" \"$@\"\n"
- sh
- -ec
- 'program_path=$(mktemp -d)

@ -126,7 +126,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -155,7 +155,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -78,7 +78,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -130,7 +130,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -81,7 +81,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -108,7 +108,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -135,7 +135,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -162,7 +162,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -285,7 +285,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -315,7 +315,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -345,7 +345,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -375,7 +375,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -403,7 +403,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -90,7 +90,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -136,7 +136,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -132,7 +132,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -158,7 +158,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -150,7 +150,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -177,7 +177,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -203,7 +203,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -229,7 +229,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -224,7 +224,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -251,7 +251,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -277,7 +277,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -303,7 +303,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -330,7 +330,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -357,7 +357,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -383,7 +383,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -75,7 +75,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -111,7 +111,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -206,7 +206,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -233,7 +233,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -259,7 +259,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -286,7 +286,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -129,7 +129,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -156,7 +156,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -183,7 +183,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -210,7 +210,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -74,7 +74,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -101,7 +101,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -161,7 +161,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -188,7 +188,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -152,7 +152,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -179,7 +179,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -206,7 +206,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -88,7 +88,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -116,7 +116,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -144,7 +144,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -171,7 +171,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -198,7 +198,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -94,7 +94,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -41,7 +41,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -65,7 +65,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -92,7 +92,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -119,7 +119,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -57,7 +57,7 @@ deploymentSpec:
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'aiplatform'\
\ 'kfp-dsl==2.0.1' 'kfp-dsl==2.0.1' && \"$0\" \"$@\"\n"
\ 'kfp==2.0.1' 'kfp==2.0.1' && \"$0\" \"$@\"\n"
- sh
- -ec
- 'program_path=$(mktemp -d)

@ -90,7 +90,7 @@ deploymentSpec:
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'aiplatform'\
\ 'kfp-dsl==2.0.1' && \"$0\" \"$@\"\n"
\ 'kfp==2.0.1' && \"$0\" \"$@\"\n"
- sh
- -ec
- 'program_path=$(mktemp -d)

@ -127,7 +127,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -159,7 +159,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -171,7 +171,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -198,7 +198,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -224,7 +224,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -250,7 +250,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -276,7 +276,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -302,7 +302,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -328,7 +328,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -354,7 +354,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -602,7 +602,7 @@ deploymentSpec:
|
|||
- -c
|
||||
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
|
||||
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
|
||||
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
|
||||
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
|
||||
\ && \"$0\" \"$@\"\n"
|
||||
- sh
|
||||
- -ec
|
||||
|
@ -631,7 +631,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -660,7 +660,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -688,7 +688,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -714,7 +714,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -741,7 +741,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -768,7 +768,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -795,7 +795,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -822,7 +822,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -849,7 +849,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -876,7 +876,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -903,7 +903,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -930,7 +930,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -60,7 +60,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -95,7 +95,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -60,7 +60,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec

@ -89,7 +89,7 @@ deploymentSpec:
- -c
- "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp-dsl==2.0.1'\
\ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.1'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec
Some files were not shown because too many files have changed in this diff.