chore(sdk): apply yapf formatting (#7414)

Connor McCarthy 2022-03-16 14:34:36 -06:00 committed by GitHub
parent 45c6a7093b
commit 4621aec146
22 changed files with 97 additions and 71 deletions
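
For reference, a pass like this can be reproduced with yapf's Python API. The following is a minimal sketch, assuming yapf is installed and using the 'google' style preset; the repo's actual .style.yapf configuration is not shown in this commit and may differ:

    from yapf.yapflib.yapf_api import FormatCode

    # An over-long one-liner of the kind this commit reflows (compare the
    # ttlStrategy hunk below). yapf parses but does not execute the code,
    # so the undefined names are fine here.
    source = ("workflow_spec['ttlStrategy'] = {'secondsAfterCompletion': "
              "pipeline_conf.ttl_seconds_after_finished}\n")

    # Recent yapf versions return a (formatted_code, changed) tuple.
    formatted, changed = FormatCode(source, style_config='google')
    print(changed)    # True: the line exceeded the column limit
    print(formatted)  # the dict is now split across lines, as in the diff

The equivalent command-line run would be along the lines of yapf --in-place --recursive over the SDK directory; the exact flags used for this commit are an assumption.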


@@ -16,8 +16,13 @@ from typing import Dict, List
 from kfp import compiler
 from kfp import dsl
-from kfp.dsl import (Dataset, Input, InputPath, Model, Output, OutputPath,
-                     component)
+from kfp.dsl import Dataset
+from kfp.dsl import Input
+from kfp.dsl import InputPath
+from kfp.dsl import Model
+from kfp.dsl import Output
+from kfp.dsl import OutputPath
+from kfp.dsl import component
 
 @component
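
Worth noting: yapf only reflows existing statements and will not rewrite a parenthesized import into one statement per name as the hunk above shows. That style matches isort's force_single_line option, so presumably isort (or a manual edit) was applied alongside yapf; a small sketch of the assumed behavior:

    import isort

    # The pre-change import from the hunk above.
    src = ("from kfp.dsl import (Dataset, Input, InputPath, Model, Output,\n"
           "                     OutputPath, component)\n")

    # force_single_line=True emits one 'from kfp.dsl import <name>' statement
    # per imported name, matching the post-change side of the hunk.
    print(isort.code(src, force_single_line=True))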


@@ -38,8 +38,8 @@ def my_pipeline(input_location: str = 'gs://test-bucket/pipeline_root',
     training_op(
         examples=ingestor.outputs['examples'],
         optimizer=optimizer,
-        n_epochs=n_epochs).set_cpu_limit('4').set_memory_limit(
-            '14Gi').add_node_selector_constraint('tpu-v3').set_gpu_limit('1'))
+        n_epochs=n_epochs).set_cpu_limit('4').set_memory_limit('14Gi')
+        .add_node_selector_constraint('tpu-v3').set_gpu_limit('1'))
 
 if __name__ == '__main__':


@@ -93,7 +93,7 @@ def component(func: Optional[Callable] = None,
     """
     if output_component_file is not None:
         raise Exception("output_component_file is not supported yet in v2 early"
-                    "releases and will be added back for v2.0.0 ")
+                        "releases and will be added back for v2.0.0 ")
     if func is None:
         return functools.partial(


@@ -106,9 +106,7 @@ class PipelineTask:
         self.task_spec = structures.TaskSpec(
             name=self.register_task_handler(),
-            inputs={
-                input_name: value for input_name, value in args.items()
-            },
+            inputs={input_name: value for input_name, value in args.items()},
             dependent_tasks=[],
             component_ref=component_spec.name,
             enable_caching=True,
@@ -338,8 +336,7 @@ class PipelineTask:
         resolved_container_spec = copy.deepcopy(container_spec)
         resolved_container_spec.command = expand_argument_list(
             container_spec.command)
-        resolved_container_spec.args = expand_argument_list(
-            container_spec.args)
+        resolved_container_spec.args = expand_argument_list(container_spec.args)
 
         return resolved_container_spec


@@ -375,7 +375,8 @@ class ComponentSpec(BaseModel):
                 raise ValueError(
                     f'Argument "{arg}" references non-existing input.')
             for placeholder in itertools.chain(arg.if_structure.then or [],
-                                               arg.if_structure.otherwise or []):
+                                               arg.if_structure.otherwise or
+                                               []):
                 cls._check_valid_placeholder_reference(valid_inputs,
                                                        valid_outputs,
                                                        placeholder)
@@ -413,15 +414,20 @@ class ComponentSpec(BaseModel):
         if isinstance(arg, str):
             return arg
         if 'inputValue' in arg:
-            return InputValuePlaceholder(input_name=utils.sanitize_input_name(arg['inputValue']))
+            return InputValuePlaceholder(
+                input_name=utils.sanitize_input_name(arg['inputValue']))
         if 'inputPath' in arg:
-            return InputPathPlaceholder(input_name=utils.sanitize_input_name(arg['inputPath']))
+            return InputPathPlaceholder(
+                input_name=utils.sanitize_input_name(arg['inputPath']))
         if 'inputUri' in arg:
-            return InputUriPlaceholder(input_name=utils.sanitize_input_name(arg['inputUri']))
+            return InputUriPlaceholder(
+                input_name=utils.sanitize_input_name(arg['inputUri']))
         if 'outputPath' in arg:
-            return OutputPathPlaceholder(output_name=utils.sanitize_input_name(arg['outputPath']))
+            return OutputPathPlaceholder(
+                output_name=utils.sanitize_input_name(arg['outputPath']))
         if 'outputUri' in arg:
-            return OutputUriPlaceholder(output_name=utils.sanitize_input_name(arg['outputUri']))
+            return OutputUriPlaceholder(
+                output_name=utils.sanitize_input_name(arg['outputUri']))
         if 'if' in arg:
             if_placeholder_values = arg['if']
             if_placeholder_values_then = list(if_placeholder_values['then'])
@@ -434,7 +440,8 @@ class ComponentSpec(BaseModel):
             IfPresentPlaceholderStructure.update_forward_refs()
             return IfPresentPlaceholder(
                 if_structure=IfPresentPlaceholderStructure(
-                    input_name=utils.sanitize_input_name(if_placeholder_values['cond']['isPresent']),
+                    input_name=utils.sanitize_input_name(
+                        if_placeholder_values['cond']['isPresent']),
                     then=list(
                         _transform_arg(val)
                         for val in if_placeholder_values_then),
@@ -497,7 +504,8 @@ class ComponentSpec(BaseModel):
                 for spec in component_dict.get('inputs', [])
             },
             outputs={
-                utils.sanitize_input_name(spec['name']): OutputSpec(type=spec.get('type', 'Artifact'))
+                utils.sanitize_input_name(spec['name']):
+                    OutputSpec(type=spec.get('type', 'Artifact'))
                 for spec in component_dict.get('outputs', [])
             })
@@ -554,8 +562,8 @@ class ComponentSpec(BaseModel):
                 for cmd in self.implementation.container.command or []
             ],
             args=[
-                _transform_arg(arg) for arg in
-                self.implementation.container.args or []
+                _transform_arg(arg)
+                for arg in self.implementation.container.args or []
             ],
             env={
                 name: _transform_arg(value) for name, value in


@@ -20,9 +20,8 @@ from absl.testing import parameterized
 from kfp.components.types import type_annotations
 from kfp.components.types.artifact_types import Model
 from kfp.components.types.type_annotations import (Input, InputAnnotation,
                                                    InputPath, Output,
-                                                   OutputAnnotation,
-                                                   OutputPath)
+                                                   OutputAnnotation, OutputPath)
 
 class AnnotationsTest(parameterized.TestCase):


@@ -346,13 +346,12 @@ class LocalClient:
         return cmd
 
     def _generate_cmd_for_docker_execution(
             self,
             run_name: str,
             pipeline: dsl.Pipeline,
             op: dsl.ContainerOp,
             stack: Dict[str, Any],
-            docker_options: List[str] = []
-    ) -> List[str]:
+            docker_options: List[str] = []) -> List[str]:
         """Generate the command to run the op in docker locally."""
         cmd = self._generate_cmd_for_subprocess_execution(
             run_name, pipeline, op, stack)
@@ -394,8 +393,8 @@ class LocalClient:
         for node in group_dag.topological_sort():
             subgroup = _get_subgroup(current_group.groups, node)
             if subgroup is not None:  # Node of DAG is subgroup
-                success = self._run_group(run_name, pipeline, pipeline_dag, subgroup,
-                                          stack, execution_mode)
+                success = self._run_group(run_name, pipeline, pipeline_dag,
+                                          subgroup, stack, execution_mode)
                 if not success:
                     return False
             else:  # Node of DAG is op
@@ -416,7 +415,8 @@ class LocalClient:
                     run_name, pipeline, op, stack)
             else:
                 cmd = self._generate_cmd_for_docker_execution(
-                    run_name, pipeline, op, stack, execution_mode.docker_options)
+                    run_name, pipeline, op, stack,
+                    execution_mode.docker_options)
             process = subprocess.Popen(
                 cmd,
                 shell=False,
@@ -492,8 +492,8 @@ class LocalClient:
             else:
                 raise Exception("Not implemented")
         else:
-            return self._run_group_dag(run_name, pipeline, pipeline_dag, current_group,
-                                       stack, execution_mode)
+            return self._run_group_dag(run_name, pipeline, pipeline_dag,
+                                       current_group, stack, execution_mode)
 
     def create_run_from_pipeline_func(
             self,
@@ -540,7 +540,7 @@ class LocalClient:
         run_name = pipeline.name.replace(" ", "_").lower() + "_" + run_version
 
         pipeline_dag = self._create_op_dag(pipeline)
-        success = self._run_group(run_name, pipeline, pipeline_dag, pipeline.groups[0],
-                                  {}, execution_mode)
+        success = self._run_group(run_name, pipeline, pipeline_dag,
+                                  pipeline.groups[0], {}, execution_mode)
 
         return RunPipelineResult(self, pipeline, run_name, success=success)


@@ -122,11 +122,13 @@ def _display_experiment(exp: kfp_server_api.ApiExperiment,
 @click.option(
     "--experiment-id",
     default=None,
-    help="The ID of the experiment to archive, can only supply either an experiment ID or name.")
+    help="The ID of the experiment to archive, can only supply either an experiment ID or name."
+)
 @click.option(
     "--experiment-name",
     default=None,
-    help="The name of the experiment to archive, can only supply either an experiment ID or name.")
+    help="The name of the experiment to archive, can only supply either an experiment ID or name."
+)
 @click.pass_context
 def archive(ctx: click.Context, experiment_id: str, experiment_name: str):
     """Archive an experiment"""


@@ -48,10 +48,12 @@ def recurring_run():
     help='The RFC3339 time string of the time when to end the job.')
 @click.option(
     '--experiment-id',
-    help='The ID of the experiment to create the recurring run under, can only supply either an experiment ID or name.')
+    help='The ID of the experiment to create the recurring run under, can only supply either an experiment ID or name.'
+)
 @click.option(
     '--experiment-name',
-    help='The name of the experiment to create the recurring run under, can only supply either an experiment ID or name.')
+    help='The name of the experiment to create the recurring run under, can only supply either an experiment ID or name.'
+)
 @click.option('--job-name', help='The name of the recurring run.')
 @click.option(
     '--interval-second',


@@ -129,11 +129,15 @@ def rewrite_data_passing_to_use_volumes(
                     'value': output_subpath,  # Requires Argo 2.3.0+
                 })
             whitelist = ['mlpipeline-ui-metadata', 'mlpipeline-metrics']
-            output_artifacts = [artifact for artifact in output_artifacts if artifact['name'] in whitelist]
+            output_artifacts = [
+                artifact for artifact in output_artifacts
+                if artifact['name'] in whitelist
+            ]
             if not output_artifacts:
                 template.get('outputs', {}).pop('artifacts', None)
             else:
-                template.get('outputs', {}).update({'artifacts': output_artifacts})
+                template.get('outputs',
+                             {}).update({'artifacts': output_artifacts})
 
     # Rewrite DAG templates
     for template in templates:


@@ -319,9 +319,9 @@ def _op_to_template(op: BaseOp):
                     and re.match('^{{inputs.parameters.*}}$', str(param)):
                 if not 'containers' in podSpecPatch:
-                    podSpecPatch['containers'] = [{
-                            'name': 'main',
-                            'resources': {}
-                        }]
+                    podSpecPatch['containers'] = [{
+                        'name': 'main',
+                        'resources': {}
+                    }]
                 if setting not in podSpecPatch['containers'][0][
                         'resources']:
                     podSpecPatch['containers'][0]['resources'][setting] = {


@@ -831,7 +831,10 @@ class Compiler(object):
         # set ttl after workflow finishes
         if pipeline_conf.ttl_seconds_after_finished >= 0:
-            workflow['spec']['ttlStrategy'] = {'secondsAfterCompletion': pipeline_conf.ttl_seconds_after_finished}
+            workflow['spec']['ttlStrategy'] = {
+                'secondsAfterCompletion':
+                    pipeline_conf.ttl_seconds_after_finished
+            }
 
         if pipeline_conf._pod_disruption_budget_min_available:
             pod_disruption_budget = {


@@ -82,8 +82,9 @@ def _compile_pipeline_function(
     else:
         pipeline_func = pipeline_funcs[0]
 
-    kfp.deprecated.compiler.Compiler(mode=mode).compile(pipeline_func, output_path,
-                                                        type_check, pipeline_conf)
+    kfp.deprecated.compiler.Compiler(mode=mode).compile(pipeline_func,
+                                                        output_path, type_check,
+                                                        pipeline_conf)
 
 class PipelineCollectorContext():


@@ -136,7 +136,7 @@ def load_component_from_spec(component_spec):
     if component_spec is None:
         raise TypeError
     return _create_task_factory_from_component_spec(
-            component_spec=component_spec)
+        component_spec=component_spec)
 
 def _fix_component_uri(uri: str) -> str:


@@ -21,8 +21,9 @@ from pathlib import Path
 from typing import Callable, NamedTuple, Sequence
 
 from kfp.deprecated import components as comp
-from kfp.deprecated.components import (InputBinaryFile, InputPath, InputTextFile,
-                                       OutputBinaryFile, OutputPath, OutputTextFile)
+from kfp.deprecated.components import (InputBinaryFile, InputPath,
+                                       InputTextFile, OutputBinaryFile,
+                                       OutputPath, OutputTextFile)
 from kfp.deprecated.components._components import _resolve_command_line_and_paths
 from kfp.deprecated.components.structures import InputSpec, OutputSpec


@@ -18,7 +18,6 @@ from .types import check_types, InconsistentTypeException
 from ._ops_group import Graph
 import kfp.deprecated as kfp
 
-
 # @deprecated(
 #     version='0.2.6',
 #     reason='This decorator does not seem to be used, so we deprecate it. '


@@ -631,7 +631,7 @@ def _attach_v2_specs(
         argument_is_parameter_type = type_utils.is_parameter_type(argument_type)
         input_is_parameter_type = type_utils.is_parameter_type(input_type)
         if COMPILING_FOR_V2 and (argument_is_parameter_type !=
-                                input_is_parameter_type):
+                                 input_is_parameter_type):
             if isinstance(argument_value, dsl.PipelineParam):
                 param_or_value_msg = 'PipelineParam "{}"'.format(
                     argument_value.full_name)


@@ -28,7 +28,6 @@ from kubernetes.client.models import (V1Container, V1ContainerPort,
                                       V1SecurityContext, V1Volume,
                                       V1VolumeDevice, V1VolumeMount)
 
-
 # generics
 T = TypeVar('T')
 
 # type alias: either a string or a list of string
@@ -1446,7 +1445,8 @@ class ContainerOp(BaseOp):
             is_legacy_name, normalized_name = _is_legacy_output_name(
                 output.name)
             if is_legacy_name and normalized_name in self.output_artifact_paths:
-                output_filename = self.output_artifact_paths[normalized_name]
+                output_filename = self.output_artifact_paths[
+                    normalized_name]
             else:
                 output_filename = _components._generate_output_file_name(
                     output.name)


@@ -105,7 +105,8 @@ class VolumeOp(ResourceOp):
         if not match_serialized_pipelineparam(str(resource_name)):
             resource_name = sanitize_k8s_name(resource_name)
         pvc_metadata = V1ObjectMeta(
-            name="{{workflow.name}}-%s" % resource_name if generate_unique_name else resource_name,
+            name="{{workflow.name}}-%s" %
+            resource_name if generate_unique_name else resource_name,
             annotations=annotations)
         requested_resources = V1ResourceRequirements(requests={"storage": size})
         pvc_spec = V1PersistentVolumeClaimSpec(


@@ -12,14 +12,18 @@ processor_op = load_component_from_file(
 consumer_op = load_component_from_file(
     str(test_data_dir / 'consume_2.component.yaml'))
 
 def metadata_and_metrics() -> NamedTuple(
     "Outputs",
-    [("mlpipeline_ui_metadata", "UI_metadata"), ("mlpipeline_metrics", "Metrics")],
+    [("mlpipeline_ui_metadata", "UI_metadata"), ("mlpipeline_metrics", "Metrics"
+                                                )],
 ):
     metadata = {
-        "outputs": [
-            {"storage": "inline", "source": "*this should be bold*", "type": "markdown"}
-        ]
+        "outputs": [{
+            "storage": "inline",
+            "source": "*this should be bold*",
+            "type": "markdown"
+        }]
     }
     metrics = {
         "metrics": [
@@ -36,9 +40,10 @@ def metadata_and_metrics() -> NamedTuple(
     from collections import namedtuple
     import json
 
-    return namedtuple("output", ["mlpipeline_ui_metadata", "mlpipeline_metrics"])(
-        json.dumps(metadata), json.dumps(metrics)
-    )
+    return namedtuple("output",
+                      ["mlpipeline_ui_metadata", "mlpipeline_metrics"])(
+                          json.dumps(metadata), json.dumps(metrics))
 
 @kfp.dsl.pipeline()
 def artifact_passing_pipeline():


@@ -23,6 +23,7 @@ from kfp.deprecated.dsl.types import Integer, GCSPath, InconsistentTypeException
 from kfp.deprecated.dsl import ContainerOp, Pipeline, PipelineParam
 from kfp.deprecated.components.structures import ComponentSpec, InputSpec, OutputSpec
 
+
 @unittest.skip("deprecated")
 class TestGraphComponent(unittest.TestCase):


@@ -228,14 +228,12 @@ class LocalRunnerTest(unittest.TestCase):
         check_option()
 
         run_result = run_pipeline_func_locally(
-            _pipeline,
-            {},
-            execution_mode=LocalClient.ExecutionMode(mode="docker",
-                                                     docker_options=["-e", "foo=bar"])
-        )
+            _pipeline, {},
+            execution_mode=LocalClient.ExecutionMode(
+                mode="docker", docker_options=["-e", "foo=bar"]))
         assert run_result.success
         output_file_path = run_result.get_output_file("check-option")
         with open(output_file_path, "r") as f:
             line = f.readline()
             assert "bar" in line