chore(sdk): clean up kfp code style (#8027)

* remove unused imports

* use google as isort profile

* sort imports

* format with yapf

* clean up end-of-file newlines, trailing whitespace, and double-quoted strings
Connor McCarthy 2022-07-14 18:06:34 -06:00 committed by GitHub
parent 4be213a773
commit 13736b85b6
126 changed files with 311 additions and 387 deletions
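
For orientation before the per-file hunks: the Google isort profile sets force_single_line, which is why combined imports are split apart throughout this diff. A hedged before/after sketch, using module names taken from the hunks below:

# Before: one combined import (default isort permits this)
from kfp.cli.diagnose_me import dev_env, utility

# After `isort --profile google`: each name gets its own import line
from kfp.cli.diagnose_me import dev_env
from kfp.cli.diagnose_me import utility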

View File

@ -20,6 +20,7 @@ repos:
hooks:
- id: isort
name: isort
entry: isort --profile google
- repo: https://github.com/pre-commit/mirrors-yapf
rev: "v0.32.0"
hooks:
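
The hook entry above enforces the profile on every commit. To reproduce a single sort locally, isort's Python API accepts the same profile name; a minimal sketch (the printed result is my expectation under the profile's force_single_line setting, not output captured from this change):

import isort

# A combined `from` import is split one name per line under the google profile.
print(isort.code('from os import path, sep\n', profile='google'))
# from os import path
# from os import sep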

View File

@ -14,7 +14,7 @@
# `kfp` is a namespace package.
# https://packaging.python.org/guides/packaging-namespace-packages/#pkgutil-style-namespace-packages
__path__ = __import__("pkgutil").extend_path(__path__, __name__)
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
__version__ = '2.0.0-beta.1'

View File

@ -10,4 +10,4 @@
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# limitations under the License.
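
The two license lines above differ only in the end-of-file newline the commit adds, which this view cannot display. The commit does not say which tool made these fixes, so here is a minimal stdlib sketch of an equivalent cleanup, covering both the trailing-whitespace and final-newline bullets:

import pathlib

def clean(path: pathlib.Path) -> None:
    # Strip trailing whitespace per line, then end with exactly one newline.
    lines = [line.rstrip() for line in path.read_text().splitlines()]
    path.write_text('\n'.join(lines) + '\n')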

View File

@ -12,8 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from itertools import chain
import os
import click
import kfp

View File

@ -18,12 +18,12 @@ import os
import re
import subprocess
import tempfile
import unittest
from typing import List
import unittest
from unittest import mock
import click
from absl.testing import parameterized
import click
from click import testing
from kfp.cli import cli
from kfp.cli import dsl_compile
@ -113,8 +113,8 @@ class TestCliAutocomplete(parameterized.TestCase):
os.makedirs(os.path.dirname(temp_path), exist_ok=True)
existing_file_contents = [
"something\n",
"something else" + ('\n' if has_trailing_newline else ''),
'something\n',
'something else' + ('\n' if has_trailing_newline else ''),
]
with open(temp_path, 'w') as f:
f.writelines(existing_file_contents)

View File

@ -19,8 +19,8 @@ import shutil
import subprocess
import sys
import tempfile
import warnings
from typing import List, Optional
import warnings
import click
@ -325,7 +325,6 @@ class ComponentBuilder():
@click.group()
def component():
"""Builds shareable, containerized components."""
pass
@component.command()
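
The `pass` deleted above (and in the many similar hunks below) was redundant: a docstring is already a complete function body. A minimal illustration:

def with_pass():
    """Builds shareable, containerized components."""
    pass  # redundant; the docstring alone is a valid body

def without_pass():
    """Builds shareable, containerized components."""
    # equivalent, and the form this commit adopts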

View File

@ -16,12 +16,12 @@ import contextlib
import os
import pathlib
import textwrap
import unittest
from typing import List, Optional, Union
import unittest
from unittest import mock
import docker
from click import testing
import docker
docker = mock.Mock()

View File

@ -13,11 +13,12 @@
# limitations under the License.
"""Integration tests for diagnose_me.dev_env."""
import unittest
from typing import Text
import unittest
from unittest import mock
from kfp.cli.diagnose_me import dev_env, utility
from kfp.cli.diagnose_me import dev_env
from kfp.cli.diagnose_me import utility
class DevEnvTest(unittest.TestCase):

View File

@ -13,11 +13,12 @@
# limitations under the License.
"""Tests for diagnose_me.gcp."""
import unittest
from typing import Text
import unittest
from unittest import mock
from kfp.cli.diagnose_me import gcp, utility
from kfp.cli.diagnose_me import gcp
from kfp.cli.diagnose_me import utility
class GoogleCloudTest(unittest.TestCase):

View File

@ -13,8 +13,8 @@
# limitations under the License.
"""Tests for diagnose_me.kubernetes_cluster."""
import unittest
from typing import Text
import unittest
from unittest import mock
from kfp.cli.diagnose_me import kubernetes_cluster as dkc

View File

@ -19,4 +19,3 @@ from kfp.cli import dsl_compile
@click.group(commands={'compile': dsl_compile.dsl_compile})
def dsl():
"""Command group for compiling DSL to IR."""
pass

View File

@ -18,7 +18,7 @@ import logging
import os
import sys
import types
from typing import Any, Callable, Dict, List, Optional, Union
from typing import Any, Callable, Dict, Optional, Union
import click
from kfp import compiler

View File

@ -2,13 +2,11 @@ import click
from kfp import client
from kfp.cli import output
from kfp.cli.utils import parsing
from kfp_server_api.models.api_experiment import ApiExperiment
@click.group()
def experiment():
"""Manage experiment resources."""
pass
@experiment.command()

View File

@ -28,7 +28,6 @@ from kfp.cli.utils import parsing
}))
def pipeline():
"""Manage pipeline resources."""
pass
@pipeline.command()

View File

@ -23,7 +23,6 @@ from kfp.cli.utils import parsing
@click.group()
def recurring_run():
"""Manage recurring run resources."""
pass
either_option_required = 'Either --experiment-id or --experiment-name is required.'

View File

@ -30,7 +30,6 @@ from kfp.cli.utils import parsing
{'submit': 'create'}))
def run():
"""Manage run resources."""
pass
@run.command()

View File

@ -21,11 +21,11 @@ import google.auth
import google.auth.app_engine
import google.auth.compute_engine.credentials
import google.auth.iam
from google.auth.transport.requests import Request
import google.oauth2.credentials
import google.oauth2.service_account
import requests
import requests_toolbelt.adapters.appengine
from google.auth.transport.requests import Request
IAM_SCOPE = 'https://www.googleapis.com/auth/iam'
OAUTH_TOKEN_URI = 'https://www.googleapis.com/oauth2/v4/token'
@ -41,7 +41,7 @@ def get_gcp_access_token():
token = None
try:
creds, project = google.auth.default(
scopes=["https://www.googleapis.com/auth/cloud-platform"])
scopes=['https://www.googleapis.com/auth/cloud-platform'])
if not creds.valid:
auth_req = Request()
creds.refresh(auth_req)
@ -190,36 +190,36 @@ def get_refresh_token_from_client_id(client_id, client_secret):
def get_auth_code(client_id):
auth_url = "https://accounts.google.com/o/oauth2/v2/auth?client_id=%s&response_type=code&scope=openid%%20email&access_type=offline&redirect_uri=urn:ietf:wg:oauth:2.0:oob" % client_id
auth_url = 'https://accounts.google.com/o/oauth2/v2/auth?client_id=%s&response_type=code&scope=openid%%20email&access_type=offline&redirect_uri=urn:ietf:wg:oauth:2.0:oob' % client_id
print(auth_url)
open_new_tab(auth_url)
return input(
"If there's no browser window prompt, please direct to the URL above, "
"then copy and paste the authorization code here: ")
'then copy and paste the authorization code here: ')
def get_refresh_token_from_code(auth_code, client_id, client_secret):
payload = {
"code": auth_code,
"client_id": client_id,
"client_secret": client_secret,
"redirect_uri": "urn:ietf:wg:oauth:2.0:oob",
"grant_type": "authorization_code"
'code': auth_code,
'client_id': client_id,
'client_secret': client_secret,
'redirect_uri': 'urn:ietf:wg:oauth:2.0:oob',
'grant_type': 'authorization_code'
}
res = requests.post(OAUTH_TOKEN_URI, data=payload)
res.raise_for_status()
return str(json.loads(res.text)[u"refresh_token"])
return str(json.loads(res.text)[u'refresh_token'])
def id_token_from_refresh_token(client_id, client_secret, refresh_token,
audience):
payload = {
"client_id": client_id,
"client_secret": client_secret,
"refresh_token": refresh_token,
"grant_type": "refresh_token",
"audience": audience
'client_id': client_id,
'client_secret': client_secret,
'refresh_token': refresh_token,
'grant_type': 'refresh_token',
'audience': audience
}
res = requests.post(OAUTH_TOKEN_URI, data=payload)
res.raise_for_status()
return str(json.loads(res.text)[u"id_token"])
return str(json.loads(res.text)[u'id_token'])

View File

@ -22,15 +22,15 @@ import re
import tarfile
import tempfile
import time
import warnings
import zipfile
from types import ModuleType
from typing import Any, Callable, List, Mapping, Optional
import warnings
import zipfile
import kfp_server_api
import yaml
from kfp import compiler
from kfp.client import auth
import kfp_server_api
import yaml
# Operators on scalar values. Only applies to one of |int_value|,
# |long_value|, |string_value| or |timestamp_value|.

View File

@ -15,13 +15,13 @@
import os
import tempfile
import unittest
import yaml
from absl.testing import parameterized
from kfp.client import client
from kfp.compiler import Compiler
from kfp.dsl import component
from kfp.dsl import pipeline
import yaml
class TestValidatePipelineName(parameterized.TestCase):

View File

@ -700,7 +700,6 @@ class TestWriteToFileTypes(parameterized.TestCase):
self.assertTrue(os.path.exists(target_json_file))
with open(target_json_file, 'r') as f:
f.read()
pass
def test_compile_fails_with_bad_pipeline_func(self):
with self.assertRaisesRegex(ValueError,

View File

@ -17,7 +17,6 @@ import collections
import json
import re
from typing import Any, Dict, List, Mapping, Optional, Tuple, Union
import warnings
from google.protobuf import json_format
from google.protobuf import struct_pb2
@ -35,7 +34,6 @@ from kfp.components import utils as component_utils
from kfp.components.types import artifact_types
from kfp.components.types import type_utils
from kfp.pipeline_spec import pipeline_spec_pb2
import yaml
GroupOrTaskType = Union[tasks_group.TasksGroup, pipeline_task.PipelineTask]

View File

@ -13,8 +13,6 @@
# limitations under the License.
"""Tests for kfp.compiler.pipeline_spec_builder."""
import os
import tempfile
import unittest
from absl.testing import parameterized

View File

@ -11,10 +11,14 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kfp.dsl import component
from kfp.dsl import Input, Dataset, Metrics, Model
from typing import NamedTuple
from kfp.dsl import component
from kfp.dsl import Dataset
from kfp.dsl import Input
from kfp.dsl import Metrics
from kfp.dsl import Model
@component
def output_named_tuple(
@ -41,4 +45,4 @@ def output_named_tuple(
from collections import namedtuple
output = namedtuple('Outputs', ['scalar', 'metrics', 'model'])
return output(scalar, metrics, model)
return output(scalar, metrics, model)

View File

@ -15,7 +15,6 @@ from typing import NamedTuple
from kfp.dsl import component
from kfp.dsl import Dataset
from kfp.dsl import importer
from kfp.dsl import Input
from kfp.dsl import Model

View File

@ -13,7 +13,6 @@
# limitations under the License.
from kfp import compiler
from kfp import components
from kfp import dsl
from kfp.dsl import component

View File

@ -17,7 +17,6 @@ from kfp import components
from kfp import dsl
from kfp.dsl import component
from kfp.dsl import Input
from kfp.dsl import Output
class VertexModel(dsl.Artifact):

View File

@ -13,14 +13,9 @@
# limitations under the License.
"""Pipeline with Metrics outputs."""
from typing import NamedTuple
from kfp import compiler
from kfp import components
from kfp import dsl
from kfp.dsl import component
from kfp.dsl import Dataset
from kfp.dsl import Input
from kfp.dsl import Metrics
from kfp.dsl import Output

View File

@ -13,7 +13,6 @@
# limitations under the License.
from kfp import compiler
from kfp import components
from kfp import dsl
from kfp.dsl import component

View File

@ -13,7 +13,6 @@
# limitations under the License.
from kfp import compiler
from kfp import components
from kfp import dsl
from kfp.dsl import component

View File

@ -16,7 +16,6 @@
from kfp import compiler
from kfp import components
from kfp import dsl
from kfp.dsl import component
exit_op = components.load_component_from_text("""
name: Exit Op

View File

@ -12,8 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import pathlib
from kfp import compiler
from kfp import components
from kfp import dsl

View File

@ -12,8 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import pathlib
from kfp import compiler
from kfp import components
from kfp import dsl

View File

@ -12,6 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from kfp.components.yaml_component import load_component_from_text
from kfp.components.yaml_component import load_component_from_file
from kfp.components.yaml_component import load_component_from_text
from kfp.components.yaml_component import load_component_from_url

View File

@ -101,4 +101,3 @@ class BaseComponent(metaclass=abc.ABCMeta):
the function. For "Bring your own container" component, the
implementation of this method could be `docker run`.
"""
pass

View File

@ -13,11 +13,11 @@
# limitations under the License.
import collections
from collections import abc
import dataclasses
import inspect
import json
import pprint
from collections import abc
from typing import (Any, Dict, ForwardRef, Iterable, Iterator, Mapping,
MutableMapping, MutableSequence, Optional, OrderedDict,
Sequence, Tuple, Type, TypeVar, Union)

View File

@ -14,10 +14,9 @@
import dataclasses
import functools
import unittest
from collections import abc
from typing import (Any, Dict, List, Mapping, MutableMapping, MutableSequence,
Optional, OrderedDict, Sequence, Set, Tuple, Union)
import unittest
from absl.testing import parameterized
from kfp.components import base_model

View File

@ -13,8 +13,8 @@
# limitations under the License.
import functools
import warnings
from typing import Callable, List, Optional
import warnings
from kfp.components import component_factory

View File

@ -17,8 +17,8 @@ import itertools
import pathlib
import re
import textwrap
import warnings
from typing import Callable, List, Optional, Tuple
import warnings
import docstring_parser
from kfp.components import placeholders

View File

@ -15,8 +15,9 @@ import inspect
import json
from typing import Any, Callable, Dict, List, Optional, Union
from kfp.components.types import artifact_types, type_annotations
from kfp.components import task_final_status
from kfp.components.types import artifact_types
from kfp.components.types import type_annotations
class Executor():

View File

@ -16,8 +16,8 @@
import json
import os
import tempfile
import unittest
from typing import Callable, Dict, List, NamedTuple, Optional
import unittest
from kfp.components import executor
from kfp.components.task_final_status import PipelineTaskFinalStatus
@ -107,7 +107,7 @@ class ExecutorTest(unittest.TestCase):
if executor_input is None:
executor_input = _EXECUTOR_INPUT
executor_input_dict = json.loads(executor_input %
{"test_dir": self._test_dir})
{'test_dir': self._test_dir})
return executor.Executor(
executor_input=executor_input_dict, function_to_execute=func)
@ -115,7 +115,7 @@ class ExecutorTest(unittest.TestCase):
def test_input_parameter(self):
def test_func(input_parameter: str):
self.assertEqual(input_parameter, "Hello, KFP")
self.assertEqual(input_parameter, 'Hello, KFP')
self._get_executor(test_func).execute()
@ -239,15 +239,15 @@ class ExecutorTest(unittest.TestCase):
second_message: str,
third_message: str,
) -> str:
return first_message + ", " + second_message + ", " + third_message
return first_message + ', ' + second_message + ', ' + third_message
self._get_executor(test_func, executor_input).execute()
with open(os.path.join(self._test_dir, 'output_metadata.json'),
'r') as f:
output_metadata = json.loads(f.read())
self.assertDictEqual(output_metadata, {
"parameterValues": {
"Output": "Hello, , World"
'parameterValues': {
'Output': 'Hello, , World'
},
})
@ -279,8 +279,8 @@ class ExecutorTest(unittest.TestCase):
'r') as f:
output_metadata = json.loads(f.read())
self.assertDictEqual(output_metadata, {
"parameterValues": {
"Output": 42
'parameterValues': {
'Output': 42
},
})
@ -312,8 +312,8 @@ class ExecutorTest(unittest.TestCase):
'r') as f:
output_metadata = json.loads(f.read())
self.assertDictEqual(output_metadata, {
"parameterValues": {
"Output": 1.2
'parameterValues': {
'Output': 1.2
},
})
@ -345,8 +345,8 @@ class ExecutorTest(unittest.TestCase):
'r') as f:
output_metadata = json.loads(f.read())
self.assertDictEqual(output_metadata, {
"parameterValues": {
"Output": [40, 2]
'parameterValues': {
'Output': [40, 2]
},
})
@ -371,7 +371,7 @@ class ExecutorTest(unittest.TestCase):
"""
def test_func(first: int, second: int) -> Dict:
return {"first": first, "second": second}
return {'first': first, 'second': second}
self._get_executor(test_func, executor_input).execute()
with open(os.path.join(self._test_dir, 'output_metadata.json'),
@ -414,8 +414,8 @@ class ExecutorTest(unittest.TestCase):
'r') as f:
output_metadata = json.loads(f.read())
self.assertDictEqual(output_metadata, {
"parameterValues": {
"Output": [40, 2]
'parameterValues': {
'Output': [40, 2]
},
})
@ -440,17 +440,17 @@ class ExecutorTest(unittest.TestCase):
"""
def test_func(first: int, second: int) -> Dict[str, int]:
return {"first": first, "second": second}
return {'first': first, 'second': second}
self._get_executor(test_func, executor_input).execute()
with open(os.path.join(self._test_dir, 'output_metadata.json'),
'r') as f:
output_metadata = json.loads(f.read())
self.assertDictEqual(output_metadata, {
"parameterValues": {
"Output": {
"first": 40,
"second": 2
'parameterValues': {
'Output': {
'first': 40,
'second': 2
}
},
})
@ -484,7 +484,7 @@ class ExecutorTest(unittest.TestCase):
"""
def test_func(first: str, second: str) -> Artifact:
return first + ", " + second
return first + ', ' + second
self._get_executor(test_func, executor_input).execute()
with open(os.path.join(self._test_dir, 'output_metadata.json'),
@ -505,7 +505,7 @@ class ExecutorTest(unittest.TestCase):
with open(os.path.join(self._test_dir, 'some-bucket/output'), 'r') as f:
artifact_payload = f.read()
self.assertEqual(artifact_payload, "Hello, World")
self.assertEqual(artifact_payload, 'Hello, World')
def test_named_tuple_output(self):
executor_input = """\
@ -539,22 +539,22 @@ class ExecutorTest(unittest.TestCase):
# Functions returning named tuples should work.
def func_returning_named_tuple() -> NamedTuple('Outputs', [
("output_dataset", Dataset),
("output_int", int),
("output_string", str),
('output_dataset', Dataset),
('output_int', int),
('output_string', str),
]):
from collections import namedtuple
output = namedtuple(
'Outputs', ['output_dataset', 'output_int', 'output_string'])
return output("Dataset contents", 101, "Some output string")
return output('Dataset contents', 101, 'Some output string')
# Functions returning plain tuples should work too.
def func_returning_plain_tuple() -> NamedTuple('Outputs', [
("output_dataset", Dataset),
("output_int", int),
("output_string", str),
('output_dataset', Dataset),
('output_int', int),
('output_string', str),
]):
return ("Dataset contents", 101, "Some output string")
return ('Dataset contents', 101, 'Some output string')
for test_func in [
func_returning_named_tuple, func_returning_plain_tuple
@ -575,9 +575,9 @@ class ExecutorTest(unittest.TestCase):
}]
}
},
"parameterValues": {
"output_int": 101,
"output_string": "Some output string"
'parameterValues': {
'output_int': 101,
'output_string': 'Some output string'
},
})
@ -585,7 +585,7 @@ class ExecutorTest(unittest.TestCase):
os.path.join(self._test_dir, 'some-bucket/output_dataset'),
'r') as f:
artifact_payload = f.read()
self.assertEqual(artifact_payload, "Dataset contents")
self.assertEqual(artifact_payload, 'Dataset contents')
def test_function_with_optional_inputs(self):
executor_input = """\
@ -634,8 +634,8 @@ class ExecutorTest(unittest.TestCase):
output_metadata = json.loads(f.read())
self.assertDictEqual(
output_metadata, {
"parameterValues": {
"Output": "Hello (<class 'str'>), "
'parameterValues': {
'Output': "Hello (<class 'str'>), "
"World (<class 'str'>), "
"None (<class 'NoneType'>), "
"abc (<class 'str'>), "

View File

@ -15,8 +15,8 @@
import unittest
from absl.testing import parameterized
from kfp.components import pipeline_channel
from kfp.components import for_loop
from kfp.components import pipeline_channel
class ForLoopTest(parameterized.TestCase):

View File

@ -11,10 +11,9 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Dict, Optional
import configparser
import pathlib
from typing import Dict, Optional
import warnings
_KFP_CONFIG_FILE = 'kfp_config.ini'

View File

@ -304,7 +304,7 @@ class ConcatPlaceholder(base_model.BaseModel, Placeholder):
def to_placeholder_struct(self) -> Dict[str, Any]:
return {
"Concat": [
'Concat': [
maybe_convert_placeholder_to_placeholder_string(item)
for item in self.items
]
@ -355,7 +355,7 @@ class IfPresentPlaceholder(base_model.BaseModel, Placeholder):
@classmethod
def is_match(cls, string: str) -> bool:
try:
return "IfPresent" in json.loads(string)
return 'IfPresent' in json.loads(string)
except json.decoder.JSONDecodeError:
return False
@ -364,13 +364,13 @@ class IfPresentPlaceholder(base_model.BaseModel, Placeholder):
maybe_convert_placeholder_to_placeholder_string(item)
for item in self.then
] if isinstance(self.then, list) else self.then
struct = {"IfPresent": {"InputName": self.input_name, "Then": then}}
struct = {'IfPresent': {'InputName': self.input_name, 'Then': then}}
if self.else_:
otherwise = [
maybe_convert_placeholder_to_placeholder_string(item)
for item in self.else_
] if isinstance(self.else_, list) else self.else_
struct["IfPresent"]["Else"] = otherwise
struct['IfPresent']['Else'] = otherwise
return struct
def to_placeholder_string(self) -> str:

View File

@ -17,7 +17,6 @@ import unittest
from absl.testing import parameterized
from kfp.components import placeholders
from kfp.components import structures
class TestExecutorInputPlaceholder(parameterized.TestCase):

View File

@ -17,8 +17,8 @@ import enum
from typing import Optional, Union
from kfp.components import for_loop
from kfp.components import pipeline_context
from kfp.components import pipeline_channel
from kfp.components import pipeline_context
from kfp.components import pipeline_task

View File

@ -16,8 +16,7 @@
These are only compatible with v2 Pipelines.
"""
import os
from typing import Dict, Generic, List, Optional, Type, TypeVar, Union
from typing import Dict, List, Optional
_GCS_LOCAL_MOUNT_PREFIX = '/gcs/'
_MINIO_LOCAL_MOUNT_PREFIX = '/minio/'

View File

@ -13,9 +13,9 @@
# limitations under the License.
"""Tests for kfp.components.types.artifact_types."""
import unittest
import json
import os
import unittest
from absl.testing import parameterized
from kfp.components.types import artifact_types
@ -59,89 +59,89 @@ class ArtifactsTest(parameterized.TestCase):
@parameterized.parameters(
{
'runtime_artifact': {
"metadata": {},
"name": "input_artifact_one",
"type": {
"schemaTitle": "system.Artifact"
'metadata': {},
'name': 'input_artifact_one',
'type': {
'schemaTitle': 'system.Artifact'
},
"uri": "gs://some-bucket/input_artifact_one"
'uri': 'gs://some-bucket/input_artifact_one'
},
'expected_type': artifact_types.Artifact,
},
{
'runtime_artifact': {
"metadata": {},
"name": "input_artifact_one",
"type": {
"schemaTitle": "system.Model"
'metadata': {},
'name': 'input_artifact_one',
'type': {
'schemaTitle': 'system.Model'
},
"uri": "gs://some-bucket/input_artifact_one"
'uri': 'gs://some-bucket/input_artifact_one'
},
'expected_type': artifact_types.Model,
},
{
'runtime_artifact': {
"metadata": {},
"name": "input_artifact_one",
"type": {
"schemaTitle": "system.Dataset"
'metadata': {},
'name': 'input_artifact_one',
'type': {
'schemaTitle': 'system.Dataset'
},
"uri": "gs://some-bucket/input_artifact_one"
'uri': 'gs://some-bucket/input_artifact_one'
},
'expected_type': artifact_types.Dataset,
},
{
'runtime_artifact': {
"metadata": {},
"name": "input_artifact_one",
"type": {
"schemaTitle": "system.Metrics"
'metadata': {},
'name': 'input_artifact_one',
'type': {
'schemaTitle': 'system.Metrics'
},
"uri": "gs://some-bucket/input_artifact_one"
'uri': 'gs://some-bucket/input_artifact_one'
},
'expected_type': artifact_types.Metrics,
},
{
'runtime_artifact': {
"metadata": {},
"name": "input_artifact_one",
"type": {
"schemaTitle": "system.ClassificationMetrics"
'metadata': {},
'name': 'input_artifact_one',
'type': {
'schemaTitle': 'system.ClassificationMetrics'
},
"uri": "gs://some-bucket/input_artifact_one"
'uri': 'gs://some-bucket/input_artifact_one'
},
'expected_type': artifact_types.ClassificationMetrics,
},
{
'runtime_artifact': {
"metadata": {},
"name": "input_artifact_one",
"type": {
"schemaTitle": "system.SlicedClassificationMetrics"
'metadata': {},
'name': 'input_artifact_one',
'type': {
'schemaTitle': 'system.SlicedClassificationMetrics'
},
"uri": "gs://some-bucket/input_artifact_one"
'uri': 'gs://some-bucket/input_artifact_one'
},
'expected_type': artifact_types.SlicedClassificationMetrics,
},
{
'runtime_artifact': {
"metadata": {},
"name": "input_artifact_one",
"type": {
"schemaTitle": "system.HTML"
'metadata': {},
'name': 'input_artifact_one',
'type': {
'schemaTitle': 'system.HTML'
},
"uri": "gs://some-bucket/input_artifact_one"
'uri': 'gs://some-bucket/input_artifact_one'
},
'expected_type': artifact_types.HTML,
},
{
'runtime_artifact': {
"metadata": {},
"name": "input_artifact_one",
"type": {
"schemaTitle": "system.Markdown"
'metadata': {},
'name': 'input_artifact_one',
'type': {
'schemaTitle': 'system.Markdown'
},
"uri": "gs://some-bucket/input_artifact_one"
'uri': 'gs://some-bucket/input_artifact_one'
},
'expected_type': artifact_types.Markdown,
},

View File

@ -53,12 +53,10 @@ class InputPath:
class InputAnnotation():
"""Marker type for input artifacts."""
pass
class OutputAnnotation():
"""Marker type for output artifacts."""
pass
# Input represents an Input artifact of type T.

View File

@ -13,15 +13,18 @@
# limitations under the License.
"""Tests for kfp.components.types.type_annotations."""
import unittest
from typing import Any, Dict, List, Optional
import unittest
from absl.testing import parameterized
from kfp.components.types import type_annotations
from kfp.components.types.artifact_types import Model
from kfp.components.types.type_annotations import (Input, InputAnnotation,
InputPath, Output,
OutputAnnotation, OutputPath)
from kfp.components.types.type_annotations import Input
from kfp.components.types.type_annotations import InputAnnotation
from kfp.components.types.type_annotations import InputPath
from kfp.components.types.type_annotations import Output
from kfp.components.types.type_annotations import OutputAnnotation
from kfp.components.types.type_annotations import OutputPath
class AnnotationsTest(parameterized.TestCase):

View File

@ -14,8 +14,8 @@
"""Utilities for component I/O type mapping."""
import inspect
import re
import warnings
from typing import Any, List, Optional, Type, Union
import warnings
import kfp
from kfp.components import task_final_status
@ -204,12 +204,10 @@ def get_input_artifact_type_schema(
class InconsistentTypeException(Exception):
"""InconsistencyTypeException is raised when two types are not
consistent."""
pass
class InconsistentTypeWarning(Warning):
"""InconsistentTypeWarning is issued when two types are not consistent."""
pass
def verify_type_compatibility(

View File

@ -11,11 +11,11 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from typing import Any, Dict, List, Union
import unittest
import kfp
from absl.testing import parameterized
import kfp
from kfp.components import v1_structures
from kfp.components.types import artifact_types
from kfp.components.types import type_utils

View File

@ -15,8 +15,8 @@
import hashlib
import warnings
import yaml
from kfp.components import v1_structures
import yaml
def _load_component_spec_from_component_text(

View File

@ -12,11 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
from collections import OrderedDict
from collections import abc
from typing import (Any, Dict, List, Mapping, MutableMapping, MutableSequence,
Sequence, Type, TypeVar, Union, cast, get_type_hints)
from collections import OrderedDict
import inspect
from typing import (Any, cast, Dict, get_type_hints, List, Mapping,
MutableMapping, MutableSequence, Sequence, Type, TypeVar,
Union)
T = TypeVar('T')
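
Two profile details explain this hunk: the google profile excludes `typing` from force_single_line, so the parenthesized import stays combined, and it sorts names case-insensitively, which is why `cast` and `get_type_hints` now interleave with the capitalized names. A hedged check (both behaviors are assumptions about isort's built-in profile):

import isort

print(isort.code('from typing import Dict, cast, Any\n', profile='google'))
# expected: from typing import Any, cast, Dict
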
@ -54,7 +55,6 @@ def verify_object_against_type(x: Any, typ: Type[T]) -> T:
return x
except Exception as ex:
exception_map[possible_type] = ex
pass
#exception_lines = ['Exception for type {}: {}.'.format(t, e) for t, e in exception_map.items()]
exception_lines = [str(e) for t, e in exception_map.items()]
exception_lines.append(
@ -167,7 +167,6 @@ def parse_object_from_struct_based_on_type(struct: Any, typ: Type[T]) -> T:
exception_map[
possible_type] = 'Unexpected exception when trying to convert structure "{}" to type "{}": {}: {}'.format(
struct, typ, type(ex), ex)
pass
#Single successful parsing.
if len(results) == 1:
@ -252,7 +251,7 @@ def convert_object_to_struct(obj, serialized_names: Mapping[str, str] = {}):
if python_name.startswith('_'):
continue
attr_name = serialized_names.get(python_name, python_name)
if hasattr(value, "to_dict"):
if hasattr(value, 'to_dict'):
result[attr_name] = value.to_dict()
elif isinstance(value, list):
result[attr_name] = [

View File

@ -284,7 +284,6 @@ class Client(object):
k8s.config.load_incluster_config()
except:
in_cluster = False
pass
if in_cluster:
config.host = Client.IN_CLUSTER_DNS_NAME.format(namespace)

View File

@ -17,7 +17,7 @@ __all__ = [
"run_pipeline_func_locally",
]
from typing import Callable, List, Mapping, Optional
from typing import Callable, Mapping, Optional
from . import Client, LocalClient, dsl

View File

@ -27,10 +27,8 @@ from kfp.deprecated.cli.experiment import experiment
from kfp.deprecated.cli.output import OutputFormat
from kfp.deprecated.cli import components
_NO_CLIENT_COMMANDS = [
'diagnose_me',
'components'
]
_NO_CLIENT_COMMANDS = ['diagnose_me', 'components']
@click.group()
@click.option('--endpoint', help='Endpoint of the KFP API service to connect.')

View File

@ -11,7 +11,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import configparser
import contextlib
import enum
import pathlib
@ -348,7 +347,6 @@ class _ComponentBuilder():
@app.callback()
def components():
"""Builds shareable, containerized components."""
pass
@app.command()

View File

@ -13,7 +13,6 @@
# limitations under the License.
"""Tests for `components` command group in KFP CLI."""
import contextlib
import importlib
import pathlib
import sys
import textwrap

View File

@ -14,7 +14,6 @@ from kfp.deprecated.cli.diagnose_me import utility
@click.group()
def diagnose_me():
"""Prints diagnoses information for KFP environment."""
pass
@diagnose_me.command()

View File

@ -10,7 +10,6 @@ from kfp_server_api.models.api_experiment import ApiExperiment
@click.group()
def experiment():
"""Manage experiment resources."""
pass
@experiment.command()

View File

@ -23,7 +23,6 @@ from kfp.deprecated.cli.output import print_output, OutputFormat
@click.group()
def pipeline():
"""Manage pipeline resources."""
pass
@pipeline.command()

View File

@ -22,7 +22,6 @@ import kfp_server_api
@click.group()
def recurring_run():
"""Manage recurring-run resources."""
pass
@recurring_run.command()

View File

@ -28,7 +28,6 @@ from kfp.deprecated.cli.output import print_output, OutputFormat
@click.group()
def run():
"""manage run resources."""
pass
@run.command()

View File

@ -1,11 +1,7 @@
import copy
import json
import os
import re
from typing import Any, Dict, List, Optional, Set, Tuple
from kfp.deprecated.dsl import _component_bridge
from kfp.deprecated import dsl
from typing import List, Optional, Set
def fix_big_data_passing(workflow: dict) -> dict:

View File

@ -1,11 +1,9 @@
#!/bin/env python3
import copy
import json
import os
import re
import warnings
from typing import List, Optional, Set
def rewrite_data_passing_to_use_volumes(

View File

@ -14,7 +14,7 @@
import warnings
from kubernetes import client as k8s_client
from typing import Callable, Dict, Optional, Text
from typing import Callable, Dict, Optional
from kfp.deprecated.dsl._container_op import BaseOp, ContainerOp

View File

@ -18,7 +18,7 @@ import warnings
import yaml
import copy
from collections import OrderedDict
from typing import Union, List, Any, Callable, TypeVar, Dict
from typing import Any, Dict, List, TypeVar
from kfp.deprecated.compiler._k8s_helper import convert_k8s_obj_to_json
from kfp.deprecated import dsl

View File

@ -24,7 +24,7 @@ from typing import Callable, Set, List, Text, Dict, Tuple, Any, Union, Optional
import kfp.deprecated as kfp
from kfp.deprecated.dsl import _for_loop
from kfp.deprecated.compiler import _data_passing_rewriter, v2_compat
from kfp.deprecated.compiler import v2_compat
from kfp.deprecated import dsl
from kfp.deprecated.compiler._k8s_helper import convert_k8s_obj_to_json, sanitize_k8s_name

View File

@ -24,7 +24,6 @@ __all__ = [
'generate_unique_name_conversion_table',
]
import re
import sys
from typing import Callable, Sequence, Mapping

View File

@ -40,7 +40,7 @@ import inspect
import itertools
from pathlib import Path
import textwrap
from typing import Callable, Dict, List, Mapping, Optional, TypeVar
from typing import Callable, List, Mapping, Optional, TypeVar
import warnings
import docstring_parser
@ -265,7 +265,7 @@ def _strip_type_hints_using_lib2to3(source_code: str) -> str:
# Using the standard lib2to3 library to strip type annotations.
# Switch to another library like strip-hints if issues are found.
from lib2to3 import fixer_base, refactor, fixer_util
from lib2to3 import fixer_base, refactor
class StripAnnotations(fixer_base.BaseFix):
PATTERN = r'''

View File

@ -16,13 +16,12 @@ __all__ = [
'create_graph_component_from_pipeline_func',
]
import inspect
from collections import OrderedDict
from typing import Callable, Mapping, Optional
from . import _components
from . import structures
from ._structures import TaskSpec, ComponentSpec, OutputSpec, GraphInputReference, TaskOutputArgument, GraphImplementation, GraphSpec
from ._structures import ComponentSpec, GraphImplementation, GraphInputReference, GraphSpec, OutputSpec, TaskOutputArgument
from ._naming import _make_name_unique_by_adding_index
from ._python_op import _extract_component_interface
from ._components import _create_task_factory_from_component_spec

View File

@ -56,7 +56,7 @@ __all__ = [
from collections import OrderedDict
from typing import Any, Dict, List, Mapping, Optional, Sequence, Union
from typing import Any, Dict, List, Mapping, Optional, Union
from .modelbase import ModelBase

View File

@ -18,7 +18,7 @@ __all__ = [
import inspect
from collections import abc, OrderedDict
from typing import Any, Callable, Dict, List, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, TypeVar, Union, cast, get_type_hints
from typing import Any, Dict, List, Mapping, MutableMapping, MutableSequence, Sequence, Type, TypeVar, Union, cast, get_type_hints
T = TypeVar('T')
@ -56,7 +56,6 @@ def verify_object_against_type(x: Any, typ: Type[T]) -> T:
return x
except Exception as ex:
exception_map[possible_type] = ex
pass
#exception_lines = ['Exception for type {}: {}.'.format(t, e) for t, e in exception_map.items()]
exception_lines = [str(e) for t, e in exception_map.items()]
exception_lines.append(
@ -174,7 +173,6 @@ def parse_object_from_struct_based_on_type(struct: Any, typ: Type[T]) -> T:
exception_map[
possible_type] = 'Unexpected exception when trying to convert structure "{}" to type "{}": {}: {}'.format(
struct, typ, type(ex), ex)
pass
#Single successful parsing.
if len(results) == 1:

View File

@ -12,13 +12,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import unittest
from pathlib import Path
from kfp.deprecated import components as comp
from kfp.deprecated.components.structures import ComponentReference, ComponentSpec, ContainerSpec, GraphInputReference, GraphSpec, InputSpec, InputValuePlaceholder, GraphImplementation, OutputPathPlaceholder, OutputSpec, TaskOutputArgument, TaskSpec
from kfp.deprecated.components.structures import ComponentReference, ComponentSpec, GraphImplementation, GraphInputReference, GraphSpec, InputSpec, OutputSpec, TaskOutputArgument, TaskSpec
from kfp.deprecated.components._yaml_utils import load_yaml

View File

@ -369,7 +369,6 @@ class PythonOpTestCase(unittest.TestCase):
#('custom_struct_type_param', {'CustomType': {'param1': 'value1', 'param2': 'value2'}}), # TypeError: NamedTuple('Name', [(f0, t0), (f1, t1), ...]); each t must be a type Got {'CustomType': {'param1': 'value1', 'param2': 'value2'}}
]):
"""Function docstring."""
pass
component_spec = comp._python_op._extract_component_interface(my_func)

View File

@ -12,8 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import unittest
from collections import OrderedDict
from pathlib import Path

View File

@ -12,10 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import unittest
from pathlib import Path
from typing import List, Dict, Union, Optional
from kfp.deprecated.components.modelbase import ModelBase

View File

@ -20,11 +20,8 @@ import logging
import os
import re
import shutil
import sys
import tempfile
import requests
from ._cache import calculate_recursive_dir_hash, try_read_value_from_cache, write_value_to_cache
from ._container_builder import ContainerBuilder

View File

@ -25,9 +25,6 @@ from deprecated.sphinx import deprecated
from ..components._components import _create_task_factory_from_component_spec
from ..components._python_op import _func_to_component_spec
from ._container_builder import ContainerBuilder
from kfp.deprecated import components
from kfp.deprecated import dsl
from kfp.deprecated.components import _components
from kfp.deprecated.components import _structures
from kfp.deprecated.containers import entrypoint

View File

@ -14,12 +14,9 @@
from typing import Dict, NamedTuple, Optional, Union
from absl import logging
import fire
from google.protobuf import json_format
import os
from kfp.deprecated.containers import _gcs_helper
from kfp.deprecated.containers import entrypoint_utils
from kfp.deprecated.dsl import artifact
from kfp.pipeline_spec import pipeline_spec_pb2

View File

@ -14,13 +14,12 @@
import collections
import copy
import inspect
import json
import pathlib
from typing import Any, Mapping, Optional
from kfp.deprecated._config import COMPILING_FOR_V2
from kfp.deprecated.components import _structures, _data_passing
from kfp.deprecated.components import _structures
from kfp.deprecated.components import _components
from kfp.deprecated.components import _naming
from kfp.deprecated import dsl

View File

@ -19,7 +19,7 @@ from typing import (Any, Callable, Dict, List, Optional, Sequence, Tuple,
from kfp.deprecated._config import COMPILING_FOR_V2
from kfp.deprecated.components import _components, _structures
from kfp.deprecated.dsl import _pipeline_param, dsl_utils
from kfp.deprecated.dsl import _pipeline_param
from kfp.pipeline_spec import pipeline_spec_pb2
from kubernetes.client import V1Affinity, V1Toleration
from kubernetes.client.models import (V1Container, V1ContainerPort,

View File

@ -14,8 +14,6 @@
import warnings
from .types import BaseType, _check_valid_type_dict
from ..components._data_passing import serialize_value
from ..components.structures import ComponentSpec, InputSpec, OutputSpec
def _annotation_to_typemeta(annotation):

View File

@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Union
import uuid
from kfp.deprecated.dsl import _for_loop, _pipeline_param

View File

@ -17,12 +17,10 @@ from typing import Callable, Optional, Union
from kubernetes.client.models import V1PodDNSConfig
from kfp.deprecated.dsl import _container_op
from kfp.deprecated.dsl import _resource_op
from kfp.deprecated.dsl import _ops_group
from kfp.deprecated.dsl import _component_bridge
from kfp.deprecated.components import _components
from kfp.deprecated.components import _naming
import sys
# This handler is called whenever the @pipeline decorator is applied.
# It can be used by command-line DSL compiler to inject code that runs for every

View File

@ -13,12 +13,10 @@
# limitations under the License.
"""Base class for MLMD artifact in KFP SDK."""
from typing import Any, Dict, Optional
from typing import Any, Optional
from absl import logging
import enum
import importlib
import jsonschema
import yaml
from google.protobuf import json_format

View File

@ -15,17 +15,10 @@
import os
import unittest
import yaml
import json
import jsonschema
from google.protobuf import json_format
from google.protobuf.struct_pb2 import Struct
from kfp.deprecated.dsl import metrics_utils
from google.protobuf import json_format
class MetricsUtilsTest(unittest.TestCase):

View File

@ -114,12 +114,10 @@ class LocalPath(BaseType):
class InconsistentTypeException(Exception):
"""InconsistencyTypeException is raised when two types are not
consistent."""
pass
class InconsistentTypeWarning(Warning):
"""InconsistentTypeWarning is issued when two types are not consistent."""
pass
TypeSpecType = Union[str, Dict]

View File

@ -11,8 +11,3 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import tempfile
from kfp.deprecated.compiler import build_docker_image

View File

@ -13,38 +13,28 @@
# limitations under the License.
from kfp.components.component_decorator import component
from kfp.components.importer_node import importer
from kfp.components.pipeline_channel import (
PipelineArtifactChannel,
PipelineChannel,
PipelineParameterChannel,
)
from kfp.components.pipeline_channel import PipelineArtifactChannel
from kfp.components.pipeline_channel import PipelineChannel
from kfp.components.pipeline_channel import PipelineParameterChannel
from kfp.components.pipeline_context import pipeline
from kfp.components.pipeline_task import PipelineTask
from kfp.components.task_final_status import PipelineTaskFinalStatus
from kfp.components.tasks_group import (
Condition,
ExitHandler,
ParallelFor,
)
from kfp.components.types.artifact_types import (
Artifact,
ClassificationMetrics,
Dataset,
HTML,
Markdown,
Metrics,
Model,
SlicedClassificationMetrics,
)
from kfp.components.types.type_annotations import (
Input,
Output,
InputPath,
OutputPath,
)
from kfp.components.tasks_group import Condition
from kfp.components.tasks_group import ExitHandler
from kfp.components.tasks_group import ParallelFor
from kfp.components.types.artifact_types import Artifact
from kfp.components.types.artifact_types import ClassificationMetrics
from kfp.components.types.artifact_types import Dataset
from kfp.components.types.artifact_types import HTML
from kfp.components.types.artifact_types import Markdown
from kfp.components.types.artifact_types import Metrics
from kfp.components.types.artifact_types import Model
from kfp.components.types.artifact_types import SlicedClassificationMetrics
from kfp.components.types.type_annotations import Input
from kfp.components.types.type_annotations import InputPath
from kfp.components.types.type_annotations import Output
from kfp.components.types.type_annotations import OutputPath
PIPELINE_JOB_NAME_PLACEHOLDER = '{{$.pipeline_job_name}}'
PIPELINE_JOB_RESOURCE_NAME_PLACEHOLDER = '{{$.pipeline_job_resource_name}}'

View File

@ -20,8 +20,8 @@ import re
from typing import Any, Dict, List, Optional, Tuple, Union
import google.auth
import requests
from google.auth import credentials
import requests
_KNOWN_HOSTS_REGEX = {
'kfp_pkg_dev': (
@ -35,9 +35,12 @@ _DEFAULT_JSON_HEADER = {
_VERSION_PREFIX = 'sha256:'
LOCAL_REGISTRY_CREDENTIAL = os.path.expanduser('~/.config/kfp/registry_credentials.json')
LOCAL_REGISTRY_CONTEXT = os.path.expanduser('~/.config/kfp/registry_context.json')
DEFAULT_REGISTRY_CONTEXT = os.path.join(os.path.dirname(__file__), 'context/default_pkg_dev.json')
LOCAL_REGISTRY_CREDENTIAL = os.path.expanduser(
'~/.config/kfp/registry_credentials.json')
LOCAL_REGISTRY_CONTEXT = os.path.expanduser(
'~/.config/kfp/registry_context.json')
DEFAULT_REGISTRY_CONTEXT = os.path.join(
os.path.dirname(__file__), 'context/default_pkg_dev.json')
class _SafeDict(dict):
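
The rewrapped assignments above are yapf enforcing the column limit (the pre-commit config near the top pins yapf v0.32.0). A sketch of reproducing one rewrap through yapf's API; the 'google' style name is an assumption, since the repo may ship its own .style.yapf:

from yapf.yapflib.yapf_api import FormatCode

src = ("LOCAL_REGISTRY_CREDENTIAL = os.path.expanduser("
       "'~/.config/kfp/registry_credentials.json')\n")
formatted, _changed = FormatCode(src, style_config='google')
print(formatted)  # the long argument moves onto its own indented line
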
@ -387,7 +390,7 @@ class RegistryClient:
package_name: Name of the package.
version: Version of the package.
tag: Tag attached to the package.
file_name: File name to be saved as. If not specified, the
file_name: File name to be saved as. If not specified, the
file name will be based on the package name and version/tag.
Returns:

View File

@ -13,15 +13,14 @@
# limitations under the License.
"""Tests for KFP Registry RegistryClient."""
import builtins
import json
import os
import requests
from unittest import mock
from absl.testing import parameterized
from kfp.registry import ApiAuth
from kfp.registry import RegistryClient
import requests
_DEFAULT_HOST = 'https://us-central1-kfp.pkg.dev/proj/repo'
_KFP_CONFIG_FILE = os.path.join(
@ -49,8 +48,7 @@ class RegistryClientTest(parameterized.TestCase):
def test_is_ar_host_false(self):
client = RegistryClient(
host='https://hub.docker.com/r/google/cloud-sdk',
auth=ApiAuth(''))
host='https://hub.docker.com/r/google/cloud-sdk', auth=ApiAuth(''))
self.assertFalse(client._is_ar_host())
def test_load_config(self):

View File

@ -16,9 +16,9 @@
import warnings
warnings.warn(
(f"The module `{__name__}` is deprecated and will be removed in a future"
"version. Please import directly from the `kfp` namespace, "
"instead of `kfp.v2`."),
(f'The module `{__name__}` is deprecated and will be removed in a future '
'version. Please import directly from the `kfp` namespace, '
'instead of `kfp.v2`.'),
category=DeprecationWarning,
stacklevel=2)

View File

@ -55,29 +55,29 @@ def read_readme() -> str:
setuptools.setup(
name="kfp",
name='kfp',
version=find_version('kfp', '__init__.py'),
description='Kubeflow Pipelines SDK',
long_description=read_readme(),
long_description_content_type='text/markdown',
author='The Kubeflow Authors',
url="https://github.com/kubeflow/pipelines",
url='https://github.com/kubeflow/pipelines',
project_urls={
"Documentation":
"https://kubeflow-pipelines.readthedocs.io/en/stable/",
"Bug Tracker":
"https://github.com/kubeflow/pipelines/issues",
"Source":
"https://github.com/kubeflow/pipelines/tree/master/sdk",
"Changelog":
"https://github.com/kubeflow/pipelines/blob/master/sdk/RELEASE.md",
'Documentation':
'https://kubeflow-pipelines.readthedocs.io/en/stable/',
'Bug Tracker':
'https://github.com/kubeflow/pipelines/issues',
'Source':
'https://github.com/kubeflow/pipelines/tree/master/sdk',
'Changelog':
'https://github.com/kubeflow/pipelines/blob/master/sdk/RELEASE.md',
},
install_requires=get_requirements('requirements.in'),
extras_require={
'all': ['docker'],
},
packages=setuptools.find_packages(
where=os.path.dirname(__file__), exclude=["*test*"]),
where=os.path.dirname(__file__), exclude=['*test*']),
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: Education',

View File

@ -12,11 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from kfp.deprecated.containers._component_builder import _generate_dockerfile, _dependency_to_requirements, VersionedDependency, DependencyHelper
import os
import unittest
from kfp.deprecated.containers._component_builder import \
_dependency_to_requirements
from kfp.deprecated.containers._component_builder import _generate_dockerfile
from kfp.deprecated.containers._component_builder import DependencyHelper
from kfp.deprecated.containers._component_builder import VersionedDependency
class TestVersionedDependency(unittest.TestCase):

View File

@ -14,11 +14,12 @@
import os
import tarfile
import unittest
import yaml
import tempfile
import unittest
from unittest import mock
from kfp.deprecated.containers._component_builder import ContainerBuilder
import yaml
GCS_BASE = 'gs://kfp-testing/'
DEFAULT_IMAGE_NAME = 'gcr.io/kfp-testing/image'

View File

@ -12,10 +12,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from kfp.deprecated.compiler._k8s_helper import convert_k8s_obj_to_json
from datetime import datetime
import unittest
from kfp.deprecated.compiler._k8s_helper import convert_k8s_obj_to_json
class TestCompiler(unittest.TestCase):

View File

@ -1,8 +1,9 @@
from pathlib import Path
from typing import NamedTuple
import kfp.deprecated as kfp
from kfp.deprecated.components import load_component_from_file, create_component_from_func
from typing import NamedTuple
from kfp.deprecated.components import create_component_from_func
from kfp.deprecated.components import load_component_from_file
test_data_dir = Path(__file__).parent / 'test_data'
producer_op = load_component_from_file(
@ -59,8 +60,9 @@ def artifact_passing_pipeline():
).data_passing_method = volume_based_data_passing_method
from kubernetes.client.models import V1Volume, V1PersistentVolumeClaimVolumeSource
from kfp.deprecated.dsl import data_passing_methods
from kubernetes.client.models import V1PersistentVolumeClaimVolumeSource
from kubernetes.client.models import V1Volume
volume_based_data_passing_method = data_passing_methods.KubernetesVolume(
volume=V1Volume(

View File

@ -13,13 +13,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from pathlib import Path
import sys
sys.path.insert(0, __file__ + '/../../../../')
import kfp.deprecated as kfp
from kfp.deprecated import dsl
import kfp.deprecated as kfp
def component_with_inline_input_artifact(text: str):

View File

@ -13,9 +13,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import kfp.deprecated as kfp
from kfp.deprecated import dsl
from kfp.deprecated.dsl import _for_loop
import kfp.deprecated as kfp
produce_op = kfp.components.load_component_from_text('''\
name: Produce list

Some files were not shown because too many files have changed in this diff.