SDK - Removed the ArtifactLocation feature (#3517)

* SDK - Removed the ArtifactLocation feature

The feature was deprecated in v0.1.34 (https://github.com/kubeflow/pipelines/pull/2326).

* Removed the artifact_location sample
Alexey Volkov 2020-04-23 00:49:44 -07:00 committed by GitHub
parent 264953c108
commit b63ad7e614
16 changed files with 4 additions and 508 deletions
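
Migration note: the deprecation warnings quoted in the removed code point users at Argo's default artifact repository, configured once per cluster in the workflow-controller configMap (see the ARTIFACT_REPO.md link in the warnings below). A minimal sketch of that replacement config for a Minio-backed install, reusing the defaults from the removed sample — the configMap name, namespace, bucket, and secret name here are assumptions taken from the sample, not from this commit:

apiVersion: v1
kind: ConfigMap
metadata:
  name: workflow-controller-configmap  # assumed name of the Argo controller configMap
  namespace: kubeflow                  # assumed namespace, per the removed sample's default
data:
  config: |
    artifactRepository:
      s3:
        bucket: mlpipeline
        endpoint: minio-service.kubeflow:9000
        insecure: true
        accessKeySecret:
          name: mlpipeline-minio-artifact
          key: accesskey
        secretKeySecret:
          name: mlpipeline-minio-artifact
          key: secretkey

With this in place, compiled workflows no longer need any artifact location set in the pipeline DSL.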

@@ -1,51 +0,0 @@
#!/usr/bin/env python3
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import kfp
from kfp import dsl
from kubernetes.client import V1SecretKeySelector


@dsl.pipeline(
    name="custom_artifact_location_pipeline",
    description="""A pipeline to demonstrate how to configure the artifact
    location for all the ops in the pipeline.""",
)
def custom_artifact_location(
    secret_name: str = "mlpipeline-minio-artifact",
    tag: str = '1.31.0',
    namespace: str = "kubeflow",
    bucket: str = "mlpipeline"
):
    # configures artifact location
    pipeline_artifact_location = dsl.ArtifactLocation.s3(
        bucket=bucket,
        endpoint="minio-service.%s:9000" % namespace,  # parameterize minio-service endpoint
        insecure=True,
        access_key_secret=V1SecretKeySelector(name=secret_name, key="accesskey"),
        secret_key_secret={"name": secret_name, "key": "secretkey"},  # accepts dict also
    )

    # set pipeline level artifact location
    dsl.get_pipeline_conf().set_artifact_location(pipeline_artifact_location)

    # artifacts in this op are stored to endpoint `minio-service.<namespace>:9000`
    op = dsl.ContainerOp(name="foo", image="busybox:%s" % tag,
                         command=['sh', '-c', 'echo hello > /tmp/output.txt'],
                         file_outputs={'output': '/tmp/output.txt'})


if __name__ == '__main__':
    kfp.compiler.Compiler().compile(custom_artifact_location, __file__ + '.yaml')

@@ -22,7 +22,7 @@ from typing import Union, List, Any, Callable, TypeVar, Dict
from ._k8s_helper import convert_k8s_obj_to_json
from .. import dsl
from ..dsl._container_op import BaseOp
from ..dsl._artifact_location import ArtifactLocation
# generics
T = TypeVar('T')
@@ -187,12 +187,7 @@ def _op_to_template(op: BaseOp):
    output_artifact_paths.update(sorted(((param.full_name, processed_op.file_outputs[param.name]) for param in processed_op.outputs.values()), key=lambda x: x[0]))

    output_artifacts = [
        convert_k8s_obj_to_json(
            ArtifactLocation.create_artifact_for_s3(
                op.artifact_location,
                name=name,
                path=path,
                key='runs/{{workflow.uid}}/{{pod.name}}/' + name + '.tgz'))
        {'name': name, 'path': path}
        for name, path in output_artifact_paths.items()
    ]
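
With the s3-specific artifact construction removed, the compiler emits only the artifact name and path and defers the storage location to the cluster's default artifact repository. For an output like out_art from the deleted testdata further down, the compiled outputs section would reduce to roughly this shape (a sketch, not actual compiler output):

outputs:
  artifacts:
  - name: out_art
    path: /tmp/out_art.txt

The s3 block that previously carried the bucket, endpoint, and secret selectors (visible in the deleted testdata YAML below) disappears from the compiled template.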

@@ -713,16 +713,8 @@ class Compiler(object):
        # Sanitize operator names and param names
        sanitized_ops = {}

        # pipeline level artifact location
        artifact_location = pipeline_conf.artifact_location

        for op in pipeline.ops.values():
            # inject pipeline level artifact location into the op if it does
            # not have an artifact location config already.
            if hasattr(op, "artifact_location"):
                if artifact_location and not op.artifact_location:
                    op.artifact_location = artifact_location
            sanitized_name = sanitize_k8s_name(op.name)
            op.name = sanitized_name
            for param in op.outputs.values():

@@ -24,7 +24,6 @@ from ._pipeline_volume import PipelineVolume
from ._volume_snapshot_op import VolumeSnapshotOp
from ._ops_group import OpsGroup, ExitHandler, Condition, ParallelFor
from ._component import python_component, graph_component, component
from ._artifact_location import ArtifactLocation
EXECUTION_ID_PLACEHOLDER = '{{workflow.uid}}-{{pod.name}}'
RUN_ID_PLACEHOLDER = '{{workflow.uid}}'

@@ -1,154 +0,0 @@
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Dict, Union, Any

from argo.models import V1alpha1ArtifactLocation, V1alpha1S3Artifact, V1alpha1Artifact
from deprecated.sphinx import deprecated
from kubernetes.client.models import V1SecretKeySelector


def _dict_to_secret(
    value: Union[V1SecretKeySelector, Dict[str, Any]]
) -> V1SecretKeySelector:
    """Converts a dict to a kubernetes V1SecretKeySelector."""
    if isinstance(value, dict) and value.get("name") and value.get("key"):
        return V1SecretKeySelector(**value)
    return value or V1SecretKeySelector(key="", optional=True)


@deprecated(version='0.1.32', reason='ArtifactLocation is deprecated since SDK v0.1.32. Please configure the artifact location in the cluster configMap: https://github.com/argoproj/argo/blob/master/ARTIFACT_REPO.md#configure-the-default-artifact-repository .')
class ArtifactLocation:
    """
    ArtifactLocation describes a location for a single or multiple artifacts.
    It is used as a single artifact in the context of inputs/outputs
    (e.g. outputs.artifacts.artname). It is also used to describe the location
    of multiple artifacts such as the archive location of a single workflow
    step, which the executor will use as a default location to store its files.
    """

    @staticmethod
    def s3(
        bucket: str = None,
        endpoint: str = None,
        insecure: bool = None,
        region: str = None,
        access_key_secret: Union[V1SecretKeySelector, Dict[str, Any]] = None,
        secret_key_secret: Union[V1SecretKeySelector, Dict[str, Any]] = None,
    ) -> V1alpha1ArtifactLocation:
        """
        Creates a new instance of V1alpha1ArtifactLocation with an s3 artifact
        backend.

        Example::

            from kubernetes.client.models import V1SecretKeySelector
            from kfp.dsl import ArtifactLocation

            artifact_location = ArtifactLocation.s3(
                bucket="foo",
                endpoint="s3.amazonaws.com",
                insecure=False,
                region="ap-southeast-1",
                access_key_secret={"name": "s3-secret", "key": "accesskey"},
                secret_key_secret=V1SecretKeySelector(name="s3-secret", key="secretkey")
            )

        Args:
            bucket (str): name of the bucket.
            endpoint (str): hostname to the bucket endpoint.
            insecure (bool): if True, access the endpoint over plain HTTP instead of TLS.
            region (str): bucket region (for s3 buckets).
            access_key_secret (Union[V1SecretKeySelector, Dict[str, Any]]): k8s secret selector for the access key.
            secret_key_secret (Union[V1SecretKeySelector, Dict[str, Any]]): k8s secret selector for the secret key.

        Returns:
            V1alpha1ArtifactLocation: a new instance of V1alpha1ArtifactLocation.
        """
        return V1alpha1ArtifactLocation(
            s3=V1alpha1S3Artifact(
                bucket=bucket,
                endpoint=endpoint,
                insecure=insecure,
                region=region,
                access_key_secret=_dict_to_secret(access_key_secret),
                secret_key_secret=_dict_to_secret(secret_key_secret),
                key="",  # key is a required value for V1alpha1S3Artifact
            )
        )

    @staticmethod
    def create_artifact_for_s3(
        artifact_location: Union[V1alpha1ArtifactLocation, Dict[str, Any]],
        name: str,
        path: str,
        key: str,
        **kwargs
    ) -> V1alpha1Artifact:
        """
        Creates an s3-backed `V1alpha1Artifact` object using a
        `V1alpha1ArtifactLocation` object.

        Args:
            artifact_location (Union[V1alpha1ArtifactLocation, Dict[str, Any]]): `V1alpha1ArtifactLocation`
                object or a dict representing it.
            name (str): name of the artifact. Must be unique within a template's
                inputs/outputs.
            path (str): container path to the artifact.
            key (str): key in bucket to store artifact.
            **kwargs: any other keyword arguments accepted by `V1alpha1Artifact`.

        Returns:
            V1alpha1Artifact: V1alpha1Artifact object.
        """
        if not artifact_location:
            return V1alpha1Artifact(
                name=name,
                path=path,
                **kwargs
            )
        # dict representation of artifact location
        if isinstance(artifact_location, dict) and artifact_location.get("s3"):
            s3_artifact = artifact_location.get("s3")
            return V1alpha1Artifact(
                name=name,
                path=path,
                s3=V1alpha1S3Artifact(
                    bucket=s3_artifact.get("bucket"),
                    endpoint=s3_artifact.get("endpoint"),
                    insecure=s3_artifact.get("insecure"),
                    region=s3_artifact.get("region"),
                    access_key_secret=_dict_to_secret(s3_artifact.get("accessKeySecret")),
                    secret_key_secret=_dict_to_secret(s3_artifact.get("secretKeySecret")),
                    key=key
                )
            )
        if artifact_location.s3:
            return V1alpha1Artifact(
                name=name,
                path=path,
                s3=V1alpha1S3Artifact(
                    bucket=artifact_location.s3.bucket,
                    endpoint=artifact_location.s3.endpoint,
                    insecure=artifact_location.s3.insecure,
                    region=artifact_location.s3.region,
                    access_key_secret=artifact_location.s3.access_key_secret,
                    secret_key_secret=artifact_location.s3.secret_key_secret,
                    key=key,
                ),
                **kwargs
            )
        raise ValueError("artifact_location does not have s3 configuration.")

@@ -16,7 +16,6 @@ import re
import warnings
from typing import Any, Dict, List, TypeVar, Union, Callable, Optional, Sequence
from argo.models import V1alpha1ArtifactLocation
from kubernetes.client import V1Toleration, V1Affinity
from kubernetes.client.models import (
    V1Container, V1EnvVar, V1EnvFromSource, V1SecurityContext, V1Probe,
@@ -973,7 +972,6 @@ class ContainerOp(BaseOp):
                 artifact_argument_paths: List[InputArgumentPath] = None,
                 file_outputs: Dict[str, str] = None,
                 output_artifact_paths: Dict[str, str]=None,
                 artifact_location: V1alpha1ArtifactLocation=None,
                 is_exit_handler=False,
                 pvolumes: Dict[str, V1Volume] = None,
    ):
@@ -1005,9 +1003,6 @@
                It has the following default artifact paths during compile time.
                {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json',
                 'mlpipeline-metrics': '/mlpipeline-metrics.json'}
            artifact_location: Deprecated. Configures the default artifact location for artifacts
                in the argo workflow template. Must be a `V1alpha1ArtifactLocation`
                object.
            is_exit_handler: Deprecated. This is no longer needed.
            pvolumes: Dictionary for the user to match a path on the op's fs with a
                V1Volume or its inherited type.
@@ -1015,7 +1010,7 @@
"""
super().__init__(name=name, init_containers=init_containers, sidecars=sidecars, is_exit_handler=is_exit_handler)
self.attrs_with_pipelineparams = BaseOp.attrs_with_pipelineparams + ['_container', 'artifact_location', 'artifact_arguments'] #Copying the BaseOp class variable!
self.attrs_with_pipelineparams = BaseOp.attrs_with_pipelineparams + ['_container', 'artifact_arguments'] #Copying the BaseOp class variable!
input_artifact_paths = {}
artifact_arguments = {}
@@ -1090,10 +1085,6 @@
        self.artifact_arguments = artifact_arguments
        self.file_outputs = file_outputs
        self.output_artifact_paths = output_artifact_paths or {}
        self.artifact_location = artifact_location
        if artifact_location:
            warnings.warn('Setting per-ContainerOp artifact_location is deprecated since SDK v0.1.32. Please configure the artifact location in the cluster configMap: https://github.com/argoproj/argo/blob/master/ARTIFACT_REPO.md#configure-the-default-artifact-repository . For short-term workaround use the pipeline-wide kfp.dsl.PipelineConf().set_artifact_location, but it can also be deprecated in future.', PendingDeprecationWarning)

        self._metadata = None

@@ -60,7 +60,6 @@ class PipelineConf():
        self.image_pull_secrets = []
        self.timeout = 0
        self.ttl_seconds_after_finished = -1
        self.artifact_location = None
        self.op_transformers = []

    def set_image_pull_secrets(self, image_pull_secrets):
@@ -92,40 +91,6 @@ class PipelineConf():
        self.ttl_seconds_after_finished = seconds
        return self

    def set_artifact_location(self, artifact_location):
        """Configures the pipeline level artifact location.

        Example::

            from kfp.dsl import ArtifactLocation, get_pipeline_conf, pipeline
            from kubernetes.client.models import V1SecretKeySelector

            @pipeline(name='foo', description='hello world')
            def foo_pipeline(tag: str, pull_image_policy: str):
                '''A demo pipeline'''
                # create artifact location object
                artifact_location = ArtifactLocation.s3(
                    bucket="foo",
                    endpoint="minio-service:9000",
                    insecure=True,
                    access_key_secret=V1SecretKeySelector(name="minio", key="accesskey"),
                    secret_key_secret=V1SecretKeySelector(name="minio", key="secretkey"))

                # config pipeline level artifact location
                conf = get_pipeline_conf().set_artifact_location(artifact_location)

                # rest of the code
                ...

        Args:
            artifact_location: V1alpha1ArtifactLocation object.
                For a detailed description, see the Argo V1alpha1ArtifactLocation definition:
                https://github.com/e2fyi/argo-models/blob/release-2.2/argo/models/v1alpha1_artifact_location.py
                https://github.com/argoproj/argo/blob/release-2.2/api/openapi-spec/swagger.json
        """
        self.artifact_location = artifact_location
        return self

    def add_op_transformer(self, transformer):
        """Configures the op_transformers which will be applied to all ops in the pipeline.

@@ -6,7 +6,6 @@ cloudpickle
strip-hints>=0.1.8
# kfp.dsl
argo-models==2.2.1a #2.2.1a is equivalent to argo 2.2.1
jsonschema>=3.0.1
kubernetes>=8.0.0, <12.0.0

@@ -4,7 +4,6 @@
#
# pip-compile --output-file=requirements.txt requirements.in
#
argo-models==2.2.1a # via -r requirements.in
attrs==19.3.0 # via jsonschema
cachetools==4.0.0 # via google-auth
certifi==2019.11.28 # via kfp-server-api, kubernetes, requests
@@ -22,7 +21,7 @@ idna==2.9 # via requests
importlib-metadata==1.5.0 # via jsonschema
jsonschema==3.2.0 # via -r requirements.in
kfp-server-api==0.3.0 # via -r requirements.in
kubernetes==11.0.0 # via -r requirements.in, argo-models
kubernetes==11.0.0 # via -r requirements.in
oauthlib==3.1.0 # via requests-oauthlib
protobuf==3.11.3 # via google-api-core, googleapis-common-protos
pyasn1-modules==0.2.8 # via google-auth

@@ -31,7 +31,6 @@ REQUIRES = [
    # a breaking change in kfp-server-api, or kfp sdk depends on new api changes
    # in kfp-server-api.
    'kfp-server-api>=0.2.5, <0.6.0',
    'argo-models == 2.2.1a', #2.2.1a is equivalent to argo 2.2.1
    'jsonschema >= 3.0.1',
    'tabulate',
    'click',

@@ -359,10 +359,6 @@ class TestCompiler(unittest.TestCase):
        finally:
            shutil.rmtree(tmpdir)

    def test_py_compile_artifact_location(self):
        """Test configurable artifact location pipeline."""
        self._test_py_compile_yaml('artifact_location')

    def test_py_compile_basic(self):
        """Test basic sequential pipeline."""
        self._test_py_compile_zip('basic')

@@ -1,47 +0,0 @@
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kfp import dsl
from kubernetes.client.models import V1SecretKeySelector


@dsl.pipeline(name='artifact-location-pipeine', description='hello world')
def foo_pipeline(tag: str, namespace: str = "kubeflow", bucket: str = "foobar"):
    # configures artifact location
    pipeline_artifact_location = dsl.ArtifactLocation.s3(
        bucket=bucket,
        endpoint="minio-service.%s:9000" % namespace,
        insecure=True,
        access_key_secret={"name": "minio", "key": "accesskey"},
        secret_key_secret=V1SecretKeySelector(name="minio", key="secretkey"))

    # configures artifact location using AWS IAM role (no access key provided)
    aws_artifact_location = dsl.ArtifactLocation.s3(
        bucket=bucket,
        endpoint="s3.amazonaws.com",
        region="ap-southeast-1",
        insecure=False)

    # set pipeline level artifact location
    dsl.get_pipeline_conf().set_artifact_location(pipeline_artifact_location)

    # pipeline level artifact location (to minio)
    op1 = dsl.ContainerOp(
        name='foo',
        image='busybox:%s' % tag,
        output_artifact_paths={
            'out_art': '/tmp/out_art.txt',
        },
    )

@@ -1,71 +0,0 @@
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
apiVersion: argoproj.io/v1alpha1
kind: Workflow
metadata:
  annotations:
    pipelines.kubeflow.org/pipeline_spec: '{"description": "hello world", "inputs": [{"name": "tag", "type": "String"}, {"default": "kubeflow", "name": "namespace", "type": "String"}, {"default": "foobar", "name": "bucket", "type": "String"}], "name": "artifact-location-pipeine"}'
  generateName: artifact-location-pipeine-
spec:
  arguments:
    parameters:
    - name: tag
    - name: namespace
      value: kubeflow
    - name: bucket
      value: foobar
  entrypoint: artifact-location-pipeine
  serviceAccountName: pipeline-runner
  templates:
  - dag:
      tasks:
      - arguments:
          parameters:
          - name: bucket
            value: '{{inputs.parameters.bucket}}'
          - name: namespace
            value: '{{inputs.parameters.namespace}}'
          - name: tag
            value: '{{inputs.parameters.tag}}'
        name: foo
        template: foo
    inputs:
      parameters:
      - name: bucket
      - name: namespace
      - name: tag
    name: artifact-location-pipeine
  - container:
      image: busybox:{{inputs.parameters.tag}}
    inputs:
      parameters:
      - name: bucket
      - name: namespace
      - name: tag
    name: foo
    outputs:
      artifacts:
      - name: out_art
        path: /tmp/out_art.txt
        s3:
          accessKeySecret:
            key: 'accesskey'
            name: 'minio'
          bucket: '{{inputs.parameters.bucket}}'
          endpoint: minio-service.{{inputs.parameters.namespace}}:9000
          insecure: true
          key: runs/{{workflow.uid}}/{{pod.name}}/out_art.tgz
          secretKeySecret:
            key: 'secretkey'
            name: 'minio'

@@ -1,113 +0,0 @@
# Copyright 2019 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kfp.compiler._k8s_helper import convert_k8s_obj_to_json
from kfp.dsl import ArtifactLocation
from kubernetes.client.models import V1SecretKeySelector
import unittest
class TestArtifactLocation(unittest.TestCase):

    def test_artifact_location_constructor(self):
        artifact_location = ArtifactLocation.s3(
            bucket="foo",
            endpoint="s3.amazonaws.com",
            insecure=False,
            region="ap-southeast-1",
            access_key_secret={"name": "s3-secret", "key": "accesskey"},
            secret_key_secret=V1SecretKeySelector(name="s3-secret", key="secretkey")
        )
        expected = {
            "bucket": "foo",
            "endpoint": "s3.amazonaws.com",
            "insecure": False,
            "region": "ap-southeast-1",
            "access_key_secret": {"name": "s3-secret", "key": "accesskey"},
            "secret_key_secret": {"name": "s3-secret", "key": "secretkey"}
        }
        self.assertEqual(artifact_location.s3.bucket, "foo")
        self.assertEqual(artifact_location.s3.endpoint, "s3.amazonaws.com")
        self.assertEqual(artifact_location.s3.insecure, False)
        self.assertEqual(artifact_location.s3.region, "ap-southeast-1")
        self.assertEqual(artifact_location.s3.access_key_secret.name, "s3-secret")
        self.assertEqual(artifact_location.s3.access_key_secret.key, "accesskey")
        self.assertEqual(artifact_location.s3.secret_key_secret.name, "s3-secret")
        self.assertEqual(artifact_location.s3.secret_key_secret.key, "secretkey")

    def test_create_artifact_for_s3_with_default(self):
        # should trigger a pending deprecation warning about the missing default
        # artifact_location if artifact_location is not provided.
        artifact = ArtifactLocation.create_artifact_for_s3(
            None,
            name="foo",
            path="path/to",
            key="key")
        self.assertEqual(artifact.name, "foo")
        self.assertEqual(artifact.path, "path/to")

    def test_create_artifact_for_s3(self):
        artifact_location = ArtifactLocation.s3(
            bucket="foo",
            endpoint="s3.amazonaws.com",
            insecure=False,
            region="ap-southeast-1",
            access_key_secret={"name": "s3-secret", "key": "accesskey"},
            secret_key_secret=V1SecretKeySelector(name="s3-secret", key="secretkey")
        )
        artifact = ArtifactLocation.create_artifact_for_s3(
            artifact_location,
            name="foo",
            path="path/to",
            key="key")
        self.assertEqual(artifact.name, "foo")
        self.assertEqual(artifact.path, "path/to")
        self.assertEqual(artifact.s3.endpoint, "s3.amazonaws.com")
        self.assertEqual(artifact.s3.bucket, "foo")
        self.assertEqual(artifact.s3.key, "key")
        self.assertEqual(artifact.s3.access_key_secret.name, "s3-secret")
        self.assertEqual(artifact.s3.access_key_secret.key, "accesskey")
        self.assertEqual(artifact.s3.secret_key_secret.name, "s3-secret")
        self.assertEqual(artifact.s3.secret_key_secret.key, "secretkey")

    def test_create_artifact_for_s3_with_dict(self):
        # use convert_k8s_obj_to_json to mimic the compiler
        artifact_location_dict = convert_k8s_obj_to_json(ArtifactLocation.s3(
            bucket="foo",
            endpoint="s3.amazonaws.com",
            insecure=False,
            region="ap-southeast-1",
            access_key_secret={"name": "s3-secret", "key": "accesskey"},
            secret_key_secret=V1SecretKeySelector(name="s3-secret", key="secretkey")
        ))
        artifact = ArtifactLocation.create_artifact_for_s3(
            artifact_location_dict,
            name="foo",
            path="path/to",
            key="key")
        self.assertEqual(artifact.name, "foo")
        self.assertEqual(artifact.path, "path/to")
        self.assertEqual(artifact.s3.endpoint, "s3.amazonaws.com")
        self.assertEqual(artifact.s3.bucket, "foo")
        self.assertEqual(artifact.s3.key, "key")
        self.assertEqual(artifact.s3.access_key_secret.name, "s3-secret")
        self.assertEqual(artifact.s3.access_key_secret.key, "accesskey")
        self.assertEqual(artifact.s3.secret_key_secret.name, "s3-secret")
        self.assertEqual(artifact.s3.secret_key_secret.key, "secretkey")

@@ -16,7 +16,6 @@
import unittest
import sys
import artifact_location_tests
import aws_extensions_tests
import pipeline_tests
import pipeline_param_tests
@@ -34,7 +33,6 @@ import extensions.test_kubernetes as test_kubernetes
if __name__ == '__main__':
    suite = unittest.TestSuite()
    suite.addTests(unittest.defaultTestLoader.loadTestsFromModule(artifact_location_tests))
    suite.addTests(unittest.defaultTestLoader.loadTestsFromModule(aws_extensions_tests))
    suite.addTests(unittest.defaultTestLoader.loadTestsFromModule(pipeline_param_tests))
    suite.addTests(unittest.defaultTestLoader.loadTestsFromModule(pipeline_tests))

@@ -74,7 +74,6 @@ spec:
- execution_order
- imagepullsecrets
- retry
- artifact_location
- preemptible_tpu_gpu
- volume_snapshot_ops
- loop_output