Compare commits
268 Commits
sdk-2.11.0
...
master
Author | SHA1 | Date |
---|---|---|
|
04407fbe93 | |
|
ada935a0ad | |
|
e8e23f0d7a | |
|
b7f1d279d8 | |
|
a603d40812 | |
|
7f60100602 | |
|
268e0898ec | |
|
a123d53be0 | |
|
99326e1bd1 | |
|
c3d05eb0b1 | |
|
c09b635a6b | |
|
4dc64056b4 | |
|
9b252ff4fb | |
|
8bbdd6e81f | |
|
a30e9ead10 | |
|
f240685bf3 | |
|
3154ef9258 | |
|
88870823fa | |
|
da358e5176 | |
|
b9a01c0e40 | |
|
5181358d6a | |
|
11000f5fab | |
|
aadf9e3cdf | |
|
cfa47fecbc | |
|
40a9ad4823 | |
|
54475637a2 | |
|
441a465d19 | |
|
b4772693ae | |
|
b306d8d6d6 | |
|
2b04d2f896 | |
|
cc7830812a | |
|
0d857b6f8a | |
|
faa2c8cd16 | |
|
8f5c277d9b | |
|
48468ae1fa | |
|
ea20731060 | |
|
dda6033a03 | |
|
7ba495fcc3 | |
|
b87b521fe9 | |
|
8b0c022b33 | |
|
f342504655 | |
|
73b58e4f16 | |
|
4c966f7773 | |
|
8fe090d461 | |
|
9fdbe980c8 | |
|
beae62fb52 | |
|
53bb3a0aad | |
|
1b0e6535b5 | |
|
6c32514c35 | |
|
8329e64716 | |
|
732a3f26f5 | |
|
d45b5810eb | |
|
3337b5e323 | |
|
67f9b7d73c | |
|
9761a31ec3 | |
|
e329fa39b6 | |
|
dc841dde6f | |
|
7342f2b737 | |
|
4f09f01090 | |
|
c368ac6881 | |
|
d6c864f038 | |
|
0031766201 | |
|
2a175883ee | |
|
5bc1a9bed1 | |
|
943643b413 | |
|
988477a7de | |
|
508fc9dd8f | |
|
ef94ccd734 | |
|
18bed6c70d | |
|
e276474f97 | |
|
0e7e806b4c | |
|
9aebb62be1 | |
|
0010b06731 | |
|
9e4f26ce74 | |
|
9245739f6f | |
|
e5cb3023b1 | |
|
4503eae10d | |
|
ed828b513a | |
|
70d28885f2 | |
|
24782d178d | |
|
56da004d91 | |
|
c68640d950 | |
|
c03127d967 | |
|
d90e4e8a54 | |
|
2b94ee6101 | |
|
8261e4af70 | |
|
27ba5ff671 | |
|
3ab625c645 | |
|
8a402c10a8 | |
|
fe51dfd792 | |
|
64e8264352 | |
|
0359551b76 | |
|
fb182355f0 | |
|
90909fc0ef | |
|
7529bbeba7 | |
|
e21bbbaf22 | |
|
93675b03d4 | |
|
5c868d40ad | |
|
3f6a93b28d | |
|
f63c7c530d | |
|
f3e3fac699 | |
|
d38418efea | |
|
611d582004 | |
|
f9d487cb60 | |
|
05581ccefc | |
|
e696472a5b | |
|
924b13c98c | |
|
598826e1cc | |
|
a4f07231a7 | |
|
177cd3b3e7 | |
|
aa95142f5a | |
|
88cff55914 | |
|
866ff3556a | |
|
cc56d04c46 | |
|
eee4986f18 | |
|
6877d16eac | |
|
69ba50b3fb | |
|
92e4921c4c | |
|
478ca08901 | |
|
5a3dc8473e | |
|
464ca3974f | |
|
38a46533fc | |
|
1ad4f608a0 | |
|
9544293af3 | |
|
6e3548f33e | |
|
a680e2230c | |
|
503beb51a3 | |
|
eacb586f62 | |
|
cd3e747b5d | |
|
f10c7bfbbc | |
|
7c90446966 | |
|
c5b9e4d21e | |
|
bb7a1082c4 | |
|
230c1b8f13 | |
|
564522c42d | |
|
bd4fc5c667 | |
|
048f28332b | |
|
ade8a2d072 | |
|
c9be64dca3 | |
|
97e57368d1 | |
|
556c98ae4e | |
|
9605c08026 | |
|
ec727fd5f3 | |
|
ae7c5243c5 | |
|
35041ef2bd | |
|
596ec90bb8 | |
|
3fa617f19c | |
|
8ffcc2ccb0 | |
|
ea9a8f2344 | |
|
2efcde5efd | |
|
e9f5b5aee2 | |
|
fd1b48b471 | |
|
95bd63ef15 | |
|
715ed40b92 | |
|
c46c1d2cf9 | |
|
01999b8fea | |
|
95c3f2c04d | |
|
c5aba41bca | |
|
2694605996 | |
|
d1b15ef4da | |
|
06a7350191 | |
|
3e7d499236 | |
|
12f4676b4c | |
|
df4e9c2bf5 | |
|
11e84d0caa | |
|
8d0ae5381e | |
|
d502687061 | |
|
0d9a7b00e9 | |
|
3a89bd8564 | |
|
03453bc3b5 | |
|
afb3b1461b | |
|
89c8bd7274 | |
|
30210e33bf | |
|
1956d69968 | |
|
b5c204300b | |
|
a7ec34f571 | |
|
78675b0d7f | |
|
7838009538 | |
|
13b819424e | |
|
7d8e9211f6 | |
|
355f78c51b | |
|
976fba871f | |
|
30d7c397e6 | |
|
c8fe908ba8 | |
|
0a94ac418d | |
|
eb7286d859 | |
|
0afb12d6a7 | |
|
9afe23e748 | |
|
ebaaf75631 | |
|
a6b944b894 | |
|
f7c0616db7 | |
|
d2c0376b0a | |
|
1c4f676d94 | |
|
7719b38061 | |
|
cc1c435f1e | |
|
c0778ba88c | |
|
8ca7ec1768 | |
|
b1315667be | |
|
87498e8b60 | |
|
472f8779de | |
|
7bb0c448cd | |
|
94eca2102c | |
|
22c372437d | |
|
a1f3262f37 | |
|
ba22703263 | |
|
dcaf5a48e5 | |
|
65d1d79fb7 | |
|
6a13f4bad0 | |
|
926aec55d4 | |
|
51c776c745 | |
|
c100648fa8 | |
|
42fc132616 | |
|
a40163fdf2 | |
|
0178d41bf2 | |
|
af4540e7d3 | |
|
6cb7cf71fa | |
|
32bdbe8dbc | |
|
18641e16cb | |
|
d3a016dd64 | |
|
cd66b6965c | |
|
9c5b72c2d0 | |
|
915cc552f5 | |
|
ac9b257a7a | |
|
ce3850ad9a | |
|
37a7b4ecb9 | |
|
a81b51339c | |
|
1234c8d6fe | |
|
d2ddb2ed1c | |
|
113a4c6788 | |
|
dbefbb8ee9 | |
|
a469b10806 | |
|
8fe21574c6 | |
|
7497b65067 | |
|
906b5c0841 | |
|
76ce3226ec | |
|
3e423d8d1c | |
|
a0e24069e3 | |
|
954145b877 | |
|
83791e7703 | |
|
027ca8b9c9 | |
|
4517cbe872 | |
|
533a3c6b66 | |
|
682d3aca5f | |
|
dc97ce77a7 | |
|
8bce9c4ef6 | |
|
028d81b624 | |
|
b4ecbabbba | |
|
56e6116d05 | |
|
81ebd7ff9b | |
|
3059f7c124 | |
|
a40be7b569 | |
|
7c931ae201 | |
|
e89d2d5f2d | |
|
d21fca650c | |
|
2686e017ce | |
|
72f11d9801 | |
|
873e9dedd7 | |
|
2ebb853fd7 | |
|
9c6ec0f4f7 | |
|
2a77a89e34 | |
|
f3cb244198 | |
|
803d7a8ebb | |
|
97acacbd2a | |
|
54b9a253da | |
|
399a597185 | |
|
cb07619ec6 | |
|
e71825abe6 | |
|
0eb67e1f7b |
272
.cloudbuild.yaml
272
.cloudbuild.yaml
|
@ -1,272 +0,0 @@
|
|||
# Copyright 2018 The Kubeflow Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# Test before submit:
|
||||
# gcloud builds submit --config=.cloudbuild.yaml --substitutions=COMMIT_SHA="$(git rev-parse HEAD)" --project=ml-pipeline-test
|
||||
|
||||
steps:
|
||||
|
||||
# # Build the Python SDK
|
||||
# - name: 'python:3-alpine'
|
||||
# entrypoint: '/bin/sh'
|
||||
# args: ['-c', 'cd /workspace/sdk/python/; python3 setup.py sdist --format=gztar; cp dist/*.tar.gz /workspace/kfp.tar.gz']
|
||||
# id: 'preparePythonSDK'
|
||||
# waitFor: ["-"]
|
||||
# - name: 'gcr.io/cloud-builders/gsutil'
|
||||
# args: ['cp', '/workspace/kfp.tar.gz', 'gs://$PROJECT_ID/builds/$COMMIT_SHA/kfp.tar.gz']
|
||||
# id: 'copyPythonSDK'
|
||||
# waitFor: ['preparePythonSDK']
|
||||
# - name: 'gcr.io/cloud-builders/gsutil'
|
||||
# args: ['cp', '/workspace/kfp.tar.gz', 'gs://$PROJECT_ID/builds/latest/kfp.tar.gz']
|
||||
# id: 'copyPythonSDKToLatest'
|
||||
# waitFor: ['preparePythonSDK']
|
||||
|
||||
# Build the pipeline system images
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: /bin/bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
sed -i -e "s/ARG DATE/ENV DATE \"$(date -u)\"/" /workspace/frontend/Dockerfile
|
||||
docker build -t gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA \
|
||||
--build-arg COMMIT_HASH=$COMMIT_SHA \
|
||||
--build-arg TAG_NAME="$(cat /workspace/VERSION)" \
|
||||
-f /workspace/frontend/Dockerfile \
|
||||
/workspace
|
||||
id: 'buildFrontend'
|
||||
waitFor: ['-']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: /bin/bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker build -t gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA \
|
||||
--build-arg COMMIT_SHA=$COMMIT_SHA \
|
||||
--build-arg TAG_NAME="$(cat /workspace/VERSION)" \
|
||||
-f /workspace/backend/Dockerfile /workspace
|
||||
id: 'buildApiServer'
|
||||
waitFor: ['-']
|
||||
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['build', '-t', 'gcr.io/$PROJECT_ID/scheduledworkflow:$COMMIT_SHA', '-f',
|
||||
'/workspace/backend/Dockerfile.scheduledworkflow', '/workspace']
|
||||
id: 'buildScheduledWorkflow'
|
||||
waitFor: ["-"]
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['build', '-t', 'gcr.io/$PROJECT_ID/viewer-crd-controller:$COMMIT_SHA', '-f',
|
||||
'/workspace/backend/Dockerfile.viewercontroller', '/workspace']
|
||||
id: 'buildViewerCrdController'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['build', '-t', 'gcr.io/$PROJECT_ID/persistenceagent:$COMMIT_SHA', '-f',
|
||||
'/workspace/backend/Dockerfile.persistenceagent', '/workspace']
|
||||
id: 'buildPersistenceAgent'
|
||||
waitFor: ["-"]
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['build', '-t', 'gcr.io/$PROJECT_ID/inverse-proxy-agent:$COMMIT_SHA', '-f',
|
||||
'/workspace/proxy/Dockerfile', '/workspace/proxy']
|
||||
id: 'buildInverseProxyAgent'
|
||||
waitFor: ["-"]
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['build', '-t', 'gcr.io/$PROJECT_ID/visualization-server:$COMMIT_SHA',
|
||||
'--build-arg', 'COMMIT_HASH=$COMMIT_SHA', '-f',
|
||||
'/workspace/backend/Dockerfile.visualization', '/workspace']
|
||||
id: 'buildVisualizationServer'
|
||||
waitFor: ["-"]
|
||||
- id: 'buildMetadataWriter'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['build', '-t', 'gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA',
|
||||
'--build-arg', 'COMMIT_HASH=$COMMIT_SHA', '-f',
|
||||
'/workspace/backend/metadata_writer/Dockerfile', '/workspace']
|
||||
waitFor: ["-"]
|
||||
- id: 'buildCacheServer'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['build', '-t', 'gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA',
|
||||
'--build-arg', 'COMMIT_HASH=$COMMIT_SHA', '-f',
|
||||
'/workspace/backend/Dockerfile.cacheserver', '/workspace']
|
||||
waitFor: ["-"]
|
||||
- id: 'buildCacheDeployer'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['build', '-t', 'gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA',
|
||||
'--build-arg', 'COMMIT_HASH=$COMMIT_SHA', '-f',
|
||||
'/workspace/backend/src/cache/deployer/Dockerfile', '/workspace']
|
||||
waitFor: ["-"]
|
||||
|
||||
# Build marketplace deployer
|
||||
- id: 'buildMarketplaceDeployer'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['build', '-t', 'gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA',
|
||||
'--build-arg', 'COMMIT_HASH=$COMMIT_SHA', '-f',
|
||||
'/workspace/manifests/gcp_marketplace/deployer/Dockerfile', '/workspace/manifests/gcp_marketplace']
|
||||
waitFor: ["-"]
|
||||
|
||||
# Build the Kubeflow-based pipeline component images
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['build', '-t', 'gcr.io/$PROJECT_ID/ml-pipeline-kubeflow-deployer:$COMMIT_SHA',
|
||||
'/workspace/components/kubeflow/deployer']
|
||||
id: 'buildDeployer'
|
||||
waitFor: ["-"]
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: '/bin/bash'
|
||||
args: ['-c', 'cd /workspace/components/kubeflow/launcher && ./build_image.sh -p $PROJECT_ID -t $COMMIT_SHA']
|
||||
id: 'buildTFJobLauncher'
|
||||
waitFor: ["-"]
|
||||
- id: 'buildCpuTrainer'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: '/bin/bash'
|
||||
args: ['-c', 'cd /workspace/components/kubeflow/dnntrainer && ./build_image.sh -p $PROJECT_ID -t $COMMIT_SHA -l ml-pipeline-kubeflow-tf-trainer -b 2.3.0']
|
||||
waitFor: ["-"]
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: '/bin/bash'
|
||||
args: ['-c', 'cd /workspace/components/kubeflow/dnntrainer && ./build_image.sh -p $PROJECT_ID -t $COMMIT_SHA -l ml-pipeline-kubeflow-tf-trainer-gpu -b 2.3.0-gpu']
|
||||
id: 'buildGpuTrainer'
|
||||
waitFor: ["-"]
|
||||
|
||||
# Build the local pipeline component images
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: '/bin/bash'
|
||||
args: ['-c', 'cd /workspace/components/local/confusion_matrix && ./build_image.sh -p $PROJECT_ID -t $COMMIT_SHA']
|
||||
id: 'buildConfusionMatrix'
|
||||
waitFor: ["-"]
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: '/bin/bash'
|
||||
args: ['-c', 'cd /workspace/components/local/roc && ./build_image.sh -p $PROJECT_ID -t $COMMIT_SHA']
|
||||
id: 'buildROC'
|
||||
waitFor: ["-"]
|
||||
|
||||
# Build third_party images
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['build', '-t', 'gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA', '-f',
|
||||
'/workspace/third_party/metadata_envoy/Dockerfile', '/workspace']
|
||||
id: 'buildMetadataEnvoy'
|
||||
|
||||
# Pull third_party images
|
||||
# ! Sync to the same MLMD version:
|
||||
# * backend/metadata_writer/requirements.in and requirements.txt
|
||||
# * @kubeflow/frontend/src/mlmd/generated
|
||||
# * .cloudbuild.yaml and .release.cloudbuild.yaml
|
||||
# * manifests/kustomize/base/metadata/base/metadata-grpc-deployment.yaml
|
||||
# * test/tag_for_hosted.sh
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/tfx-oss-public/ml_metadata_store_server:1.14.0']
|
||||
id: 'pullMetadataServer'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/ml-pipeline/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance']
|
||||
id: 'pullMinio'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/ml-pipeline/mysql:8.0.26']
|
||||
id: 'pullMysql'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/cloudsql-docker/gce-proxy:1.25.0']
|
||||
id: 'pullCloudsqlProxy'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/ml-pipeline/argoexec:v3.4.17-license-compliance']
|
||||
id: 'pullArgoExecutor'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/ml-pipeline/workflow-controller:v3.4.17-license-compliance']
|
||||
id: 'pullArgoWorkflowController'
|
||||
|
||||
# V2 related images
|
||||
# Prerequisite: Make sure ko image is available on the same project by running the following:
|
||||
# git clone https://github.com/GoogleCloudPlatform/cloud-builders-community.git
|
||||
# cd cloud-builders-community/ko
|
||||
# gcloud builds submit . --config=cloudbuild.yaml --project=$PROJECT_ID
|
||||
# Reference: https://dev.to/amammay/effective-go-on-gcp-lean-containers-with-ko-on-cloud-build-51ek
|
||||
|
||||
# Temporarily disable v2 image build due to unblock kubeflow-pipeline-mkp-test
|
||||
# We aren't building v2 images for MKP at this moment anyway.
|
||||
#
|
||||
# - name: 'gcr.io/$PROJECT_ID/ko'
|
||||
# entrypoint: /bin/sh
|
||||
# args:
|
||||
# - -c
|
||||
# - |
|
||||
# cd /workspace/backend/src/v2/
|
||||
# /ko publish --bare ./cmd/launcher-v2 -t $COMMIT_SHA
|
||||
# env:
|
||||
# - 'KO_DOCKER_REPO=gcr.io/$PROJECT_ID/kfp-launcher'
|
||||
# id: 'buildLauncher'
|
||||
# waitFor: ["-"]
|
||||
# - name: 'gcr.io/$PROJECT_ID/ko'
|
||||
# entrypoint: /bin/sh
|
||||
# args:
|
||||
# - -c
|
||||
# - |
|
||||
# cd /workspace/backend/src/v2/
|
||||
# /ko publish --bare ./cmd/driver -t $COMMIT_SHA
|
||||
# env:
|
||||
# - 'KO_DOCKER_REPO=gcr.io/$PROJECT_ID/kfp-driver'
|
||||
# id: 'buildDriver'
|
||||
# waitFor: ["-"]
|
||||
|
||||
# Tag for Hosted - SemVersion to Major.Minor parsing
|
||||
- id: "parseMajorMinorVersion"
|
||||
waitFor: ["-"]
|
||||
name: gcr.io/cloud-builders/docker
|
||||
entrypoint: /bin/bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
# Parse major minor version and save to a file for reusing in other steps.
|
||||
# e.g. 1.0.0-rc.1 and 1.0.1 are parsed as 1.0
|
||||
cat /workspace/VERSION | sed -e "s#\([0-9]\+[.][0-9]\+\)[.].*#\1#" > /workspace/mm.ver
|
||||
|
||||
# Tag for Hosted - Tag to hosted folder with MKP friendly name
|
||||
- id: 'tagForHosted'
|
||||
waitFor: ['parseMajorMinorVersion', 'buildFrontend', 'buildApiServer', 'buildScheduledWorkflow',
|
||||
'buildViewerCrdController', 'buildPersistenceAgent', 'buildInverseProxyAgent', 'buildVisualizationServer',
|
||||
'buildMetadataWriter', 'buildCacheServer', 'buildCacheDeployer', 'buildMetadataEnvoy',
|
||||
'buildMarketplaceDeployer', 'pullMetadataServer', 'pullMinio', 'pullMysql', 'pullCloudsqlProxy',
|
||||
'pullArgoExecutor', 'pullArgoWorkflowController']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
/workspace/test/tag_for_hosted.sh $PROJECT_ID $COMMIT_SHA $(cat /workspace/VERSION) $(cat /workspace/mm.ver)
|
||||
|
||||
images:
|
||||
# Images for the pipeline system itself
|
||||
- 'gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA'
|
||||
- 'gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA'
|
||||
- 'gcr.io/$PROJECT_ID/scheduledworkflow:$COMMIT_SHA'
|
||||
- 'gcr.io/$PROJECT_ID/persistenceagent:$COMMIT_SHA'
|
||||
- 'gcr.io/$PROJECT_ID/viewer-crd-controller:$COMMIT_SHA'
|
||||
- 'gcr.io/$PROJECT_ID/inverse-proxy-agent:$COMMIT_SHA'
|
||||
- 'gcr.io/$PROJECT_ID/visualization-server:$COMMIT_SHA'
|
||||
- 'gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA'
|
||||
- 'gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA'
|
||||
- 'gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA'
|
||||
|
||||
# Images for Marketplace
|
||||
- 'gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA'
|
||||
|
||||
# Images for the Kubeflow-based pipeline components
|
||||
- 'gcr.io/$PROJECT_ID/ml-pipeline-kubeflow-deployer:$COMMIT_SHA'
|
||||
- 'gcr.io/$PROJECT_ID/ml-pipeline-kubeflow-tf-trainer:$COMMIT_SHA'
|
||||
- 'gcr.io/$PROJECT_ID/ml-pipeline-kubeflow-tf-trainer-gpu:$COMMIT_SHA'
|
||||
- 'gcr.io/$PROJECT_ID/ml-pipeline-kubeflow-tfjob:$COMMIT_SHA'
|
||||
|
||||
# Images for the local components
|
||||
- 'gcr.io/$PROJECT_ID/ml-pipeline-local-confusion-matrix:$COMMIT_SHA'
|
||||
- 'gcr.io/$PROJECT_ID/ml-pipeline-local-roc:$COMMIT_SHA'
|
||||
|
||||
# Images for the third_party components
|
||||
- 'gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA'
|
||||
|
||||
timeout: '3600s'
|
||||
options:
|
||||
diskSizeGb: 300
|
||||
machineType: 'N1_HIGHCPU_8'
|
||||
tags:
|
||||
- build-each-commit
|
|
@ -1,5 +1,7 @@
|
|||
approvers:
|
||||
- hbelmiro
|
||||
- DharmitD
|
||||
- mprahl
|
||||
reviewers:
|
||||
- rimolive
|
||||
- droctothorpe
|
||||
|
|
|
@ -1,6 +1,23 @@
|
|||
name: "Set up KFP on KinD"
|
||||
description: "Step to start and configure KFP on KinD"
|
||||
|
||||
inputs:
|
||||
k8s_version:
|
||||
description: "The Kubernetes version to use for the Kind cluster"
|
||||
required: true
|
||||
pipeline_store:
|
||||
description: "Flag to deploy KFP with K8s Native API"
|
||||
default: 'database'
|
||||
required: false
|
||||
proxy:
|
||||
description: "If KFP should be deployed with proxy configuration"
|
||||
required: false
|
||||
default: false
|
||||
cache_enabled:
|
||||
description: "If KFP should be deployed with cache enabled globally"
|
||||
required: false
|
||||
default: 'true'
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
|
@ -8,14 +25,36 @@ runs:
|
|||
uses: container-tools/kind-action@v2
|
||||
with:
|
||||
cluster_name: kfp
|
||||
kubectl_version: v1.29.2
|
||||
version: v0.22.0
|
||||
node_image: kindest/node:v1.29.2
|
||||
kubectl_version: ${{ inputs.k8s_version }}
|
||||
version: v0.25.0
|
||||
node_image: kindest/node:${{ inputs.k8s_version }}
|
||||
|
||||
- name: Deploy Squid
|
||||
id: deploy-squid
|
||||
if: ${{ inputs.proxy == 'true' }}
|
||||
shell: bash
|
||||
run: ./.github/resources/squid/deploy-squid.sh
|
||||
|
||||
- name: Build images
|
||||
shell: bash
|
||||
run: ./.github/resources/scripts/build-images.sh
|
||||
run: |
|
||||
if [ "${{ inputs.proxy }}" = "true" ]; then
|
||||
./.github/resources/scripts/build-images.sh --proxy
|
||||
else
|
||||
./.github/resources/scripts/build-images.sh
|
||||
fi
|
||||
|
||||
- name: Deploy KFP
|
||||
shell: bash
|
||||
run: ./.github/resources/scripts/deploy-kfp.sh
|
||||
run: |
|
||||
ARGS=""
|
||||
|
||||
if [ "${{ inputs.proxy }}" = "true" ]; then
|
||||
ARGS="${ARGS} --proxy"
|
||||
elif [ "${{inputs.cache_enabled }}" = "false" ]; then
|
||||
ARGS="${ARGS} --cache-disabled"
|
||||
elif [ "${{inputs.pipeline_store }}" = "kubernetes" ]; then
|
||||
ARGS="${ARGS} --deploy-k8s-native"
|
||||
fi
|
||||
|
||||
./.github/resources/scripts/deploy-kfp.sh $ARGS
|
||||
|
|
|
@ -1,21 +0,0 @@
|
|||
name: "Set up KFP Tekton on KinD"
|
||||
description: "Step to start and configure KFP on KinD"
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Create k8s Kind Cluster
|
||||
uses: container-tools/kind-action@v2
|
||||
with:
|
||||
cluster_name: kfp
|
||||
kubectl_version: v1.29.2
|
||||
version: v0.22.0
|
||||
node_image: kindest/node:v1.29.2
|
||||
|
||||
- name: Build images
|
||||
shell: bash
|
||||
run: ./.github/resources/scripts/build-images.sh
|
||||
|
||||
- name: Deploy KFP
|
||||
shell: bash
|
||||
run: ./.github/resources/scripts/deploy-kfp-tekton.sh
|
|
@ -0,0 +1,12 @@
|
|||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: ml-pipeline
|
||||
spec:
|
||||
template:
|
||||
spec:
|
||||
containers:
|
||||
- name: ml-pipeline-api-server
|
||||
env:
|
||||
- name: CACHEENABLED
|
||||
value: "false"
|
|
@ -0,0 +1,11 @@
|
|||
apiVersion: kustomize.config.k8s.io/v1beta1
|
||||
kind: Kustomization
|
||||
|
||||
resources:
|
||||
- ../no-proxy
|
||||
|
||||
patches:
|
||||
- path: cache-env.yaml
|
||||
target:
|
||||
kind: Deployment
|
||||
name: ml-pipeline
|
|
@ -2,18 +2,18 @@ apiVersion: kustomize.config.k8s.io/v1beta1
|
|||
kind: Kustomization
|
||||
|
||||
resources:
|
||||
- ../../../../manifests/kustomize/env/platform-agnostic
|
||||
- ../../../../../../manifests/kustomize/env/cert-manager/platform-agnostic-k8s-native
|
||||
|
||||
images:
|
||||
- name: gcr.io/ml-pipeline/api-server
|
||||
- name: ghcr.io/kubeflow/kfp-api-server
|
||||
newName: kind-registry:5000/apiserver
|
||||
newTag: latest
|
||||
- name: gcr.io/ml-pipeline/persistenceagent
|
||||
- name: ghcr.io/kubeflow/kfp-persistence-agent
|
||||
newName: kind-registry:5000/persistenceagent
|
||||
newTag: latest
|
||||
- name: gcr.io/ml-pipeline/scheduledworkflow
|
||||
- name: ghcr.io/kubeflow/kfp-scheduled-workflow-controller
|
||||
newName: kind-registry:5000/scheduledworkflow
|
||||
newTag: latest
|
||||
|
||||
patchesStrategicMerge:
|
||||
- overlays/apiserver-env.yaml
|
||||
- apiserver-env.yaml
|
|
@ -0,0 +1,16 @@
|
|||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: ml-pipeline
|
||||
spec:
|
||||
template:
|
||||
spec:
|
||||
containers:
|
||||
- name: ml-pipeline-api-server
|
||||
env:
|
||||
- name: V2_DRIVER_IMAGE
|
||||
value: kind-registry:5000/driver
|
||||
- name: V2_LAUNCHER_IMAGE
|
||||
value: kind-registry:5000/launcher
|
||||
- name: LOG_LEVEL
|
||||
value: "debug"
|
|
@ -0,0 +1,20 @@
|
|||
apiVersion: kustomize.config.k8s.io/v1beta1
|
||||
kind: Kustomization
|
||||
|
||||
resources:
|
||||
- ../../../../../../manifests/kustomize/env/platform-agnostic
|
||||
|
||||
images:
|
||||
- name: ghcr.io/kubeflow/kfp-api-server
|
||||
newName: kind-registry:5000/apiserver
|
||||
newTag: latest
|
||||
- name: ghcr.io/kubeflow/kfp-persistence-agent
|
||||
newName: kind-registry:5000/persistenceagent
|
||||
newTag: latest
|
||||
- name: ghcr.io/kubeflow/kfp-scheduled-workflow-controller
|
||||
newName: kind-registry:5000/scheduledworkflow
|
||||
newTag: latest
|
||||
|
||||
patches:
|
||||
- path: apiserver-env.yaml
|
||||
- path: workflow-disable-logs-patch.yaml
|
20
.github/resources/manifests/argo/overlays/no-proxy/workflow-disable-logs-patch.yaml
vendored
Normal file
20
.github/resources/manifests/argo/overlays/no-proxy/workflow-disable-logs-patch.yaml
vendored
Normal file
|
@ -0,0 +1,20 @@
|
|||
apiVersion: v1
|
||||
kind: ConfigMap
|
||||
metadata:
|
||||
name: workflow-controller-configmap
|
||||
data:
|
||||
artifactRepository: |
|
||||
archiveLogs: false
|
||||
s3:
|
||||
endpoint: "minio-service.$(kfp-namespace):9000"
|
||||
bucket: "$(kfp-artifact-bucket-name)"
|
||||
keyFormat: "artifacts/{{workflow.name}}/{{workflow.creationTimestamp.Y}}/{{workflow.creationTimestamp.m}}/{{workflow.creationTimestamp.d}}/{{pod.name}}"
|
||||
insecure: true
|
||||
accessKeySecret:
|
||||
name: mlpipeline-minio-artifact
|
||||
key: accesskey
|
||||
secretKeySecret:
|
||||
name: mlpipeline-minio-artifact
|
||||
key: secretkey
|
||||
executor: |
|
||||
imagePullPolicy: IfNotPresent
|
|
@ -0,0 +1,11 @@
|
|||
apiVersion: kustomize.config.k8s.io/v1beta1
|
||||
kind: Kustomization
|
||||
|
||||
resources:
|
||||
- ../no-proxy
|
||||
|
||||
patches:
|
||||
- path: proxy-env.yaml
|
||||
target:
|
||||
kind: Deployment
|
||||
name: ml-pipeline
|
|
@ -0,0 +1,16 @@
|
|||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: ml-pipeline
|
||||
spec:
|
||||
template:
|
||||
spec:
|
||||
containers:
|
||||
- name: ml-pipeline-api-server
|
||||
env:
|
||||
- name: HTTP_PROXY
|
||||
value: "http://squid.squid.svc.cluster.local:3128"
|
||||
- name: HTTPS_PROXY
|
||||
value: "http://squid.squid.svc.cluster.local:3128"
|
||||
- name: NO_PROXY
|
||||
value: "localhost,127.0.0.1,.svc.cluster.local,kubernetes.default.svc,metadata-grpc-service,0,1,2,3,4,5,6,7,8,9"
|
|
@ -1,110 +0,0 @@
|
|||
apiVersion: kustomize.config.k8s.io/v1beta1
|
||||
kind: Kustomization
|
||||
|
||||
resources:
|
||||
- ../../../../manifests/kustomize/base/installs/generic
|
||||
- ../../../../manifests/kustomize/base/metadata/base
|
||||
- ../../../../manifests/kustomize/third-party/tekton/installs/cluster
|
||||
- ../../../../manifests/kustomize/third-party/tekton-custom-task
|
||||
- ../../../../manifests/kustomize/third-party/minio/base
|
||||
- ../../../../manifests/kustomize/third-party/mysql/base
|
||||
|
||||
# Identifier for application manager to apply ownerReference.
|
||||
# The ownerReference ensures the resources get garbage collected
|
||||
# when application is deleted.
|
||||
|
||||
images:
|
||||
- name: gcr.io/ml-pipeline/api-server
|
||||
newName: kind-registry:5000/apiserver
|
||||
newTag: latest
|
||||
- name: gcr.io/ml-pipeline/persistenceagent
|
||||
newName: kind-registry:5000/persistenceagent
|
||||
newTag: latest
|
||||
- name: gcr.io/ml-pipeline/scheduledworkflow
|
||||
newName: kind-registry:5000/scheduledworkflow
|
||||
newTag: latest
|
||||
- name: '*/aipipeline/tekton-exithandler-controller'
|
||||
newTag: latest
|
||||
- name: '*/aipipeline/tekton-exithandler-webhook'
|
||||
newTag: latest
|
||||
- name: '*/aipipeline/tekton-kfptask-controller'
|
||||
newTag: latest
|
||||
- name: '*/aipipeline/tekton-kfptask-webhook'
|
||||
newTag: latest
|
||||
|
||||
labels:
|
||||
- includeSelectors: true
|
||||
pairs:
|
||||
application-crd-id: kubeflow-pipelines
|
||||
|
||||
patches:
|
||||
- patch: |-
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: ml-pipeline
|
||||
spec:
|
||||
template:
|
||||
spec:
|
||||
containers:
|
||||
- name: ml-pipeline-api-server
|
||||
env:
|
||||
- name: EXECUTIONTYPE
|
||||
value: PipelineRun
|
||||
- patch: |-
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: ml-pipeline-persistenceagent
|
||||
spec:
|
||||
template:
|
||||
spec:
|
||||
containers:
|
||||
- name: ml-pipeline-persistenceagent
|
||||
env:
|
||||
- name: EXECUTIONTYPE
|
||||
value: PipelineRun
|
||||
- patch: |-
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: ml-pipeline-scheduledworkflow
|
||||
spec:
|
||||
template:
|
||||
spec:
|
||||
containers:
|
||||
- name: ml-pipeline-scheduledworkflow
|
||||
env:
|
||||
- name: EXECUTIONTYPE
|
||||
value: PipelineRun
|
||||
- patch: |-
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: ml-pipeline-ui
|
||||
spec:
|
||||
template:
|
||||
spec:
|
||||
containers:
|
||||
- name: ml-pipeline-ui
|
||||
env:
|
||||
- name: POD_LOG_CONTAINER_NAME
|
||||
value: step-user-main
|
||||
- patch: |-
|
||||
apiVersion: v1
|
||||
kind: PersistentVolumeClaim
|
||||
metadata:
|
||||
name: mysql-pv-claim
|
||||
spec:
|
||||
resources:
|
||||
requests:
|
||||
storage: 5Gi
|
||||
- patch: |-
|
||||
apiVersion: v1
|
||||
kind: PersistentVolumeClaim
|
||||
metadata:
|
||||
name: minio-pvc
|
||||
spec:
|
||||
resources:
|
||||
requests:
|
||||
storage: 5Gi
|
|
@ -25,35 +25,35 @@ EXIT_CODE=0
|
|||
|
||||
docker system prune -a -f
|
||||
|
||||
docker build -q -t "${REGISTRY}/apiserver:${TAG}" -f backend/Dockerfile . && docker push "${REGISTRY}/apiserver:${TAG}" || EXIT_CODE=$?
|
||||
docker build --progress=plain -t "${REGISTRY}/apiserver:${TAG}" -f backend/Dockerfile . && docker push "${REGISTRY}/apiserver:${TAG}" || EXIT_CODE=$?
|
||||
if [[ $EXIT_CODE -ne 0 ]]
|
||||
then
|
||||
echo "Failed to build apiserver image."
|
||||
exit $EXIT_CODE
|
||||
fi
|
||||
|
||||
docker build -q -t "${REGISTRY}/persistenceagent:${TAG}" -f backend/Dockerfile.persistenceagent . && docker push "${REGISTRY}/persistenceagent:${TAG}" || EXIT_CODE=$?
|
||||
docker build --progress=plain -t "${REGISTRY}/persistenceagent:${TAG}" -f backend/Dockerfile.persistenceagent . && docker push "${REGISTRY}/persistenceagent:${TAG}" || EXIT_CODE=$?
|
||||
if [[ $EXIT_CODE -ne 0 ]]
|
||||
then
|
||||
echo "Failed to build persistenceagent image."
|
||||
exit $EXIT_CODE
|
||||
fi
|
||||
|
||||
docker build -q -t "${REGISTRY}/scheduledworkflow:${TAG}" -f backend/Dockerfile.scheduledworkflow . && docker push "${REGISTRY}/scheduledworkflow:${TAG}" || EXIT_CODE=$?
|
||||
docker build --progress=plain -t "${REGISTRY}/scheduledworkflow:${TAG}" -f backend/Dockerfile.scheduledworkflow . && docker push "${REGISTRY}/scheduledworkflow:${TAG}" || EXIT_CODE=$?
|
||||
if [[ $EXIT_CODE -ne 0 ]]
|
||||
then
|
||||
echo "Failed to build scheduledworkflow image."
|
||||
exit $EXIT_CODE
|
||||
fi
|
||||
|
||||
docker build -q -t "${REGISTRY}/driver:${TAG}" -f backend/Dockerfile.driver . && docker push "${REGISTRY}/driver:${TAG}" || EXIT_CODE=$?
|
||||
docker build --progress=plain -t "${REGISTRY}/driver:${TAG}" -f backend/Dockerfile.driver . && docker push "${REGISTRY}/driver:${TAG}" || EXIT_CODE=$?
|
||||
if [[ $EXIT_CODE -ne 0 ]]
|
||||
then
|
||||
echo "Failed to build driver image."
|
||||
exit $EXIT_CODE
|
||||
fi
|
||||
|
||||
docker build -q -t "${REGISTRY}/launcher:${TAG}" -f backend/Dockerfile.launcher . && docker push "${REGISTRY}/launcher:${TAG}" || EXIT_CODE=$?
|
||||
docker build --progress=plain -t "${REGISTRY}/launcher:${TAG}" -f backend/Dockerfile.launcher . && docker push "${REGISTRY}/launcher:${TAG}" || EXIT_CODE=$?
|
||||
if [[ $EXIT_CODE -ne 0 ]]
|
||||
then
|
||||
echo "Failed to build launcher image."
|
||||
|
|
|
@ -0,0 +1,65 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
NS=""
|
||||
OUTPUT_FILE="/tmp/tmp.log/tmp_pod_log.txt"
|
||||
|
||||
while [[ "$#" -gt 0 ]]; do
|
||||
case $1 in
|
||||
--ns) NS="$2"; shift ;;
|
||||
--output) OUTPUT_FILE="$2"; shift ;;
|
||||
*) echo "Unknown parameter passed: $1"; exit 1 ;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
mkdir -p /tmp/tmp.log
|
||||
|
||||
if [[ -z "$NS" ]]; then
|
||||
echo "Both --ns parameters are required."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
function check_namespace {
|
||||
if ! kubectl get namespace "$1" &>/dev/null; then
|
||||
echo "Namespace '$1' does not exist."
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function display_pod_info {
|
||||
local NAMESPACE=$1
|
||||
|
||||
kubectl get pods -n "${NAMESPACE}"
|
||||
|
||||
local POD_NAMES
|
||||
|
||||
POD_NAMES=$(kubectl get pods -n "${NAMESPACE}" -o custom-columns=":metadata.name" --no-headers)
|
||||
|
||||
if [[ -z "${POD_NAMES}" ]]; then
|
||||
echo "No pods found in namespace '${NAMESPACE}'." | tee -a "$OUTPUT_FILE"
|
||||
return
|
||||
fi
|
||||
|
||||
echo "Pod Information for Namespace: ${NAMESPACE}" > "$OUTPUT_FILE"
|
||||
|
||||
for POD_NAME in ${POD_NAMES}; do
|
||||
{
|
||||
echo "===== Pod: ${POD_NAME} in ${NAMESPACE} ====="
|
||||
echo "----- EVENTS -----"
|
||||
kubectl describe pod "${POD_NAME}" -n "${NAMESPACE}" | grep -A 100 Events || echo "No events found for pod ${POD_NAME}."
|
||||
|
||||
echo "----- LOGS -----"
|
||||
kubectl logs "${POD_NAME}" -n "${NAMESPACE}" || echo "No logs found for pod ${POD_NAME}."
|
||||
|
||||
echo "==========================="
|
||||
echo ""
|
||||
} | tee -a "$OUTPUT_FILE"
|
||||
done
|
||||
|
||||
echo "Pod information stored in $OUTPUT_FILE"
|
||||
}
|
||||
|
||||
check_namespace "$NS"
|
||||
display_pod_info "$NS"
|
|
@ -1,59 +0,0 @@
|
|||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2024 kubeflow.org
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# Remove the x if you need no print out of each command
|
||||
set -e
|
||||
|
||||
REGISTRY="${REGISTRY:-kind-registry:5000}"
|
||||
EXIT_CODE=0
|
||||
|
||||
C_DIR="${BASH_SOURCE%/*}"
|
||||
if [[ ! -d "$C_DIR" ]]; then C_DIR="$PWD"; fi
|
||||
source "${C_DIR}/helper-functions.sh"
|
||||
|
||||
kubectl apply -k "manifests/kustomize/cluster-scoped-resources/"
|
||||
kubectl wait crd/applications.app.k8s.io --for condition=established --timeout=60s || EXIT_CODE=$?
|
||||
if [[ $EXIT_CODE -ne 0 ]]
|
||||
then
|
||||
echo "Failed to deploy cluster-scoped resources."
|
||||
exit $EXIT_CODE
|
||||
fi
|
||||
|
||||
# Deploy manifest
|
||||
TEST_MANIFESTS=".github/resources/manifests/tekton"
|
||||
kubectl apply -k "${TEST_MANIFESTS}" || EXIT_CODE=$?
|
||||
if [[ $EXIT_CODE -ne 0 ]]
|
||||
then
|
||||
echo "Deploy unsuccessful. Failure applying $KUSTOMIZE_DIR."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check if all pods are running - (10 minutes)
|
||||
wait_for_pods || EXIT_CODE=$?
|
||||
if [[ $EXIT_CODE -ne 0 ]]
|
||||
then
|
||||
echo "Deploy unsuccessful. Not all pods running."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
collect_artifacts kubeflow
|
||||
|
||||
echo "List Tekton control plane: "
|
||||
kubectl get pod -n tekton-pipelines
|
||||
collect_artifacts tekton-pipelines
|
||||
|
||||
echo "Finished kfp-tekton deployment."
|
||||
|
|
@ -24,6 +24,36 @@ C_DIR="${BASH_SOURCE%/*}"
|
|||
if [[ ! -d "$C_DIR" ]]; then C_DIR="$PWD"; fi
|
||||
source "${C_DIR}/helper-functions.sh"
|
||||
|
||||
TEST_MANIFESTS=".github/resources/manifests/argo"
|
||||
PIPELINES_STORE="database"
|
||||
USE_PROXY=false
|
||||
CACHE_DISABLED=false
|
||||
|
||||
# Loop over script arguments passed. This uses a single switch-case
|
||||
# block with default value in case we want to make alternative deployments
|
||||
# in the future.
|
||||
while [ "$#" -gt 0 ]; do
|
||||
case "$1" in
|
||||
--deploy-k8s-native)
|
||||
PIPELINES_STORE="kubernetes"
|
||||
shift
|
||||
;;
|
||||
--proxy)
|
||||
USE_PROXY=true
|
||||
shift
|
||||
;;
|
||||
--cache-disabled)
|
||||
CACHE_DISABLED=true
|
||||
shift
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [ "${USE_PROXY}" == "true" && "${PIPELINES_STORE}" == "kubernetes" ]; then
|
||||
echo "ERROR: Kubernetes Pipeline store cannot be deployed with proxy support."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
kubectl apply -k "manifests/kustomize/cluster-scoped-resources/"
|
||||
kubectl wait crd/applications.app.k8s.io --for condition=established --timeout=60s || EXIT_CODE=$?
|
||||
if [[ $EXIT_CODE -ne 0 ]]
|
||||
|
@ -32,8 +62,30 @@ then
|
|||
exit $EXIT_CODE
|
||||
fi
|
||||
|
||||
# Deploy manifest
|
||||
TEST_MANIFESTS=".github/resources/manifests/argo"
|
||||
# If pipelines store is set to 'kubernetes', cert-manager must be deployed
|
||||
if [ "${PIPELINES_STORE}" == "kubernetes" ]; then
|
||||
#Install cert-manager
|
||||
make -C ./backend install-cert-manager || EXIT_CODE=$?
|
||||
if [[ $EXIT_CODE -ne 0 ]]
|
||||
then
|
||||
echo "Failed to deploy cert-manager."
|
||||
exit $EXIT_CODE
|
||||
fi
|
||||
fi
|
||||
|
||||
# Manifests will be deployed according to the flag provided
|
||||
if $CACHE_DISABLED; then
|
||||
TEST_MANIFESTS="${TEST_MANIFESTS}/overlays/cache-disabled"
|
||||
elif $USE_PROXY; then
|
||||
TEST_MANIFESTS="${TEST_MANIFESTS}/overlays/proxy"
|
||||
elif [ "${PIPELINES_STORE}" == "kubernetes" ]; then
|
||||
TEST_MANIFESTS="${TEST_MANIFESTS}/overlays/kubernetes-native"
|
||||
else
|
||||
TEST_MANIFESTS="${TEST_MANIFESTS}/overlays/no-proxy"
|
||||
fi
|
||||
|
||||
echo "Deploying ${TEST_MANIFESTS}..."
|
||||
|
||||
kubectl apply -k "${TEST_MANIFESTS}" || EXIT_CODE=$?
|
||||
if [[ $EXIT_CODE -ne 0 ]]
|
||||
then
|
||||
|
@ -52,4 +104,3 @@ fi
|
|||
collect_artifacts kubeflow
|
||||
|
||||
echo "Finished KFP deployment."
|
||||
|
||||
|
|
|
@ -1,47 +0,0 @@
|
|||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2023 kubeflow.org
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# Remove the x if you need no print out of each command
|
||||
set -e
|
||||
|
||||
# Need the following env
|
||||
# - KUBEFLOW_NS: kubeflow namespace
|
||||
|
||||
KUBEFLOW_NS="${KUBEFLOW_NS:-kubeflow}"
|
||||
TEST_SCRIPT="${TEST_SCRIPT:="test-flip-coin.sh"}"
|
||||
|
||||
C_DIR="${BASH_SOURCE%/*}"
|
||||
if [[ ! -d "$C_DIR" ]]; then C_DIR="$PWD"; fi
|
||||
source "${C_DIR}/helper-functions.sh"
|
||||
|
||||
POD_NAME=$(kubectl get pod -n kubeflow -l app=ml-pipeline -o json | jq -r '.items[] | .metadata.name ')
|
||||
kubectl port-forward -n "$KUBEFLOW_NS" "$POD_NAME" 8888:8888 2>&1 > /dev/null &
|
||||
# wait for the port-forward
|
||||
sleep 5
|
||||
|
||||
if [ -n "$TEST_SCRIPT" ]; then
|
||||
source "${C_DIR}/${TEST_SCRIPT}"
|
||||
fi
|
||||
|
||||
kill %1
|
||||
|
||||
if [[ "$RESULT" -ne 0 ]]; then
|
||||
echo "e2e test ${STATUS_MSG}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "e2e test ${STATUS_MSG}"
|
||||
|
|
@ -3,6 +3,7 @@ import time
|
|||
import urllib3
|
||||
import sys
|
||||
from kubernetes import client, config
|
||||
import subprocess
|
||||
|
||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||
|
||||
|
@ -13,6 +14,17 @@ namespace = 'kubeflow'
|
|||
config.load_kube_config()
|
||||
v1 = client.CoreV1Api()
|
||||
|
||||
def log_pods():
|
||||
pods = v1.list_namespaced_pod(namespace=namespace)
|
||||
|
||||
for pod in pods.items:
|
||||
try:
|
||||
logging.info(
|
||||
f"---- Pod {namespace}/{pod.metadata.name} logs ----\n"
|
||||
+ v1.read_namespaced_pod_log(pod.metadata.name, namespace)
|
||||
)
|
||||
except client.exceptions.ApiException:
|
||||
continue
|
||||
|
||||
def get_pod_statuses():
|
||||
pods = v1.list_namespaced_pod(namespace=namespace)
|
||||
|
@ -42,6 +54,21 @@ def all_pods_ready(statuses):
|
|||
for pod_status, ready, total, _ in statuses.values())
|
||||
|
||||
|
||||
def print_get_pods():
|
||||
try:
|
||||
result = subprocess.run(
|
||||
['kubectl', 'get', 'pods', '-n', 'kubeflow'],
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
text=True,
|
||||
check=True
|
||||
|
||||
)
|
||||
return result.stdout
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(f"An error occurred while running kubectl get pods: {e.stderr}")
|
||||
|
||||
|
||||
def check_pods(calm_time=10, timeout=600, retries_after_ready=5):
|
||||
start_time = time.time()
|
||||
stable_count = 0
|
||||
|
@ -71,9 +98,12 @@ def check_pods(calm_time=10, timeout=600, retries_after_ready=5):
|
|||
stable_count = 0
|
||||
|
||||
previous_statuses = current_statuses
|
||||
logging.info(f"Pods are still stabilizing. Retrying in {calm_time} seconds...")
|
||||
pods = print_get_pods()
|
||||
logging.info(f"Pods are still stabilizing. Retrying in {calm_time} seconds...\n{pods}")
|
||||
time.sleep(calm_time)
|
||||
else:
|
||||
log_pods()
|
||||
|
||||
raise Exception("Pods did not stabilize within the timeout period.")
|
||||
|
||||
logging.info("Final pod statuses:")
|
||||
|
|
|
@ -1,37 +0,0 @@
|
|||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2023 kubeflow.org
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# source: https://raw.githubusercontent.com/open-toolchain/commons/master/scripts/check_registry.sh
|
||||
|
||||
# Remove the x if you need no print out of each command
|
||||
set -e
|
||||
|
||||
REGISTRY1="${REGISTRY1:-docker.io/aipipeline}"
|
||||
REGISTRY2="${REGISTRY2:-gcr.io/ml-pipeline}"
|
||||
TAG1="${TAG1:-latest}"
|
||||
TAG2="${TAG2:-latest}"
|
||||
|
||||
docker system prune -a -f
|
||||
|
||||
declare -a IMAGES=(apiserver persistenceagent scheduledworkflow tekton-driver)
|
||||
|
||||
for IMAGE in "${IMAGES[@]}"; do
|
||||
docker pull "${REGISTRY1}/${IMAGE}:${TAG1}"
|
||||
docker tag "${REGISTRY1}/${IMAGE}:${TAG1}" "${REGISTRY2}/${IMAGE}:${TAG2}"
|
||||
docker push "${REGISTRY2}/${IMAGE}:${TAG2}"
|
||||
done
|
||||
|
||||
# clean up intermittent build caches to free up disk space
|
||||
docker system prune -a -f
|
|
@ -1,27 +0,0 @@
|
|||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2023 kubeflow.org
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
C_DIR="${BASH_SOURCE%/*}"
|
||||
if [[ ! -d "$C_DIR" ]]; then C_DIR="$PWD"; fi
|
||||
source "${C_DIR}/test-pipeline.sh"
|
||||
|
||||
RESULT=0
|
||||
run_test_case "loop_output" "samples/core/loop_output/loop_output.py" "SUCCEEDED" 20 || RESULT=$?
|
||||
|
||||
STATUS_MSG=PASSED
|
||||
if [[ "$RESULT" -ne 0 ]]; then
|
||||
STATUS_MSG=FAILED
|
||||
fi
|
|
@ -1,33 +0,0 @@
|
|||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2023 kubeflow.org
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
KUBEFLOW_NS="${KUBEFLOW_NS:-kubeflow}"
|
||||
|
||||
C_DIR="${BASH_SOURCE%/*}"
|
||||
if [[ ! -d "$C_DIR" ]]; then C_DIR="$PWD"; fi
|
||||
source "${C_DIR}/test-pipeline.sh"
|
||||
|
||||
# need kfp-kubernetes for this test case
|
||||
# unfortunately, we can't install it from kubernetes_platform/python
|
||||
pip install kfp-kubernetes
|
||||
|
||||
RESULT=0
|
||||
run_test_case "use-env" "samples/v2/pipeline_with_env.py" "SUCCEEDED" 5 || RESULT=$?
|
||||
|
||||
STATUS_MSG=PASSED
|
||||
if [[ "$RESULT" -ne 0 ]]; then
|
||||
STATUS_MSG=FAILED
|
||||
fi
|
|
@ -1,27 +0,0 @@
|
|||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2023 kubeflow.org
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
C_DIR="${BASH_SOURCE%/*}"
|
||||
if [[ ! -d "$C_DIR" ]]; then C_DIR="$PWD"; fi
|
||||
source "${C_DIR}/test-pipeline.sh"
|
||||
|
||||
RESULT=0
|
||||
run_test_case "flip-coin" "samples/core/condition/condition_v2.py" "SUCCEEDED" 20 || RESULT=$?
|
||||
|
||||
STATUS_MSG=PASSED
|
||||
if [[ "$RESULT" -ne 0 ]]; then
|
||||
STATUS_MSG=FAILED
|
||||
fi
|
|
@ -1,132 +0,0 @@
|
|||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2023 kubeflow.org
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
retry() {
|
||||
local max=$1; shift
|
||||
local interval=$1; shift
|
||||
|
||||
until "$@"; do
|
||||
echo "trying.."
|
||||
max=$((max-1))
|
||||
if [[ "$max" -eq 0 ]]; then
|
||||
return 1
|
||||
fi
|
||||
sleep "$interval"
|
||||
done
|
||||
}
|
||||
|
||||
collect_pipeline_artifacts() {
|
||||
pipeline_uid=$1
|
||||
kubeflow_ns=kubeflow
|
||||
|
||||
local log_dir=$(mktemp -d)
|
||||
|
||||
#pods_kubeflow=$(kubectl get pods -n $kubeflow_ns --no-headers -o custom-columns=NAME:.metadata.name -l pipeline/runid=$pipeline_uid)
|
||||
pods_kubeflow=$(kubectl get pods -n $kubeflow_ns --no-headers -o custom-columns=NAME:.metadata.name)
|
||||
|
||||
echo "Collecting pod names for run $pipeline_uid"
|
||||
echo $pods_kubeflow > $log_dir/pods.log
|
||||
|
||||
echo "Collecting pod logs for run $pipeline_uid"
|
||||
for pod in $pods_kubeflow; do
|
||||
kubectl logs -n $kubeflow_ns $pod --all-containers=true > $log_dir/$pod.log
|
||||
done
|
||||
|
||||
echo "Collecting events for run $pipeline_uid"
|
||||
kubectl get events -n $kubeflow_ns > $log_dir/events.log
|
||||
|
||||
echo "Collection Tekton pipeline runs for run $pipeline_uid"
|
||||
kubectl get pipelineruns -n $kubeflow_ns -o yaml > $log_dir/pipelineruns.log
|
||||
|
||||
echo "Collection Tekton task runs for run $pipeline_uid"
|
||||
kubectl get taskruns -n $kubeflow_ns -o yaml > $log_dir/taskruns.log
|
||||
|
||||
echo "Collection Tekton kfptask runs for run $pipeline_uid"
|
||||
kubectl get kfptasks -n $kubeflow_ns -o yaml > $log_dir/kfptasks.log
|
||||
|
||||
echo "Collection Tekton custom runs for run $pipeline_uid"
|
||||
kubectl get customruns -n $kubeflow_ns -o yaml > $log_dir/customruns.log
|
||||
}
|
||||
|
||||
# compile the python to a pipeline yaml, upload the pipeline, create a run,
|
||||
# and wait until the run finishes.
|
||||
run_test_case() {
|
||||
if [[ $# -ne 4 ]]
|
||||
then
|
||||
echo "Usage: run_test_case test-case-name python-file condition-string wait-time"
|
||||
return 1
|
||||
fi
|
||||
local REV=1
|
||||
local TEST_CASE=$1
|
||||
shift
|
||||
local PY_FILE=$1
|
||||
shift
|
||||
local F_STATUS=$1
|
||||
shift
|
||||
local DURATION=$1
|
||||
shift
|
||||
local PIPELINE_ID
|
||||
local RUN_ID
|
||||
local KFP_COMMAND="kfp"
|
||||
local PIPELINE_NAME="${TEST_CASE}-$((RANDOM%10000+1))"
|
||||
local YAML_FILE=$(echo "${PY_FILE}" | sed "s/\.py$/\.yaml/")
|
||||
|
||||
echo " ===== ${TEST_CASE} ====="
|
||||
$KFP_COMMAND dsl compile --py "${PY_FILE}" --output "${YAML_FILE}"
|
||||
retry 3 3 $KFP_COMMAND --endpoint http://localhost:8888 pipeline create -p "$PIPELINE_NAME" "${YAML_FILE}" 2>&1 || :
|
||||
PIPELINE_ID=$($KFP_COMMAND --endpoint http://localhost:8888 pipeline list 2>&1| grep "$PIPELINE_NAME" | awk '{print $1}')
|
||||
if [[ -z "$PIPELINE_ID" ]]; then
|
||||
echo "Failed to upload pipeline"
|
||||
return "$REV"
|
||||
fi
|
||||
VERSION_ID=$($KFP_COMMAND --endpoint http://localhost:8888 pipeline list-versions "${PIPELINE_ID}" 2>&1| grep "$PIPELINE_NAME" | awk '{print $1}')
|
||||
|
||||
local RUN_NAME="${PIPELINE_NAME}-run"
|
||||
retry 3 3 $KFP_COMMAND --endpoint http://localhost:8888 run create -e "exp-${TEST_CASE}" -r "$RUN_NAME" -p "$PIPELINE_ID" -v "$VERSION_ID" 2>&1 || :
|
||||
RUN_ID=$($KFP_COMMAND --endpoint http://localhost:8888 run list 2>&1| grep "$RUN_NAME" | awk '{print $1}')
|
||||
if [[ -z "$RUN_ID" ]]; then
|
||||
echo "Failed to submit a run for ${TEST_CASE} pipeline"
|
||||
return "$REV"
|
||||
fi
|
||||
|
||||
local RUN_STATUS
|
||||
ENDTIME=$(date -ud "$DURATION minute" +%s)
|
||||
while [[ "$(date -u +%s)" -le "$ENDTIME" ]]; do
|
||||
RUN_STATUS=$($KFP_COMMAND --endpoint http://localhost:8888 run list 2>&1| grep "$RUN_NAME" | awk '{print $4}')
|
||||
if [[ "$RUN_STATUS" == "$F_STATUS" ]]; then
|
||||
REV=0
|
||||
break;
|
||||
fi
|
||||
echo " Status of ${TEST_CASE} run: $RUN_STATUS"
|
||||
if [[ "$RUN_STATUS" == "FAILED" ]]; then
|
||||
REV=1
|
||||
break;
|
||||
fi
|
||||
sleep 10
|
||||
done
|
||||
|
||||
if [[ "$REV" -eq 0 ]]; then
|
||||
echo " ===== ${TEST_CASE} PASSED ====="
|
||||
else
|
||||
echo " ===== ${TEST_CASE} FAILED ====="
|
||||
fi
|
||||
|
||||
collect_pipeline_artifacts $RUN_ID
|
||||
|
||||
echo 'y' | $KFP_COMMAND --endpoint http://localhost:8888 run delete "$RUN_ID" || :
|
||||
|
||||
return "$REV"
|
||||
}
|
|
@ -1,39 +0,0 @@
|
|||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2023 kubeflow.org
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
KUBEFLOW_NS="${KUBEFLOW_NS:-kubeflow}"
|
||||
|
||||
C_DIR="${BASH_SOURCE%/*}"
|
||||
if [[ ! -d "$C_DIR" ]]; then C_DIR="$PWD"; fi
|
||||
source "${C_DIR}/test-pipeline.sh"
|
||||
|
||||
# need kfp-kubernetes for this test case
|
||||
# unfortunately, we can't install it from kubernetes_platform/python
|
||||
pip install kfp-kubernetes
|
||||
|
||||
# create the secret
|
||||
kubectl create secret -n "$KUBEFLOW_NS" generic "user-gcp-sa" --from-literal="type=service_account" || true
|
||||
|
||||
RESULT=0
|
||||
run_test_case "secret-env" "samples/v2/pipeline_with_secret_as_env.py" "SUCCEEDED" 5 || RESULT=$?
|
||||
|
||||
# remove secret after the test finishes
|
||||
kubectl delete secret -n "$KUBEFLOW_NS" "user-gcp-sa"
|
||||
|
||||
STATUS_MSG=PASSED
|
||||
if [[ "$RESULT" -ne 0 ]]; then
|
||||
STATUS_MSG=FAILED
|
||||
fi
|
|
@ -1,39 +0,0 @@
|
|||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2023 kubeflow.org
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
KUBEFLOW_NS="${KUBEFLOW_NS:-kubeflow}"
|
||||
|
||||
C_DIR="${BASH_SOURCE%/*}"
|
||||
if [[ ! -d "$C_DIR" ]]; then C_DIR="$PWD"; fi
|
||||
source "${C_DIR}/test-pipeline.sh"
|
||||
|
||||
# need kfp-kubernetes for this test case
|
||||
# unfortunately, we can't install it from kubernetes_platform/python
|
||||
pip install kfp-kubernetes
|
||||
|
||||
# create the secret
|
||||
kubectl create secret -n "$KUBEFLOW_NS" generic "user-gcp-sa" --from-literal="type=service_account" || true
|
||||
|
||||
RESULT=0
|
||||
run_test_case "secret-volume" "samples/v2/pipeline_with_secret_as_volume.py" "SUCCEEDED" 5 || RESULT=$?
|
||||
|
||||
# remove secret after the test finishes
|
||||
kubectl delete secret -n "$KUBEFLOW_NS" "user-gcp-sa"
|
||||
|
||||
STATUS_MSG=PASSED
|
||||
if [[ "$RESULT" -ne 0 ]]; then
|
||||
STATUS_MSG=FAILED
|
||||
fi
|
|
@ -1,27 +0,0 @@
|
|||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2023 kubeflow.org
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
C_DIR="${BASH_SOURCE%/*}"
|
||||
if [[ ! -d "$C_DIR" ]]; then C_DIR="$PWD"; fi
|
||||
source "${C_DIR}/test-pipeline.sh"
|
||||
|
||||
RESULT=0
|
||||
run_test_case "static-loop" "samples/core/loop_static/loop_static.py" "SUCCEEDED" 20 || RESULT=$?
|
||||
|
||||
STATUS_MSG=PASSED
|
||||
if [[ "$RESULT" -ne 0 ]]; then
|
||||
STATUS_MSG=FAILED
|
||||
fi
|
|
@ -1,33 +0,0 @@
|
|||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2023 kubeflow.org
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
KUBEFLOW_NS="${KUBEFLOW_NS:-kubeflow}"
|
||||
|
||||
C_DIR="${BASH_SOURCE%/*}"
|
||||
if [[ ! -d "$C_DIR" ]]; then C_DIR="$PWD"; fi
|
||||
source "${C_DIR}/test-pipeline.sh"
|
||||
|
||||
# need kfp-kubernetes for this test case
|
||||
# unfortunately, we can't install it from kubernetes_platform/python
|
||||
pip install kfp-kubernetes
|
||||
|
||||
RESULT=0
|
||||
run_test_case "use-volume" "samples/v2/pipeline_with_volume.py" "SUCCEEDED" 10 || RESULT=$?
|
||||
|
||||
STATUS_MSG=PASSED
|
||||
if [[ "$RESULT" -ne 0 ]]; then
|
||||
STATUS_MSG=FAILED
|
||||
fi
|
|
@ -0,0 +1,10 @@
|
|||
FROM quay.io/fedora/fedora:41
|
||||
|
||||
RUN dnf install -y squid && \
|
||||
dnf clean all
|
||||
|
||||
COPY squid.conf /etc/squid/squid.conf
|
||||
|
||||
EXPOSE 3128
|
||||
|
||||
CMD ["squid", "-N", "-d", "1"]
|
|
@ -0,0 +1,16 @@
|
|||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
C_DIR="${BASH_SOURCE%/*}"
|
||||
NAMESPACE="squid"
|
||||
|
||||
docker build --progress=plain -t "registry.domain.local/squid:test" -f ${C_DIR}/Containerfile ${C_DIR}
|
||||
kind --name kfp load docker-image registry.domain.local/squid:test
|
||||
|
||||
kubectl apply -k ${C_DIR}/manifests
|
||||
|
||||
if ! kubectl -n ${NAMESPACE} wait --for=condition=available deployment/squid --timeout=60s; then
|
||||
echo "Timeout occurred while waiting for the Squid deployment."
|
||||
exit 1
|
||||
fi
|
|
@ -0,0 +1,30 @@
|
|||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: squid
|
||||
namespace: squid
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: squid
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: squid
|
||||
spec:
|
||||
containers:
|
||||
- name: squid
|
||||
image: registry.domain.local/squid:test
|
||||
ports:
|
||||
- containerPort: 3128
|
||||
volumeMounts:
|
||||
- name: squid-cache
|
||||
mountPath: /var/cache/squid
|
||||
- name: squid-log
|
||||
mountPath: /var/log/squid
|
||||
volumes:
|
||||
- name: squid-cache
|
||||
emptyDir: { }
|
||||
- name: squid-log
|
||||
emptyDir: { }
|
|
@ -0,0 +1,4 @@
|
|||
resources:
|
||||
- deployment.yaml
|
||||
- service.yaml
|
||||
- namespace.yaml
|
|
@ -0,0 +1,4 @@
|
|||
apiVersion: v1
|
||||
kind: Namespace
|
||||
metadata:
|
||||
name: squid
|
|
@ -0,0 +1,12 @@
|
|||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: squid
|
||||
namespace: squid
|
||||
spec:
|
||||
selector:
|
||||
app: squid
|
||||
ports:
|
||||
- protocol: TCP
|
||||
port: 3128
|
||||
targetPort: 3128
|
|
@ -0,0 +1,8 @@
|
|||
# Define an access control list (ACL) for all source IP addresses
|
||||
acl all src all
|
||||
|
||||
# Allow HTTP access from all sources
|
||||
http_access allow all
|
||||
|
||||
# Define the port Squid will listen on
|
||||
http_port 3128
|
|
@ -9,6 +9,8 @@ on:
|
|||
- '.github/workflows/backend-visualization.yml'
|
||||
- 'backend/src/apiserver/visualization/**'
|
||||
- 'test/presubmit-backend-visualization.sh'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
backend-visualization-test:
|
||||
|
|
|
@ -1,73 +0,0 @@
|
|||
name: KFP Tekton backend unit tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
|
||||
# Run tests for any PRs which change the backend code
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/backend.yml'
|
||||
- 'go.mod'
|
||||
- 'backend/**'
|
||||
- '.github/resources/**'
|
||||
- 'manifests/kustomize/**'
|
||||
|
||||
env:
|
||||
GITHUB_ACTION: "true"
|
||||
SETUPTOOLS_USE_DISTUTILS: "stdlib"
|
||||
|
||||
jobs:
|
||||
run-go-unittests:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v4
|
||||
with:
|
||||
go-version: 1.21.x
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
- name: "run go unit tests"
|
||||
run: go test -v -cover ./backend/...
|
||||
backend-integration:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
- name: Set up Python 3.9
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.9'
|
||||
- name: Install sdk
|
||||
run: |
|
||||
python3 -m venv .venv
|
||||
. .venv/bin/activate
|
||||
pip install -e sdk/python
|
||||
- name: Create KFP cluster
|
||||
uses: ./.github/actions/kfp-tekton-cluster
|
||||
- name: "flip coin test"
|
||||
run: |
|
||||
. .venv/bin/activate
|
||||
TEST_SCRIPT="test-flip-coin.sh" ./.github/resources/scripts/e2e-test.sh
|
||||
- name: "static loop test"
|
||||
run: |
|
||||
. .venv/bin/activate
|
||||
TEST_SCRIPT="test-static-loop.sh" ./.github/resources/scripts/e2e-test.sh
|
||||
- name: "dynamic loop test"
|
||||
run: |
|
||||
. .venv/bin/activate
|
||||
TEST_SCRIPT="test-dynamic-loop.sh" ./.github/resources/scripts/e2e-test.sh
|
||||
- name: "use env"
|
||||
run: |
|
||||
. .venv/bin/activate
|
||||
TEST_SCRIPT="test-env.sh" ./.github/resources/scripts/e2e-test.sh
|
||||
- name: "use volume"
|
||||
run: |
|
||||
. .venv/bin/activate
|
||||
TEST_SCRIPT="test-volume.sh" ./.github/resources/scripts/e2e-test.sh
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: kfp-tekton-backend-artifacts
|
||||
path: /tmp/tmp.*/*
|
|
@ -46,7 +46,7 @@ jobs:
|
|||
mkdir -p ./pr
|
||||
echo ${{ github.event.pull_request.number }} >> ./pr/pr_number
|
||||
echo ${{ github.event.action }} >> ./pr/event_action
|
||||
- uses: actions/upload-artifact@v3.1.0
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: pr
|
||||
path: pr/
|
||||
|
|
|
@ -15,10 +15,16 @@ on:
|
|||
- 'proxy/**'
|
||||
- 'manifests/kustomize/**'
|
||||
- 'test/**'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
initialization-tests-v1:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
k8s_version: [ "v1.29.2", "v1.31.0" ]
|
||||
name: Initialization tests v1 - K8s ${{ matrix.k8s_version }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
@ -29,24 +35,46 @@ jobs:
|
|||
python-version: 3.9
|
||||
|
||||
- name: Create KFP cluster
|
||||
id: create-kfp-cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward API port
|
||||
id: forward-api-port
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888
|
||||
continue-on-error: true
|
||||
|
||||
- name: Initialization tests v1
|
||||
id: tests
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
working-directory: ./backend/test/initialization
|
||||
run: go test -v ./... -namespace kubeflow -args -runIntegrationTests=true
|
||||
env:
|
||||
PULL_NUMBER: ${{ github.event.pull_request.number }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.tests.outcome != 'success' }}
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: kfp-initialization-tests-v1-artifacts
|
||||
path: /tmp/tmp.*/*
|
||||
name: kfp-initialization-tests-v1-artifacts-k8s-${{ matrix.k8s_version }}
|
||||
path: /tmp/tmp*/*
|
||||
|
||||
initialization-tests-v2:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
k8s_version: [ "v1.29.2", "v1.31.0" ]
|
||||
name: Initialization tests v2 - K8s ${{ matrix.k8s_version }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
@ -57,24 +85,46 @@ jobs:
|
|||
python-version: 3.9
|
||||
|
||||
- name: Create KFP cluster
|
||||
id: create-kfp-cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward API port
|
||||
id: forward-api-port
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888
|
||||
continue-on-error: true
|
||||
|
||||
- name: Initialization tests v2
|
||||
id: tests
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
working-directory: ./backend/test/v2/initialization
|
||||
run: go test -v ./... -namespace kubeflow -args -runIntegrationTests=true
|
||||
env:
|
||||
PULL_NUMBER: ${{ github.event.pull_request.number }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.tests.outcome != 'success' }}
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: kfp-initialization-tests-v2-artifacts
|
||||
path: /tmp/tmp.*/*
|
||||
name: kfp-initialization-tests-v2-artifacts-k8s-${{ matrix.k8s_version }}
|
||||
path: /tmp/tmp*/*
|
||||
|
||||
api-integration-tests-v1:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
k8s_version: [ "v1.29.2", "v1.31.0" ]
|
||||
name: API integration tests v1 - K8s ${{ matrix.k8s_version }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
@ -85,24 +135,53 @@ jobs:
|
|||
python-version: 3.9
|
||||
|
||||
- name: Create KFP cluster
|
||||
id: create-kfp-cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward API port
|
||||
id: forward-api-port
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward MySQL port
|
||||
id: forward-mysql-port
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "mysql" 3306 3306
|
||||
continue-on-error: true
|
||||
|
||||
- name: API integration tests v1
|
||||
id: tests
|
||||
if: ${{ steps.forward-mysql-port.outcome == 'success' }}
|
||||
working-directory: ./backend/test/integration
|
||||
run: go test -v ./... -namespace ${NAMESPACE} -args -runIntegrationTests=true
|
||||
run: go test -v ./... -namespace kubeflow -args -runIntegrationTests=true
|
||||
env:
|
||||
PULL_NUMBER: ${{ github.event.pull_request.number }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.forward-mysql-port.outcome != 'success' || steps.tests.outcome != 'success' }}
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: kfp-api-integration-tests-v1-artifacts
|
||||
path: /tmp/tmp.*/*
|
||||
name: kfp-api-integration-tests-v1-artifacts-k8s-${{ matrix.k8s_version }}
|
||||
path: /tmp/tmp*/*
|
||||
|
||||
api-integration-tests-v2:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
pipeline_store: [ "database", "kubernetes" ]
|
||||
k8s_version: [ "v1.29.2", "v1.31.0" ]
|
||||
name: API integration tests v2 - K8s with ${{ matrix.pipeline_store }} ${{ matrix.k8s_version }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
@ -113,24 +192,54 @@ jobs:
|
|||
python-version: 3.9
|
||||
|
||||
- name: Create KFP cluster
|
||||
id: create-kfp-cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
pipeline_store: ${{ matrix.pipeline_store }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward API port
|
||||
id: forward-api-port
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward MLMD port
|
||||
id: forward-mlmd-port
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
run: kubectl -n kubeflow port-forward svc/metadata-grpc-service 8080:8080 &
|
||||
continue-on-error: true
|
||||
|
||||
- name: API integration tests v2
|
||||
id: tests
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
working-directory: ./backend/test/v2/integration
|
||||
run: go test -v ./... -namespace ${NAMESPACE} -args -runIntegrationTests=true
|
||||
run: go test -v ./... -namespace kubeflow -args -runIntegrationTests=true
|
||||
env:
|
||||
PULL_NUMBER: ${{ github.event.pull_request.number }}
|
||||
PIPELINE_STORE: ${{ matrix.pipeline_store }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.forward-mlmd-port.outcome != 'success' || steps.tests.outcome != 'success' }}
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: kfp-api-integration-tests-v2-artifacts
|
||||
path: /tmp/tmp.*/*
|
||||
name: kfp-api-integration-tests-v2-artifacts-k8s-${{ matrix.k8s_version }}-${{ matrix.pipeline_store }}
|
||||
path: /tmp/tmp*/*
|
||||
|
||||
frontend-integration-test:
|
||||
api-integration-tests-v2-with-proxy:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
k8s_version: [ "v1.31.0" ]
|
||||
name: API integration tests v2 with proxy - K8s ${{ matrix.k8s_version }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
@ -141,30 +250,168 @@ jobs:
|
|||
python-version: 3.9
|
||||
|
||||
- name: Create KFP cluster
|
||||
id: create-kfp-cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
proxy: 'true'
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward API port
|
||||
id: forward-api-port
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward MLMD port
|
||||
id: forward-mlmd-port
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
run: kubectl -n kubeflow port-forward svc/metadata-grpc-service 8080:8080 &
|
||||
continue-on-error: true
|
||||
|
||||
- name: API integration tests v2
|
||||
id: tests
|
||||
if: ${{ steps.forward-mlmd-port.outcome == 'success' }}
|
||||
working-directory: ./backend/test/v2/integration
|
||||
run: go test -v ./... -namespace kubeflow -args -runIntegrationTests=true -useProxy=true
|
||||
env:
|
||||
PULL_NUMBER: ${{ github.event.pull_request.number }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.tests.outcome != 'success' }}
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns squid --output /tmp/tmp_squid_pod_log.txt
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: kfp-api-integration-tests-v2-with-proxy-artifacts-k8s-${{ matrix.k8s_version }}
|
||||
path: /tmp/tmp*/*
|
||||
|
||||
api-integration-tests-v2-with-cache-disabled:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
k8s_version: [ "v1.31.0" ]
|
||||
name: API integration tests v2 with cache disabled - K8s ${{ matrix.k8s_version }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.9
|
||||
|
||||
- name: Create KFP cluster
|
||||
id: create-kfp-cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
cache_enabled: 'false'
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward API port
|
||||
id: forward-api-port
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward MLMD port
|
||||
id: forward-mlmd-port
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
run: kubectl -n kubeflow port-forward svc/metadata-grpc-service 8080:8080 &
|
||||
continue-on-error: true
|
||||
|
||||
- name: API integration tests v2
|
||||
id: tests
|
||||
if: ${{ steps.forward-mlmd-port.outcome == 'success' }}
|
||||
working-directory: ./backend/test/v2/integration
|
||||
run: go test -v ./... -namespace kubeflow -args -runIntegrationTests=true -cacheEnabled=false
|
||||
env:
|
||||
PULL_NUMBER: ${{ github.event.pull_request.number }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.tests.outcome != 'success' }}
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: kfp-api-integration-tests-v2-with-cache-disabled-artifacts-k8s-${{ matrix.k8s_version }}
|
||||
path: /tmp/tmp*/*
|
||||
|
||||
frontend-integration-test:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
k8s_version: [ "v1.29.2", "v1.31.0" ]
|
||||
name: Frontend Integration Tests - K8s ${{ matrix.k8s_version }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.9
|
||||
|
||||
- name: Create KFP cluster
|
||||
id: create-kfp-cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward API port
|
||||
id: forward-api-port
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward Frontend port
|
||||
id: forward-frontend-port
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline-ui" 3000 3000
|
||||
continue-on-error: true
|
||||
|
||||
- name: Build frontend integration tests image
|
||||
working-directory: ./test/frontend-integration-test
|
||||
run: docker build . -t kfp-frontend-integration-test:local
|
||||
|
||||
- name: Frontend integration tests
|
||||
id: tests
|
||||
if: ${{ steps.forward-frontend-port.outcome == 'success' }}
|
||||
run: docker run --net=host kfp-frontend-integration-test:local --remote-run true
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.forward-frontend-port.outcome != 'success' || steps.tests.outcome != 'success' }}
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: kfp-frontend-integration-test-artifacts
|
||||
path: /tmp/tmp.*/*
|
||||
name: kfp-frontend-integration-test-artifacts-k8s-${{ matrix.k8s_version }}
|
||||
path: /tmp/tmp*/*
|
||||
|
||||
basic-sample-tests:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
k8s_version: [ "v1.29.2", "v1.31.0" ]
|
||||
name: Basic Sample Tests - K8s ${{ matrix.k8s_version }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
@ -175,23 +422,61 @@ jobs:
|
|||
python-version: 3.9
|
||||
|
||||
- name: Create KFP cluster
|
||||
id: create-kfp-cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward API port
|
||||
id: forward-api-port
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888
|
||||
continue-on-error: true
|
||||
|
||||
- name: Install prerequisites
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: pip3 install -r ./test/sample-test/requirements.txt
|
||||
|
||||
- name: Install protobuf-compiler
|
||||
id: install-protobuf-compiler
|
||||
run: sudo apt-get install protobuf-compiler -y
|
||||
|
||||
- name: Install Wheel
|
||||
id: install-wheel
|
||||
run: pip3 install wheel==0.42.0 setuptools
|
||||
|
||||
- name: Generate API proto files
|
||||
working-directory: ./api
|
||||
run: make python
|
||||
|
||||
- name: Install local kfp from source
|
||||
run: python3 -m pip install -I sdk/python
|
||||
|
||||
- name: Install kfp-pipeline-spec from source
|
||||
run: |
|
||||
python3 -m pip install -I api/v2alpha1/python
|
||||
|
||||
- name: Basic sample tests - sequential
|
||||
run: python3 ./test/sample-test/sample_test_launcher.py sample_test run_test --namespace kubeflow --test-name sequential --results-gcs-dir output
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
id: sequential-test
|
||||
run: python3 ./test/sample-test/sample_test_launcher.py sample_test run_test --namespace kubeflow --test-name sequential --results-gcs-dir output
|
||||
|
||||
- name: Basic sample tests - exit_handler
|
||||
run: python3 ./test/sample-test/sample_test_launcher.py sample_test run_test --namespace kubeflow --test-name exit_handler --results-gcs-dir output
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
id: sample-test
|
||||
run: python3 ./test/sample-test/sample_test_launcher.py sample_test run_test --namespace kubeflow --test-name exit_handler --expected-result failed --results-gcs-dir output
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.sequential-test.outcome != 'success' || steps.sample-test.outcome != 'success'}}
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: kfp-basic-sample-tests-artifacts
|
||||
path: /tmp/tmp.*/*
|
||||
name: kfp-e2e-tests-artifacts-k8s-${{ matrix.k8s_version }}
|
||||
path: /tmp/tmp*/*
|
||||
|
|
|
@ -10,6 +10,9 @@ on:
|
|||
paths:
|
||||
- 'frontend/**'
|
||||
- '.github/workflows/frontend.yml'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
- 'backend/src/apiserver/config/sample_config.json'
|
||||
|
||||
jobs:
|
||||
frontend-tests:
|
||||
|
@ -20,9 +23,9 @@ jobs:
|
|||
uses: actions/checkout@v2
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v3
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '14'
|
||||
node-version: '22'
|
||||
|
||||
- name: Clean npm cache
|
||||
run: npm cache clean --force
|
||||
|
|
|
@ -8,6 +8,8 @@ on:
|
|||
paths:
|
||||
- '.github/workflows/gcpc-modules-tests.yml'
|
||||
- 'sdk/python/**'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
all-gcpc-tests:
|
||||
|
@ -20,13 +22,12 @@ jobs:
|
|||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.9
|
||||
|
||||
- name: apt-get update
|
||||
run: sudo apt-get update
|
||||
|
||||
- name: Install protobuf-compiler
|
||||
run: sudo apt-get install protobuf-compiler -y
|
||||
|
||||
|
||||
- name: Install setuptools
|
||||
run: |
|
||||
pip3 install setuptools
|
||||
|
@ -35,19 +36,19 @@ jobs:
|
|||
- name: Install Wheel
|
||||
run: pip3 install wheel==0.42.0
|
||||
|
||||
- name: Install python sdk
|
||||
- name: Install python sdk
|
||||
run: pip install sdk/python
|
||||
|
||||
- name: Install google-cloud component
|
||||
run: pip install components/google-cloud
|
||||
|
||||
- name: Generate API proto files
|
||||
working-directory: ./api
|
||||
run: make clean python
|
||||
|
||||
- name: Install kfp-pipeline-spec from source
|
||||
run: |
|
||||
python3 -m pip install api/v2alpha1/python
|
||||
|
||||
- name: Install google-cloud component
|
||||
run: pip install components/google-cloud
|
||||
python3 -m pip install -I api/v2alpha1/python
|
||||
|
||||
- name: Install Pytest
|
||||
run: pip install $(grep 'pytest==' sdk/python/requirements-dev.txt)
|
||||
|
|
|
@ -0,0 +1,16 @@
|
|||
name: Build images from sources and push to master.
|
||||
run-name: Build images for master
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
jobs:
|
||||
image-builds:
|
||||
uses: ./.github/workflows/image-builds.yml
|
||||
with:
|
||||
src_branch: master
|
||||
target_tag: master
|
||||
fail_fast: true
|
||||
overwrite_imgs: true
|
||||
set_latest: false
|
||||
add_sha_tag: 'false'
|
|
@ -28,6 +28,11 @@ on:
|
|||
default: 'true'
|
||||
description: 'Set latest tag on build images.'
|
||||
required: true
|
||||
add_sha_tag:
|
||||
type: string
|
||||
default: 'true'
|
||||
description: 'Add a sha image tag.'
|
||||
required: false
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
src_branch:
|
||||
|
@ -55,6 +60,11 @@ on:
|
|||
default: 'true'
|
||||
description: 'Set latest tag on build images.'
|
||||
required: true
|
||||
add_sha_tag:
|
||||
type: string
|
||||
default: 'true'
|
||||
description: 'Add a sha image tag.'
|
||||
required: false
|
||||
env:
|
||||
SOURCE_BRANCH: ${{ inputs.src_branch }}
|
||||
TARGET_IMAGE_TAG: ${{ inputs.target_tag }}
|
||||
|
@ -62,6 +72,7 @@ env:
|
|||
IMAGE_REGISTRY: ghcr.io
|
||||
IMAGE_ORG: ${{ github.repository_owner }}
|
||||
SET_LATEST: ${{ inputs.set_latest }}
|
||||
ADD_SHA_TAG: ${{ inputs.add_sha_tag }}
|
||||
jobs:
|
||||
build-images-with-tag:
|
||||
continue-on-error: false
|
||||
|
@ -77,20 +88,43 @@ jobs:
|
|||
include:
|
||||
- image: kfp-api-server
|
||||
dockerfile: backend/Dockerfile
|
||||
context: .
|
||||
- image: kfp-frontend
|
||||
dockerfile: frontend/Dockerfile
|
||||
context: .
|
||||
- image: kfp-persistence-agent
|
||||
dockerfile: backend/Dockerfile.persistenceagent
|
||||
context: .
|
||||
- image: kfp-scheduled-workflow-controller
|
||||
dockerfile: backend/Dockerfile.scheduledworkflow
|
||||
context: .
|
||||
- image: kfp-viewer-crd-controller
|
||||
dockerfile: backend/Dockerfile.viewercontroller
|
||||
context: .
|
||||
- image: kfp-visualization-server
|
||||
dockerfile: backend/Dockerfile.visualization
|
||||
context: .
|
||||
- image: kfp-launcher
|
||||
dockerfile: backend/Dockerfile.launcher
|
||||
context: .
|
||||
- image: kfp-driver
|
||||
dockerfile: backend/Dockerfile.driver
|
||||
context: .
|
||||
- image: kfp-cache-deployer
|
||||
dockerfile: backend/src/cache/deployer/Dockerfile
|
||||
context: .
|
||||
- image: kfp-cache-server
|
||||
dockerfile: backend/Dockerfile.cacheserver
|
||||
context: .
|
||||
- image: kfp-metadata-writer
|
||||
dockerfile: backend/metadata_writer/Dockerfile
|
||||
context: .
|
||||
- image: kfp-metadata-envoy
|
||||
dockerfile: third_party/metadata_envoy/Dockerfile
|
||||
context: .
|
||||
- image: kfp-inverse-proxy-agent
|
||||
dockerfile: proxy/Dockerfile
|
||||
context: ./proxy
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
@ -134,7 +168,7 @@ jobs:
|
|||
tags: |
|
||||
type=raw,value=${{env.TARGET_IMAGE_TAG}}
|
||||
type=raw,value=latest,enable=${{ env.SET_LATEST == 'true'}}
|
||||
type=sha
|
||||
type=sha,enable=${{ env.ADD_SHA_TAG == 'true' }}
|
||||
|
||||
# Build the image. If the build succeeds, it pushes the image to GitHub
|
||||
# Packages. It uses the context parameter to define the build's context
|
||||
|
@ -144,7 +178,7 @@ jobs:
|
|||
uses: docker/build-push-action@v6
|
||||
if: steps.check_tag.outcome == 'success'
|
||||
with:
|
||||
context: .
|
||||
context: ${{ matrix.context }}
|
||||
file: ${{ matrix.dockerfile }}
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
|
|
|
@ -11,10 +11,17 @@ on:
|
|||
- 'sdk/python/**'
|
||||
- 'api/v2alpha1/**'
|
||||
- 'kubernetes_platform/**'
|
||||
- 'backend/**'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
kfp-kubernetes-execution-tests:
|
||||
runs-on: ubuntu-24.04
|
||||
strategy:
|
||||
matrix:
|
||||
k8s_version: [ "v1.29.2", "v1.31.0" ]
|
||||
name: kfp-kubernetes execution tests - K8s ${{ matrix.k8s_version }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
@ -24,50 +31,108 @@ jobs:
|
|||
with:
|
||||
python-version: '3.9'
|
||||
|
||||
# This is intended to address disk space issues that have surfaced
|
||||
# intermittently during CI -
|
||||
# https://github.com/actions/runner-images/issues/2840#issuecomment-1284059930
|
||||
- name: Free up space in /dev/root
|
||||
run: |
|
||||
echo "Disk usage before clean up:"
|
||||
df -h
|
||||
sudo rm -rf /usr/share/dotnet
|
||||
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
|
||||
echo "Disk usage after clean up:"
|
||||
df -h
|
||||
|
||||
- name: Create KFP cluster
|
||||
id: create-kfp-cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward API port
|
||||
id: forward-api-port
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888
|
||||
continue-on-error: true
|
||||
|
||||
- name: apt-get update
|
||||
id: apt-get-update
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
run: sudo apt-get update
|
||||
|
||||
- name: Install protobuf-compiler
|
||||
id: install-protobuf-compiler
|
||||
if: ${{ steps.apt-get-update.outcome == 'success' }}
|
||||
run: sudo apt-get install protobuf-compiler -y
|
||||
|
||||
- name: Install setuptools
|
||||
run: |
|
||||
id: install-setuptools
|
||||
if: ${{ steps.install-protobuf-compiler.outcome == 'success' }}
|
||||
run: |
|
||||
pip3 install setuptools
|
||||
pip3 freeze
|
||||
|
||||
- name: Install Wheel
|
||||
id: install-wheel
|
||||
if: ${{ steps.install-setuptools.outcome == 'success' }}
|
||||
run: pip3 install wheel==0.42.0
|
||||
|
||||
- name: Install protobuf
|
||||
id: install-protobuf
|
||||
if: ${{ steps.install-wheel.outcome == 'success' }}
|
||||
run: pip3 install protobuf==4.25.3
|
||||
|
||||
- name: Generate API proto files
|
||||
id: generate-api-proto-files
|
||||
if: ${{ steps.install-protobuf.outcome == 'success' }}
|
||||
working-directory: ./api
|
||||
run: make clean python
|
||||
|
||||
- name: Install kfp-pipeline-spec from source
|
||||
run: |
|
||||
python3 -m pip install api/v2alpha1/python
|
||||
|
||||
- name: Generate kfp-kubernetes proto files from source
|
||||
id: generate-kfp-kubernetes-proto-files
|
||||
if: ${{ steps.generate-api-proto-files.outcome == 'success' }}
|
||||
working-directory: ./kubernetes_platform
|
||||
run: make clean python
|
||||
|
||||
- name: Install kfp-kubernetes from source
|
||||
id: install-kfp-kubernetes
|
||||
if: ${{ steps.generate-kfp-kubernetes-proto-files.outcome == 'success' }}
|
||||
run: |
|
||||
pip install -e ./kubernetes_platform/python[dev]
|
||||
|
||||
- name: Install requirements
|
||||
id: install-requirements
|
||||
if: ${{ steps.install-kfp-kubernetes.outcome == 'success' }}
|
||||
run: pip install -r ./test/kfp-kubernetes-execution-tests/requirements.txt
|
||||
|
||||
- name: Install kfp-pipeline-spec from source
|
||||
id: install-kfp-pipeline-spec
|
||||
if: ${{ steps.generate-api-proto-files.outcome == 'success' }}
|
||||
run: |
|
||||
python3 -m pip install -I api/v2alpha1/python
|
||||
|
||||
- name: Run tests
|
||||
id: test
|
||||
if: ${{ steps.install-requirements.outcome == 'success' }}
|
||||
env:
|
||||
PULL_NUMBER: ${{ github.event.pull_request.number }}
|
||||
REPO_NAME: ${{ github.repository }}
|
||||
run: |
|
||||
export KFP_ENDPOINT="http://localhost:8888"
|
||||
export TIMEOUT_SECONDS=2700
|
||||
pytest ./test/kfp-kubernetes-execution-tests/sdk_execution_tests.py --asyncio-task-timeout $TIMEOUT_SECONDS
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.test.outcome != 'success'}}
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: kfp-execution-tests-artifacts-k8s-${{ matrix.k8s_version }}
|
||||
path: /tmp/tmp*/*
|
||||
|
|
|
@ -9,7 +9,8 @@ on:
|
|||
- 'sdk/python/**'
|
||||
- 'api/v2alpha1/**'
|
||||
- 'kubernetes_platform/**'
|
||||
- 'test/presubmit-test-kfp-kubernetes-library.sh'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
kfp-kubernetes-library-test:
|
||||
|
@ -18,11 +19,8 @@ jobs:
|
|||
matrix:
|
||||
python: [
|
||||
{ 'version': '3.9' },
|
||||
{ 'version': '3.10' },
|
||||
{ 'version': '3.11' },
|
||||
{ 'version': '3.12' },
|
||||
{ 'version': '3.13' }
|
||||
]
|
||||
]
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
@ -32,5 +30,42 @@ jobs:
|
|||
with:
|
||||
python-version: ${{matrix.python.version}}
|
||||
|
||||
- name: apt-get update
|
||||
run: sudo apt-get update
|
||||
|
||||
- name: Install protobuf-compiler
|
||||
run: sudo apt-get install protobuf-compiler -y
|
||||
|
||||
- name: Install setuptools
|
||||
run: |
|
||||
pip3 install setuptools
|
||||
pip3 freeze
|
||||
|
||||
- name: Install Wheel
|
||||
run: pip3 install wheel==0.42.0
|
||||
|
||||
- name: Install protobuf
|
||||
run: pip3 install protobuf==4.25.3
|
||||
|
||||
- name: Install kfp-sdk from source
|
||||
run: |
|
||||
python3 -m pip install sdk/python
|
||||
|
||||
- name: Generate API proto files
|
||||
working-directory: ./api
|
||||
run: make clean python
|
||||
|
||||
- name: Generate kfp-kubernetes proto files from source
|
||||
working-directory: ./kubernetes_platform
|
||||
run: make clean python
|
||||
|
||||
- name: Install kfp-kubernetes from source
|
||||
run: |
|
||||
pip install -e ./kubernetes_platform/python[dev]
|
||||
|
||||
- name: Install kfp-pipeline-spec from source
|
||||
run: |
|
||||
python3 -m pip install -I api/v2alpha1/python
|
||||
|
||||
- name: Run tests
|
||||
run: ./test/presubmit-test-kfp-kubernetes-library.sh
|
||||
run: pytest ./kubernetes_platform/python/test -n auto
|
||||
|
|
|
@ -2,8 +2,7 @@ name: KFP Samples
|
|||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
branches: [master]
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/resources/**'
|
||||
|
@ -13,26 +12,104 @@ on:
|
|||
- 'samples/**'
|
||||
- 'samples/core/dataflow/**'
|
||||
- 'samples/core/parameterized_tfx_oss/**'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
samples:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
k8s_version: [ "v1.29.2", "v1.31.0" ]
|
||||
name: KFP Samples - K8s ${{ matrix.k8s_version }}
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.9
|
||||
|
||||
- name: Free up space in /dev/root
|
||||
run: |
|
||||
echo "Disk usage before clean up:"
|
||||
df -h
|
||||
sudo rm -rf /usr/share/dotnet
|
||||
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
|
||||
echo "Disk usage after clean up:"
|
||||
df -h
|
||||
|
||||
- name: apt-get update
|
||||
run: sudo apt-get update
|
||||
|
||||
- name: Install protobuf-compiler
|
||||
run: sudo apt-get install protobuf-compiler -y
|
||||
|
||||
- name: Install setuptools
|
||||
run: |
|
||||
pip3 install setuptools
|
||||
pip3 freeze
|
||||
|
||||
- name: Install Wheel
|
||||
run: pip3 install wheel==0.42.0
|
||||
|
||||
- name: Install protobuf
|
||||
run: pip3 install protobuf==4.25.3
|
||||
|
||||
- name: Generate API proto files
|
||||
working-directory: ./api
|
||||
run: make python
|
||||
|
||||
- name: Generate, Build, and Install Kubernetes API proto files & packages
|
||||
working-directory: ./kubernetes_platform
|
||||
run: make python && pip install python/dist/*.whl
|
||||
|
||||
- name: Install kfp-pipeline-spec from source
|
||||
run: |
|
||||
python3 -m pip install -I api/v2alpha1/python
|
||||
|
||||
- name: Create KFP cluster
|
||||
id: create-kfp-cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Build and upload the sample Modelcar image to Kind
|
||||
id: build-sample-modelcar-image
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: |
|
||||
docker build -f samples/v2/modelcar/Dockerfile -t registry.domain.local/modelcar:test .
|
||||
kind --name kfp load docker-image registry.domain.local/modelcar:test
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward API port
|
||||
id: forward-api-port
|
||||
if: ${{ steps.build-sample-modelcar-image.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888
|
||||
continue-on-error: true
|
||||
|
||||
- name: Run Samples Tests
|
||||
id: tests
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
env:
|
||||
PULL_NUMBER: ${{ github.event.pull_request.number }}
|
||||
REPO_NAME: ${{ github.repository }}
|
||||
run: |
|
||||
./backend/src/v2/test/sample-test.sh
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.tests.outcome != 'success' }}
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: kfp-samples-tests-artifacts-k8s-${{ matrix.k8s_version }}
|
||||
path: /tmp/tmp*/*
|
||||
|
|
|
@ -9,13 +9,15 @@ on:
|
|||
- '.github/workflows/kfp-sdk-runtime-tests.yml'
|
||||
- 'sdk/python/**'
|
||||
- 'test/presubmit-test-kfp-runtime-code.sh'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
kfp-runtime-tests:
|
||||
runs-on: ubuntu-24.04
|
||||
strategy:
|
||||
matrix:
|
||||
python: ['3.9', '3.10', '3.11', '3.12', '3.13']
|
||||
python: ['3.9', '3.13']
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
@ -28,4 +30,5 @@ jobs:
|
|||
- name: Run KFP Runtime Code Tests
|
||||
run: |
|
||||
export PULL_NUMBER="${{ github.event.inputs.pull_number || github.event.pull_request.number }}"
|
||||
export REPO_NAME="${{ github.repository }}"
|
||||
./test/presubmit-test-kfp-runtime-code.sh
|
||||
|
|
|
@ -2,28 +2,44 @@ name: KFP SDK Tests
|
|||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
branches: [master]
|
||||
pull_request:
|
||||
paths:
|
||||
- 'sdk/**'
|
||||
- 'test/presubmit-tests-sdk.sh'
|
||||
- '.github/workflows/kfp-sdk-tests.yml'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
sdk-tests:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: [3.9, 3.10, 3.11, 3.12, 3.13]
|
||||
python-version: ['3.9', '3.13']
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ matrix.python }}
|
||||
python-version: ${{ matrix.python-version }}
|
||||
|
||||
- name: Install protobuf-compiler
|
||||
run: sudo apt-get install protobuf-compiler -y
|
||||
|
||||
- name: Install Wheel & setuptools
|
||||
run: pip3 install wheel==0.42.0 setuptools
|
||||
|
||||
- name: Generate API proto files
|
||||
working-directory: ./api
|
||||
run: make clean python
|
||||
|
||||
- name: Install kfp-pipeline-spec from source
|
||||
run: |
|
||||
python3 -m pip install -I api/v2alpha1/python
|
||||
|
||||
- name: Run SDK Tests
|
||||
run: |
|
||||
|
|
|
@ -0,0 +1,53 @@
|
|||
name: KFP Webhook Integration
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/resources/**'
|
||||
- '.github/workflows/kfp-webhooks.yml'
|
||||
- 'backend/**'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
webhook-tests:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
k8s_version: [ "v1.29.2", "v1.31.0" ]
|
||||
name: KFP Webhooks - K8s ${{ matrix.k8s_version }}
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Create KFP cluster
|
||||
id: create-kfp-cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
pipeline_store: kubernetes
|
||||
continue-on-error: true
|
||||
|
||||
- name: Run Webhook Integration Tests
|
||||
id: tests
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: |
|
||||
make -C backend/test/integration test-webhook
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.tests.outcome != 'success' }}
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: kfp-samples-tests-artifacts-k8s-${{ matrix.k8s_version }}
|
||||
path: /tmp/tmp*/*
|
|
@ -6,7 +6,9 @@ on:
|
|||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/kubeflow-pipelines-manifests.yml'
|
||||
- 'manifests/kustomize/**'
|
||||
- 'manifests/kustomize/**'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
kubeflow-pipelines-manifests:
|
||||
|
|
|
@ -7,6 +7,10 @@ on:
|
|||
jobs:
|
||||
run_tests:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
k8s_version: [ "v1.29.2", "v1.31.0" ]
|
||||
name: Periodic Functional Tests - K8s ${{ matrix.k8s_version }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
@ -16,16 +20,29 @@ jobs:
|
|||
python-version: 3.9
|
||||
- name: Create KFP cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
|
||||
- name: Port forward kfp apiserver
|
||||
run: |
|
||||
nohup kubectl port-forward --namespace kubeflow svc/ml-pipeline 8888:8888 &
|
||||
|
||||
- name: Run Functional Tests
|
||||
id: tests
|
||||
run: |
|
||||
log_dir=$(mktemp -d)
|
||||
./test/kfp-functional-test/kfp-functional-test.sh > $log_dir/periodic_tests.txt
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: steps.tests.outcome != 'success'
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: periodic-functional-artifacts
|
||||
path: /tmp/tmp.*/*
|
||||
name: periodic-functional-artifacts-k8s-${{ matrix.k8s_version }}
|
||||
path: /tmp/tmp*/*
|
||||
|
|
|
@ -20,7 +20,6 @@ jobs:
|
|||
- name: Check if the author is a member or Owner
|
||||
id: check-condition
|
||||
run: |
|
||||
echo "slash_command=${{github.event.comment.body}}" >> $GITHUB_ENV
|
||||
if [[ "${{ github.event.comment.author_association }}" == "MEMBER" || "${{ github.event.comment.author_association }}" == "OWNER" ]]; then
|
||||
echo "condition_met=true" >> $GITHUB_ENV
|
||||
else
|
||||
|
|
|
@ -0,0 +1,21 @@
|
|||
name: pre-commit
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
pre-commit:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v3
|
||||
- uses: pre-commit/action@v3.0.1
|
||||
# This is set to only run the golangci-lint pre-commit hooks
|
||||
# Remove in a later PR to run all hooks
|
||||
with:
|
||||
extra_args: golangci-lint --all-files
|
|
@ -10,6 +10,8 @@ on:
|
|||
paths:
|
||||
- 'backend/**'
|
||||
- 'test/presubmit-backend-test.sh'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
backend-tests:
|
||||
|
@ -20,9 +22,9 @@ jobs:
|
|||
uses: actions/checkout@v2
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v3
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '1.21'
|
||||
go-version-file: go.mod
|
||||
|
||||
- name: Run Backend Tests
|
||||
run: ./test/presubmit-backend-test.sh
|
||||
|
|
|
@ -1,16 +0,0 @@
|
|||
name: Re-Run PR tests
|
||||
|
||||
on:
|
||||
issue_comment:
|
||||
types: [created]
|
||||
|
||||
jobs:
|
||||
rerun_pr_tests:
|
||||
name: rerun_pr_tests
|
||||
if: ${{ github.event.issue.pull_request }}
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
- uses: estroz/rerun-actions@6da343594fa5cc0869523be9b43ed9256c68d39b # https://github.com/estroz/rerun-actions/releases/tag/v0.3.0
|
||||
with:
|
||||
repo_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
comment_id: ${{ github.event.comment.id }}
|
|
@ -11,6 +11,8 @@ on:
|
|||
- 'sdk/python/**'
|
||||
- 'api/v2alpha1/**'
|
||||
- '.github/workflows/sdk-component-yaml.yml'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
test-component-yaml-kfp:
|
||||
|
|
|
@ -9,6 +9,8 @@ on:
|
|||
- 'sdk/python/**'
|
||||
- 'test/presubmit-docformatter-sdk.sh'
|
||||
- '.github/workflows/sdk-docformatter.yml'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
test-docformatter-kfp-sdk:
|
||||
|
|
|
@ -10,55 +10,128 @@ on:
|
|||
- '.github/resources/**'
|
||||
- 'sdk/python/**'
|
||||
- 'api/v2alpha1/**'
|
||||
- 'backend/**'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
sdk-execution-tests:
|
||||
runs-on: ubuntu-24.04
|
||||
strategy:
|
||||
matrix:
|
||||
k8s_version: [ "v1.29.2", "v1.31.0" ]
|
||||
name: KFP SDK Execution Tests - K8s ${{ matrix.k8s_version }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# This is intended to address disk space issues that have surfaced
|
||||
# intermittently during CI -
|
||||
# https://github.com/actions/runner-images/issues/2840#issuecomment-1284059930
|
||||
- name: Free up space in /dev/root
|
||||
run: |
|
||||
echo "Disk usage before clean up:"
|
||||
df -h
|
||||
sudo rm -rf /usr/share/dotnet
|
||||
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
|
||||
echo "Disk usage after clean up:"
|
||||
df -h
|
||||
|
||||
# This must occur after "Free up space" step
|
||||
# otherwise python version will be overridden
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.9
|
||||
python-version: '3.11'
|
||||
|
||||
- name: Create KFP cluster
|
||||
id: create-kfp-cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward API port
|
||||
id: forward-api-port
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward MLMD port
|
||||
id: forward-mlmd-port
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: kubectl -n kubeflow port-forward svc/metadata-grpc-service 8080:8080 &
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward Minio port
|
||||
id: forward-minio-port
|
||||
if: ${{ steps.forward-mlmd-port.outcome == 'success' }}
|
||||
run: kubectl -n kubeflow port-forward service/minio-service 9000:9000 &
|
||||
continue-on-error: true
|
||||
|
||||
- name: apt-get update
|
||||
id: apt-get-update
|
||||
if: ${{ steps.forward-minio-port.outcome == 'success' }}
|
||||
run: sudo apt-get update
|
||||
|
||||
- name: Install protobuf-compiler
|
||||
id: install-protobuf-compiler
|
||||
if: ${{ steps.apt-get-update.outcome == 'success' }}
|
||||
run: sudo apt-get install protobuf-compiler -y
|
||||
|
||||
- name: Install setuptools
|
||||
run: |
|
||||
id: install-setuptools
|
||||
if: ${{ steps.install-protobuf-compiler.outcome == 'success' }}
|
||||
run: |
|
||||
pip3 install setuptools
|
||||
pip3 freeze
|
||||
|
||||
- name: Install Wheel
|
||||
id: install-wheel
|
||||
if: ${{ steps.install-setuptools.outcome == 'success' }}
|
||||
run: pip3 install wheel==0.42.0
|
||||
|
||||
- name: Install protobuf
|
||||
id: install-protobuf
|
||||
if: ${{ steps.install-wheel.outcome == 'success' }}
|
||||
run: pip3 install protobuf==4.25.3
|
||||
|
||||
- name: Generate API proto files
|
||||
id: generate-api-proto-files
|
||||
if: ${{ steps.install-protobuf.outcome == 'success' }}
|
||||
working-directory: ./api
|
||||
run: make clean python
|
||||
|
||||
- name: Install kfp-pipeline-spec from source
|
||||
run: |
|
||||
python3 -m pip install api/v2alpha1/python
|
||||
|
||||
- name: Install requirements
|
||||
id: install-requirements
|
||||
run: pip install -r ./test/sdk-execution-tests/requirements.txt
|
||||
|
||||
- name: Install kfp-pipeline-spec from source
|
||||
id: install-kfp-pipeline-spec
|
||||
if: ${{ steps.generate-api-proto-files.outcome == 'success' }}
|
||||
run: |
|
||||
python3 -m pip install -I api/v2alpha1/python
|
||||
|
||||
- name: Run tests
|
||||
id: tests
|
||||
env:
|
||||
PULL_NUMBER: ${{ github.event.pull_request.number }}
|
||||
REPO_NAME: ${{ github.repository }}
|
||||
run: |
|
||||
export KFP_ENDPOINT="http://localhost:8888"
|
||||
export TIMEOUT_SECONDS=2700
|
||||
pytest ./test/sdk-execution-tests/sdk_execution_tests.py --asyncio-task-timeout $TIMEOUT_SECONDS
|
||||
pytest -v -n 5 ./test/sdk-execution-tests/sdk_execution_tests.py
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.forward-mlmd-port.outcome != 'success' || steps.forward-minio-port.outcome != 'success' || steps.tests.outcome != 'success'}}
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: periodic-functional-artifacts-k8s-${{ matrix.k8s_version }}
|
||||
path: /tmp/tmp*/*
|
||||
|
|
|
@ -9,6 +9,8 @@ on:
|
|||
- 'sdk/python/**'
|
||||
- 'test/presubmit-isort-sdk.sh'
|
||||
- '.github/workflows/sdk-isort.yml'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
test-isort-kfp-sdk:
|
||||
|
|
|
@ -9,6 +9,8 @@ on:
|
|||
- 'sdk/python/**'
|
||||
- 'test/presubmit-test-sdk-upgrade.sh'
|
||||
- '.github/workflows/sdk-upgrade.yml'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
test-upgrade-kfp-sdk:
|
||||
|
|
|
@ -2,19 +2,15 @@ name: KFP SDK YAPF Tests
|
|||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
paths:
|
||||
- 'sdk/python/**'
|
||||
- 'test/presubmit-yapf-sdk.sh'
|
||||
- '.github/workflows/sdk-yapf.yml'
|
||||
branches: [master]
|
||||
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
paths:
|
||||
- 'sdk/python/**'
|
||||
- 'test/presubmit-yapf-sdk.sh'
|
||||
- '.github/workflows/sdk-yapf.yml'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
yapf-sdk:
|
||||
|
|
|
@ -21,8 +21,8 @@ jobs:
|
|||
- uses: actions/stale@v5
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
days-before-stale: 60
|
||||
days-before-close: 21
|
||||
days-before-stale: 365
|
||||
days-before-close: 183 # half a year
|
||||
stale-issue-message: >
|
||||
This issue has been automatically marked as stale because it has not had
|
||||
recent activity. It will be closed if no further activity occurs. Thank you
|
||||
|
|
|
@ -10,10 +10,17 @@ on:
|
|||
- '.github/resources/**'
|
||||
- 'backend/**'
|
||||
- 'manifests/kustomize/**'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
upgrade-test:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
k8s_version: [ "v1.29.2", "v1.31.0" ]
|
||||
name: KFP upgrade tests - K8s ${{ matrix.k8s_version }}
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
@ -24,20 +31,55 @@ jobs:
|
|||
python-version: 3.9
|
||||
|
||||
- name: Create KFP cluster
|
||||
id: create-kfp-cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward API port
|
||||
id: forward-api-port
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888
|
||||
continue-on-error: true
|
||||
|
||||
- name: Prepare upgrade tests
|
||||
id: upgrade-tests
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
working-directory: backend/test/integration
|
||||
run: go test -v ./... -namespace ${NAMESPACE} -args -runUpgradeTests=true -testify.m=Prepare
|
||||
run: go test -v ./... -namespace kubeflow -args -runUpgradeTests=true -testify.m=Prepare
|
||||
continue-on-error: true
|
||||
|
||||
- name: Prepare verification tests
|
||||
id: verification-tests
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
working-directory: backend/test/integration
|
||||
run: go test -v ./... -namespace ${NAMESPACE} -args -runUpgradeTests=true -testify.m=Verify
|
||||
run: go test -v ./... -namespace kubeflow -args -runUpgradeTests=true -testify.m=Verify
|
||||
continue-on-error: true
|
||||
|
||||
- name: Prepare upgrade tests v2
|
||||
id: upgrade-tests-v2
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
working-directory: backend/test/v2/integration/
|
||||
run: go test -v ./... -namespace ${NAMESPACE} -args -runUpgradeTests=true -testify.m=Prepare
|
||||
|
||||
run: go test -v ./... -namespace kubeflow -args -runUpgradeTests=true -testify.m=Prepare
|
||||
continue-on-error: true
|
||||
|
||||
- name: Prepare verification tests v2
|
||||
id: verification-tests-v2
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
working-directory: backend/test/v2/integration
|
||||
run: go test -v ./... -namespace ${NAMESPACE} -args -runUpgradeTests=true -testify.m=Verify
|
||||
run: go test -v ./... -namespace kubeflow -args -runUpgradeTests=true -testify.m=Verify
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.upgrade-tests.outcome != 'success' || steps.upgrade-tests-v2.outcome != 'success' || steps.verification-tests.outcome != 'success' || steps.verification-tests-v2.outcome != 'success' }}
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: periodic-functional-artifacts-k8s-${{ matrix.k8s_version }}
|
||||
path: /tmp/tmp*/*
|
||||
|
|
|
@ -0,0 +1,60 @@
|
|||
name: Validate Generated Files
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/validate-generated-files.yml'
|
||||
- 'backend/api/**/*.proto'
|
||||
- 'backend/api/**/go_http_client/**'
|
||||
- 'backend/api/**/go_client/**'
|
||||
- 'backend/api/**/python_http_client/**'
|
||||
- 'backend/api/**/swagger/**'
|
||||
- 'api/**/*.proto'
|
||||
- 'api/**/*.go'
|
||||
- 'kubernetes_platform/**/*.proto'
|
||||
- 'kubernetes_platform/**/*.go'
|
||||
- 'backend/src/crd/kubernetes/**/*.go'
|
||||
- 'manifests/kustomize/base/crds/*.yaml'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
validate-generated-files:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version-file: go.mod
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.9
|
||||
|
||||
- name: Install Dependencies
|
||||
run: |
|
||||
sudo apt-get update && sudo apt-get install -y protobuf-compiler jq default-jdk
|
||||
python3 -m pip install setuptools wheel
|
||||
|
||||
- name: Generate API proto files
|
||||
working-directory: ./api
|
||||
run: make clean all
|
||||
|
||||
- name: Generate kfp-kubernetes proto files from source
|
||||
working-directory: ./kubernetes_platform
|
||||
run: make clean all
|
||||
|
||||
- name: Generate K8s Native API CRDs
|
||||
working-directory: ./backend/src/crd/kubernetes
|
||||
run: make generate manifests
|
||||
|
||||
- name: Check for Changes
|
||||
run: make check-diff
|
|
@ -12,6 +12,7 @@ bower_components/
|
|||
|
||||
# Build output
|
||||
dist
|
||||
__debug_bin*
|
||||
|
||||
# Web server
|
||||
frontend/server/dist
|
||||
|
@ -84,3 +85,11 @@ __pycache__
|
|||
|
||||
# kfp local execution default directory
|
||||
local_outputs/
|
||||
|
||||
# Ignore the Kind cluster kubeconfig
|
||||
kubeconfig_dev-pipelines-api
|
||||
|
||||
# Ignore debug Driver Dockerfile produced from `make -C backend image_driver_debug`
|
||||
backend/Dockerfile.driver-debug
|
||||
|
||||
backend/src/crd/kubernetes/bin
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
version: "2"
|
||||
|
||||
run:
|
||||
timeout: 30m
|
||||
skip-files:
|
||||
|
@ -11,17 +13,20 @@ linters:
|
|||
disable-all: true
|
||||
enable: # please keep this alphabetized
|
||||
- gocritic
|
||||
- gosimple
|
||||
- govet
|
||||
- ineffassign
|
||||
- misspell
|
||||
- staticcheck
|
||||
- stylecheck
|
||||
- unused
|
||||
settings:
|
||||
misspell:
|
||||
locale: US
|
||||
staticcheck:
|
||||
checks:
|
||||
- "all"
|
||||
|
||||
linters-settings: # please keep this alphabetized
|
||||
misspell:
|
||||
locale: US
|
||||
staticcheck:
|
||||
checks:
|
||||
- "all"
|
||||
formatters:
|
||||
disable-all: true
|
||||
enable:
|
||||
- gofmt
|
||||
- goimports
|
|
@ -26,7 +26,7 @@ repos:
|
|||
# add comment "noqa" to ignore an import that should not be removed
|
||||
# (e.g., for an import with desired side-effects)
|
||||
- repo: https://github.com/hadialqattan/pycln
|
||||
rev: v2.1.1
|
||||
rev: v2.5.0
|
||||
hooks:
|
||||
- id: pycln
|
||||
name: pycln
|
||||
|
@ -43,7 +43,7 @@ repos:
|
|||
hooks:
|
||||
- id: yapf
|
||||
- repo: https://github.com/pycqa/docformatter
|
||||
rev: v1.4
|
||||
rev: v1.7.7
|
||||
hooks:
|
||||
- id: docformatter
|
||||
name: docformatter
|
||||
|
@ -55,7 +55,7 @@ repos:
|
|||
|
||||
# Golang pre-submit hooks
|
||||
- repo: https://github.com/golangci/golangci-lint
|
||||
rev: v1.52.2
|
||||
rev: v2.1.2
|
||||
hooks:
|
||||
- id: golangci-lint
|
||||
name: golangci-lint
|
||||
|
@ -65,3 +65,11 @@ repos:
|
|||
language: golang
|
||||
require_serial: true
|
||||
pass_filenames: false
|
||||
- id: golangci-lint
|
||||
name: golangci-lint fmt
|
||||
description: Formatter for Go.
|
||||
entry: golangci-lint fmt
|
||||
types: [go]
|
||||
language: golang
|
||||
require_serial: true
|
||||
pass_filenames: false
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
|
||||
version: 2
|
||||
sphinx:
|
||||
configuration: docs/conf.py
|
||||
configuration: docs/sdk/conf.py
|
||||
python:
|
||||
install:
|
||||
- requirements: docs/requirements.txt
|
||||
- requirements: docs/sdk/requirements.txt
|
||||
build:
|
||||
os: ubuntu-22.04
|
||||
tools:
|
||||
|
|
|
@ -1,652 +0,0 @@
|
|||
# Copyright 2018 The Kubeflow Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
steps:
|
||||
|
||||
# Marketplace Major.Minor parsing
|
||||
- id: "parseMajorMinorVersion"
|
||||
name: gcr.io/cloud-builders/docker
|
||||
entrypoint: /bin/bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
# Parse major minor version and save to a file for reusing in other steps.
|
||||
# e.g. 1.0.0-rc.1 and 1.0.1 are parsed as 1.0
|
||||
echo $TAG_NAME | sed -e "s#\([0-9]\+[.][0-9]\+\)[.].*#\1#" > /workspace/mm.ver
|
||||
|
||||
# Pull and retag images for pipeline components
|
||||
- id: 'retagComponentImages'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
waitFor: ['-']
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
images=(
|
||||
"ml-pipeline-kubeflow-deployer"
|
||||
"ml-pipeline-kubeflow-tf-trainer"
|
||||
"ml-pipeline-kubeflow-tf-trainer-gpu"
|
||||
"ml-pipeline-kubeflow-tfjob"
|
||||
"ml-pipeline-local-confusion-matrix"
|
||||
"ml-pipeline-local-roc"
|
||||
)
|
||||
for image in "${images[@]}"
|
||||
do
|
||||
from_image="gcr.io/$PROJECT_ID/$image:$COMMIT_SHA"
|
||||
target_image="gcr.io/ml-pipeline/$image:$TAG_NAME"
|
||||
docker pull $from_image
|
||||
docker tag $from_image $target_image
|
||||
docker push $target_image
|
||||
done
|
||||
|
||||
# Pull and retag the images for the pipeline system
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA']
|
||||
id: 'pullFrontend'
|
||||
- id: 'tagFrontendForMarketplaceMajorMin'
|
||||
waitFor: ['pullFrontend', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA gcr.io/ml-pipeline/frontend:$TAG_NAME
|
||||
docker tag gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA gcr.io/ml-pipeline/frontend:$COMMIT_SHA
|
||||
docker tag gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/frontend:$TAG_NAME
|
||||
docker tag gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/frontend:$TAG_NAME
|
||||
docker tag gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/frontend:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/frontend:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/frontend:$TAG_NAME
|
||||
docker push gcr.io/ml-pipeline/frontend:$COMMIT_SHA
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/frontend:$TAG_NAME
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/frontend:$TAG_NAME
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/frontend:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/frontend:$(cat /workspace/mm.ver)
|
||||
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA']
|
||||
id: 'pullAPIServer'
|
||||
- id: 'tagAPIServerForMarketplaceMajorMinor'
|
||||
waitFor: ['pullAPIServer', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA 'gcr.io/ml-pipeline/api-server:$TAG_NAME'
|
||||
docker tag gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA 'gcr.io/ml-pipeline/api-server:$COMMIT_SHA'
|
||||
docker tag gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA 'gcr.io/ml-pipeline/google/pipelines/apiserver:$TAG_NAME'
|
||||
docker tag gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA 'gcr.io/ml-pipeline/google/pipelines-test/apiserver:$TAG_NAME'
|
||||
docker tag gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/apiserver:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/apiserver:$(cat /workspace/mm.ver)
|
||||
docker push 'gcr.io/ml-pipeline/api-server:$TAG_NAME'
|
||||
docker push 'gcr.io/ml-pipeline/api-server:$COMMIT_SHA'
|
||||
docker push 'gcr.io/ml-pipeline/google/pipelines/apiserver:$TAG_NAME'
|
||||
docker push 'gcr.io/ml-pipeline/google/pipelines-test/apiserver:$TAG_NAME'
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/apiserver:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/apiserver:$(cat /workspace/mm.ver)
|
||||
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/$PROJECT_ID/scheduledworkflow:$COMMIT_SHA']
|
||||
id: 'pullScheduledworkflow'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/scheduledworkflow:$COMMIT_SHA', 'gcr.io/ml-pipeline/scheduledworkflow:$TAG_NAME']
|
||||
id: 'tagScheduledworkflowVersionNumber'
|
||||
waitFor: ['pullScheduledworkflow']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/scheduledworkflow:$COMMIT_SHA', 'gcr.io/ml-pipeline/scheduledworkflow:$COMMIT_SHA']
|
||||
id: 'tagScheduledworkflowCommitSHA'
|
||||
waitFor: ['pullScheduledworkflow']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/scheduledworkflow:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/scheduledworkflow:$TAG_NAME']
|
||||
id: 'tagScheduledworkflowForMarketplace'
|
||||
waitFor: ['pullScheduledworkflow']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/scheduledworkflow:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/scheduledworkflow:$TAG_NAME']
|
||||
id: 'tagScheduledworkflowForMarketplaceTest'
|
||||
waitFor: ['pullScheduledworkflow']
|
||||
- id: 'tagScheduledworkflowForMarketplaceMajorMinor'
|
||||
waitFor: ['pullScheduledworkflow', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/$PROJECT_ID/scheduledworkflow:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/scheduledworkflow:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/$PROJECT_ID/scheduledworkflow:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/scheduledworkflow:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/scheduledworkflow:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/scheduledworkflow:$(cat /workspace/mm.ver)
|
||||
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/$PROJECT_ID/viewer-crd-controller:$COMMIT_SHA']
|
||||
id: 'pullViewerCrdController'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/viewer-crd-controller:$COMMIT_SHA', 'gcr.io/ml-pipeline/viewer-crd-controller:$TAG_NAME']
|
||||
id: 'tagViewerCrdControllerVersionNumber'
|
||||
waitFor: ['pullViewerCrdController']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/viewer-crd-controller:$COMMIT_SHA', 'gcr.io/ml-pipeline/viewer-crd-controller:$COMMIT_SHA']
|
||||
id: 'tagViewerCrdControllerCommitSHA'
|
||||
waitFor: ['pullViewerCrdController']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/viewer-crd-controller:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/viewercrd:$TAG_NAME']
|
||||
id: 'tagViewerCrdControllerForMarketplace'
|
||||
waitFor: ['pullViewerCrdController']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/viewer-crd-controller:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/viewercrd:$TAG_NAME']
|
||||
id: 'tagViewerCrdControllerForMarketplaceTest'
|
||||
waitFor: ['pullViewerCrdController']
|
||||
- id: 'tagViewerCrdControllerForMarketplaceMajorMinor'
|
||||
waitFor: ['pullViewerCrdController', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/$PROJECT_ID/viewer-crd-controller:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/viewercrd:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/$PROJECT_ID/viewer-crd-controller:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/viewercrd:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/viewercrd:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/viewercrd:$(cat /workspace/mm.ver)
|
||||
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/$PROJECT_ID/persistenceagent:$COMMIT_SHA']
|
||||
id: 'pullPersistenceagent'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/persistenceagent:$COMMIT_SHA', 'gcr.io/ml-pipeline/persistenceagent:$TAG_NAME']
|
||||
id: 'tagPersistenceagentVersionNumber'
|
||||
waitFor: ['pullPersistenceagent']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/persistenceagent:$COMMIT_SHA', 'gcr.io/ml-pipeline/persistenceagent:$COMMIT_SHA']
|
||||
id: 'tagPersistenceagentCommitSHA'
|
||||
waitFor: ['pullPersistenceagent']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/persistenceagent:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/persistenceagent:$TAG_NAME']
|
||||
id: 'tagPersistenceagentForMarketplace'
|
||||
waitFor: ['pullPersistenceagent']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/persistenceagent:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/persistenceagent:$TAG_NAME']
|
||||
id: 'tagPersistenceagentForMarketplaceTest'
|
||||
waitFor: ['pullPersistenceagent']
|
||||
- id: 'tagPersistenceagentForMarketplaceMajorMinor'
|
||||
waitFor: ['pullPersistenceagent', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/$PROJECT_ID/persistenceagent:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/persistenceagent:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/$PROJECT_ID/persistenceagent:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/persistenceagent:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/persistenceagent:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/persistenceagent:$(cat /workspace/mm.ver)
|
||||
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/$PROJECT_ID/inverse-proxy-agent:$COMMIT_SHA']
|
||||
id: 'pullInverseProxyAgent'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/inverse-proxy-agent:$COMMIT_SHA', 'gcr.io/ml-pipeline/inverse-proxy-agent:$TAG_NAME']
|
||||
id: 'tagInverseProxyAgentVersionNumber'
|
||||
waitFor: ['pullInverseProxyAgent']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/inverse-proxy-agent:$COMMIT_SHA', 'gcr.io/ml-pipeline/inverse-proxy-agent:$COMMIT_SHA']
|
||||
id: 'tagInverseProxyAgentCommitSHA'
|
||||
waitFor: ['pullInverseProxyAgent']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/inverse-proxy-agent:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/proxyagent:$TAG_NAME']
|
||||
id: 'tagInverseProxyAgentForMarketplace'
|
||||
waitFor: ['pullInverseProxyAgent']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/inverse-proxy-agent:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/proxyagent:$TAG_NAME']
|
||||
id: 'tagInverseProxyAgentForMarketplaceTest'
|
||||
waitFor: ['pullInverseProxyAgent']
|
||||
- id: 'tagInverseProxyAgentForMarketplaceMajorMinor'
|
||||
waitFor: ['pullInverseProxyAgent', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/$PROJECT_ID/inverse-proxy-agent:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/proxyagent:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/$PROJECT_ID/inverse-proxy-agent:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/proxyagent:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/proxyagent:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/proxyagent:$(cat /workspace/mm.ver)
|
||||
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/$PROJECT_ID/visualization-server:$COMMIT_SHA']
|
||||
id: 'pullVisualizationServer'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/visualization-server:$COMMIT_SHA', 'gcr.io/ml-pipeline/visualization-server:$TAG_NAME']
|
||||
id: 'tagVisualizationServerVersionNumber'
|
||||
waitFor: ['pullVisualizationServer']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/visualization-server:$COMMIT_SHA', 'gcr.io/ml-pipeline/visualization-server:$COMMIT_SHA']
|
||||
id: 'tagVisualizationServerCommitSHA'
|
||||
waitFor: ['pullVisualizationServer']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/visualization-server:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/visualizationserver:$TAG_NAME']
|
||||
id: 'tagVisualizationServerForMarketplace'
|
||||
waitFor: ['pullVisualizationServer']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/visualization-server:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/visualizationserver:$TAG_NAME']
|
||||
id: 'tagVisualizationServerForMarketplaceTest'
|
||||
waitFor: ['pullVisualizationServer']
|
||||
- id: 'tagVisualizationServerForMarketplaceMajorMinor'
|
||||
waitFor: ['pullVisualizationServer', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/$PROJECT_ID/visualization-server:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/visualizationserver:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/$PROJECT_ID/visualization-server:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/visualizationserver:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/visualizationserver:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/visualizationserver:$(cat /workspace/mm.ver)
|
||||
|
||||
# ! Sync to the same MLMD version:
|
||||
# * backend/metadata_writer/requirements.in and requirements.txt
|
||||
# * @kubeflow/frontend/src/mlmd/generated
|
||||
# * .cloudbuild.yaml and .release.cloudbuild.yaml
|
||||
# * manifests/kustomize/base/metadata/base/metadata-grpc-deployment.yaml
|
||||
# * test/tag_for_hosted.sh
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/tfx-oss-public/ml_metadata_store_server:1.14.0']
|
||||
id: 'pullMetadataServer'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/tfx-oss-public/ml_metadata_store_server:1.14.0', 'gcr.io/ml-pipeline/google/pipelines/metadataserver:$TAG_NAME']
|
||||
id: 'tagMetadataServerForMarketplace'
|
||||
waitFor: ['pullMetadataServer']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/tfx-oss-public/ml_metadata_store_server:1.14.0', 'gcr.io/ml-pipeline/google/pipelines-test/metadataserver:$TAG_NAME']
|
||||
id: 'tagMetadataServerForMarketplaceTest'
|
||||
waitFor: ['pullMetadataServer']
|
||||
- id: 'tagMetadataServerForMarketplaceMajorMinor'
|
||||
waitFor: ['pullMetadataServer', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/tfx-oss-public/ml_metadata_store_server:1.14.0 gcr.io/ml-pipeline/google/pipelines/metadataserver:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/tfx-oss-public/ml_metadata_store_server:1.14.0 gcr.io/ml-pipeline/google/pipelines-test/metadataserver:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/metadataserver:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/metadataserver:$(cat /workspace/mm.ver)
|
||||
|
||||
- id: 'pullMetadataWriter'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA']
|
||||
waitFor: ['-']
|
||||
- id: 'tagMetadataWriterVersionNumber'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA', 'gcr.io/ml-pipeline/metadata-writer:$TAG_NAME']
|
||||
waitFor: ['pullMetadataWriter']
|
||||
- id: 'tagMetadataWriterCommitSHA'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA', 'gcr.io/ml-pipeline/metadata-writer:$COMMIT_SHA']
|
||||
waitFor: ['pullMetadataWriter']
|
||||
- id: 'tagMetadataWriterForMarketplace'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/metadatawriter:$TAG_NAME']
|
||||
waitFor: ['pullMetadataWriter']
|
||||
- id: 'tagMetadataWriterForMarketplaceTest'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/metadatawriter:$TAG_NAME']
|
||||
waitFor: ['pullMetadataWriter']
|
||||
- id: 'tagMetadataWriterForMarketplaceMajorMinor'
|
||||
waitFor: ['pullMetadataWriter', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/metadatawriter:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/metadatawriter:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/metadatawriter:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/metadatawriter:$(cat /workspace/mm.ver)
|
||||
|
||||
- id: 'pullCacheServer'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA']
|
||||
waitFor: ['-']
|
||||
- id: 'tagCacheServerVersionNumber'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA', 'gcr.io/ml-pipeline/cache-server:$TAG_NAME']
|
||||
waitFor: ['pullCacheServer']
|
||||
- id: 'tagCacheServerCommitSHA'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA', 'gcr.io/ml-pipeline/cache-server:$COMMIT_SHA']
|
||||
waitFor: ['pullCacheServer']
|
||||
- id: 'tagCacheServerForMarketplace'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/cacheserver:$TAG_NAME']
|
||||
waitFor: ['pullCacheServer']
|
||||
- id: 'tagCacheServerForMarketplaceTest'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/cacheserver:$TAG_NAME']
|
||||
waitFor: ['pullCacheServer']
|
||||
- id: 'tagCacheServerForMarketplaceMajorMinor'
|
||||
waitFor: ['pullCacheServer', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/cacheserver:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/cacheserver:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/cacheserver:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/cacheserver:$(cat /workspace/mm.ver)
|
||||
|
||||
- id: 'pullCacheDeployer'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA']
|
||||
waitFor: ['-']
|
||||
- id: 'tagCacheDeployerVersionNumber'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA', 'gcr.io/ml-pipeline/cache-deployer:$TAG_NAME']
|
||||
waitFor: ['pullCacheDeployer']
|
||||
- id: 'tagCacheDeployerCommitSHA'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA', 'gcr.io/ml-pipeline/cache-deployer:$COMMIT_SHA']
|
||||
waitFor: ['pullCacheDeployer']
|
||||
- id: 'tagCacheDeployerForMarketplace'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/cachedeployer:$TAG_NAME']
|
||||
waitFor: ['pullCacheDeployer']
|
||||
- id: 'tagCacheDeployerForMarketplaceTest'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/cachedeployer:$TAG_NAME']
|
||||
waitFor: ['pullCacheDeployer']
|
||||
- id: 'tagCacheDeployerForMarketplaceMajorMinor'
|
||||
waitFor: ['pullCacheDeployer', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/cachedeployer:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/cachedeployer:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/cachedeployer:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/cachedeployer:$(cat /workspace/mm.ver)
|
||||
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA']
|
||||
id: 'pullMetadataEnvoy'
|
||||
- id: 'tagMetadataEnvoyVersionNumber'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA', 'gcr.io/ml-pipeline/metadata-envoy:$TAG_NAME']
|
||||
waitFor: ['pullMetadataEnvoy']
|
||||
- id: 'tagMetadataEnvoyCommitSHA'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA', 'gcr.io/ml-pipeline/metadata-envoy:$COMMIT_SHA']
|
||||
waitFor: ['pullMetadataEnvoy']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/metadataenvoy:$TAG_NAME']
|
||||
id: 'tagMetadataEnvoyForMarketplace'
|
||||
waitFor: ['pullMetadataEnvoy']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/metadataenvoy:$TAG_NAME']
|
||||
id: 'tagMetadataEnvoyForMarketplaceTest'
|
||||
waitFor: ['pullMetadataEnvoy']
|
||||
- id: 'tagMetadataEnvoyForMarketplaceMajorMinor'
|
||||
waitFor: ['pullMetadataEnvoy', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/metadataenvoy:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/metadataenvoy:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/metadataenvoy:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/metadataenvoy:$(cat /workspace/mm.ver)
|
||||
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/ml-pipeline/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance']
|
||||
id: 'pullMinio'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/ml-pipeline/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance', 'gcr.io/ml-pipeline/google/pipelines/minio:$TAG_NAME']
|
||||
id: 'tagMinioForMarketplace'
|
||||
waitFor: ['pullMinio']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/ml-pipeline/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance', 'gcr.io/ml-pipeline/google/pipelines-test/minio:$TAG_NAME']
|
||||
id: 'tagMinioForMarketplaceTest'
|
||||
waitFor: ['pullMinio']
|
||||
- id: 'tagMinioForMarketplaceMajorMinor'
|
||||
waitFor: ['pullMinio', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/ml-pipeline/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance gcr.io/ml-pipeline/google/pipelines/minio:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/ml-pipeline/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance gcr.io/ml-pipeline/google/pipelines-test/minio:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/minio:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/minio:$(cat /workspace/mm.ver)
|
||||
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/ml-pipeline/mysql:8.0.26']
|
||||
id: 'pullMysql'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/ml-pipeline/mysql:8.0.26', 'gcr.io/ml-pipeline/google/pipelines/mysql:$TAG_NAME']
|
||||
id: 'tagMySqlForMarketplace'
|
||||
waitFor: ['pullMysql']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/ml-pipeline/mysql:8.0.26', 'gcr.io/ml-pipeline/google/pipelines-test/mysql:$TAG_NAME']
|
||||
id: 'tagMySqlForMarketplaceTest'
|
||||
waitFor: ['pullMysql']
|
||||
- id: 'tagMySqlForMarketplaceMajorMinor'
|
||||
waitFor: ['pullMysql', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/ml-pipeline/mysql:8.0.26 gcr.io/ml-pipeline/google/pipelines/mysql:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/ml-pipeline/mysql:8.0.26 gcr.io/ml-pipeline/google/pipelines-test/mysql:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/mysql:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/mysql:$(cat /workspace/mm.ver)
|
||||
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/cloudsql-docker/gce-proxy:1.25.0']
|
||||
id: 'pullCloudsqlProxy'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/cloudsql-docker/gce-proxy:1.25.0', 'gcr.io/ml-pipeline/google/pipelines/cloudsqlproxy:$TAG_NAME']
|
||||
id: 'tagCloudSqlProxyForMarketplace'
|
||||
waitFor: ['pullCloudsqlProxy']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/cloudsql-docker/gce-proxy:1.25.0', 'gcr.io/ml-pipeline/google/pipelines-test/cloudsqlproxy:$TAG_NAME']
|
||||
id: 'tagCloudSqlProxyForMarketplaceTest'
|
||||
waitFor: ['pullCloudsqlProxy']
|
||||
- id: 'tagCloudSqlProxyForMarketplaceMajorMinor'
|
||||
waitFor: ['pullCloudsqlProxy', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/cloudsql-docker/gce-proxy:1.25.0 gcr.io/ml-pipeline/google/pipelines/cloudsqlproxy:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/cloudsql-docker/gce-proxy:1.25.0 gcr.io/ml-pipeline/google/pipelines-test/cloudsqlproxy:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/cloudsqlproxy:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/cloudsqlproxy:$(cat /workspace/mm.ver)
|
||||
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/ml-pipeline/argoexec:v3.4.17-license-compliance']
|
||||
id: 'pullArgoExecutor'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/ml-pipeline/argoexec:v3.4.17-license-compliance', 'gcr.io/ml-pipeline/google/pipelines/argoexecutor:$TAG_NAME']
|
||||
id: 'tagArgoExecutorForMarketplace'
|
||||
waitFor: ['pullArgoExecutor']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/ml-pipeline/argoexec:v3.4.17-license-compliance', 'gcr.io/ml-pipeline/google/pipelines-test/argoexecutor:$TAG_NAME']
|
||||
id: 'tagArgoExecutorForMarketplaceTest'
|
||||
waitFor: ['pullArgoExecutor']
|
||||
- id: 'tagArgoExecutorForMarketplaceMajorMinor'
|
||||
waitFor: ['pullArgoExecutor', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/ml-pipeline/argoexec:v3.4.17-license-compliance gcr.io/ml-pipeline/google/pipelines/argoexecutor:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/ml-pipeline/argoexec:v3.4.17-license-compliance gcr.io/ml-pipeline/google/pipelines-test/argoexecutor:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/argoexecutor:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/argoexecutor:$(cat /workspace/mm.ver)
|
||||
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/ml-pipeline/workflow-controller:v3.4.17-license-compliance']
|
||||
id: 'pullArgoWorkflowController'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/ml-pipeline/workflow-controller:v3.4.17-license-compliance', 'gcr.io/ml-pipeline/google/pipelines/argoworkflowcontroller:$TAG_NAME']
|
||||
id: 'tagArgoWorkflowControllerForMarketplace'
|
||||
waitFor: ['pullArgoWorkflowController']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/ml-pipeline/workflow-controller:v3.4.17-license-compliance', 'gcr.io/ml-pipeline/google/pipelines-test/argoworkflowcontroller:$TAG_NAME']
|
||||
id: 'tagArgoWorkflowControllerForMarketplaceTest'
|
||||
waitFor: ['pullArgoWorkflowController']
|
||||
- id: 'tagArgoWorkflowControllerForMarketplaceMajorMinor'
|
||||
waitFor: ['pullArgoWorkflowController', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/ml-pipeline/workflow-controller:v3.4.17-license-compliance gcr.io/ml-pipeline/google/pipelines/argoworkflowcontroller:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/ml-pipeline/workflow-controller:v3.4.17-license-compliance gcr.io/ml-pipeline/google/pipelines-test/argoworkflowcontroller:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/argoworkflowcontroller:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/argoworkflowcontroller:$(cat /workspace/mm.ver)
|
||||
|
||||
# Marketplace specific deployer and specific primary image
|
||||
- id: 'pullMarketplaceDeployer'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA']
|
||||
waitFor: ['-']
|
||||
- id: 'tagMarketplaceDeployerVersionNumber'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/deployer:$TAG_NAME']
|
||||
waitFor: ['pullMarketplaceDeployer']
|
||||
- id: 'tagMarketplaceDeployerVersionNumberTest'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/deployer:$TAG_NAME']
|
||||
waitFor: ['pullMarketplaceDeployer']
|
||||
- id: 'tagMarketplaceDeployerVersionNumberMajorMinor'
|
||||
waitFor: ['pullMarketplaceDeployer', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/deployer:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/deployer:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/deployer:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/deployer:$(cat /workspace/mm.ver)
|
||||
|
||||
- id: 'tagMarketplacePrimaryVersionNumber'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines:$TAG_NAME']
|
||||
waitFor: ['pullMarketplaceDeployer']
|
||||
- id: 'tagMarketplacePrimaryVersionNumberTest'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test:$TAG_NAME']
|
||||
waitFor: ['pullMarketplaceDeployer']
|
||||
- id: 'tagMarketplacePrimaryVersionNumberMajorMinor'
|
||||
waitFor: ['pullMarketplaceDeployer', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test:$(cat /workspace/mm.ver)
|
||||
|
||||
# # Copy the Python SDK
|
||||
# - name: 'gcr.io/cloud-builders/gsutil'
|
||||
# args: ['cp', 'gs://$PROJECT_ID/builds/$COMMIT_SHA/kfp.tar.gz', '/workspace/']
|
||||
# id: 'copyPythonSDKLocal'
|
||||
# - name: 'gcr.io/cloud-builders/gsutil'
|
||||
# args: ['cp', '/workspace/kfp.tar.gz', 'gs://ml-pipeline/release/$TAG_NAME/kfp.tar.gz']
|
||||
# id: 'copyPythonSDK'
|
||||
# waitFor: ['copyPythonSDKLocal']
|
||||
# - name: 'gcr.io/cloud-builders/gsutil'
|
||||
# args: ['cp', '/workspace/kfp.tar.gz', 'gs://ml-pipeline/release/latest/kfp.tar.gz']
|
||||
# id: 'copyPythonSDKToLatest'
|
||||
# waitFor: ['copyPythonSDKLocal']
|
||||
|
||||
# # Copy the Python Component SDK
|
||||
# - name: 'gcr.io/cloud-builders/gsutil'
|
||||
# args: ['cp', 'gs://$PROJECT_ID/builds/$COMMIT_SHA/kfp-component.tar.gz', '/workspace/']
|
||||
# id: 'copyPythonComponentSDKLocal'
|
||||
# - name: 'gcr.io/cloud-builders/gsutil'
|
||||
# args: ['cp', '/workspace/kfp-component.tar.gz', 'gs://ml-pipeline/release/$TAG_NAME/kfp-component.tar.gz']
|
||||
# id: 'copyPythonComponentSDK'
|
||||
# waitFor: ['copyPythonComponentSDKLocal']
|
||||
# - name: 'gcr.io/cloud-builders/gsutil'
|
||||
# args: ['cp', '/workspace/kfp-component.tar.gz', 'gs://ml-pipeline/release/latest/kfp-component.tar.gz']
|
||||
# id: 'copyPythonComponentSDKToLatest'
|
||||
# waitFor: ['copyPythonComponentSDKLocal']
|
||||
|
||||
images:
|
||||
- 'gcr.io/ml-pipeline/scheduledworkflow:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/scheduledworkflow:$COMMIT_SHA'
|
||||
- 'gcr.io/ml-pipeline/persistenceagent:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/persistenceagent:$COMMIT_SHA'
|
||||
- 'gcr.io/ml-pipeline/viewer-crd-controller:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/viewer-crd-controller:$COMMIT_SHA'
|
||||
- 'gcr.io/ml-pipeline/inverse-proxy-agent:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/inverse-proxy-agent:$COMMIT_SHA'
|
||||
- 'gcr.io/ml-pipeline/visualization-server:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/visualization-server:$COMMIT_SHA'
|
||||
- 'gcr.io/ml-pipeline/metadata-envoy:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/metadata-envoy:$COMMIT_SHA'
|
||||
- 'gcr.io/ml-pipeline/metadata-writer:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/metadata-writer:$COMMIT_SHA'
|
||||
- 'gcr.io/ml-pipeline/cache-server:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/cache-server:$COMMIT_SHA'
|
||||
- 'gcr.io/ml-pipeline/cache-deployer:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/cache-deployer:$COMMIT_SHA'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/scheduledworkflow:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/viewercrd:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/persistenceagent:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/proxyagent:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/visualizationserver:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/metadataserver:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/minio:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/mysql:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/cloudsqlproxy:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/argoexecutor:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/argoworkflowcontroller:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/metadataenvoy:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/metadatawriter:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/deployer:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/cacheserver:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/cachedeployer:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/scheduledworkflow:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/viewercrd:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/persistenceagent:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/proxyagent:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/visualizationserver:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/metadataserver:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/minio:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/mysql:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/cloudsqlproxy:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/argoexecutor:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/argoworkflowcontroller:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/metadataenvoy:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/metadatawriter:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/cacheserver:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/cachedeployer:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/deployer:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test:$TAG_NAME'
|
||||
timeout: '2000s'
|
||||
tags:
|
||||
- release-on-tag
|
|
@ -0,0 +1,12 @@
|
|||
# Adopters of Kubeflow Pipelines
|
||||
|
||||
Below are the adopters of project Kubeflow Pipelines. If you are using Kubeflow Pipelines
|
||||
please add yourself into the following list by a pull request.
|
||||
Please keep the list in alphabetical order.
|
||||
|
||||
| Organization | Contact | Description of Use |
|
||||
|------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------|--------------------------------------------------------|
|
||||
| [Capital One](https://www.capitalone.com/) | [@droctothorpe](https://github.com/droctothorpe) | ML/AI Workflow orchestration |
|
||||
| [IBM Research Foundation Model Data Engineering Team](https://www.research.ibm.com/) | [@yuanchi2807](https://github.com/yuanchi2807), [@roytman](https://github.com/roytman) | Foundation Model Data Engineering |
|
||||
| [Red Hat](https://www.redhat.com/) | [@franciscojavierarceo](https://github.com/franciscojavierarceo) | ML/AI & Data orchestration |
|
||||
|
299
CHANGELOG.md
299
CHANGELOG.md
|
@ -1,5 +1,304 @@
|
|||
# Changelog
|
||||
|
||||
## [2.5.0](https://github.com/kubeflow/pipelines/compare/2.4.1...2.5.0) (2025-04-29)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* **backend:** Add a mutating webhook for the PipelineVersion kind (#11782) ([c9be64d](https://github.com/kubeflow/pipelines/commit/c9be64dca362a33dcfad186fe579066a646a6df1))
|
||||
* **backend:** Add the ability to set a proxy for accessing external resources (#11771) ([6e3548f](https://github.com/kubeflow/pipelines/commit/6e3548f33e226ba374e4d43a175ae8ac9018e268))
|
||||
* **backend:** Add types for KFP Kubernetes Native API (#11672) ([0d9a7b0](https://github.com/kubeflow/pipelines/commit/0d9a7b00e926130b07058ea71148fbb9cab69d2b))
|
||||
* **backend:** Create a validating webhook for the PipelineVersion kind (#11774) ([2efcde5](https://github.com/kubeflow/pipelines/commit/2efcde5efd3952b91ea79a5ee6dbf064282f719a))
|
||||
* **backend:** implement logs as artifacts (#11762) ([cd3e747](https://github.com/kubeflow/pipelines/commit/cd3e747b5de3d7e1e338e309cc57311dd4a91258))
|
||||
* **backend:** implement logs as artifacts + CI updates (#11809) ([464ca39](https://github.com/kubeflow/pipelines/commit/464ca3974fbbc46e022f863e49c4fbaabd1a8265))
|
||||
* **backend/sdk:** Add input parameterization for various k8s resources (#11770) ([fd1b48b](https://github.com/kubeflow/pipelines/commit/fd1b48b4712038afe8a78e37843672d4773dc080))
|
||||
* **proto:** Add TTL fields to KFP IR yaml proto (#11758) ([c5aba41](https://github.com/kubeflow/pipelines/commit/c5aba41bcaf3c214d984db4571c1ecae4a0d551d))
|
||||
* **sdk:** add upload pipeline and upload pipeline version from pipeline function (#11804) ([1ad4f60](https://github.com/kubeflow/pipelines/commit/1ad4f608a0b9dea2362cf89f9cf7abdebf20e080))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **backend:** Fix run submissions with OwnerReferencesPermissionEnforcement on (#11821) ([69ba50b](https://github.com/kubeflow/pipelines/commit/69ba50b3fb03bd8441f833950a6c77835a2d47a1))
|
||||
* **backend:** fixed Dockerfile (#11841) ([d38418e](https://github.com/kubeflow/pipelines/commit/d38418efeadec3ea3bea55b3373bcc311dacc135))
|
||||
* **backend:** Include missing go.mod for cacheserver/viewercontroller images (#11776) ([715ed40](https://github.com/kubeflow/pipelines/commit/715ed40b92f9bca521f94e0df5201425d9d30866))
|
||||
* **components:** Set tensorboard_experiment_name to random uuid when uploading plots in Starry Net pipeline ([cc56d04](https://github.com/kubeflow/pipelines/commit/cc56d04c46d01666a8e091b124473c2654e1b6d3))
|
||||
* **deployment:** Update kustomize manifests to use new label and patch syntax (#11733) ([230c1b8](https://github.com/kubeflow/pipelines/commit/230c1b8f1332ffab575b2e69b65d9a6958167195))
|
||||
* **deps:** widen urllib3 upper bound to <3.0.0 (#11819) ([866ff35](https://github.com/kubeflow/pipelines/commit/866ff3556a4454ccb52f1594bbca4167a04c0d3e))
|
||||
* **docs:** Remove Podman as backend README pre-req (#11824) ([88cff55](https://github.com/kubeflow/pipelines/commit/88cff559142e5a985cf31620f07b71244645cb4a))
|
||||
* **docs:** Use the latest driver and launcher images in the dev environment (#11820) ([92e4921](https://github.com/kubeflow/pipelines/commit/92e4921c4cce8155093bf7e332abfbf03bd6eaef))
|
||||
* **local:** warn about oci:// not supported too (#11794) ([564522c](https://github.com/kubeflow/pipelines/commit/564522c42de9136dec67f1bf29590bdd64bf2333))
|
||||
* **metadata-writer:** use mlmd_store.get_context_types() instead of workaround (#11753) ([35041ef](https://github.com/kubeflow/pipelines/commit/35041ef2bd4d9b3261f1250f5803786ed9e453fe))
|
||||
* **sdk:** Add SDK support for setting resource limits on older KFP versions (#11839) ([f9d487c](https://github.com/kubeflow/pipelines/commit/f9d487cb605727f357f58783db298d96898b24d1))
|
||||
* **sdk:** allow google-cloud-storage < 4 (#11735) ([bd4fc5c](https://github.com/kubeflow/pipelines/commit/bd4fc5c6677402d5f2d9ac45481ac86f25da4640))
|
||||
* **sdk:** avoid conflicting component names in DAG when reusing pipelines (#11071) ([d1b15ef](https://github.com/kubeflow/pipelines/commit/d1b15ef4da33cbeafa491564318c7e2a68dc431f))
|
||||
* **tests:** free up space in some test runners (#11818) ([478ca08](https://github.com/kubeflow/pipelines/commit/478ca089012e64edd371feff4ece9d0d156d4710))
|
||||
* minio fsgroup for popular clusters (#11734) ([8d0ae53](https://github.com/kubeflow/pipelines/commit/8d0ae5381e8366905c90009c56fd0e4807e94f0f))
|
||||
|
||||
|
||||
### Other Pull Requests
|
||||
|
||||
* Fix Integration tests ([0359551](https://github.com/kubeflow/pipelines/commit/0359551b7601165ba8bf7cc24fdc1858224c0d2d))
|
||||
* add list or dict support for add toleration json ([fb18235](https://github.com/kubeflow/pipelines/commit/fb182355f08e41eff1ac530be1afac0bad69e15d))
|
||||
* add backend support for toleration lists. ([90909fc](https://github.com/kubeflow/pipelines/commit/90909fc0ef58b71362017a3e48c924b38c389183))
|
||||
* switch selenium image to ghcr ([7529bbe](https://github.com/kubeflow/pipelines/commit/7529bbeba7f245366ca1cbc280169e20a7100a6a))
|
||||
* add missing release note updates to sdk main branch (#11842) ([611d582](https://github.com/kubeflow/pipelines/commit/611d5820049dc51ddf261d7d1368c4858dad5159))
|
||||
* fix component retry test (#11836) ([598826e](https://github.com/kubeflow/pipelines/commit/598826e1ccfecb5f34716876053a22cdc6605ae4))
|
||||
* **chore:** add cleanup resources to sdk execution tests (#11823) ([eee4986](https://github.com/kubeflow/pipelines/commit/eee4986f180cd4e7469a65a3c5f4ffbf3ec0b46c))
|
||||
* update driver & launcher image handling (#11533) ([38a4653](https://github.com/kubeflow/pipelines/commit/38a46533fcd47aa31e825109e2bf6940d127910a))
|
||||
* **chore:** add image builds for default branch (#11800) ([eacb586](https://github.com/kubeflow/pipelines/commit/eacb586f6225bb277642f4977552f799850e06a1))
|
||||
* fix setup env for kfp k8s lib tests (#11798) ([f10c7bf](https://github.com/kubeflow/pipelines/commit/f10c7bfbbcf01eb25f2fa8a437da62bbf07dc1f5))
|
||||
* Handle optional pipeline inputs in the driver (#11788) ([bb7a108](https://github.com/kubeflow/pipelines/commit/bb7a1082c4c5a3fb308aac2bf37bab476c3c4df6))
|
||||
* Fix recurring run output when always using latest (#11790) ([048f283](https://github.com/kubeflow/pipelines/commit/048f28332b6a0b6684632e76dcb284de2f81d829))
|
||||
* increase stale action timers (#11792) ([ade8a2d](https://github.com/kubeflow/pipelines/commit/ade8a2d072efa9897a5a0173316836236d629238))
|
||||
* Fix PSS restricted warnings (#11751) ([01999b8](https://github.com/kubeflow/pipelines/commit/01999b8fea23db52da0f633e475c457fc06ca531))
|
||||
* fix(CI) Github action is vulnerable to code execution via `comment body` (#11772) ([95c3f2c](https://github.com/kubeflow/pipelines/commit/95c3f2c04d8f19b8b656ddbda046ed9f2c81130a))
|
||||
* Fix Istio sidecar injection by moving from annotations to labels (#11750) ([df4e9c2](https://github.com/kubeflow/pipelines/commit/df4e9c2bf5b645f4a3fa831b073846eae5eaceb7))
|
||||
|
||||
## [2.5.0](https://github.com/kubeflow/pipelines/compare/2.3.0...2.5.0) (2025-04-28)
|
||||
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
* **sdk:** stop auto-populating metrics as dag output ([\#11362](https://github.com/kubeflow/pipelines/issues/11362))
|
||||
* **components:** Deprecate preview.custom_job module
|
||||
* **sdk:** Pin kfp-pipeline-spec==0.4.0, kfp-server-api>=2.1.0,<2.4.0 ([\#11192](https://github.com/kubeflow/pipelines/issues/11192))
|
||||
|
||||
### Features
|
||||
|
||||
* **api:** add PipelineConfig to api to re-implement pipeline-level config ([\#11333](https://github.com/kubeflow/pipelines/issues/11333)) ([c2f5649](https://github.com/kubeflow/pipelines/commit/c2f56495b9b1e9eda1b44b6106e12d5290a89ed7))
|
||||
* **api:** Add SemaphoreKey and MutexName fields to proto ([\#11384](https://github.com/kubeflow/pipelines/issues/11384)) ([915cc55](https://github.com/kubeflow/pipelines/commit/915cc552f56359454b91870df0e5eea1ecda2218))
|
||||
* **backend:** Add a mutating webhook for the PipelineVersion kind ([\#11782](https://github.com/kubeflow/pipelines/issues/11782)) ([c9be64d](https://github.com/kubeflow/pipelines/commit/c9be64dca362a33dcfad186fe579066a646a6df1))
|
||||
* **backend:** add configurable S3 path style support ([\#11246](https://github.com/kubeflow/pipelines/issues/11246)) ([85fdd73](https://github.com/kubeflow/pipelines/commit/85fdd73ae0bb1c2ce01da6311807b37cfc589710))
|
||||
* **backend:** Add Parallelism Limit to ParallelFor tasks. Fixes [\#8718](https://github.com/kubeflow/pipelines/issues/8718) ([\#10798](https://github.com/kubeflow/pipelines/issues/10798)) ([b7d8c97](https://github.com/kubeflow/pipelines/commit/b7d8c97d65af575b71efe6755eb67b0bb9126f01))
|
||||
* **backend:** Add support for importing models stored in the Modelcar format (sidecar) ([\#11606](https://github.com/kubeflow/pipelines/issues/11606)) ([cc1c435](https://github.com/kubeflow/pipelines/commit/cc1c435f1e06aad3e9b83e57768512a63460b15b))
|
||||
* **backend:** Add support for job and task placeholders in the KFP backend ([\#11599](https://github.com/kubeflow/pipelines/issues/11599)) ([6a13f4b](https://github.com/kubeflow/pipelines/commit/6a13f4bad07e5a22006b73b21515df9d597222f0))
|
||||
* **backend:** add support for uploading new sample pipeline vers ([\#11553](https://github.com/kubeflow/pipelines/issues/11553)) ([d2ddb2e](https://github.com/kubeflow/pipelines/commit/d2ddb2ed1c2afa64fd6014c95190416ff5cdd621))
|
||||
* **backend:** Add the ability to set a proxy for accessing external resources ([\#11771](https://github.com/kubeflow/pipelines/issues/11771)) ([6e3548f](https://github.com/kubeflow/pipelines/commit/6e3548f33e226ba374e4d43a175ae8ac9018e268))
|
||||
* **backend:** Add types for KFP Kubernetes Native API ([\#11672](https://github.com/kubeflow/pipelines/issues/11672)) ([0d9a7b0](https://github.com/kubeflow/pipelines/commit/0d9a7b00e926130b07058ea71148fbb9cab69d2b))
|
||||
* **backend:** Allow recurring runs to always use the latest pipeline version ([\#11560](https://github.com/kubeflow/pipelines/issues/11560)) ([9c5b72c](https://github.com/kubeflow/pipelines/commit/9c5b72c2d0b298fc1b6c984e135b74e7a736a7b9))
|
||||
* **backend:** configurable log level for driver / launcher images ([\#11278](https://github.com/kubeflow/pipelines/issues/11278)) ([d2c0376](https://github.com/kubeflow/pipelines/commit/d2c0376b0aff70304fe049c415dc97a22d883966))
|
||||
* **backend:** Create a validating webhook for the PipelineVersion kind ([\#11774](https://github.com/kubeflow/pipelines/issues/11774)) ([2efcde5](https://github.com/kubeflow/pipelines/commit/2efcde5efd3952b91ea79a5ee6dbf064282f719a))
|
||||
* **backend:** implement logs as artifacts ([\#11762](https://github.com/kubeflow/pipelines/issues/11762)) ([cd3e747](https://github.com/kubeflow/pipelines/commit/cd3e747b5de3d7e1e338e309cc57311dd4a91258))
|
||||
* **backend:** implement logs as artifacts + CI updates ([\#11809](https://github.com/kubeflow/pipelines/issues/11809)) ([464ca39](https://github.com/kubeflow/pipelines/commit/464ca3974fbbc46e022f863e49c4fbaabd1a8265))
|
||||
* **backend:** implement subdag output resolution ([\#11196](https://github.com/kubeflow/pipelines/issues/11196)) ([c5b787a](https://github.com/kubeflow/pipelines/commit/c5b787aacc4fddeeb1ebc526a83159540cd7b311))
|
||||
* **backend:** Remove PipelineSpec Template storage from ObjStore responsibilities. Fixes [\#10509](https://github.com/kubeflow/pipelines/issues/10509) ([\#10790](https://github.com/kubeflow/pipelines/issues/10790)) ([374b18b](https://github.com/kubeflow/pipelines/commit/374b18bc3366a51f4b92821cdb3a942bc12343a0))
|
||||
* **backend/sdk:** Add input parameterization for various k8s resources ([\#11770](https://github.com/kubeflow/pipelines/issues/11770)) ([fd1b48b](https://github.com/kubeflow/pipelines/commit/fd1b48b4712038afe8a78e37843672d4773dc080))
|
||||
* **cli:** expose existing_token client property ([\#11400](https://github.com/kubeflow/pipelines/issues/11400)) ([35793be](https://github.com/kubeflow/pipelines/commit/35793be4168584b1084169b723bfb216aa4a03b6))
|
||||
* **component:** Created Snowflake data unload component ([\#11349](https://github.com/kubeflow/pipelines/issues/11349)) ([22e7780](https://github.com/kubeflow/pipelines/commit/22e77805ed41a72837f7cd15a9d679f42169b253))
|
||||
* **component:** execute in a virtual env ([\#11326](https://github.com/kubeflow/pipelines/issues/11326)) ([df28e89](https://github.com/kubeflow/pipelines/commit/df28e891c4374f7eac98cc6a4892b6e6c35a43f2))
|
||||
* **components:** Add reservation_affinity support in v1.create_custom_training_job_from_component ([c84241b](https://github.com/kubeflow/pipelines/commit/c84241b7362c0351109bc0ddbc2f697479ff8675))
|
||||
* **components:** add strategy to v1 GCPC custom job components/utils ([1cdd648](https://github.com/kubeflow/pipelines/commit/1cdd648239ff850bf5baae48e4e7bd1b24330dd5))
|
||||
* **components:** Deprecate preview.custom_job module ([abbd915](https://github.com/kubeflow/pipelines/commit/abbd915a2ac32b22151efef662b937601602ba9d))
|
||||
* **components:** Fix typos: 'statgey' -> 'strategy' in v1 GCPC custom job components/utils ([dcaf5a4](https://github.com/kubeflow/pipelines/commit/dcaf5a48e5feae6e61de6c033bee5f8f6675a630))
|
||||
* **components:** Introduce max_wait_duration to custom job to v1 GCPC custom job components/utils ([6cb7cf7](https://github.com/kubeflow/pipelines/commit/6cb7cf71fa81cc27e87a002f3d7685d9fc828d88))
|
||||
* **docs:** Add architecture diagram ([\#11490](https://github.com/kubeflow/pipelines/issues/11490)) ([3e423d8](https://github.com/kubeflow/pipelines/commit/3e423d8d1c0f8f7700a4b2138d8e9bd20a02a9f2))
|
||||
* **docs:** Replace ADRs with KEPs for documentation ([\#11535](https://github.com/kubeflow/pipelines/issues/11535)) ([7497b65](https://github.com/kubeflow/pipelines/commit/7497b65067aa1e596605c03e8dc4c07c963f907b))
|
||||
* **frontend/backend:** Allow the ability to sort experiments by last run creation. Fixes [\#10884](https://github.com/kubeflow/pipelines/issues/10884) ([\#11163](https://github.com/kubeflow/pipelines/issues/11163)) ([db8669c](https://github.com/kubeflow/pipelines/commit/db8669c33e60bb8910710359c0638d21ec27ac7c))
|
||||
* **proto:** Add TTL fields to KFP IR yaml proto ([\#11758](https://github.com/kubeflow/pipelines/issues/11758)) ([c5aba41](https://github.com/kubeflow/pipelines/commit/c5aba41bcaf3c214d984db4571c1ecae4a0d551d))
|
||||
* **sdk:** Add Input Parameter support for configmap, secrets, node selectors, tolerations, pull secrets ([\#11621](https://github.com/kubeflow/pipelines/issues/11621)) ([7838009](https://github.com/kubeflow/pipelines/commit/78380095385be25e69b891ccb312b2857a200fdd))
|
||||
* **sdk:** add upload pipeline and upload pipeline version from pipeline function ([\#11804](https://github.com/kubeflow/pipelines/issues/11804)) ([1ad4f60](https://github.com/kubeflow/pipelines/commit/1ad4f608a0b9dea2362cf89f9cf7abdebf20e080))
|
||||
* Introduce cache_key for cache key customization ([\#11434](https://github.com/kubeflow/pipelines/issues/11434)) ([50b367f](https://github.com/kubeflow/pipelines/commit/50b367f232b2d37b762745c8b4296a29c9d8fd45))
|
||||
* Introduce cache_key to sdk ([\#11466](https://github.com/kubeflow/pipelines/issues/11466)) ([42fc132](https://github.com/kubeflow/pipelines/commit/42fc13261628d764296607d9e12ecad13e721a68))
|
||||
* **sdk:** add PipelineConfig to DSL to re-implement pipeline-level config ([\#11112](https://github.com/kubeflow/pipelines/issues/11112)) ([df4d787](https://github.com/kubeflow/pipelines/commit/df4d7878c4ce25c801a916351bcbce1266a9daf1))
|
||||
* **sdk:** Allow disabling default caching via a CLI flag and env var ([\#11222](https://github.com/kubeflow/pipelines/issues/11222)) ([3f49522](https://github.com/kubeflow/pipelines/commit/3f495229f26ef08360048d050dfe014ca4b57b4f))
|
||||
* **sdk:** Pin kfp-pipeline-spec==0.4.0, kfp-server-api>=2.1.0,<2.4.0 ([\#11192](https://github.com/kubeflow/pipelines/issues/11192)) ([dfd4cc1](https://github.com/kubeflow/pipelines/commit/dfd4cc1e537523b04b01b6e209b5760bd2a007d5))
|
||||
* **sdk:** stop auto-populating metrics as dag output ([\#11362](https://github.com/kubeflow/pipelines/issues/11362)) ([8d018af](https://github.com/kubeflow/pipelines/commit/8d018aff6ed14b5bed7b3f90d9f450b3144ae18e))
|
||||
* **sdk:** support dynamic machine type parameters in pipeline task setters ([\#11097](https://github.com/kubeflow/pipelines/issues/11097)) ([70aaf8a](https://github.com/kubeflow/pipelines/commit/70aaf8a9a469607dc6e4aad58d40b39c75363b99))
|
||||
* **sdk/backend:** Add support for placeholders in resource limits ([\#11501](https://github.com/kubeflow/pipelines/issues/11501)) ([7c931ae](https://github.com/kubeflow/pipelines/commit/7c931ae20197b2309d7a8462f6ce099882a8f915))
|
||||
* **sdk/backend:** enable parameterization of container images ([\#11404](https://github.com/kubeflow/pipelines/issues/11404)) ([22e85de](https://github.com/kubeflow/pipelines/commit/22e85de2bcbd2ff5ed2a099e4f11a39ff27e4190))
|
||||
* **testing:** use kustomize to patch deployments before deploy ([\#11294](https://github.com/kubeflow/pipelines/issues/11294)) ([be863a8](https://github.com/kubeflow/pipelines/commit/be863a852997718701a1ee548d9db86dca7ffc33))
|
||||
* add fields in SinglePlatformSpec ([\#11299](https://github.com/kubeflow/pipelines/issues/11299)) ([a0d313e](https://github.com/kubeflow/pipelines/commit/a0d313e095c2b5fc1a32809c38cf96b13e5772b2))
|
||||
* **workflows:** use built images in Github workflows ([\#11284](https://github.com/kubeflow/pipelines/issues/11284)) ([1550b36](https://github.com/kubeflow/pipelines/commit/1550b363aed3745b476d2b3798725432329e8cea))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **backend:** Allow initializing the Kubernetes client with a kubeconfig ([\#11443](https://github.com/kubeflow/pipelines/issues/11443)) ([87bdb7c](https://github.com/kubeflow/pipelines/commit/87bdb7c3b1126ae5e899826be0834c11764edbae))
|
||||
* **backend:** Fix enable_caching issues when handling PVC creation/deletion ([\#11411](https://github.com/kubeflow/pipelines/issues/11411)) ([027ca8b](https://github.com/kubeflow/pipelines/commit/027ca8b9c9fe2bb603b2a08c843e726ed4837a1d))
|
||||
* **backend:** Fix run submissions with OwnerReferencesPermissionEnforcement on ([\#11821](https://github.com/kubeflow/pipelines/issues/11821)) ([69ba50b](https://github.com/kubeflow/pipelines/commit/69ba50b3fb03bd8441f833950a6c77835a2d47a1))
|
||||
* **backend:** fixed Dockerfile ([\#11841](https://github.com/kubeflow/pipelines/issues/11841)) ([d38418e](https://github.com/kubeflow/pipelines/commit/d38418efeadec3ea3bea55b3373bcc311dacc135))
|
||||
* **backend:** fixes DAG status update to reflect completion of all tasks ([\#11651](https://github.com/kubeflow/pipelines/issues/11651)) ([7719b38](https://github.com/kubeflow/pipelines/commit/7719b38061d489246294bd53b49aacfc105c8a19))
|
||||
* **backend:** handle client side HTTP timeouts to fix crashes of metadata-writer. Fixes [\#8200](https://github.com/kubeflow/pipelines/issues/8200) ([\#11361](https://github.com/kubeflow/pipelines/issues/11361)) ([94a21cc](https://github.com/kubeflow/pipelines/commit/94a21cc7e27a3824732e7f4c09a4d8b826dde5b8))
|
||||
* **backend:** ignore unknown fields for pb json unmarshaling ([\#11662](https://github.com/kubeflow/pipelines/issues/11662)) ([9afe23e](https://github.com/kubeflow/pipelines/commit/9afe23e74866f30f7333ea47abfe2d9efa4098c7))
|
||||
* **backend:** Include missing go.mod for cacheserver/viewercontroller images ([\#11776](https://github.com/kubeflow/pipelines/issues/11776)) ([715ed40](https://github.com/kubeflow/pipelines/commit/715ed40b92f9bca521f94e0df5201425d9d30866))
|
||||
* **backend:** modelToCRDTrigger was not including periodic schedule correctly ([\#11475](https://github.com/kubeflow/pipelines/issues/11475)) ([97acacb](https://github.com/kubeflow/pipelines/commit/97acacbd2a0b72d442398ca04382ac1e6d9aa37f))
|
||||
* **backend:** parallelFor resolve upstream inputs. Fixes [\#11520](https://github.com/kubeflow/pipelines/issues/11520) ([\#11627](https://github.com/kubeflow/pipelines/issues/11627)) ([f7c0616](https://github.com/kubeflow/pipelines/commit/f7c0616db75ac92581f973e8f05f962b965255b1))
|
||||
* **backend:** randomizing output uri path to avoid overwriting. Fixes [\#10186](https://github.com/kubeflow/pipelines/issues/10186) ([\#11243](https://github.com/kubeflow/pipelines/issues/11243)) ([219725d](https://github.com/kubeflow/pipelines/commit/219725d9f02b690cf0829a21faf092a3e4c65531))
|
||||
* **backend:** remove unused function argument ([\#11425](https://github.com/kubeflow/pipelines/issues/11425)) ([7f2278f](https://github.com/kubeflow/pipelines/commit/7f2278f25222992bedfcae5b613a7a06430f4542))
|
||||
* **backend:** removed old version comment ([\#11549](https://github.com/kubeflow/pipelines/issues/11549)) ([906b5c0](https://github.com/kubeflow/pipelines/commit/906b5c084146506e71685d2324566bd15dc25bec))
|
||||
* **backend:** Replaced hardcoded ServiceAccount with default config ([\#11578](https://github.com/kubeflow/pipelines/issues/11578)) ([18641e1](https://github.com/kubeflow/pipelines/commit/18641e16cbac7512f8f63f001acafd8a0bf52924))
|
||||
* **backend:** return error properly ([\#11424](https://github.com/kubeflow/pipelines/issues/11424)) ([13f83cf](https://github.com/kubeflow/pipelines/commit/13f83cf745eb5628d6ae5b25c1ca979d8c6d92ad))
|
||||
* **backend:** set default value to true for ForcePathStyle ([\#11281](https://github.com/kubeflow/pipelines/issues/11281)) ([391de8c](https://github.com/kubeflow/pipelines/commit/391de8ca9ec68fe4cd85bba6c82348386fc79842))
|
||||
* **backend:** stop heartbeat status updates for ScheduledWorkflows. Fixes [\#8757](https://github.com/kubeflow/pipelines/issues/8757) ([\#11363](https://github.com/kubeflow/pipelines/issues/11363)) ([9ccec4c](https://github.com/kubeflow/pipelines/commit/9ccec4c7d1aff4d2bfdb20cf4fd1f9d64b8632f4))
|
||||
* **backend:** Synced ScheduledWorkflow CRs on apiserver startup ([\#11469](https://github.com/kubeflow/pipelines/issues/11469)) ([d21fca6](https://github.com/kubeflow/pipelines/commit/d21fca650c8152d992ad5f7f590f70b1368bc60b))
|
||||
* **backend:** the metacontroller is broken since [\#11474](https://github.com/kubeflow/pipelines/issues/11474) ([\#11608](https://github.com/kubeflow/pipelines/issues/11608)) ([a40163f](https://github.com/kubeflow/pipelines/commit/a40163fdf2fe281cda91baf2f122c23664d5fcb9))
|
||||
* **backend:** upgrade go version to 1.22.12 to fix CVE-2024-45336 ([\#11631](https://github.com/kubeflow/pipelines/issues/11631)) ([87498e8](https://github.com/kubeflow/pipelines/commit/87498e8b60a167eccfef7cc29f888808ca954155))
|
||||
* **backend:** upgrade PyYAML to fix metadata_writer build error ([\#11231](https://github.com/kubeflow/pipelines/issues/11231)) ([a4119a6](https://github.com/kubeflow/pipelines/commit/a4119a6bf1fe220c84aaa5caa7051c423b5f145e))
|
||||
* **backend:** upgrade various old dependencies ([\#11448](https://github.com/kubeflow/pipelines/issues/11448)) ([803d7a8](https://github.com/kubeflow/pipelines/commit/803d7a8ebb00924107b890de01e2a53af78d9a5e))
|
||||
* **backend:** Use an Argo Workflow exit lifecycle hook for exit handlers ([\#11470](https://github.com/kubeflow/pipelines/issues/11470)) ([3059f7c](https://github.com/kubeflow/pipelines/commit/3059f7c124dc95f867e6f755f7c0720aaa32d48b))
|
||||
* **CI:** Use the correct image registry for replacements in integration tests ([\#11564](https://github.com/kubeflow/pipelines/issues/11564)) ([ac9b257](https://github.com/kubeflow/pipelines/commit/ac9b257a7a249c4b4c20b04d4c95ff8354c5b4e0))
|
||||
* **components:** Fix create_custom_training_job_from_component default location ([04d600b](https://github.com/kubeflow/pipelines/commit/04d600b2d36405f34799306c5d24287c75e31595))
|
||||
* **components:** remove default prediction column names in evaluation regression component to fix issues with bigquery data source ([753a2f1](https://github.com/kubeflow/pipelines/commit/753a2f148ac3f001bc785acc6359295e6fe521fd))
|
||||
* **components:** Set tensorboard_experiment_name to random uuid when uploading plots in Starry Net pipeline ([cc56d04](https://github.com/kubeflow/pipelines/commit/cc56d04c46d01666a8e091b124473c2654e1b6d3))
|
||||
* **deployment:** Update kustomize manifests to use new label and patch syntax ([\#11733](https://github.com/kubeflow/pipelines/issues/11733)) ([230c1b8](https://github.com/kubeflow/pipelines/commit/230c1b8f1332ffab575b2e69b65d9a6958167195))
|
||||
* **deps:** widen urllib3 upper bound to <3.0.0 ([\#11819](https://github.com/kubeflow/pipelines/issues/11819)) ([866ff35](https://github.com/kubeflow/pipelines/commit/866ff3556a4454ccb52f1594bbca4167a04c0d3e))
|
||||
* **docs:** Remove Podman as backend README pre-req ([\#11824](https://github.com/kubeflow/pipelines/issues/11824)) ([88cff55](https://github.com/kubeflow/pipelines/commit/88cff559142e5a985cf31620f07b71244645cb4a))
|
||||
* **docs:** Use the latest driver and launcher images in the dev environment ([\#11820](https://github.com/kubeflow/pipelines/issues/11820)) ([92e4921](https://github.com/kubeflow/pipelines/commit/92e4921c4cce8155093bf7e332abfbf03bd6eaef))
|
||||
* **frontend:** compatibility with pod_names v1 ([\#11682](https://github.com/kubeflow/pipelines/issues/11682)) ([afb3b14](https://github.com/kubeflow/pipelines/commit/afb3b1461bdd8c4d4cbc697abe1d7d1acfcdc38f))
|
||||
* **frontend:** Detailed information of nodes is not displayed when clicking the node. Fixes [\#11325](https://github.com/kubeflow/pipelines/issues/11325) ([\#11493](https://github.com/kubeflow/pipelines/issues/11493)) ([028d81b](https://github.com/kubeflow/pipelines/commit/028d81b624629d4610ddcdced5b982437ff88d08))
|
||||
* **frontend:** first time choosing a pipeline definition is VERY slow. Fixes [\#10897](https://github.com/kubeflow/pipelines/issues/10897) ([\#11130](https://github.com/kubeflow/pipelines/issues/11130)) ([cfb3b31](https://github.com/kubeflow/pipelines/commit/cfb3b3149d9ba02daec584af77ef763f936cd727))
|
||||
* **frontend:** Fix the frontend image build with Node 22 ([\#11524](https://github.com/kubeflow/pipelines/issues/11524)) ([533a3c6](https://github.com/kubeflow/pipelines/commit/533a3c6b667eb11b9cd7da2f6fe334252867fcc5))
|
||||
* **frontend:** fixes Default pipeline input params are missing from the GUI. Fixes [\#11515](https://github.com/kubeflow/pipelines/issues/11515) ([\#11518](https://github.com/kubeflow/pipelines/issues/11518)) ([8fe2157](https://github.com/kubeflow/pipelines/commit/8fe21574c644543fef55a2d515681d23fdfce508))
|
||||
* **frontend:** fixes optional pipeline inputs. Fixes [\#11632](https://github.com/kubeflow/pipelines/issues/11632) ([\#11657](https://github.com/kubeflow/pipelines/issues/11657)) ([a6b944b](https://github.com/kubeflow/pipelines/commit/a6b944b894a4a297a6310f5028a8c27e3603ac16))
|
||||
* **frontend:** restrict file explorer to show only .yaml, .yml, .zip, and .tar.gz files ([\#11623](https://github.com/kubeflow/pipelines/issues/11623)) ([c0778ba](https://github.com/kubeflow/pipelines/commit/c0778ba88c359d119453c2acc94c0168b3f53772))
|
||||
* **local:** warn about oci:// not supported too ([\#11794](https://github.com/kubeflow/pipelines/issues/11794)) ([564522c](https://github.com/kubeflow/pipelines/commit/564522c42de9136dec67f1bf29590bdd64bf2333))
|
||||
* **manifests:** Upgrading metacontroller to v4.11.22 ([\#11656](https://github.com/kubeflow/pipelines/issues/11656)) ([ebaaf75](https://github.com/kubeflow/pipelines/commit/ebaaf756319ac4ac9498aca5f7dfb3978ff36496))
|
||||
* **metadata-writer:** use mlmd_store.get_context_types() instead of workaround ([\#11753](https://github.com/kubeflow/pipelines/issues/11753)) ([35041ef](https://github.com/kubeflow/pipelines/commit/35041ef2bd4d9b3261f1250f5803786ed9e453fe))
|
||||
* **sdk:** accelerator type setting in kfp ([\#11373](https://github.com/kubeflow/pipelines/issues/11373)) ([64e3900](https://github.com/kubeflow/pipelines/commit/64e390069d6c60c97ea03e833529a0930398620f))
|
||||
* **sdk:** Add error handling. Fixes [\#11164](https://github.com/kubeflow/pipelines/issues/11164) ([\#11356](https://github.com/kubeflow/pipelines/issues/11356)) ([4a64fe9](https://github.com/kubeflow/pipelines/commit/4a64fe9532556a48585b9966db8e10c7de0a8d37))
|
||||
* **sdk:** Add SDK support for setting resource limits on older KFP versions ([\#11839](https://github.com/kubeflow/pipelines/issues/11839)) ([f9d487c](https://github.com/kubeflow/pipelines/commit/f9d487cb605727f357f58783db298d96898b24d1))
|
||||
* **sdk:** allow google-cloud-storage < 4 ([\#11735](https://github.com/kubeflow/pipelines/issues/11735)) ([bd4fc5c](https://github.com/kubeflow/pipelines/commit/bd4fc5c6677402d5f2d9ac45481ac86f25da4640))
|
||||
* **sdk:** avoid conflicting component names in DAG when reusing pipelines ([\#11071](https://github.com/kubeflow/pipelines/issues/11071)) ([d1b15ef](https://github.com/kubeflow/pipelines/commit/d1b15ef4da33cbeafa491564318c7e2a68dc431f))
|
||||
* **sdk:** Backport fixes in kubeflow/pipelines#11075 ([\#11392](https://github.com/kubeflow/pipelines/issues/11392)) ([6ebf4aa](https://github.com/kubeflow/pipelines/commit/6ebf4aae0335424d3bc88175fd06a2b2ba05251f))
|
||||
* **sdk:** dsl.component docstring typo ([\#11547](https://github.com/kubeflow/pipelines/issues/11547)) ([dbefbb8](https://github.com/kubeflow/pipelines/commit/dbefbb8ee935d8c2e86090121274e5d94dddf84e))
|
||||
* **tests:** free up space in some test runners ([\#11818](https://github.com/kubeflow/pipelines/issues/11818)) ([478ca08](https://github.com/kubeflow/pipelines/commit/478ca089012e64edd371feff4ece9d0d156d4710))
|
||||
* Extend env variables in ml-pipeline-ui deployment ([\#11552](https://github.com/kubeflow/pipelines/issues/11552)) ([a469b10](https://github.com/kubeflow/pipelines/commit/a469b10806a02ed01f6d7d08cdd90e8fc44b8a86))
|
||||
* minio fsgroup for popular clusters ([\#11734](https://github.com/kubeflow/pipelines/issues/11734)) ([8d0ae53](https://github.com/kubeflow/pipelines/commit/8d0ae5381e8366905c90009c56fd0e4807e94f0f))
|
||||
* Update broken api-connect link ([\#11521](https://github.com/kubeflow/pipelines/issues/11521)) ([a81b513](https://github.com/kubeflow/pipelines/commit/a81b51339c650b8b1fca9eeb7a2932bdfaab409f))
|
||||
* **tests:** remove redundant integration test wf ([\#11322](https://github.com/kubeflow/pipelines/issues/11322)) ([6a35ee5](https://github.com/kubeflow/pipelines/commit/6a35ee5144fba1c72badc7c52161d8a49f34804e))
|
||||
* **ui:** Disable GKE Metadata as default Fixes: [\#11247](https://github.com/kubeflow/pipelines/issues/11247), fixes [\#11260](https://github.com/kubeflow/pipelines/issues/11260) ([\#11403](https://github.com/kubeflow/pipelines/issues/11403)) ([23f718d](https://github.com/kubeflow/pipelines/commit/23f718d02e402bad5c9da1a3d76da5c4a97743b3))
|
||||
* **ui:** Fixes V1 Run detail unnecessary reloading. Fixes [\#10590](https://github.com/kubeflow/pipelines/issues/10590) ([\#11214](https://github.com/kubeflow/pipelines/issues/11214)) ([eee095e](https://github.com/kubeflow/pipelines/commit/eee095e5c8d53c0eae45165f72549afe5a5cb0e3))
|
||||
* **workflows:** patch reversed launcher / driver in workflow matrix ([\#11238](https://github.com/kubeflow/pipelines/issues/11238)) ([ceeda01](https://github.com/kubeflow/pipelines/commit/ceeda01d0a35bd84b79d8f2e7aa2e029cb1de06b))
|
||||
|
||||
|
||||
### Other Pull Requests
|
||||
|
||||
* Fix Integration tests ([0359551](https://github.com/kubeflow/pipelines/commit/0359551b7601165ba8bf7cc24fdc1858224c0d2d))
|
||||
* add list or dict support for add toleration json ([fb18235](https://github.com/kubeflow/pipelines/commit/fb182355f08e41eff1ac530be1afac0bad69e15d))
|
||||
* add backend support for toleration lists. ([90909fc](https://github.com/kubeflow/pipelines/commit/90909fc0ef58b71362017a3e48c924b38c389183))
|
||||
* switch selenium image to ghcr ([7529bbe](https://github.com/kubeflow/pipelines/commit/7529bbeba7f245366ca1cbc280169e20a7100a6a))
|
||||
* add missing release note updates to sdk main branch ([\#11842](https://github.com/kubeflow/pipelines/issues/11842)) ([611d582](https://github.com/kubeflow/pipelines/commit/611d5820049dc51ddf261d7d1368c4858dad5159))
|
||||
* fix component retry test ([\#11836](https://github.com/kubeflow/pipelines/issues/11836)) ([598826e](https://github.com/kubeflow/pipelines/commit/598826e1ccfecb5f34716876053a22cdc6605ae4))
|
||||
* **chore:** add cleanup resources to sdk execution tests ([\#11823](https://github.com/kubeflow/pipelines/issues/11823)) ([eee4986](https://github.com/kubeflow/pipelines/commit/eee4986f180cd4e7469a65a3c5f4ffbf3ec0b46c))
|
||||
* update driver & launcher image handling ([\#11533](https://github.com/kubeflow/pipelines/issues/11533)) ([38a4653](https://github.com/kubeflow/pipelines/commit/38a46533fcd47aa31e825109e2bf6940d127910a))
|
||||
* **chore:** add image builds for default branch ([\#11800](https://github.com/kubeflow/pipelines/issues/11800)) ([eacb586](https://github.com/kubeflow/pipelines/commit/eacb586f6225bb277642f4977552f799850e06a1))
|
||||
* fix setup env for kfp k8s lib tests ([\#11798](https://github.com/kubeflow/pipelines/issues/11798)) ([f10c7bf](https://github.com/kubeflow/pipelines/commit/f10c7bfbbcf01eb25f2fa8a437da62bbf07dc1f5))
|
||||
* Handle optional pipeline inputs in the driver ([\#11788](https://github.com/kubeflow/pipelines/issues/11788)) ([bb7a108](https://github.com/kubeflow/pipelines/commit/bb7a1082c4c5a3fb308aac2bf37bab476c3c4df6))
|
||||
* Fix recurring run output when always using latest ([\#11790](https://github.com/kubeflow/pipelines/issues/11790)) ([048f283](https://github.com/kubeflow/pipelines/commit/048f28332b6a0b6684632e76dcb284de2f81d829))
|
||||
* increase stale action timers ([\#11792](https://github.com/kubeflow/pipelines/issues/11792)) ([ade8a2d](https://github.com/kubeflow/pipelines/commit/ade8a2d072efa9897a5a0173316836236d629238))
|
||||
* Fix PSS restricted warnings ([\#11751](https://github.com/kubeflow/pipelines/issues/11751)) ([01999b8](https://github.com/kubeflow/pipelines/commit/01999b8fea23db52da0f633e475c457fc06ca531))
|
||||
* fix(CI) Github action is vulnerable to code execution via `comment body` ([\#11772](https://github.com/kubeflow/pipelines/issues/11772)) ([95c3f2c](https://github.com/kubeflow/pipelines/commit/95c3f2c04d8f19b8b656ddbda046ed9f2c81130a))
|
||||
* Fix Istio sidecar injection by moving from annotations to labels ([\#11750](https://github.com/kubeflow/pipelines/issues/11750)) ([df4e9c2](https://github.com/kubeflow/pipelines/commit/df4e9c2bf5b645f4a3fa831b073846eae5eaceb7))
|
||||
* remove unused function ([\#11719](https://github.com/kubeflow/pipelines/issues/11719)) ([89c8bd7](https://github.com/kubeflow/pipelines/commit/89c8bd7274e2d3141a48045427f12faa4e52f029))
|
||||
* fix(backend) fix execution-level retry on the Argo Workflows backend ([\#11673](https://github.com/kubeflow/pipelines/issues/11673)) ([30210e3](https://github.com/kubeflow/pipelines/commit/30210e33bf257ff06727550d1c59e6bcc7158ab7))
|
||||
* Ignore empty PULL_NUMBER environment variables in SDK tests ([\#11714](https://github.com/kubeflow/pipelines/issues/11714)) ([a7ec34f](https://github.com/kubeflow/pipelines/commit/a7ec34f571c367e9f1957019992f3823865f4a5c))
|
||||
* Fix format string in get_kfp_package_path ([\#11712](https://github.com/kubeflow/pipelines/issues/11712)) ([7d8e921](https://github.com/kubeflow/pipelines/commit/7d8e9211f6625d76958023535967bf37b36d9b7a))
|
||||
* Use the correct SDK version in the SDK execution tests CI ([\#11683](https://github.com/kubeflow/pipelines/issues/11683)) ([355f78c](https://github.com/kubeflow/pipelines/commit/355f78c51b084026b8e01db16a06cb93515ff67a))
|
||||
* Limit the number of parallel tests in SDK execution tests ([\#11680](https://github.com/kubeflow/pipelines/issues/11680)) ([976fba8](https://github.com/kubeflow/pipelines/commit/976fba871fa2331c44ab9723744791051c8f9732))
|
||||
* Allow system.Model artifacts in the Modelcar format ([\#11674](https://github.com/kubeflow/pipelines/issues/11674)) ([0afb12d](https://github.com/kubeflow/pipelines/commit/0afb12d6a7fcce6e06a8991a228a0bebf734dabf))
|
||||
* (test) : Collect and upload logs when test fails for k8s ([\#11618](https://github.com/kubeflow/pipelines/issues/11618)) ([8ca7ec1](https://github.com/kubeflow/pipelines/commit/8ca7ec1768f82f50b3b46606f39632eab11b8fe6))
|
||||
* fix(backend) fix run retry for argo ([\#11585](https://github.com/kubeflow/pipelines/issues/11585)) ([b131566](https://github.com/kubeflow/pipelines/commit/b1315667be8f03973898113b3204e375d1f015a4))
|
||||
* Refer to central KEP template ([\#11593](https://github.com/kubeflow/pipelines/issues/11593)) ([7bb0c44](https://github.com/kubeflow/pipelines/commit/7bb0c448cd17953d96e46dcd73972b52b35f5789))
|
||||
* [chore][backend] Add workflow to validate affected generated files ([\#11539](https://github.com/kubeflow/pipelines/issues/11539)) ([22c3724](https://github.com/kubeflow/pipelines/commit/22c372437d76e5bbaec5a65c884384dbcabe2d55))
|
||||
* chor(test) : Fix kfp-sdk-test for different python versions ([\#11559](https://github.com/kubeflow/pipelines/issues/11559)) ([926aec5](https://github.com/kubeflow/pipelines/commit/926aec55d491eb9fb3abc3db3b4a903cf9dd22d8))
|
||||
* chore(frontend) : Fix frontend failing ci test ([\#11575](https://github.com/kubeflow/pipelines/issues/11575)) ([d3a016d](https://github.com/kubeflow/pipelines/commit/d3a016dd645ba828ec375428faa733e0203d278e))
|
||||
* Fix typo in documentation for contribution and developer guide ([\#11537](https://github.com/kubeflow/pipelines/issues/11537)) ([1234c8d](https://github.com/kubeflow/pipelines/commit/1234c8d6fef914d07ab634a266e1e076c152fd06))
|
||||
* switch release/api generator images to ghcr ([\#11528](https://github.com/kubeflow/pipelines/issues/11528)) ([83791e7](https://github.com/kubeflow/pipelines/commit/83791e7703f3761b90fcce376caaf70d826cc488))
|
||||
* add remaining dockerfiles to build workflow ([\#11522](https://github.com/kubeflow/pipelines/issues/11522)) ([682d3ac](https://github.com/kubeflow/pipelines/commit/682d3aca5fb92622fb6a1cd94e5984fea4d90471))
|
||||
* Fix the failing exit handler SDK execution tests ([\#11519](https://github.com/kubeflow/pipelines/issues/11519)) ([8bce9c4](https://github.com/kubeflow/pipelines/commit/8bce9c4ef6b047d3b71206a97f66ca567e1a7e4f))
|
||||
* nominate reviewers for backend ([\#11508](https://github.com/kubeflow/pipelines/issues/11508)) ([56e6116](https://github.com/kubeflow/pipelines/commit/56e6116d054898f8dbe73990da3836e10e8b7523))
|
||||
* nominate approver & reviewer for backend ([\#11507](https://github.com/kubeflow/pipelines/issues/11507)) ([81ebd7f](https://github.com/kubeflow/pipelines/commit/81ebd7ff9b0376c44928f2398f48196e38d92cd3))
|
||||
* feat[frontend]: implement artifact-repositories configmap support ([\#11354](https://github.com/kubeflow/pipelines/issues/11354)) ([467f30c](https://github.com/kubeflow/pipelines/commit/467f30cf613ecfe181e7bf9c03cb2eef7ae1ea2d))
|
||||
* Add-Create-Experiment-button-when-selecting-experiement-final ([\#11332](https://github.com/kubeflow/pipelines/issues/11332)) ([c5f162d](https://github.com/kubeflow/pipelines/commit/c5f162d552e5ae405689066736acf730b9147606))
|
||||
* adding chore as a new template. Fixes [\#11263](https://github.com/kubeflow/pipelines/issues/11263) ([\#11317](https://github.com/kubeflow/pipelines/issues/11317)) ([f256d86](https://github.com/kubeflow/pipelines/commit/f256d86fbb9bfff0388cd6ef9df1120e49e995d8))
|
||||
* correct lastrun unittest timestamps ([\#11270](https://github.com/kubeflow/pipelines/issues/11270)) ([6f6c8ae](https://github.com/kubeflow/pipelines/commit/6f6c8aeda9d5c219e2958df94dce93ca5a88a6ea))
|
||||
* replaced deprecated image repos with registry.k8s.io ([\#11152](https://github.com/kubeflow/pipelines/issues/11152)) ([d23b72b](https://github.com/kubeflow/pipelines/commit/d23b72bf12f55f123a16f53eb35d061180ad9ac4))
|
||||
|
||||
### [2.4.1](https://github.com/kubeflow/pipelines/compare/2.4.0...2.4.1) (2025-03-01)
|
||||
|
||||
|
||||
### Other Pull Requests
|
||||
|
||||
* update driver/launcher images for 2.4.1 ([6e64e4d](https://github.com/kubeflow/pipelines/commit/6e64e4d553ff49a1de4c1840490ab9ca337d08cc))
|
||||
* Cherry Pick Commits for 2.4.1 ([\#11716](https://github.com/kubeflow/pipelines/issues/11716)) ([78fab71](https://github.com/kubeflow/pipelines/commit/78fab71ef1831ae6659c7834bf78d56daa55623d))
|
||||
|
||||
## [2.4.0](https://github.com/kubeflow/pipelines/compare/2.3.0...2.4.0) (2025-01-16)
|
||||
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
* **sdk:** stop auto-populating metrics as dag output ([\#11362](https://github.com/kubeflow/pipelines/issues/11362))
|
||||
* **components:** Deprecate preview.custom_job module
|
||||
* **sdk:** Pin kfp-pipeline-spec==0.4.0, kfp-server-api>=2.1.0,<2.4.0 ([\#11192](https://github.com/kubeflow/pipelines/issues/11192))
|
||||
|
||||
### Features
|
||||
|
||||
* **sdk/backend:** Add support for placeholders in resource limits ([\#11501](https://github.com/kubeflow/pipelines/issues/11501)) ([7c931ae](https://github.com/kubeflow/pipelines/commit/7c931ae20197b2309d7a8462f6ce099882a8f915))
|
||||
* Introduce cache_key for cache key customization ([\#11434](https://github.com/kubeflow/pipelines/issues/11434)) ([50b367f](https://github.com/kubeflow/pipelines/commit/50b367f232b2d37b762745c8b4296a29c9d8fd45))
|
||||
* **api:** add PipelineConfig to api to re-implement pipeline-level config ([\#11333](https://github.com/kubeflow/pipelines/issues/11333)) ([c2f5649](https://github.com/kubeflow/pipelines/commit/c2f56495b9b1e9eda1b44b6106e12d5290a89ed7))
|
||||
* **backend:** add configurable S3 path style support ([\#11246](https://github.com/kubeflow/pipelines/issues/11246)) ([85fdd73](https://github.com/kubeflow/pipelines/commit/85fdd73ae0bb1c2ce01da6311807b37cfc589710))
|
||||
* **backend:** Add Parallelism Limit to ParallelFor tasks. Fixes [\#8718](https://github.com/kubeflow/pipelines/issues/8718) ([\#10798](https://github.com/kubeflow/pipelines/issues/10798)) ([b7d8c97](https://github.com/kubeflow/pipelines/commit/b7d8c97d65af575b71efe6755eb67b0bb9126f01))
|
||||
* **backend:** implement subdag output resolution ([\#11196](https://github.com/kubeflow/pipelines/issues/11196)) ([c5b787a](https://github.com/kubeflow/pipelines/commit/c5b787aacc4fddeeb1ebc526a83159540cd7b311))
|
||||
* **backend:** Remove PipelineSpec Template storage from ObjStore responsibilies. Fixes [\#10509](https://github.com/kubeflow/pipelines/issues/10509) ([\#10790](https://github.com/kubeflow/pipelines/issues/10790)) ([374b18b](https://github.com/kubeflow/pipelines/commit/374b18bc3366a51f4b92821cdb3a942bc12343a0))
|
||||
* **cli:** expose existing_token client property ([\#11400](https://github.com/kubeflow/pipelines/issues/11400)) ([35793be](https://github.com/kubeflow/pipelines/commit/35793be4168584b1084169b723bfb216aa4a03b6))
|
||||
* **component:** Created Snowflake data unload component ([\#11349](https://github.com/kubeflow/pipelines/issues/11349)) ([22e7780](https://github.com/kubeflow/pipelines/commit/22e77805ed41a72837f7cd15a9d679f42169b253))
|
||||
* **component:** execute in a virtual env ([\#11326](https://github.com/kubeflow/pipelines/issues/11326)) ([df28e89](https://github.com/kubeflow/pipelines/commit/df28e891c4374f7eac98cc6a4892b6e6c35a43f2))
|
||||
* **components:** Add reservation_affinity support in v1.create_custom_training_job_from_component ([c84241b](https://github.com/kubeflow/pipelines/commit/c84241b7362c0351109bc0ddbc2f697479ff8675))
|
||||
* **components:** add strategy to v1 GCPC custom job components/utils ([1cdd648](https://github.com/kubeflow/pipelines/commit/1cdd648239ff850bf5baae48e4e7bd1b24330dd5))
|
||||
* **components:** Deprecate preview.custom_job module ([abbd915](https://github.com/kubeflow/pipelines/commit/abbd915a2ac32b22151efef662b937601602ba9d))
|
||||
* **frontend/backend:** Allow the ability to sort experiments by last run creation. Fixes [\#10884](https://github.com/kubeflow/pipelines/issues/10884) ([\#11163](https://github.com/kubeflow/pipelines/issues/11163)) ([db8669c](https://github.com/kubeflow/pipelines/commit/db8669c33e60bb8910710359c0638d21ec27ac7c))
|
||||
* **sdk:** add PipelineConfig to DSL to re-implement pipeline-level config ([\#11112](https://github.com/kubeflow/pipelines/issues/11112)) ([df4d787](https://github.com/kubeflow/pipelines/commit/df4d7878c4ce25c801a916351bcbce1266a9daf1))
|
||||
* **sdk:** Allow disabling default caching via a CLI flag and env var ([\#11222](https://github.com/kubeflow/pipelines/issues/11222)) ([3f49522](https://github.com/kubeflow/pipelines/commit/3f495229f26ef08360048d050dfe014ca4b57b4f))
|
||||
* **sdk:** Pin kfp-pipeline-spec==0.4.0, kfp-server-api>=2.1.0,<2.4.0 ([\#11192](https://github.com/kubeflow/pipelines/issues/11192)) ([dfd4cc1](https://github.com/kubeflow/pipelines/commit/dfd4cc1e537523b04b01b6e209b5760bd2a007d5))
|
||||
* **sdk:** stop auto-populating metrics as dag output ([\#11362](https://github.com/kubeflow/pipelines/issues/11362)) ([8d018af](https://github.com/kubeflow/pipelines/commit/8d018aff6ed14b5bed7b3f90d9f450b3144ae18e))
|
||||
* **sdk/backend:** enable parameterization of container images ([\#11404](https://github.com/kubeflow/pipelines/issues/11404)) ([22e85de](https://github.com/kubeflow/pipelines/commit/22e85de2bcbd2ff5ed2a099e4f11a39ff27e4190))
|
||||
* **testing:** use kustomize to patch deployments before deploy ([\#11294](https://github.com/kubeflow/pipelines/issues/11294)) ([be863a8](https://github.com/kubeflow/pipelines/commit/be863a852997718701a1ee548d9db86dca7ffc33))
|
||||
* add fields in SinglePlatformSpec ([\#11299](https://github.com/kubeflow/pipelines/issues/11299)) ([a0d313e](https://github.com/kubeflow/pipelines/commit/a0d313e095c2b5fc1a32809c38cf96b13e5772b2))
|
||||
* **sdk:** support dynamic machine type parameters in pipeline task setters ([\#11097](https://github.com/kubeflow/pipelines/issues/11097)) ([70aaf8a](https://github.com/kubeflow/pipelines/commit/70aaf8a9a469607dc6e4aad58d40b39c75363b99))
|
||||
* **workflows:** use built images in Github workflows ([\#11284](https://github.com/kubeflow/pipelines/issues/11284)) ([1550b36](https://github.com/kubeflow/pipelines/commit/1550b363aed3745b476d2b3798725432329e8cea))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **backend:** Allow initializing the Kubernetes client with a kubeconfig ([\#11443](https://github.com/kubeflow/pipelines/issues/11443)) ([87bdb7c](https://github.com/kubeflow/pipelines/commit/87bdb7c3b1126ae5e899826be0834c11764edbae))
|
||||
* **backend:** handle client side HTTP timeouts to fix crashes of metadata-writer. Fixes [\#8200](https://github.com/kubeflow/pipelines/issues/8200) ([\#11361](https://github.com/kubeflow/pipelines/issues/11361)) ([94a21cc](https://github.com/kubeflow/pipelines/commit/94a21cc7e27a3824732e7f4c09a4d8b826dde5b8))
|
||||
* **backend:** modelToCRDTrigger was not including periodic schedule correctly ([\#11475](https://github.com/kubeflow/pipelines/issues/11475)) ([97acacb](https://github.com/kubeflow/pipelines/commit/97acacbd2a0b72d442398ca04382ac1e6d9aa37f))
|
||||
* **backend:** randomizing output uri path to avoid overwriting. Fixes [\#10186](https://github.com/kubeflow/pipelines/issues/10186) ([\#11243](https://github.com/kubeflow/pipelines/issues/11243)) ([219725d](https://github.com/kubeflow/pipelines/commit/219725d9f02b690cf0829a21faf092a3e4c65531))
|
||||
* **backend:** remove unused function argument ([\#11425](https://github.com/kubeflow/pipelines/issues/11425)) ([7f2278f](https://github.com/kubeflow/pipelines/commit/7f2278f25222992bedfcae5b613a7a06430f4542))
|
||||
* **backend:** return error properly ([\#11424](https://github.com/kubeflow/pipelines/issues/11424)) ([13f83cf](https://github.com/kubeflow/pipelines/commit/13f83cf745eb5628d6ae5b25c1ca979d8c6d92ad))
|
||||
* **backend:** set default value to true for ForcePathStyle ([\#11281](https://github.com/kubeflow/pipelines/issues/11281)) ([391de8c](https://github.com/kubeflow/pipelines/commit/391de8ca9ec68fe4cd85bba6c82348386fc79842))
|
||||
* **backend:** stop heartbeat status updates for ScheduledWorkflows. Fixes [\#8757](https://github.com/kubeflow/pipelines/issues/8757) ([\#11363](https://github.com/kubeflow/pipelines/issues/11363)) ([9ccec4c](https://github.com/kubeflow/pipelines/commit/9ccec4c7d1aff4d2bfdb20cf4fd1f9d64b8632f4))
|
||||
* **backend:** Synced ScheduledWorkflow CRs on apiserver startup ([\#11469](https://github.com/kubeflow/pipelines/issues/11469)) ([d21fca6](https://github.com/kubeflow/pipelines/commit/d21fca650c8152d992ad5f7f590f70b1368bc60b))
|
||||
* **backend:** upgrade PyYMAL to fix metadata_writer build error ([\#11231](https://github.com/kubeflow/pipelines/issues/11231)) ([a4119a6](https://github.com/kubeflow/pipelines/commit/a4119a6bf1fe220c84aaa5caa7051c423b5f145e))
|
||||
* **backend:** upgrade various old dependencies ([\#11448](https://github.com/kubeflow/pipelines/issues/11448)) ([803d7a8](https://github.com/kubeflow/pipelines/commit/803d7a8ebb00924107b890de01e2a53af78d9a5e))
|
||||
* **backend:** Use an Argo Workflow exit lifecycle hook for exit handlers ([\#11470](https://github.com/kubeflow/pipelines/issues/11470)) ([3059f7c](https://github.com/kubeflow/pipelines/commit/3059f7c124dc95f867e6f755f7c0720aaa32d48b))
|
||||
* **components:** Fix create_custom_training_job_from_component default location ([04d600b](https://github.com/kubeflow/pipelines/commit/04d600b2d36405f34799306c5d24287c75e31595))
|
||||
* **components:** remove default prediction column names in evaluation regression component to fix issues with bigquery data source ([753a2f1](https://github.com/kubeflow/pipelines/commit/753a2f148ac3f001bc785acc6359295e6fe521fd))
|
||||
* **frontend:** Detailed information of nodes is not displayed when clicking the node. Fixes [\#11325](https://github.com/kubeflow/pipelines/issues/11325) ([\#11493](https://github.com/kubeflow/pipelines/issues/11493)) ([028d81b](https://github.com/kubeflow/pipelines/commit/028d81b624629d4610ddcdced5b982437ff88d08))
|
||||
* **frontend:** first time choosing a pipeline definition is VERY slow. Fixes [\#10897](https://github.com/kubeflow/pipelines/issues/10897) ([\#11130](https://github.com/kubeflow/pipelines/issues/11130)) ([cfb3b31](https://github.com/kubeflow/pipelines/commit/cfb3b3149d9ba02daec584af77ef763f936cd727))
|
||||
* **frontend:** Fix the frontend image build with Node 22 ([\#11524](https://github.com/kubeflow/pipelines/issues/11524)) ([\#11525](https://github.com/kubeflow/pipelines/issues/11525)) ([2e47604](https://github.com/kubeflow/pipelines/commit/2e4760435ff988063dba6e21707e910bf748e5ff))
|
||||
* **sdk:** accelerator type setting in kfp ([\#11373](https://github.com/kubeflow/pipelines/issues/11373)) ([64e3900](https://github.com/kubeflow/pipelines/commit/64e390069d6c60c97ea03e833529a0930398620f))
|
||||
* **sdk:** Add error handling. Fixes [\#11164](https://github.com/kubeflow/pipelines/issues/11164) ([\#11356](https://github.com/kubeflow/pipelines/issues/11356)) ([4a64fe9](https://github.com/kubeflow/pipelines/commit/4a64fe9532556a48585b9966db8e10c7de0a8d37))
|
||||
* **sdk:** Backport fixes in kubeflow/pipelines#11075 ([\#11392](https://github.com/kubeflow/pipelines/issues/11392)) ([6ebf4aa](https://github.com/kubeflow/pipelines/commit/6ebf4aae0335424d3bc88175fd06a2b2ba05251f))
|
||||
* **tests:** remove redundant integration test wf ([\#11322](https://github.com/kubeflow/pipelines/issues/11322)) ([6a35ee5](https://github.com/kubeflow/pipelines/commit/6a35ee5144fba1c72badc7c52161d8a49f34804e))
|
||||
* **ui:** Disable GKE Metadata as default Fixes: [\#11247](https://github.com/kubeflow/pipelines/issues/11247), fixes [\#11260](https://github.com/kubeflow/pipelines/issues/11260) ([\#11403](https://github.com/kubeflow/pipelines/issues/11403)) ([23f718d](https://github.com/kubeflow/pipelines/commit/23f718d02e402bad5c9da1a3d76da5c4a97743b3))
|
||||
* **ui:** Fixes V1 Run detail unnecessary reloading. Fixes [\#10590](https://github.com/kubeflow/pipelines/issues/10590) ([\#11214](https://github.com/kubeflow/pipelines/issues/11214)) ([eee095e](https://github.com/kubeflow/pipelines/commit/eee095e5c8d53c0eae45165f72549afe5a5cb0e3))
|
||||
* **workflows:** patch reversed launcher / driver in workflow matrix ([\#11238](https://github.com/kubeflow/pipelines/issues/11238)) ([ceeda01](https://github.com/kubeflow/pipelines/commit/ceeda01d0a35bd84b79d8f2e7aa2e029cb1de06b))
|
||||
|
||||
|
||||
### Other Pull Requests
|
||||
|
||||
* switch release/api generator images to ghcr ([\#11528](https://github.com/kubeflow/pipelines/issues/11528)) ([\#11530](https://github.com/kubeflow/pipelines/issues/11530)) ([33fca73](https://github.com/kubeflow/pipelines/commit/33fca73fd6f96751db3c7e2c4795acfc7980b649))
|
||||
* add remaining dockerfiles to build workflow ([\#11522](https://github.com/kubeflow/pipelines/issues/11522)) ([682d3ac](https://github.com/kubeflow/pipelines/commit/682d3aca5fb92622fb6a1cd94e5984fea4d90471))
|
||||
* Fix the failing exit handler SDK execution tests ([\#11519](https://github.com/kubeflow/pipelines/issues/11519)) ([8bce9c4](https://github.com/kubeflow/pipelines/commit/8bce9c4ef6b047d3b71206a97f66ca567e1a7e4f))
|
||||
* nominate reviewers for backend ([\#11508](https://github.com/kubeflow/pipelines/issues/11508)) ([56e6116](https://github.com/kubeflow/pipelines/commit/56e6116d054898f8dbe73990da3836e10e8b7523))
|
||||
* nominate approver & reviewer for backend ([\#11507](https://github.com/kubeflow/pipelines/issues/11507)) ([81ebd7f](https://github.com/kubeflow/pipelines/commit/81ebd7ff9b0376c44928f2398f48196e38d92cd3))
|
||||
* feat[frontend]: implement artifact-repositories configmap support ([\#11354](https://github.com/kubeflow/pipelines/issues/11354)) ([467f30c](https://github.com/kubeflow/pipelines/commit/467f30cf613ecfe181e7bf9c03cb2eef7ae1ea2d))
|
||||
* Add-Create-Experiment-button-when-selecting-experiement-final ([\#11332](https://github.com/kubeflow/pipelines/issues/11332)) ([c5f162d](https://github.com/kubeflow/pipelines/commit/c5f162d552e5ae405689066736acf730b9147606))
|
||||
* adding chore as a new template. Fixes [\#11263](https://github.com/kubeflow/pipelines/issues/11263) ([\#11317](https://github.com/kubeflow/pipelines/issues/11317)) ([f256d86](https://github.com/kubeflow/pipelines/commit/f256d86fbb9bfff0388cd6ef9df1120e49e995d8))
|
||||
* correct lastrun unittest timestamps ([\#11270](https://github.com/kubeflow/pipelines/issues/11270)) ([6f6c8ae](https://github.com/kubeflow/pipelines/commit/6f6c8aeda9d5c219e2958df94dce93ca5a88a6ea))
|
||||
* replaced deprecated image repos with registry.k8s.io ([\#11152](https://github.com/kubeflow/pipelines/issues/11152)) ([d23b72b](https://github.com/kubeflow/pipelines/commit/d23b72bf12f55f123a16f53eb35d061180ad9ac4))
|
||||
|
||||
## [2.3.0](https://github.com/kubeflow/pipelines/compare/2.2.0...2.3.0) (2024-09-06)
|
||||
|
||||
|
||||
|
|
|
@ -181,11 +181,11 @@ usually have different reviewers.
|
|||
If you are not sure, or the PR doesn't fit into above scopes. You can either
|
||||
omit the scope because it's optional, or propose an additional scope here.
|
||||
|
||||
## Adding Architectural Decision Records
|
||||
## Adding Kubernetes Enhancement Proposals (KEPs)
|
||||
|
||||
When a change requires a significant change to the underlying system, it should be preceded with an Architectural Decision Record (ADR).
|
||||
When a change requires a significant change to the underlying system, it should be preceded by a Kubernetes Enhancement Proposal (KEP).
|
||||
|
||||
KFP ADRs are found in the `adrs` folder at the root of this repo. Read more about the process [here](adrs/README.md).
|
||||
KEPs are found in the `proposals` folder at the root of this repo. Read more about the process [here](proposals/README.md).
|
||||
|
||||
## Community Guidelines
|
||||
|
||||
|
|
|
@ -0,0 +1,13 @@
|
|||
|
||||
# Check diff for generated files.
#
# Fails the build when `git status --porcelain` reports any uncommitted
# changes in the working tree, which means generated sources (api /
# kubernetes_platform) are stale and must be regenerated. On failure it
# prints the offending files and their diff, then exits non-zero so CI
# marks the job failed.
#
# NOTE: make already runs each recipe line in a shell, so the previous
# `/bin/bash -c '…'` wrapper was redundant; the POSIX `[ -n … ]` test is
# sufficient here (no bashisms required).
.PHONY: check-diff
check-diff:
	if [ -n "$$(git status --porcelain)" ]; then \
		echo "ERROR: Generated files are out of date"; \
		echo "Please regenerate using make clean all for api and kubernetes_platform"; \
		echo "Changes found in the following files:"; \
		git status; \
		echo "Diff of changes:"; \
		git diff; \
		exit 1; \
	fi
|
4
OWNERS
4
OWNERS
|
@ -1,10 +1,8 @@
|
|||
approvers:
|
||||
- chensun
|
||||
- droctothorpe
|
||||
- HumairAK
|
||||
- james-jwu
|
||||
- zijianjoy
|
||||
reviewers:
|
||||
- chensun
|
||||
- HumairAK
|
||||
- ouadakarim
|
||||
- zijianjoy
|
||||
|
|
38
README.md
38
README.md
|
@ -1,7 +1,11 @@
|
|||
# Kubeflow Pipelines
|
||||
|
||||
[](https://coveralls.io/github/kubeflow/pipelines?branch=master)
|
||||
[](https://kubeflow-pipelines.readthedocs.io/en/stable/?badge=latest)
|
||||
[](https://pypi.org/project/kfp)
|
||||
[](https://pypi.org/project/kfp)
|
||||
[](https://www.bestpractices.dev/projects/9938)
|
||||
[](https://deepwiki.com/kubeflow/pipelines)
|
||||
|
||||
## Overview of the Kubeflow pipelines service
|
||||
|
||||
|
@ -19,8 +23,7 @@ The Kubeflow pipelines service has the following goals:
|
|||
|
||||
* Kubeflow Pipelines can be installed as part of the [Kubeflow Platform](https://www.kubeflow.org/docs/started/installing-kubeflow/#kubeflow-platform). Alternatively you can deploy [Kubeflow Pipelines](https://www.kubeflow.org/docs/components/pipelines/operator-guides/installation/) as a standalone service.
|
||||
|
||||
|
||||
* The Docker container runtime has been deprecated on Kubernetes 1.20+. Kubeflow Pipelines has switched to use [Emissary Executor](https://www.kubeflow.org/docs/components/pipelines/legacy-v1/installation/choose-executor/#emissary-executor) by default from Kubeflow Pipelines 1.8. Emissary executor is Container runtime agnostic, meaning you are able to run Kubeflow Pipelines on Kubernetes cluster with any [Container runtimes](https://kubernetes.io/docs/setup/production-environment/container-runtimes/).
|
||||
* The Docker container runtime has been deprecated on Kubernetes 1.20+. Kubeflow Pipelines has switched to use [Emissary Executor](https://www.kubeflow.org/docs/components/pipelines/legacy-v1/installation/choose-executor/#emissary-executor) by default from Kubeflow Pipelines 1.8. Emissary executor is Container runtime agnostic, meaning you are able to run Kubeflow Pipelines on Kubernetes cluster with any [Container runtimes](https://kubernetes.io/docs/setup/production-environment/container-runtimes/).
|
||||
|
||||
## Documentation
|
||||
|
||||
|
@ -32,30 +35,43 @@ See the Kubeflow [Pipelines API doc](https://www.kubeflow.org/docs/components/pi
|
|||
|
||||
Consult the [Python SDK reference docs](https://kubeflow-pipelines.readthedocs.io/en/stable/) when writing pipelines using the Python SDK.
|
||||
|
||||
## Deep Wiki
|
||||
Check out our AI Powered repo documentation on [DeepWiki](https://deepwiki.com/kubeflow/pipelines).
|
||||
|
||||
> :warning: Please note, this is AI generated and may not have completely accurate information.
|
||||
|
||||
## Contributing to Kubeflow Pipelines
|
||||
|
||||
Before you start contributing to Kubeflow Pipelines, read the guidelines in [How to Contribute](./CONTRIBUTING.md). To learn how to build and deploy Kubeflow Pipelines from source code, read the [developer guide](./developer_guide.md).
|
||||
|
||||
## Kubeflow Pipelines Community
|
||||
|
||||
## Kubeflow Pipelines Community Meeting
|
||||
### Community Meeting
|
||||
|
||||
The meeting is happening every other Wed 10-11AM (PST)
|
||||
[Calendar Invite](https://calendar.google.com/event?action=TEMPLATE&tmeid=NTdoNG5uMDBtcnJlYmdlOWt1c2lkY25jdmlfMjAxOTExMTNUMTgwMDAwWiBqZXNzaWV6aHVAZ29vZ2xlLmNvbQ&tmsrc=jessiezhu%40google.com&scp=ALL) or [Join Meeting Directly](https://meet.google.com/phd-ixfj-kcr/)
|
||||
The Kubeflow Pipelines Community Meeting occurs every other Wed 10-11AM (PST).
|
||||
|
||||
[Calendar Invite](https://calendar.google.com/event?action=TEMPLATE&tmeid=NTdoNG5uMDBtcnJlYmdlOWt1c2lkY25jdmlfMjAxOTExMTNUMTgwMDAwWiBqZXNzaWV6aHVAZ29vZ2xlLmNvbQ&tmsrc=jessiezhu%40google.com&scp=ALL)
|
||||
|
||||
[Direct Meeting Link](https://zoom.us/j/92607298595?pwd%3DVlKLUbiguGkbT9oKbaoDmCxrhbRop7.1&sa=D&source=calendar&ust=1736264977415448&usg=AOvVaw1EIkjFsKy0d4yQPptIJS3x)
|
||||
|
||||
[Meeting notes](http://bit.ly/kfp-meeting-notes)
|
||||
|
||||
## Kubeflow Pipelines Slack Channel
|
||||
### Slack
|
||||
|
||||
[#kubeflow-pipelines](https://kubeflow.slack.com)
|
||||
We also have a slack channel (#kubeflow-pipelines) on the Cloud Native Computing Foundation Slack workspace. You can find more details at [https://www.kubeflow.org/docs/about/community/#kubeflow-slack-channels](https://www.kubeflow.org/docs/about/community/#kubeflow-slack-channels)
|
||||
|
||||
## Architecture
|
||||
|
||||
Details about the KFP Architecture can be found at [Architecture.md](docs/Architecture.md)
|
||||
|
||||
## Blog posts
|
||||
|
||||
* [Getting started with Kubeflow Pipelines](https://cloud.google.com/blog/products/ai-machine-learning/getting-started-kubeflow-pipelines) (By Amy Unruh)
|
||||
* How to create and deploy a Kubeflow Machine Learning Pipeline (By Lak Lakshmanan)
|
||||
* [Part 1: How to create and deploy a Kubeflow Machine Learning Pipeline](https://towardsdatascience.com/how-to-create-and-deploy-a-kubeflow-machine-learning-pipeline-part-1-efea7a4b650f)
|
||||
* [Part 2: How to deploy Jupyter notebooks as components of a Kubeflow ML pipeline](https://towardsdatascience.com/how-to-deploy-jupyter-notebooks-as-components-of-a-kubeflow-ml-pipeline-part-2-b1df77f4e5b3)
|
||||
* [Part 1: How to create and deploy a Kubeflow Machine Learning Pipeline](https://medium.com/data-science/how-to-create-and-deploy-a-kubeflow-machine-learning-pipeline-part-1-efea7a4b650f)
|
||||
* [Part 2: How to deploy Jupyter notebooks as components of a Kubeflow ML pipeline](https://medium.com/data-science/how-to-deploy-jupyter-notebooks-as-components-of-a-kubeflow-ml-pipeline-part-2-b1df77f4e5b3)
|
||||
* [Part 3: How to carry out CI/CD in Machine Learning (“MLOps”) using Kubeflow ML pipelines](https://medium.com/google-cloud/how-to-carry-out-ci-cd-in-machine-learning-mlops-using-kubeflow-ml-pipelines-part-3-bdaf68082112)
|
||||
* [Tekton optimizations for Kubeflow Pipelines 2.0](https://developer.ibm.com/blogs/awb-tekton-optimizations-for-kubeflow-pipelines-2-0) (By Tommy Li)
|
||||
|
||||
## Acknowledgments
|
||||
|
||||
Kubeflow pipelines uses [Argo Workflows](https://github.com/argoproj/argo-workflows) by default under the hood to orchestrate Kubernetes resources. The Argo community has been very supportive and we are very grateful. Additionally there is Tekton backend available as well. To access it, please refer to [Kubeflow Pipelines with Tekton repository](https://github.com/kubeflow/kfp-tekton).
|
||||
Kubeflow pipelines uses [Argo Workflows](https://github.com/argoproj/argo-workflows) by default under the hood to orchestrate Kubernetes resources. The Argo community has been very supportive and we are very grateful.
|
||||
|
|
122
RELEASE.md
122
RELEASE.md
|
@ -1,6 +1,5 @@
|
|||
# Kubeflow Pipelines Release Process
|
||||
|
||||
<!-- This TOC is auto generated by "markdown all in one" VS Code plugin -->
|
||||
- [Kubeflow Pipelines Release Process](#kubeflow-pipelines-release-process)
|
||||
- [Schedule](#schedule)
|
||||
- [Release Tags and Branches](#release-tags-and-branches)
|
||||
|
@ -13,11 +12,16 @@
|
|||
- [Cutting a release branch (Optional)](#cutting-a-release-branch-optional)
|
||||
- [Before release](#before-release)
|
||||
- [Releasing from release branch](#releasing-from-release-branch)
|
||||
- [Release KFP Python Packages](#releasing-kfp-python-packages)
|
||||
- [Create GitHub Release](#create-github-release)
|
||||
- [Sync Master Branch with Release](#sync-master-branch-with-latest-release)
|
||||
- [Release Process Development](#release-process-development)
|
||||
|
||||
## Schedule
|
||||
|
||||
Kubeflow Pipelines has weekly patch releases and monthly minor releases.
|
||||
Kubeflow Pipelines has quarterly minor releases. Patch releases occur on a
|
||||
need basis and don't currently operate on a schedule.
|
||||
|
||||
Patch releases only contain bug fixes, while minor releases have new features
|
||||
additionally.
|
||||
|
||||
|
@ -85,9 +89,7 @@ if you only want to use or contribute to this repo.
|
|||
|
||||
- OS: Linux or MacOS
|
||||
- Permissions needed
|
||||
- Can create a branch in github.com/kubeflow/pipelines.
|
||||
- (Before [#4840](https://github.com/kubeflow/pipelines/issues/4840) is resolved) one would need the admin access to kubeflow/pipelines repo.
|
||||
- Can trigger cloudbuild jobs in `google.com/ml-pipeline-test` GCP project.
|
||||
- Admin access to kubeflow/pipelines repo.
|
||||
- Tools that should be in your `$PATH`
|
||||
- docker
|
||||
- python3
|
||||
|
@ -167,11 +169,11 @@ Do the following things before a release:
|
|||
console.log(Array.from(document.querySelectorAll('[id^="issue_"][id*="_link"]')).map(el => /issue_(.*)_link/.exec(el.id)[1]).join(' '))
|
||||
```
|
||||
|
||||
1. Verify cloudbuild and postsubmit tests are passing: visit <https://github.com/kubeflow/pipelines/commits/master> for master branch.
|
||||
1. Verify release branch CI is passing: visit <https://github.com/kubeflow/pipelines/commits/master> for master branch.
|
||||
|
||||

|
||||

|
||||
|
||||
If not, contact the KFP team to determine if the failure(s) would block the release. You can also retry the failed job by opening the detail page of prow job, and click the refresh button next ot the job title.
|
||||
If not, contact the KFP team to determine if the failure(s) would block the release.
|
||||
|
||||
### Releasing from release branch
|
||||
|
||||
|
@ -184,36 +186,7 @@ Note, when releasing from master, all the below mentions of "release branch" mea
|
|||
- `1.0.1`
|
||||
- `1.1.0`
|
||||
- ...
|
||||
Set the version by using `VERSION=<version-value>`. Contact @chensun if you are not sure what next version should be.
|
||||
|
||||
1. Release `driver` and `launcher` images
|
||||
|
||||
First build and push `driver` and `launcher` images:
|
||||
|
||||
```bash
|
||||
pushd ./backend
|
||||
|
||||
export IMG_TAG_DRIVER=gcr.io/ml-pipeline/kfp-driver
|
||||
make license_driver image_driver
|
||||
docker push $IMG_TAG_DRIVER
|
||||
|
||||
export IMG_TAG_LAUNCHER=gcr.io/ml-pipeline/kfp-launcher
|
||||
make license_launcher image_launcher
|
||||
docker push $IMG_TAG_LAUNCHER
|
||||
|
||||
popd
|
||||
```
|
||||
|
||||
If there are changes to the licenses files, make a Pull Request and merge the changes. (e.g. https://github.com/kubeflow/pipelines/pull/11177/)
|
||||
|
||||
Once the images are pushed, update the hard-coded hash in the code: [`DefaultLauncherImage`](https://github.com/kubeflow/pipelines/blob/4c955f4780839702dc4924f8f4e7c90aa251b826/backend/src/v2/compiler/argocompiler/container.go#L33) and [`DefaultDriverImage`](https://github.com/kubeflow/pipelines/blob/4c955f4780839702dc4924f8f4e7c90aa251b826/backend/src/v2/compiler/argocompiler/container.go#L35). Make a Pull Request and merge the change (e.g.: https://github.com/kubeflow/pipelines/pull/11178)
|
||||
|
||||
Pull the changes:
|
||||
|
||||
```bash
|
||||
git checkout $BRANCH
|
||||
git pull upstream $BRANCH
|
||||
```
|
||||
Set the version by using `VERSION=<version-value>`. Contact @chensun or @HumairAK if you are not sure what next version should be.
|
||||
|
||||
1. Update all version refs in release branch by
|
||||
|
||||
|
@ -224,39 +197,17 @@ Note, when releasing from master, all the below mentions of "release branch" mea
|
|||
It will prompt you whether to push it to release branch. Press `y` and hit `Enter`.
|
||||
|
||||
Note, the script will clone kubeflow/pipelines repo into a temporary location on your computer, make those changes and attempt to push to upstream, so that it won't interfere with your current git repo.
|
||||
|
||||
> [!Note]
|
||||
> If you see error "docker.sock: connect: permission error", you need to [allow managing docker as a non-root user](https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user).
|
||||
|
||||
If you see error "docker.sock: connect: permission error", you need to [allow managing docker as a non-root user](https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user).
|
||||
1. Build the release images by using the [Build images from sources](https://github.com/kubeflow/pipelines/actions/workflows/image-builds.yml).
|
||||
|
||||
1. View related cloudbuild jobs' statuses by clicking the latest commit's status icon
|
||||
in the release branch. Refer to the screenshot below to find the page.
|
||||
The target tag should be `$VERSION`.
|
||||
|
||||
1. Wait and make sure the `build-each-commit` cloudbuild job that builds all images
|
||||
in gcr.io/ml-pipeline-test succeeded. If it fails, please click "View more details
|
||||
on Google Cloud Build" and then "Retry".
|
||||
)
|
||||
|
||||
NOTE: you can find your latest release commit in <https://github.com/kubeflow/pipelines/commits/master> and select your release branch.
|
||||

|
||||
|
||||
1. Select the `release-on-tag` cloudbuild job that copies built images and artifacts to
|
||||
public image registry and gcs bucket. This job should have already failed because
|
||||
artifacts haven't been built. Now, please click "View more details on Google Cloud Build"
|
||||
and then "Retry", because after waiting for previous step, artifacts are now ready.
|
||||
|
||||
NOTE: **DO NOT** click the "Re-run" button from GitHub Actions status page.
|
||||
It will create a build with "Branch: $BRANCH" instead of "TAG: $VERSION".
|
||||
Open "View more details on Google Cloud Build", and rerun from there.
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
TODO: we should have an automation KFP cluster, and the waiting and submiting
|
||||
`release-on-tag` cloudbuild task should happen automatically.
|
||||
|
||||
NOTE: postsubmit tests will most likely fail for the release commit, this is expected, postsubmit
|
||||
tests start right after the commit is in GitHub repo, but some artifacts they depend on are still
|
||||
being built by the processes in these two steps.
|
||||
1. Search "PyPI" in Google internal release doc for getting password of kubeflow-pipelines user.
|
||||
### Releasing KFP Python Packages
|
||||
|
||||
1. Release `kfp-server-api` python packages to PyPI.
|
||||
|
||||
|
@ -270,15 +221,42 @@ and then "Retry", because after waiting for previous step, artifacts are now rea
|
|||
```
|
||||
|
||||
1. Release `kfp` python packages to PyPI. (Note: Please skip this step for backend release, this step will be handled by SDK release.)
|
||||
|
||||
|
||||
Update the SDK version in `version.py` and `readthedocs` `versions.json`, example PR [here](https://github.com/kubeflow/pipelines/pull/11715/files).
|
||||
|
||||
```bash
|
||||
pip3 install twine --user
|
||||
gsutil cp gs://ml-pipeline/release/$VERSION/kfp.tar.gz kfp-$VERSION.tar.gz
|
||||
cd sdk/python
|
||||
./build.sh kfp-$VERSION.tar.gz
|
||||
python3 -m twine upload kfp-$VERSION.tar.gz
|
||||
```
|
||||
|
||||
!!! The file name must contain the version. See <https://github.com/kubeflow/pipelines/issues/1292>
|
||||
|
||||
1. Release `kfp-kubernetes` python packages to PyPI. (Note: Please skip this step for backend release, this step will be handled by SDK release.)
|
||||
|
||||
Update the KFP Kubernetes SDK version in `__init__.py` and `readthedocs` `versions.json`, example PR [here](https://github.com/kubeflow/pipelines/pull/11380).
|
||||
|
||||
```bash
|
||||
export KFP_KUBERNETES_VERSION=
|
||||
pip3 install twine --user
|
||||
cd kubernetes_platform/python
|
||||
./create_release_branch.sh
|
||||
```
|
||||
|
||||
Follow the output push instructions to **commit and push the branch to KFP**, then do the following:
|
||||
|
||||
```bash
|
||||
# set this to the appropriate version that matches what was set in __init__.py earlier
|
||||
export KFP_KUBERNETES_VERSION=
|
||||
cd kubernetes_platform/python
|
||||
./release.sh
|
||||
```
|
||||
|
||||
Note that this script will build the package, test install, and push to PyPi.
|
||||
|
||||
### Create GitHub Release
|
||||
|
||||
1. Create a GitHub release using `$VERSION` git tag and title `Version $VERSION`,
|
||||
fill in the description. Detailed steps:
|
||||
|
||||
|
@ -289,7 +267,7 @@ fill in the description. Detailed steps:
|
|||
1. Use this template for public releases and replace the `$VERSION` with real values.
|
||||
|
||||
<pre>
|
||||
To deploy Kubeflow Pipelines in an existing cluster, follow the instruction in [here](https://www.kubeflow.org/docs/pipelines/standalone-deployment-gcp/) or via UI [here](https://console.cloud.google.com/ai-platform/pipelines)
|
||||
To deploy Kubeflow Pipelines in an existing cluster, follow the instruction in [here](https://www.kubeflow.org/docs/components/pipelines/operator-guides/installation/)
|
||||
|
||||
Install python SDK (python 3.9 above) by running:
|
||||
|
||||
|
@ -304,7 +282,7 @@ fill in the description. Detailed steps:
|
|||
***This is a prerelease*** checkbox in the GitHub release UI.
|
||||
|
||||
<pre>
|
||||
To deploy Kubeflow Pipelines in an existing cluster, follow the instruction in [here](https://www.kubeflow.org/docs/pipelines/standalone-deployment-gcp/).
|
||||
To deploy Kubeflow Pipelines in an existing cluster, follow the instruction in [here](https://www.kubeflow.org/docs/components/pipelines/operator-guides/installation/).
|
||||
|
||||
Install kfp-server-api package (python 3.9 above) by running:
|
||||
|
||||
|
@ -318,6 +296,8 @@ fill in the description. Detailed steps:
|
|||
|
||||
NOTE, kfp python SDK is **NOT** included and released separately.
|
||||
</pre>
|
||||
|
||||
### Sync Master Branch with latest release
|
||||
|
||||
1. **(Do this step only when releasing from a NON-master release branch)**
|
||||
Update master branch to the same version and include latest changelog:
|
||||
|
|
|
@ -1,96 +0,0 @@
|
|||
# Use Architecture Decision Records for Kubeflow Pipelines
|
||||
|
||||
| | |
|
||||
|----------------|--------------------------|
|
||||
| Date | 2024-04-24 |
|
||||
| Scope | Kubeflow Pipelines |
|
||||
| Status | Accepted |
|
||||
| Authors | [Humair Khan](@HumairAK) |
|
||||
| Supersedes | N/A |
|
||||
| Superseded by: | N/A |
|
||||
| Issues | |
|
||||
| Other docs: | none |
|
||||
|
||||
# Kubeflow Pipelines Architecture Decision Records
|
||||
|
||||
"Documenting architectural decisions helps a project succeed by helping current and future contributors understand the reasons for doing things a certain way." [1]
|
||||
|
||||
## What is an ADR?
|
||||
|
||||
An architecture decision record is a short text file in a Markdown format. Each record describes a set of forces and a single decision in response to those forces. [2]
|
||||
|
||||
An ADR is not a technical design, a team-level internal procedure, or a roadmap. An ADR does not replace detailed technical design documents or good commit messages.
|
||||
|
||||
## Why
|
||||
|
||||
Using an Architecture Decision Record (ADR) offers many benefits, particularly in managing the complexity and longevity of software projects.
|
||||
|
||||
Some examples include:
|
||||
|
||||
1. ADRs capture the why behind critical architectural choices, not just the what.
|
||||
* This helps current and future team members understand the reasoning behind decisions, particularly when the rationale is no longer obvious.
|
||||
2. Improve Communication and Collaboration.
|
||||
* They serve as a single source of truth for architectural decisions.
|
||||
* By documenting options and their trade-offs, ADRs encourage structured decision-making and transparency.
|
||||
3. Enable Traceability
|
||||
* ADRs create a decision history that allows teams to trace architectural choices back to their original context, assumptions, and goals
|
||||
|
||||
See references below for more exhaustive lists on how ADRs can be a net benefit, especially to an open source project,
|
||||
where transparency in decision making is key.
|
||||
|
||||
## Goals
|
||||
|
||||
* Capture the Why Behind Decisions
|
||||
* Foster Clear Communication
|
||||
* Enable Decision Traceability
|
||||
* Encourage Thoughtful, Deliberate Decisions
|
||||
* Preserve Institutional Knowledge
|
||||
|
||||
## Non-Goals
|
||||
|
||||
* Not a substitute for technical or user documentation
|
||||
* Not a substitute or replacement for meaningful commit messages
|
||||
|
||||
## How
|
||||
|
||||
We will keep each ADR in a short text file in Markdown format.
|
||||
|
||||
We will keep ADRs in this repository, https://github.com/kubeflow/pipelines, under the `./adrs` folder.
|
||||
|
||||
ADRs will be numbered sequentially and monotonically. Numbers will not be reused.
|
||||
|
||||
If a decision is reversed, we will keep the old one around, but mark it as superseded. (It's still relevant to know that it was the decision, but is no longer the decision.)
|
||||
|
||||
We will use a format with just a few parts, so each document is easy to digest.
|
||||
|
||||
## Alternatives
|
||||
|
||||
**Current Approach**
|
||||
|
||||
One alternative is to not do ADRs, and stick to the current approach of doing google docs or similar and presenting it in KFP calls.
|
||||
The pros for this approach is that it's relatively low overhead and simple. Communicating changes/editing google docs is also immensely easier than on a markdown PR.
|
||||
|
||||
The cons are plentiful however:
|
||||
* No way to preserve these docs effectively
|
||||
* Does not live near the codebase
|
||||
* Difficult to enforce immutability
|
||||
* No way to formalize an "approval" process (i.e. something akin to a PR "merge")
|
||||
* Doc owners are not maintainers, and access can be revoked at any time
|
||||
* Hard to keep track off google documents
|
||||
|
||||
## Reviews
|
||||
|
||||
| Reviewed by | Date | Notes |
|
||||
|-------------|------|-------|
|
||||
|
||||
## References
|
||||
|
||||
* https://cognitect.com/blog/2011/11/15/documenting-architecture-decisions
|
||||
* https://adr.github.io/
|
||||
* https://docs.aws.amazon.com/prescriptive-guidance/latest/architectural-decision-records/adr-process.html
|
||||
* https://github.com/joelparkerhenderson/architecture-decision-record?tab=readme-ov-file#what-is-an-architecture-decision-record
|
||||
|
||||
## Citations
|
||||
|
||||
* [1] Heiko W. Rupp, https://www.redhat.com/architect/architecture-decision-records
|
||||
* [2] Michael Nygard, https://cognitect.com/blog/2011/11/15/documenting-architecture-decisions
|
|
@ -1,4 +0,0 @@
|
|||
approvers:
|
||||
- chensun
|
||||
- HumairAK
|
||||
- zijianjoy
|
|
@ -1,16 +0,0 @@
|
|||
# Kubeflow Pipelines Architecture Decision Records (ADR)
|
||||
|
||||
This folder contains all the Architecture Decision Records for Kubeflow Pipelines. Read more about ADRs [here][1].
|
||||
|
||||
When adding a new ADR please follow the following instructions:
|
||||
|
||||
1. Use the `template.md` as the basis of your document
|
||||
2. The format should be in markdown
|
||||
3. The file name should follow the format
|
||||
4. (Optional) Bring this up in the KFP community call to bring attention to your proposal
|
||||
|
||||
It is fine to start with something like a Google Document while you are iterating and asking for feedback, but the
|
||||
document should eventually be surfaced as a markdown ADR in the form of a pull request to this repository so it can
|
||||
persist alongside the codebase.
|
||||
|
||||
[1]: https://github.com/joelparkerhenderson/architecture-decision-record?tab=readme-ov-file#what-is-an-architecture-decision-record
|
|
@ -1,69 +0,0 @@
|
|||
# Kubeflow - Architecture Decision Record template
|
||||
|
||||
<!-- copy and paste this template to start authoring your own ADR -->
|
||||
<!-- for the Status of new ADRs, please use Approved, since it will be approved by the time it is merged -->
|
||||
<!-- remove this comment block too -->
|
||||
|
||||
| | |
|
||||
|----------------|--------------------------|
|
||||
| Date | insert data |
|
||||
| Scope | |
|
||||
| Status | Approved |
|
||||
| Authors | [name](@github-username) |
|
||||
| Supersedes | N/A |
|
||||
| Superseded by: | N/A |
|
||||
| Issues | |
|
||||
| Other docs: | none |
|
||||
|
||||
## What
|
||||
|
||||
A couple sentences describing what this ADR is about.
|
||||
|
||||
## Why
|
||||
|
||||
A couple sentences describing why we need an ADR for this.
|
||||
|
||||
## Goals
|
||||
|
||||
* Bulleted list of goals
|
||||
|
||||
## Non-Goals
|
||||
|
||||
* Bulleted list of non-goals
|
||||
|
||||
## How
|
||||
|
||||
A couple sentences describing the high level approach that this ADR captures.
|
||||
|
||||
## Open Questions
|
||||
|
||||
Optional section, hopefully removed before transitioning from Draft/Proposed to Accepted.
|
||||
|
||||
## Alternatives
|
||||
|
||||
Carefully describe the alternatives considered, and specifically document what the tradeoffs of each approach are.
|
||||
|
||||
## Security and Privacy Considerations
|
||||
|
||||
Optional section. Talk about any security and privacy concerns here.
|
||||
|
||||
## Risks
|
||||
|
||||
Optional section. Talk about any risks here.
|
||||
|
||||
## Stakeholder Impacts
|
||||
|
||||
| Group | Key Contacts | Date | Impacted? |
|
||||
|--------------------|------------------|------|-----------|
|
||||
| group or team name | key contact name | date | ? |
|
||||
|
||||
|
||||
## References
|
||||
|
||||
* optional bulleted list
|
||||
|
||||
## Reviews
|
||||
|
||||
| Reviewed by | Date | Notes |
|
||||
|-------------|------|-------|
|
||||
| name | date | ? |
|
|
@ -12,8 +12,8 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# Contact one of Bobgy, capri-xiyue or zijianjoy if this remote image needs an update.
|
||||
PREBUILT_REMOTE_IMAGE=gcr.io/ml-pipeline-test/api-generator:latest
|
||||
# Contact one of chensun, HumairAK, zijianjoy if this remote image needs an update.
|
||||
PREBUILT_REMOTE_IMAGE=ghcr.io/kubeflow/kfp-api-generator:1.1
|
||||
|
||||
.PHONY: all
|
||||
all: golang python
|
||||
|
|
|
@ -1,7 +1,9 @@
|
|||
approvers:
|
||||
- chensun
|
||||
- connor-mccarthy
|
||||
- neuromage
|
||||
reviewers:
|
||||
- chensun
|
||||
- connor-mccarthy
|
||||
- droctothorpe
|
||||
- zazulam
|
||||
- mprahl
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
module github.com/kubeflow/pipelines/api
|
||||
|
||||
go 1.21
|
||||
go 1.23
|
||||
|
||||
require (
|
||||
google.golang.org/genproto v0.0.0-20211026145609-4688e4c4e024
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||||
// versions:
|
||||
// protoc-gen-go v1.33.0
|
||||
// protoc v3.17.3
|
||||
// protoc v3.20.3
|
||||
// source: cache_key.proto
|
||||
|
||||
package cachekey
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -292,7 +292,7 @@ message TaskInputsSpec {
|
|||
|
||||
// Represents an input parameter. The value can be taken from an upstream
|
||||
// task's output parameter (if specifying `producer_task` and
|
||||
// `output_parameter_key`, or it can be a runtime value, which can either be
|
||||
// `output_parameter_key`), or it can be a runtime value, which can either be
|
||||
// determined at compile-time, or from a pipeline parameter.
|
||||
message InputParameterSpec {
|
||||
// Represents an upstream task's output parameter.
|
||||
|
@ -307,9 +307,9 @@ message TaskInputsSpec {
|
|||
|
||||
// Represents an upstream task's final status. The field can only be set if
|
||||
// the schema version is `2.0.0`. The resolved input parameter will be a
|
||||
// json payload in string type.
|
||||
// JSON payload in string type.
|
||||
message TaskFinalStatus {
|
||||
// The name of the upsteram task where the final status is coming from.
|
||||
// The name of the upstream task where the final status is coming from.
|
||||
string producer_task = 1;
|
||||
}
|
||||
|
||||
|
@ -320,7 +320,7 @@ message TaskInputsSpec {
|
|||
ValueOrRuntimeParameter runtime_value = 2;
|
||||
// Pass the input parameter from parent component input parameter.
|
||||
string component_input_parameter = 3;
|
||||
// The final status of an uptream task.
|
||||
// The final status of an upstream task.
|
||||
TaskFinalStatus task_final_status = 5;
|
||||
}
|
||||
|
||||
|
@ -527,7 +527,7 @@ message PipelineTaskSpec {
|
|||
// t2.outputs.parameters = { 'p': 'v2' }
|
||||
// t2.outputs.artifacts = { 'a': [a2] }
|
||||
// parent_task.outputs.parameters = { 'p': '["v1", "v2"]' }
|
||||
// parent_task.outputs.aritfacts = { 'a': [a1, a2] }
|
||||
// parent_task.outputs.artifacts = { 'a': [a1, a2] }
|
||||
oneof iterator {
|
||||
// Iterator to iterate over an artifact input.
|
||||
ArtifactIteratorSpec artifact_iterator = 9;
|
||||
|
@ -591,7 +591,7 @@ message ArtifactIteratorSpec {
|
|||
// The spec of a parameter iterator. It supports fan-out a workflow from a
|
||||
// string parameter which contains a JSON array.
|
||||
message ParameterIteratorSpec {
|
||||
// Specifies the spec to decribe the parameter items to iterate.
|
||||
// Specifies the spec to describe the parameter items to iterate.
|
||||
message ItemsSpec {
|
||||
// Specifies where to get the collection of items to iterate. The iterator
|
||||
// will create a sub-task for each item of the collection and pass the item
|
||||
|
@ -665,6 +665,9 @@ message ArtifactTypeSchema {
|
|||
message PipelineTaskInfo {
|
||||
// The display name of the task.
|
||||
string name = 1;
|
||||
// The name of the task used throughout the pipeline acts as a unique
|
||||
// identifier.
|
||||
string task_name = 2;
|
||||
}
|
||||
|
||||
// Definition for a value or reference to a runtime parameter. A
|
||||
|
@ -1104,7 +1107,38 @@ message PlatformDeploymentConfig {
|
|||
map<string, google.protobuf.Struct> executors = 1;
|
||||
}
|
||||
|
||||
message WorkspaceConfig {
|
||||
// Size of the workspace
|
||||
// Example: "250Gi"
|
||||
// See https://kubernetes.io/docs/reference/kubernetes-api/common-definitions/quantity/ for valid quantity formats
|
||||
string size = 1;
|
||||
|
||||
// Kubernetes specific configuration for the workspace
|
||||
optional KubernetesWorkspaceConfig kubernetes = 2;
|
||||
}
|
||||
|
||||
message KubernetesWorkspaceConfig {
|
||||
// Patch of a PersistentVolumeClaim (PVC) spec to override defaults set on the API server for the workspace PVC
|
||||
// Example: {
|
||||
// "storageClassName": "super-fast-storage",
|
||||
// "accessModes": ["ReadWriteMany"]
|
||||
// }
|
||||
optional google.protobuf.Struct pvc_spec_patch = 1;
|
||||
}
|
||||
|
||||
// Spec for pipeline-level config options. See PipelineConfig DSL class.
|
||||
message PipelineConfig {
|
||||
// TODO add pipeline-level configs
|
||||
// Name of the semaphore key to control pipeline concurrency
|
||||
string semaphore_key = 1;
|
||||
|
||||
// Name of the mutex to ensure mutual exclusion
|
||||
string mutex_name = 2;
|
||||
|
||||
// Time to live configuration after the pipeline run is completed for
|
||||
// ephemeral resources created by the pipeline run.
|
||||
int32 resource_ttl = 3;
|
||||
|
||||
// Configuration for a shared storage workspace that persists for the duration of the pipeline run.
|
||||
// The workspace can be configured with size and Kubernetes-specific settings to override default PVC configurations.
|
||||
optional WorkspaceConfig workspace = 4;
|
||||
}
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
import setuptools
|
||||
|
||||
NAME = 'kfp-pipeline-spec'
|
||||
VERSION = '0.6.0'
|
||||
VERSION = '0.7.0'
|
||||
|
||||
setuptools.setup(
|
||||
name=NAME,
|
||||
|
|
|
@ -13,35 +13,29 @@
|
|||
# limitations under the License.
|
||||
|
||||
# 1. Build api server application
|
||||
FROM golang:1.21.7-bookworm as builder
|
||||
FROM golang:1.23-bookworm AS builder
|
||||
RUN apt-get update && apt-get install -y cmake clang musl-dev openssl
|
||||
WORKDIR /go/src/github.com/kubeflow/pipelines
|
||||
|
||||
COPY ./go.mod ./
|
||||
COPY ./go.sum ./
|
||||
COPY ./hack/install-go-licenses.sh ./hack/
|
||||
COPY ./kubernetes_platform/go.mod ./kubernetes_platform/go.mod
|
||||
COPY ./api/go.mod ./api/go.mod
|
||||
|
||||
RUN GO111MODULE=on go mod download
|
||||
RUN ./hack/install-go-licenses.sh
|
||||
|
||||
COPY . .
|
||||
RUN GO111MODULE=on go build -o /bin/apiserver backend/src/apiserver/*.go
|
||||
# Check licenses and comply with license terms.
|
||||
# First, make sure there's no forbidden license.
|
||||
RUN go-licenses check ./backend/src/apiserver
|
||||
RUN go-licenses csv ./backend/src/apiserver > /tmp/licenses.csv && \
|
||||
diff /tmp/licenses.csv backend/third_party_licenses/apiserver.csv && \
|
||||
go-licenses save ./backend/src/apiserver --save_path /tmp/NOTICES
|
||||
|
||||
# 2. Compile preloaded pipeline samples
|
||||
FROM python:3.9 as compiler
|
||||
FROM python:3.9 AS compiler
|
||||
RUN apt-get update -y && apt-get install --no-install-recommends -y -q default-jdk python3-setuptools python3-dev jq
|
||||
RUN wget https://bootstrap.pypa.io/get-pip.py && python3 get-pip.py
|
||||
COPY backend/requirements.txt .
|
||||
RUN python3 -m pip install -r requirements.txt --no-cache-dir
|
||||
|
||||
# Downloading Argo CLI so that the samples are validated
|
||||
ENV ARGO_VERSION v3.4.17
|
||||
ENV ARGO_VERSION=v3.5.14
|
||||
RUN curl -sLO https://github.com/argoproj/argo-workflows/releases/download/${ARGO_VERSION}/argo-linux-amd64.gz && \
|
||||
gunzip argo-linux-amd64.gz && \
|
||||
chmod +x argo-linux-amd64 && \
|
||||
|
@ -54,9 +48,9 @@ COPY backend/src/apiserver/config/sample_config.json /samples/
|
|||
# Compiling the preloaded samples.
|
||||
# The default image is replaced with the GCR-hosted python image.
|
||||
RUN set -e; \
|
||||
< /samples/sample_config.json jq .[].file --raw-output | while read pipeline_yaml; do \
|
||||
< /samples/sample_config.json jq ".pipelines[].file" --raw-output | while read pipeline_yaml; do \
|
||||
pipeline_py="${pipeline_yaml%.yaml}"; \
|
||||
python3 "$pipeline_py"; \
|
||||
echo "Compiling: \"$pipeline_py\"" && python3 "$pipeline_py" && echo -n "Output: " && ls "$pipeline_py.yaml"; \
|
||||
done
|
||||
|
||||
# 3. Start api web server
|
||||
|
@ -66,22 +60,19 @@ ARG COMMIT_SHA=unknown
|
|||
ENV COMMIT_SHA=${COMMIT_SHA}
|
||||
ARG TAG_NAME=unknown
|
||||
ENV TAG_NAME=${TAG_NAME}
|
||||
ENV LOG_LEVEL info
|
||||
ENV LOG_LEVEL=info
|
||||
|
||||
WORKDIR /bin
|
||||
|
||||
# Adding CA certificate so API server can download pipeline through URL and wget is used for liveness/readiness probe command
|
||||
RUN apt-get update && apt-get install -y ca-certificates wget
|
||||
|
||||
COPY backend/src/apiserver/config/ /config
|
||||
COPY --from=builder /bin/apiserver /bin/apiserver
|
||||
|
||||
# Copy licenses and notices.
|
||||
COPY --from=builder /tmp/licenses.csv /third_party/licenses.csv
|
||||
COPY --from=builder /tmp/NOTICES /third_party/NOTICES
|
||||
COPY --from=compiler /samples/ /samples/
|
||||
RUN chmod +x /bin/apiserver
|
||||
|
||||
# Adding CA certificate so API server can download pipeline through URL and wget is used for liveness/readiness probe command
|
||||
RUN apt-get update && apt-get install -y ca-certificates wget
|
||||
|
||||
# Pin sample doc links to the commit that built the backend image
|
||||
RUN sed -E "s#/(blob|tree)/master/#/\1/${COMMIT_SHA}/#g" -i /config/sample_config.json && \
|
||||
sed -E "s/%252Fmaster/%252F${COMMIT_SHA}/#g" -i /config/sample_config.json
|
||||
|
|
|
@ -13,7 +13,7 @@
|
|||
# limitations under the License.
|
||||
|
||||
# Dockerfile for building the source code of cache_server
|
||||
FROM golang:1.21.7-alpine3.19 as builder
|
||||
FROM golang:1.23-alpine as builder
|
||||
|
||||
RUN apk update && apk upgrade && \
|
||||
apk add --no-cache bash git openssh gcc musl-dev
|
||||
|
@ -22,23 +22,16 @@ WORKDIR /go/src/github.com/kubeflow/pipelines
|
|||
|
||||
COPY ./go.mod ./
|
||||
COPY ./go.sum ./
|
||||
COPY ./hack/install-go-licenses.sh ./hack/
|
||||
COPY ./kubernetes_platform/go.mod ./kubernetes_platform/go.mod
|
||||
COPY ./api/go.mod ./api/go.mod
|
||||
|
||||
RUN GO111MODULE=on go mod download
|
||||
RUN ./hack/install-go-licenses.sh
|
||||
|
||||
COPY . .
|
||||
|
||||
RUN GO111MODULE=on go build -o /bin/cache_server backend/src/cache/*.go
|
||||
|
||||
# Check licenses and comply with license terms.
|
||||
# First, make sure there's no forbidden license.
|
||||
RUN go-licenses check ./backend/src/cache
|
||||
RUN go-licenses csv ./backend/src/cache > /tmp/licenses.csv && \
|
||||
diff /tmp/licenses.csv backend/third_party_licenses/cache_server.csv && \
|
||||
go-licenses save ./backend/src/cache --save_path /tmp/NOTICES
|
||||
|
||||
FROM alpine:3.19
|
||||
FROM alpine
|
||||
|
||||
RUN adduser -S appuser
|
||||
USER appuser
|
||||
|
@ -46,8 +39,5 @@ USER appuser
|
|||
WORKDIR /bin
|
||||
|
||||
COPY --from=builder /bin/cache_server /bin/cache_server
|
||||
# Copy licenses and notices.
|
||||
COPY --from=builder /tmp/licenses.csv /third_party/licenses.csv
|
||||
COPY --from=builder /tmp/NOTICES /third_party/NOTICES
|
||||
|
||||
ENTRYPOINT [ "/bin/cache_server" ]
|
||||
|
|
|
@ -13,7 +13,7 @@
|
|||
# limitations under the License.
|
||||
|
||||
# Dockerfile for building the source code of conformance tests
|
||||
FROM golang:1.21.7-alpine3.19 as builder
|
||||
FROM golang:1.23-alpine as builder
|
||||
|
||||
RUN apk update && apk upgrade && \
|
||||
apk add --no-cache bash git openssh gcc musl-dev
|
||||
|
|
|
@ -12,27 +12,22 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
FROM golang:1.21.7-alpine3.19 as builder
|
||||
FROM golang:1.23-alpine AS builder
|
||||
|
||||
ARG GCFLAGS=""
|
||||
|
||||
WORKDIR /go/src/github.com/kubeflow/pipelines
|
||||
|
||||
COPY ./go.mod ./
|
||||
COPY ./go.sum ./
|
||||
COPY ./hack/install-go-licenses.sh ./hack/
|
||||
COPY ./kubernetes_platform/go.mod ./kubernetes_platform/go.mod
|
||||
COPY ./api/go.mod ./api/go.mod
|
||||
|
||||
RUN GO111MODULE=on go mod download
|
||||
RUN ./hack/install-go-licenses.sh
|
||||
|
||||
COPY . .
|
||||
|
||||
RUN GO111MODULE=on CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -tags netgo -ldflags '-extldflags "-static"' -o /bin/driver ./backend/src/v2/cmd/driver/*.go
|
||||
|
||||
# Check licenses and comply with license terms.
|
||||
# First, make sure there's no forbidden license.
|
||||
RUN go-licenses check ./backend/src/v2/cmd/driver
|
||||
RUN go-licenses csv ./backend/src/v2/cmd/driver > /tmp/licenses.csv && \
|
||||
diff /tmp/licenses.csv backend/third_party_licenses/driver.csv && \
|
||||
go-licenses save ./backend/src/v2/cmd/driver --save_path /tmp/NOTICES
|
||||
RUN GO111MODULE=on CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -tags netgo -gcflags="${GCFLAGS}" -ldflags '-extldflags "-static"' -o /bin/driver ./backend/src/v2/cmd/driver/*.go
|
||||
|
||||
FROM alpine:3.19
|
||||
|
||||
|
@ -42,8 +37,5 @@ USER appuser
|
|||
WORKDIR /bin
|
||||
|
||||
COPY --from=builder /bin/driver /bin/driver
|
||||
# Copy licenses and notices.
|
||||
COPY --from=builder /tmp/licenses.csv /third_party/licenses.csv
|
||||
COPY --from=builder /tmp/NOTICES /third_party/NOTICES
|
||||
|
||||
ENTRYPOINT [ "/bin/driver" ]
|
||||
ENTRYPOINT [ "/bin/driver" ]
|
||||
|
|
|
@ -12,28 +12,21 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
FROM golang:1.21.7-alpine3.19 as builder
|
||||
FROM golang:1.23-alpine AS builder
|
||||
|
||||
WORKDIR /go/src/github.com/kubeflow/pipelines
|
||||
|
||||
COPY ./go.mod ./
|
||||
COPY ./go.sum ./
|
||||
COPY ./hack/install-go-licenses.sh ./hack/
|
||||
COPY ./kubernetes_platform/go.mod ./kubernetes_platform/go.mod
|
||||
COPY ./api/go.mod ./api/go.mod
|
||||
|
||||
RUN GO111MODULE=on go mod download
|
||||
RUN ./hack/install-go-licenses.sh
|
||||
|
||||
COPY . .
|
||||
|
||||
RUN GO111MODULE=on CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -tags netgo -ldflags '-extldflags "-static"' -o /bin/launcher-v2 ./backend/src/v2/cmd/launcher-v2/*.go
|
||||
|
||||
# Check licenses and comply with license terms.
|
||||
# First, make sure there's no forbidden license.
|
||||
RUN go-licenses check ./backend/src/v2/cmd/launcher-v2
|
||||
RUN go-licenses csv ./backend/src/v2/cmd/launcher-v2 > /tmp/licenses.csv && \
|
||||
diff /tmp/licenses.csv backend/third_party_licenses/launcher.csv && \
|
||||
go-licenses save ./backend/src/v2/cmd/launcher-v2 --save_path /tmp/NOTICES
|
||||
|
||||
FROM alpine:3.19
|
||||
|
||||
RUN adduser -S appuser
|
||||
|
@ -42,8 +35,5 @@ USER appuser
|
|||
WORKDIR /bin
|
||||
|
||||
COPY --from=builder /bin/launcher-v2 /bin/launcher-v2
|
||||
# Copy licenses and notices.
|
||||
COPY --from=builder /tmp/licenses.csv /third_party/licenses.csv
|
||||
COPY --from=builder /tmp/NOTICES /third_party/NOTICES
|
||||
|
||||
ENTRYPOINT [ "/bin/launcher-v2" ]
|
||||
ENTRYPOINT [ "/bin/launcher-v2" ]
|
||||
|
|
|
@ -12,30 +12,24 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
FROM golang:1.21.7-alpine3.19 as builder
|
||||
|
||||
WORKDIR /go/src/github.com/kubeflow/pipelines
|
||||
|
||||
COPY ./go.mod ./
|
||||
COPY ./go.sum ./
|
||||
COPY ./hack/install-go-licenses.sh ./hack/
|
||||
|
||||
RUN GO111MODULE=on go mod download
|
||||
RUN ./hack/install-go-licenses.sh
|
||||
|
||||
COPY . .
|
||||
FROM golang:1.23-alpine AS builder
|
||||
|
||||
# Needed musl-dev for github.com/mattn/go-sqlite3
|
||||
RUN apk update && apk upgrade && \
|
||||
apk add --no-cache bash git openssh gcc musl-dev
|
||||
|
||||
WORKDIR /go/src/github.com/kubeflow/pipelines
|
||||
|
||||
COPY ./go.mod ./
|
||||
COPY ./go.sum ./
|
||||
COPY ./kubernetes_platform/go.mod ./kubernetes_platform/go.mod
|
||||
COPY ./api/go.mod ./api/go.mod
|
||||
|
||||
RUN GO111MODULE=on go mod download
|
||||
|
||||
COPY . .
|
||||
|
||||
RUN GO111MODULE=on go build -o /bin/persistence_agent backend/src/agent/persistence/*.go
|
||||
# Check licenses and comply with license terms.
|
||||
# First, make sure there's no forbidden license.
|
||||
RUN go-licenses check ./backend/src/agent/persistence
|
||||
RUN go-licenses csv ./backend/src/agent/persistence > /tmp/licenses.csv && \
|
||||
diff /tmp/licenses.csv backend/third_party_licenses/persistence_agent.csv && \
|
||||
go-licenses save ./backend/src/agent/persistence --save_path /tmp/NOTICES
|
||||
|
||||
FROM alpine:3.19
|
||||
|
||||
|
@ -45,19 +39,15 @@ USER appuser
|
|||
WORKDIR /bin
|
||||
|
||||
COPY --from=builder /bin/persistence_agent /bin/persistence_agent
|
||||
# Copy licenses and notices.
|
||||
COPY --from=builder /tmp/licenses.csv /third_party/licenses.csv
|
||||
COPY --from=builder /tmp/NOTICES /third_party/NOTICES
|
||||
|
||||
ENV NAMESPACE ""
|
||||
ENV NAMESPACE=""
|
||||
|
||||
# Set Workflow TTL to 1 day. The way to use a different value for a particular Kubeflow Pipelines deployment is demonstrated in manifests/kustomize/base/pipeline/ml-pipeline-persistenceagent-deployment.yaml
|
||||
ENV TTL_SECONDS_AFTER_WORKFLOW_FINISH 86400
|
||||
ENV TTL_SECONDS_AFTER_WORKFLOW_FINISH=86400
|
||||
|
||||
# NUM_WORKERS indicates now many worker goroutines
|
||||
ENV NUM_WORKERS 2
|
||||
ENV LOG_LEVEL info
|
||||
ENV NUM_WORKERS=2
|
||||
ENV LOG_LEVEL=info
|
||||
|
||||
ENV EXECUTIONTYPE Workflow
|
||||
ENV EXECUTIONTYPE=Workflow
|
||||
|
||||
CMD persistence_agent --logtostderr=true --namespace=${NAMESPACE} --ttlSecondsAfterWorkflowFinish=${TTL_SECONDS_AFTER_WORKFLOW_FINISH} --numWorker ${NUM_WORKERS} --executionType ${EXECUTIONTYPE} --logLevel=${LOG_LEVEL}
|
||||
CMD persistence_agent --logtostderr=true --namespace=${NAMESPACE} --ttlSecondsAfterWorkflowFinish=${TTL_SECONDS_AFTER_WORKFLOW_FINISH} --numWorker ${NUM_WORKERS} --executionType ${EXECUTIONTYPE} --logLevel=${LOG_LEVEL}
|
||||
|
|
|
@ -12,30 +12,24 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
FROM golang:1.21.7-alpine3.19 as builder
|
||||
|
||||
WORKDIR /go/src/github.com/kubeflow/pipelines
|
||||
|
||||
COPY ./go.mod ./
|
||||
COPY ./go.sum ./
|
||||
COPY ./hack/install-go-licenses.sh ./hack/
|
||||
|
||||
RUN GO111MODULE=on go mod download
|
||||
RUN ./hack/install-go-licenses.sh
|
||||
|
||||
COPY . .
|
||||
FROM golang:1.23-alpine AS builder
|
||||
|
||||
# Needed musl-dev for github.com/mattn/go-sqlite3
|
||||
RUN apk update && apk upgrade && \
|
||||
apk add --no-cache bash git openssh gcc musl-dev
|
||||
|
||||
WORKDIR /go/src/github.com/kubeflow/pipelines
|
||||
|
||||
COPY ./go.mod ./
|
||||
COPY ./go.sum ./
|
||||
COPY ./kubernetes_platform/go.mod ./kubernetes_platform/go.mod
|
||||
COPY ./api/go.mod ./api/go.mod
|
||||
|
||||
RUN GO111MODULE=on go mod download
|
||||
|
||||
COPY . .
|
||||
|
||||
RUN GO111MODULE=on go build -o /bin/controller backend/src/crd/controller/scheduledworkflow/*.go
|
||||
# Check licenses and comply with license terms.
|
||||
# First, make sure there's no forbidden license.
|
||||
RUN go-licenses check ./backend/src/crd/controller/scheduledworkflow
|
||||
RUN go-licenses csv ./backend/src/crd/controller/scheduledworkflow > /tmp/licenses.csv && \
|
||||
diff /tmp/licenses.csv backend/third_party_licenses/swf.csv && \
|
||||
go-licenses save ./backend/src/crd/controller/scheduledworkflow --save_path /tmp/NOTICES
|
||||
|
||||
FROM alpine:3.19
|
||||
|
||||
|
@ -47,11 +41,7 @@ USER appuser
|
|||
WORKDIR /bin
|
||||
|
||||
COPY --from=builder /bin/controller /bin/controller
|
||||
# Copy licenses and notices.
|
||||
COPY --from=builder /tmp/licenses.csv /third_party/licenses.csv
|
||||
COPY --from=builder /tmp/NOTICES /third_party/NOTICES
|
||||
|
||||
ENV NAMESPACE ""
|
||||
ENV LOG_LEVEL info
|
||||
ENV NAMESPACE=""
|
||||
ENV LOG_LEVEL=info
|
||||
|
||||
CMD /bin/controller --logtostderr=true --namespace=${NAMESPACE} --logLevel=${LOG_LEVEL}
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
FROM golang:1.21.7-alpine3.19 as builder
|
||||
FROM golang:1.23-alpine as builder
|
||||
|
||||
RUN apk update && apk upgrade
|
||||
RUN apk add --no-cache git gcc musl-dev
|
||||
|
@ -21,20 +21,14 @@ WORKDIR /src/github.com/kubeflow/pipelines
|
|||
|
||||
COPY ./go.mod ./
|
||||
COPY ./go.sum ./
|
||||
COPY ./hack/install-go-licenses.sh ./hack/
|
||||
COPY ./kubernetes_platform/go.mod ./kubernetes_platform/go.mod
|
||||
COPY ./api/go.mod ./api/go.mod
|
||||
|
||||
RUN GO111MODULE=on go mod download
|
||||
RUN ./hack/install-go-licenses.sh
|
||||
|
||||
COPY . .
|
||||
|
||||
RUN GO111MODULE=on go build -o /bin/controller backend/src/crd/controller/viewer/*.go
|
||||
# Check licenses and comply with license terms.
|
||||
# First, make sure there's no forbidden license.
|
||||
RUN go-licenses check ./backend/src/crd/controller/viewer
|
||||
RUN go-licenses csv ./backend/src/crd/controller/viewer > /tmp/licenses.csv && \
|
||||
diff /tmp/licenses.csv backend/third_party_licenses/viewer.csv && \
|
||||
go-licenses save ./backend/src/crd/controller/viewer --save_path /tmp/NOTICES
|
||||
|
||||
FROM alpine
|
||||
WORKDIR /bin
|
||||
|
@ -42,9 +36,6 @@ WORKDIR /bin
|
|||
COPY --from=builder /bin/controller /bin/controller
|
||||
RUN chmod +x /bin/controller
|
||||
|
||||
# Copy licenses and notices.
|
||||
COPY --from=builder /tmp/licenses.csv /third_party/licenses.csv
|
||||
COPY --from=builder /tmp/NOTICES /third_party/NOTICES
|
||||
|
||||
ENV MAX_NUM_VIEWERS "50"
|
||||
ENV NAMESPACE "kubeflow"
|
||||
|
|
|
@ -21,12 +21,12 @@
|
|||
FROM tensorflow/tensorflow:2.10.1
|
||||
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y wget curl tar openssl
|
||||
&& apt-get install -y wget curl tar openssl \
|
||||
&& curl https://dl.google.com/dl/cloudsdk/release/google-cloud-sdk.tar.gz > /tmp/google-cloud-sdk.tar.gz \
|
||||
&& mkdir -p /usr/local/gcloud \
|
||||
&& tar -C /usr/local/gcloud -xf /tmp/google-cloud-sdk.tar.gz \
|
||||
&& /usr/local/gcloud/google-cloud-sdk/install.sh
|
||||
|
||||
RUN curl https://dl.google.com/dl/cloudsdk/release/google-cloud-sdk.tar.gz > /tmp/google-cloud-sdk.tar.gz
|
||||
RUN mkdir -p /usr/local/gcloud
|
||||
RUN tar -C /usr/local/gcloud -xf /tmp/google-cloud-sdk.tar.gz
|
||||
RUN /usr/local/gcloud/google-cloud-sdk/install.sh
|
||||
ENV PATH $PATH:/usr/local/gcloud/google-cloud-sdk/bin
|
||||
|
||||
WORKDIR /src
|
||||
|
|
149
backend/Makefile
149
backend/Makefile
|
@ -1,9 +1,20 @@
|
|||
BUILD=build
|
||||
MOD_ROOT=..
|
||||
CSV_PATH=backend/third_party_licenses
|
||||
KIND_NAME ?= dev-pipelines-api
|
||||
|
||||
CERT_MANAGER_VERSION ?= v1.16.2
|
||||
|
||||
# Container Build Params
|
||||
CONTAINER_ENGINE ?= docker
|
||||
CONTAINER_ENGINE ?= $(shell \
|
||||
if command -v docker >/dev/null 2>&1; then \
|
||||
echo docker; \
|
||||
elif command -v podman >/dev/null 2>&1; then \
|
||||
echo podman; \
|
||||
fi \
|
||||
)
|
||||
|
||||
# IMG_REGISTRY can be used to automatically prepend registry details. e.g. "quay.io/kubeflow/"
|
||||
IMG_REGISTRY ?=
|
||||
IMG_TAG_APISERVER ?= apiserver
|
||||
IMG_TAG_PERSISTENCEAGENT ?= persistence-agent
|
||||
IMG_TAG_CACHESERVER ?= cache-server
|
||||
|
@ -12,79 +23,117 @@ IMG_TAG_VIEWERCONTROLLER ?= viewercontroller
|
|||
IMG_TAG_VISUALIZATION ?= visualization
|
||||
IMG_TAG_DRIVER ?= kfp-driver
|
||||
IMG_TAG_LAUNCHER ?= kfp-launcher
|
||||
IMG_TAG_WEBHOOK_PROXY ?= domain.local/kfp/webhook-proxy:latest
|
||||
|
||||
# Whenever build command for any of the binaries change, we should update them both here and in backend/Dockerfiles.
|
||||
|
||||
.PHONY: all
|
||||
all: license_apiserver license_persistence_agent license_cache_server license_swf license_viewer license_driver license_launcher
|
||||
all: image_all
|
||||
|
||||
.PHONY: clean
|
||||
clean:
|
||||
rm -rf $(BUILD)
|
||||
|
||||
$(BUILD)/apiserver:
|
||||
GO111MODULE=on go build -o $(BUILD)/apiserver github.com/kubeflow/pipelines/backend/src/apiserver
|
||||
$(BUILD)/persistence_agent:
|
||||
GO111MODULE=on go build -o $(BUILD)/persistence_agent github.com/kubeflow/pipelines/backend/src/agent/persistence
|
||||
$(BUILD)/cache_server:
|
||||
GO111MODULE=on go build -o $(BUILD)/cache_server github.com/kubeflow/pipelines/backend/src/cache
|
||||
$(BUILD)/swf:
|
||||
GO111MODULE=on go build -o $(BUILD)/swf github.com/kubeflow/pipelines/backend/src/crd/controller/scheduledworkflow
|
||||
$(BUILD)/viewer:
|
||||
GO111MODULE=on go build -o $(BUILD)/viewer github.com/kubeflow/pipelines/backend/src/crd/controller/viewer
|
||||
$(BUILD)/driver:
|
||||
GO111MODULE=on go build -o $(BUILD)/driver github.com/kubeflow/pipelines/backend/src/v2/cmd/driver
|
||||
$(BUILD)/launcher:
|
||||
GO111MODULE=on go build -o $(BUILD)/launcher github.com/kubeflow/pipelines/backend/src/v2/cmd/launcher-v2
|
||||
|
||||
# Update licenses info after dependencies changed.
|
||||
# See README.md#updating-licenses-info section for more details.
|
||||
.PHONY: license_apiserver
|
||||
license_apiserver: $(BUILD)/apiserver
|
||||
cd $(MOD_ROOT) && go-licenses csv ./backend/src/apiserver > $(CSV_PATH)/apiserver.csv
|
||||
.PHONY: license_persistence_agent
|
||||
license_persistence_agent: $(BUILD)/persistence_agent
|
||||
cd $(MOD_ROOT) && go-licenses csv ./backend/src/agent/persistence > $(CSV_PATH)/persistence_agent.csv
|
||||
.PHONY: license_cache_server
|
||||
license_cache_server: $(BUILD)/cache_server
|
||||
cd $(MOD_ROOT) && go-licenses csv ./backend/src/cache > $(CSV_PATH)/cache_server.csv
|
||||
.PHONY: license_swf
|
||||
license_swf: $(BUILD)/swf
|
||||
cd $(MOD_ROOT) && go-licenses csv ./backend/src/crd/controller/scheduledworkflow > $(CSV_PATH)/swf.csv
|
||||
.PHONY: license_viewer
|
||||
license_viewer: $(BUILD)/viewer
|
||||
cd $(MOD_ROOT) && go-licenses csv ./backend/src/crd/controller/viewer > $(CSV_PATH)/viewer.csv
|
||||
.PHONY: license_driver
|
||||
license_driver: $(BUILD)/driver
|
||||
cd $(MOD_ROOT) && go-licenses csv ./backend/src/v2/cmd/driver > $(CSV_PATH)/driver.csv
|
||||
.PHONY: license_launcher
|
||||
license_launcher: $(BUILD)/launcher
|
||||
cd $(MOD_ROOT) && go-licenses csv ./backend/src/v2/cmd/launcher-v2 > $(CSV_PATH)/launcher.csv
|
||||
|
||||
.PHONY: image_all
|
||||
image_all: image_apiserver image_persistence_agent image_cache image_swf image_viewer image_visualization image_driver image_launcher
|
||||
|
||||
.PHONY: image_apiserver
|
||||
image_apiserver:
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build -t ${IMG_TAG_APISERVER} -f backend/Dockerfile .
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build --platform linux/amd64 -t ${IMG_REGISTRY}${IMG_TAG_APISERVER} -f backend/Dockerfile .
|
||||
.PHONY: image_persistence_agent
|
||||
image_persistence_agent:
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build -t ${IMG_TAG_PERSISTENCEAGENT} -f backend/Dockerfile.persistenceagent .
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build --platform linux/amd64 -t ${IMG_REGISTRY}${IMG_TAG_PERSISTENCEAGENT} -f backend/Dockerfile.persistenceagent .
|
||||
.PHONY: image_cache
|
||||
image_cache:
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build -t ${IMG_TAG_CACHESERVER} -f backend/Dockerfile.cacheserver .
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build --platform linux/amd64 -t ${IMG_REGISTRY}${IMG_TAG_CACHESERVER} -f backend/Dockerfile.cacheserver .
|
||||
.PHONY: image_swf
|
||||
image_swf:
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build -t ${IMG_TAG_SCHEDULEDWORKFLOW} -f backend/Dockerfile.scheduledworkflow .
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build --platform linux/amd64 -t ${IMG_REGISTRY}${IMG_TAG_SCHEDULEDWORKFLOW} -f backend/Dockerfile.scheduledworkflow .
|
||||
.PHONY: image_viewer
|
||||
image_viewer:
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build -t ${IMG_TAG_VIEWERCONTROLLER} -f backend/Dockerfile.viewercontroller .
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build --platform linux/amd64 -t ${IMG_REGISTRY}${IMG_TAG_VIEWERCONTROLLER} -f backend/Dockerfile.viewercontroller .
|
||||
.PHONY: image_visualization
|
||||
image_visualization:
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build -t ${IMG_TAG_VISUALIZATION} -f backend/Dockerfile.visualization .
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build --platform linux/amd64 -t ${IMG_REGISTRY}${IMG_TAG_VISUALIZATION} -f backend/Dockerfile.visualization .
|
||||
.PHONY: image_driver
|
||||
image_driver:
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build -t ${IMG_TAG_DRIVER} -f backend/Dockerfile.driver .
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build --platform linux/amd64 -t ${IMG_REGISTRY}${IMG_TAG_DRIVER} -f backend/Dockerfile.driver .
|
||||
.PHONY: image_driver_debug
|
||||
image_driver_debug:
|
||||
cd $(MOD_ROOT) && sed -e '/RUN .*go mod download/a\
|
||||
RUN go install github.com/go-delve/delve/cmd/dlv@latest' \
|
||||
-e '/COPY .*\/bin\/driver \/bin\/driver/a\
|
||||
COPY . \/go\/src\/github.com\/kubeflow\/pipelines\
|
||||
COPY --from=builder /go/bin/dlv /bin/dlv\
|
||||
EXPOSE 2345' \
|
||||
backend/Dockerfile.driver > backend/Dockerfile.driver-debug
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build --platform linux/amd64 --build-arg GCFLAGS="all=-N -l" -t ${IMG_REGISTRY}${IMG_TAG_DRIVER}:debug -f backend/Dockerfile.driver-debug .
|
||||
.PHONY: image_launcher
|
||||
image_launcher:
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build -t ${IMG_TAG_LAUNCHER} -f backend/Dockerfile.launcher .
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build --platform linux/amd64 -t ${IMG_REGISTRY}${IMG_TAG_LAUNCHER} -f backend/Dockerfile.launcher .
|
||||
|
||||
.PHONY: install-cert-manager
|
||||
install-cert-manager:
|
||||
kubectl apply -f https://github.com/cert-manager/cert-manager/releases/download/$(CERT_MANAGER_VERSION)/cert-manager.yaml
|
||||
kubectl wait deployment -n cert-manager cert-manager --for condition=Available=True --timeout=180s
|
||||
kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=cert-manager -n cert-manager --timeout=180s
|
||||
|
||||
# Creates a Kind cluster and Deploys a standalone KFP instance
|
||||
# In the Kubeflow namespace.
|
||||
.PHONY: kind-cluster-agnostic
|
||||
kind-cluster-agnostic:
|
||||
# Deploy Kind Cluster
|
||||
kind create cluster --name $(KIND_NAME)
|
||||
kubectl config use-context kind-$(KIND_NAME)
|
||||
kind get kubeconfig --name $(KIND_NAME) > $(CURDIR)/../kubeconfig_$(KIND_NAME)
|
||||
# Deploy cluster resources required by KFP
|
||||
kubectl apply -k $(CURDIR)/../manifests/kustomize/cluster-scoped-resources
|
||||
kubectl wait --for condition=established --timeout=1m crd/applications.app.k8s.io
|
||||
# Deploy KFP
|
||||
kubectl apply -k $(CURDIR)/../manifests/kustomize/env/platform-agnostic
|
||||
kubectl -n kubeflow wait --for condition=Available --timeout=10m deployment/mysql
|
||||
kubectl -n kubeflow wait --for condition=Available --timeout=3m deployment/metadata-grpc-deployment
|
||||
kubectl -n kubeflow wait --for condition=Available --timeout=3m deployment/ml-pipeline
|
||||
# Switch to Kubeflow namespace context
|
||||
kubectl config set-context --current --namespace=kubeflow
|
||||
|
||||
.PHONY: dev-kind-cluster
|
||||
dev-kind-cluster:
|
||||
${CONTAINER_ENGINE} build -t ${IMG_TAG_WEBHOOK_PROXY} -f $(CURDIR)/../tools/kind/Dockerfile.webhook-proxy $(CURDIR)/../tools/kind
|
||||
-kind create cluster --name $(KIND_NAME) --config $(CURDIR)/../tools/kind/kind-config.yaml
|
||||
kubectl config use-context kind-$(KIND_NAME)
|
||||
kind get kubeconfig --name $(KIND_NAME) > $(CURDIR)/../kubeconfig_$(KIND_NAME)
|
||||
@if [ "${CONTAINER_ENGINE}" = "docker" ]; then \
|
||||
kind --name ${KIND_NAME} load docker-image ${IMG_TAG_WEBHOOK_PROXY}; \
|
||||
else \
|
||||
bash -c "kind load --name ${KIND_NAME} image-archive <( ${CONTAINER_ENGINE} save ${IMG_TAG_WEBHOOK_PROXY})"; \
|
||||
fi
|
||||
$(MAKE) install-cert-manager
|
||||
kubectl apply -k $(CURDIR)/../manifests/kustomize/cluster-scoped-resources
|
||||
kubectl wait --for condition=established --timeout=1m crd/applications.app.k8s.io
|
||||
kubectl apply -k $(CURDIR)/../manifests/kustomize/env/dev-kind
|
||||
kubectl apply -f $(CURDIR)/../tools/kind/webhook-proxy.yaml
|
||||
kubectl -n kubeflow wait --for condition=Available --timeout=10m deployment/mysql
|
||||
kubectl -n kubeflow wait --for condition=Available --timeout=3m deployment/metadata-grpc-deployment
|
||||
|
||||
.PHONY: kind-load-driver-debug
|
||||
kind-load-driver-debug:
|
||||
@if [ "${CONTAINER_ENGINE}" = "docker" ]; then \
|
||||
kind --name ${KIND_NAME} load docker-image ${IMG_TAG_DRIVER}:debug
|
||||
else \
|
||||
bash -c "kind load --name ${KIND_NAME} image-archive <( ${CONTAINER_ENGINE} save ${IMG_TAG_DRIVER})"; \
|
||||
fi
|
||||
|
||||
.PHONY: kind-build-and-load-driver-debug
|
||||
kind-build-and-load-driver-debug: image_driver_debug kind-load-driver-debug
|
||||
|
||||
.PHONY: lint-and-format
|
||||
lint-and-format: lint format
|
||||
|
||||
.PHONY: lint
|
||||
lint:
|
||||
golangci-lint run --new-from-rev HEAD --fix
|
||||
|
||||
.PHONY: format
|
||||
format:
|
||||
golangci-lint fmt
|
|
@ -1,8 +1,14 @@
|
|||
approvers:
|
||||
- chensun
|
||||
- rimolive
|
||||
- hbelmiro
|
||||
- mprahl
|
||||
reviewers:
|
||||
- chensun
|
||||
- hbelmiro
|
||||
- HumairAK
|
||||
- rimolive
|
||||
- mprahl
|
||||
- gmfrasca
|
||||
- droctothorpe
|
||||
- zazulam
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue