Compare commits
520 Commits
Author | SHA1 | Date |
---|---|---|
|
6ccf261e25 | |
|
ccd0c334e0 | |
|
00c95e91b3 | |
|
798647b08d | |
|
10f340bf73 | |
|
87106d06bc | |
|
472f72c934 | |
|
9f5abab7d2 | |
|
000a111396 | |
|
c64feac20e | |
|
4bd3d4b4e9 | |
|
8b84ad0f5b | |
|
ecf488b65f | |
|
a870b1a325 | |
|
169e45f6f0 | |
|
8b9d224b0e | |
|
bcb9ee4324 | |
|
cc35187dff | |
|
1791485129 | |
|
ca52e050f5 | |
|
ce848730b8 | |
|
756c7ad202 | |
|
dcd2707477 | |
|
ec2cb97a64 | |
|
f8a8faa175 | |
|
bc32adbccd | |
|
dc398f689e | |
|
fb5262f2d2 | |
|
f358787e5f | |
|
9cad3200a5 | |
|
f20cec5b70 | |
|
09ced38500 | |
|
daac099508 | |
|
ea10149c66 | |
|
ad2730cf19 | |
|
69a184637a | |
|
9aa3dfb0d9 | |
|
ecfe94ebc3 | |
|
63acc678aa | |
|
04407fbe93 | |
|
ada935a0ad | |
|
e8e23f0d7a | |
|
b7f1d279d8 | |
|
a603d40812 | |
|
7f60100602 | |
|
268e0898ec | |
|
a123d53be0 | |
|
99326e1bd1 | |
|
c3d05eb0b1 | |
|
c09b635a6b | |
|
4dc64056b4 | |
|
9b252ff4fb | |
|
8bbdd6e81f | |
|
a30e9ead10 | |
|
f240685bf3 | |
|
3154ef9258 | |
|
88870823fa | |
|
da358e5176 | |
|
b9a01c0e40 | |
|
5181358d6a | |
|
11000f5fab | |
|
aadf9e3cdf | |
|
cfa47fecbc | |
|
40a9ad4823 | |
|
54475637a2 | |
|
441a465d19 | |
|
b4772693ae | |
|
b306d8d6d6 | |
|
2b04d2f896 | |
|
cc7830812a | |
|
0d857b6f8a | |
|
faa2c8cd16 | |
|
8f5c277d9b | |
|
48468ae1fa | |
|
ea20731060 | |
|
dda6033a03 | |
|
7ba495fcc3 | |
|
b87b521fe9 | |
|
8b0c022b33 | |
|
f342504655 | |
|
73b58e4f16 | |
|
4c966f7773 | |
|
8fe090d461 | |
|
9fdbe980c8 | |
|
beae62fb52 | |
|
53bb3a0aad | |
|
1b0e6535b5 | |
|
6c32514c35 | |
|
8329e64716 | |
|
732a3f26f5 | |
|
d45b5810eb | |
|
3337b5e323 | |
|
67f9b7d73c | |
|
9761a31ec3 | |
|
e329fa39b6 | |
|
dc841dde6f | |
|
7342f2b737 | |
|
4f09f01090 | |
|
c368ac6881 | |
|
d6c864f038 | |
|
0031766201 | |
|
2a175883ee | |
|
5bc1a9bed1 | |
|
943643b413 | |
|
988477a7de | |
|
508fc9dd8f | |
|
ef94ccd734 | |
|
18bed6c70d | |
|
e276474f97 | |
|
0e7e806b4c | |
|
9aebb62be1 | |
|
0010b06731 | |
|
9e4f26ce74 | |
|
9245739f6f | |
|
e5cb3023b1 | |
|
4503eae10d | |
|
ed828b513a | |
|
70d28885f2 | |
|
24782d178d | |
|
56da004d91 | |
|
c68640d950 | |
|
c03127d967 | |
|
d90e4e8a54 | |
|
2b94ee6101 | |
|
8261e4af70 | |
|
27ba5ff671 | |
|
3ab625c645 | |
|
8a402c10a8 | |
|
fe51dfd792 | |
|
64e8264352 | |
|
0359551b76 | |
|
fb182355f0 | |
|
90909fc0ef | |
|
7529bbeba7 | |
|
e21bbbaf22 | |
|
93675b03d4 | |
|
5c868d40ad | |
|
3f6a93b28d | |
|
f63c7c530d | |
|
f3e3fac699 | |
|
d38418efea | |
|
611d582004 | |
|
f9d487cb60 | |
|
05581ccefc | |
|
e696472a5b | |
|
924b13c98c | |
|
598826e1cc | |
|
a4f07231a7 | |
|
177cd3b3e7 | |
|
aa95142f5a | |
|
88cff55914 | |
|
866ff3556a | |
|
cc56d04c46 | |
|
eee4986f18 | |
|
6877d16eac | |
|
69ba50b3fb | |
|
92e4921c4c | |
|
478ca08901 | |
|
5a3dc8473e | |
|
464ca3974f | |
|
38a46533fc | |
|
1ad4f608a0 | |
|
9544293af3 | |
|
6e3548f33e | |
|
a680e2230c | |
|
503beb51a3 | |
|
eacb586f62 | |
|
cd3e747b5d | |
|
f10c7bfbbc | |
|
7c90446966 | |
|
c5b9e4d21e | |
|
bb7a1082c4 | |
|
230c1b8f13 | |
|
564522c42d | |
|
bd4fc5c667 | |
|
048f28332b | |
|
ade8a2d072 | |
|
c9be64dca3 | |
|
97e57368d1 | |
|
556c98ae4e | |
|
9605c08026 | |
|
ec727fd5f3 | |
|
ae7c5243c5 | |
|
35041ef2bd | |
|
596ec90bb8 | |
|
3fa617f19c | |
|
8ffcc2ccb0 | |
|
ea9a8f2344 | |
|
2efcde5efd | |
|
e9f5b5aee2 | |
|
fd1b48b471 | |
|
95bd63ef15 | |
|
715ed40b92 | |
|
c46c1d2cf9 | |
|
01999b8fea | |
|
95c3f2c04d | |
|
c5aba41bca | |
|
2694605996 | |
|
d1b15ef4da | |
|
06a7350191 | |
|
3e7d499236 | |
|
12f4676b4c | |
|
df4e9c2bf5 | |
|
11e84d0caa | |
|
8d0ae5381e | |
|
d502687061 | |
|
0d9a7b00e9 | |
|
3a89bd8564 | |
|
03453bc3b5 | |
|
afb3b1461b | |
|
89c8bd7274 | |
|
30210e33bf | |
|
1956d69968 | |
|
b5c204300b | |
|
a7ec34f571 | |
|
78675b0d7f | |
|
7838009538 | |
|
13b819424e | |
|
7d8e9211f6 | |
|
355f78c51b | |
|
976fba871f | |
|
30d7c397e6 | |
|
c8fe908ba8 | |
|
0a94ac418d | |
|
eb7286d859 | |
|
0afb12d6a7 | |
|
9afe23e748 | |
|
ebaaf75631 | |
|
a6b944b894 | |
|
f7c0616db7 | |
|
d2c0376b0a | |
|
1c4f676d94 | |
|
7719b38061 | |
|
cc1c435f1e | |
|
c0778ba88c | |
|
8ca7ec1768 | |
|
b1315667be | |
|
87498e8b60 | |
|
472f8779de | |
|
7bb0c448cd | |
|
94eca2102c | |
|
22c372437d | |
|
a1f3262f37 | |
|
ba22703263 | |
|
dcaf5a48e5 | |
|
65d1d79fb7 | |
|
6a13f4bad0 | |
|
926aec55d4 | |
|
51c776c745 | |
|
c100648fa8 | |
|
42fc132616 | |
|
a40163fdf2 | |
|
0178d41bf2 | |
|
af4540e7d3 | |
|
6cb7cf71fa | |
|
32bdbe8dbc | |
|
18641e16cb | |
|
d3a016dd64 | |
|
cd66b6965c | |
|
9c5b72c2d0 | |
|
915cc552f5 | |
|
ac9b257a7a | |
|
ce3850ad9a | |
|
37a7b4ecb9 | |
|
a81b51339c | |
|
1234c8d6fe | |
|
d2ddb2ed1c | |
|
113a4c6788 | |
|
dbefbb8ee9 | |
|
a469b10806 | |
|
8fe21574c6 | |
|
7497b65067 | |
|
906b5c0841 | |
|
76ce3226ec | |
|
3e423d8d1c | |
|
a0e24069e3 | |
|
954145b877 | |
|
83791e7703 | |
|
027ca8b9c9 | |
|
4517cbe872 | |
|
533a3c6b66 | |
|
682d3aca5f | |
|
dc97ce77a7 | |
|
8bce9c4ef6 | |
|
028d81b624 | |
|
b4ecbabbba | |
|
56e6116d05 | |
|
81ebd7ff9b | |
|
3059f7c124 | |
|
a40be7b569 | |
|
7c931ae201 | |
|
e89d2d5f2d | |
|
d21fca650c | |
|
2686e017ce | |
|
72f11d9801 | |
|
873e9dedd7 | |
|
2ebb853fd7 | |
|
9c6ec0f4f7 | |
|
2a77a89e34 | |
|
f3cb244198 | |
|
803d7a8ebb | |
|
97acacbd2a | |
|
54b9a253da | |
|
399a597185 | |
|
cb07619ec6 | |
|
e71825abe6 | |
|
0eb67e1f7b | |
|
2eed0b8faf | |
|
d49b5d0c4b | |
|
87bdb7c3b1 | |
|
6c567adfb8 | |
|
2b1aed3a30 | |
|
50b367f232 | |
|
b4799df4ba | |
|
22e85de2bc | |
|
b7d8c97d65 | |
|
6ebf4aae03 | |
|
60e4163923 | |
|
7f2278f252 | |
|
4a64fe9532 | |
|
1cdd648239 | |
|
22e77805ed | |
|
13f83cf745 | |
|
58b2d8d721 | |
|
c00d2139fd | |
|
ca004cca30 | |
|
37279617d6 | |
|
23f718d02e | |
|
da02858061 | |
|
5cc1b8d221 | |
|
51f4a3b6e2 | |
|
9ccec4c7d1 | |
|
94a21cc7e2 | |
|
11c5d7ed43 | |
|
35793be416 | |
|
2e05b3db26 | |
|
467f30cf61 | |
|
533eddc942 | |
|
c57e8973ac | |
|
634aadf808 | |
|
8faa24dfd6 | |
|
77f23c950e | |
|
af0cd0628d | |
|
1ea240e893 | |
|
64e390069d | |
|
55de4a3a54 | |
|
cac3739565 | |
|
11a8b866e9 | |
|
48a1340ce5 | |
|
1708ce79bb | |
|
8d018aff6e | |
|
f8973d27cc | |
|
5dc43b6c5b | |
|
9a9cf73179 | |
|
60a88654d4 | |
|
c5b787aacc | |
|
f2fead51cc | |
|
6684b6d671 | |
|
04d600b2d3 | |
|
c81c61e101 | |
|
df4d7878c4 | |
|
df28e891c4 | |
|
c2f56495b9 | |
|
abbd915a2a | |
|
c84241b736 | |
|
c5f162d552 | |
|
f256d86fbb | |
|
6a35ee5144 | |
|
095a794cd4 | |
|
187ecd0cc4 | |
|
c2a7713426 | |
|
be863a8529 | |
|
aec28561d5 | |
|
ae4ab389a3 | |
|
a0d313e095 | |
|
1aeea0093f | |
|
e16b4469d7 | |
|
7afbbd7aed | |
|
4ccb047771 | |
|
1550b363ae | |
|
753a2f148a | |
|
391de8ca9e | |
|
c27df6fd21 | |
|
03b488d1a3 | |
|
8590ec1147 | |
|
6f6c8aeda9 | |
|
219725d9f0 | |
|
374b18bc33 | |
|
f4cdbeb913 | |
|
6c3ab39ee2 | |
|
85fdd73ae0 | |
|
feacb5287b | |
|
db8669c33e | |
|
3f495229f2 | |
|
880e46dc1f | |
|
be6adb6ae8 | |
|
ceeda01d0a | |
|
ea8f37c4dc | |
|
2305344af0 | |
|
28967bc910 | |
|
7b38b6b3b0 | |
|
a4119a6bf1 | |
|
70aaf8a9a4 | |
|
f4857ad84f | |
|
6589d4af07 | |
|
0b92f86408 | |
|
ba006bddcb | |
|
581b7e5b7e | |
|
b3e75cf649 | |
|
cfb3b3149d | |
|
eee095e5c8 | |
|
1a4849a032 | |
|
c64294910e | |
|
123ed1eb30 | |
|
d23b72bf12 | |
|
1cded35cf5 | |
|
dfd4cc1e53 | |
|
d3aa8370aa | |
|
3af8b6494d | |
|
2481193faf | |
|
389776463b | |
|
4467df5f9c | |
|
b69c1b67b3 | |
|
e9c77ec6d5 | |
|
c7f87293a3 | |
|
a59261c994 | |
|
3efa02984c | |
|
7f9cf65f1c | |
|
a34552aa99 | |
|
d64554b1a8 | |
|
01ad460ba4 | |
|
bd76229fc7 | |
|
344ea02549 | |
|
f075d0c516 | |
|
4c955f4780 | |
|
2c91fb797e | |
|
51d2c92b55 | |
|
1c7954de26 | |
|
0d098db41c | |
|
7c7ff1234f | |
|
e1d172bb57 | |
|
a71033b20a | |
|
a3adf9471c | |
|
7506a8eafa | |
|
ae8315c4e6 | |
|
37441afd54 | |
|
91cd1f4da3 | |
|
4f36fe3637 | |
|
0d3e79adc7 | |
|
43cdc2081d | |
|
38f8f8e44f | |
|
1a07ffa911 | |
|
120a908305 | |
|
36cf06677d | |
|
383cbcd065 | |
|
8041ea8740 | |
|
2e6e634de4 | |
|
000ef60080 | |
|
4458bdd53f | |
|
3a68b602e3 | |
|
7b7918ebf8 | |
|
289f64fe94 | |
|
e8f0208777 | |
|
c06d9aee96 | |
|
0e37fd66a4 | |
|
99bd234401 | |
|
6f62203ccf | |
|
2eb9bec1ae | |
|
83dcf1a609 | |
|
c6f658b0e7 | |
|
e128bdbaf3 | |
|
abe525737f | |
|
65f6475156 | |
|
6c4a250032 | |
|
522b5933b0 | |
|
1612dac4ad | |
|
ff4e918033 | |
|
c420f8c218 | |
|
ad03bb6daa | |
|
461d892db8 | |
|
87184fd976 | |
|
e44dfa7e89 | |
|
eea7d48379 | |
|
e940ba201e | |
|
3324d672b1 | |
|
643c923010 | |
|
97f2c66a83 | |
|
f1d54790c6 | |
|
1e95eb6285 | |
|
f6ea7e356f | |
|
13ff5a9a51 | |
|
d911c8b73b | |
|
08185e7171 | |
|
39fbd604fb | |
|
b274866b6a | |
|
10aaf43136 | |
|
c8f08ba49f | |
|
62b355a4c5 | |
|
58b342a850 | |
|
7660e8a91c | |
|
9cb5913d47 | |
|
4930beebb1 | |
|
71a52ab4a9 | |
|
574c2ffeac | |
|
2c3d9d7b35 | |
|
0eae4307a0 | |
|
38ef986eaa | |
|
7d2ec39515 | |
|
2fb4922fc0 | |
|
568777d17c | |
|
7a21329bad | |
|
d499172897 | |
|
fb9213bcf7 | |
|
e3a498020a | |
|
29b7d2fe35 | |
|
bdc3bb1f0d | |
|
1b5de64c6f | |
|
0eac0811fc | |
|
338ff7534e | |
|
6723d3d5a9 |
272
.cloudbuild.yaml
272
.cloudbuild.yaml
|
@ -1,272 +0,0 @@
|
|||
# Copyright 2018 The Kubeflow Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# Test before submit:
|
||||
# gcloud builds submit --config=.cloudbuild.yaml --substitutions=COMMIT_SHA="$(git rev-parse HEAD)" --project=ml-pipeline-test
|
||||
|
||||
steps:
|
||||
|
||||
# # Build the Python SDK
|
||||
# - name: 'python:3-alpine'
|
||||
# entrypoint: '/bin/sh'
|
||||
# args: ['-c', 'cd /workspace/sdk/python/; python3 setup.py sdist --format=gztar; cp dist/*.tar.gz /workspace/kfp.tar.gz']
|
||||
# id: 'preparePythonSDK'
|
||||
# waitFor: ["-"]
|
||||
# - name: 'gcr.io/cloud-builders/gsutil'
|
||||
# args: ['cp', '/workspace/kfp.tar.gz', 'gs://$PROJECT_ID/builds/$COMMIT_SHA/kfp.tar.gz']
|
||||
# id: 'copyPythonSDK'
|
||||
# waitFor: ['preparePythonSDK']
|
||||
# - name: 'gcr.io/cloud-builders/gsutil'
|
||||
# args: ['cp', '/workspace/kfp.tar.gz', 'gs://$PROJECT_ID/builds/latest/kfp.tar.gz']
|
||||
# id: 'copyPythonSDKToLatest'
|
||||
# waitFor: ['preparePythonSDK']
|
||||
|
||||
# Build the pipeline system images
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: /bin/bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
sed -i -e "s/ARG DATE/ENV DATE \"$(date -u)\"/" /workspace/frontend/Dockerfile
|
||||
docker build -t gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA \
|
||||
--build-arg COMMIT_HASH=$COMMIT_SHA \
|
||||
--build-arg TAG_NAME="$(cat /workspace/VERSION)" \
|
||||
-f /workspace/frontend/Dockerfile \
|
||||
/workspace
|
||||
id: 'buildFrontend'
|
||||
waitFor: ['-']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: /bin/bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker build -t gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA \
|
||||
--build-arg COMMIT_SHA=$COMMIT_SHA \
|
||||
--build-arg TAG_NAME="$(cat /workspace/VERSION)" \
|
||||
-f /workspace/backend/Dockerfile /workspace
|
||||
id: 'buildApiServer'
|
||||
waitFor: ['-']
|
||||
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['build', '-t', 'gcr.io/$PROJECT_ID/scheduledworkflow:$COMMIT_SHA', '-f',
|
||||
'/workspace/backend/Dockerfile.scheduledworkflow', '/workspace']
|
||||
id: 'buildScheduledWorkflow'
|
||||
waitFor: ["-"]
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['build', '-t', 'gcr.io/$PROJECT_ID/viewer-crd-controller:$COMMIT_SHA', '-f',
|
||||
'/workspace/backend/Dockerfile.viewercontroller', '/workspace']
|
||||
id: 'buildViewerCrdController'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['build', '-t', 'gcr.io/$PROJECT_ID/persistenceagent:$COMMIT_SHA', '-f',
|
||||
'/workspace/backend/Dockerfile.persistenceagent', '/workspace']
|
||||
id: 'buildPersistenceAgent'
|
||||
waitFor: ["-"]
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['build', '-t', 'gcr.io/$PROJECT_ID/inverse-proxy-agent:$COMMIT_SHA', '-f',
|
||||
'/workspace/proxy/Dockerfile', '/workspace/proxy']
|
||||
id: 'buildInverseProxyAgent'
|
||||
waitFor: ["-"]
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['build', '-t', 'gcr.io/$PROJECT_ID/visualization-server:$COMMIT_SHA',
|
||||
'--build-arg', 'COMMIT_HASH=$COMMIT_SHA', '-f',
|
||||
'/workspace/backend/Dockerfile.visualization', '/workspace']
|
||||
id: 'buildVisualizationServer'
|
||||
waitFor: ["-"]
|
||||
- id: 'buildMetadataWriter'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['build', '-t', 'gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA',
|
||||
'--build-arg', 'COMMIT_HASH=$COMMIT_SHA', '-f',
|
||||
'/workspace/backend/metadata_writer/Dockerfile', '/workspace']
|
||||
waitFor: ["-"]
|
||||
- id: 'buildCacheServer'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['build', '-t', 'gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA',
|
||||
'--build-arg', 'COMMIT_HASH=$COMMIT_SHA', '-f',
|
||||
'/workspace/backend/Dockerfile.cacheserver', '/workspace']
|
||||
waitFor: ["-"]
|
||||
- id: 'buildCacheDeployer'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['build', '-t', 'gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA',
|
||||
'--build-arg', 'COMMIT_HASH=$COMMIT_SHA', '-f',
|
||||
'/workspace/backend/src/cache/deployer/Dockerfile', '/workspace']
|
||||
waitFor: ["-"]
|
||||
|
||||
# Build marketplace deployer
|
||||
- id: 'buildMarketplaceDeployer'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['build', '-t', 'gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA',
|
||||
'--build-arg', 'COMMIT_HASH=$COMMIT_SHA', '-f',
|
||||
'/workspace/manifests/gcp_marketplace/deployer/Dockerfile', '/workspace/manifests/gcp_marketplace']
|
||||
waitFor: ["-"]
|
||||
|
||||
# Build the Kubeflow-based pipeline component images
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['build', '-t', 'gcr.io/$PROJECT_ID/ml-pipeline-kubeflow-deployer:$COMMIT_SHA',
|
||||
'/workspace/components/kubeflow/deployer']
|
||||
id: 'buildDeployer'
|
||||
waitFor: ["-"]
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: '/bin/bash'
|
||||
args: ['-c', 'cd /workspace/components/kubeflow/launcher && ./build_image.sh -p $PROJECT_ID -t $COMMIT_SHA']
|
||||
id: 'buildTFJobLauncher'
|
||||
waitFor: ["-"]
|
||||
- id: 'buildCpuTrainer'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: '/bin/bash'
|
||||
args: ['-c', 'cd /workspace/components/kubeflow/dnntrainer && ./build_image.sh -p $PROJECT_ID -t $COMMIT_SHA -l ml-pipeline-kubeflow-tf-trainer -b 2.3.0']
|
||||
waitFor: ["-"]
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: '/bin/bash'
|
||||
args: ['-c', 'cd /workspace/components/kubeflow/dnntrainer && ./build_image.sh -p $PROJECT_ID -t $COMMIT_SHA -l ml-pipeline-kubeflow-tf-trainer-gpu -b 2.3.0-gpu']
|
||||
id: 'buildGpuTrainer'
|
||||
waitFor: ["-"]
|
||||
|
||||
# Build the local pipeline component images
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: '/bin/bash'
|
||||
args: ['-c', 'cd /workspace/components/local/confusion_matrix && ./build_image.sh -p $PROJECT_ID -t $COMMIT_SHA']
|
||||
id: 'buildConfusionMatrix'
|
||||
waitFor: ["-"]
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: '/bin/bash'
|
||||
args: ['-c', 'cd /workspace/components/local/roc && ./build_image.sh -p $PROJECT_ID -t $COMMIT_SHA']
|
||||
id: 'buildROC'
|
||||
waitFor: ["-"]
|
||||
|
||||
# Build third_party images
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['build', '-t', 'gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA', '-f',
|
||||
'/workspace/third_party/metadata_envoy/Dockerfile', '/workspace']
|
||||
id: 'buildMetadataEnvoy'
|
||||
|
||||
# Pull third_party images
|
||||
# ! Sync to the same MLMD version:
|
||||
# * backend/metadata_writer/requirements.in and requirements.txt
|
||||
# * @kubeflow/frontend/src/mlmd/generated
|
||||
# * .cloudbuild.yaml and .release.cloudbuild.yaml
|
||||
# * manifests/kustomize/base/metadata/base/metadata-grpc-deployment.yaml
|
||||
# * test/tag_for_hosted.sh
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/tfx-oss-public/ml_metadata_store_server:1.14.0']
|
||||
id: 'pullMetadataServer'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/ml-pipeline/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance']
|
||||
id: 'pullMinio'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/ml-pipeline/mysql:8.0.26']
|
||||
id: 'pullMysql'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/cloudsql-docker/gce-proxy:1.25.0']
|
||||
id: 'pullCloudsqlProxy'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/ml-pipeline/argoexec:v3.4.16-license-compliance']
|
||||
id: 'pullArgoExecutor'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/ml-pipeline/workflow-controller:v3.4.16-license-compliance']
|
||||
id: 'pullArgoWorkflowController'
|
||||
|
||||
# V2 related images
|
||||
# Prerequisite: Make sure ko image is available on the same project by running the following:
|
||||
# git clone https://github.com/GoogleCloudPlatform/cloud-builders-community.git
|
||||
# cd cloud-builders-community/ko
|
||||
# gcloud builds submit . --config=cloudbuild.yaml --project=$PROJECT_ID
|
||||
# Reference: https://dev.to/amammay/effective-go-on-gcp-lean-containers-with-ko-on-cloud-build-51ek
|
||||
|
||||
# Temporarily disable v2 image build due to unblock kubeflow-pipeline-mkp-test
|
||||
# We aren't building v2 images for MKP at this moment anyway.
|
||||
#
|
||||
# - name: 'gcr.io/$PROJECT_ID/ko'
|
||||
# entrypoint: /bin/sh
|
||||
# args:
|
||||
# - -c
|
||||
# - |
|
||||
# cd /workspace/backend/src/v2/
|
||||
# /ko publish --bare ./cmd/launcher-v2 -t $COMMIT_SHA
|
||||
# env:
|
||||
# - 'KO_DOCKER_REPO=gcr.io/$PROJECT_ID/kfp-launcher'
|
||||
# id: 'buildLauncher'
|
||||
# waitFor: ["-"]
|
||||
# - name: 'gcr.io/$PROJECT_ID/ko'
|
||||
# entrypoint: /bin/sh
|
||||
# args:
|
||||
# - -c
|
||||
# - |
|
||||
# cd /workspace/backend/src/v2/
|
||||
# /ko publish --bare ./cmd/driver -t $COMMIT_SHA
|
||||
# env:
|
||||
# - 'KO_DOCKER_REPO=gcr.io/$PROJECT_ID/kfp-driver'
|
||||
# id: 'buildDriver'
|
||||
# waitFor: ["-"]
|
||||
|
||||
# Tag for Hosted - SemVersion to Major.Minor parsing
|
||||
- id: "parseMajorMinorVersion"
|
||||
waitFor: ["-"]
|
||||
name: gcr.io/cloud-builders/docker
|
||||
entrypoint: /bin/bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
# Parse major minor version and save to a file for reusing in other steps.
|
||||
# e.g. 1.0.0-rc.1 and 1.0.1 are parsed as 1.0
|
||||
cat /workspace/VERSION | sed -e "s#\([0-9]\+[.][0-9]\+\)[.].*#\1#" > /workspace/mm.ver
|
||||
|
||||
# Tag for Hosted - Tag to hosted folder with MKP friendly name
|
||||
- id: 'tagForHosted'
|
||||
waitFor: ['parseMajorMinorVersion', 'buildFrontend', 'buildApiServer', 'buildScheduledWorkflow',
|
||||
'buildViewerCrdController', 'buildPersistenceAgent', 'buildInverseProxyAgent', 'buildVisualizationServer',
|
||||
'buildMetadataWriter', 'buildCacheServer', 'buildCacheDeployer', 'buildMetadataEnvoy',
|
||||
'buildMarketplaceDeployer', 'pullMetadataServer', 'pullMinio', 'pullMysql', 'pullCloudsqlProxy',
|
||||
'pullArgoExecutor', 'pullArgoWorkflowController']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
/workspace/test/tag_for_hosted.sh $PROJECT_ID $COMMIT_SHA $(cat /workspace/VERSION) $(cat /workspace/mm.ver)
|
||||
|
||||
images:
|
||||
# Images for the pipeline system itself
|
||||
- 'gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA'
|
||||
- 'gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA'
|
||||
- 'gcr.io/$PROJECT_ID/scheduledworkflow:$COMMIT_SHA'
|
||||
- 'gcr.io/$PROJECT_ID/persistenceagent:$COMMIT_SHA'
|
||||
- 'gcr.io/$PROJECT_ID/viewer-crd-controller:$COMMIT_SHA'
|
||||
- 'gcr.io/$PROJECT_ID/inverse-proxy-agent:$COMMIT_SHA'
|
||||
- 'gcr.io/$PROJECT_ID/visualization-server:$COMMIT_SHA'
|
||||
- 'gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA'
|
||||
- 'gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA'
|
||||
- 'gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA'
|
||||
|
||||
# Images for Marketplace
|
||||
- 'gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA'
|
||||
|
||||
# Images for the Kubeflow-based pipeline components
|
||||
- 'gcr.io/$PROJECT_ID/ml-pipeline-kubeflow-deployer:$COMMIT_SHA'
|
||||
- 'gcr.io/$PROJECT_ID/ml-pipeline-kubeflow-tf-trainer:$COMMIT_SHA'
|
||||
- 'gcr.io/$PROJECT_ID/ml-pipeline-kubeflow-tf-trainer-gpu:$COMMIT_SHA'
|
||||
- 'gcr.io/$PROJECT_ID/ml-pipeline-kubeflow-tfjob:$COMMIT_SHA'
|
||||
|
||||
# Images for the local components
|
||||
- 'gcr.io/$PROJECT_ID/ml-pipeline-local-confusion-matrix:$COMMIT_SHA'
|
||||
- 'gcr.io/$PROJECT_ID/ml-pipeline-local-roc:$COMMIT_SHA'
|
||||
|
||||
# Images for the third_party components
|
||||
- 'gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA'
|
||||
|
||||
timeout: '3600s'
|
||||
options:
|
||||
diskSizeGb: 300
|
||||
machineType: 'N1_HIGHCPU_8'
|
||||
tags:
|
||||
- build-each-commit
|
|
@ -3,4 +3,14 @@ bower_components
|
|||
dist
|
||||
**/node_modules
|
||||
backend/build
|
||||
backend/Dockerfile
|
||||
backend/Dockerfile.cacheserver
|
||||
backend/Dockerfile.conformance
|
||||
backend/Dockerfile.driver
|
||||
backend/Dockerfile.launcher
|
||||
backend/Dockerfile.persistenceagent
|
||||
backend/Dockerfile.scheduledworkflow
|
||||
backend/Dockerfile.viewercontroller
|
||||
backend/Dockerfile.visualization
|
||||
frontend/Dockerfile
|
||||
v2/build
|
||||
|
|
|
@ -0,0 +1,25 @@
|
|||
---
|
||||
name: 🧹 Chore
|
||||
about: Create a Chore on Kubeflow Pipelines
|
||||
title: "chore(<component>): <Chore Name>"
|
||||
---
|
||||
|
||||
## Chore description
|
||||
|
||||
<!-- Describe the chore details and why it's needed. -->
|
||||
|
||||
|
||||
### Labels
|
||||
<!-- Please include labels below by uncommenting them to help us better triage issues -->
|
||||
|
||||
<!-- /area frontend -->
|
||||
<!-- /area backend -->
|
||||
<!-- /area sdk -->
|
||||
<!-- /area testing -->
|
||||
<!-- /area samples -->
|
||||
<!-- /area components -->
|
||||
|
||||
---
|
||||
|
||||
<!-- Don't delete the message below to encourage users to support your issue! -->
|
||||
Love this idea? Give it a 👍.
|
|
@ -0,0 +1,7 @@
|
|||
approvers:
|
||||
- hbelmiro
|
||||
- DharmitD
|
||||
- mprahl
|
||||
reviewers:
|
||||
- rimolive
|
||||
- droctothorpe
|
|
@ -0,0 +1,60 @@
|
|||
name: "Set up KFP on KinD"
|
||||
description: "Step to start and configure KFP on KinD"
|
||||
|
||||
inputs:
|
||||
k8s_version:
|
||||
description: "The Kubernetes version to use for the Kind cluster"
|
||||
required: true
|
||||
pipeline_store:
|
||||
description: "Flag to deploy KFP with K8s Native API"
|
||||
default: 'database'
|
||||
required: false
|
||||
proxy:
|
||||
description: "If KFP should be deployed with proxy configuration"
|
||||
required: false
|
||||
default: false
|
||||
cache_enabled:
|
||||
description: "If KFP should be deployed with cache enabled globally"
|
||||
required: false
|
||||
default: 'true'
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Create k8s Kind Cluster
|
||||
uses: container-tools/kind-action@v2
|
||||
with:
|
||||
cluster_name: kfp
|
||||
kubectl_version: ${{ inputs.k8s_version }}
|
||||
version: v0.25.0
|
||||
node_image: kindest/node:${{ inputs.k8s_version }}
|
||||
|
||||
- name: Deploy Squid
|
||||
id: deploy-squid
|
||||
if: ${{ inputs.proxy == 'true' }}
|
||||
shell: bash
|
||||
run: ./.github/resources/squid/deploy-squid.sh
|
||||
|
||||
- name: Build images
|
||||
shell: bash
|
||||
run: |
|
||||
if [ "${{ inputs.proxy }}" = "true" ]; then
|
||||
./.github/resources/scripts/build-images.sh --proxy
|
||||
else
|
||||
./.github/resources/scripts/build-images.sh
|
||||
fi
|
||||
|
||||
- name: Deploy KFP
|
||||
shell: bash
|
||||
run: |
|
||||
ARGS=""
|
||||
|
||||
if [ "${{ inputs.proxy }}" = "true" ]; then
|
||||
ARGS="${ARGS} --proxy"
|
||||
elif [ "${{inputs.cache_enabled }}" = "false" ]; then
|
||||
ARGS="${ARGS} --cache-disabled"
|
||||
elif [ "${{inputs.pipeline_store }}" = "kubernetes" ]; then
|
||||
ARGS="${ARGS} --deploy-k8s-native"
|
||||
fi
|
||||
|
||||
./.github/resources/scripts/deploy-kfp.sh $ARGS
|
|
@ -0,0 +1,53 @@
|
|||
name: "Install kfp & kfp-kubernetes"
|
||||
inputs:
|
||||
build_version:
|
||||
required: true
|
||||
default: "1.2.2"
|
||||
description: "build package version"
|
||||
generate_golang_proto:
|
||||
required: true
|
||||
default: "false"
|
||||
description: "optionally generate golang proto files"
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Install build tool
|
||||
shell: bash
|
||||
run: pip install build==${{inputs.build_version}}
|
||||
|
||||
- name: Build kfp dist
|
||||
id: install-kfp
|
||||
shell: bash
|
||||
working-directory: sdk/python
|
||||
run: |
|
||||
python -m build .
|
||||
|
||||
- name: Generate kfp-kubernetes python proto files from source
|
||||
id: generate-kfp-kubernetes-proto-files
|
||||
shell: bash
|
||||
if: ${{ steps.install-kfp.outcome == 'success' }}
|
||||
working-directory: ./kubernetes_platform
|
||||
run: make clean python USE_FIND_LINKS=true
|
||||
|
||||
- name: Generate kfp-kubernetes golang proto files from source
|
||||
id: generate-kfp-kubernetes-go-proto-files
|
||||
shell: bash
|
||||
if: ${{ steps.install-kfp.outcome == 'success' && inputs.generate_golang_proto == 'true' }}
|
||||
working-directory: ./kubernetes_platform
|
||||
run: make golang
|
||||
|
||||
# kfp is installed transitively
|
||||
# --find-links ensures pip first looks in the sdk/python/dist folder
|
||||
# outputted from generate-kfp-kubernetes-proto-files step before looking at pypi
|
||||
- name: Install kfp & kfp-kubernetes from source
|
||||
id: install-kfp-kubernetes
|
||||
shell: bash
|
||||
if: ${{ steps.generate-kfp-kubernetes-proto-files.outcome == 'success' }}
|
||||
run: |
|
||||
pip install -e ./kubernetes_platform/python[dev] --find-links=sdk/python/dist
|
||||
|
||||
- name: Install test requirements
|
||||
id: install-requirements
|
||||
shell: bash
|
||||
if: ${{ steps.install-kfp-kubernetes.outcome == 'success' }}
|
||||
run: pip install -r ./test/kfp-kubernetes-execution-tests/requirements.txt
|
|
@ -0,0 +1,81 @@
|
|||
name: "Install Proto dependencies & Pipeline Spec"
|
||||
description: |
|
||||
This action pins various Proto generation packages to default versions and
|
||||
installs these dependencies in the workflow environment. It will also
|
||||
install the kfp-pipeline-spec. Whenever KFP project updates generation
|
||||
packages, the defaults here must be updated.
|
||||
inputs:
|
||||
protoc_version:
|
||||
required: true
|
||||
default: "31.1"
|
||||
description: "protoc version"
|
||||
protobuf_python_version:
|
||||
required: true
|
||||
default: "6.31.1"
|
||||
description: "protobuf python package version"
|
||||
setuptools_version:
|
||||
required: true
|
||||
default: "80.9.0"
|
||||
description: "setuptools python package version"
|
||||
wheels_version:
|
||||
required: true
|
||||
default: "0.42.0"
|
||||
description: "wheels python package version"
|
||||
generate_golang_proto:
|
||||
required: true
|
||||
default: "false"
|
||||
description: "optionally generate golang proto files"
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Install protoc
|
||||
shell: bash
|
||||
run: |
|
||||
PROTOC_ZIP=protoc-${{inputs.protoc_version}}-linux-x86_64.zip
|
||||
curl -sSL -O https://github.com/protocolbuffers/protobuf/releases/download/v${{inputs.protoc_version}}/$PROTOC_ZIP
|
||||
sudo unzip -o $PROTOC_ZIP -d /usr/local bin/protoc
|
||||
sudo unzip -o $PROTOC_ZIP -d /usr/local 'include/*'
|
||||
rm $PROTOC_ZIP
|
||||
protoc --version
|
||||
|
||||
- name: Install setuptools
|
||||
shell: bash
|
||||
run: |
|
||||
pip3 install setuptools==${{inputs.setuptools_version}}
|
||||
pip3 freeze
|
||||
|
||||
- name: Install Wheel
|
||||
shell: bash
|
||||
run: pip3 install wheel==${{inputs.wheels_version}}
|
||||
- name: Install protobuf
|
||||
shell: bash
|
||||
run: pip3 install protobuf==${{inputs.protobuf_python_version}}
|
||||
- name: Generate API proto files
|
||||
working-directory: ./api
|
||||
shell: bash
|
||||
run: make clean python
|
||||
- name: Install kfp-pipeline-spec from source
|
||||
shell: bash
|
||||
run: |
|
||||
pip install api/v2alpha1/python/dist/*.whl
|
||||
|
||||
- name: Generate kfp-pipeline-spec golang files
|
||||
if: ${{ inputs.generate_golang_proto == 'true' }}
|
||||
working-directory: ./api
|
||||
shell: bash
|
||||
run: |
|
||||
make golang
|
||||
|
||||
- name: Summary
|
||||
shell: bash
|
||||
run: |
|
||||
cat <<EOF
|
||||
Installed the following dependencies:
|
||||
Binaries:
|
||||
protoc: ${{ inputs.protoc_version }}
|
||||
Python Packages:
|
||||
setuptools: ${{ inputs.setuptools_version }}
|
||||
wheels: ${{ inputs.wheels_version }}
|
||||
protobuf: ${{ inputs.protobuf_python_version }}
|
||||
EOF
|
||||
|
|
@ -2,6 +2,7 @@
|
|||
|
||||
|
||||
**Checklist:**
|
||||
- [ ] You have [signed off your commits](https://www.kubeflow.org/docs/about/contributing/#sign-off-your-commits)
|
||||
- [ ] The title for your pull request (PR) should follow our title convention. [Learn more about the pull request title convention used in this repository](https://github.com/kubeflow/pipelines/blob/master/CONTRIBUTING.md#pull-request-title-convention).
|
||||
<!--
|
||||
PR titles examples:
|
||||
|
|
|
@ -0,0 +1,12 @@
|
|||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: ml-pipeline
|
||||
spec:
|
||||
template:
|
||||
spec:
|
||||
containers:
|
||||
- name: ml-pipeline-api-server
|
||||
env:
|
||||
- name: CACHEENABLED
|
||||
value: "false"
|
|
@ -0,0 +1,11 @@
|
|||
apiVersion: kustomize.config.k8s.io/v1beta1
|
||||
kind: Kustomization
|
||||
|
||||
resources:
|
||||
- ../no-proxy
|
||||
|
||||
patches:
|
||||
- path: cache-env.yaml
|
||||
target:
|
||||
kind: Deployment
|
||||
name: ml-pipeline
|
14
.github/resources/manifests/argo/overlays/kubernetes-native/apiserver-env.yaml
vendored
Normal file
14
.github/resources/manifests/argo/overlays/kubernetes-native/apiserver-env.yaml
vendored
Normal file
|
@ -0,0 +1,14 @@
|
|||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: ml-pipeline
|
||||
spec:
|
||||
template:
|
||||
spec:
|
||||
containers:
|
||||
- name: ml-pipeline-api-server
|
||||
env:
|
||||
- name: V2_DRIVER_IMAGE
|
||||
value: kind-registry:5000/driver
|
||||
- name: V2_LAUNCHER_IMAGE
|
||||
value: kind-registry:5000/launcher
|
19
.github/resources/manifests/argo/overlays/kubernetes-native/kustomization.yaml
vendored
Normal file
19
.github/resources/manifests/argo/overlays/kubernetes-native/kustomization.yaml
vendored
Normal file
|
@ -0,0 +1,19 @@
|
|||
apiVersion: kustomize.config.k8s.io/v1beta1
|
||||
kind: Kustomization
|
||||
|
||||
resources:
|
||||
- ../../../../../../manifests/kustomize/env/cert-manager/platform-agnostic-k8s-native
|
||||
|
||||
images:
|
||||
- name: ghcr.io/kubeflow/kfp-api-server
|
||||
newName: kind-registry:5000/apiserver
|
||||
newTag: latest
|
||||
- name: ghcr.io/kubeflow/kfp-persistence-agent
|
||||
newName: kind-registry:5000/persistenceagent
|
||||
newTag: latest
|
||||
- name: ghcr.io/kubeflow/kfp-scheduled-workflow-controller
|
||||
newName: kind-registry:5000/scheduledworkflow
|
||||
newTag: latest
|
||||
|
||||
patchesStrategicMerge:
|
||||
- apiserver-env.yaml
|
|
@ -0,0 +1,16 @@
|
|||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: ml-pipeline
|
||||
spec:
|
||||
template:
|
||||
spec:
|
||||
containers:
|
||||
- name: ml-pipeline-api-server
|
||||
env:
|
||||
- name: V2_DRIVER_IMAGE
|
||||
value: kind-registry:5000/driver
|
||||
- name: V2_LAUNCHER_IMAGE
|
||||
value: kind-registry:5000/launcher
|
||||
- name: LOG_LEVEL
|
||||
value: "debug"
|
|
@ -0,0 +1,20 @@
|
|||
apiVersion: kustomize.config.k8s.io/v1beta1
|
||||
kind: Kustomization
|
||||
|
||||
resources:
|
||||
- ../../../../../../manifests/kustomize/env/platform-agnostic
|
||||
|
||||
images:
|
||||
- name: ghcr.io/kubeflow/kfp-api-server
|
||||
newName: kind-registry:5000/apiserver
|
||||
newTag: latest
|
||||
- name: ghcr.io/kubeflow/kfp-persistence-agent
|
||||
newName: kind-registry:5000/persistenceagent
|
||||
newTag: latest
|
||||
- name: ghcr.io/kubeflow/kfp-scheduled-workflow-controller
|
||||
newName: kind-registry:5000/scheduledworkflow
|
||||
newTag: latest
|
||||
|
||||
patches:
|
||||
- path: apiserver-env.yaml
|
||||
- path: workflow-disable-logs-patch.yaml
|
20
.github/resources/manifests/argo/overlays/no-proxy/workflow-disable-logs-patch.yaml
vendored
Normal file
20
.github/resources/manifests/argo/overlays/no-proxy/workflow-disable-logs-patch.yaml
vendored
Normal file
|
@ -0,0 +1,20 @@
|
|||
apiVersion: v1
|
||||
kind: ConfigMap
|
||||
metadata:
|
||||
name: workflow-controller-configmap
|
||||
data:
|
||||
artifactRepository: |
|
||||
archiveLogs: false
|
||||
s3:
|
||||
endpoint: "minio-service.$(kfp-namespace):9000"
|
||||
bucket: "$(kfp-artifact-bucket-name)"
|
||||
keyFormat: "artifacts/{{workflow.name}}/{{workflow.creationTimestamp.Y}}/{{workflow.creationTimestamp.m}}/{{workflow.creationTimestamp.d}}/{{pod.name}}"
|
||||
insecure: true
|
||||
accessKeySecret:
|
||||
name: mlpipeline-minio-artifact
|
||||
key: accesskey
|
||||
secretKeySecret:
|
||||
name: mlpipeline-minio-artifact
|
||||
key: secretkey
|
||||
executor: |
|
||||
imagePullPolicy: IfNotPresent
|
|
@ -0,0 +1,11 @@
|
|||
apiVersion: kustomize.config.k8s.io/v1beta1
|
||||
kind: Kustomization
|
||||
|
||||
resources:
|
||||
- ../no-proxy
|
||||
|
||||
patches:
|
||||
- path: proxy-env.yaml
|
||||
target:
|
||||
kind: Deployment
|
||||
name: ml-pipeline
|
|
@ -0,0 +1,16 @@
|
|||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: ml-pipeline
|
||||
spec:
|
||||
template:
|
||||
spec:
|
||||
containers:
|
||||
- name: ml-pipeline-api-server
|
||||
env:
|
||||
- name: HTTP_PROXY
|
||||
value: "http://squid.squid.svc.cluster.local:3128"
|
||||
- name: HTTPS_PROXY
|
||||
value: "http://squid.squid.svc.cluster.local:3128"
|
||||
- name: NO_PROXY
|
||||
value: "localhost,127.0.0.1,.svc.cluster.local,kubernetes.default.svc,metadata-grpc-service,0,1,2,3,4,5,6,7,8,9"
|
|
@ -0,0 +1,64 @@
|
|||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2023 kubeflow.org
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# source: https://raw.githubusercontent.com/open-toolchain/commons/master/scripts/check_registry.sh
|
||||
|
||||
# Remove the x if you need no print out of each command
|
||||
set -e
|
||||
|
||||
REGISTRY="${REGISTRY:-kind-registry:5000}"
|
||||
echo "REGISTRY=$REGISTRY"
|
||||
TAG="${TAG:-latest}"
|
||||
EXIT_CODE=0
|
||||
|
||||
docker system prune -a -f
|
||||
|
||||
docker build --progress=plain -t "${REGISTRY}/apiserver:${TAG}" -f backend/Dockerfile . && docker push "${REGISTRY}/apiserver:${TAG}" || EXIT_CODE=$?
|
||||
if [[ $EXIT_CODE -ne 0 ]]
|
||||
then
|
||||
echo "Failed to build apiserver image."
|
||||
exit $EXIT_CODE
|
||||
fi
|
||||
|
||||
docker build --progress=plain -t "${REGISTRY}/persistenceagent:${TAG}" -f backend/Dockerfile.persistenceagent . && docker push "${REGISTRY}/persistenceagent:${TAG}" || EXIT_CODE=$?
|
||||
if [[ $EXIT_CODE -ne 0 ]]
|
||||
then
|
||||
echo "Failed to build persistenceagent image."
|
||||
exit $EXIT_CODE
|
||||
fi
|
||||
|
||||
docker build --progress=plain -t "${REGISTRY}/scheduledworkflow:${TAG}" -f backend/Dockerfile.scheduledworkflow . && docker push "${REGISTRY}/scheduledworkflow:${TAG}" || EXIT_CODE=$?
|
||||
if [[ $EXIT_CODE -ne 0 ]]
|
||||
then
|
||||
echo "Failed to build scheduledworkflow image."
|
||||
exit $EXIT_CODE
|
||||
fi
|
||||
|
||||
docker build --progress=plain -t "${REGISTRY}/driver:${TAG}" -f backend/Dockerfile.driver . && docker push "${REGISTRY}/driver:${TAG}" || EXIT_CODE=$?
|
||||
if [[ $EXIT_CODE -ne 0 ]]
|
||||
then
|
||||
echo "Failed to build driver image."
|
||||
exit $EXIT_CODE
|
||||
fi
|
||||
|
||||
docker build --progress=plain -t "${REGISTRY}/launcher:${TAG}" -f backend/Dockerfile.launcher . && docker push "${REGISTRY}/launcher:${TAG}" || EXIT_CODE=$?
|
||||
if [[ $EXIT_CODE -ne 0 ]]
|
||||
then
|
||||
echo "Failed to build launcher image."
|
||||
exit $EXIT_CODE
|
||||
fi
|
||||
|
||||
# clean up intermittent build caches to free up disk space
|
||||
docker system prune -a -f
|
|
@ -0,0 +1,65 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
NS=""
|
||||
OUTPUT_FILE="/tmp/tmp.log/tmp_pod_log.txt"
|
||||
|
||||
while [[ "$#" -gt 0 ]]; do
|
||||
case $1 in
|
||||
--ns) NS="$2"; shift ;;
|
||||
--output) OUTPUT_FILE="$2"; shift ;;
|
||||
*) echo "Unknown parameter passed: $1"; exit 1 ;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
mkdir -p /tmp/tmp.log
|
||||
|
||||
if [[ -z "$NS" ]]; then
|
||||
echo "Both --ns parameters are required."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
function check_namespace {
|
||||
if ! kubectl get namespace "$1" &>/dev/null; then
|
||||
echo "Namespace '$1' does not exist."
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function display_pod_info {
|
||||
local NAMESPACE=$1
|
||||
|
||||
kubectl get pods -n "${NAMESPACE}"
|
||||
|
||||
local POD_NAMES
|
||||
|
||||
POD_NAMES=$(kubectl get pods -n "${NAMESPACE}" -o custom-columns=":metadata.name" --no-headers)
|
||||
|
||||
if [[ -z "${POD_NAMES}" ]]; then
|
||||
echo "No pods found in namespace '${NAMESPACE}'." | tee -a "$OUTPUT_FILE"
|
||||
return
|
||||
fi
|
||||
|
||||
echo "Pod Information for Namespace: ${NAMESPACE}" > "$OUTPUT_FILE"
|
||||
|
||||
for POD_NAME in ${POD_NAMES}; do
|
||||
{
|
||||
echo "===== Pod: ${POD_NAME} in ${NAMESPACE} ====="
|
||||
echo "----- EVENTS -----"
|
||||
kubectl describe pod "${POD_NAME}" -n "${NAMESPACE}" | grep -A 100 Events || echo "No events found for pod ${POD_NAME}."
|
||||
|
||||
echo "----- LOGS -----"
|
||||
kubectl logs "${POD_NAME}" -n "${NAMESPACE}" || echo "No logs found for pod ${POD_NAME}."
|
||||
|
||||
echo "==========================="
|
||||
echo ""
|
||||
} | tee -a "$OUTPUT_FILE"
|
||||
done
|
||||
|
||||
echo "Pod information stored in $OUTPUT_FILE"
|
||||
}
|
||||
|
||||
check_namespace "$NS"
|
||||
display_pod_info "$NS"
|
|
@ -0,0 +1,106 @@
|
|||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2023 kubeflow.org
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# Remove the x if you need no print out of each command
|
||||
set -e
|
||||
|
||||
REGISTRY="${REGISTRY:-kind-registry:5000}"
|
||||
EXIT_CODE=0
|
||||
|
||||
C_DIR="${BASH_SOURCE%/*}"
|
||||
if [[ ! -d "$C_DIR" ]]; then C_DIR="$PWD"; fi
|
||||
source "${C_DIR}/helper-functions.sh"
|
||||
|
||||
TEST_MANIFESTS=".github/resources/manifests/argo"
|
||||
PIPELINES_STORE="database"
|
||||
USE_PROXY=false
|
||||
CACHE_DISABLED=false
|
||||
|
||||
# Loop over script arguments passed. This uses a single switch-case
|
||||
# block with default value in case we want to make alternative deployments
|
||||
# in the future.
|
||||
while [ "$#" -gt 0 ]; do
|
||||
case "$1" in
|
||||
--deploy-k8s-native)
|
||||
PIPELINES_STORE="kubernetes"
|
||||
shift
|
||||
;;
|
||||
--proxy)
|
||||
USE_PROXY=true
|
||||
shift
|
||||
;;
|
||||
--cache-disabled)
|
||||
CACHE_DISABLED=true
|
||||
shift
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [ "${USE_PROXY}" == "true" && "${PIPELINES_STORE}" == "kubernetes" ]; then
|
||||
echo "ERROR: Kubernetes Pipeline store cannot be deployed with proxy support."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
kubectl apply -k "manifests/kustomize/cluster-scoped-resources/"
|
||||
kubectl wait crd/applications.app.k8s.io --for condition=established --timeout=60s || EXIT_CODE=$?
|
||||
if [[ $EXIT_CODE -ne 0 ]]
|
||||
then
|
||||
echo "Failed to deploy cluster-scoped resources."
|
||||
exit $EXIT_CODE
|
||||
fi
|
||||
|
||||
# If pipelines store is set to 'kubernetes', cert-manager must be deployed
|
||||
if [ "${PIPELINES_STORE}" == "kubernetes" ]; then
|
||||
#Install cert-manager
|
||||
make -C ./backend install-cert-manager || EXIT_CODE=$?
|
||||
if [[ $EXIT_CODE -ne 0 ]]
|
||||
then
|
||||
echo "Failed to deploy cert-manager."
|
||||
exit $EXIT_CODE
|
||||
fi
|
||||
fi
|
||||
|
||||
# Manifests will be deployed according to the flag provided
|
||||
if $CACHE_DISABLED; then
|
||||
TEST_MANIFESTS="${TEST_MANIFESTS}/overlays/cache-disabled"
|
||||
elif $USE_PROXY; then
|
||||
TEST_MANIFESTS="${TEST_MANIFESTS}/overlays/proxy"
|
||||
elif [ "${PIPELINES_STORE}" == "kubernetes" ]; then
|
||||
TEST_MANIFESTS="${TEST_MANIFESTS}/overlays/kubernetes-native"
|
||||
else
|
||||
TEST_MANIFESTS="${TEST_MANIFESTS}/overlays/no-proxy"
|
||||
fi
|
||||
|
||||
echo "Deploying ${TEST_MANIFESTS}..."
|
||||
|
||||
kubectl apply -k "${TEST_MANIFESTS}" || EXIT_CODE=$?
|
||||
if [[ $EXIT_CODE -ne 0 ]]
|
||||
then
|
||||
echo "Deploy unsuccessful. Failure applying ${TEST_MANIFESTS}."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check if all pods are running - (10 minutes)
|
||||
wait_for_pods || EXIT_CODE=$?
|
||||
if [[ $EXIT_CODE -ne 0 ]]
|
||||
then
|
||||
echo "Deploy unsuccessful. Not all pods running."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
collect_artifacts kubeflow
|
||||
|
||||
echo "Finished KFP deployment."
|
|
@ -24,6 +24,7 @@ LOCAL_PORT=$3
|
|||
REMOTE_PORT=$4
|
||||
|
||||
POD_NAME=$(kubectl get pods -n "$KUBEFLOW_NS" -l "app=$APP_NAME" -o jsonpath='{.items[0].metadata.name}')
|
||||
echo "POD_NAME=$POD_NAME"
|
||||
|
||||
if [ $QUIET -eq 1 ]; then
|
||||
kubectl port-forward -n "$KUBEFLOW_NS" "$POD_NAME" "$LOCAL_PORT:$REMOTE_PORT" > /dev/null 2>&1 &
|
|
@ -56,57 +56,9 @@ wait_for_namespace () {
|
|||
}
|
||||
|
||||
wait_for_pods () {
|
||||
if [[ $# -ne 3 ]]
|
||||
then
|
||||
echo "Usage: wait_for_pods namespace max_retries sleep_time"
|
||||
return 1
|
||||
fi
|
||||
|
||||
local namespace=$1
|
||||
local max_retries=$2
|
||||
local sleep_time=$3
|
||||
|
||||
local i=0
|
||||
|
||||
while [[ $i -lt $max_retries ]]
|
||||
do
|
||||
local pods
|
||||
local statuses
|
||||
local num_pods
|
||||
local num_running
|
||||
pods=$(kubectl get pod -n "$namespace")
|
||||
# echo "$pods"
|
||||
# kubectl get pvc -n "$namespace"
|
||||
|
||||
if [[ -z $pods ]]
|
||||
then
|
||||
echo "no pod is up yet"
|
||||
else
|
||||
# Using quotations around variables to keep column format in echo
|
||||
# Remove 1st line (header line) -> trim whitespace -> cut statuses column (3rd column)
|
||||
# Might be overkill to parse down to specific columns :).
|
||||
statuses=$(echo "$pods" | tail -n +2 | tr -s ' ' | cut -d ' ' -f 3)
|
||||
num_pods=$(echo "$statuses" | wc -l | xargs)
|
||||
num_running=$(echo "$statuses" | grep -ow "Running\|Completed" | wc -l | xargs)
|
||||
|
||||
local msg="${num_running}/${num_pods} pods running in \"${namespace}\"."
|
||||
|
||||
if [[ $num_running -ne $num_pods ]]
|
||||
then
|
||||
# for debugging
|
||||
# kubectl get pod -n "$namespace" | grep '0/1' | awk '{print $1}' | xargs kubectl describe pod -n "$namespace"
|
||||
echo "$msg Checking again in ${sleep_time}s."
|
||||
else
|
||||
echo "$msg"
|
||||
return 0
|
||||
fi
|
||||
fi
|
||||
|
||||
sleep "$sleep_time"
|
||||
i=$((i+1))
|
||||
done
|
||||
|
||||
return 1
|
||||
C_DIR="${BASH_SOURCE%/*}"
|
||||
pip install -r "${C_DIR}"/kfp-readiness/requirements.txt
|
||||
python "${C_DIR}"/kfp-readiness/wait_for_pods.py
|
||||
}
|
||||
|
||||
deploy_with_retries () {
|
|
@ -0,0 +1,2 @@
|
|||
kubernetes==30.1.0
|
||||
urllib3==2.5.0
|
|
@ -0,0 +1,117 @@
|
|||
import logging
|
||||
import time
|
||||
import urllib3
|
||||
from kubernetes import client, config
|
||||
import subprocess
|
||||
|
||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||
|
||||
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
|
||||
|
||||
namespace = 'kubeflow'
|
||||
|
||||
config.load_kube_config()
|
||||
v1 = client.CoreV1Api()
|
||||
|
||||
def log_pods():
|
||||
pods = v1.list_namespaced_pod(namespace=namespace)
|
||||
|
||||
for pod in pods.items:
|
||||
try:
|
||||
logging.info(
|
||||
f"---- Pod {namespace}/{pod.metadata.name} logs ----\n"
|
||||
+ v1.read_namespaced_pod_log(pod.metadata.name, namespace)
|
||||
)
|
||||
except client.exceptions.ApiException:
|
||||
continue
|
||||
|
||||
def get_pod_statuses():
|
||||
pods = v1.list_namespaced_pod(namespace=namespace)
|
||||
statuses = {}
|
||||
for pod in pods.items:
|
||||
pod_name = pod.metadata.name
|
||||
pod_status = pod.status.phase
|
||||
container_statuses = pod.status.container_statuses or []
|
||||
ready = 0
|
||||
total = 0
|
||||
waiting_messages = []
|
||||
for status in container_statuses:
|
||||
total += 1
|
||||
if status.ready:
|
||||
ready += 1
|
||||
if status.state.waiting is not None:
|
||||
if status.state.waiting.message is not None:
|
||||
waiting_messages.append(f'Waiting on Container: {status.name} - {status.state.waiting.reason}: {status.state.waiting.message}')
|
||||
else:
|
||||
waiting_messages.append(f'Waiting on Container: {status.name} - {status.state.waiting.reason}')
|
||||
statuses[pod_name] = (pod_status, ready, total, waiting_messages)
|
||||
return statuses
|
||||
|
||||
|
||||
def all_pods_ready(statuses):
|
||||
return all(pod_status == 'Running' and ready == total
|
||||
for pod_status, ready, total, _ in statuses.values())
|
||||
|
||||
|
||||
def print_get_pods():
|
||||
try:
|
||||
result = subprocess.run(
|
||||
['kubectl', 'get', 'pods', '-n', 'kubeflow'],
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
text=True,
|
||||
check=True
|
||||
|
||||
)
|
||||
return result.stdout
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(f"An error occurred while running kubectl get pods: {e.stderr}")
|
||||
|
||||
|
||||
def check_pods(calm_time=10, timeout=600, retries_after_ready=5):
|
||||
start_time = time.time()
|
||||
stable_count = 0
|
||||
previous_statuses = {}
|
||||
|
||||
while time.time() - start_time < timeout:
|
||||
current_statuses = get_pod_statuses()
|
||||
|
||||
logging.info("Checking pod statuses...")
|
||||
for pod_name, (pod_status, ready, total, waiting_messages) in current_statuses.items():
|
||||
logging.info(f"Pod {pod_name} - Status: {pod_status}, Ready: {ready}/{total}")
|
||||
for waiting_msg in waiting_messages:
|
||||
logging.info(waiting_msg)
|
||||
|
||||
if current_statuses == previous_statuses:
|
||||
if all_pods_ready(current_statuses):
|
||||
stable_count += 1
|
||||
if stable_count >= retries_after_ready:
|
||||
logging.info("All pods are calm and fully ready.")
|
||||
break
|
||||
else:
|
||||
logging.info(
|
||||
f"Pods are calm but have only been stable for {stable_count}/{retries_after_ready} retries.")
|
||||
else:
|
||||
stable_count = 0
|
||||
else:
|
||||
stable_count = 0
|
||||
|
||||
previous_statuses = current_statuses
|
||||
pods = print_get_pods()
|
||||
logging.info(f"Pods are still stabilizing. Retrying in {calm_time} seconds...\n{pods}")
|
||||
time.sleep(calm_time)
|
||||
else:
|
||||
log_pods()
|
||||
|
||||
raise Exception("Pods did not stabilize within the timeout period.")
|
||||
|
||||
logging.info("Final pod statuses:")
|
||||
for pod_name, (pod_status, ready, total, _) in previous_statuses.items():
|
||||
if pod_status == 'Running' and ready == total:
|
||||
logging.info(f"Pod {pod_name} is fully ready ({ready}/{total})")
|
||||
else:
|
||||
logging.info(f"Pod {pod_name} is not ready (Status: {pod_status}, Ready: {ready}/{total})")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
check_pods()
|
|
@ -0,0 +1,10 @@
|
|||
FROM quay.io/fedora/fedora:41
|
||||
|
||||
RUN dnf install -y squid && \
|
||||
dnf clean all
|
||||
|
||||
COPY squid.conf /etc/squid/squid.conf
|
||||
|
||||
EXPOSE 3128
|
||||
|
||||
CMD ["squid", "-N", "-d", "1"]
|
|
@ -0,0 +1,16 @@
|
|||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
C_DIR="${BASH_SOURCE%/*}"
|
||||
NAMESPACE="squid"
|
||||
|
||||
docker build --progress=plain -t "registry.domain.local/squid:test" -f ${C_DIR}/Containerfile ${C_DIR}
|
||||
kind --name kfp load docker-image registry.domain.local/squid:test
|
||||
|
||||
kubectl apply -k ${C_DIR}/manifests
|
||||
|
||||
if ! kubectl -n ${NAMESPACE} wait --for=condition=available deployment/squid --timeout=60s; then
|
||||
echo "Timeout occurred while waiting for the Squid deployment."
|
||||
exit 1
|
||||
fi
|
|
@ -0,0 +1,30 @@
|
|||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: squid
|
||||
namespace: squid
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: squid
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: squid
|
||||
spec:
|
||||
containers:
|
||||
- name: squid
|
||||
image: registry.domain.local/squid:test
|
||||
ports:
|
||||
- containerPort: 3128
|
||||
volumeMounts:
|
||||
- name: squid-cache
|
||||
mountPath: /var/cache/squid
|
||||
- name: squid-log
|
||||
mountPath: /var/log/squid
|
||||
volumes:
|
||||
- name: squid-cache
|
||||
emptyDir: { }
|
||||
- name: squid-log
|
||||
emptyDir: { }
|
|
@ -0,0 +1,4 @@
|
|||
resources:
|
||||
- deployment.yaml
|
||||
- service.yaml
|
||||
- namespace.yaml
|
|
@ -0,0 +1,4 @@
|
|||
apiVersion: v1
|
||||
kind: Namespace
|
||||
metadata:
|
||||
name: squid
|
|
@ -0,0 +1,12 @@
|
|||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: squid
|
||||
namespace: squid
|
||||
spec:
|
||||
selector:
|
||||
app: squid
|
||||
ports:
|
||||
- protocol: TCP
|
||||
port: 3128
|
||||
targetPort: 3128
|
|
@ -0,0 +1,8 @@
|
|||
# Define an access control list (ACL) for all source IP addresses
|
||||
acl all src all
|
||||
|
||||
# Allow HTTP access from all sources
|
||||
http_access allow all
|
||||
|
||||
# Define the port Squid will listen on
|
||||
http_port 3128
|
|
@ -0,0 +1,84 @@
|
|||
# This workflow adds the 'ci-passed' label to a pull request once the 'CI Check' workflow completes successfully.
|
||||
# Resets the 'ci-passed' label status when a pull request is synchronized or reopened,
|
||||
# indicating that changes have been pushed and CI needs to rerun.
|
||||
name: Add CI Passed Label
|
||||
|
||||
on:
|
||||
workflow_run:
|
||||
workflows: ["CI Check"]
|
||||
types:
|
||||
- completed
|
||||
|
||||
permissions:
|
||||
pull-requests: write
|
||||
checks: read
|
||||
actions: read
|
||||
|
||||
jobs:
|
||||
fetch_data:
|
||||
name: Fetch workflow payload
|
||||
runs-on: ubuntu-latest
|
||||
if: >
|
||||
github.event.workflow_run.event == 'pull_request' &&
|
||||
github.event.workflow_run.conclusion == 'success'
|
||||
outputs:
|
||||
pr_number: ${{ steps.extract.outputs.pr_number }}
|
||||
event_action: ${{ steps.extract.outputs.event_action }}
|
||||
steps:
|
||||
- name: 'Download artifact'
|
||||
uses: actions/github-script@v3.1.0
|
||||
with:
|
||||
script: |
|
||||
var artifacts = await github.actions.listWorkflowRunArtifacts({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
run_id: ${{github.event.workflow_run.id}},
|
||||
});
|
||||
var matchArtifact = artifacts.data.artifacts.filter((artifact) => {
|
||||
return artifact.name == "pr"
|
||||
})[0];
|
||||
var download = await github.actions.downloadArtifact({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
artifact_id: matchArtifact.id,
|
||||
archive_format: 'zip',
|
||||
});
|
||||
var fs = require('fs');
|
||||
fs.writeFileSync('${{github.workspace}}/pr.zip', Buffer.from(download.data));
|
||||
|
||||
- name: Unzip artifact
|
||||
run: unzip pr.zip
|
||||
|
||||
- name: Extract PR information
|
||||
id: extract
|
||||
run: |
|
||||
pr_number=$(cat ./pr_number)
|
||||
event_action=$(cat ./event_action)
|
||||
echo "pr_number=${pr_number}" >> $GITHUB_OUTPUT
|
||||
echo "event_action=${event_action}" >> $GITHUB_OUTPUT
|
||||
|
||||
reset_ci_passed_label:
|
||||
name: Reset 'ci-passed' label on PR Synchronization
|
||||
runs-on: ubuntu-latest
|
||||
needs: fetch_data
|
||||
steps:
|
||||
- name: Check and reset label
|
||||
run: |
|
||||
if [[ "${{ needs.fetch_data.outputs.event_action }}" == "synchronize" || "${{ needs.fetch_data.outputs.event_action }}" == "reopened" ]]; then
|
||||
echo "Resetting 'ci-passed' label as changes were pushed (event: ${{ needs.fetch_data.outputs.event_action }})."
|
||||
gh pr edit ${{ needs.fetch_data.outputs.pr_number }} --remove-label "ci-passed" --repo $GITHUB_REPOSITORY || echo "Label not present"
|
||||
fi
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
add_ci_passed_label:
|
||||
name: Add 'ci-passed' label
|
||||
runs-on: ubuntu-latest
|
||||
needs: [fetch_data, reset_ci_passed_label]
|
||||
steps:
|
||||
- name: Add 'ci-passed' label
|
||||
run: |
|
||||
echo "Adding 'ci-passed' label to PR #${{ needs.fetch_data.outputs.pr_number }}"
|
||||
gh pr edit ${{ needs.fetch_data.outputs.pr_number }} --add-label "ci-passed" --repo $GITHUB_REPOSITORY
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
@ -0,0 +1,28 @@
|
|||
name: KFP backend visualization tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ master ]
|
||||
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/backend-visualization.yml'
|
||||
- 'backend/src/apiserver/visualization/**'
|
||||
- 'test/presubmit-backend-visualization.sh'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
backend-visualization-test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.9'
|
||||
|
||||
- name: Run tests
|
||||
run: ./test/presubmit-backend-visualization.sh
|
|
@ -1,81 +0,0 @@
|
|||
name: KFP Tekton backend unit tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
|
||||
# Run tests for any PRs which change the backend code
|
||||
pull_request:
|
||||
paths:
|
||||
- 'go.mod'
|
||||
- 'backend/**'
|
||||
- 'scripts/deploy/github/**'
|
||||
- 'manifests/kustomize/**'
|
||||
|
||||
env:
|
||||
GITHUB_ACTION: "true"
|
||||
SETUPTOOLS_USE_DISTUTILS: "stdlib"
|
||||
|
||||
jobs:
|
||||
run-go-unittests:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v4
|
||||
with:
|
||||
go-version: 1.20.x
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
- name: "run go unit tests"
|
||||
run: go test -v -cover ./backend/...
|
||||
backend-integration:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
- name: Create k8s Kind Cluster
|
||||
uses: container-tools/kind-action@v2
|
||||
with:
|
||||
cluster_name: kfp-tekton
|
||||
kubectl_version: v1.29.2
|
||||
version: v0.22.0
|
||||
node_image: kindest/node:v1.29.2
|
||||
- name: build images
|
||||
run: ./scripts/deploy/github/build-images.sh
|
||||
- name: Set up Python 3.10
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.10'
|
||||
- name: "deploy kfp-tekton"
|
||||
run: ./scripts/deploy/github/deploy-kfp.sh
|
||||
- name: Install sdk
|
||||
run: |
|
||||
python3 -m venv .venv
|
||||
. .venv/bin/activate
|
||||
pip install -e sdk/python
|
||||
- name: "flip coin test"
|
||||
run: |
|
||||
. .venv/bin/activate
|
||||
TEST_SCRIPT="test-flip-coin.sh" ./scripts/deploy/github/e2e-test.sh
|
||||
- name: "static loop test"
|
||||
run: |
|
||||
. .venv/bin/activate
|
||||
TEST_SCRIPT="test-static-loop.sh" ./scripts/deploy/github/e2e-test.sh
|
||||
- name: "dynamic loop test"
|
||||
run: |
|
||||
. .venv/bin/activate
|
||||
TEST_SCRIPT="test-dynamic-loop.sh" ./scripts/deploy/github/e2e-test.sh
|
||||
- name: "use env"
|
||||
run: |
|
||||
. .venv/bin/activate
|
||||
TEST_SCRIPT="test-env.sh" ./scripts/deploy/github/e2e-test.sh
|
||||
- name: "use volume"
|
||||
run: |
|
||||
. .venv/bin/activate
|
||||
TEST_SCRIPT="test-volume.sh" ./scripts/deploy/github/e2e-test.sh
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: kfp-tekton-backend-artifacts
|
||||
path: /tmp/tmp.*/*
|
|
@ -0,0 +1,52 @@
|
|||
# This workflow checks if all CI checks have passed by polling every 5 minutes for a total of 8 attempts.
|
||||
name: CI Check
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [opened, synchronize, reopened, labeled]
|
||||
|
||||
jobs:
|
||||
check_ci_status:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
checks: read
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Check out the repository
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Check for 'needs-ok-to-test' and 'ok-to-test' labels
|
||||
id: label_check
|
||||
run: |
|
||||
LABELS=$(gh pr view ${{ github.event.pull_request.number }} --json labels --jq '.labels[].name')
|
||||
if echo "$LABELS" | grep -q 'needs-ok-to-test'; then
|
||||
echo "Label 'needs-ok-to-test' found. Skipping the workflow."
|
||||
exit 0
|
||||
fi
|
||||
if echo "$LABELS" | grep -q 'ok-to-test'; then
|
||||
echo "Label 'ok-to-test' found. Continuing the workflow."
|
||||
else
|
||||
echo "Label 'ok-to-test' not found. Skipping the workflow."
|
||||
exit 0
|
||||
fi
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Check if all CI checks passed
|
||||
uses: wechuli/allcheckspassed@0b68b3b7d92e595bcbdea0c860d05605720cf479
|
||||
with:
|
||||
delay: '5'
|
||||
retries: '8'
|
||||
polling_interval: '5'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Save PR payload
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p ./pr
|
||||
echo ${{ github.event.pull_request.number }} >> ./pr/pr_number
|
||||
echo ${{ github.event.action }} >> ./pr/event_action
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: pr
|
||||
path: pr/
|
|
@ -1,81 +0,0 @@
|
|||
name: KFP e2e tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/e2e-test.yaml'
|
||||
- 'scripts/deploy/github/**'
|
||||
- 'go.mod'
|
||||
- 'go.sum'
|
||||
- 'backend/**'
|
||||
- 'frontend/**'
|
||||
- 'proxy/**'
|
||||
- 'manifests/kustomize/**'
|
||||
- 'test/**'
|
||||
|
||||
jobs:
|
||||
integration-test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Create k8s Kind Cluster
|
||||
uses: container-tools/kind-action@v2
|
||||
with:
|
||||
cluster_name: kfp
|
||||
kubectl_version: v1.29.2
|
||||
version: v0.22.0
|
||||
node_image: kindest/node:v1.29.2
|
||||
|
||||
- name: Build images
|
||||
run: ./scripts/deploy/github/build-images.sh
|
||||
|
||||
- name: Deploy KFP
|
||||
run: ./scripts/deploy/github/deploy-kfp.sh
|
||||
|
||||
- name: Forward API port
|
||||
run: ./scripts/deploy/github/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888
|
||||
|
||||
- name: Initialization tests v1
|
||||
working-directory: ./backend/test/initialization
|
||||
run: go test -v ./... -namespace kubeflow -args -runIntegrationTests=true
|
||||
|
||||
- name: Initialization tests v2
|
||||
working-directory: ./backend/test/v2/initialization
|
||||
run: go test -v ./... -namespace kubeflow -args -runIntegrationTests=true
|
||||
|
||||
- name: API integration tests v1
|
||||
working-directory: ./backend/test/integration
|
||||
run: go test -v ./... -namespace ${NAMESPACE} -args -runIntegrationTests=true
|
||||
|
||||
- name: API integration tests v2
|
||||
working-directory: ./backend/test/v2/integration
|
||||
run: go test -v ./... -namespace ${NAMESPACE} -args -runIntegrationTests=true
|
||||
|
||||
- name: Forward Frontend port
|
||||
run: ./scripts/deploy/github/forward-port.sh "kubeflow" "ml-pipeline-ui" 3000 3000
|
||||
|
||||
- name: Build frontend integration tests image
|
||||
working-directory: ./test/frontend-integration-test
|
||||
run: docker build . -t kfp-frontend-integration-test:local
|
||||
|
||||
- name: Frontend integration tests
|
||||
run: docker run --net=host kfp-frontend-integration-test:local --remote-run true
|
||||
|
||||
- name: Basic sample tests - sequential
|
||||
run: pip3 install -r ./test/sample-test/requirements.txt && pip3 install kfp~=2.0 && python3 ./test/sample-test/sample_test_launcher.py sample_test run_test --namespace kubeflow --test-name sequential --results-gcs-dir output
|
||||
|
||||
# Disabled while https://github.com/kubeflow/pipelines/issues/10885 is not resolved
|
||||
# - name: Basic sample tests - exit_handler
|
||||
# run: pip3 install -r ./test/sample-test/requirements.txt && pip3 install kfp~=2.0 && python3 ./test/sample-test/sample_test_launcher.py sample_test run_test --namespace kubeflow --test-name exit_handler --results-gcs-dir output
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: kfp-backend-artifacts
|
||||
path: /tmp/tmp.*/*
|
|
@ -0,0 +1,474 @@
|
|||
name: KFP e2e tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/e2e-test.yml'
|
||||
- '.github/resources/**'
|
||||
- 'api/**'
|
||||
- 'go.mod'
|
||||
- 'go.sum'
|
||||
- 'backend/**'
|
||||
- 'frontend/**'
|
||||
- 'proxy/**'
|
||||
- 'manifests/kustomize/**'
|
||||
- 'test/**'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
initialization-tests-v1:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
k8s_version: [ "v1.29.2", "v1.31.0" ]
|
||||
name: Initialization tests v1 - K8s ${{ matrix.k8s_version }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.9
|
||||
|
||||
- name: Create KFP cluster
|
||||
id: create-kfp-cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward API port
|
||||
id: forward-api-port
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888
|
||||
continue-on-error: true
|
||||
|
||||
- name: Initialization tests v1
|
||||
id: tests
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
working-directory: ./backend/test/initialization
|
||||
run: go test -v ./... -namespace kubeflow -args -runIntegrationTests=true
|
||||
env:
|
||||
PULL_NUMBER: ${{ github.event.pull_request.number }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.tests.outcome != 'success' }}
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: kfp-initialization-tests-v1-artifacts-k8s-${{ matrix.k8s_version }}
|
||||
path: /tmp/tmp*/*
|
||||
|
||||
initialization-tests-v2:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
k8s_version: [ "v1.29.2", "v1.31.0" ]
|
||||
name: Initialization tests v2 - K8s ${{ matrix.k8s_version }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.9
|
||||
|
||||
- name: Create KFP cluster
|
||||
id: create-kfp-cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward API port
|
||||
id: forward-api-port
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888
|
||||
continue-on-error: true
|
||||
|
||||
- name: Initialization tests v2
|
||||
id: tests
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
working-directory: ./backend/test/v2/initialization
|
||||
run: go test -v ./... -namespace kubeflow -args -runIntegrationTests=true
|
||||
env:
|
||||
PULL_NUMBER: ${{ github.event.pull_request.number }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.tests.outcome != 'success' }}
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: kfp-initialization-tests-v2-artifacts-k8s-${{ matrix.k8s_version }}
|
||||
path: /tmp/tmp*/*
|
||||
|
||||
api-integration-tests-v1:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
k8s_version: [ "v1.29.2", "v1.31.0" ]
|
||||
name: API integration tests v1 - K8s ${{ matrix.k8s_version }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.9
|
||||
|
||||
- name: Create KFP cluster
|
||||
id: create-kfp-cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward API port
|
||||
id: forward-api-port
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward MySQL port
|
||||
id: forward-mysql-port
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "mysql" 3306 3306
|
||||
continue-on-error: true
|
||||
|
||||
- name: API integration tests v1
|
||||
id: tests
|
||||
if: ${{ steps.forward-mysql-port.outcome == 'success' }}
|
||||
working-directory: ./backend/test/integration
|
||||
run: go test -v ./... -namespace kubeflow -args -runIntegrationTests=true
|
||||
env:
|
||||
PULL_NUMBER: ${{ github.event.pull_request.number }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.forward-mysql-port.outcome != 'success' || steps.tests.outcome != 'success' }}
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: kfp-api-integration-tests-v1-artifacts-k8s-${{ matrix.k8s_version }}
|
||||
path: /tmp/tmp*/*
|
||||
|
||||
api-integration-tests-v2:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
pipeline_store: [ "database", "kubernetes" ]
|
||||
k8s_version: [ "v1.29.2", "v1.31.0" ]
|
||||
name: API integration tests v2 - K8s with ${{ matrix.pipeline_store }} ${{ matrix.k8s_version }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.9
|
||||
|
||||
- name: Create KFP cluster
|
||||
id: create-kfp-cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
pipeline_store: ${{ matrix.pipeline_store }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward API port
|
||||
id: forward-api-port
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward MLMD port
|
||||
id: forward-mlmd-port
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
run: kubectl -n kubeflow port-forward svc/metadata-grpc-service 8080:8080 &
|
||||
continue-on-error: true
|
||||
|
||||
- name: API integration tests v2
|
||||
id: tests
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
working-directory: ./backend/test/v2/integration
|
||||
run: go test -v ./... -namespace kubeflow -args -runIntegrationTests=true
|
||||
env:
|
||||
PULL_NUMBER: ${{ github.event.pull_request.number }}
|
||||
PIPELINE_STORE: ${{ matrix.pipeline_store }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.forward-mlmd-port.outcome != 'success' || steps.tests.outcome != 'success' }}
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: kfp-api-integration-tests-v2-artifacts-k8s-${{ matrix.k8s_version }}-${{ matrix.pipeline_store }}
|
||||
path: /tmp/tmp*/*
|
||||
|
||||
api-integration-tests-v2-with-proxy:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
k8s_version: [ "v1.31.0" ]
|
||||
name: API integration tests v2 with proxy - K8s ${{ matrix.k8s_version }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.9
|
||||
|
||||
- name: Create KFP cluster
|
||||
id: create-kfp-cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
proxy: 'true'
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward API port
|
||||
id: forward-api-port
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward MLMD port
|
||||
id: forward-mlmd-port
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
run: kubectl -n kubeflow port-forward svc/metadata-grpc-service 8080:8080 &
|
||||
continue-on-error: true
|
||||
|
||||
- name: API integration tests v2
|
||||
id: tests
|
||||
if: ${{ steps.forward-mlmd-port.outcome == 'success' }}
|
||||
working-directory: ./backend/test/v2/integration
|
||||
run: go test -v ./... -namespace kubeflow -args -runIntegrationTests=true -useProxy=true
|
||||
env:
|
||||
PULL_NUMBER: ${{ github.event.pull_request.number }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.tests.outcome != 'success' }}
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns squid --output /tmp/tmp_squid_pod_log.txt
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: kfp-api-integration-tests-v2-with-proxy-artifacts-k8s-${{ matrix.k8s_version }}
|
||||
path: /tmp/tmp*/*
|
||||
|
||||
api-integration-tests-v2-with-cache-disabled:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
k8s_version: [ "v1.31.0" ]
|
||||
name: API integration tests v2 with cache disabled - K8s ${{ matrix.k8s_version }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.9
|
||||
|
||||
- name: Create KFP cluster
|
||||
id: create-kfp-cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
cache_enabled: 'false'
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward API port
|
||||
id: forward-api-port
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward MLMD port
|
||||
id: forward-mlmd-port
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
run: kubectl -n kubeflow port-forward svc/metadata-grpc-service 8080:8080 &
|
||||
continue-on-error: true
|
||||
|
||||
- name: API integration tests v2
|
||||
id: tests
|
||||
if: ${{ steps.forward-mlmd-port.outcome == 'success' }}
|
||||
working-directory: ./backend/test/v2/integration
|
||||
run: go test -v ./... -namespace kubeflow -args -runIntegrationTests=true -cacheEnabled=false
|
||||
env:
|
||||
PULL_NUMBER: ${{ github.event.pull_request.number }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.tests.outcome != 'success' }}
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: kfp-api-integration-tests-v2-with-cache-disabled-artifacts-k8s-${{ matrix.k8s_version }}
|
||||
path: /tmp/tmp*/*
|
||||
|
||||
frontend-integration-test:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
k8s_version: [ "v1.29.2", "v1.31.0" ]
|
||||
name: Frontend Integration Tests - K8s ${{ matrix.k8s_version }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.9
|
||||
|
||||
- name: Create KFP cluster
|
||||
id: create-kfp-cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward API port
|
||||
id: forward-api-port
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward Frontend port
|
||||
id: forward-frontend-port
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline-ui" 3000 3000
|
||||
continue-on-error: true
|
||||
|
||||
- name: Build frontend integration tests image
|
||||
working-directory: ./test/frontend-integration-test
|
||||
run: docker build . -t kfp-frontend-integration-test:local
|
||||
|
||||
- name: Frontend integration tests
|
||||
id: tests
|
||||
if: ${{ steps.forward-frontend-port.outcome == 'success' }}
|
||||
run: docker run --net=host kfp-frontend-integration-test:local --remote-run true
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.forward-frontend-port.outcome != 'success' || steps.tests.outcome != 'success' }}
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: kfp-frontend-integration-test-artifacts-k8s-${{ matrix.k8s_version }}
|
||||
path: /tmp/tmp*/*
|
||||
|
||||
basic-sample-tests:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
k8s_version: [ "v1.29.2", "v1.31.0" ]
|
||||
name: Basic Sample Tests - K8s ${{ matrix.k8s_version }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.9
|
||||
|
||||
- name: Create KFP cluster
|
||||
id: create-kfp-cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward API port
|
||||
id: forward-api-port
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888
|
||||
continue-on-error: true
|
||||
|
||||
- name: Install protobuf dependencies & kfp-pipeline-spec
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
id: install-protobuf-deps
|
||||
uses: ./.github/actions/protobuf
|
||||
|
||||
- name: Install kfp & kfp-kubernetes from source
|
||||
if: ${{ steps.install-protobuf-deps.outcome == 'success' }}
|
||||
id: install-kfp-k8s-deps
|
||||
uses: ./.github/actions/kfp-k8s
|
||||
|
||||
- name: Install prerequisites
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: pip3 install -r ./test/sample-test/requirements.txt
|
||||
|
||||
- name: Basic sample tests - sequential
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
id: sequential-test
|
||||
run: python3 ./test/sample-test/sample_test_launcher.py sample_test run_test --namespace kubeflow --test-name sequential --results-gcs-dir output
|
||||
|
||||
- name: Basic sample tests - exit_handler
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
id: sample-test
|
||||
run: python3 ./test/sample-test/sample_test_launcher.py sample_test run_test --namespace kubeflow --test-name exit_handler --expected-result failed --results-gcs-dir output
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.sequential-test.outcome != 'success' || steps.sample-test.outcome != 'success'}}
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: kfp-e2e-tests-artifacts-k8s-${{ matrix.k8s_version }}
|
||||
path: /tmp/tmp*/*
|
|
@ -0,0 +1,38 @@
|
|||
name: Frontend Tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
paths:
|
||||
- 'frontend/**'
|
||||
- '.github/workflows/frontend.yml'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
- 'backend/src/apiserver/config/sample_config.json'
|
||||
|
||||
jobs:
|
||||
frontend-tests:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '22'
|
||||
|
||||
- name: Clean npm cache
|
||||
run: npm cache clean --force
|
||||
|
||||
- name: Install dependencies
|
||||
run: cd ./frontend && npm ci
|
||||
|
||||
- name: Run Frontend Tests
|
||||
run: cd ./frontend && npm run test:ci
|
||||
|
|
@ -0,0 +1,55 @@
|
|||
name: GCPC modules test
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/gcpc-modules-tests.yml'
|
||||
- 'sdk/python/**'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
all-gcpc-tests:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.9
|
||||
|
||||
- name: Install protobuf-compiler
|
||||
run: sudo apt update && sudo apt install -y protobuf-compiler
|
||||
|
||||
- name: Install setuptools
|
||||
run: |
|
||||
pip3 install setuptools
|
||||
pip3 freeze
|
||||
|
||||
- name: Install Wheel
|
||||
run: pip3 install wheel==0.42.0
|
||||
|
||||
- name: Install python sdk
|
||||
run: pip install sdk/python
|
||||
|
||||
- name: Install google-cloud component
|
||||
run: pip install components/google-cloud
|
||||
|
||||
- name: Generate API proto files
|
||||
working-directory: ./api
|
||||
run: make clean python
|
||||
|
||||
- name: Install kfp-pipeline-spec from source
|
||||
run: |
|
||||
python3 -m pip install -I api/v2alpha1/python
|
||||
|
||||
- name: Install Pytest
|
||||
run: pip install $(grep 'pytest==' sdk/python/requirements-dev.txt)
|
||||
|
||||
- name: Run test
|
||||
run: pytest ./test/gcpc-tests/run_all_gcpc_modules.py
|
|
@ -0,0 +1,16 @@
|
|||
name: Build images from sources and push to master.
|
||||
run-name: Build images for master
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
jobs:
|
||||
image-builds:
|
||||
uses: ./.github/workflows/image-builds.yml
|
||||
with:
|
||||
src_branch: master
|
||||
target_tag: master
|
||||
fail_fast: true
|
||||
overwrite_imgs: true
|
||||
set_latest: false
|
||||
add_sha_tag: 'false'
|
|
@ -0,0 +1,198 @@
|
|||
name: Build images from sources.
|
||||
run-name: Build images
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
src_branch:
|
||||
type: string
|
||||
default: 'release-X.Y'
|
||||
description: 'Source branch to build KFP from'
|
||||
required: true
|
||||
target_tag:
|
||||
type: string
|
||||
default: 'X.Y.Z'
|
||||
description: 'Target Image Tag'
|
||||
required: true
|
||||
fail_fast:
|
||||
type: string
|
||||
default: 'true'
|
||||
description: 'Stop running entire Workflow if a single build fails'
|
||||
required: true
|
||||
overwrite_imgs:
|
||||
type: string
|
||||
default: 'true'
|
||||
description: 'Overwrite images in GHCR if they already exist for this tag.'
|
||||
required: true
|
||||
set_latest:
|
||||
type: string
|
||||
default: 'true'
|
||||
description: 'Set latest tag on build images.'
|
||||
required: true
|
||||
add_sha_tag:
|
||||
type: string
|
||||
default: 'true'
|
||||
description: 'Add a sha image tag.'
|
||||
required: false
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
src_branch:
|
||||
type: string
|
||||
default: 'release-X.Y'
|
||||
description: 'Source branch to build KFP from'
|
||||
required: true
|
||||
target_tag:
|
||||
type: string
|
||||
default: 'X.Y.Z'
|
||||
description: 'Target Image Tag'
|
||||
required: true
|
||||
fail_fast:
|
||||
type: string
|
||||
default: 'true'
|
||||
description: 'Stop running entire Workflow if a single build fails'
|
||||
required: true
|
||||
overwrite_imgs:
|
||||
type: string
|
||||
default: 'true'
|
||||
description: 'Overwrite images in GHCR if they already exist for this tag.'
|
||||
required: true
|
||||
set_latest:
|
||||
type: string
|
||||
default: 'true'
|
||||
description: 'Set latest tag on build images.'
|
||||
required: true
|
||||
add_sha_tag:
|
||||
type: string
|
||||
default: 'true'
|
||||
description: 'Add a sha image tag.'
|
||||
required: false
|
||||
env:
|
||||
SOURCE_BRANCH: ${{ inputs.src_branch }}
|
||||
TARGET_IMAGE_TAG: ${{ inputs.target_tag }}
|
||||
OVERWRITE_IMAGES: ${{ inputs.overwrite_imgs }}
|
||||
IMAGE_REGISTRY: ghcr.io
|
||||
IMAGE_ORG: ${{ github.repository_owner }}
|
||||
SET_LATEST: ${{ inputs.set_latest }}
|
||||
ADD_SHA_TAG: ${{ inputs.add_sha_tag }}
|
||||
jobs:
|
||||
build-images-with-tag:
|
||||
continue-on-error: false
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
attestations: write
|
||||
id-token: write
|
||||
strategy:
|
||||
fail-fast: ${{ inputs.fail_fast == 'true' }}
|
||||
matrix:
|
||||
include:
|
||||
- image: kfp-api-server
|
||||
dockerfile: backend/Dockerfile
|
||||
context: .
|
||||
- image: kfp-frontend
|
||||
dockerfile: frontend/Dockerfile
|
||||
context: .
|
||||
- image: kfp-persistence-agent
|
||||
dockerfile: backend/Dockerfile.persistenceagent
|
||||
context: .
|
||||
- image: kfp-scheduled-workflow-controller
|
||||
dockerfile: backend/Dockerfile.scheduledworkflow
|
||||
context: .
|
||||
- image: kfp-viewer-crd-controller
|
||||
dockerfile: backend/Dockerfile.viewercontroller
|
||||
context: .
|
||||
- image: kfp-visualization-server
|
||||
dockerfile: backend/Dockerfile.visualization
|
||||
context: .
|
||||
- image: kfp-launcher
|
||||
dockerfile: backend/Dockerfile.launcher
|
||||
context: .
|
||||
- image: kfp-driver
|
||||
dockerfile: backend/Dockerfile.driver
|
||||
context: .
|
||||
- image: kfp-cache-deployer
|
||||
dockerfile: backend/src/cache/deployer/Dockerfile
|
||||
context: .
|
||||
- image: kfp-cache-server
|
||||
dockerfile: backend/Dockerfile.cacheserver
|
||||
context: .
|
||||
- image: kfp-metadata-writer
|
||||
dockerfile: backend/metadata_writer/Dockerfile
|
||||
context: .
|
||||
- image: kfp-metadata-envoy
|
||||
dockerfile: third_party/metadata_envoy/Dockerfile
|
||||
context: .
|
||||
- image: kfp-inverse-proxy-agent
|
||||
dockerfile: proxy/Dockerfile
|
||||
context: ./proxy
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{env.SOURCE_BRANCH}}
|
||||
- name: Log in to the Container registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.IMAGE_REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Check if image tag already exists
|
||||
id: check_tag
|
||||
env:
|
||||
IMAGE: ${{ env.IMAGE_REGISTRY }}/${{ env.IMAGE_ORG }}/${{ matrix.image }}:${{env.TARGET_IMAGE_TAG}}
|
||||
OVERWRITE: ${{ env.OVERWRITE_IMAGES }}
|
||||
run: |
|
||||
if docker manifest inspect ${IMAGE} > /dev/null 2>&1; then
|
||||
echo "Image tag already exists!"
|
||||
if [ "$OVERWRITE" == "false" ]; then
|
||||
echo "Overwrite is set to false, exiting."
|
||||
exit 1
|
||||
else
|
||||
echo "Overwrite is set to true, proceeding with push."
|
||||
fi
|
||||
else
|
||||
echo "No tag conflict, safe to push."
|
||||
fi
|
||||
|
||||
# This step uses docker/metadata-action to extract tags and labels
|
||||
# that will be applied to the specified image. The id "meta" allows
|
||||
# the output of this step to be referenced in a subsequent step.
|
||||
# The images value provides the base name for the tags and labels.
|
||||
- name: Extract metadata (tags, labels) for Build
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
if: steps.check_tag.outcome == 'success'
|
||||
with:
|
||||
images: ${{ env.IMAGE_REGISTRY }}/${{ env.IMAGE_ORG }}/${{ matrix.image }}
|
||||
tags: |
|
||||
type=raw,value=${{env.TARGET_IMAGE_TAG}}
|
||||
type=raw,value=latest,enable=${{ env.SET_LATEST == 'true'}}
|
||||
type=sha,enable=${{ env.ADD_SHA_TAG == 'true' }}
|
||||
|
||||
# Build the image. If the build succeeds, it pushes the image to GitHub
|
||||
# Packages. It uses the context parameter to define the build's context
|
||||
# as the set of files located in the specified path.
|
||||
- name: Build and push Image
|
||||
id: push
|
||||
uses: docker/build-push-action@v6
|
||||
if: steps.check_tag.outcome == 'success'
|
||||
with:
|
||||
context: ${{ matrix.context }}
|
||||
file: ${{ matrix.dockerfile }}
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
|
||||
# This step generates an artifact attestation for the image,
|
||||
# which is an unforgeable statement about where and how it was built.
|
||||
# It increases supply chain security for people who consume the
|
||||
# image.
|
||||
# Ref: https://docs.github.com/en/actions/security-for-github-actions/using-artifact-attestations/using-artifact-attestations-to-establish-provenance-for-builds
|
||||
- name: Generate artifact attestation
|
||||
uses: actions/attest-build-provenance@v1
|
||||
if: steps.check_tag.outcome == 'success'
|
||||
with:
|
||||
subject-name: ${{ env.IMAGE_REGISTRY }}/${{ env.IMAGE_ORG }}/${{ matrix.image }}
|
||||
subject-digest: ${{ steps.push.outputs.digest }}
|
||||
|
|
@ -0,0 +1,97 @@
|
|||
name: k8s execution tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/kfp-kubernetes-execution-tests.yml'
|
||||
- '.github/resources/**'
|
||||
- 'sdk/python/**'
|
||||
- 'api/v2alpha1/**'
|
||||
- 'kubernetes_platform/**'
|
||||
- 'backend/**'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
kfp-kubernetes-execution-tests:
|
||||
runs-on: ubuntu-24.04
|
||||
strategy:
|
||||
matrix:
|
||||
k8s_version: [ "v1.29.2", "v1.31.0" ]
|
||||
name: kfp-kubernetes execution tests - K8s ${{ matrix.k8s_version }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.9'
|
||||
|
||||
# This is intended to address disk space issues that have surfaced
|
||||
# intermittently during CI -
|
||||
# https://github.com/actions/runner-images/issues/2840#issuecomment-1284059930
|
||||
- name: Free up space in /dev/root
|
||||
run: |
|
||||
echo "Disk usage before clean up:"
|
||||
df -h
|
||||
sudo rm -rf /usr/share/dotnet
|
||||
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
|
||||
echo "Disk usage after clean up:"
|
||||
df -h
|
||||
|
||||
- name: Create KFP cluster
|
||||
id: create-kfp-cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward API port
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
id: forward-api-port
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888
|
||||
continue-on-error: true
|
||||
|
||||
- name: Install protobuf dependencies & kfp-pipeline-spec
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
id: install-protobuf-deps
|
||||
uses: ./.github/actions/protobuf
|
||||
|
||||
- name: Install kfp & kfp-kubernetes from source
|
||||
if: ${{ steps.install-protobuf-deps.outcome == 'success' }}
|
||||
id: install-kfp-k8s-deps
|
||||
uses: ./.github/actions/kfp-k8s
|
||||
|
||||
- name: Install requirements
|
||||
id: install-requirements
|
||||
if: ${{ steps.install-kfp-kubernetes.outcome == 'success' }}
|
||||
run: pip install -r ./test/kfp-kubernetes-execution-tests/requirements.txt
|
||||
|
||||
- name: Run tests
|
||||
id: test
|
||||
if: ${{ steps.install-kfp-k8s-deps.outcome == 'success' }}
|
||||
env:
|
||||
PULL_NUMBER: ${{ github.event.pull_request.number }}
|
||||
REPO_NAME: ${{ github.repository }}
|
||||
run: |
|
||||
export KFP_ENDPOINT="http://localhost:8888"
|
||||
export TIMEOUT_SECONDS=2700
|
||||
pytest ./test/kfp-kubernetes-execution-tests/sdk_execution_tests.py --asyncio-task-timeout $TIMEOUT_SECONDS
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.test.outcome != 'success'}}
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: kfp-execution-tests-artifacts-k8s-${{ matrix.k8s_version }}
|
||||
path: /tmp/tmp*/*
|
|
@ -0,0 +1,42 @@
|
|||
name: kfp-kubernetes library tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/kfp-kubernetes-library-test.yml'
|
||||
- 'sdk/python/**'
|
||||
- 'api/v2alpha1/**'
|
||||
- 'kubernetes_platform/**'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
kfp-kubernetes-library-test:
|
||||
runs-on: ubuntu-24.04
|
||||
strategy:
|
||||
matrix:
|
||||
python: [
|
||||
{ 'version': '3.9' },
|
||||
{ 'version': '3.13' }
|
||||
]
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{matrix.python.version}}
|
||||
|
||||
- name: Install protobuf dependencies & kfp-pipeline-spec
|
||||
id: install-protobuf-deps
|
||||
uses: ./.github/actions/protobuf
|
||||
|
||||
- name: Install kfp & kfp-kubernetes from source
|
||||
id: install-kfp-k8s-deps
|
||||
uses: ./.github/actions/kfp-k8s
|
||||
|
||||
- name: Run tests
|
||||
run: pytest ./kubernetes_platform/python/test
|
|
@ -0,0 +1,105 @@
|
|||
name: KFP Samples
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/resources/**'
|
||||
- '.github/workflows/kfp-samples.yml'
|
||||
- '.github/workflows/kubeflow-pipelines-integration-v2.yml'
|
||||
- 'api/**'
|
||||
- 'backend/**'
|
||||
- 'samples/**'
|
||||
- 'samples/core/dataflow/**'
|
||||
- 'samples/core/parameterized_tfx_oss/**'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
samples:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
k8s_version: [ "v1.29.2", "v1.31.0" ]
|
||||
name: KFP Samples - K8s ${{ matrix.k8s_version }}
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.9
|
||||
|
||||
- name: Free up space in /dev/root
|
||||
run: |
|
||||
echo "Disk usage before clean up:"
|
||||
df -h
|
||||
sudo rm -rf /usr/share/dotnet
|
||||
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
|
||||
sudo rm -rf /usr/share/dotnet
|
||||
sudo rm -rf /opt/ghc
|
||||
sudo rm -rf /usr/local/share/boost
|
||||
sudo rm -rf /usr/local/lib/android
|
||||
sudo rm -rf /usr/local/.ghcup
|
||||
sudo rm -rf /usr/share/swift
|
||||
sudo rm -rf /opt/hostedtoolcache/CodeQL || true
|
||||
sudo rm -rf /opt/hostedtoolcache/Java_* || true
|
||||
sudo rm -rf /opt/hostedtoolcache/Ruby || true
|
||||
sudo rm -rf /opt/hostedtoolcache/PyPy || true
|
||||
sudo rm -rf /opt/hostedtoolcache/boost || true
|
||||
docker system prune -af --volumes
|
||||
docker image prune -af
|
||||
echo "Disk usage after clean up:"
|
||||
df -h
|
||||
|
||||
- name: Create KFP cluster
|
||||
id: create-kfp-cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Build and upload the sample Modelcar image to Kind
|
||||
id: build-sample-modelcar-image
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: |
|
||||
docker build -f samples/v2/modelcar/Dockerfile -t registry.domain.local/modelcar:test .
|
||||
kind --name kfp load docker-image registry.domain.local/modelcar:test
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward API port
|
||||
id: forward-api-port
|
||||
if: ${{ steps.build-sample-modelcar-image.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888
|
||||
continue-on-error: true
|
||||
|
||||
- name: Install protobuf dependencies & kfp-pipeline-spec
|
||||
id: install-protobuf-deps
|
||||
uses: ./.github/actions/protobuf
|
||||
|
||||
- name: Install kfp & kfp-kubernetes from source
|
||||
id: install-kfp-k8s-deps
|
||||
uses: ./.github/actions/kfp-k8s
|
||||
|
||||
- name: Run Samples Tests
|
||||
id: tests
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
run: |
|
||||
python3 -u ./samples/v2/sample_test.py
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.tests.outcome != 'success' }}
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: kfp-samples-tests-artifacts-k8s-${{ matrix.k8s_version }}
|
||||
path: /tmp/tmp*/*
|
|
@ -0,0 +1,34 @@
|
|||
name: KFP Runtime Code Tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/kfp-sdk-runtime-tests.yml'
|
||||
- 'sdk/python/**'
|
||||
- 'test/presubmit-test-kfp-runtime-code.sh'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
kfp-runtime-tests:
|
||||
runs-on: ubuntu-24.04
|
||||
strategy:
|
||||
matrix:
|
||||
python: ['3.9', '3.13']
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ matrix.python }}
|
||||
|
||||
- name: Run KFP Runtime Code Tests
|
||||
run: |
|
||||
export PULL_NUMBER="${{ github.event.inputs.pull_number || github.event.pull_request.number }}"
|
||||
export REPO_NAME="${{ github.repository }}"
|
||||
./test/presubmit-test-kfp-runtime-code.sh
|
|
@ -0,0 +1,49 @@
|
|||
name: KFP SDK Tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
pull_request:
|
||||
paths:
|
||||
- 'api/**'
|
||||
- 'sdk/**'
|
||||
- 'test/presubmit-tests-sdk.sh'
|
||||
- '.github/workflows/kfp-sdk-tests.yml'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
sdk-tests:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ['3.9', '3.13']
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
|
||||
- name: Install protobuf dependencies & kfp-pipeline-spec
|
||||
id: install-protobuf-deps
|
||||
uses: ./.github/actions/protobuf
|
||||
|
||||
- name: Install kfp & kfp-kubernetes from source
|
||||
id: install-kfp-k8s-deps
|
||||
uses: ./.github/actions/kfp-k8s
|
||||
|
||||
- name: Install Test dependencies
|
||||
run: |
|
||||
pip install coveralls==3.3.1
|
||||
pip install -r sdk/python/requirements-dev.txt
|
||||
|
||||
- name: Run SDK Tests
|
||||
env:
|
||||
# We setup the env in the CI
|
||||
SETUP_ENV: false
|
||||
run: |
|
||||
./test/presubmit-tests-sdk.sh
|
|
@ -0,0 +1,53 @@
|
|||
name: KFP Webhook Integration
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/resources/**'
|
||||
- '.github/workflows/kfp-webhooks.yml'
|
||||
- 'backend/**'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
webhook-tests:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
k8s_version: [ "v1.29.2", "v1.31.0" ]
|
||||
name: KFP Webhooks - K8s ${{ matrix.k8s_version }}
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Create KFP cluster
|
||||
id: create-kfp-cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
pipeline_store: kubernetes
|
||||
continue-on-error: true
|
||||
|
||||
- name: Run Webhook Integration Tests
|
||||
id: tests
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: |
|
||||
make -C backend/test/integration test-webhook
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.tests.outcome != 'success' }}
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: kfp-samples-tests-artifacts-k8s-${{ matrix.k8s_version }}
|
||||
path: /tmp/tmp*/*
|
|
@ -0,0 +1,21 @@
|
|||
name: KFP Manifests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/kubeflow-pipelines-manifests.yml'
|
||||
- 'manifests/kustomize/**'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
kubeflow-pipelines-manifests:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Run tests
|
||||
run: ./manifests/kustomize/hack/presubmit.sh
|
|
@ -7,23 +7,42 @@ on:
|
|||
jobs:
|
||||
run_tests:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
k8s_version: [ "v1.29.2", "v1.31.0" ]
|
||||
name: Periodic Functional Tests - K8s ${{ matrix.k8s_version }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
- name: Create k8s Kind Cluster
|
||||
uses: container-tools/kind-action@v2
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
cluster_name: kfp-tekton
|
||||
kubectl_version: v1.29.2
|
||||
version: v0.22.0
|
||||
node_image: kindest/node:v1.29.2
|
||||
python-version: 3.9
|
||||
- name: Create KFP cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
|
||||
- name: Port forward kfp apiserver
|
||||
run: |
|
||||
nohup kubectl port-forward --namespace kubeflow svc/ml-pipeline 8888:8888 &
|
||||
|
||||
- name: Run Functional Tests
|
||||
id: tests
|
||||
run: |
|
||||
log_dir=$(mktemp -d)
|
||||
./test/kfp-functional-test/kfp-functional-test.sh > $log_dir/periodic_tests.txt
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: steps.tests.outcome != 'success'
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: periodic-functional-artifacts
|
||||
path: /tmp/tmp.*/*
|
||||
name: periodic-functional-artifacts-k8s-${{ matrix.k8s_version }}
|
||||
path: /tmp/tmp*/*
|
||||
|
|
|
@ -0,0 +1,101 @@
|
|||
name: PR Commands
|
||||
on:
|
||||
issue_comment:
|
||||
types:
|
||||
- created
|
||||
env:
|
||||
DEFAULT_BRANCH: master
|
||||
jobs:
|
||||
process-command:
|
||||
runs-on: ubuntu-latest
|
||||
# Fail early if the command is not recognized
|
||||
if: contains(github.event.comment.body, '/ok-to-test')
|
||||
outputs:
|
||||
PR_SHA: ${{ steps.fetch-pr-sha.outputs.PR_SHA }}
|
||||
steps:
|
||||
- name: Checkout Main Branch
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ env.DEFAULT_BRANCH }}
|
||||
- name: Check if the author is a member or Owner
|
||||
id: check-condition
|
||||
run: |
|
||||
if [[ "${{ github.event.comment.author_association }}" == "MEMBER" || "${{ github.event.comment.author_association }}" == "OWNER" ]]; then
|
||||
echo "condition_met=true" >> $GITHUB_ENV
|
||||
else
|
||||
echo "User does not have permission to trigger this command."
|
||||
echo "condition_met=false" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Leave a Comment on Precondition Fail
|
||||
if: env.condition_met == 'false'
|
||||
env:
|
||||
message: 🚫 This command cannot be processed. Only organization members or owners can use the commands.
|
||||
run: |
|
||||
echo "${{ secrets.GITHUB_TOKEN }}" | gh auth login --with-token
|
||||
gh issue comment ${{ github.event.issue.number }} --repo "${{ github.repository }}" --body "${{ env.message }}"
|
||||
echo ${message}
|
||||
exit 1
|
||||
|
||||
- name: Check if comment is on a pull request
|
||||
id: check-pr
|
||||
run: |
|
||||
if [[ -z "${{ github.event.issue.pull_request }}" ]]; then
|
||||
echo "Comment is not on a pull request."
|
||||
exit 1
|
||||
fi
|
||||
echo "PR_URL=${{ github.event.issue.pull_request.url }}" >> $GITHUB_ENV
|
||||
|
||||
- name: Fetch pull request sha
|
||||
id: fetch-pr-sha
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
PR_URL="${PR_URL}"
|
||||
PR_DATA=$(curl -s -H "Authorization: Bearer $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3+json" "$PR_URL")
|
||||
PR_SHA=$(echo "$PR_DATA" | jq -r '.head.sha')
|
||||
echo "PR_SHA=$PR_SHA" >> $GITHUB_OUTPUT
|
||||
|
||||
# Add other commands as separate jobs
|
||||
approve:
|
||||
runs-on: ubuntu-latest
|
||||
needs: process-command
|
||||
if: contains(github.event.comment.body, '/ok-to-test')
|
||||
steps:
|
||||
- name: Checkout Main Branch
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ env.DEFAULT_BRANCH }}
|
||||
- name: Approve Runs
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
PR_SHA: ${{ needs.process-command.outputs.PR_SHA }}
|
||||
run: |
|
||||
runs=$(curl -s -H "Authorization: Bearer $GITHUB_TOKEN" \
|
||||
-H "Accept: application/vnd.github.v3+json" \
|
||||
"https://api.github.com/repos/${{ github.repository }}/actions/runs?head_sha=${{ env.PR_SHA }}" | \
|
||||
jq -r '.workflow_runs[] | select(.conclusion == "action_required") | .id')
|
||||
|
||||
if [[ -z "$runs" ]]; then
|
||||
echo "No workflow runs found for the given head SHA."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Found workflow runs requiring approval: $runs"
|
||||
# Approve each workflow run
|
||||
for run_id in $runs; do
|
||||
curl -X POST -H "Authorization: Bearer $GITHUB_TOKEN" \
|
||||
-H "Accept: application/vnd.github.v3+json" \
|
||||
"https://api.github.com/repos/${{ github.repository }}/actions/runs/$run_id/approve"
|
||||
echo "Approved workflow run: $run_id"
|
||||
done
|
||||
msg="Approvals successfully granted for pending runs."
|
||||
echo "output_msg=${msg}" >> $GITHUB_ENV
|
||||
|
||||
- name: Leave a Comment
|
||||
env:
|
||||
message: ${{ env.output_msg }}
|
||||
run: |
|
||||
echo "${{ secrets.GITHUB_TOKEN }}" | gh auth login --with-token
|
||||
gh issue comment ${{ github.event.issue.number }} --repo "${{ github.repository }}" --body "${{ env.message }}"
|
||||
|
|
@ -0,0 +1,30 @@
|
|||
name: pre-commit
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
pre-commit:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v3
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version-file: go.mod
|
||||
- name: golangci-lint
|
||||
uses: golangci/golangci-lint-action@v8
|
||||
with:
|
||||
version: v2.3
|
||||
args: --new-from-rev HEAD
|
||||
#- uses: pre-commit/action@v3.0.1
|
||||
# # This is set to only run the golangci-lint pre-commit hooks
|
||||
# # Remove in a later PR to run all hooks
|
||||
# with:
|
||||
# go-version: '>=1.24.2'
|
||||
# extra_args: golangci-lint --all-files
|
|
@ -10,6 +10,8 @@ on:
|
|||
paths:
|
||||
- 'backend/**'
|
||||
- 'test/presubmit-backend-test.sh'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
backend-tests:
|
||||
|
@ -20,9 +22,12 @@ jobs:
|
|||
uses: actions/checkout@v2
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v3
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '1.21'
|
||||
go-version-file: go.mod
|
||||
|
||||
- name: Run Backend Tests
|
||||
run: ./test/presubmit-backend-test.sh
|
||||
run: |
|
||||
export GIT_BRANCH=${{ github.head_ref || github.ref_name }}
|
||||
export GIT_REPO=${{ github.event.pull_request.head.repo.full_name }}
|
||||
./test/presubmit-backend-test.sh
|
||||
|
|
|
@ -0,0 +1,44 @@
|
|||
name: KFP Readthedocs Release Readiness
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ master ]
|
||||
pull_request:
|
||||
paths:
|
||||
- api/**
|
||||
- sdk/**
|
||||
- kubernetes_platform/**
|
||||
- .github/workflows/readthedocs-builds.yml
|
||||
- .readthedocs.yml
|
||||
|
||||
jobs:
|
||||
test-readthedocs-builds:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.10"
|
||||
|
||||
- name: Install Dependencies
|
||||
run: |
|
||||
pip install -r docs/sdk/requirements.txt
|
||||
|
||||
- name: Build KFP SDK Docs
|
||||
working-directory: docs/sdk
|
||||
run: |
|
||||
sphinx-build -b html . _build/html
|
||||
|
||||
- name: Build KFP Kubernetes SDK Docs
|
||||
working-directory: kubernetes_platform/python/docs
|
||||
run: |
|
||||
sphinx-build -b html . _build/html
|
||||
|
||||
- name: Test K8s platform release script
|
||||
working-directory: kubernetes_platform/python
|
||||
run: |
|
||||
KFP_KUBERNETES_VERSION=$(python -c 'from kfp.kubernetes.__init__ import __version__; print(__version__)')
|
||||
source create_release_branch.sh
|
|
@ -0,0 +1,36 @@
|
|||
name: KFP Component YAML Test
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
|
||||
pull_request:
|
||||
paths:
|
||||
- 'components/**/*.yaml'
|
||||
- 'test/presubmit-component-yaml.sh'
|
||||
- 'sdk/python/**'
|
||||
- 'api/v2alpha1/**'
|
||||
- '.github/workflows/sdk-component-yaml.yml'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
test-component-yaml-kfp:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.9
|
||||
|
||||
- name: Install protobuf dependencies
|
||||
uses: ./.github/actions/protobuf
|
||||
|
||||
- name: Install requirements
|
||||
run: pip install -r ./test/sdk-execution-tests/requirements.txt
|
||||
|
||||
- name: Run component YAML tests
|
||||
run: ./test/presubmit-component-yaml.sh
|
|
@ -0,0 +1,28 @@
|
|||
name: KFP SDK Docformatter Test
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
|
||||
pull_request:
|
||||
paths:
|
||||
- 'sdk/python/**'
|
||||
- 'test/presubmit-docformatter-sdk.sh'
|
||||
- '.github/workflows/sdk-docformatter.yml'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
test-docformatter-kfp-sdk:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.9
|
||||
|
||||
- name: Run docformatter tests
|
||||
run: ./test/presubmit-docformatter-sdk.sh
|
|
@ -0,0 +1,103 @@
|
|||
name: KFP SDK execution tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/sdk-execution.yml'
|
||||
- '.github/resources/**'
|
||||
- 'sdk/python/**'
|
||||
- 'api/v2alpha1/**'
|
||||
- 'backend/**'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
sdk-execution-tests:
|
||||
runs-on: ubuntu-24.04
|
||||
strategy:
|
||||
matrix:
|
||||
k8s_version: [ "v1.29.2", "v1.31.0" ]
|
||||
name: KFP SDK Execution Tests - K8s ${{ matrix.k8s_version }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# This is intended to address disk space issues that have surfaced
|
||||
# intermittently during CI -
|
||||
# https://github.com/actions/runner-images/issues/2840#issuecomment-1284059930
|
||||
- name: Free up space in /dev/root
|
||||
run: |
|
||||
echo "Disk usage before clean up:"
|
||||
df -h
|
||||
sudo rm -rf /usr/share/dotnet
|
||||
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
|
||||
echo "Disk usage after clean up:"
|
||||
df -h
|
||||
|
||||
# This must occur after "Free up space" step
|
||||
# otherwise python version will be overridden
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.11'
|
||||
|
||||
- name: Create KFP cluster
|
||||
id: create-kfp-cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward API port
|
||||
id: forward-api-port
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward MLMD port
|
||||
id: forward-mlmd-port
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: kubectl -n kubeflow port-forward svc/metadata-grpc-service 8080:8080 &
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward Minio port
|
||||
id: forward-minio-port
|
||||
if: ${{ steps.forward-mlmd-port.outcome == 'success' }}
|
||||
run: kubectl -n kubeflow port-forward service/minio-service 9000:9000 &
|
||||
continue-on-error: true
|
||||
|
||||
- name: Install protobuf dependencies & kfp-pipeline-spec
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
id: install-protobuf-deps
|
||||
uses: ./.github/actions/protobuf
|
||||
|
||||
- name: Install requirements
|
||||
id: install-requirements
|
||||
run: pip install -r ./test/sdk-execution-tests/requirements.txt
|
||||
|
||||
- name: Run tests
|
||||
id: tests
|
||||
env:
|
||||
PULL_NUMBER: ${{ github.event.pull_request.number }}
|
||||
REPO_NAME: ${{ github.repository }}
|
||||
run: |
|
||||
export KFP_ENDPOINT="http://localhost:8888"
|
||||
export TIMEOUT_SECONDS=2700
|
||||
pytest -v -n 5 ./test/sdk-execution-tests/sdk_execution_tests.py
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.forward-mlmd-port.outcome != 'success' || steps.forward-minio-port.outcome != 'success' || steps.tests.outcome != 'success'}}
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: periodic-functional-artifacts-k8s-${{ matrix.k8s_version }}
|
||||
path: /tmp/tmp*/*
|
|
@ -0,0 +1,29 @@
|
|||
name: KFP SDK Isort Test
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
|
||||
pull_request:
|
||||
paths:
|
||||
- 'api/**'
|
||||
- 'sdk/python/**'
|
||||
- 'test/presubmit-isort-sdk.sh'
|
||||
- '.github/workflows/sdk-isort.yml'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
test-isort-kfp-sdk:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.9
|
||||
|
||||
- name: Run isort tests
|
||||
run: ./test/presubmit-isort-sdk.sh
|
|
@ -0,0 +1,28 @@
|
|||
name: KFP SDK Upgrade Test
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
|
||||
pull_request:
|
||||
paths:
|
||||
- 'sdk/python/**'
|
||||
- 'test/presubmit-test-sdk-upgrade.sh'
|
||||
- '.github/workflows/sdk-upgrade.yml'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
test-upgrade-kfp-sdk:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.9
|
||||
|
||||
- name: Run SDK upgrade tests
|
||||
run: ./test/presubmit-test-sdk-upgrade.sh
|
|
@ -0,0 +1,33 @@
|
|||
name: KFP SDK YAPF Tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
|
||||
pull_request:
|
||||
paths:
|
||||
- 'api/**'
|
||||
- 'sdk/python/**'
|
||||
- 'test/presubmit-yapf-sdk.sh'
|
||||
- '.github/workflows/sdk-yapf.yml'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
yapf-sdk:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.9'
|
||||
|
||||
- name: Install dependencies
|
||||
run: pip install yapf
|
||||
|
||||
- name: Run YAPF SDK Tests
|
||||
run: ./test/presubmit-yapf-sdk.sh
|
|
@ -21,8 +21,8 @@ jobs:
|
|||
- uses: actions/stale@v5
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
days-before-stale: 60
|
||||
days-before-close: 21
|
||||
days-before-stale: 365
|
||||
days-before-close: 183 # half a year
|
||||
stale-issue-message: >
|
||||
This issue has been automatically marked as stale because it has not had
|
||||
recent activity. It will be closed if no further activity occurs. Thank you
|
||||
|
|
|
@ -0,0 +1,85 @@
|
|||
name: KFP upgrade tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/upgrade-test.yml'
|
||||
- '.github/resources/**'
|
||||
- 'backend/**'
|
||||
- 'manifests/kustomize/**'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
upgrade-test:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
k8s_version: [ "v1.29.2", "v1.31.0" ]
|
||||
name: KFP upgrade tests - K8s ${{ matrix.k8s_version }}
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.9
|
||||
|
||||
- name: Create KFP cluster
|
||||
id: create-kfp-cluster
|
||||
uses: ./.github/actions/kfp-cluster
|
||||
with:
|
||||
k8s_version: ${{ matrix.k8s_version }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Forward API port
|
||||
id: forward-api-port
|
||||
if: ${{ steps.create-kfp-cluster.outcome == 'success' }}
|
||||
run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888
|
||||
continue-on-error: true
|
||||
|
||||
- name: Prepare upgrade tests
|
||||
id: upgrade-tests
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
working-directory: backend/test/integration
|
||||
run: go test -v ./... -namespace kubeflow -args -runUpgradeTests=true -testify.m=Prepare
|
||||
continue-on-error: true
|
||||
|
||||
- name: Prepare verification tests
|
||||
id: verification-tests
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
working-directory: backend/test/integration
|
||||
run: go test -v ./... -namespace kubeflow -args -runUpgradeTests=true -testify.m=Verify
|
||||
continue-on-error: true
|
||||
|
||||
- name: Prepare upgrade tests v2
|
||||
id: upgrade-tests-v2
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
working-directory: backend/test/v2/integration/
|
||||
run: go test -v ./... -namespace kubeflow -args -runUpgradeTests=true -testify.m=Prepare
|
||||
continue-on-error: true
|
||||
|
||||
- name: Prepare verification tests v2
|
||||
id: verification-tests-v2
|
||||
if: ${{ steps.forward-api-port.outcome == 'success' }}
|
||||
working-directory: backend/test/v2/integration
|
||||
run: go test -v ./... -namespace kubeflow -args -runUpgradeTests=true -testify.m=Verify
|
||||
continue-on-error: true
|
||||
|
||||
- name: Collect failed logs
|
||||
if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.upgrade-tests.outcome != 'success' || steps.upgrade-tests-v2.outcome != 'success' || steps.verification-tests.outcome != 'success' || steps.verification-tests-v2.outcome != 'success' }}
|
||||
run: |
|
||||
./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt
|
||||
exit 1
|
||||
|
||||
- name: Collect test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: periodic-functional-artifacts-k8s-${{ matrix.k8s_version }}
|
||||
path: /tmp/tmp*/*
|
|
@ -0,0 +1,99 @@
|
|||
name: Validate Generated Files
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/validate-generated-files.yml'
|
||||
- 'backend/api/**/*.proto'
|
||||
- 'backend/api/**/go_http_client/**'
|
||||
- 'backend/api/**/go_client/**'
|
||||
- 'backend/api/**/python_http_client/**'
|
||||
- 'backend/api/**/swagger/**'
|
||||
- 'api/**/*.proto'
|
||||
- 'api/**/*.go'
|
||||
- 'kubernetes_platform/**/*.proto'
|
||||
- 'kubernetes_platform/**/*.go'
|
||||
- 'backend/src/crd/kubernetes/**/*.go'
|
||||
- 'manifests/kustomize/base/crds/*.yaml'
|
||||
- '!**/*.md'
|
||||
- '!**/OWNERS'
|
||||
|
||||
jobs:
|
||||
validate-generated-files:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version-file: go.mod
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.9
|
||||
|
||||
- name: Install protobuf dependencies & kfp-pipeline-spec
|
||||
id: install-protobuf-deps
|
||||
uses: ./.github/actions/protobuf
|
||||
with:
|
||||
generate_golang_proto: "true"
|
||||
|
||||
- name: Install kfp & kfp-kubernetes from source
|
||||
id: install-kfp-k8s-deps
|
||||
uses: ./.github/actions/kfp-k8s
|
||||
with:
|
||||
generate_golang_proto: "true"
|
||||
|
||||
- name: Generate K8s Native API CRDs
|
||||
working-directory: ./backend/src/crd/kubernetes
|
||||
run: make generate manifests
|
||||
|
||||
- name: Generate backend proto code v2beta1
|
||||
working-directory: ./backend/api
|
||||
env:
|
||||
API_VERSION: v2beta1
|
||||
run: make generate
|
||||
|
||||
- name: Generate backend proto code v1beta1
|
||||
working-directory: ./backend/api
|
||||
env:
|
||||
API_VERSION: v1beta1
|
||||
run: make generate
|
||||
|
||||
- name: Generate backend proto code v2beta1
|
||||
working-directory: ./backend/api
|
||||
env:
|
||||
API_VERSION: v2beta1
|
||||
run: make generate-kfp-server-api-package
|
||||
|
||||
- name: Generate backend proto code v1beta1
|
||||
working-directory: ./backend/api
|
||||
env:
|
||||
API_VERSION: v1beta1
|
||||
run: make generate-kfp-server-api-package
|
||||
|
||||
- name: Check for Changes
|
||||
run: make check-diff
|
||||
|
||||
validate-backwards-compabitiblity:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version-file: go.mod
|
||||
|
||||
- name: Validate Go proto code backwards compatibility
|
||||
working-directory: ./backend/test/proto_tests
|
||||
env:
|
||||
UPDATE_EXPORTED: false
|
||||
run: go test .
|
|
@ -12,6 +12,7 @@ bower_components/
|
|||
|
||||
# Build output
|
||||
dist
|
||||
__debug_bin*
|
||||
|
||||
# Web server
|
||||
frontend/server/dist
|
||||
|
@ -56,10 +57,6 @@ bazel-*
|
|||
# VSCode
|
||||
.vscode
|
||||
|
||||
# test yaml
|
||||
sdk/python/tests/compiler/pipeline.yaml
|
||||
sdk/python/tests/compiler/testdata/testpackage/pipeline.yaml
|
||||
|
||||
# Test temporary files
|
||||
_artifacts
|
||||
|
||||
|
@ -88,3 +85,11 @@ __pycache__
|
|||
|
||||
# kfp local execution default directory
|
||||
local_outputs/
|
||||
|
||||
# Ignore the Kind cluster kubeconfig
|
||||
kubeconfig_dev-pipelines-api
|
||||
|
||||
# Ignore debug Driver Dockerfile produced from `make -C backend image_driver_debug`
|
||||
backend/Dockerfile.driver-debug
|
||||
|
||||
backend/src/crd/kubernetes/bin
|
||||
|
|
|
@ -1,27 +1,34 @@
|
|||
version: "2"
|
||||
|
||||
run:
|
||||
timeout: 30m
|
||||
skip-files:
|
||||
- "api\\*.go$"
|
||||
- "backend\\api\\*.go"
|
||||
|
||||
issues:
|
||||
max-same-issues: 0
|
||||
|
||||
linters:
|
||||
disable-all: true
|
||||
enable: # please keep this alphabetized
|
||||
default: none
|
||||
enable:
|
||||
- gocritic
|
||||
- gosimple
|
||||
- govet
|
||||
- ineffassign
|
||||
- misspell
|
||||
- staticcheck
|
||||
- stylecheck
|
||||
- unused
|
||||
|
||||
exclusions:
|
||||
paths:
|
||||
- "api/*.go"
|
||||
- "backend/api/*.go"
|
||||
|
||||
settings:
|
||||
misspell:
|
||||
locale: US
|
||||
staticcheck:
|
||||
checks:
|
||||
- "all"
|
||||
|
||||
linters-settings: # please keep this alphabetized
|
||||
misspell:
|
||||
locale: US
|
||||
staticcheck:
|
||||
checks:
|
||||
- "all"
|
||||
formatters:
|
||||
enable:
|
||||
- gofmt
|
||||
- goimports
|
|
@ -15,12 +15,18 @@ repos:
|
|||
- id: double-quote-string-fixer
|
||||
- id: no-commit-to-branch
|
||||
args: [--branch, master]
|
||||
- repo: https://github.com/PyCQA/flake8
|
||||
rev: 7.1.1
|
||||
hooks:
|
||||
- id: flake8
|
||||
args:
|
||||
- --select=W605
|
||||
# required formatting jobs (run these last)
|
||||
|
||||
# add comment "noqa" to ignore an import that should not be removed
|
||||
# (e.g., for an import with desired side-effects)
|
||||
- repo: https://github.com/hadialqattan/pycln
|
||||
rev: v2.1.1
|
||||
rev: v2.5.0
|
||||
hooks:
|
||||
- id: pycln
|
||||
name: pycln
|
||||
|
@ -37,7 +43,7 @@ repos:
|
|||
hooks:
|
||||
- id: yapf
|
||||
- repo: https://github.com/pycqa/docformatter
|
||||
rev: v1.4
|
||||
rev: v1.7.7
|
||||
hooks:
|
||||
- id: docformatter
|
||||
name: docformatter
|
||||
|
@ -49,7 +55,7 @@ repos:
|
|||
|
||||
# Golang pre-submit hooks
|
||||
- repo: https://github.com/golangci/golangci-lint
|
||||
rev: v1.52.2
|
||||
rev: v2.1.2
|
||||
hooks:
|
||||
- id: golangci-lint
|
||||
name: golangci-lint
|
||||
|
@ -59,3 +65,11 @@ repos:
|
|||
language: golang
|
||||
require_serial: true
|
||||
pass_filenames: false
|
||||
- id: golangci-lint
|
||||
name: golangci-lint fmt
|
||||
description: Formatter for Go.
|
||||
entry: golangci-lint fmt
|
||||
types: [go]
|
||||
language: golang
|
||||
require_serial: true
|
||||
pass_filenames: false
|
||||
|
|
|
@ -1,11 +1,11 @@
|
|||
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
|
||||
version: 2
|
||||
sphinx:
|
||||
configuration: docs/conf.py
|
||||
configuration: docs/sdk/conf.py
|
||||
python:
|
||||
install:
|
||||
- requirements: docs/requirements.txt
|
||||
- requirements: docs/sdk/requirements.txt
|
||||
build:
|
||||
os: ubuntu-22.04
|
||||
tools:
|
||||
python: "3.8"
|
||||
python: "3.9"
|
||||
|
|
|
@ -1,652 +0,0 @@
|
|||
# Copyright 2018 The Kubeflow Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
steps:
|
||||
|
||||
# Marketplace Major.Minor parsing
|
||||
- id: "parseMajorMinorVersion"
|
||||
name: gcr.io/cloud-builders/docker
|
||||
entrypoint: /bin/bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
# Parse major minor version and save to a file for reusing in other steps.
|
||||
# e.g. 1.0.0-rc.1 and 1.0.1 are parsed as 1.0
|
||||
echo $TAG_NAME | sed -e "s#\([0-9]\+[.][0-9]\+\)[.].*#\1#" > /workspace/mm.ver
|
||||
|
||||
# Pull and retag images for pipeline components
|
||||
- id: 'retagComponentImages'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
waitFor: ['-']
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
images=(
|
||||
"ml-pipeline-kubeflow-deployer"
|
||||
"ml-pipeline-kubeflow-tf-trainer"
|
||||
"ml-pipeline-kubeflow-tf-trainer-gpu"
|
||||
"ml-pipeline-kubeflow-tfjob"
|
||||
"ml-pipeline-local-confusion-matrix"
|
||||
"ml-pipeline-local-roc"
|
||||
)
|
||||
for image in "${images[@]}"
|
||||
do
|
||||
from_image="gcr.io/$PROJECT_ID/$image:$COMMIT_SHA"
|
||||
target_image="gcr.io/ml-pipeline/$image:$TAG_NAME"
|
||||
docker pull $from_image
|
||||
docker tag $from_image $target_image
|
||||
docker push $target_image
|
||||
done
|
||||
|
||||
# Pull and retag the images for the pipeline system
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA']
|
||||
id: 'pullFrontend'
|
||||
- id: 'tagFrontendForMarketplaceMajorMin'
|
||||
waitFor: ['pullFrontend', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA gcr.io/ml-pipeline/frontend:$TAG_NAME
|
||||
docker tag gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA gcr.io/ml-pipeline/frontend:$COMMIT_SHA
|
||||
docker tag gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/frontend:$TAG_NAME
|
||||
docker tag gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/frontend:$TAG_NAME
|
||||
docker tag gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/frontend:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/frontend:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/frontend:$TAG_NAME
|
||||
docker push gcr.io/ml-pipeline/frontend:$COMMIT_SHA
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/frontend:$TAG_NAME
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/frontend:$TAG_NAME
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/frontend:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/frontend:$(cat /workspace/mm.ver)
|
||||
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA']
|
||||
id: 'pullAPIServer'
|
||||
- id: 'tagAPIServerForMarketplaceMajorMinor'
|
||||
waitFor: ['pullAPIServer', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA 'gcr.io/ml-pipeline/api-server:$TAG_NAME'
|
||||
docker tag gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA 'gcr.io/ml-pipeline/api-server:$COMMIT_SHA'
|
||||
docker tag gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA 'gcr.io/ml-pipeline/google/pipelines/apiserver:$TAG_NAME'
|
||||
docker tag gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA 'gcr.io/ml-pipeline/google/pipelines-test/apiserver:$TAG_NAME'
|
||||
docker tag gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/apiserver:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/apiserver:$(cat /workspace/mm.ver)
|
||||
docker push 'gcr.io/ml-pipeline/api-server:$TAG_NAME'
|
||||
docker push 'gcr.io/ml-pipeline/api-server:$COMMIT_SHA'
|
||||
docker push 'gcr.io/ml-pipeline/google/pipelines/apiserver:$TAG_NAME'
|
||||
docker push 'gcr.io/ml-pipeline/google/pipelines-test/apiserver:$TAG_NAME'
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/apiserver:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/apiserver:$(cat /workspace/mm.ver)
|
||||
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/$PROJECT_ID/scheduledworkflow:$COMMIT_SHA']
|
||||
id: 'pullScheduledworkflow'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/scheduledworkflow:$COMMIT_SHA', 'gcr.io/ml-pipeline/scheduledworkflow:$TAG_NAME']
|
||||
id: 'tagScheduledworkflowVersionNumber'
|
||||
waitFor: ['pullScheduledworkflow']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/scheduledworkflow:$COMMIT_SHA', 'gcr.io/ml-pipeline/scheduledworkflow:$COMMIT_SHA']
|
||||
id: 'tagScheduledworkflowCommitSHA'
|
||||
waitFor: ['pullScheduledworkflow']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/scheduledworkflow:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/scheduledworkflow:$TAG_NAME']
|
||||
id: 'tagScheduledworkflowForMarketplace'
|
||||
waitFor: ['pullScheduledworkflow']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/scheduledworkflow:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/scheduledworkflow:$TAG_NAME']
|
||||
id: 'tagScheduledworkflowForMarketplaceTest'
|
||||
waitFor: ['pullScheduledworkflow']
|
||||
- id: 'tagScheduledworkflowForMarketplaceMajorMinor'
|
||||
waitFor: ['pullScheduledworkflow', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/$PROJECT_ID/scheduledworkflow:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/scheduledworkflow:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/$PROJECT_ID/scheduledworkflow:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/scheduledworkflow:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/scheduledworkflow:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/scheduledworkflow:$(cat /workspace/mm.ver)
|
||||
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/$PROJECT_ID/viewer-crd-controller:$COMMIT_SHA']
|
||||
id: 'pullViewerCrdController'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/viewer-crd-controller:$COMMIT_SHA', 'gcr.io/ml-pipeline/viewer-crd-controller:$TAG_NAME']
|
||||
id: 'tagViewerCrdControllerVersionNumber'
|
||||
waitFor: ['pullViewerCrdController']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/viewer-crd-controller:$COMMIT_SHA', 'gcr.io/ml-pipeline/viewer-crd-controller:$COMMIT_SHA']
|
||||
id: 'tagViewerCrdControllerCommitSHA'
|
||||
waitFor: ['pullViewerCrdController']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/viewer-crd-controller:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/viewercrd:$TAG_NAME']
|
||||
id: 'tagViewerCrdControllerForMarketplace'
|
||||
waitFor: ['pullViewerCrdController']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/viewer-crd-controller:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/viewercrd:$TAG_NAME']
|
||||
id: 'tagViewerCrdControllerForMarketplaceTest'
|
||||
waitFor: ['pullViewerCrdController']
|
||||
- id: 'tagViewerCrdControllerForMarketplaceMajorMinor'
|
||||
waitFor: ['pullViewerCrdController', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/$PROJECT_ID/viewer-crd-controller:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/viewercrd:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/$PROJECT_ID/viewer-crd-controller:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/viewercrd:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/viewercrd:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/viewercrd:$(cat /workspace/mm.ver)
|
||||
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/$PROJECT_ID/persistenceagent:$COMMIT_SHA']
|
||||
id: 'pullPersistenceagent'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/persistenceagent:$COMMIT_SHA', 'gcr.io/ml-pipeline/persistenceagent:$TAG_NAME']
|
||||
id: 'tagPersistenceagentVersionNumber'
|
||||
waitFor: ['pullPersistenceagent']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/persistenceagent:$COMMIT_SHA', 'gcr.io/ml-pipeline/persistenceagent:$COMMIT_SHA']
|
||||
id: 'tagPersistenceagentCommitSHA'
|
||||
waitFor: ['pullPersistenceagent']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/persistenceagent:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/persistenceagent:$TAG_NAME']
|
||||
id: 'tagPersistenceagentForMarketplace'
|
||||
waitFor: ['pullPersistenceagent']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/persistenceagent:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/persistenceagent:$TAG_NAME']
|
||||
id: 'tagPersistenceagentForMarketplaceTest'
|
||||
waitFor: ['pullPersistenceagent']
|
||||
- id: 'tagPersistenceagentForMarketplaceMajorMinor'
|
||||
waitFor: ['pullPersistenceagent', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/$PROJECT_ID/persistenceagent:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/persistenceagent:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/$PROJECT_ID/persistenceagent:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/persistenceagent:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/persistenceagent:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/persistenceagent:$(cat /workspace/mm.ver)
|
||||
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/$PROJECT_ID/inverse-proxy-agent:$COMMIT_SHA']
|
||||
id: 'pullInverseProxyAgent'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/inverse-proxy-agent:$COMMIT_SHA', 'gcr.io/ml-pipeline/inverse-proxy-agent:$TAG_NAME']
|
||||
id: 'tagInverseProxyAgentVersionNumber'
|
||||
waitFor: ['pullInverseProxyAgent']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/inverse-proxy-agent:$COMMIT_SHA', 'gcr.io/ml-pipeline/inverse-proxy-agent:$COMMIT_SHA']
|
||||
id: 'tagInverseProxyAgentCommitSHA'
|
||||
waitFor: ['pullInverseProxyAgent']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/inverse-proxy-agent:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/proxyagent:$TAG_NAME']
|
||||
id: 'tagInverseProxyAgentForMarketplace'
|
||||
waitFor: ['pullInverseProxyAgent']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/inverse-proxy-agent:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/proxyagent:$TAG_NAME']
|
||||
id: 'tagInverseProxyAgentForMarketplaceTest'
|
||||
waitFor: ['pullInverseProxyAgent']
|
||||
- id: 'tagInverseProxyAgentForMarketplaceMajorMinor'
|
||||
waitFor: ['pullInverseProxyAgent', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/$PROJECT_ID/inverse-proxy-agent:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/proxyagent:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/$PROJECT_ID/inverse-proxy-agent:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/proxyagent:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/proxyagent:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/proxyagent:$(cat /workspace/mm.ver)
|
||||
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/$PROJECT_ID/visualization-server:$COMMIT_SHA']
|
||||
id: 'pullVisualizationServer'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/visualization-server:$COMMIT_SHA', 'gcr.io/ml-pipeline/visualization-server:$TAG_NAME']
|
||||
id: 'tagVisualizationServerVersionNumber'
|
||||
waitFor: ['pullVisualizationServer']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/visualization-server:$COMMIT_SHA', 'gcr.io/ml-pipeline/visualization-server:$COMMIT_SHA']
|
||||
id: 'tagVisualizationServerCommitSHA'
|
||||
waitFor: ['pullVisualizationServer']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/visualization-server:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/visualizationserver:$TAG_NAME']
|
||||
id: 'tagVisualizationServerForMarketplace'
|
||||
waitFor: ['pullVisualizationServer']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/visualization-server:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/visualizationserver:$TAG_NAME']
|
||||
id: 'tagVisualizationServerForMarketplaceTest'
|
||||
waitFor: ['pullVisualizationServer']
|
||||
- id: 'tagVisualizationServerForMarketplaceMajorMinor'
|
||||
waitFor: ['pullVisualizationServer', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/$PROJECT_ID/visualization-server:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/visualizationserver:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/$PROJECT_ID/visualization-server:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/visualizationserver:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/visualizationserver:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/visualizationserver:$(cat /workspace/mm.ver)
|
||||
|
||||
# ! Sync to the same MLMD version:
|
||||
# * backend/metadata_writer/requirements.in and requirements.txt
|
||||
# * @kubeflow/frontend/src/mlmd/generated
|
||||
# * .cloudbuild.yaml and .release.cloudbuild.yaml
|
||||
# * manifests/kustomize/base/metadata/base/metadata-grpc-deployment.yaml
|
||||
# * test/tag_for_hosted.sh
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/tfx-oss-public/ml_metadata_store_server:1.14.0']
|
||||
id: 'pullMetadataServer'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/tfx-oss-public/ml_metadata_store_server:1.14.0', 'gcr.io/ml-pipeline/google/pipelines/metadataserver:$TAG_NAME']
|
||||
id: 'tagMetadataServerForMarketplace'
|
||||
waitFor: ['pullMetadataServer']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/tfx-oss-public/ml_metadata_store_server:1.14.0', 'gcr.io/ml-pipeline/google/pipelines-test/metadataserver:$TAG_NAME']
|
||||
id: 'tagMetadataServerForMarketplaceTest'
|
||||
waitFor: ['pullMetadataServer']
|
||||
- id: 'tagMetadataServerForMarketplaceMajorMinor'
|
||||
waitFor: ['pullMetadataServer', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/tfx-oss-public/ml_metadata_store_server:1.14.0 gcr.io/ml-pipeline/google/pipelines/metadataserver:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/tfx-oss-public/ml_metadata_store_server:1.14.0 gcr.io/ml-pipeline/google/pipelines-test/metadataserver:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/metadataserver:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/metadataserver:$(cat /workspace/mm.ver)
|
||||
|
||||
- id: 'pullMetadataWriter'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA']
|
||||
waitFor: ['-']
|
||||
- id: 'tagMetadataWriterVersionNumber'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA', 'gcr.io/ml-pipeline/metadata-writer:$TAG_NAME']
|
||||
waitFor: ['pullMetadataWriter']
|
||||
- id: 'tagMetadataWriterCommitSHA'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA', 'gcr.io/ml-pipeline/metadata-writer:$COMMIT_SHA']
|
||||
waitFor: ['pullMetadataWriter']
|
||||
- id: 'tagMetadataWriterForMarketplace'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/metadatawriter:$TAG_NAME']
|
||||
waitFor: ['pullMetadataWriter']
|
||||
- id: 'tagMetadataWriterForMarketplaceTest'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/metadatawriter:$TAG_NAME']
|
||||
waitFor: ['pullMetadataWriter']
|
||||
- id: 'tagMetadataWriterForMarketplaceMajorMinor'
|
||||
waitFor: ['pullMetadataWriter', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/metadatawriter:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/metadatawriter:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/metadatawriter:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/metadatawriter:$(cat /workspace/mm.ver)
|
||||
|
||||
- id: 'pullCacheServer'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA']
|
||||
waitFor: ['-']
|
||||
- id: 'tagCacheServerVersionNumber'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA', 'gcr.io/ml-pipeline/cache-server:$TAG_NAME']
|
||||
waitFor: ['pullCacheServer']
|
||||
- id: 'tagCacheServerCommitSHA'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA', 'gcr.io/ml-pipeline/cache-server:$COMMIT_SHA']
|
||||
waitFor: ['pullCacheServer']
|
||||
- id: 'tagCacheServerForMarketplace'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/cacheserver:$TAG_NAME']
|
||||
waitFor: ['pullCacheServer']
|
||||
- id: 'tagCacheServerForMarketplaceTest'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/cacheserver:$TAG_NAME']
|
||||
waitFor: ['pullCacheServer']
|
||||
- id: 'tagCacheServerForMarketplaceMajorMinor'
|
||||
waitFor: ['pullCacheServer', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/cacheserver:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/cacheserver:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/cacheserver:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/cacheserver:$(cat /workspace/mm.ver)
|
||||
|
||||
- id: 'pullCacheDeployer'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA']
|
||||
waitFor: ['-']
|
||||
- id: 'tagCacheDeployerVersionNumber'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA', 'gcr.io/ml-pipeline/cache-deployer:$TAG_NAME']
|
||||
waitFor: ['pullCacheDeployer']
|
||||
- id: 'tagCacheDeployerCommitSHA'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA', 'gcr.io/ml-pipeline/cache-deployer:$COMMIT_SHA']
|
||||
waitFor: ['pullCacheDeployer']
|
||||
- id: 'tagCacheDeployerForMarketplace'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/cachedeployer:$TAG_NAME']
|
||||
waitFor: ['pullCacheDeployer']
|
||||
- id: 'tagCacheDeployerForMarketplaceTest'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/cachedeployer:$TAG_NAME']
|
||||
waitFor: ['pullCacheDeployer']
|
||||
- id: 'tagCacheDeployerForMarketplaceMajorMinor'
|
||||
waitFor: ['pullCacheDeployer', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/cachedeployer:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/cachedeployer:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/cachedeployer:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/cachedeployer:$(cat /workspace/mm.ver)
|
||||
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA']
|
||||
id: 'pullMetadataEnvoy'
|
||||
- id: 'tagMetadataEnvoyVersionNumber'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA', 'gcr.io/ml-pipeline/metadata-envoy:$TAG_NAME']
|
||||
waitFor: ['pullMetadataEnvoy']
|
||||
- id: 'tagMetadataEnvoyCommitSHA'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA', 'gcr.io/ml-pipeline/metadata-envoy:$COMMIT_SHA']
|
||||
waitFor: ['pullMetadataEnvoy']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/metadataenvoy:$TAG_NAME']
|
||||
id: 'tagMetadataEnvoyForMarketplace'
|
||||
waitFor: ['pullMetadataEnvoy']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/metadataenvoy:$TAG_NAME']
|
||||
id: 'tagMetadataEnvoyForMarketplaceTest'
|
||||
waitFor: ['pullMetadataEnvoy']
|
||||
- id: 'tagMetadataEnvoyForMarketplaceMajorMinor'
|
||||
waitFor: ['pullMetadataEnvoy', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/metadataenvoy:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/metadataenvoy:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/metadataenvoy:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/metadataenvoy:$(cat /workspace/mm.ver)
|
||||
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/ml-pipeline/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance']
|
||||
id: 'pullMinio'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/ml-pipeline/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance', 'gcr.io/ml-pipeline/google/pipelines/minio:$TAG_NAME']
|
||||
id: 'tagMinioForMarketplace'
|
||||
waitFor: ['pullMinio']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/ml-pipeline/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance', 'gcr.io/ml-pipeline/google/pipelines-test/minio:$TAG_NAME']
|
||||
id: 'tagMinioForMarketplaceTest'
|
||||
waitFor: ['pullMinio']
|
||||
- id: 'tagMinioForMarketplaceMajorMinor'
|
||||
waitFor: ['pullMinio', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/ml-pipeline/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance gcr.io/ml-pipeline/google/pipelines/minio:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/ml-pipeline/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance gcr.io/ml-pipeline/google/pipelines-test/minio:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/minio:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/minio:$(cat /workspace/mm.ver)
|
||||
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/ml-pipeline/mysql:8.0.26']
|
||||
id: 'pullMysql'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/ml-pipeline/mysql:8.0.26', 'gcr.io/ml-pipeline/google/pipelines/mysql:$TAG_NAME']
|
||||
id: 'tagMySqlForMarketplace'
|
||||
waitFor: ['pullMysql']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/ml-pipeline/mysql:8.0.26', 'gcr.io/ml-pipeline/google/pipelines-test/mysql:$TAG_NAME']
|
||||
id: 'tagMySqlForMarketplaceTest'
|
||||
waitFor: ['pullMysql']
|
||||
- id: 'tagMySqlForMarketplaceMajorMinor'
|
||||
waitFor: ['pullMysql', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/ml-pipeline/mysql:8.0.26 gcr.io/ml-pipeline/google/pipelines/mysql:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/ml-pipeline/mysql:8.0.26 gcr.io/ml-pipeline/google/pipelines-test/mysql:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/mysql:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/mysql:$(cat /workspace/mm.ver)
|
||||
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/cloudsql-docker/gce-proxy:1.25.0']
|
||||
id: 'pullCloudsqlProxy'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/cloudsql-docker/gce-proxy:1.25.0', 'gcr.io/ml-pipeline/google/pipelines/cloudsqlproxy:$TAG_NAME']
|
||||
id: 'tagCloudSqlProxyForMarketplace'
|
||||
waitFor: ['pullCloudsqlProxy']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/cloudsql-docker/gce-proxy:1.25.0', 'gcr.io/ml-pipeline/google/pipelines-test/cloudsqlproxy:$TAG_NAME']
|
||||
id: 'tagCloudSqlProxyForMarketplaceTest'
|
||||
waitFor: ['pullCloudsqlProxy']
|
||||
- id: 'tagCloudSqlProxyForMarketplaceMajorMinor'
|
||||
waitFor: ['pullCloudsqlProxy', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/cloudsql-docker/gce-proxy:1.25.0 gcr.io/ml-pipeline/google/pipelines/cloudsqlproxy:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/cloudsql-docker/gce-proxy:1.25.0 gcr.io/ml-pipeline/google/pipelines-test/cloudsqlproxy:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/cloudsqlproxy:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/cloudsqlproxy:$(cat /workspace/mm.ver)
|
||||
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/ml-pipeline/argoexec:v3.4.16-license-compliance']
|
||||
id: 'pullArgoExecutor'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/ml-pipeline/argoexec:v3.4.16-license-compliance', 'gcr.io/ml-pipeline/google/pipelines/argoexecutor:$TAG_NAME']
|
||||
id: 'tagArgoExecutorForMarketplace'
|
||||
waitFor: ['pullArgoExecutor']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/ml-pipeline/argoexec:v3.4.16-license-compliance', 'gcr.io/ml-pipeline/google/pipelines-test/argoexecutor:$TAG_NAME']
|
||||
id: 'tagArgoExecutorForMarketplaceTest'
|
||||
waitFor: ['pullArgoExecutor']
|
||||
- id: 'tagArgoExecutorForMarketplaceMajorMinor'
|
||||
waitFor: ['pullArgoExecutor', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/ml-pipeline/argoexec:v3.4.16-license-compliance gcr.io/ml-pipeline/google/pipelines/argoexecutor:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/ml-pipeline/argoexec:v3.4.16-license-compliance gcr.io/ml-pipeline/google/pipelines-test/argoexecutor:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/argoexecutor:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/argoexecutor:$(cat /workspace/mm.ver)
|
||||
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/ml-pipeline/workflow-controller:v3.4.16-license-compliance']
|
||||
id: 'pullArgoWorkflowController'
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/ml-pipeline/workflow-controller:v3.4.16-license-compliance', 'gcr.io/ml-pipeline/google/pipelines/argoworkflowcontroller:$TAG_NAME']
|
||||
id: 'tagArgoWorkflowControllerForMarketplace'
|
||||
waitFor: ['pullArgoWorkflowController']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/ml-pipeline/workflow-controller:v3.4.16-license-compliance', 'gcr.io/ml-pipeline/google/pipelines-test/argoworkflowcontroller:$TAG_NAME']
|
||||
id: 'tagArgoWorkflowControllerForMarketplaceTest'
|
||||
waitFor: ['pullArgoWorkflowController']
|
||||
- id: 'tagArgoWorkflowControllerForMarketplaceMajorMinor'
|
||||
waitFor: ['pullArgoWorkflowController', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/ml-pipeline/workflow-controller:v3.4.16-license-compliance gcr.io/ml-pipeline/google/pipelines/argoworkflowcontroller:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/ml-pipeline/workflow-controller:v3.4.16-license-compliance gcr.io/ml-pipeline/google/pipelines-test/argoworkflowcontroller:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/argoworkflowcontroller:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/argoworkflowcontroller:$(cat /workspace/mm.ver)
|
||||
|
||||
# Marketplace specific deployer and specific primary image
|
||||
- id: 'pullMarketplaceDeployer'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['pull', 'gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA']
|
||||
waitFor: ['-']
|
||||
- id: 'tagMarketplaceDeployerVersionNumber'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/deployer:$TAG_NAME']
|
||||
waitFor: ['pullMarketplaceDeployer']
|
||||
- id: 'tagMarketplaceDeployerVersionNumberTest'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/deployer:$TAG_NAME']
|
||||
waitFor: ['pullMarketplaceDeployer']
|
||||
- id: 'tagMarketplaceDeployerVersionNumberMajorMinor'
|
||||
waitFor: ['pullMarketplaceDeployer', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/deployer:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/deployer:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines/deployer:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test/deployer:$(cat /workspace/mm.ver)
|
||||
|
||||
- id: 'tagMarketplacePrimaryVersionNumber'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines:$TAG_NAME']
|
||||
waitFor: ['pullMarketplaceDeployer']
|
||||
- id: 'tagMarketplacePrimaryVersionNumberTest'
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
args: ['tag', 'gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test:$TAG_NAME']
|
||||
waitFor: ['pullMarketplaceDeployer']
|
||||
- id: 'tagMarketplacePrimaryVersionNumberMajorMinor'
|
||||
waitFor: ['pullMarketplaceDeployer', 'parseMajorMinorVersion']
|
||||
name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: bash
|
||||
args:
|
||||
- -ceux
|
||||
- |
|
||||
docker tag gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines:$(cat /workspace/mm.ver)
|
||||
docker tag gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines:$(cat /workspace/mm.ver)
|
||||
docker push gcr.io/ml-pipeline/google/pipelines-test:$(cat /workspace/mm.ver)
|
||||
|
||||
# # Copy the Python SDK
|
||||
# - name: 'gcr.io/cloud-builders/gsutil'
|
||||
# args: ['cp', 'gs://$PROJECT_ID/builds/$COMMIT_SHA/kfp.tar.gz', '/workspace/']
|
||||
# id: 'copyPythonSDKLocal'
|
||||
# - name: 'gcr.io/cloud-builders/gsutil'
|
||||
# args: ['cp', '/workspace/kfp.tar.gz', 'gs://ml-pipeline/release/$TAG_NAME/kfp.tar.gz']
|
||||
# id: 'copyPythonSDK'
|
||||
# waitFor: ['copyPythonSDKLocal']
|
||||
# - name: 'gcr.io/cloud-builders/gsutil'
|
||||
# args: ['cp', '/workspace/kfp.tar.gz', 'gs://ml-pipeline/release/latest/kfp.tar.gz']
|
||||
# id: 'copyPythonSDKToLatest'
|
||||
# waitFor: ['copyPythonSDKLocal']
|
||||
|
||||
# # Copy the Python Component SDK
|
||||
# - name: 'gcr.io/cloud-builders/gsutil'
|
||||
# args: ['cp', 'gs://$PROJECT_ID/builds/$COMMIT_SHA/kfp-component.tar.gz', '/workspace/']
|
||||
# id: 'copyPythonComponentSDKLocal'
|
||||
# - name: 'gcr.io/cloud-builders/gsutil'
|
||||
# args: ['cp', '/workspace/kfp-component.tar.gz', 'gs://ml-pipeline/release/$TAG_NAME/kfp-component.tar.gz']
|
||||
# id: 'copyPythonComponentSDK'
|
||||
# waitFor: ['copyPythonComponentSDKLocal']
|
||||
# - name: 'gcr.io/cloud-builders/gsutil'
|
||||
# args: ['cp', '/workspace/kfp-component.tar.gz', 'gs://ml-pipeline/release/latest/kfp-component.tar.gz']
|
||||
# id: 'copyPythonComponentSDKToLatest'
|
||||
# waitFor: ['copyPythonComponentSDKLocal']
|
||||
|
||||
images:
|
||||
- 'gcr.io/ml-pipeline/scheduledworkflow:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/scheduledworkflow:$COMMIT_SHA'
|
||||
- 'gcr.io/ml-pipeline/persistenceagent:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/persistenceagent:$COMMIT_SHA'
|
||||
- 'gcr.io/ml-pipeline/viewer-crd-controller:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/viewer-crd-controller:$COMMIT_SHA'
|
||||
- 'gcr.io/ml-pipeline/inverse-proxy-agent:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/inverse-proxy-agent:$COMMIT_SHA'
|
||||
- 'gcr.io/ml-pipeline/visualization-server:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/visualization-server:$COMMIT_SHA'
|
||||
- 'gcr.io/ml-pipeline/metadata-envoy:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/metadata-envoy:$COMMIT_SHA'
|
||||
- 'gcr.io/ml-pipeline/metadata-writer:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/metadata-writer:$COMMIT_SHA'
|
||||
- 'gcr.io/ml-pipeline/cache-server:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/cache-server:$COMMIT_SHA'
|
||||
- 'gcr.io/ml-pipeline/cache-deployer:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/cache-deployer:$COMMIT_SHA'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/scheduledworkflow:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/viewercrd:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/persistenceagent:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/proxyagent:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/visualizationserver:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/metadataserver:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/minio:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/mysql:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/cloudsqlproxy:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/argoexecutor:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/argoworkflowcontroller:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/metadataenvoy:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/metadatawriter:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/deployer:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/cacheserver:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines/cachedeployer:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/scheduledworkflow:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/viewercrd:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/persistenceagent:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/proxyagent:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/visualizationserver:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/metadataserver:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/minio:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/mysql:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/cloudsqlproxy:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/argoexecutor:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/argoworkflowcontroller:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/metadataenvoy:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/metadatawriter:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/cacheserver:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/cachedeployer:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test/deployer:$TAG_NAME'
|
||||
- 'gcr.io/ml-pipeline/google/pipelines-test:$TAG_NAME'
|
||||
timeout: '2000s'
|
||||
tags:
|
||||
- release-on-tag
|
|
@ -0,0 +1,12 @@
|
|||
# Adopters of Kubeflow Pipelines
|
||||
|
||||
Below are the adopters of project Kubeflow Pipelines. If you are using Kubeflow Pipelines
|
||||
please add yourself into the following list by a pull request.
|
||||
Please keep the list in alphabetical order.
|
||||
|
||||
| Organization | Contact | Description of Use |
|
||||
|------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------|--------------------------------------------------------|
|
||||
| [Capital One](https://www.capitalone.com/) | [@droctothorpe](https://github.com/droctothorpe) | ML/AI Workflow orchestration |
|
||||
| [IBM Research Foundation Model Data Engineering Team](https://www.research.ibm.com/) | [@yuanchi2807](https://github.com/yuanchi2807), [@roytman](https://github.com/roytman) | Foundation Model Data Engineering |
|
||||
| [Red Hat](https://www.redhat.com/) | [@franciscojavierarceo](https://github.com/franciscojavierarceo) | ML/AI & Data orchestration |
|
||||
|
496
CHANGELOG.md
496
CHANGELOG.md
|
@ -1,5 +1,455 @@
|
|||
# Changelog
|
||||
|
||||
## [2.14.0](https://github.com/kubeflow/pipelines/compare/2.5.0...2.14.0) (2025-08-05)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* **backend:** Add CLI flags to support Kubernetes native API implementation ([\#11907](https://github.com/kubeflow/pipelines/issues/11907)) ([c368ac6](https://github.com/kubeflow/pipelines/commit/c368ac6881b75331a3b7aa31e3adb36eacf858a1))
|
||||
* **backend:** Add migration script to create Pipeline and PipelineVersion objects from the database to Kubernetes API ([\#11884](https://github.com/kubeflow/pipelines/issues/11884)) ([988477a](https://github.com/kubeflow/pipelines/commit/988477a7de1c9376abb51618c4b926226afe587f))
|
||||
* **backend:** Add support for platform specs on K8s native API ([\#12016](https://github.com/kubeflow/pipelines/issues/12016)) ([04407fb](https://github.com/kubeflow/pipelines/commit/04407fbe9397def09fd1e12df34190ba95f5d8a2))
|
||||
* **backend:** Add the Kubernetes native pipeline store ([\#11881](https://github.com/kubeflow/pipelines/issues/11881)) ([c03127d](https://github.com/kubeflow/pipelines/commit/c03127d9671ed2ac29350840b4a56c5cf0c227eb))
|
||||
* **backend:** add the option to enable/disable cache globally ([\#11831](https://github.com/kubeflow/pipelines/issues/11831)) ([9aebb62](https://github.com/kubeflow/pipelines/commit/9aebb62be1a9412d960fc681787468e1e3ab2001))
|
||||
* **backend:** Allow the launcher command to be configurable ([\#11888](https://github.com/kubeflow/pipelines/issues/11888)) ([70d2888](https://github.com/kubeflow/pipelines/commit/70d28885f27bff40397f9c9ea790e5985deb91e1))
|
||||
* **backend:** parameterize retryStrategy input in Argo workflow ([\#11861](https://github.com/kubeflow/pipelines/issues/11861)) ([9245739](https://github.com/kubeflow/pipelines/commit/9245739f6fdd76769cd477f8952706cfe0eabc34))
|
||||
* **backend:** support for optional input parameters in nested pipelines ([\#11980](https://github.com/kubeflow/pipelines/issues/11980)) ([ecfe94e](https://github.com/kubeflow/pipelines/commit/ecfe94ebc3adc0dd41da9a944056ce4170ce9064))
|
||||
* **backend:** Support more than one label & annotations setting per component ([\#12049](https://github.com/kubeflow/pipelines/issues/12049)) ([a870b1a](https://github.com/kubeflow/pipelines/commit/a870b1a325dae0c82c8b6f57941468ee1aea960b))
|
||||
* **backend:** update Argo Workflow Compiler to create workspace PVCs ([\#11982](https://github.com/kubeflow/pipelines/issues/11982)) ([daac099](https://github.com/kubeflow/pipelines/commit/daac099508865670f41eeeef135fa22f9ec880f1))
|
||||
* **backend:** Use native k8s probes for api-server and visualization ([\#11960](https://github.com/kubeflow/pipelines/issues/11960)) ([cc78308](https://github.com/kubeflow/pipelines/commit/cc7830812ae3ced24962238dcbf5f113f28c4772))
|
||||
* **backend/frontend:** Add the name field for pipelines and pipeline versions ([\#11952](https://github.com/kubeflow/pipelines/issues/11952)) ([ea20731](https://github.com/kubeflow/pipelines/commit/ea207310601ffaf807b148a79a9ddcb8d4812886))
|
||||
* **backend/sdk:** enable dsl.Collected for parameters & artifacts ([\#11725](https://github.com/kubeflow/pipelines/issues/11725)) ([ed828b5](https://github.com/kubeflow/pipelines/commit/ed828b513aef4826f1f05e47168cf7b08a3c74ab))
|
||||
* **backend/sdk:** support PipelineTaskFinalStatus input ([\#11953](https://github.com/kubeflow/pipelines/issues/11953)) ([0d857b6](https://github.com/kubeflow/pipelines/commit/0d857b6f8a1261477bb05bc65aa474d992b57084))
|
||||
* **docs:** erdiagram of kfp-db ([\#12009](https://github.com/kubeflow/pipelines/issues/12009)) ([99326e1](https://github.com/kubeflow/pipelines/commit/99326e1bd13e6223c1cfb657524c9e5926d95ce5))
|
||||
* **docs:** Guide to report security vulnerabilities ([\#12044](https://github.com/kubeflow/pipelines/issues/12044)) ([9aa3dfb](https://github.com/kubeflow/pipelines/commit/9aa3dfb0d9810e3d3940b728e2e39dad1e956587))
|
||||
* **frontend:** Add "Always Use Latest Version" option for recurring runs (fixes [\#11581](https://github.com/kubeflow/pipelines/issues/11581)) ([\#11755](https://github.com/kubeflow/pipelines/issues/11755)) ([0e7e806](https://github.com/kubeflow/pipelines/commit/0e7e806b4cd52c20397220d6e0e8db79cae35894))
|
||||
* **proto:** Add WorkspaceConfig and KubernetesWorkspaceConfig message types to pipeline_spec proto ([\#11921](https://github.com/kubeflow/pipelines/issues/11921)) ([67f9b7d](https://github.com/kubeflow/pipelines/commit/67f9b7d73c895fd0fd027488ba20c255918d735e))
|
||||
* **sdk:** Add support for compiling pipelines to Kubernetes native format in SDK ([\#12012](https://github.com/kubeflow/pipelines/issues/12012)) ([dc398f6](https://github.com/kubeflow/pipelines/commit/dc398f689eb0b19e86fdbb554b33d9f6cb1095e3))
|
||||
* **sdk:** Add Support for Docker Container Run Arguments ([\#12006](https://github.com/kubeflow/pipelines/issues/12006)) ([268e089](https://github.com/kubeflow/pipelines/commit/268e0898ecdd3f9447988b6e675676eb21c584f0))
|
||||
* **sdk:** update PipelineConfig to reflect new workspace Protobuf changes ([\#11934](https://github.com/kubeflow/pipelines/issues/11934)) ([dda6033](https://github.com/kubeflow/pipelines/commit/dda6033a03a8f69e51c6672d964169521744357b))
|
||||
* **ui:** add `ESC` shortcut for closing SidePanel. Fixes [\#11873](https://github.com/kubeflow/pipelines/issues/11873) ([\#11874](https://github.com/kubeflow/pipelines/issues/11874)) ([c3d05eb](https://github.com/kubeflow/pipelines/commit/c3d05eb0b1ee492098c9db769371d975ebf0241a))
|
||||
* add openshift env to manifests ([\#11932](https://github.com/kubeflow/pipelines/issues/11932)) ([8329e64](https://github.com/kubeflow/pipelines/commit/8329e64716dab0e3485381f712aeadc80beb05f3))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **backend:** omit unknown fields in json marshaling ([\#12101](https://github.com/kubeflow/pipelines/issues/12101)) ([000a111](https://github.com/kubeflow/pipelines/commit/000a111396213847a437296afb511564ccc0c60d))
|
||||
* **deps:** revert metadata_writer k8s version ([\#12099](https://github.com/kubeflow/pipelines/issues/12099)) ([c64feac](https://github.com/kubeflow/pipelines/commit/c64feac20edf6129523fd5d8241d1f7140107fdd))
|
||||
* **sdk,backend:** Make the workspace size required ([\#12094](https://github.com/kubeflow/pipelines/issues/12094)) ([4bd3d4b](https://github.com/kubeflow/pipelines/commit/4bd3d4b4e99b5af38380ddad9693a2a0bbe4e968))
|
||||
* backwards compatibility for pipeline spec task_name ([\#12061](https://github.com/kubeflow/pipelines/issues/12061)) ([bcb9ee4](https://github.com/kubeflow/pipelines/commit/bcb9ee4324c4606f34ac000315b8b2f60df8c31e))
|
||||
* **apiserver:** fix typos in resource_manager.go: ([\#11998](https://github.com/kubeflow/pipelines/issues/11998)) ([3154ef9](https://github.com/kubeflow/pipelines/commit/3154ef9258c1dfb179fdb5d036989b126b2412fe))
|
||||
* **backend:** Fix boolean flag syntax for `--cache_disabled` and update test to cover pipelines with outputs ([\#12001](https://github.com/kubeflow/pipelines/issues/12001)) ([f240685](https://github.com/kubeflow/pipelines/commit/f240685bf3169251ca343fc985bbb5607be3f727))
|
||||
* **backend:** Fix the pipeline samples ([\#11967](https://github.com/kubeflow/pipelines/issues/11967)) ([b477269](https://github.com/kubeflow/pipelines/commit/b4772693ae0f7d90425f604ebdafbda1a1c4a5f3))
|
||||
* **backend:** increase max_metadata_size for ml-metadata grpc server ([\#12062](https://github.com/kubeflow/pipelines/issues/12062)) ([09ced38](https://github.com/kubeflow/pipelines/commit/09ced38500725c5e8542ce4885eee8d3cee58f02))
|
||||
* **backend:** Stop logging the stack trace on benign user errors ([\#11883](https://github.com/kubeflow/pipelines/issues/11883)) ([56da004](https://github.com/kubeflow/pipelines/commit/56da004d91d8db9b46b57544d1ad6699ddb4de4c))
|
||||
* **backend/sdk:** update proto packages ([\#12067](https://github.com/kubeflow/pipelines/issues/12067)) ([cc35187](https://github.com/kubeflow/pipelines/commit/cc35187dff270008c6bef505c828f442773ec97d))
|
||||
* **cache:** Optimize cache expiration query solves [\#11614](https://github.com/kubeflow/pipelines/issues/11614) ([\#11920](https://github.com/kubeflow/pipelines/issues/11920)) ([faa2c8c](https://github.com/kubeflow/pipelines/commit/faa2c8cd16bd5dddec2c30dd456f6d8f55f12471))
|
||||
* **components:** Changed method_whitelist. Fixes [\#11880](https://github.com/kubeflow/pipelines/issues/11880) ([\#11961](https://github.com/kubeflow/pipelines/issues/11961)) ([a123d53](https://github.com/kubeflow/pipelines/commit/a123d53be0158b961e8527999392358a4403d191))
|
||||
* **sdk:** Fixes for Identifying Untagged Images for Running ([\#11984](https://github.com/kubeflow/pipelines/issues/11984)) ([e8e23f0](https://github.com/kubeflow/pipelines/commit/e8e23f0d7a0a583b93e11e5690504306f4e0091e))
|
||||
* **sdk:** Move version info to version.py for editable installs. ([\#11997](https://github.com/kubeflow/pipelines/issues/11997)) ([ada935a](https://github.com/kubeflow/pipelines/commit/ada935a0ad86d02329e3cb6b9a36a399d9fd4a79))
|
||||
* **sdk:** Support partial replace of placeholders in dict/list objects ([\#12039](https://github.com/kubeflow/pipelines/issues/12039)) ([ce84873](https://github.com/kubeflow/pipelines/commit/ce848730b8f4bf8a2d07f3b53691d9801b0e60f5))
|
||||
* Delete the pipeline along with its all versions. Fixes [\#11665](https://github.com/kubeflow/pipelines/issues/11665) ([\#12019](https://github.com/kubeflow/pipelines/issues/12019)) ([ad2730c](https://github.com/kubeflow/pipelines/commit/ad2730cf19febf2d8a0a7d42ac8eafe1bb458a14))
|
||||
* input resolution with set_display_name ([\#11938](https://github.com/kubeflow/pipelines/issues/11938)) ([7f60100](https://github.com/kubeflow/pipelines/commit/7f60100602deefa3ed6ffbef22a948b7790c360f))
|
||||
* **sdk:** fix pip install for dev ([\#11891](https://github.com/kubeflow/pipelines/issues/11891)) ([4503eae](https://github.com/kubeflow/pipelines/commit/4503eae10d19518bc35310c57d2ef957b31f959c))
|
||||
* **sdk:** resolve issue when creating pipeline version from pipeline name using the cli. Fixes [\#11810](https://github.com/kubeflow/pipelines/issues/11810) ([\#11866](https://github.com/kubeflow/pipelines/issues/11866)) ([c68640d](https://github.com/kubeflow/pipelines/commit/c68640d95038f1b577caa44a2ff0bd966d059b94))
|
||||
* **sdk:** Resolves issue when using ParallelFor with param and depending tasks ([\#11903](https://github.com/kubeflow/pipelines/issues/11903)) ([ef94ccd](https://github.com/kubeflow/pipelines/commit/ef94ccd734957bdee3bbb98ea043738bb5795dc7))
|
||||
* **test:** Fix the frontend sample test ([\#11968](https://github.com/kubeflow/pipelines/issues/11968)) ([5447563](https://github.com/kubeflow/pipelines/commit/54475637a241f55957149a32b80cb8c44d8f6458))
|
||||
* **tests:** free up space in kfp samples test workflow ([\#11942](https://github.com/kubeflow/pipelines/issues/11942)) ([8fe090d](https://github.com/kubeflow/pipelines/commit/8fe090d461d7c2e1226c2fc46a80479790bcd2e5))
|
||||
|
||||
|
||||
### Other Pull Requests
|
||||
|
||||
* revert changelog ([62082a4](https://github.com/kubeflow/pipelines/commit/62082a4812b84ed00102f05a81e8e924f02f36fe))
|
||||
* test generating backend proto go code ([\#12108](https://github.com/kubeflow/pipelines/issues/12108)) ([5685e88](https://github.com/kubeflow/pipelines/commit/5685e884f459da6d1f63c46a3185a7a24b22dfca))
|
||||
* update python packages to 2.14 ([3ffde88](https://github.com/kubeflow/pipelines/commit/3ffde88e5f306268e00ccbe0e450dd76e7baf1bf))
|
||||
* add versioning policy for KFP ([\#12105](https://github.com/kubeflow/pipelines/issues/12105)) ([9f5abab](https://github.com/kubeflow/pipelines/commit/9f5abab7d2d995b5a5f197c7e697fb30bc8c9ab7))
|
||||
* feat(sdk) Add Input Parameter support for node affinity ([\#12028](https://github.com/kubeflow/pipelines/issues/12028)) ([ecf488b](https://github.com/kubeflow/pipelines/commit/ecf488b65fed923595ed048a2d0e9ba3d932f409))
|
||||
* Fix broken Pipeline Root documentation link ([\#12051](https://github.com/kubeflow/pipelines/issues/12051)) ([f20cec5](https://github.com/kubeflow/pipelines/commit/f20cec5b7097a628f18765d6160dd2316f41dec4))
|
||||
* add new KFP maintainers ([\#12059](https://github.com/kubeflow/pipelines/issues/12059)) ([69a1846](https://github.com/kubeflow/pipelines/commit/69a184637a97458037d65f6a9b938013d9e2f579))
|
||||
* use master for dev manifests ([\#11977](https://github.com/kubeflow/pipelines/issues/11977)) ([5181358](https://github.com/kubeflow/pipelines/commit/5181358d6a4ca4d0923ac90733cf83470763cdd2))
|
||||
* feat(frontend) Use native k8s probes for pipeline-ui ([\#11955](https://github.com/kubeflow/pipelines/issues/11955)) ([48468ae](https://github.com/kubeflow/pipelines/commit/48468ae1fa126f8668e275817f77023a585175f9))
|
||||
* feat(backend) implement retryStrategy for nested pipelines ([\#11908](https://github.com/kubeflow/pipelines/issues/11908)) ([beae62f](https://github.com/kubeflow/pipelines/commit/beae62fb528fc9044af54bf2c46771727d5d22b2))
|
||||
* - fix(launcher): missing executorInput parameter values caused by {{$}} evaluation order ([\#11925](https://github.com/kubeflow/pipelines/issues/11925)) ([3337b5e](https://github.com/kubeflow/pipelines/commit/3337b5e32377653968f477b05e5cbd5d6a081bdf))
|
||||
* add maintainer to kfp ([\#11900](https://github.com/kubeflow/pipelines/issues/11900)) ([e276474](https://github.com/kubeflow/pipelines/commit/e276474f970a1a92db7cf8c01d5ef716acc3ee4b))
|
||||
* No public description ([d90e4e8](https://github.com/kubeflow/pipelines/commit/d90e4e8a54fdd08a73ca9b0ebb404e7cb6035f7c))
|
||||
* update release doc paths & make script executable ([\#11871](https://github.com/kubeflow/pipelines/issues/11871)) ([8a402c1](https://github.com/kubeflow/pipelines/commit/8a402c10a8e3d36f964fb451760319c99a185e8f))
|
||||
|
||||
## [2.5.0](https://github.com/kubeflow/pipelines/compare/2.4.1...2.5.0) (2025-04-29)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* **backend:** Add a mutating webhook for the PipelineVersion kind ([\#11782](https://github.com/kubeflow/pipelines/issues/11782)) ([c9be64d](https://github.com/kubeflow/pipelines/commit/c9be64dca362a33dcfad186fe579066a646a6df1))
|
||||
* **backend:** Add the ability to set a proxy for accessing external resources ([\#11771](https://github.com/kubeflow/pipelines/issues/11771)) ([6e3548f](https://github.com/kubeflow/pipelines/commit/6e3548f33e226ba374e4d43a175ae8ac9018e268))
|
||||
* **backend:** Add types for KFP Kubernetes Native API ([\#11672](https://github.com/kubeflow/pipelines/issues/11672)) ([0d9a7b0](https://github.com/kubeflow/pipelines/commit/0d9a7b00e926130b07058ea71148fbb9cab69d2b))
|
||||
* **backend:** Create a validating webhook for the PipelineVersion kind ([\#11774](https://github.com/kubeflow/pipelines/issues/11774)) ([2efcde5](https://github.com/kubeflow/pipelines/commit/2efcde5efd3952b91ea79a5ee6dbf064282f719a))
|
||||
* **backend:** implement logs as artifacts ([\#11762](https://github.com/kubeflow/pipelines/issues/11762)) ([cd3e747](https://github.com/kubeflow/pipelines/commit/cd3e747b5de3d7e1e338e309cc57311dd4a91258))
|
||||
* **backend:** implement logs as artifacts + CI updates ([\#11809](https://github.com/kubeflow/pipelines/issues/11809)) ([464ca39](https://github.com/kubeflow/pipelines/commit/464ca3974fbbc46e022f863e49c4fbaabd1a8265))
|
||||
* **backend/sdk:** Add input parameterization for various k8s resources ([\#11770](https://github.com/kubeflow/pipelines/issues/11770)) ([fd1b48b](https://github.com/kubeflow/pipelines/commit/fd1b48b4712038afe8a78e37843672d4773dc080))
|
||||
* **proto:** Add TTL fields to KFP IR yaml proto ([\#11758](https://github.com/kubeflow/pipelines/issues/11758)) ([c5aba41](https://github.com/kubeflow/pipelines/commit/c5aba41bcaf3c214d984db4571c1ecae4a0d551d))
|
||||
* **sdk:** add upload pipeline and upload pipeline version from pipeline function ([\#11804](https://github.com/kubeflow/pipelines/issues/11804)) ([1ad4f60](https://github.com/kubeflow/pipelines/commit/1ad4f608a0b9dea2362cf89f9cf7abdebf20e080))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **backend:** Fix run submissions with OwnerReferencesPermissionEnforcement on ([\#11821](https://github.com/kubeflow/pipelines/issues/11821)) ([69ba50b](https://github.com/kubeflow/pipelines/commit/69ba50b3fb03bd8441f833950a6c77835a2d47a1))
|
||||
* **backend:** fixed Dockerfile ([\#11841](https://github.com/kubeflow/pipelines/issues/11841)) ([d38418e](https://github.com/kubeflow/pipelines/commit/d38418efeadec3ea3bea55b3373bcc311dacc135))
|
||||
* **backend:** Include missing go.mod for cacheserver/viewercontroller images ([\#11776](https://github.com/kubeflow/pipelines/issues/11776)) ([715ed40](https://github.com/kubeflow/pipelines/commit/715ed40b92f9bca521f94e0df5201425d9d30866))
|
||||
* **components:** Set tensorboard_experiment_name to random uuid when uploading plots in Starry Net pipeline ([cc56d04](https://github.com/kubeflow/pipelines/commit/cc56d04c46d01666a8e091b124473c2654e1b6d3))
|
||||
* **deployment:** Update kustomize manifests to use new label and patch syntax ([\#11733](https://github.com/kubeflow/pipelines/issues/11733)) ([230c1b8](https://github.com/kubeflow/pipelines/commit/230c1b8f1332ffab575b2e69b65d9a6958167195))
|
||||
* **deps:** widen urllib3 upper bound to <3.0.0 ([\#11819](https://github.com/kubeflow/pipelines/issues/11819)) ([866ff35](https://github.com/kubeflow/pipelines/commit/866ff3556a4454ccb52f1594bbca4167a04c0d3e))
|
||||
* **docs:** Remove Podman as backend README pre-req ([\#11824](https://github.com/kubeflow/pipelines/issues/11824)) ([88cff55](https://github.com/kubeflow/pipelines/commit/88cff559142e5a985cf31620f07b71244645cb4a))
|
||||
* **docs:** Use the latest driver and launcher images in the dev environment ([\#11820](https://github.com/kubeflow/pipelines/issues/11820)) ([92e4921](https://github.com/kubeflow/pipelines/commit/92e4921c4cce8155093bf7e332abfbf03bd6eaef))
|
||||
* **local:** warn about oci:// not supported too ([\#11794](https://github.com/kubeflow/pipelines/issues/11794)) ([564522c](https://github.com/kubeflow/pipelines/commit/564522c42de9136dec67f1bf29590bdd64bf2333))
|
||||
* **metadata-writer:** use mlmd_store.get_context_types() instead of workaround ([\#11753](https://github.com/kubeflow/pipelines/issues/11753)) ([35041ef](https://github.com/kubeflow/pipelines/commit/35041ef2bd4d9b3261f1250f5803786ed9e453fe))
|
||||
* **sdk:** Add SDK support for setting resource limits on older KFP versions ([\#11839](https://github.com/kubeflow/pipelines/issues/11839)) ([f9d487c](https://github.com/kubeflow/pipelines/commit/f9d487cb605727f357f58783db298d96898b24d1))
|
||||
* **sdk:** allow google-cloud-storage < 4 ([\#11735](https://github.com/kubeflow/pipelines/issues/11735)) ([bd4fc5c](https://github.com/kubeflow/pipelines/commit/bd4fc5c6677402d5f2d9ac45481ac86f25da4640))
|
||||
* **sdk:** avoid conflicting component names in DAG when reusing pipelines ([\#11071](https://github.com/kubeflow/pipelines/issues/11071)) ([d1b15ef](https://github.com/kubeflow/pipelines/commit/d1b15ef4da33cbeafa491564318c7e2a68dc431f))
|
||||
* **tests:** free up space in some test runners ([\#11818](https://github.com/kubeflow/pipelines/issues/11818)) ([478ca08](https://github.com/kubeflow/pipelines/commit/478ca089012e64edd371feff4ece9d0d156d4710))
|
||||
* minio fsgroup for popular clusters ([\#11734](https://github.com/kubeflow/pipelines/issues/11734)) ([8d0ae53](https://github.com/kubeflow/pipelines/commit/8d0ae5381e8366905c90009c56fd0e4807e94f0f))
|
||||
|
||||
|
||||
### Other Pull Requests
|
||||
|
||||
* Fix Integration tests ([0359551](https://github.com/kubeflow/pipelines/commit/0359551b7601165ba8bf7cc24fdc1858224c0d2d))
|
||||
* add list or dict support for add toleration json ([fb18235](https://github.com/kubeflow/pipelines/commit/fb182355f08e41eff1ac530be1afac0bad69e15d))
|
||||
* add backend support for toleration lists. ([90909fc](https://github.com/kubeflow/pipelines/commit/90909fc0ef58b71362017a3e48c924b38c389183))
|
||||
* switch selenium image to ghcr ([7529bbe](https://github.com/kubeflow/pipelines/commit/7529bbeba7f245366ca1cbc280169e20a7100a6a))
|
||||
* add missing release note updates to sdk main branch ([\#11842](https://github.com/kubeflow/pipelines/issues/11842)) ([611d582](https://github.com/kubeflow/pipelines/commit/611d5820049dc51ddf261d7d1368c4858dad5159))
|
||||
* fix component retry test ([\#11836](https://github.com/kubeflow/pipelines/issues/11836)) ([598826e](https://github.com/kubeflow/pipelines/commit/598826e1ccfecb5f34716876053a22cdc6605ae4))
|
||||
* **chore:** add cleanup resources to sdk execution tests ([\#11823](https://github.com/kubeflow/pipelines/issues/11823)) ([eee4986](https://github.com/kubeflow/pipelines/commit/eee4986f180cd4e7469a65a3c5f4ffbf3ec0b46c))
|
||||
* update driver & launcher image handling ([\#11533](https://github.com/kubeflow/pipelines/issues/11533)) ([38a4653](https://github.com/kubeflow/pipelines/commit/38a46533fcd47aa31e825109e2bf6940d127910a))
|
||||
* **chore:** add image builds for default branch ([\#11800](https://github.com/kubeflow/pipelines/issues/11800)) ([eacb586](https://github.com/kubeflow/pipelines/commit/eacb586f6225bb277642f4977552f799850e06a1))
|
||||
* fix setup env for kfp k8s lib tests ([\#11798](https://github.com/kubeflow/pipelines/issues/11798)) ([f10c7bf](https://github.com/kubeflow/pipelines/commit/f10c7bfbbcf01eb25f2fa8a437da62bbf07dc1f5))
|
||||
* Handle optional pipeline inputs in the driver ([\#11788](https://github.com/kubeflow/pipelines/issues/11788)) ([bb7a108](https://github.com/kubeflow/pipelines/commit/bb7a1082c4c5a3fb308aac2bf37bab476c3c4df6))
|
||||
* Fix recurring run output when always using latest ([\#11790](https://github.com/kubeflow/pipelines/issues/11790)) ([048f283](https://github.com/kubeflow/pipelines/commit/048f28332b6a0b6684632e76dcb284de2f81d829))
|
||||
* increase stale action timers ([\#11792](https://github.com/kubeflow/pipelines/issues/11792)) ([ade8a2d](https://github.com/kubeflow/pipelines/commit/ade8a2d072efa9897a5a0173316836236d629238))
|
||||
* Fix PSS restricted warnings ([\#11751](https://github.com/kubeflow/pipelines/issues/11751)) ([01999b8](https://github.com/kubeflow/pipelines/commit/01999b8fea23db52da0f633e475c457fc06ca531))
|
||||
* fix(CI) Github action is vulnerable to code execution via `comment body` ([\#11772](https://github.com/kubeflow/pipelines/issues/11772)) ([95c3f2c](https://github.com/kubeflow/pipelines/commit/95c3f2c04d8f19b8b656ddbda046ed9f2c81130a))
|
||||
* Fix Istio sidecar injection by moving from annotations to labels ([\#11750](https://github.com/kubeflow/pipelines/issues/11750)) ([df4e9c2](https://github.com/kubeflow/pipelines/commit/df4e9c2bf5b645f4a3fa831b073846eae5eaceb7))
|
||||
|
||||
## [2.5.0](https://github.com/kubeflow/pipelines/compare/2.3.0...2.5.0) (2025-04-28)
|
||||
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
* **sdk:** stop auto-populating metrics as dag output ([\#11362](https://github.com/kubeflow/pipelines/issues/11362))
|
||||
* **components:** Deprecate preview.custom_job module
|
||||
* **sdk:** Pin kfp-pipeline-spec==0.4.0, kfp-server-api>=2.1.0,<2.4.0 ([\#11192](https://github.com/kubeflow/pipelines/issues/11192))
|
||||
|
||||
### Features
|
||||
|
||||
* **api:** add PipelineConfig to api to re-implement pipeline-level config ([\#11333](https://github.com/kubeflow/pipelines/issues/11333)) ([c2f5649](https://github.com/kubeflow/pipelines/commit/c2f56495b9b1e9eda1b44b6106e12d5290a89ed7))
|
||||
* **api:** Add SemaphoreKey and MutexName fields to proto ([\#11384](https://github.com/kubeflow/pipelines/issues/11384)) ([915cc55](https://github.com/kubeflow/pipelines/commit/915cc552f56359454b91870df0e5eea1ecda2218))
|
||||
* **backend:** Add a mutating webhook for the PipelineVersion kind ([\#11782](https://github.com/kubeflow/pipelines/issues/11782)) ([c9be64d](https://github.com/kubeflow/pipelines/commit/c9be64dca362a33dcfad186fe579066a646a6df1))
|
||||
* **backend:** add configurable S3 path style support ([\#11246](https://github.com/kubeflow/pipelines/issues/11246)) ([85fdd73](https://github.com/kubeflow/pipelines/commit/85fdd73ae0bb1c2ce01da6311807b37cfc589710))
|
||||
* **backend:** Add Parallelism Limit to ParallelFor tasks. Fixes [\#8718](https://github.com/kubeflow/pipelines/issues/8718) ([\#10798](https://github.com/kubeflow/pipelines/issues/10798)) ([b7d8c97](https://github.com/kubeflow/pipelines/commit/b7d8c97d65af575b71efe6755eb67b0bb9126f01))
|
||||
* **backend:** Add support for importing models stored in the Modelcar format (sidecar) ([\#11606](https://github.com/kubeflow/pipelines/issues/11606)) ([cc1c435](https://github.com/kubeflow/pipelines/commit/cc1c435f1e06aad3e9b83e57768512a63460b15b))
|
||||
* **backend:** Add support for job and task placeholders in the KFP backend ([\#11599](https://github.com/kubeflow/pipelines/issues/11599)) ([6a13f4b](https://github.com/kubeflow/pipelines/commit/6a13f4bad07e5a22006b73b21515df9d597222f0))
|
||||
* **backend:** add support for uploading new sample pipeline vers ([\#11553](https://github.com/kubeflow/pipelines/issues/11553)) ([d2ddb2e](https://github.com/kubeflow/pipelines/commit/d2ddb2ed1c2afa64fd6014c95190416ff5cdd621))
|
||||
* **backend:** Add the ability to set a proxy for accessing external resources ([\#11771](https://github.com/kubeflow/pipelines/issues/11771)) ([6e3548f](https://github.com/kubeflow/pipelines/commit/6e3548f33e226ba374e4d43a175ae8ac9018e268))
|
||||
* **backend:** Add types for KFP Kubernetes Native API ([\#11672](https://github.com/kubeflow/pipelines/issues/11672)) ([0d9a7b0](https://github.com/kubeflow/pipelines/commit/0d9a7b00e926130b07058ea71148fbb9cab69d2b))
|
||||
* **backend:** Allow recurring runs to always use the latest pipeline version ([\#11560](https://github.com/kubeflow/pipelines/issues/11560)) ([9c5b72c](https://github.com/kubeflow/pipelines/commit/9c5b72c2d0b298fc1b6c984e135b74e7a736a7b9))
|
||||
* **backend:** configurable log level for driver / launcher images ([\#11278](https://github.com/kubeflow/pipelines/issues/11278)) ([d2c0376](https://github.com/kubeflow/pipelines/commit/d2c0376b0aff70304fe049c415dc97a22d883966))
|
||||
* **backend:** Create a validating webhook for the PipelineVersion kind ([\#11774](https://github.com/kubeflow/pipelines/issues/11774)) ([2efcde5](https://github.com/kubeflow/pipelines/commit/2efcde5efd3952b91ea79a5ee6dbf064282f719a))
|
||||
* **backend:** implement logs as artifacts ([\#11762](https://github.com/kubeflow/pipelines/issues/11762)) ([cd3e747](https://github.com/kubeflow/pipelines/commit/cd3e747b5de3d7e1e338e309cc57311dd4a91258))
|
||||
* **backend:** implement logs as artifacts + CI updates ([\#11809](https://github.com/kubeflow/pipelines/issues/11809)) ([464ca39](https://github.com/kubeflow/pipelines/commit/464ca3974fbbc46e022f863e49c4fbaabd1a8265))
|
||||
* **backend:** implement subdag output resolution ([\#11196](https://github.com/kubeflow/pipelines/issues/11196)) ([c5b787a](https://github.com/kubeflow/pipelines/commit/c5b787aacc4fddeeb1ebc526a83159540cd7b311))
|
||||
* **backend:** Remove PipelineSpec Template storage from ObjStore responsibilities. Fixes [\#10509](https://github.com/kubeflow/pipelines/issues/10509) ([\#10790](https://github.com/kubeflow/pipelines/issues/10790)) ([374b18b](https://github.com/kubeflow/pipelines/commit/374b18bc3366a51f4b92821cdb3a942bc12343a0))
|
||||
* **backend/sdk:** Add input parameterization for various k8s resources ([\#11770](https://github.com/kubeflow/pipelines/issues/11770)) ([fd1b48b](https://github.com/kubeflow/pipelines/commit/fd1b48b4712038afe8a78e37843672d4773dc080))
|
||||
* **cli:** expose existing_token client property ([\#11400](https://github.com/kubeflow/pipelines/issues/11400)) ([35793be](https://github.com/kubeflow/pipelines/commit/35793be4168584b1084169b723bfb216aa4a03b6))
|
||||
* **component:** Created Snowflake data unload component ([\#11349](https://github.com/kubeflow/pipelines/issues/11349)) ([22e7780](https://github.com/kubeflow/pipelines/commit/22e77805ed41a72837f7cd15a9d679f42169b253))
|
||||
* **component:** execute in a virtual env ([\#11326](https://github.com/kubeflow/pipelines/issues/11326)) ([df28e89](https://github.com/kubeflow/pipelines/commit/df28e891c4374f7eac98cc6a4892b6e6c35a43f2))
|
||||
* **components:** Add reservation_affinity support in v1.create_custom_training_job_from_component ([c84241b](https://github.com/kubeflow/pipelines/commit/c84241b7362c0351109bc0ddbc2f697479ff8675))
|
||||
* **components:** add strategy to v1 GCPC custom job components/utils ([1cdd648](https://github.com/kubeflow/pipelines/commit/1cdd648239ff850bf5baae48e4e7bd1b24330dd5))
|
||||
* **components:** Deprecate preview.custom_job module ([abbd915](https://github.com/kubeflow/pipelines/commit/abbd915a2ac32b22151efef662b937601602ba9d))
|
||||
* **components:** Fix typos: 'statgey' -> 'strategy' in v1 GCPC custom job components/utils ([dcaf5a4](https://github.com/kubeflow/pipelines/commit/dcaf5a48e5feae6e61de6c033bee5f8f6675a630))
|
||||
* **components:** Introduce max_wait_duration to custom job to v1 GCPC custom job components/utils ([6cb7cf7](https://github.com/kubeflow/pipelines/commit/6cb7cf71fa81cc27e87a002f3d7685d9fc828d88))
|
||||
* **docs:** Add architecture diagram ([\#11490](https://github.com/kubeflow/pipelines/issues/11490)) ([3e423d8](https://github.com/kubeflow/pipelines/commit/3e423d8d1c0f8f7700a4b2138d8e9bd20a02a9f2))
|
||||
* **docs:** Replace ADRs with KEPs for documentation ([\#11535](https://github.com/kubeflow/pipelines/issues/11535)) ([7497b65](https://github.com/kubeflow/pipelines/commit/7497b65067aa1e596605c03e8dc4c07c963f907b))
|
||||
* **frontend/backend:** Allow the ability to sort experiments by last run creation. Fixes [\#10884](https://github.com/kubeflow/pipelines/issues/10884) ([\#11163](https://github.com/kubeflow/pipelines/issues/11163)) ([db8669c](https://github.com/kubeflow/pipelines/commit/db8669c33e60bb8910710359c0638d21ec27ac7c))
|
||||
* **proto:** Add TTL fields to KFP IR yaml proto ([\#11758](https://github.com/kubeflow/pipelines/issues/11758)) ([c5aba41](https://github.com/kubeflow/pipelines/commit/c5aba41bcaf3c214d984db4571c1ecae4a0d551d))
|
||||
* **sdk:** Add Input Parameter support for configmap, secrets, node selectors, tolerations, pull secrets ([\#11621](https://github.com/kubeflow/pipelines/issues/11621)) ([7838009](https://github.com/kubeflow/pipelines/commit/78380095385be25e69b891ccb312b2857a200fdd))
|
||||
* **sdk:** add upload pipeline and upload pipeline version from pipeline function ([\#11804](https://github.com/kubeflow/pipelines/issues/11804)) ([1ad4f60](https://github.com/kubeflow/pipelines/commit/1ad4f608a0b9dea2362cf89f9cf7abdebf20e080))
|
||||
* Introduce cache_key for cache key customization ([\#11434](https://github.com/kubeflow/pipelines/issues/11434)) ([50b367f](https://github.com/kubeflow/pipelines/commit/50b367f232b2d37b762745c8b4296a29c9d8fd45))
|
||||
* Introduce cache_key to sdk ([\#11466](https://github.com/kubeflow/pipelines/issues/11466)) ([42fc132](https://github.com/kubeflow/pipelines/commit/42fc13261628d764296607d9e12ecad13e721a68))
|
||||
* **sdk:** add PipelineConfig to DSL to re-implement pipeline-level config ([\#11112](https://github.com/kubeflow/pipelines/issues/11112)) ([df4d787](https://github.com/kubeflow/pipelines/commit/df4d7878c4ce25c801a916351bcbce1266a9daf1))
|
||||
* **sdk:** Allow disabling default caching via a CLI flag and env var ([\#11222](https://github.com/kubeflow/pipelines/issues/11222)) ([3f49522](https://github.com/kubeflow/pipelines/commit/3f495229f26ef08360048d050dfe014ca4b57b4f))
|
||||
* **sdk:** Pin kfp-pipeline-spec==0.4.0, kfp-server-api>=2.1.0,<2.4.0 ([\#11192](https://github.com/kubeflow/pipelines/issues/11192)) ([dfd4cc1](https://github.com/kubeflow/pipelines/commit/dfd4cc1e537523b04b01b6e209b5760bd2a007d5))
|
||||
* **sdk:** stop auto-populating metrics as dag output ([\#11362](https://github.com/kubeflow/pipelines/issues/11362)) ([8d018af](https://github.com/kubeflow/pipelines/commit/8d018aff6ed14b5bed7b3f90d9f450b3144ae18e))
|
||||
* **sdk:** support dynamic machine type parameters in pipeline task setters ([\#11097](https://github.com/kubeflow/pipelines/issues/11097)) ([70aaf8a](https://github.com/kubeflow/pipelines/commit/70aaf8a9a469607dc6e4aad58d40b39c75363b99))
|
||||
* **sdk/backend:** Add support for placeholders in resource limits ([\#11501](https://github.com/kubeflow/pipelines/issues/11501)) ([7c931ae](https://github.com/kubeflow/pipelines/commit/7c931ae20197b2309d7a8462f6ce099882a8f915))
|
||||
* **sdk/backend:** enable parameterization of container images ([\#11404](https://github.com/kubeflow/pipelines/issues/11404)) ([22e85de](https://github.com/kubeflow/pipelines/commit/22e85de2bcbd2ff5ed2a099e4f11a39ff27e4190))
|
||||
* **testing:** use kustomize to patch deployments before deploy ([\#11294](https://github.com/kubeflow/pipelines/issues/11294)) ([be863a8](https://github.com/kubeflow/pipelines/commit/be863a852997718701a1ee548d9db86dca7ffc33))
|
||||
* add fields in SinglePlatformSpec ([\#11299](https://github.com/kubeflow/pipelines/issues/11299)) ([a0d313e](https://github.com/kubeflow/pipelines/commit/a0d313e095c2b5fc1a32809c38cf96b13e5772b2))
|
||||
* **workflows:** use built images in Github workflows ([\#11284](https://github.com/kubeflow/pipelines/issues/11284)) ([1550b36](https://github.com/kubeflow/pipelines/commit/1550b363aed3745b476d2b3798725432329e8cea))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **backend:** Allow initializing the Kubernetes client with a kubeconfig ([\#11443](https://github.com/kubeflow/pipelines/issues/11443)) ([87bdb7c](https://github.com/kubeflow/pipelines/commit/87bdb7c3b1126ae5e899826be0834c11764edbae))
|
||||
* **backend:** Fix enable_caching issues when handling PVC creation/deletion ([\#11411](https://github.com/kubeflow/pipelines/issues/11411)) ([027ca8b](https://github.com/kubeflow/pipelines/commit/027ca8b9c9fe2bb603b2a08c843e726ed4837a1d))
|
||||
* **backend:** Fix run submissions with OwnerReferencesPermissionEnforcement on ([\#11821](https://github.com/kubeflow/pipelines/issues/11821)) ([69ba50b](https://github.com/kubeflow/pipelines/commit/69ba50b3fb03bd8441f833950a6c77835a2d47a1))
|
||||
* **backend:** fixed Dockerfile ([\#11841](https://github.com/kubeflow/pipelines/issues/11841)) ([d38418e](https://github.com/kubeflow/pipelines/commit/d38418efeadec3ea3bea55b3373bcc311dacc135))
|
||||
* **backend:** fixes DAG status update to reflect completion of all tasks ([\#11651](https://github.com/kubeflow/pipelines/issues/11651)) ([7719b38](https://github.com/kubeflow/pipelines/commit/7719b38061d489246294bd53b49aacfc105c8a19))
|
||||
* **backend:** handle client side HTTP timeouts to fix crashes of metadata-writer. Fixes [\#8200](https://github.com/kubeflow/pipelines/issues/8200) ([\#11361](https://github.com/kubeflow/pipelines/issues/11361)) ([94a21cc](https://github.com/kubeflow/pipelines/commit/94a21cc7e27a3824732e7f4c09a4d8b826dde5b8))
|
||||
* **backend:** ignore unknown fields for pb json unmarshaling ([\#11662](https://github.com/kubeflow/pipelines/issues/11662)) ([9afe23e](https://github.com/kubeflow/pipelines/commit/9afe23e74866f30f7333ea47abfe2d9efa4098c7))
|
||||
* **backend:** Include missing go.mod for cacheserver/viewercontroller images ([\#11776](https://github.com/kubeflow/pipelines/issues/11776)) ([715ed40](https://github.com/kubeflow/pipelines/commit/715ed40b92f9bca521f94e0df5201425d9d30866))
|
||||
* **backend:** modelToCRDTrigger was not including periodic schedule correctly ([\#11475](https://github.com/kubeflow/pipelines/issues/11475)) ([97acacb](https://github.com/kubeflow/pipelines/commit/97acacbd2a0b72d442398ca04382ac1e6d9aa37f))
|
||||
* **backend:** parallelFor resolve upstream inputs. Fixes [\#11520](https://github.com/kubeflow/pipelines/issues/11520) ([\#11627](https://github.com/kubeflow/pipelines/issues/11627)) ([f7c0616](https://github.com/kubeflow/pipelines/commit/f7c0616db75ac92581f973e8f05f962b965255b1))
|
||||
* **backend:** randomizing output uri path to avoid overwriting. Fixes [\#10186](https://github.com/kubeflow/pipelines/issues/10186) ([\#11243](https://github.com/kubeflow/pipelines/issues/11243)) ([219725d](https://github.com/kubeflow/pipelines/commit/219725d9f02b690cf0829a21faf092a3e4c65531))
|
||||
* **backend:** remove unused function argument ([\#11425](https://github.com/kubeflow/pipelines/issues/11425)) ([7f2278f](https://github.com/kubeflow/pipelines/commit/7f2278f25222992bedfcae5b613a7a06430f4542))
|
||||
* **backend:** removed old version comment ([\#11549](https://github.com/kubeflow/pipelines/issues/11549)) ([906b5c0](https://github.com/kubeflow/pipelines/commit/906b5c084146506e71685d2324566bd15dc25bec))
|
||||
* **backend:** Replaced hardcoded ServiceAccount with default config ([\#11578](https://github.com/kubeflow/pipelines/issues/11578)) ([18641e1](https://github.com/kubeflow/pipelines/commit/18641e16cbac7512f8f63f001acafd8a0bf52924))
|
||||
* **backend:** return error properly ([\#11424](https://github.com/kubeflow/pipelines/issues/11424)) ([13f83cf](https://github.com/kubeflow/pipelines/commit/13f83cf745eb5628d6ae5b25c1ca979d8c6d92ad))
|
||||
* **backend:** set default value to true for ForcePathStyle ([\#11281](https://github.com/kubeflow/pipelines/issues/11281)) ([391de8c](https://github.com/kubeflow/pipelines/commit/391de8ca9ec68fe4cd85bba6c82348386fc79842))
|
||||
* **backend:** stop heartbeat status updates for ScheduledWorkflows. Fixes [\#8757](https://github.com/kubeflow/pipelines/issues/8757) ([\#11363](https://github.com/kubeflow/pipelines/issues/11363)) ([9ccec4c](https://github.com/kubeflow/pipelines/commit/9ccec4c7d1aff4d2bfdb20cf4fd1f9d64b8632f4))
|
||||
* **backend:** Synced ScheduledWorkflow CRs on apiserver startup ([\#11469](https://github.com/kubeflow/pipelines/issues/11469)) ([d21fca6](https://github.com/kubeflow/pipelines/commit/d21fca650c8152d992ad5f7f590f70b1368bc60b))
|
||||
* **backend:** the metacontroller is broken since [\#11474](https://github.com/kubeflow/pipelines/issues/11474) ([\#11608](https://github.com/kubeflow/pipelines/issues/11608)) ([a40163f](https://github.com/kubeflow/pipelines/commit/a40163fdf2fe281cda91baf2f122c23664d5fcb9))
|
||||
* **backend:** upgrade go version to 1.22.12 to fix CVE-2024-45336 ([\#11631](https://github.com/kubeflow/pipelines/issues/11631)) ([87498e8](https://github.com/kubeflow/pipelines/commit/87498e8b60a167eccfef7cc29f888808ca954155))
|
||||
* **backend:** upgrade PyYAML to fix metadata_writer build error ([\#11231](https://github.com/kubeflow/pipelines/issues/11231)) ([a4119a6](https://github.com/kubeflow/pipelines/commit/a4119a6bf1fe220c84aaa5caa7051c423b5f145e))
|
||||
* **backend:** upgrade various old dependencies ([\#11448](https://github.com/kubeflow/pipelines/issues/11448)) ([803d7a8](https://github.com/kubeflow/pipelines/commit/803d7a8ebb00924107b890de01e2a53af78d9a5e))
|
||||
* **backend:** Use an Argo Workflow exit lifecycle hook for exit handlers ([\#11470](https://github.com/kubeflow/pipelines/issues/11470)) ([3059f7c](https://github.com/kubeflow/pipelines/commit/3059f7c124dc95f867e6f755f7c0720aaa32d48b))
|
||||
* **CI:** Use the correct image registry for replacements in integration tests ([\#11564](https://github.com/kubeflow/pipelines/issues/11564)) ([ac9b257](https://github.com/kubeflow/pipelines/commit/ac9b257a7a249c4b4c20b04d4c95ff8354c5b4e0))
|
||||
* **components:** Fix create_custom_training_job_from_component default location ([04d600b](https://github.com/kubeflow/pipelines/commit/04d600b2d36405f34799306c5d24287c75e31595))
|
||||
* **components:** remove default prediction column names in evaluation regression component to fix issues with bigquery data source ([753a2f1](https://github.com/kubeflow/pipelines/commit/753a2f148ac3f001bc785acc6359295e6fe521fd))
|
||||
* **components:** Set tensorboard_experiment_name to random uuid when uploading plots in Starry Net pipeline ([cc56d04](https://github.com/kubeflow/pipelines/commit/cc56d04c46d01666a8e091b124473c2654e1b6d3))
|
||||
* **deployment:** Update kustomize manifests to use new label and patch syntax ([\#11733](https://github.com/kubeflow/pipelines/issues/11733)) ([230c1b8](https://github.com/kubeflow/pipelines/commit/230c1b8f1332ffab575b2e69b65d9a6958167195))
|
||||
* **deps:** widen urllib3 upper bound to <3.0.0 ([\#11819](https://github.com/kubeflow/pipelines/issues/11819)) ([866ff35](https://github.com/kubeflow/pipelines/commit/866ff3556a4454ccb52f1594bbca4167a04c0d3e))
|
||||
* **docs:** Remove Podman as backend README pre-req ([\#11824](https://github.com/kubeflow/pipelines/issues/11824)) ([88cff55](https://github.com/kubeflow/pipelines/commit/88cff559142e5a985cf31620f07b71244645cb4a))
|
||||
* **docs:** Use the latest driver and launcher images in the dev environment ([\#11820](https://github.com/kubeflow/pipelines/issues/11820)) ([92e4921](https://github.com/kubeflow/pipelines/commit/92e4921c4cce8155093bf7e332abfbf03bd6eaef))
|
||||
* **frontend:** compatibility with pod_names v1 ([\#11682](https://github.com/kubeflow/pipelines/issues/11682)) ([afb3b14](https://github.com/kubeflow/pipelines/commit/afb3b1461bdd8c4d4cbc697abe1d7d1acfcdc38f))
|
||||
* **frontend:** Detailed information of nodes is not displayed when clicking the node. Fixes [\#11325](https://github.com/kubeflow/pipelines/issues/11325) ([\#11493](https://github.com/kubeflow/pipelines/issues/11493)) ([028d81b](https://github.com/kubeflow/pipelines/commit/028d81b624629d4610ddcdced5b982437ff88d08))
|
||||
* **frontend:** first time choosing a pipeline definition is VERY slow. Fixes [\#10897](https://github.com/kubeflow/pipelines/issues/10897) ([\#11130](https://github.com/kubeflow/pipelines/issues/11130)) ([cfb3b31](https://github.com/kubeflow/pipelines/commit/cfb3b3149d9ba02daec584af77ef763f936cd727))
|
||||
* **frontend:** Fix the frontend image build with Node 22 ([\#11524](https://github.com/kubeflow/pipelines/issues/11524)) ([533a3c6](https://github.com/kubeflow/pipelines/commit/533a3c6b667eb11b9cd7da2f6fe334252867fcc5))
|
||||
* **frontend:** fixes Default pipeline input params are missing from the GUI. Fixes [\#11515](https://github.com/kubeflow/pipelines/issues/11515) ([\#11518](https://github.com/kubeflow/pipelines/issues/11518)) ([8fe2157](https://github.com/kubeflow/pipelines/commit/8fe21574c644543fef55a2d515681d23fdfce508))
|
||||
* **frontend:** fixes optional pipeline inputs. Fixes [\#11632](https://github.com/kubeflow/pipelines/issues/11632) ([\#11657](https://github.com/kubeflow/pipelines/issues/11657)) ([a6b944b](https://github.com/kubeflow/pipelines/commit/a6b944b894a4a297a6310f5028a8c27e3603ac16))
|
||||
* **frontend:** restrict file explorer to show only .yaml, .yml, .zip, and .tar.gz files ([\#11623](https://github.com/kubeflow/pipelines/issues/11623)) ([c0778ba](https://github.com/kubeflow/pipelines/commit/c0778ba88c359d119453c2acc94c0168b3f53772))
|
||||
* **local:** warn about oci:// not supported too ([\#11794](https://github.com/kubeflow/pipelines/issues/11794)) ([564522c](https://github.com/kubeflow/pipelines/commit/564522c42de9136dec67f1bf29590bdd64bf2333))
|
||||
* **manifests:** Upgrading metacontroller to v4.11.22 ([\#11656](https://github.com/kubeflow/pipelines/issues/11656)) ([ebaaf75](https://github.com/kubeflow/pipelines/commit/ebaaf756319ac4ac9498aca5f7dfb3978ff36496))
|
||||
* **metadata-writer:** use mlmd_store.get_context_types() instead of workaround ([\#11753](https://github.com/kubeflow/pipelines/issues/11753)) ([35041ef](https://github.com/kubeflow/pipelines/commit/35041ef2bd4d9b3261f1250f5803786ed9e453fe))
|
||||
* **sdk:** accelerator type setting in kfp ([\#11373](https://github.com/kubeflow/pipelines/issues/11373)) ([64e3900](https://github.com/kubeflow/pipelines/commit/64e390069d6c60c97ea03e833529a0930398620f))
|
||||
* **sdk:** Add error handling. Fixes [\#11164](https://github.com/kubeflow/pipelines/issues/11164) ([\#11356](https://github.com/kubeflow/pipelines/issues/11356)) ([4a64fe9](https://github.com/kubeflow/pipelines/commit/4a64fe9532556a48585b9966db8e10c7de0a8d37))
|
||||
* **sdk:** Add SDK support for setting resource limits on older KFP versions ([\#11839](https://github.com/kubeflow/pipelines/issues/11839)) ([f9d487c](https://github.com/kubeflow/pipelines/commit/f9d487cb605727f357f58783db298d96898b24d1))
|
||||
* **sdk:** allow google-cloud-storage < 4 ([\#11735](https://github.com/kubeflow/pipelines/issues/11735)) ([bd4fc5c](https://github.com/kubeflow/pipelines/commit/bd4fc5c6677402d5f2d9ac45481ac86f25da4640))
|
||||
* **sdk:** avoid conflicting component names in DAG when reusing pipelines ([\#11071](https://github.com/kubeflow/pipelines/issues/11071)) ([d1b15ef](https://github.com/kubeflow/pipelines/commit/d1b15ef4da33cbeafa491564318c7e2a68dc431f))
|
||||
* **sdk:** Backport fixes in kubeflow/pipelines#11075 ([\#11392](https://github.com/kubeflow/pipelines/issues/11392)) ([6ebf4aa](https://github.com/kubeflow/pipelines/commit/6ebf4aae0335424d3bc88175fd06a2b2ba05251f))
|
||||
* **sdk:** dsl.component docstring typo ([\#11547](https://github.com/kubeflow/pipelines/issues/11547)) ([dbefbb8](https://github.com/kubeflow/pipelines/commit/dbefbb8ee935d8c2e86090121274e5d94dddf84e))
|
||||
* **tests:** free up space in some test runners ([\#11818](https://github.com/kubeflow/pipelines/issues/11818)) ([478ca08](https://github.com/kubeflow/pipelines/commit/478ca089012e64edd371feff4ece9d0d156d4710))
|
||||
* Extend env variables in ml-pipeline-ui deployment ([\#11552](https://github.com/kubeflow/pipelines/issues/11552)) ([a469b10](https://github.com/kubeflow/pipelines/commit/a469b10806a02ed01f6d7d08cdd90e8fc44b8a86))
|
||||
* minio fsgroup for popular clusters ([\#11734](https://github.com/kubeflow/pipelines/issues/11734)) ([8d0ae53](https://github.com/kubeflow/pipelines/commit/8d0ae5381e8366905c90009c56fd0e4807e94f0f))
|
||||
* Update broken api-connect link ([\#11521](https://github.com/kubeflow/pipelines/issues/11521)) ([a81b513](https://github.com/kubeflow/pipelines/commit/a81b51339c650b8b1fca9eeb7a2932bdfaab409f))
|
||||
* **tests:** remove redundant integration test wf ([\#11322](https://github.com/kubeflow/pipelines/issues/11322)) ([6a35ee5](https://github.com/kubeflow/pipelines/commit/6a35ee5144fba1c72badc7c52161d8a49f34804e))
|
||||
* **ui:** Disable GKE Metadata as default Fixes: [\#11247](https://github.com/kubeflow/pipelines/issues/11247), fixes [\#11260](https://github.com/kubeflow/pipelines/issues/11260) ([\#11403](https://github.com/kubeflow/pipelines/issues/11403)) ([23f718d](https://github.com/kubeflow/pipelines/commit/23f718d02e402bad5c9da1a3d76da5c4a97743b3))
|
||||
* **ui:** Fixes V1 Run detail unnecessary reloading. Fixes [\#10590](https://github.com/kubeflow/pipelines/issues/10590) ([\#11214](https://github.com/kubeflow/pipelines/issues/11214)) ([eee095e](https://github.com/kubeflow/pipelines/commit/eee095e5c8d53c0eae45165f72549afe5a5cb0e3))
|
||||
* **workflows:** patch reversed launcher / driver in workflow matrix ([\#11238](https://github.com/kubeflow/pipelines/issues/11238)) ([ceeda01](https://github.com/kubeflow/pipelines/commit/ceeda01d0a35bd84b79d8f2e7aa2e029cb1de06b))
|
||||
|
||||
|
||||
### Other Pull Requests
|
||||
|
||||
* Fix Integration tests ([0359551](https://github.com/kubeflow/pipelines/commit/0359551b7601165ba8bf7cc24fdc1858224c0d2d))
|
||||
* add list or dict support for add toleration json ([fb18235](https://github.com/kubeflow/pipelines/commit/fb182355f08e41eff1ac530be1afac0bad69e15d))
|
||||
* add backend support for toleration lists. ([90909fc](https://github.com/kubeflow/pipelines/commit/90909fc0ef58b71362017a3e48c924b38c389183))
|
||||
* switch selenium image to ghcr ([7529bbe](https://github.com/kubeflow/pipelines/commit/7529bbeba7f245366ca1cbc280169e20a7100a6a))
|
||||
* add missing release note updates to sdk main branch ([\#11842](https://github.com/kubeflow/pipelines/issues/11842)) ([611d582](https://github.com/kubeflow/pipelines/commit/611d5820049dc51ddf261d7d1368c4858dad5159))
|
||||
* fix component retry test ([\#11836](https://github.com/kubeflow/pipelines/issues/11836)) ([598826e](https://github.com/kubeflow/pipelines/commit/598826e1ccfecb5f34716876053a22cdc6605ae4))
|
||||
* **chore:** add cleanup resources to sdk execution tests ([\#11823](https://github.com/kubeflow/pipelines/issues/11823)) ([eee4986](https://github.com/kubeflow/pipelines/commit/eee4986f180cd4e7469a65a3c5f4ffbf3ec0b46c))
|
||||
* update driver & launcher image handling ([\#11533](https://github.com/kubeflow/pipelines/issues/11533)) ([38a4653](https://github.com/kubeflow/pipelines/commit/38a46533fcd47aa31e825109e2bf6940d127910a))
|
||||
* **chore:** add image builds for default branch ([\#11800](https://github.com/kubeflow/pipelines/issues/11800)) ([eacb586](https://github.com/kubeflow/pipelines/commit/eacb586f6225bb277642f4977552f799850e06a1))
|
||||
* fix setup env for kfp k8s lib tests ([\#11798](https://github.com/kubeflow/pipelines/issues/11798)) ([f10c7bf](https://github.com/kubeflow/pipelines/commit/f10c7bfbbcf01eb25f2fa8a437da62bbf07dc1f5))
|
||||
* Handle optional pipeline inputs in the driver ([\#11788](https://github.com/kubeflow/pipelines/issues/11788)) ([bb7a108](https://github.com/kubeflow/pipelines/commit/bb7a1082c4c5a3fb308aac2bf37bab476c3c4df6))
|
||||
* Fix recurring run output when always using latest ([\#11790](https://github.com/kubeflow/pipelines/issues/11790)) ([048f283](https://github.com/kubeflow/pipelines/commit/048f28332b6a0b6684632e76dcb284de2f81d829))
|
||||
* increase stale action timers ([\#11792](https://github.com/kubeflow/pipelines/issues/11792)) ([ade8a2d](https://github.com/kubeflow/pipelines/commit/ade8a2d072efa9897a5a0173316836236d629238))
|
||||
* Fix PSS restricted warnings ([\#11751](https://github.com/kubeflow/pipelines/issues/11751)) ([01999b8](https://github.com/kubeflow/pipelines/commit/01999b8fea23db52da0f633e475c457fc06ca531))
|
||||
* fix(CI) Github action is vulnerable to code execution via `comment body` ([\#11772](https://github.com/kubeflow/pipelines/issues/11772)) ([95c3f2c](https://github.com/kubeflow/pipelines/commit/95c3f2c04d8f19b8b656ddbda046ed9f2c81130a))
|
||||
* Fix Istio sidecar injection by moving from annotations to labels ([\#11750](https://github.com/kubeflow/pipelines/issues/11750)) ([df4e9c2](https://github.com/kubeflow/pipelines/commit/df4e9c2bf5b645f4a3fa831b073846eae5eaceb7))
|
||||
* remove unused function ([\#11719](https://github.com/kubeflow/pipelines/issues/11719)) ([89c8bd7](https://github.com/kubeflow/pipelines/commit/89c8bd7274e2d3141a48045427f12faa4e52f029))
|
||||
* fix(backend) fix execution-level retry on the Argo Workflows backend ([\#11673](https://github.com/kubeflow/pipelines/issues/11673)) ([30210e3](https://github.com/kubeflow/pipelines/commit/30210e33bf257ff06727550d1c59e6bcc7158ab7))
|
||||
* Ignore empty PULL_NUMBER environment variables in SDK tests ([\#11714](https://github.com/kubeflow/pipelines/issues/11714)) ([a7ec34f](https://github.com/kubeflow/pipelines/commit/a7ec34f571c367e9f1957019992f3823865f4a5c))
|
||||
* Fix format string in get_kfp_package_path ([\#11712](https://github.com/kubeflow/pipelines/issues/11712)) ([7d8e921](https://github.com/kubeflow/pipelines/commit/7d8e9211f6625d76958023535967bf37b36d9b7a))
|
||||
* Use the correct SDK version in the SDK execution tests CI ([\#11683](https://github.com/kubeflow/pipelines/issues/11683)) ([355f78c](https://github.com/kubeflow/pipelines/commit/355f78c51b084026b8e01db16a06cb93515ff67a))
|
||||
* Limit the number of parallel tests in SDK execution tests ([\#11680](https://github.com/kubeflow/pipelines/issues/11680)) ([976fba8](https://github.com/kubeflow/pipelines/commit/976fba871fa2331c44ab9723744791051c8f9732))
|
||||
* Allow system.Model artifacts in the Modelcar format ([\#11674](https://github.com/kubeflow/pipelines/issues/11674)) ([0afb12d](https://github.com/kubeflow/pipelines/commit/0afb12d6a7fcce6e06a8991a228a0bebf734dabf))
|
||||
* (test) : Collect and upload logs when test fails for k8s ([\#11618](https://github.com/kubeflow/pipelines/issues/11618)) ([8ca7ec1](https://github.com/kubeflow/pipelines/commit/8ca7ec1768f82f50b3b46606f39632eab11b8fe6))
|
||||
* fix(backend) fix run retry for argo ([\#11585](https://github.com/kubeflow/pipelines/issues/11585)) ([b131566](https://github.com/kubeflow/pipelines/commit/b1315667be8f03973898113b3204e375d1f015a4))
|
||||
* Refer to central KEP template ([\#11593](https://github.com/kubeflow/pipelines/issues/11593)) ([7bb0c44](https://github.com/kubeflow/pipelines/commit/7bb0c448cd17953d96e46dcd73972b52b35f5789))
|
||||
* [chore][backend] Add workflow to validate affected generated files ([\#11539](https://github.com/kubeflow/pipelines/issues/11539)) ([22c3724](https://github.com/kubeflow/pipelines/commit/22c372437d76e5bbaec5a65c884384dbcabe2d55))
|
||||
* chore(test): Fix kfp-sdk-test for different python versions ([\#11559](https://github.com/kubeflow/pipelines/issues/11559)) ([926aec5](https://github.com/kubeflow/pipelines/commit/926aec55d491eb9fb3abc3db3b4a903cf9dd22d8))
|
||||
* chore(frontend) : Fix frontend failing ci test ([\#11575](https://github.com/kubeflow/pipelines/issues/11575)) ([d3a016d](https://github.com/kubeflow/pipelines/commit/d3a016dd645ba828ec375428faa733e0203d278e))
|
||||
* Fix typo in documentation for contribution and developer guide ([\#11537](https://github.com/kubeflow/pipelines/issues/11537)) ([1234c8d](https://github.com/kubeflow/pipelines/commit/1234c8d6fef914d07ab634a266e1e076c152fd06))
|
||||
* switch release/api generator images to ghcr ([\#11528](https://github.com/kubeflow/pipelines/issues/11528)) ([83791e7](https://github.com/kubeflow/pipelines/commit/83791e7703f3761b90fcce376caaf70d826cc488))
|
||||
* add remaining dockerfiles to build workflow ([\#11522](https://github.com/kubeflow/pipelines/issues/11522)) ([682d3ac](https://github.com/kubeflow/pipelines/commit/682d3aca5fb92622fb6a1cd94e5984fea4d90471))
|
||||
* Fix the failing exit handler SDK execution tests ([\#11519](https://github.com/kubeflow/pipelines/issues/11519)) ([8bce9c4](https://github.com/kubeflow/pipelines/commit/8bce9c4ef6b047d3b71206a97f66ca567e1a7e4f))
|
||||
* nominate reviewers for backend ([\#11508](https://github.com/kubeflow/pipelines/issues/11508)) ([56e6116](https://github.com/kubeflow/pipelines/commit/56e6116d054898f8dbe73990da3836e10e8b7523))
|
||||
* nominate approver & reviewer for backend ([\#11507](https://github.com/kubeflow/pipelines/issues/11507)) ([81ebd7f](https://github.com/kubeflow/pipelines/commit/81ebd7ff9b0376c44928f2398f48196e38d92cd3))
|
||||
* feat[frontend]: implement artifact-repositories configmap support ([\#11354](https://github.com/kubeflow/pipelines/issues/11354)) ([467f30c](https://github.com/kubeflow/pipelines/commit/467f30cf613ecfe181e7bf9c03cb2eef7ae1ea2d))
|
||||
* Add-Create-Experiment-button-when-selecting-experiment-final ([\#11332](https://github.com/kubeflow/pipelines/issues/11332)) ([c5f162d](https://github.com/kubeflow/pipelines/commit/c5f162d552e5ae405689066736acf730b9147606))
|
||||
* adding chore as a new template. Fixes [\#11263](https://github.com/kubeflow/pipelines/issues/11263) ([\#11317](https://github.com/kubeflow/pipelines/issues/11317)) ([f256d86](https://github.com/kubeflow/pipelines/commit/f256d86fbb9bfff0388cd6ef9df1120e49e995d8))
|
||||
* correct lastrun unittest timestamps ([\#11270](https://github.com/kubeflow/pipelines/issues/11270)) ([6f6c8ae](https://github.com/kubeflow/pipelines/commit/6f6c8aeda9d5c219e2958df94dce93ca5a88a6ea))
|
||||
* replaced deprecated image repos with registry.k8s.io ([\#11152](https://github.com/kubeflow/pipelines/issues/11152)) ([d23b72b](https://github.com/kubeflow/pipelines/commit/d23b72bf12f55f123a16f53eb35d061180ad9ac4))
|
||||
|
||||
### [2.4.1](https://github.com/kubeflow/pipelines/compare/2.4.0...2.4.1) (2025-03-01)
|
||||
|
||||
|
||||
### Other Pull Requests
|
||||
|
||||
* update driver/launcher images for 2.4.1 ([6e64e4d](https://github.com/kubeflow/pipelines/commit/6e64e4d553ff49a1de4c1840490ab9ca337d08cc))
|
||||
* Cherry Pick Commits for 2.4.1 ([\#11716](https://github.com/kubeflow/pipelines/issues/11716)) ([78fab71](https://github.com/kubeflow/pipelines/commit/78fab71ef1831ae6659c7834bf78d56daa55623d))
|
||||
|
||||
## [2.4.0](https://github.com/kubeflow/pipelines/compare/2.3.0...2.4.0) (2025-01-16)
|
||||
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
* **sdk:** stop auto-populating metrics as dag output ([\#11362](https://github.com/kubeflow/pipelines/issues/11362))
|
||||
* **components:** Deprecate preview.custom_job module
|
||||
* **sdk:** Pin kfp-pipeline-spec==0.4.0, kfp-server-api>=2.1.0,<2.4.0 ([\#11192](https://github.com/kubeflow/pipelines/issues/11192))
|
||||
|
||||
### Features
|
||||
|
||||
* **sdk/backend:** Add support for placeholders in resource limits ([\#11501](https://github.com/kubeflow/pipelines/issues/11501)) ([7c931ae](https://github.com/kubeflow/pipelines/commit/7c931ae20197b2309d7a8462f6ce099882a8f915))
|
||||
* Introduce cache_key for cache key customization ([\#11434](https://github.com/kubeflow/pipelines/issues/11434)) ([50b367f](https://github.com/kubeflow/pipelines/commit/50b367f232b2d37b762745c8b4296a29c9d8fd45))
|
||||
* **api:** add PipelineConfig to api to re-implement pipeline-level config ([\#11333](https://github.com/kubeflow/pipelines/issues/11333)) ([c2f5649](https://github.com/kubeflow/pipelines/commit/c2f56495b9b1e9eda1b44b6106e12d5290a89ed7))
|
||||
* **backend:** add configurable S3 path style support ([\#11246](https://github.com/kubeflow/pipelines/issues/11246)) ([85fdd73](https://github.com/kubeflow/pipelines/commit/85fdd73ae0bb1c2ce01da6311807b37cfc589710))
|
||||
* **backend:** Add Parallelism Limit to ParallelFor tasks. Fixes [\#8718](https://github.com/kubeflow/pipelines/issues/8718) ([\#10798](https://github.com/kubeflow/pipelines/issues/10798)) ([b7d8c97](https://github.com/kubeflow/pipelines/commit/b7d8c97d65af575b71efe6755eb67b0bb9126f01))
|
||||
* **backend:** implement subdag output resolution ([\#11196](https://github.com/kubeflow/pipelines/issues/11196)) ([c5b787a](https://github.com/kubeflow/pipelines/commit/c5b787aacc4fddeeb1ebc526a83159540cd7b311))
|
||||
* **backend:** Remove PipelineSpec Template storage from ObjStore responsibilities. Fixes [\#10509](https://github.com/kubeflow/pipelines/issues/10509) ([\#10790](https://github.com/kubeflow/pipelines/issues/10790)) ([374b18b](https://github.com/kubeflow/pipelines/commit/374b18bc3366a51f4b92821cdb3a942bc12343a0))
|
||||
* **cli:** expose existing_token client property ([\#11400](https://github.com/kubeflow/pipelines/issues/11400)) ([35793be](https://github.com/kubeflow/pipelines/commit/35793be4168584b1084169b723bfb216aa4a03b6))
|
||||
* **component:** Created Snowflake data unload component ([\#11349](https://github.com/kubeflow/pipelines/issues/11349)) ([22e7780](https://github.com/kubeflow/pipelines/commit/22e77805ed41a72837f7cd15a9d679f42169b253))
|
||||
* **component:** execute in a virtual env ([\#11326](https://github.com/kubeflow/pipelines/issues/11326)) ([df28e89](https://github.com/kubeflow/pipelines/commit/df28e891c4374f7eac98cc6a4892b6e6c35a43f2))
|
||||
* **components:** Add reservation_affinity support in v1.create_custom_training_job_from_component ([c84241b](https://github.com/kubeflow/pipelines/commit/c84241b7362c0351109bc0ddbc2f697479ff8675))
|
||||
* **components:** add strategy to v1 GCPC custom job components/utils ([1cdd648](https://github.com/kubeflow/pipelines/commit/1cdd648239ff850bf5baae48e4e7bd1b24330dd5))
|
||||
* **components:** Deprecate preview.custom_job module ([abbd915](https://github.com/kubeflow/pipelines/commit/abbd915a2ac32b22151efef662b937601602ba9d))
|
||||
* **frontend/backend:** Allow the ability to sort experiments by last run creation. Fixes [\#10884](https://github.com/kubeflow/pipelines/issues/10884) ([\#11163](https://github.com/kubeflow/pipelines/issues/11163)) ([db8669c](https://github.com/kubeflow/pipelines/commit/db8669c33e60bb8910710359c0638d21ec27ac7c))
|
||||
* **sdk:** add PipelineConfig to DSL to re-implement pipeline-level config ([\#11112](https://github.com/kubeflow/pipelines/issues/11112)) ([df4d787](https://github.com/kubeflow/pipelines/commit/df4d7878c4ce25c801a916351bcbce1266a9daf1))
|
||||
* **sdk:** Allow disabling default caching via a CLI flag and env var ([\#11222](https://github.com/kubeflow/pipelines/issues/11222)) ([3f49522](https://github.com/kubeflow/pipelines/commit/3f495229f26ef08360048d050dfe014ca4b57b4f))
|
||||
* **sdk:** Pin kfp-pipeline-spec==0.4.0, kfp-server-api>=2.1.0,<2.4.0 ([\#11192](https://github.com/kubeflow/pipelines/issues/11192)) ([dfd4cc1](https://github.com/kubeflow/pipelines/commit/dfd4cc1e537523b04b01b6e209b5760bd2a007d5))
|
||||
* **sdk:** stop auto-populating metrics as dag output ([\#11362](https://github.com/kubeflow/pipelines/issues/11362)) ([8d018af](https://github.com/kubeflow/pipelines/commit/8d018aff6ed14b5bed7b3f90d9f450b3144ae18e))
|
||||
* **sdk/backend:** enable parameterization of container images ([\#11404](https://github.com/kubeflow/pipelines/issues/11404)) ([22e85de](https://github.com/kubeflow/pipelines/commit/22e85de2bcbd2ff5ed2a099e4f11a39ff27e4190))
|
||||
* **testing:** use kustomize to patch deployments before deploy ([\#11294](https://github.com/kubeflow/pipelines/issues/11294)) ([be863a8](https://github.com/kubeflow/pipelines/commit/be863a852997718701a1ee548d9db86dca7ffc33))
|
||||
* add fields in SinglePlatformSpec ([\#11299](https://github.com/kubeflow/pipelines/issues/11299)) ([a0d313e](https://github.com/kubeflow/pipelines/commit/a0d313e095c2b5fc1a32809c38cf96b13e5772b2))
|
||||
* **sdk:** support dynamic machine type parameters in pipeline task setters ([\#11097](https://github.com/kubeflow/pipelines/issues/11097)) ([70aaf8a](https://github.com/kubeflow/pipelines/commit/70aaf8a9a469607dc6e4aad58d40b39c75363b99))
|
||||
* **workflows:** use built images in Github workflows ([\#11284](https://github.com/kubeflow/pipelines/issues/11284)) ([1550b36](https://github.com/kubeflow/pipelines/commit/1550b363aed3745b476d2b3798725432329e8cea))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **backend:** Allow initializing the Kubernetes client with a kubeconfig ([\#11443](https://github.com/kubeflow/pipelines/issues/11443)) ([87bdb7c](https://github.com/kubeflow/pipelines/commit/87bdb7c3b1126ae5e899826be0834c11764edbae))
|
||||
* **backend:** handle client side HTTP timeouts to fix crashes of metadata-writer. Fixes [\#8200](https://github.com/kubeflow/pipelines/issues/8200) ([\#11361](https://github.com/kubeflow/pipelines/issues/11361)) ([94a21cc](https://github.com/kubeflow/pipelines/commit/94a21cc7e27a3824732e7f4c09a4d8b826dde5b8))
|
||||
* **backend:** modelToCRDTrigger was not including periodic schedule correctly ([\#11475](https://github.com/kubeflow/pipelines/issues/11475)) ([97acacb](https://github.com/kubeflow/pipelines/commit/97acacbd2a0b72d442398ca04382ac1e6d9aa37f))
|
||||
* **backend:** randomizing output uri path to avoid overwriting. Fixes [\#10186](https://github.com/kubeflow/pipelines/issues/10186) ([\#11243](https://github.com/kubeflow/pipelines/issues/11243)) ([219725d](https://github.com/kubeflow/pipelines/commit/219725d9f02b690cf0829a21faf092a3e4c65531))
|
||||
* **backend:** remove unused function argument ([\#11425](https://github.com/kubeflow/pipelines/issues/11425)) ([7f2278f](https://github.com/kubeflow/pipelines/commit/7f2278f25222992bedfcae5b613a7a06430f4542))
|
||||
* **backend:** return error properly ([\#11424](https://github.com/kubeflow/pipelines/issues/11424)) ([13f83cf](https://github.com/kubeflow/pipelines/commit/13f83cf745eb5628d6ae5b25c1ca979d8c6d92ad))
|
||||
* **backend:** set default value to true for ForcePathStyle ([\#11281](https://github.com/kubeflow/pipelines/issues/11281)) ([391de8c](https://github.com/kubeflow/pipelines/commit/391de8ca9ec68fe4cd85bba6c82348386fc79842))
|
||||
* **backend:** stop heartbeat status updates for ScheduledWorkflows. Fixes [\#8757](https://github.com/kubeflow/pipelines/issues/8757) ([\#11363](https://github.com/kubeflow/pipelines/issues/11363)) ([9ccec4c](https://github.com/kubeflow/pipelines/commit/9ccec4c7d1aff4d2bfdb20cf4fd1f9d64b8632f4))
|
||||
* **backend:** Synced ScheduledWorkflow CRs on apiserver startup ([\#11469](https://github.com/kubeflow/pipelines/issues/11469)) ([d21fca6](https://github.com/kubeflow/pipelines/commit/d21fca650c8152d992ad5f7f590f70b1368bc60b))
|
||||
* **backend:** upgrade PyYAML to fix metadata_writer build error ([\#11231](https://github.com/kubeflow/pipelines/issues/11231)) ([a4119a6](https://github.com/kubeflow/pipelines/commit/a4119a6bf1fe220c84aaa5caa7051c423b5f145e))
|
||||
* **backend:** upgrade various old dependencies ([\#11448](https://github.com/kubeflow/pipelines/issues/11448)) ([803d7a8](https://github.com/kubeflow/pipelines/commit/803d7a8ebb00924107b890de01e2a53af78d9a5e))
|
||||
* **backend:** Use an Argo Workflow exit lifecycle hook for exit handlers ([\#11470](https://github.com/kubeflow/pipelines/issues/11470)) ([3059f7c](https://github.com/kubeflow/pipelines/commit/3059f7c124dc95f867e6f755f7c0720aaa32d48b))
|
||||
* **components:** Fix create_custom_training_job_from_component default location ([04d600b](https://github.com/kubeflow/pipelines/commit/04d600b2d36405f34799306c5d24287c75e31595))
|
||||
* **components:** remove default prediction column names in evaluation regression component to fix issues with bigquery data source ([753a2f1](https://github.com/kubeflow/pipelines/commit/753a2f148ac3f001bc785acc6359295e6fe521fd))
|
||||
* **frontend:** Detailed information of nodes is not displayed when clicking the node. Fixes [\#11325](https://github.com/kubeflow/pipelines/issues/11325) ([\#11493](https://github.com/kubeflow/pipelines/issues/11493)) ([028d81b](https://github.com/kubeflow/pipelines/commit/028d81b624629d4610ddcdced5b982437ff88d08))
|
||||
* **frontend:** first time choosing a pipeline definition is VERY slow. Fixes [\#10897](https://github.com/kubeflow/pipelines/issues/10897) ([\#11130](https://github.com/kubeflow/pipelines/issues/11130)) ([cfb3b31](https://github.com/kubeflow/pipelines/commit/cfb3b3149d9ba02daec584af77ef763f936cd727))
|
||||
* **frontend:** Fix the frontend image build with Node 22 ([\#11524](https://github.com/kubeflow/pipelines/issues/11524)) ([\#11525](https://github.com/kubeflow/pipelines/issues/11525)) ([2e47604](https://github.com/kubeflow/pipelines/commit/2e4760435ff988063dba6e21707e910bf748e5ff))
|
||||
* **sdk:** accelerator type setting in kfp ([\#11373](https://github.com/kubeflow/pipelines/issues/11373)) ([64e3900](https://github.com/kubeflow/pipelines/commit/64e390069d6c60c97ea03e833529a0930398620f))
|
||||
* **sdk:** Add error handling. Fixes [\#11164](https://github.com/kubeflow/pipelines/issues/11164) ([\#11356](https://github.com/kubeflow/pipelines/issues/11356)) ([4a64fe9](https://github.com/kubeflow/pipelines/commit/4a64fe9532556a48585b9966db8e10c7de0a8d37))
|
||||
* **sdk:** Backport fixes in kubeflow/pipelines#11075 ([\#11392](https://github.com/kubeflow/pipelines/issues/11392)) ([6ebf4aa](https://github.com/kubeflow/pipelines/commit/6ebf4aae0335424d3bc88175fd06a2b2ba05251f))
|
||||
* **tests:** remove redundant integration test wf ([\#11322](https://github.com/kubeflow/pipelines/issues/11322)) ([6a35ee5](https://github.com/kubeflow/pipelines/commit/6a35ee5144fba1c72badc7c52161d8a49f34804e))
|
||||
* **ui:** Disable GKE Metadata as default Fixes: [\#11247](https://github.com/kubeflow/pipelines/issues/11247), fixes [\#11260](https://github.com/kubeflow/pipelines/issues/11260) ([\#11403](https://github.com/kubeflow/pipelines/issues/11403)) ([23f718d](https://github.com/kubeflow/pipelines/commit/23f718d02e402bad5c9da1a3d76da5c4a97743b3))
|
||||
* **ui:** Fixes V1 Run detail unnecessary reloading. Fixes [\#10590](https://github.com/kubeflow/pipelines/issues/10590) ([\#11214](https://github.com/kubeflow/pipelines/issues/11214)) ([eee095e](https://github.com/kubeflow/pipelines/commit/eee095e5c8d53c0eae45165f72549afe5a5cb0e3))
|
||||
* **workflows:** patch reversed launcher / driver in workflow matrix ([\#11238](https://github.com/kubeflow/pipelines/issues/11238)) ([ceeda01](https://github.com/kubeflow/pipelines/commit/ceeda01d0a35bd84b79d8f2e7aa2e029cb1de06b))
|
||||
|
||||
|
||||
### Other Pull Requests
|
||||
|
||||
* switch release/api generator images to ghcr ([\#11528](https://github.com/kubeflow/pipelines/issues/11528)) ([\#11530](https://github.com/kubeflow/pipelines/issues/11530)) ([33fca73](https://github.com/kubeflow/pipelines/commit/33fca73fd6f96751db3c7e2c4795acfc7980b649))
|
||||
* add remaining dockerfiles to build workflow ([\#11522](https://github.com/kubeflow/pipelines/issues/11522)) ([682d3ac](https://github.com/kubeflow/pipelines/commit/682d3aca5fb92622fb6a1cd94e5984fea4d90471))
|
||||
* Fix the failing exit handler SDK execution tests ([\#11519](https://github.com/kubeflow/pipelines/issues/11519)) ([8bce9c4](https://github.com/kubeflow/pipelines/commit/8bce9c4ef6b047d3b71206a97f66ca567e1a7e4f))
|
||||
* nominate reviewers for backend ([\#11508](https://github.com/kubeflow/pipelines/issues/11508)) ([56e6116](https://github.com/kubeflow/pipelines/commit/56e6116d054898f8dbe73990da3836e10e8b7523))
|
||||
* nominate approver & reviewer for backend ([\#11507](https://github.com/kubeflow/pipelines/issues/11507)) ([81ebd7f](https://github.com/kubeflow/pipelines/commit/81ebd7ff9b0376c44928f2398f48196e38d92cd3))
|
||||
* feat[frontend]: implement artifact-repositories configmap support ([\#11354](https://github.com/kubeflow/pipelines/issues/11354)) ([467f30c](https://github.com/kubeflow/pipelines/commit/467f30cf613ecfe181e7bf9c03cb2eef7ae1ea2d))
|
||||
* Add-Create-Experiment-button-when-selecting-experiment-final ([\#11332](https://github.com/kubeflow/pipelines/issues/11332)) ([c5f162d](https://github.com/kubeflow/pipelines/commit/c5f162d552e5ae405689066736acf730b9147606))
|
||||
* adding chore as a new template. Fixes [\#11263](https://github.com/kubeflow/pipelines/issues/11263) ([\#11317](https://github.com/kubeflow/pipelines/issues/11317)) ([f256d86](https://github.com/kubeflow/pipelines/commit/f256d86fbb9bfff0388cd6ef9df1120e49e995d8))
|
||||
* correct lastrun unittest timestamps ([\#11270](https://github.com/kubeflow/pipelines/issues/11270)) ([6f6c8ae](https://github.com/kubeflow/pipelines/commit/6f6c8aeda9d5c219e2958df94dce93ca5a88a6ea))
|
||||
* replaced deprecated image repos with registry.k8s.io ([\#11152](https://github.com/kubeflow/pipelines/issues/11152)) ([d23b72b](https://github.com/kubeflow/pipelines/commit/d23b72bf12f55f123a16f53eb35d061180ad9ac4))
|
||||
|
||||
## [2.3.0](https://github.com/kubeflow/pipelines/compare/2.2.0...2.3.0) (2024-09-06)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* **api:** Add new ResourceSpec proto fields to support dynamic values. ([\#11075](https://github.com/kubeflow/pipelines/issues/11075)) ([83dcf1a](https://github.com/kubeflow/pipelines/commit/83dcf1a60919f5bcc0c644c8fdff94ad686cad07))
|
||||
* **backend:** mount EmptyDir volumes for launcher write locations ([\#10857](https://github.com/kubeflow/pipelines/issues/10857)) ([65839ce](https://github.com/kubeflow/pipelines/commit/65839ced31969affe23ecc483c95872684e7dd0b))
|
||||
* **backend:** move comp logic to workflow params ([\#10979](https://github.com/kubeflow/pipelines/issues/10979)) ([0e37fd6](https://github.com/kubeflow/pipelines/commit/0e37fd66a4f15bb7ce31ecceec7421ba1bd65fd6))
|
||||
* **Backend + SDK:** Update kfp backend and kubernetes sdk to support EmptyDir ([\#10913](https://github.com/kubeflow/pipelines/issues/10913)) ([7506a8e](https://github.com/kubeflow/pipelines/commit/7506a8eafae9876a4f1e1a0e86024f5231369e8d))
|
||||
* **component:** internal ([383cbcd](https://github.com/kubeflow/pipelines/commit/383cbcd0655395582b11f79f29e4816f84d6d91b))
|
||||
* **component:** internal ([8041ea8](https://github.com/kubeflow/pipelines/commit/8041ea8740371925d76c698c8a94a4859fc2bda7))
|
||||
* **component:** internal ([c06d9ae](https://github.com/kubeflow/pipelines/commit/c06d9aee96726bbae73e5f7562edfc8330fac61e))
|
||||
* **component:** internal ([ad03bb6](https://github.com/kubeflow/pipelines/commit/ad03bb6daa03442a7fda19904a97db3745d5044f))
|
||||
* **components:** Add role_field_name and model_name as input parameters to llm_evaluation_preprocessor component to support gemini model's input and output schema ([de346d5](https://github.com/kubeflow/pipelines/commit/de346d5590b1df94cee528f949a067fb3aae578f))
|
||||
* **components:** Add Starry Net forecasting pipeline to public preview ([3a0566e](https://github.com/kubeflow/pipelines/commit/3a0566e8dc27d8fbd13174d6a6aed4daddb2405b))
|
||||
* **components:** create infer preprocessor component ([e9d6876](https://github.com/kubeflow/pipelines/commit/e9d687690bea2d88263ca331d049cb6c68495f02))
|
||||
* **components:** Create the write_user_defined_error function ([454a654](https://github.com/kubeflow/pipelines/commit/454a65428977a4e470c0d72525a96c3f4a0344f3))
|
||||
* **components:** create utility class for preprocessors and use it in rlhf preprocessor and infer preprocessor ([cd16a33](https://github.com/kubeflow/pipelines/commit/cd16a33e735b30a85b2e736039f72c2ed6d26507))
|
||||
* **components:** internal ([e8f0208](https://github.com/kubeflow/pipelines/commit/e8f0208777ad4e9260ff630a37b0d90cda8c4f76))
|
||||
* **components:** internal ([6f62203](https://github.com/kubeflow/pipelines/commit/6f62203ccfadeaf60d16ce1f65ed25ac4408c649))
|
||||
* **components:** internal ([2eb9bec](https://github.com/kubeflow/pipelines/commit/2eb9bec1ae8debca6698daa65dd69da91ec3461f))
|
||||
* **components:** internal ([c6f658b](https://github.com/kubeflow/pipelines/commit/c6f658b0e70d353a1ef0aae4ae05307ef3223eb4))
|
||||
* **components:** internal ([e128bdb](https://github.com/kubeflow/pipelines/commit/e128bdbaf3b2071926e8a5c23041cc692329bb39))
|
||||
* **components:** internal ([abe5257](https://github.com/kubeflow/pipelines/commit/abe525737f713e752560ee039553b5cc6d5af89d))
|
||||
* **components:** internal ([522b593](https://github.com/kubeflow/pipelines/commit/522b5933b00439f4981498ecea3f800fde3a6506))
|
||||
* **components:** internal ([461d892](https://github.com/kubeflow/pipelines/commit/461d892db80987f5d9a1f5bf9575f667bfb66a76))
|
||||
* **components:** release LLM Model Evaluation image version v0.7 ([4f36fe3](https://github.com/kubeflow/pipelines/commit/4f36fe3637eb52980e559615595dfab7aa539a87))
|
||||
* **components:** Retry on batch prediction internal errors in AutoSxS ([5d9f4ab](https://github.com/kubeflow/pipelines/commit/5d9f4ab929747748e4a616c89fac9de37fc81485))
|
||||
* **components:** Support dynamic machine parameters in preview.custom_job.utils.create_custom_training_job_from_component ([e44dfa7](https://github.com/kubeflow/pipelines/commit/e44dfa7e898a1f4e8c315502719d9c7c709f1b01))
|
||||
* **components:** Support dynamic machine type parameters in CustomTrainingJobOp ([\#10883](https://github.com/kubeflow/pipelines/issues/10883)) ([b57f9e8](https://github.com/kubeflow/pipelines/commit/b57f9e858880afcbeac51a5d4e978133be6c0d50))
|
||||
* **components:** Support dynamic values for boot_disk_type, boot_disk_size in preview.custom_job.utils.create_custom_training_job_from_component ([7b7918e](https://github.com/kubeflow/pipelines/commit/7b7918ebf8c30e6ceec99283ef20dbc02fdf6a42))
|
||||
* **components:** Support parsing Gemini BP outputs in AutoSxS pipeline ([b4f91a3](https://github.com/kubeflow/pipelines/commit/b4f91a3f2cf1d2d8925e698bebd3cb5b9baaaaac))
|
||||
* **components:** Update Starry Net image tags ([bf5104f](https://github.com/kubeflow/pipelines/commit/bf5104fcff6a6c2db8d8e39522c04eca1bb3fc93))
|
||||
* **components:** Use GetModel integration test to manually test write_user_defined_error function ([609c637](https://github.com/kubeflow/pipelines/commit/609c637811e46eab2d5c9a915913a1520c979a88))
|
||||
* **components:** use preprocessor utility methods for the upload model graph ([7908ed6](https://github.com/kubeflow/pipelines/commit/7908ed664653143d335ba3e9227484347e64577d))
|
||||
* **frontend&backend:** Add UI support for object store customization and prefixes ([\#10787](https://github.com/kubeflow/pipelines/issues/10787)) ([6723d3d](https://github.com/kubeflow/pipelines/commit/6723d3d5a92181b7ee75304671256bbe5ac18582))
|
||||
* **GH workflow:** migrate periodic functional tests to GH actions ([\#10751](https://github.com/kubeflow/pipelines/issues/10751)) ([c4d7ec3](https://github.com/kubeflow/pipelines/commit/c4d7ec33b409eda9c4076915f4ed1c2db0ee8ef6))
|
||||
* **internal:** Adding proto field to support regional quota ([c8f08ba](https://github.com/kubeflow/pipelines/commit/c8f08ba49f92f53269d71425666c0bc3a687615d))
|
||||
* **kubernetes_platform:** Add empty dir mount ([\#10892](https://github.com/kubeflow/pipelines/issues/10892)) ([10aaf43](https://github.com/kubeflow/pipelines/commit/10aaf431367e974bf6c73306acf6a7fd40e36942))
|
||||
* **kubernetes_platform:** Update kubernetes_platform go package to include EnabledSharedMemory ([\#10703](https://github.com/kubeflow/pipelines/issues/10703)) ([7c63599](https://github.com/kubeflow/pipelines/commit/7c6359984314472bf801ea1ba8b0e8c5d9e2be2c))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **components:** add check and add log to call out the fallback to the default model checkpoint and remove the model checkpoint check condition in RLHF GCPC ([92c3178](https://github.com/kubeflow/pipelines/commit/92c317824a3697d955cb78cc9a85ecac58a3366c))
|
||||
* **components:** Add input param `autorater_prompt_parameters` to `online_evaluation_pairwise` component ([cf7450b](https://github.com/kubeflow/pipelines/commit/cf7450b109ba349b50aef2413517c0ec3961adf8))
|
||||
* **components:** Add staging and temp locations to prophet trainer component ([00440f7](https://github.com/kubeflow/pipelines/commit/00440f7df4abae08823df9ab5b48de703a8a773c))
|
||||
* **components:** Fix to model batch explanation component for Structured Data pipelines ([289f64f](https://github.com/kubeflow/pipelines/commit/289f64fe948c1aede1886789045aaf1e2ac8f699))
|
||||
* **components:** Pass model name to eval_runner to process batch prediction's output as per the output schema of model used ([0d3e79a](https://github.com/kubeflow/pipelines/commit/0d3e79adc7bade905c112160781fed0feef3f595))
|
||||
* **components:** Remove unused import function_based from infer pipeline ([e369bd3](https://github.com/kubeflow/pipelines/commit/e369bd3ebd91831465ba346ab271549c549c6745))
|
||||
* **components:** Use instance.target_field_name format for text-bison models only, use target_field_name for gemini models ([1a07ffa](https://github.com/kubeflow/pipelines/commit/1a07ffa9111ac5ccea6ad527e8950b084aa4fc79))
|
||||
* **docs:** add note about protoc dependency and version ([\#10895](https://github.com/kubeflow/pipelines/issues/10895)) ([d911c8b](https://github.com/kubeflow/pipelines/commit/d911c8b73b49cd0a4ffda132f494f31d0d48095a))
|
||||
* **docs:** IR -> Pipeline Spec ([\#11031](https://github.com/kubeflow/pipelines/issues/11031)) ([1e95eb6](https://github.com/kubeflow/pipelines/commit/1e95eb628545baaf691270c4ddb45034ecef8fa4))
|
||||
* **frontend:** fixes filter pipeline text box shows error when typing anything in it. Fixes [\#10241](https://github.com/kubeflow/pipelines/issues/10241) ([\#11096](https://github.com/kubeflow/pipelines/issues/11096)) ([51d2c92](https://github.com/kubeflow/pipelines/commit/51d2c92b551751b758f544ca32a7ec0f67d09558))
|
||||
* **frontend:** reduce list run latency ([\#10797](https://github.com/kubeflow/pipelines/issues/10797)) ([768ece4](https://github.com/kubeflow/pipelines/commit/768ece44e69e328400ca437c9ebe2c959b26d8a8))
|
||||
* **frontend:** retrieve archived logs from correct location ([\#11010](https://github.com/kubeflow/pipelines/issues/11010)) ([2e6e634](https://github.com/kubeflow/pipelines/commit/2e6e634de43c42fee88dfe2abfdb0e4155dc4f95))
|
||||
* **sdk:** Add required auth scopes to RegistryClient for GCP service accounts credentials ([\#10819](https://github.com/kubeflow/pipelines/issues/10819)) ([04b4cad](https://github.com/kubeflow/pipelines/commit/04b4cad8cdc88810628f5e683cada57a7f42be47))
|
||||
* **sdk:** Kfp support for pip trusted host ([\#11151](https://github.com/kubeflow/pipelines/issues/11151)) ([3efa029](https://github.com/kubeflow/pipelines/commit/3efa02984c44190ee6ea98af1a6905d67a986af0))
|
||||
* **ui:** fixes empty string value in pipeline parameters ([\#11175](https://github.com/kubeflow/pipelines/issues/11175)) ([e9c77ec](https://github.com/kubeflow/pipelines/commit/e9c77ec6d5c900be6ca77f513610a603c41ed2ce))
|
||||
* Basic sample tests - sequential is flaky ([\#11138](https://github.com/kubeflow/pipelines/issues/11138)) ([e1d172b](https://github.com/kubeflow/pipelines/commit/e1d172bb5740f743be45e43f602085baed1c5495))
|
||||
* Incorrect typing in samples/cores/loop_parallism and fixing loop_parameter examples ([\#11062](https://github.com/kubeflow/pipelines/issues/11062)) ([1612dac](https://github.com/kubeflow/pipelines/commit/1612dac4adacbe1f2a041eb4eacb94663c2ba286))
|
||||
* re-enable exit handler test. ([\#11100](https://github.com/kubeflow/pipelines/issues/11100)) ([000ef60](https://github.com/kubeflow/pipelines/commit/000ef60080b6ece6274910057898f9a0f33a9a7a))
|
||||
* **kubernetes_platform:** fix api-generator docker mount for SELinux ([\#10890](https://github.com/kubeflow/pipelines/issues/10890)) ([e69078b](https://github.com/kubeflow/pipelines/commit/e69078b2b65c0e34fd56499bbe34da882dc6e009))
|
||||
* **manifests:** Move metacontroller to the top in kustomization.yaml ([\#10669](https://github.com/kubeflow/pipelines/issues/10669)) ([4e9fe75](https://github.com/kubeflow/pipelines/commit/4e9fe75d4564bbcdde7cd358298361e94d4a20be))
|
||||
* **sdk:** Throw 'exit_task cannot depend on any other tasks.' error when an ExitHandler has a parameter dependent on other task ([\#11005](https://github.com/kubeflow/pipelines/issues/11005)) ([08185e7](https://github.com/kubeflow/pipelines/commit/08185e71717ef628be3cbe2cdeb1fd55b25581d4))
|
||||
|
||||
|
||||
### Other Pull Requests
|
||||
|
||||
* **fix:** reduce executor logs ([\#11169](https://github.com/kubeflow/pipelines/issues/11169)) ([d64554b](https://github.com/kubeflow/pipelines/commit/d64554b1a8880c69186776f92a3af75fafd0135d))
|
||||
* correct artifact preview behavior in UI ([\#11059](https://github.com/kubeflow/pipelines/issues/11059)) ([2c91fb7](https://github.com/kubeflow/pipelines/commit/2c91fb797ed5e95bb51ae80c4daa2c6b9334b51b))
|
||||
* docs:fixing broken links in readme ([\#11108](https://github.com/kubeflow/pipelines/issues/11108)) ([a3adf94](https://github.com/kubeflow/pipelines/commit/a3adf9471c49f8a529f4389853210212880f9d16))
|
||||
* Fix view edit cluster roles ([\#11067](https://github.com/kubeflow/pipelines/issues/11067)) ([43cdc20](https://github.com/kubeflow/pipelines/commit/43cdc2081dcb1f0cac87c58188e88a23370fb67e))
|
||||
* add dev docs for kfp/sdk ([\#11046](https://github.com/kubeflow/pipelines/issues/11046)) ([eea7d48](https://github.com/kubeflow/pipelines/commit/eea7d483793c2b4f8c3aa20b1500535f7fbf50d4))
|
||||
* No public description ([e3a4980](https://github.com/kubeflow/pipelines/commit/e3a498020a28788b3df71db310dd266a537ef243))
|
||||
* Upgrade go version to 1.21 ([\#10911](https://github.com/kubeflow/pipelines/issues/10911)) ([bdc3bb1](https://github.com/kubeflow/pipelines/commit/bdc3bb1f0d5850332d4fc0851a2d1730ead62427))
|
||||
* Expose starry_net yaml to GitHub ([\#10943](https://github.com/kubeflow/pipelines/issues/10943)) ([f1e2314](https://github.com/kubeflow/pipelines/commit/f1e23142b1eabc977d85736dfd4bdbdc019cfcb2))
|
||||
|
||||
## [2.2.0](https://github.com/kubeflow/pipelines/compare/2.1.0...2.2.0) (2024-04-30)
|
||||
|
||||
|
||||
|
@ -6233,9 +6683,9 @@ Detailed PR can be found [here](https://github.com/kubeflow/pipelines/commits)
|
|||
## [0.1.27](https://github.com/kubeflow/pipelines/tree/0.1.27) (2019-08-22)
|
||||
[Full Changelog](https://github.com/kubeflow/pipelines/compare/0.1.26...0.1.27)
|
||||
|
||||
**Merged pull requests:**
|
||||
**Merged pull requests:**
|
||||
|
||||
- update namespaced-install.yaml [\#1926](https://github.com/kubeflow/pipelines/pull/1926) ([IronPan](https://github.com/IronPan))
|
||||
- update namespaced-install.yaml [\#1926](https://github.com/kubeflow/pipelines/pull/1926) ([IronPan](https://github.com/IronPan))
|
||||
- Fix lint related issue [\#1922](https://github.com/kubeflow/pipelines/pull/1922) ([numerology](https://github.com/numerology))
|
||||
- Cleanup pipeline-lite deployment [\#1921](https://github.com/kubeflow/pipelines/pull/1921) ([IronPan](https://github.com/IronPan))
|
||||
- Allow visualization kernel timeout to be specifiable via environment variables [\#1920](https://github.com/kubeflow/pipelines/pull/1920) ([ajchili](https://github.com/ajchili))
|
||||
|
@ -6465,12 +6915,12 @@ Detailed PR can be found [here](https://github.com/kubeflow/pipelines/commits)
|
|||
- SDK - Travis configuration for Python 3.5 and 3.7 [\#1467](https://github.com/kubeflow/pipelines/pull/1467) ([kvalev](https://github.com/kvalev))
|
||||
- Add timeout out in dsl [\#1465](https://github.com/kubeflow/pipelines/pull/1465) ([gaoning777](https://github.com/gaoning777))
|
||||
|
||||
## [0.1.22](https://github.com/kubeflow/pipelines/tree/0.1.22) (2019-06-21)
|
||||
## [0.1.22](https://github.com/kubeflow/pipelines/tree/0.1.22) (2019-06-21)
|
||||
[Full Changelog](https://github.com/kubeflow/pipelines/compare/0.1.21...0.1.22)
|
||||
|
||||
**Merged pull requests:**
|
||||
**Merged pull requests:**
|
||||
|
||||
- increment sdk versions [\#1538](https://github.com/kubeflow/pipelines/pull/1538) ([hongye-sun](https://github.com/hongye-sun))
|
||||
- increment sdk versions [\#1538](https://github.com/kubeflow/pipelines/pull/1538) ([hongye-sun](https://github.com/hongye-sun))
|
||||
- SDK/Client - Added support for all APIs [\#1536](https://github.com/kubeflow/pipelines/pull/1536) ([Ark-kun](https://github.com/Ark-kun))
|
||||
- SDK/Client - Added the upload\_pipeline API [\#1535](https://github.com/kubeflow/pipelines/pull/1535) ([Ark-kun](https://github.com/Ark-kun))
|
||||
- Update Watson pipeline component source to the latest commit [\#1533](https://github.com/kubeflow/pipelines/pull/1533) ([Tomcli](https://github.com/Tomcli))
|
||||
|
@ -6576,12 +7026,12 @@ Detailed PR can be found [here](https://github.com/kubeflow/pipelines/commits)
|
|||
## [0.1.20](https://github.com/kubeflow/pipelines/tree/0.1.20) (2019-05-14)
|
||||
[Full Changelog](https://github.com/kubeflow/pipelines/compare/0.1.19...0.1.20)
|
||||
|
||||
**Closed issues:**
|
||||
**Closed issues:**
|
||||
|
||||
- Cannot create job for experiment via Pipelines Go CLI [\#1321](https://github.com/kubeflow/pipelines/issues/1321)
|
||||
- Support a container flow inside one pod [\#1313](https://github.com/kubeflow/pipelines/issues/1313)
|
||||
- toleration support for ContainerOp [\#1265](https://github.com/kubeflow/pipelines/issues/1265)
|
||||
- Can only create recurring run from within experiment page [\#1217](https://github.com/kubeflow/pipelines/issues/1217)
|
||||
- Cannot create job for experiment via Pipelines Go CLI [\#1321](https://github.com/kubeflow/pipelines/issues/1321)
|
||||
- Support a container flow inside one pod [\#1313](https://github.com/kubeflow/pipelines/issues/1313)
|
||||
- toleration support for ContainerOp [\#1265](https://github.com/kubeflow/pipelines/issues/1265)
|
||||
- Can only create recurring run from within experiment page [\#1217](https://github.com/kubeflow/pipelines/issues/1217)
|
||||
|
||||
**Merged pull requests:**
|
||||
|
||||
|
@ -6610,25 +7060,25 @@ Detailed PR can be found [here](https://github.com/kubeflow/pipelines/commits)
|
|||
- changelog for v0.1.19 [\#1296](https://github.com/kubeflow/pipelines/pull/1296) ([hongye-sun](https://github.com/hongye-sun))
|
||||
- add nuclio components \(to build/deploy, delete, invoke functions\) [\#1295](https://github.com/kubeflow/pipelines/pull/1295) ([yaronha](https://github.com/yaronha))
|
||||
- SDK - Failing faster in python\_op tests [\#1291](https://github.com/kubeflow/pipelines/pull/1291) ([Ark-kun](https://github.com/Ark-kun))
|
||||
- SDK - Renamed ModelBase.from\_struct/to\_struct to from\_dict/to\_dict [\#1290](https://github.com/kubeflow/pipelines/pull/1290) ([Ark-kun](https://github.com/Ark-kun))
|
||||
- SDK - Renamed ModelBase.from\_struct/to\_struct to from\_dict/to\_dict [\#1290](https://github.com/kubeflow/pipelines/pull/1290) ([Ark-kun](https://github.com/Ark-kun))
|
||||
- Backend - Marking auto-added artifacts as optional [\#1289](https://github.com/kubeflow/pipelines/pull/1289) ([Ark-kun](https://github.com/Ark-kun))
|
||||
- Update new Watson OpenScale components and pipeline [\#1287](https://github.com/kubeflow/pipelines/pull/1287) ([Tomcli](https://github.com/Tomcli))
|
||||
- Add AWS EMR and Athena components [\#1286](https://github.com/kubeflow/pipelines/pull/1286) ([Jeffwan](https://github.com/Jeffwan))
|
||||
- Add AWS EMR and Athena components [\#1286](https://github.com/kubeflow/pipelines/pull/1286) ([Jeffwan](https://github.com/Jeffwan))
|
||||
- Make confusion\_matrix and roc generic [\#1285](https://github.com/kubeflow/pipelines/pull/1285) ([Jeffwan](https://github.com/Jeffwan))
|
||||
- Components - Updating component versions in samples during release [\#1283](https://github.com/kubeflow/pipelines/pull/1283) ([Ark-kun](https://github.com/Ark-kun))
|
||||
- Sets the background color for KFP pages [\#1281](https://github.com/kubeflow/pipelines/pull/1281) ([rileyjbauer](https://github.com/rileyjbauer))
|
||||
- keep the api image name consistent between the presubmit test and staging [\#1279](https://github.com/kubeflow/pipelines/pull/1279) ([gaoning777](https://github.com/gaoning777))
|
||||
- Frontend - Add support for artifacts stored in S3 [\#1278](https://github.com/kubeflow/pipelines/pull/1278) ([Jeffwan](https://github.com/Jeffwan))
|
||||
- Components - Updating component versions in samples during release [\#1283](https://github.com/kubeflow/pipelines/pull/1283) ([Ark-kun](https://github.com/Ark-kun))
|
||||
- Sets the background color for KFP pages [\#1281](https://github.com/kubeflow/pipelines/pull/1281) ([rileyjbauer](https://github.com/rileyjbauer))
|
||||
- keep the api image name consistent between the presubmit test and staging [\#1279](https://github.com/kubeflow/pipelines/pull/1279) ([gaoning777](https://github.com/gaoning777))
|
||||
- Frontend - Add support for artifacts stored in S3 [\#1278](https://github.com/kubeflow/pipelines/pull/1278) ([Jeffwan](https://github.com/Jeffwan))
|
||||
- Release - Simplified python package building [\#1277](https://github.com/kubeflow/pipelines/pull/1277) ([Ark-kun](https://github.com/Ark-kun))
|
||||
- Add SageMaker components and example pipeline [\#1276](https://github.com/kubeflow/pipelines/pull/1276) ([Jeffwan](https://github.com/Jeffwan))
|
||||
- Add SageMaker components and example pipeline [\#1276](https://github.com/kubeflow/pipelines/pull/1276) ([Jeffwan](https://github.com/Jeffwan))
|
||||
- Tests/Travis - Simplified the Python SDK package installation [\#1275](https://github.com/kubeflow/pipelines/pull/1275) ([Ark-kun](https://github.com/Ark-kun))
|
||||
- Adds a toggle between one-off and recurring runs to NewRun page [\#1274](https://github.com/kubeflow/pipelines/pull/1274) ([rileyjbauer](https://github.com/rileyjbauer))
|
||||
- spark components [\#1272](https://github.com/kubeflow/pipelines/pull/1272) ([animeshsingh](https://github.com/animeshsingh))
|
||||
- Adds a toggle between one-off and recurring runs to NewRun page [\#1274](https://github.com/kubeflow/pipelines/pull/1274) ([rileyjbauer](https://github.com/rileyjbauer))
|
||||
- spark components [\#1272](https://github.com/kubeflow/pipelines/pull/1272) ([animeshsingh](https://github.com/animeshsingh))
|
||||
- support tolerations for ContainerOps [\#1269](https://github.com/kubeflow/pipelines/pull/1269) ([hamedhsn](https://github.com/hamedhsn))
|
||||
- make pending timeout customizable [\#1268](https://github.com/kubeflow/pipelines/pull/1268) ([cheyang](https://github.com/cheyang))
|
||||
- SDK/Client - Supporting pipeline packages with multiple files [\#1207](https://github.com/kubeflow/pipelines/pull/1207) ([Ark-kun](https://github.com/Ark-kun))
|
||||
- Retaining the component url, digest or tag when loading [\#1090](https://github.com/kubeflow/pipelines/pull/1090) ([Ark-kun](https://github.com/Ark-kun))
|
||||
- Allow to specify informers namespace in persistence agent [\#901](https://github.com/kubeflow/pipelines/pull/901) ([ywskycn](https://github.com/ywskycn))
|
||||
- make pending timeout customizable [\#1268](https://github.com/kubeflow/pipelines/pull/1268) ([cheyang](https://github.com/cheyang))
|
||||
- SDK/Client - Supporting pipeline packages with multiple files [\#1207](https://github.com/kubeflow/pipelines/pull/1207) ([Ark-kun](https://github.com/Ark-kun))
|
||||
- Retaining the component url, digest or tag when loading [\#1090](https://github.com/kubeflow/pipelines/pull/1090) ([Ark-kun](https://github.com/Ark-kun))
|
||||
- Allow to specify informers namespace in persistence agent [\#901](https://github.com/kubeflow/pipelines/pull/901) ([ywskycn](https://github.com/ywskycn))
|
||||
|
||||
|
||||
## [0.1.19](https://github.com/kubeflow/pipelines/tree/0.1.19) (2019-05-03)
|
||||
|
|
|
@ -181,6 +181,12 @@ usually have different reviewers.
|
|||
If you are not sure, or the PR doesn't fit into above scopes. You can either
|
||||
omit the scope because it's optional, or propose an additional scope here.
|
||||
|
||||
## Adding Kubernetes Enhancement Proposals (KEPs)
|
||||
|
||||
When a change requires a significant change to the underlying system, it should be preceded with an Kubernetes Enhancement Proposal (KEP).
|
||||
|
||||
KEPs are found in the `proposals` folder at the root of this repo. Read more about the process [here](proposals/README.md).
|
||||
|
||||
## Community Guidelines
|
||||
|
||||
This project follows
|
||||
|
|
|
@ -0,0 +1,13 @@
|
|||
|
||||
# Check diff for generated files
|
||||
.PHONY: check-diff
|
||||
check-diff:
|
||||
/bin/bash -c 'if [[ -n "$$(git status --porcelain)" ]]; then \
|
||||
echo "ERROR: Generated files are out of date"; \
|
||||
echo "Please regenerate using make clean all for api and kubernetes_platform"; \
|
||||
echo "Changes found in the following files:"; \
|
||||
git status; \
|
||||
echo "Diff of changes:"; \
|
||||
git diff; \
|
||||
exit 1; \
|
||||
fi'
|
13
OWNERS
13
OWNERS
|
@ -1,8 +1,15 @@
|
|||
# When adding a new root level approver (i.e a maintainer), ensure that the following locations are also updated
|
||||
# https://github.com/kubeflow/internal-acls/blob/master/github-orgs/kubeflow/org.yaml#L1085
|
||||
# https://github.com/kubeflow/internal-acls/blob/master/github-orgs/kubeflow/org.yaml#L1213
|
||||
# https://github.com/kubeflow/community/blob/master/wgs.yaml#L354
|
||||
|
||||
approvers:
|
||||
- chensun
|
||||
- IronPan
|
||||
- droctothorpe
|
||||
- HumairAK
|
||||
- james-jwu
|
||||
- zijianjoy
|
||||
- mprahl
|
||||
- zazulam
|
||||
reviewers:
|
||||
- chensun
|
||||
- zijianjoy
|
||||
- HumairAK
|
||||
|
|
46
README.md
46
README.md
|
@ -1,7 +1,11 @@
|
|||
# Kubeflow Pipelines
|
||||
|
||||
[](https://coveralls.io/github/kubeflow/pipelines?branch=master)
|
||||
[](https://kubeflow-pipelines.readthedocs.io/en/stable/?badge=latest)
|
||||
[](https://pypi.org/project/kfp)
|
||||
[](https://pypi.org/project/kfp)
|
||||
[](https://www.bestpractices.dev/projects/9938)
|
||||
[](https://deepwiki.com/kubeflow/pipelines)
|
||||
|
||||
## Overview of the Kubeflow pipelines service
|
||||
|
||||
|
@ -17,46 +21,58 @@ The Kubeflow pipelines service has the following goals:
|
|||
|
||||
## Installation
|
||||
|
||||
* Install Kubeflow Pipelines from choices described in [Installation Options for Kubeflow Pipelines](https://www.kubeflow.org/docs/pipelines/installation/overview/).
|
||||
* Kubeflow Pipelines can be installed as part of the [Kubeflow Platform](https://www.kubeflow.org/docs/started/installing-kubeflow/#kubeflow-platform). Alternatively you can deploy [Kubeflow Pipelines](https://www.kubeflow.org/docs/components/pipelines/operator-guides/installation/) as a standalone service.
|
||||
|
||||
* The Docker container runtime has been deprecated on Kubernetes 1.20+. Kubeflow Pipelines has switched to use [Emissary Executor](https://www.kubeflow.org/docs/components/pipelines/installation/choose-executor/#emissary-executor) by default from Kubeflow Pipelines 1.8. Emissary executor is Container runtime agnostic, meaning you are able to run Kubeflow Pipelines on Kubernetes cluster with any [Container runtimes](https://kubernetes.io/docs/setup/production-environment/container-runtimes/).
|
||||
* The Docker container runtime has been deprecated on Kubernetes 1.20+. Kubeflow Pipelines has switched to use [Emissary Executor](https://www.kubeflow.org/docs/components/pipelines/legacy-v1/installation/choose-executor/#emissary-executor) by default from Kubeflow Pipelines 1.8. Emissary executor is Container runtime agnostic, meaning you are able to run Kubeflow Pipelines on Kubernetes cluster with any [Container runtimes](https://kubernetes.io/docs/setup/production-environment/container-runtimes/).
|
||||
|
||||
## Documentation
|
||||
|
||||
Get started with your first pipeline and read further information in the [Kubeflow Pipelines overview](https://www.kubeflow.org/docs/components/pipelines/introduction/).
|
||||
Get started with your first pipeline and read further information in the [Kubeflow Pipelines overview](https://www.kubeflow.org/docs/components/pipelines/overview/).
|
||||
|
||||
See the various ways you can [use the Kubeflow Pipelines SDK](https://www.kubeflow.org/docs/pipelines/sdk/sdk-overview/).
|
||||
See the various ways you can [use the Kubeflow Pipelines SDK](https://kubeflow-pipelines.readthedocs.io/en/stable/).
|
||||
|
||||
See the Kubeflow [Pipelines API doc](https://www.kubeflow.org/docs/pipelines/reference/api/kubeflow-pipeline-api-spec/) for API specification.
|
||||
See the Kubeflow [Pipelines API doc](https://www.kubeflow.org/docs/components/pipelines/reference/api/kubeflow-pipeline-api-spec/) for API specification.
|
||||
|
||||
Consult the [Python SDK reference docs](https://kubeflow-pipelines.readthedocs.io/en/stable/) when writing pipelines using the Python SDK.
|
||||
|
||||
Refer to the [versioning policy](./docs/release/versioning-policy.md) and [feature stages](./docs/release/feature-stages.md) documentation for more information about how we manage versions and feature stages (such as Alpha, Beta, and Stable).
|
||||
## Deep Wiki
|
||||
Check out our AI Powered repo documentation on [DeepWiki](https://deepwiki.com/kubeflow/pipelines).
|
||||
|
||||
> :warning: Please note, this is AI generated and may not have completely accurate information.
|
||||
|
||||
## Contributing to Kubeflow Pipelines
|
||||
|
||||
Before you start contributing to Kubeflow Pipelines, read the guidelines in [How to Contribute](./CONTRIBUTING.md). To learn how to build and deploy Kubeflow Pipelines from source code, read the [developer guide](./developer_guide.md).
|
||||
|
||||
## Kubeflow Pipelines Community
|
||||
|
||||
## Kubeflow Pipelines Community Meeting
|
||||
### Community Meeting
|
||||
|
||||
The meeting is happening every other Wed 10-11AM (PST)
|
||||
[Calendar Invite](https://calendar.google.com/event?action=TEMPLATE&tmeid=NTdoNG5uMDBtcnJlYmdlOWt1c2lkY25jdmlfMjAxOTExMTNUMTgwMDAwWiBqZXNzaWV6aHVAZ29vZ2xlLmNvbQ&tmsrc=jessiezhu%40google.com&scp=ALL) or [Join Meeting Directly](https://meet.google.com/phd-ixfj-kcr/)
|
||||
The Kubeflow Pipelines Community Meeting occurs every other Wed 10-11AM (PST).
|
||||
|
||||
[Calendar Invite](https://calendar.google.com/event?action=TEMPLATE&tmeid=NTdoNG5uMDBtcnJlYmdlOWt1c2lkY25jdmlfMjAxOTExMTNUMTgwMDAwWiBqZXNzaWV6aHVAZ29vZ2xlLmNvbQ&tmsrc=jessiezhu%40google.com&scp=ALL)
|
||||
|
||||
[Direct Meeting Link](https://zoom.us/j/92607298595?pwd%3DVlKLUbiguGkbT9oKbaoDmCxrhbRop7.1&sa=D&source=calendar&ust=1736264977415448&usg=AOvVaw1EIkjFsKy0d4yQPptIJS3x)
|
||||
|
||||
[Meeting notes](http://bit.ly/kfp-meeting-notes)
|
||||
|
||||
## Kubeflow Pipelines Slack Channel
|
||||
### Slack
|
||||
|
||||
[#kubeflow-pipelines](https://kubeflow.slack.com)
|
||||
We also have a slack channel (#kubeflow-pipelines) on the Cloud Native Computing Foundation Slack workspace. You can find more details at [https://www.kubeflow.org/docs/about/community/#kubeflow-slack-channels](https://www.kubeflow.org/docs/about/community/#kubeflow-slack-channels)
|
||||
|
||||
## Architecture
|
||||
|
||||
Details about the KFP Architecture can be found at [Architecture.md](docs/Architecture.md)
|
||||
|
||||
## Blog posts
|
||||
|
||||
* [From Raw Data to Model Serving: A Blueprint for the AI/ML Lifecycle with Kubeflow](https://blog.kubeflow.org/fraud-detection-e2e/) (By [Helber Belmiro](https://github.com/hbelmiro))
|
||||
* [Getting started with Kubeflow Pipelines](https://cloud.google.com/blog/products/ai-machine-learning/getting-started-kubeflow-pipelines) (By Amy Unruh)
|
||||
* How to create and deploy a Kubeflow Machine Learning Pipeline (By Lak Lakshmanan)
|
||||
* [Part 1: How to create and deploy a Kubeflow Machine Learning Pipeline](https://towardsdatascience.com/how-to-create-and-deploy-a-kubeflow-machine-learning-pipeline-part-1-efea7a4b650f)
|
||||
* [Part 2: How to deploy Jupyter notebooks as components of a Kubeflow ML pipeline](https://towardsdatascience.com/how-to-deploy-jupyter-notebooks-as-components-of-a-kubeflow-ml-pipeline-part-2-b1df77f4e5b3)
|
||||
* [Part 1: How to create and deploy a Kubeflow Machine Learning Pipeline](https://medium.com/data-science/how-to-create-and-deploy-a-kubeflow-machine-learning-pipeline-part-1-efea7a4b650f)
|
||||
* [Part 2: How to deploy Jupyter notebooks as components of a Kubeflow ML pipeline](https://medium.com/data-science/how-to-deploy-jupyter-notebooks-as-components-of-a-kubeflow-ml-pipeline-part-2-b1df77f4e5b3)
|
||||
* [Part 3: How to carry out CI/CD in Machine Learning (“MLOps”) using Kubeflow ML pipelines](https://medium.com/google-cloud/how-to-carry-out-ci-cd-in-machine-learning-mlops-using-kubeflow-ml-pipelines-part-3-bdaf68082112)
|
||||
* [Tekton optimizations for Kubeflow Pipelines 2.0](https://developer.ibm.com/blogs/awb-tekton-optimizations-for-kubeflow-pipelines-2-0) (By Tommy Li)
|
||||
|
||||
## Acknowledgments
|
||||
|
||||
Kubeflow pipelines uses [Argo Workflows](https://github.com/argoproj/argo-workflows) by default under the hood to orchestrate Kubernetes resources. The Argo community has been very supportive and we are very grateful. Additionally there is Tekton backend available as well. To access it, please refer to [Kubeflow Pipelines with Tekton repository](https://github.com/kubeflow/kfp-tekton).
|
||||
Kubeflow pipelines uses [Argo Workflows](https://github.com/argoproj/argo-workflows) by default under the hood to orchestrate Kubernetes resources. The Argo community has been very supportive and we are very grateful.
|
||||
|
|
125
RELEASE.md
125
RELEASE.md
|
@ -1,6 +1,5 @@
|
|||
# Kubeflow Pipelines Release Process
|
||||
|
||||
<!-- This TOC is auto generated by "markdown all in one" VS Code plugin -->
|
||||
- [Kubeflow Pipelines Release Process](#kubeflow-pipelines-release-process)
|
||||
- [Schedule](#schedule)
|
||||
- [Release Tags and Branches](#release-tags-and-branches)
|
||||
|
@ -13,11 +12,16 @@
|
|||
- [Cutting a release branch (Optional)](#cutting-a-release-branch-optional)
|
||||
- [Before release](#before-release)
|
||||
- [Releasing from release branch](#releasing-from-release-branch)
|
||||
- [Release KFP Python Packages](#releasing-kfp-python-packages)
|
||||
- [Create GitHub Release](#create-github-release)
|
||||
- [Sync Master Branch with Release](#sync-master-branch-with-latest-release)
|
||||
- [Release Process Development](#release-process-development)
|
||||
|
||||
## Schedule
|
||||
|
||||
Kubeflow Pipelines has weekly patch releases and monthly minor releases.
|
||||
Kubeflow Pipelines has quarterly minor releases. Patch releases occur on a
|
||||
need basis and don't currently operate on a schedule.
|
||||
|
||||
Patch releases only contain bug fixes, while minor releases have new features
|
||||
additionally.
|
||||
|
||||
|
@ -85,9 +89,7 @@ if you only want to use or contribute to this repo.
|
|||
|
||||
- OS: Linux or MacOS
|
||||
- Permissions needed
|
||||
- Can create a branch in github.com/kubeflow/pipelines.
|
||||
- (Before [#4840](https://github.com/kubeflow/pipelines/issues/4840) is resolved) one would need the admin access to kubeflow/pipelines repo.
|
||||
- Can trigger cloudbuild jobs in `google.com/ml-pipeline-test` GCP project.
|
||||
- Admin access to kubeflow/pipelines repo.
|
||||
- Tools that should be in your `$PATH`
|
||||
- docker
|
||||
- python3
|
||||
|
@ -167,11 +169,11 @@ Do the following things before a release:
|
|||
console.log(Array.from(document.querySelectorAll('[id^="issue_"][id*="_link"]')).map(el => /issue_(.*)_link/.exec(el.id)[1]).join(' '))
|
||||
```
|
||||
|
||||
1. Verify cloudbuild and postsubmit tests are passing: visit <https://github.com/kubeflow/pipelines/commits/master> for master branch.
|
||||
1. Verify release branch CI is passing: visit <https://github.com/kubeflow/pipelines/commits/master> for master branch.
|
||||
|
||||

|
||||

|
||||
|
||||
If not, contact the KFP team to determine if the failure(s) would block the release. You can also retry the failed job by opening the detail page of prow job, and click the refresh button next ot the job title.
|
||||
If not, contact the KFP team to determine if the failure(s) would block the release.
|
||||
|
||||
### Releasing from release branch
|
||||
|
||||
|
@ -184,7 +186,7 @@ Note, when releasing from master, all the below mentions of "release branch" mea
|
|||
- `1.0.1`
|
||||
- `1.1.0`
|
||||
- ...
|
||||
Set the version by using `VERSION=<version-value>`. Contact @chensun if you are not sure what next version should be.
|
||||
Set the version by using `VERSION=<version-value>`. Contact @chensun or @HumairAK if you are not sure what next version should be.
|
||||
|
||||
1. Update all version refs in release branch by
|
||||
|
||||
|
@ -195,39 +197,17 @@ Note, when releasing from master, all the below mentions of "release branch" mea
|
|||
It will prompt you whether to push it to release branch. Press `y` and hit `Enter`.
|
||||
|
||||
Note, the script will clone kubeflow/pipelines repo into a temporary location on your computer, make those changes and attempt to push to upstream, so that it won't interfere with your current git repo.
|
||||
|
||||
> [!Note]
|
||||
> If you see error "docker.sock: connect: permission error", you need to [allow managing docker as a non-root user](https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user).
|
||||
|
||||
If you see error "docker.sock: connect: permission error", you need to [allow managing docker as a non-root user](https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user).
|
||||
1. Build the release images by using the [Build images from sources](https://github.com/kubeflow/pipelines/actions/workflows/image-builds.yml).
|
||||
|
||||
1. View related cloudbuild jobs' statuses by clicking the latest commit's status icon
|
||||
in the release branch. Refer to the screenshot below to find the page.
|
||||
The target tag should be `$VERSION`.
|
||||
|
||||
1. Wait and make sure the `build-each-commit` cloudbuild job that builds all images
|
||||
in gcr.io/ml-pipeline-test succeeded. If it fails, please click "View more details
|
||||
on Google Cloud Build" and then "Retry".
|
||||
)
|
||||
|
||||
NOTE: you can find your latest release commit in <https://github.com/kubeflow/pipelines/commits/master> and select your release branch.
|
||||

|
||||
|
||||
1. Select the `release-on-tag` cloudbuild job that copies built images and artifacts to
|
||||
public image registry and gcs bucket. This job should have already failed because
|
||||
artifacts haven't been built. Now, please click "View more details on Google Cloud Build"
|
||||
and then "Retry", because after waiting for previous step, artifacts are now ready.
|
||||
|
||||
NOTE: **DO NOT** click the "Re-run" button from GitHub Actions status page.
|
||||
It will create a build with "Branch: $BRANCH" instead of "TAG: $VERSION".
|
||||
Open "View more details on Google Cloud Build", and rerun from there.
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
TODO: we should have an automation KFP cluster, and the waiting and submiting
|
||||
`release-on-tag` cloudbuild task should happen automatically.
|
||||
|
||||
NOTE: postsubmit tests will most likely fail for the release commit, this is expected, postsubmit
|
||||
tests start right after the commit is in GitHub repo, but some artifacts they depend on are still
|
||||
being built by the processes in these two steps.
|
||||
1. Search "PyPI" in Google internal release doc for getting password of kubeflow-pipelines user.
|
||||
### Releasing KFP Python Packages
|
||||
|
||||
1. Release `kfp-server-api` python packages to PyPI.
|
||||
|
||||
|
@ -241,15 +221,42 @@ and then "Retry", because after waiting for previous step, artifacts are now rea
|
|||
```
|
||||
|
||||
1. Release `kfp` python packages to PyPI. (Note: Please skip this step for backend release, this step will be handled by SDK release.)
|
||||
|
||||
|
||||
Update the SDK version in `version.py` and `readthedocs` `versions.json`, example PR [here](https://github.com/kubeflow/pipelines/pull/11715/files).
|
||||
|
||||
```bash
|
||||
pip3 install twine --user
|
||||
gsutil cp gs://ml-pipeline/release/$VERSION/kfp.tar.gz kfp-$VERSION.tar.gz
|
||||
cd sdk/python
|
||||
./build.sh kfp-$VERSION.tar.gz
|
||||
python3 -m twine upload kfp-$VERSION.tar.gz
|
||||
```
|
||||
|
||||
!!! The file name must contain the version. See <https://github.com/kubeflow/pipelines/issues/1292>
|
||||
|
||||
1. Release `kfp-kubernetes` python packages to PyPI. (Note: Please skip this step for backend release, this step will be handled by SDK release.)
|
||||
|
||||
Update the KFP Kubernetes SDK version in `__init__.py` and `readthedocs` `versions.json`, example PR [here](https://github.com/kubeflow/pipelines/pull/11380).
|
||||
|
||||
```bash
|
||||
export KFP_KUBERNETES_VERSION=
|
||||
pip3 install twine --user
|
||||
cd kubernetes_platform/python
|
||||
./create_release_branch.sh
|
||||
```
|
||||
|
||||
Follow the output push instructions to **commit and push the branch to KFP**, then do the following:
|
||||
|
||||
```bash
|
||||
# set this to the appropriate version that matches what was set in __init__.py earlier
|
||||
export KFP_KUBERNETES_VERSION=
|
||||
cd kubernetes_platform/python
|
||||
./release.sh
|
||||
```
|
||||
|
||||
Note that this script will build the package, test install, and push to PyPi.
|
||||
|
||||
### Create GitHub Release
|
||||
|
||||
1. Create a GitHub release using `$VERSION` git tag and title `Version $VERSION`,
|
||||
fill in the description. Detailed steps:
|
||||
|
||||
|
@ -260,9 +267,9 @@ fill in the description. Detailed steps:
|
|||
1. Use this template for public releases and replace the `$VERSION` with real values.
|
||||
|
||||
<pre>
|
||||
To deploy Kubeflow Pipelines in an existing cluster, follow the instruction in [here](https://www.kubeflow.org/docs/pipelines/standalone-deployment-gcp/) or via UI [here](https://console.cloud.google.com/ai-platform/pipelines)
|
||||
To deploy Kubeflow Pipelines in an existing cluster, follow the instruction in [here](https://www.kubeflow.org/docs/components/pipelines/operator-guides/installation/)
|
||||
|
||||
Install python SDK (python 3.7 above) by running:
|
||||
Install python SDK (python 3.9 above) by running:
|
||||
|
||||
```bash
|
||||
python3 -m pip install kfp kfp-server-api --upgrade
|
||||
|
@ -275,9 +282,9 @@ fill in the description. Detailed steps:
|
|||
***This is a prerelease*** checkbox in the GitHub release UI.
|
||||
|
||||
<pre>
|
||||
To deploy Kubeflow Pipelines in an existing cluster, follow the instruction in [here](https://www.kubeflow.org/docs/pipelines/standalone-deployment-gcp/).
|
||||
To deploy Kubeflow Pipelines in an existing cluster, follow the instruction in [here](https://www.kubeflow.org/docs/components/pipelines/operator-guides/installation/).
|
||||
|
||||
Install kfp-server-api package (python 3.7 above) by running:
|
||||
Install kfp-server-api package (python 3.9 above) by running:
|
||||
|
||||
```bash
|
||||
python3 -m pip install kfp-server-api==$VERSION --upgrade
|
||||
|
@ -289,6 +296,8 @@ fill in the description. Detailed steps:
|
|||
|
||||
NOTE, kfp python SDK is **NOT** included and released separately.
|
||||
</pre>
|
||||
|
||||
### Sync Master Branch with latest release
|
||||
|
||||
1. **(Do this step only when releasing from a NON-master release branch)**
|
||||
Update master branch to the same version and include latest changelog:
|
||||
|
@ -321,8 +330,32 @@ Update master branch to the same version and include latest changelog:
|
|||
|
||||
and create a PR to update the version, e.g. <https://github.com/kubeflow/website/pull/1942>.
|
||||
|
||||
1. Follow [Upgrade KFP](https://github.com/kubeflow/testing/tree/master/test-infra/kfp) instruction to upgrade KFP manifests in test-infra.
|
||||
|
||||
## Release Process Development
|
||||
|
||||
Please refer to [./test/release](./test/release).
|
||||
|
||||
## Versioning Policy in KFP
|
||||
|
||||
Starting from version **2.14**, all major and minor versions (X.Y) of the Kubeflow Pipelines (KFP) components are aligned. The following components are included in this alignment:
|
||||
|
||||
* **KFP Backend / UI**
|
||||
* **KFP Python SDK**
|
||||
* **KFP Python Kubernetes Platform SDK**
|
||||
* **KFP Python Pipeline Specification**
|
||||
* **KFP Server API**
|
||||
|
||||
### Versioning and Compatibility Policy
|
||||
|
||||
* **API Compatibility:**
|
||||
All KFP components sharing the same major and minor version (X.Y) are guaranteed to be API-compatible.
|
||||
|
||||
* **Backward Compatibility:**
|
||||
The KFP project will make a *best effort* to maintain backward compatibility within a given **major version** for all Python SDK packages.
|
||||
Specifically:
|
||||
|
||||
* Newer versions of the KFP Python SDK within the same major release (e.g., 2.x) should continue to function with older versions of the KFP backend.
|
||||
* However, newly introduced features in a later SDK minor version may require a matching or newer backend version to function correctly. For example:
|
||||
* A feature introduced in `kfp==2.15` is not guaranteed to be supported by a `2.14` backend. In such cases, upgrading the backend to version `2.15` or later is necessary.
|
||||
|
||||
* **Patch Releases:**
|
||||
Patch versions (X.Y.Z) may include bug fixes, maintenance updates, and minor feature enhancements. These changes must not break API compatibility or violate the support guarantees outlined above.
|
||||
|
|
65
SECURITY.md
65
SECURITY.md
|
@ -1,5 +1,64 @@
|
|||
# Private Security Vulnerability Reporting
|
||||
# Security Policy
|
||||
|
||||
When reporting a vulnerability, please include a description of the issue, the steps you took to create the issue, affected versions, and, if known, mitigations for the issue. If the issue is confirmed as a vulnerability, we will open a Security Advisory. This project follows a 90 day disclosure timeline.
|
||||
## Supported Versions
|
||||
|
||||
To report a security issue, follow [these instructions](https://docs.github.com/en/code-security/security-advisories/guidance-on-reporting-and-writing/privately-reporting-a-security-vulnerability).
|
||||
Kubeflow Pipelines versions are expressed as `X.Y.Z`, where X is the major version,
|
||||
Y is the minor version, and Z is the patch version, following the
|
||||
[Semantic Versioning](https://semver.org/) terminology.
|
||||
|
||||
The Kubeflow Pipelines project maintains release branches for the most recent two minor releases.
|
||||
Applicable fixes, including security fixes, may be backported to those two release branches,
|
||||
depending on severity and feasibility.
|
||||
|
||||
Users are encouraged to stay updated with the latest releases to benefit from security patches and
|
||||
improvements.
|
||||
|
||||
## Reporting a Vulnerability
|
||||
|
||||
We're extremely grateful for security researchers and users that report vulnerabilities to the
|
||||
Kubeflow Open Source Community. All reports are thoroughly investigated by Kubeflow projects owners.
|
||||
|
||||
You can use the following ways to report security vulnerabilities privately:
|
||||
|
||||
- Using the Kubeflow Pipelines repository [GitHub Security Advisory](https://github.com/kubeflow/pipelines/security/advisories/new).
|
||||
- Using our private Kubeflow Steering Committee mailing list: ksc@kubeflow.org.
|
||||
|
||||
Please provide detailed information to help us understand and address the issue promptly.
|
||||
|
||||
## Disclosure Process
|
||||
|
||||
**Acknowledgment**: We will acknowledge receipt of your report within 10 business days.
|
||||
|
||||
**Assessment**: The Kubeflow projects owners will investigate the reported issue to determine its
|
||||
validity and severity.
|
||||
|
||||
**Resolution**: If the issue is confirmed, we will work on a fix and prepare a release.
|
||||
|
||||
**Notification**: Once a fix is available, we will notify the reporter and coordinate a public
|
||||
disclosure.
|
||||
|
||||
**Public Disclosure**: Details of the vulnerability and the fix will be published in the project's
|
||||
release notes and communicated through appropriate channels.
|
||||
|
||||
## Prevention Mechanisms
|
||||
|
||||
Kubeflow Pipelines employs several measures to prevent security issues:
|
||||
|
||||
**Code Reviews**: All code changes are reviewed by maintainers to ensure code quality and security.
|
||||
|
||||
**Dependency Management**: Regular updates and monitoring of dependencies (e.g. Dependabot) to
|
||||
address known vulnerabilities.
|
||||
|
||||
**Continuous Integration**: Automated testing and security checks are integrated into the CI/CD pipeline.
|
||||
|
||||
**Image Scanning**: Container images are scanned for vulnerabilities.
|
||||
|
||||
## Communication Channels
|
||||
|
||||
For the general questions please join the following resources:
|
||||
|
||||
- Kubeflow [Slack channels](https://www.kubeflow.org/docs/about/community/#kubeflow-slack-channels).
|
||||
|
||||
- Kubeflow discuss [mailing list](https://www.kubeflow.org/docs/about/community/#kubeflow-mailing-list).
|
||||
|
||||
Please **do not report** security vulnerabilities through public channels.
|
||||
|
|
62
api/Makefile
62
api/Makefile
|
@ -12,8 +12,8 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# Contact one of Bobgy, capri-xiyue or zijianjoy if this remote image needs an update.
|
||||
PREBUILT_REMOTE_IMAGE=gcr.io/ml-pipeline-test/api-generator:latest
|
||||
# Contact one of chensun, HumairAK if this remote image needs an update.
|
||||
PREBUILT_REMOTE_IMAGE=ghcr.io/kubeflow/kfp-api-generator:1.2
|
||||
|
||||
.PHONY: all
|
||||
all: golang python
|
||||
|
@ -36,10 +36,23 @@ clean-go:
|
|||
rm -rf v2alpha1/go
|
||||
rm -f v2alpha1/google/rpc/status.proto
|
||||
|
||||
# Generate Python package.
|
||||
# Build Python package using pre-built image
|
||||
.PHONY: python
|
||||
python: v2alpha1/pipeline_spec.proto v2alpha1/google/rpc/status.proto
|
||||
python3 v2alpha1/python/generate_proto.py && cd v2alpha1/python && python3 setup.py bdist_wheel
|
||||
python: python fetch-protos
|
||||
docker run --interactive --rm \
|
||||
--user $$(id -u):$$(id -g) \
|
||||
-e HOME=/tmp \
|
||||
-v "$$(pwd)/..":"/go/src/github.com/kubeflow/pipelines":z \
|
||||
$(PREBUILT_REMOTE_IMAGE) \
|
||||
sh -c 'cd /go/src/github.com/kubeflow/pipelines/api/v2alpha1/python && \
|
||||
python3 -m pip install --user --break-system-packages -r requirements.txt && \
|
||||
python3 generate_proto.py && \
|
||||
python3 setup.py sdist bdist_wheel --dist-dir ./dist'
|
||||
|
||||
# Build and locally install Python package using editable mode for development.
|
||||
.PHONY: python-dev
|
||||
python-dev: v2alpha1/pipeline_spec.proto fetch-protos
|
||||
python3 v2alpha1/python/generate_proto.py && cd v2alpha1/python && pip install -e .
|
||||
|
||||
# Delete all generated Python packages
|
||||
.PHONY: clean-python
|
||||
|
@ -47,12 +60,13 @@ clean-python:
|
|||
rm -rf v2alpha1/python/build
|
||||
rm -rf v2alpha1/python/dist
|
||||
rm -rf v2alpha1/python/kfp_pipeline_spec.egg-info
|
||||
rm -rf v2alpha1/google
|
||||
rm -f v2alpha1/python/kfp/pipeline_spec/pipeline_spec_pb2.py
|
||||
rm -f v2alpha1/google/rpc/status.proto
|
||||
|
||||
##########################
|
||||
###########################################
|
||||
# The following are IMPLEMENTATION DETAILS.
|
||||
##########################
|
||||
###########################################
|
||||
|
||||
# Generates proto packages locally, this should only be called:
|
||||
# * during development
|
||||
|
@ -60,7 +74,7 @@ clean-python:
|
|||
.PHONY: generate
|
||||
generate: go_pipelinespec go_cachekey
|
||||
|
||||
go_pipelinespec: v2alpha1/pipeline_spec.proto v2alpha1/google/rpc/status.proto
|
||||
go_pipelinespec: v2alpha1/pipeline_spec.proto fetch-protos
|
||||
mkdir -p v2alpha1/go/pipelinespec
|
||||
cd v2alpha1 && protoc -I=. \
|
||||
--go_out=go/pipelinespec \
|
||||
|
@ -74,12 +88,32 @@ go_cachekey: v2alpha1/pipeline_spec.proto v2alpha1/cache_key.proto
|
|||
--go_opt=paths=source_relative \
|
||||
cache_key.proto
|
||||
|
||||
# Fetch dependency proto
|
||||
v2alpha1/google/rpc/status.proto:
|
||||
mkdir -p v2alpha1/google/rpc
|
||||
wget -O v2alpha1/google/rpc/status.proto https://raw.githubusercontent.com/googleapis/googleapis/047d3a8ac7f75383855df0166144f891d7af08d9/google/rpc/status.proto
|
||||
#########################################
|
||||
# The following are dependencies
|
||||
# Required for compiling the proto files
|
||||
#########################################
|
||||
|
||||
GOOGLEAPIS_COMMIT ?= fecd7d35f46753b45bf4519f6342495a181740c9
|
||||
PROTOBUF_TAG ?= v26.0
|
||||
PROTO_DST_DIR := v2alpha1/google
|
||||
TMP_PROTOBUF_DIR := /tmp/protobuf-src
|
||||
|
||||
.PHONY: fetch-protos fetch-googleapis fetch-protobuf protoc-gen-go clean-protobuf-tmp
|
||||
|
||||
fetch-protos: fetch-googleapis fetch-protobuf
|
||||
|
||||
fetch-googleapis:
|
||||
@echo "Downloading google/rpc/status.proto from googleapis@$(GOOGLEAPIS_COMMIT)..."
|
||||
mkdir -p $(PROTO_DST_DIR)/rpc
|
||||
wget -qO $(PROTO_DST_DIR)/rpc/status.proto https://raw.githubusercontent.com/googleapis/googleapis/$(GOOGLEAPIS_COMMIT)/google/rpc/status.proto
|
||||
|
||||
fetch-protobuf: clean-protobuf-tmp
|
||||
@git clone --depth 1 --branch $(PROTOBUF_TAG) https://github.com/protocolbuffers/protobuf.git $(TMP_PROTOBUF_DIR)
|
||||
@mkdir -p $(PROTO_DST_DIR)/protobuf
|
||||
@cp $(TMP_PROTOBUF_DIR)/src/google/protobuf/*.proto $(PROTO_DST_DIR)/protobuf/
|
||||
|
||||
clean-protobuf-tmp:
|
||||
@rm -rf $(TMP_PROTOBUF_DIR)
|
||||
|
||||
# protoc-gen-go is already installed in api-generator image
|
||||
.PHONY: protoc-gen-go
|
||||
protoc-gen-go:
|
||||
go install google.golang.org/protobuf/cmd/protoc-gen-go
|
||||
|
|
|
@ -1,7 +1,9 @@
|
|||
approvers:
|
||||
- chensun
|
||||
- connor-mccarthy
|
||||
- neuromage
|
||||
reviewers:
|
||||
- chensun
|
||||
- connor-mccarthy
|
||||
- droctothorpe
|
||||
- zazulam
|
||||
- mprahl
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
module github.com/kubeflow/pipelines/api
|
||||
|
||||
go 1.16
|
||||
go 1.23
|
||||
|
||||
require (
|
||||
google.golang.org/genproto v0.0.0-20211026145609-4688e4c4e024
|
||||
|
|
|
@ -14,8 +14,8 @@
|
|||
|
||||
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||||
// versions:
|
||||
// protoc-gen-go v1.26.0
|
||||
// protoc v3.17.3
|
||||
// protoc-gen-go v1.36.6
|
||||
// protoc v6.31.1
|
||||
// source: cache_key.proto
|
||||
|
||||
package cachekey
|
||||
|
@ -27,6 +27,7 @@ import (
|
|||
structpb "google.golang.org/protobuf/types/known/structpb"
|
||||
reflect "reflect"
|
||||
sync "sync"
|
||||
unsafe "unsafe"
|
||||
)
|
||||
|
||||
const (
|
||||
|
@ -37,26 +38,23 @@ const (
|
|||
)
|
||||
|
||||
type CacheKey struct {
|
||||
state protoimpl.MessageState
|
||||
sizeCache protoimpl.SizeCache
|
||||
unknownFields protoimpl.UnknownFields
|
||||
|
||||
InputArtifactNames map[string]*ArtifactNameList `protobuf:"bytes,1,rep,name=inputArtifactNames,proto3" json:"inputArtifactNames,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
|
||||
// Deprecated: Do not use.
|
||||
InputParameters map[string]*pipelinespec.Value `protobuf:"bytes,2,rep,name=inputParameters,proto3" json:"inputParameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
|
||||
OutputArtifactsSpec map[string]*pipelinespec.RuntimeArtifact `protobuf:"bytes,3,rep,name=outputArtifactsSpec,proto3" json:"outputArtifactsSpec,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
|
||||
OutputParametersSpec map[string]string `protobuf:"bytes,4,rep,name=outputParametersSpec,proto3" json:"outputParametersSpec,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
|
||||
state protoimpl.MessageState `protogen:"open.v1"`
|
||||
InputArtifactNames map[string]*ArtifactNameList `protobuf:"bytes,1,rep,name=inputArtifactNames,proto3" json:"inputArtifactNames,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
|
||||
// Deprecated: Marked as deprecated in cache_key.proto.
|
||||
InputParameters map[string]*pipelinespec.Value `protobuf:"bytes,2,rep,name=inputParameters,proto3" json:"inputParameters,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
|
||||
OutputArtifactsSpec map[string]*pipelinespec.RuntimeArtifact `protobuf:"bytes,3,rep,name=outputArtifactsSpec,proto3" json:"outputArtifactsSpec,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
|
||||
OutputParametersSpec map[string]string `protobuf:"bytes,4,rep,name=outputParametersSpec,proto3" json:"outputParametersSpec,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
|
||||
ContainerSpec *ContainerSpec `protobuf:"bytes,5,opt,name=containerSpec,proto3" json:"containerSpec,omitempty"`
|
||||
InputParameterValues map[string]*structpb.Value `protobuf:"bytes,6,rep,name=input_parameter_values,json=inputParameterValues,proto3" json:"input_parameter_values,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
|
||||
InputParameterValues map[string]*structpb.Value `protobuf:"bytes,6,rep,name=input_parameter_values,json=inputParameterValues,proto3" json:"input_parameter_values,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
|
||||
unknownFields protoimpl.UnknownFields
|
||||
sizeCache protoimpl.SizeCache
|
||||
}
|
||||
|
||||
func (x *CacheKey) Reset() {
|
||||
*x = CacheKey{}
|
||||
if protoimpl.UnsafeEnabled {
|
||||
mi := &file_cache_key_proto_msgTypes[0]
|
||||
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
|
||||
ms.StoreMessageInfo(mi)
|
||||
}
|
||||
mi := &file_cache_key_proto_msgTypes[0]
|
||||
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
|
||||
ms.StoreMessageInfo(mi)
|
||||
}
|
||||
|
||||
func (x *CacheKey) String() string {
|
||||
|
@ -67,7 +65,7 @@ func (*CacheKey) ProtoMessage() {}
|
|||
|
||||
func (x *CacheKey) ProtoReflect() protoreflect.Message {
|
||||
mi := &file_cache_key_proto_msgTypes[0]
|
||||
if protoimpl.UnsafeEnabled && x != nil {
|
||||
if x != nil {
|
||||
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
|
||||
if ms.LoadMessageInfo() == nil {
|
||||
ms.StoreMessageInfo(mi)
|
||||
|
@ -89,7 +87,7 @@ func (x *CacheKey) GetInputArtifactNames() map[string]*ArtifactNameList {
|
|||
return nil
|
||||
}
|
||||
|
||||
// Deprecated: Do not use.
|
||||
// Deprecated: Marked as deprecated in cache_key.proto.
|
||||
func (x *CacheKey) GetInputParameters() map[string]*pipelinespec.Value {
|
||||
if x != nil {
|
||||
return x.InputParameters
|
||||
|
@ -126,21 +124,18 @@ func (x *CacheKey) GetInputParameterValues() map[string]*structpb.Value {
|
|||
}
|
||||
|
||||
type ContainerSpec struct {
|
||||
state protoimpl.MessageState
|
||||
sizeCache protoimpl.SizeCache
|
||||
state protoimpl.MessageState `protogen:"open.v1"`
|
||||
Image string `protobuf:"bytes,1,opt,name=image,proto3" json:"image,omitempty"`
|
||||
CmdArgs []string `protobuf:"bytes,2,rep,name=cmdArgs,proto3" json:"cmdArgs,omitempty"`
|
||||
unknownFields protoimpl.UnknownFields
|
||||
|
||||
Image string `protobuf:"bytes,1,opt,name=image,proto3" json:"image,omitempty"`
|
||||
CmdArgs []string `protobuf:"bytes,2,rep,name=cmdArgs,proto3" json:"cmdArgs,omitempty"`
|
||||
sizeCache protoimpl.SizeCache
|
||||
}
|
||||
|
||||
func (x *ContainerSpec) Reset() {
|
||||
*x = ContainerSpec{}
|
||||
if protoimpl.UnsafeEnabled {
|
||||
mi := &file_cache_key_proto_msgTypes[1]
|
||||
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
|
||||
ms.StoreMessageInfo(mi)
|
||||
}
|
||||
mi := &file_cache_key_proto_msgTypes[1]
|
||||
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
|
||||
ms.StoreMessageInfo(mi)
|
||||
}
|
||||
|
||||
func (x *ContainerSpec) String() string {
|
||||
|
@ -151,7 +146,7 @@ func (*ContainerSpec) ProtoMessage() {}
|
|||
|
||||
func (x *ContainerSpec) ProtoReflect() protoreflect.Message {
|
||||
mi := &file_cache_key_proto_msgTypes[1]
|
||||
if protoimpl.UnsafeEnabled && x != nil {
|
||||
if x != nil {
|
||||
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
|
||||
if ms.LoadMessageInfo() == nil {
|
||||
ms.StoreMessageInfo(mi)
|
||||
|
@ -181,20 +176,17 @@ func (x *ContainerSpec) GetCmdArgs() []string {
|
|||
}
|
||||
|
||||
type ArtifactNameList struct {
|
||||
state protoimpl.MessageState
|
||||
sizeCache protoimpl.SizeCache
|
||||
state protoimpl.MessageState `protogen:"open.v1"`
|
||||
ArtifactNames []string `protobuf:"bytes,1,rep,name=artifactNames,proto3" json:"artifactNames,omitempty"`
|
||||
unknownFields protoimpl.UnknownFields
|
||||
|
||||
ArtifactNames []string `protobuf:"bytes,1,rep,name=artifactNames,proto3" json:"artifactNames,omitempty"`
|
||||
sizeCache protoimpl.SizeCache
|
||||
}
|
||||
|
||||
func (x *ArtifactNameList) Reset() {
|
||||
*x = ArtifactNameList{}
|
||||
if protoimpl.UnsafeEnabled {
|
||||
mi := &file_cache_key_proto_msgTypes[2]
|
||||
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
|
||||
ms.StoreMessageInfo(mi)
|
||||
}
|
||||
mi := &file_cache_key_proto_msgTypes[2]
|
||||
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
|
||||
ms.StoreMessageInfo(mi)
|
||||
}
|
||||
|
||||
func (x *ArtifactNameList) String() string {
|
||||
|
@ -205,7 +197,7 @@ func (*ArtifactNameList) ProtoMessage() {}
|
|||
|
||||
func (x *ArtifactNameList) ProtoReflect() protoreflect.Message {
|
||||
mi := &file_cache_key_proto_msgTypes[2]
|
||||
if protoimpl.UnsafeEnabled && x != nil {
|
||||
if x != nil {
|
||||
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
|
||||
if ms.LoadMessageInfo() == nil {
|
||||
ms.StoreMessageInfo(mi)
|
||||
|
@ -229,105 +221,51 @@ func (x *ArtifactNameList) GetArtifactNames() []string {
|
|||
|
||||
var File_cache_key_proto protoreflect.FileDescriptor
|
||||
|
||||
var file_cache_key_proto_rawDesc = []byte{
|
||||
0x0a, 0x0f, 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x6b, 0x65, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74,
|
||||
0x6f, 0x12, 0x0c, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x1a,
|
||||
0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66,
|
||||
0x2f, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x13, 0x70,
|
||||
0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x73, 0x70, 0x65, 0x63, 0x2e, 0x70, 0x72, 0x6f,
|
||||
0x74, 0x6f, 0x22, 0x8a, 0x08, 0x0a, 0x08, 0x43, 0x61, 0x63, 0x68, 0x65, 0x4b, 0x65, 0x79, 0x12,
|
||||
0x5e, 0x0a, 0x12, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74,
|
||||
0x4e, 0x61, 0x6d, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x6d, 0x6c,
|
||||
0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x43, 0x61, 0x63, 0x68, 0x65,
|
||||
0x4b, 0x65, 0x79, 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63,
|
||||
0x74, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x12, 0x69, 0x6e, 0x70,
|
||||
0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x12,
|
||||
0x59, 0x0a, 0x0f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65,
|
||||
0x72, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69,
|
||||
0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x43, 0x61, 0x63, 0x68, 0x65, 0x4b, 0x65, 0x79,
|
||||
0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73,
|
||||
0x45, 0x6e, 0x74, 0x72, 0x79, 0x42, 0x02, 0x18, 0x01, 0x52, 0x0f, 0x69, 0x6e, 0x70, 0x75, 0x74,
|
||||
0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x61, 0x0a, 0x13, 0x6f, 0x75,
|
||||
0x74, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x53, 0x70, 0x65,
|
||||
0x63, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70,
|
||||
0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x43, 0x61, 0x63, 0x68, 0x65, 0x4b, 0x65, 0x79, 0x2e,
|
||||
0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x53,
|
||||
0x70, 0x65, 0x63, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x13, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74,
|
||||
0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x12, 0x64, 0x0a,
|
||||
0x14, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72,
|
||||
0x73, 0x53, 0x70, 0x65, 0x63, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x6d, 0x6c,
|
||||
0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x43, 0x61, 0x63, 0x68, 0x65,
|
||||
0x4b, 0x65, 0x79, 0x2e, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65,
|
||||
0x74, 0x65, 0x72, 0x73, 0x53, 0x70, 0x65, 0x63, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x14, 0x6f,
|
||||
0x75, 0x74, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x53,
|
||||
0x70, 0x65, 0x63, 0x12, 0x41, 0x0a, 0x0d, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72,
|
||||
0x53, 0x70, 0x65, 0x63, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x6d, 0x6c, 0x5f,
|
||||
0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69,
|
||||
0x6e, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x52, 0x0d, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e,
|
||||
0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x12, 0x66, 0x0a, 0x16, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f,
|
||||
0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73,
|
||||
0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65,
|
||||
0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x43, 0x61, 0x63, 0x68, 0x65, 0x4b, 0x65, 0x79, 0x2e, 0x49,
|
||||
0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c,
|
||||
0x75, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x14, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x50,
|
||||
0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x1a, 0x65,
|
||||
0x0a, 0x17, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x4e,
|
||||
0x61, 0x6d, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79,
|
||||
0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x34, 0x0a, 0x05, 0x76,
|
||||
0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x6d, 0x6c, 0x5f,
|
||||
0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61,
|
||||
0x63, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75,
|
||||
0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x57, 0x0a, 0x14, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61,
|
||||
0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a,
|
||||
0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12,
|
||||
0x29, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13,
|
||||
0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x56, 0x61,
|
||||
0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x65,
|
||||
0x0a, 0x18, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74,
|
||||
0x73, 0x53, 0x70, 0x65, 0x63, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65,
|
||||
0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x33, 0x0a, 0x05,
|
||||
0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x6d, 0x6c,
|
||||
0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x52, 0x75, 0x6e, 0x74, 0x69,
|
||||
0x6d, 0x65, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75,
|
||||
0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x47, 0x0a, 0x19, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x50,
|
||||
0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x53, 0x70, 0x65, 0x63, 0x45, 0x6e, 0x74,
|
||||
0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52,
|
||||
0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20,
|
||||
0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x5f,
|
||||
0x0a, 0x19, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72,
|
||||
0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b,
|
||||
0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2c, 0x0a,
|
||||
0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x67,
|
||||
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x56,
|
||||
0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22,
|
||||
0x3f, 0x0a, 0x0d, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63,
|
||||
0x12, 0x14, 0x0a, 0x05, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52,
|
||||
0x05, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6d, 0x64, 0x41, 0x72, 0x67,
|
||||
0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6d, 0x64, 0x41, 0x72, 0x67, 0x73,
|
||||
0x22, 0x38, 0x0a, 0x10, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x4e, 0x61, 0x6d, 0x65,
|
||||
0x4c, 0x69, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x0d, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74,
|
||||
0x4e, 0x61, 0x6d, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0d, 0x61, 0x72, 0x74,
|
||||
0x69, 0x66, 0x61, 0x63, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x42, 0x38, 0x5a, 0x36, 0x67, 0x69,
|
||||
0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f,
|
||||
0x77, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x61, 0x70, 0x69, 0x2f,
|
||||
0x76, 0x32, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2f, 0x67, 0x6f, 0x2f, 0x63, 0x61, 0x63, 0x68,
|
||||
0x65, 0x6b, 0x65, 0x79, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
|
||||
}
|
||||
const file_cache_key_proto_rawDesc = "" +
|
||||
"\n" +
|
||||
"\x0fcache_key.proto\x12\fml_pipelines\x1a\x1cgoogle/protobuf/struct.proto\x1a\x13pipeline_spec.proto\"\x8a\b\n" +
|
||||
"\bCacheKey\x12^\n" +
|
||||
"\x12inputArtifactNames\x18\x01 \x03(\v2..ml_pipelines.CacheKey.InputArtifactNamesEntryR\x12inputArtifactNames\x12Y\n" +
|
||||
"\x0finputParameters\x18\x02 \x03(\v2+.ml_pipelines.CacheKey.InputParametersEntryB\x02\x18\x01R\x0finputParameters\x12a\n" +
|
||||
"\x13outputArtifactsSpec\x18\x03 \x03(\v2/.ml_pipelines.CacheKey.OutputArtifactsSpecEntryR\x13outputArtifactsSpec\x12d\n" +
|
||||
"\x14outputParametersSpec\x18\x04 \x03(\v20.ml_pipelines.CacheKey.OutputParametersSpecEntryR\x14outputParametersSpec\x12A\n" +
|
||||
"\rcontainerSpec\x18\x05 \x01(\v2\x1b.ml_pipelines.ContainerSpecR\rcontainerSpec\x12f\n" +
|
||||
"\x16input_parameter_values\x18\x06 \x03(\v20.ml_pipelines.CacheKey.InputParameterValuesEntryR\x14inputParameterValues\x1ae\n" +
|
||||
"\x17InputArtifactNamesEntry\x12\x10\n" +
|
||||
"\x03key\x18\x01 \x01(\tR\x03key\x124\n" +
|
||||
"\x05value\x18\x02 \x01(\v2\x1e.ml_pipelines.ArtifactNameListR\x05value:\x028\x01\x1aW\n" +
|
||||
"\x14InputParametersEntry\x12\x10\n" +
|
||||
"\x03key\x18\x01 \x01(\tR\x03key\x12)\n" +
|
||||
"\x05value\x18\x02 \x01(\v2\x13.ml_pipelines.ValueR\x05value:\x028\x01\x1ae\n" +
|
||||
"\x18OutputArtifactsSpecEntry\x12\x10\n" +
|
||||
"\x03key\x18\x01 \x01(\tR\x03key\x123\n" +
|
||||
"\x05value\x18\x02 \x01(\v2\x1d.ml_pipelines.RuntimeArtifactR\x05value:\x028\x01\x1aG\n" +
|
||||
"\x19OutputParametersSpecEntry\x12\x10\n" +
|
||||
"\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n" +
|
||||
"\x05value\x18\x02 \x01(\tR\x05value:\x028\x01\x1a_\n" +
|
||||
"\x19InputParameterValuesEntry\x12\x10\n" +
|
||||
"\x03key\x18\x01 \x01(\tR\x03key\x12,\n" +
|
||||
"\x05value\x18\x02 \x01(\v2\x16.google.protobuf.ValueR\x05value:\x028\x01\"?\n" +
|
||||
"\rContainerSpec\x12\x14\n" +
|
||||
"\x05image\x18\x01 \x01(\tR\x05image\x12\x18\n" +
|
||||
"\acmdArgs\x18\x02 \x03(\tR\acmdArgs\"8\n" +
|
||||
"\x10ArtifactNameList\x12$\n" +
|
||||
"\rartifactNames\x18\x01 \x03(\tR\rartifactNamesB8Z6github.com/kubeflow/pipelines/api/v2alpha1/go/cachekeyb\x06proto3"
|
||||
|
||||
var (
|
||||
file_cache_key_proto_rawDescOnce sync.Once
|
||||
file_cache_key_proto_rawDescData = file_cache_key_proto_rawDesc
|
||||
file_cache_key_proto_rawDescData []byte
|
||||
)
|
||||
|
||||
func file_cache_key_proto_rawDescGZIP() []byte {
|
||||
file_cache_key_proto_rawDescOnce.Do(func() {
|
||||
file_cache_key_proto_rawDescData = protoimpl.X.CompressGZIP(file_cache_key_proto_rawDescData)
|
||||
file_cache_key_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_cache_key_proto_rawDesc), len(file_cache_key_proto_rawDesc)))
|
||||
})
|
||||
return file_cache_key_proto_rawDescData
|
||||
}
|
||||
|
||||
var file_cache_key_proto_msgTypes = make([]protoimpl.MessageInfo, 8)
|
||||
var file_cache_key_proto_goTypes = []interface{}{
|
||||
var file_cache_key_proto_goTypes = []any{
|
||||
(*CacheKey)(nil), // 0: ml_pipelines.CacheKey
|
||||
(*ContainerSpec)(nil), // 1: ml_pipelines.ContainerSpec
|
||||
(*ArtifactNameList)(nil), // 2: ml_pipelines.ArtifactNameList
|
||||
|
@ -363,49 +301,11 @@ func file_cache_key_proto_init() {
|
|||
if File_cache_key_proto != nil {
|
||||
return
|
||||
}
|
||||
if !protoimpl.UnsafeEnabled {
|
||||
file_cache_key_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
|
||||
switch v := v.(*CacheKey); i {
|
||||
case 0:
|
||||
return &v.state
|
||||
case 1:
|
||||
return &v.sizeCache
|
||||
case 2:
|
||||
return &v.unknownFields
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
file_cache_key_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
|
||||
switch v := v.(*ContainerSpec); i {
|
||||
case 0:
|
||||
return &v.state
|
||||
case 1:
|
||||
return &v.sizeCache
|
||||
case 2:
|
||||
return &v.unknownFields
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
file_cache_key_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
|
||||
switch v := v.(*ArtifactNameList); i {
|
||||
case 0:
|
||||
return &v.state
|
||||
case 1:
|
||||
return &v.sizeCache
|
||||
case 2:
|
||||
return &v.unknownFields
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
}
|
||||
type x struct{}
|
||||
out := protoimpl.TypeBuilder{
|
||||
File: protoimpl.DescBuilder{
|
||||
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
|
||||
RawDescriptor: file_cache_key_proto_rawDesc,
|
||||
RawDescriptor: unsafe.Slice(unsafe.StringData(file_cache_key_proto_rawDesc), len(file_cache_key_proto_rawDesc)),
|
||||
NumEnums: 0,
|
||||
NumMessages: 8,
|
||||
NumExtensions: 0,
|
||||
|
@ -416,7 +316,6 @@ func file_cache_key_proto_init() {
|
|||
MessageInfos: file_cache_key_proto_msgTypes,
|
||||
}.Build()
|
||||
File_cache_key_proto = out.File
|
||||
file_cache_key_proto_rawDesc = nil
|
||||
file_cache_key_proto_goTypes = nil
|
||||
file_cache_key_proto_depIdxs = nil
|
||||
}
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -93,6 +93,8 @@ message ComponentSpec {
|
|||
DagSpec dag = 3;
|
||||
string executor_label = 4;
|
||||
}
|
||||
// Supports platform-specific component features.
|
||||
repeated SinglePlatformSpec single_platform_specs = 5;
|
||||
}
|
||||
|
||||
// A DAG contains multiple tasks.
|
||||
|
@ -290,7 +292,7 @@ message TaskInputsSpec {
|
|||
|
||||
// Represents an input parameter. The value can be taken from an upstream
|
||||
// task's output parameter (if specifying `producer_task` and
|
||||
// `output_parameter_key`, or it can be a runtime value, which can either be
|
||||
// `output_parameter_key`), or it can be a runtime value, which can either be
|
||||
// determined at compile-time, or from a pipeline parameter.
|
||||
message InputParameterSpec {
|
||||
// Represents an upstream task's output parameter.
|
||||
|
@ -305,9 +307,9 @@ message TaskInputsSpec {
|
|||
|
||||
// Represents an upstream task's final status. The field can only be set if
|
||||
// the schema version is `2.0.0`. The resolved input parameter will be a
|
||||
// json payload in string type.
|
||||
// JSON payload in string type.
|
||||
message TaskFinalStatus {
|
||||
// The name of the upsteram task where the final status is coming from.
|
||||
// The name of the upstream task where the final status is coming from.
|
||||
string producer_task = 1;
|
||||
}
|
||||
|
||||
|
@ -318,7 +320,7 @@ message TaskInputsSpec {
|
|||
ValueOrRuntimeParameter runtime_value = 2;
|
||||
// Pass the input parameter from parent component input parameter.
|
||||
string component_input_parameter = 3;
|
||||
// The final status of an uptream task.
|
||||
// The final status of an upstream task.
|
||||
TaskFinalStatus task_final_status = 5;
|
||||
}
|
||||
|
||||
|
@ -459,6 +461,9 @@ message PipelineTaskSpec {
|
|||
message CachingOptions {
|
||||
// Whether or not to enable cache for this task. Defaults to false.
|
||||
bool enable_cache = 1;
|
||||
// Customized cache key for this task. If set, the cache_key will be used
|
||||
// as the key for the task's cache.
|
||||
string cache_key = 2;
|
||||
}
|
||||
CachingOptions caching_options = 6;
|
||||
|
||||
|
@ -522,7 +527,7 @@ message PipelineTaskSpec {
|
|||
// t2.outputs.parameters = { 'p': 'v2' }
|
||||
// t2.outputs.artifacts = { 'a': [a2] }
|
||||
// parent_task.outputs.parameters = { 'p': '["v1", "v2"]' }
|
||||
// parent_task.outputs.aritfacts = { 'a': [a1, a2] }
|
||||
// parent_task.outputs.artifacts = { 'a': [a1, a2] }
|
||||
oneof iterator {
|
||||
// Iterator to iterate over an artifact input.
|
||||
ArtifactIteratorSpec artifact_iterator = 9;
|
||||
|
@ -586,7 +591,7 @@ message ArtifactIteratorSpec {
|
|||
// The spec of a parameter iterator. It supports fan-out a workflow from a
|
||||
// string parameter which contains a JSON array.
|
||||
message ParameterIteratorSpec {
|
||||
// Specifies the spec to decribe the parameter items to iterate.
|
||||
// Specifies the spec to describe the parameter items to iterate.
|
||||
message ItemsSpec {
|
||||
// Specifies where to get the collection of items to iterate. The iterator
|
||||
// will create a sub-task for each item of the collection and pass the item
|
||||
|
@ -618,7 +623,7 @@ message PipelineInfo {
|
|||
// Required field. The name of the pipeline.
|
||||
// The name will be used to create or find pipeline context in MLMD.
|
||||
string name = 1;
|
||||
|
||||
|
||||
// Optional fields. The readable display name for the pipeline template.
|
||||
// Should not exceed 1024 characters.
|
||||
string display_name = 2;
|
||||
|
@ -722,29 +727,66 @@ message PipelineDeploymentConfig {
|
|||
message ResourceSpec {
|
||||
// The limit of the number of vCPU cores. This container execution needs
|
||||
// at most cpu_limit vCPU to run.
|
||||
double cpu_limit = 1;
|
||||
// Deprecated. Use [ResourceSpec.resource_cpu_limit] instead.
|
||||
double cpu_limit = 1 [deprecated = true];
|
||||
|
||||
// The memory limit in GB. This container execution needs at most
|
||||
// memory_limit RAM to run.
|
||||
double memory_limit = 2;
|
||||
// Deprecated. Use [ResourceSpec.resource_memory_limit] instead.
|
||||
double memory_limit = 2 [deprecated = true];
|
||||
|
||||
// The request of the number of vCPU cores. This container execution
|
||||
// needs at least cpu_request vCPU to run.
|
||||
double cpu_request = 5;
|
||||
// Deprecated. Use [ResourceSpec.resource_cpu_request] instead.
|
||||
double cpu_request = 5 [deprecated = true];
|
||||
|
||||
// The memory request in GB. This container execution needs at least
|
||||
// memory_request RAM to run.
|
||||
double memory_request = 6;
|
||||
// Deprecated. Use [ResourceSpec.resource_memory_request] instead.
|
||||
double memory_request = 6 [deprecated = true];
|
||||
|
||||
// The limit of the number of vCPU cores. This container execution needs
|
||||
// at most resource_cpu_limit vCPU to run. Handles static values and
|
||||
// placeholders.
|
||||
string resource_cpu_limit = 7;
|
||||
|
||||
// The memory limit in GB. This container execution needs
|
||||
// at most resource_memory_limit RAM to run. Handles static values and
|
||||
// placeholders.
|
||||
string resource_memory_limit = 8;
|
||||
|
||||
// The request of the number of vCPU cores. This container
|
||||
// execution needs at least resource_cpu_request vCPU to run. Handles
|
||||
// static values and placeholders.
|
||||
string resource_cpu_request = 9;
|
||||
|
||||
// The memory request in GB. This container execution
|
||||
// needs at least resource_memory_request RAM to run. Handles static
|
||||
// values and placeholders.
|
||||
string resource_memory_request = 10;
|
||||
|
||||
// The specification on the accelerators being attached to this container.
|
||||
message AcceleratorConfig {
|
||||
// The type of accelerators.
|
||||
string type = 1;
|
||||
// Deprecated. Use [ResourceSpec.AcceleratorConfig.resource_type]
|
||||
// instead.
|
||||
string type = 1 [deprecated = true];
|
||||
|
||||
// The number of accelerators.
|
||||
int64 count = 2;
|
||||
// Deprecated. Use [ResourceSpec.AcceleratorConfig.resource_count]
|
||||
// instead.
|
||||
int64 count = 2 [deprecated = true];
|
||||
|
||||
// The type of accelerators. Handles static values and
|
||||
// placeholders.
|
||||
string resource_type = 3;
|
||||
|
||||
// The number of accelerators. Handles static values and
|
||||
// placeholders.
|
||||
string resource_count = 4;
|
||||
}
|
||||
AcceleratorConfig accelerator = 3;
|
||||
|
||||
|
||||
reserved 4;
|
||||
}
|
||||
ResourceSpec resources = 5;
|
||||
|
@ -1045,11 +1087,55 @@ message PlatformSpec {
|
|||
message SinglePlatformSpec {
|
||||
// Mirrors PipelineSpec.deployment_spec structure
|
||||
PlatformDeploymentConfig deployment_spec = 1;
|
||||
}
|
||||
|
||||
// Name of the platform. For example, "google_cloud"
|
||||
string platform = 2;
|
||||
|
||||
// Arbitrary configuration, which will be defined by the platform
|
||||
// protos/libraries.
|
||||
google.protobuf.Struct config = 3;
|
||||
|
||||
PipelineConfig pipelineConfig = 4;
|
||||
}
|
||||
|
||||
message PlatformDeploymentConfig {
|
||||
// Map of executor label to executor-level config
|
||||
// Mirrors PipelineSpec.deployment_spec.executors structure
|
||||
map<string, google.protobuf.Struct> executors = 1;
|
||||
}
|
||||
|
||||
message WorkspaceConfig {
|
||||
// Size of the workspace
|
||||
// Example: "250Gi"
|
||||
// See https://kubernetes.io/docs/reference/kubernetes-api/common-definitions/quantity/ for valid quantity formats
|
||||
string size = 1;
|
||||
|
||||
// Kubernetes specific configuration for the workspace
|
||||
optional KubernetesWorkspaceConfig kubernetes = 2;
|
||||
}
|
||||
|
||||
message KubernetesWorkspaceConfig {
|
||||
// Patch of a PersistentVolumeClaim (PVC) spec to override defaults set on the API server for the workspace PVC
|
||||
// Example: {
|
||||
// "storageClassName": "super-fast-storage",
|
||||
// "accessModes": ["ReadWriteMany"]
|
||||
// }
|
||||
optional google.protobuf.Struct pvc_spec_patch = 1;
|
||||
}
|
||||
|
||||
// Spec for pipeline-level config options. See PipelineConfig DSL class.
|
||||
message PipelineConfig {
|
||||
// Name of the semaphore key to control pipeline concurrency
|
||||
string semaphore_key = 1;
|
||||
|
||||
// Name of the mutex to ensure mutual exclusion
|
||||
string mutex_name = 2;
|
||||
|
||||
// Time to live configuration after the pipeline run is completed for
|
||||
// ephemeral resources created by the pipeline run.
|
||||
int32 resource_ttl = 3;
|
||||
|
||||
// Configuration for a shared storage workspace that persists for the duration of the pipeline run.
|
||||
// The workspace can be configured with size and Kubernetes-specific settings to override default PVC configurations.
|
||||
optional WorkspaceConfig workspace = 4;
|
||||
}
|
||||
|
|
|
@ -0,0 +1,8 @@
|
|||
# Typically we can't support multiple major versions of protobuf
|
||||
# The runtime protobuf package MUST be aligned with the protobuf
|
||||
# libraries used to generate the code (protoc, protoc-gen-go, etc.)
|
||||
# For example protobuf 5.x aligns with protoc 26.x-29.x but
|
||||
# 6.x aligns with 30.x+.
|
||||
# See for support tiers:
|
||||
# https://protobuf.dev/support/version-support/#python
|
||||
protobuf==6.31.1,<7.0
|
|
@ -13,9 +13,21 @@
|
|||
# limitations under the License.
|
||||
|
||||
import setuptools
|
||||
import os
|
||||
from typing import List
|
||||
|
||||
NAME = 'kfp-pipeline-spec'
|
||||
VERSION = '0.3.0'
|
||||
VERSION = '2.14.0'
|
||||
|
||||
def get_requirements(requirements_file: str) -> List[str]:
|
||||
"""Read requirements from requirements.in."""
|
||||
|
||||
file_path = os.path.join(os.path.dirname(__file__), requirements_file)
|
||||
with open(file_path, 'r') as f:
|
||||
lines = f.readlines()
|
||||
lines = [line.strip() for line in lines]
|
||||
lines = [line for line in lines if not line.startswith('#') and line]
|
||||
return lines
|
||||
|
||||
setuptools.setup(
|
||||
name=NAME,
|
||||
|
@ -25,8 +37,8 @@ setuptools.setup(
|
|||
author_email='kubeflow-pipelines@google.com',
|
||||
url='https://github.com/kubeflow/pipelines',
|
||||
packages=setuptools.find_namespace_packages(include=['kfp.*']),
|
||||
python_requires='>=3.7.0,<3.13.0',
|
||||
install_requires=['protobuf>=4.21.1,<5'],
|
||||
python_requires='>=3.9.0',
|
||||
install_requires=get_requirements('requirements.txt'),
|
||||
include_package_data=True,
|
||||
license='Apache 2.0',
|
||||
)
|
||||
|
|
|
@ -13,28 +13,29 @@
|
|||
# limitations under the License.
|
||||
|
||||
# 1. Build api server application
|
||||
FROM golang:1.21.7-bookworm as builder
|
||||
FROM golang:1.24-bookworm AS builder
|
||||
RUN apt-get update && apt-get install -y cmake clang musl-dev openssl
|
||||
WORKDIR /go/src/github.com/kubeflow/pipelines
|
||||
|
||||
COPY ./go.mod ./
|
||||
COPY ./go.sum ./
|
||||
COPY ./kubernetes_platform/go.mod ./kubernetes_platform/go.mod
|
||||
COPY ./api/go.mod ./api/go.mod
|
||||
|
||||
RUN GO111MODULE=on go mod download
|
||||
|
||||
COPY . .
|
||||
RUN GO111MODULE=on go build -o /bin/apiserver backend/src/apiserver/*.go
|
||||
# Check licenses and comply with license terms.
|
||||
RUN ./hack/install-go-licenses.sh
|
||||
# First, make sure there's no forbidden license.
|
||||
RUN go-licenses check ./backend/src/apiserver
|
||||
RUN go-licenses csv ./backend/src/apiserver > /tmp/licenses.csv && \
|
||||
diff /tmp/licenses.csv backend/third_party_licenses/apiserver.csv && \
|
||||
go-licenses save ./backend/src/apiserver --save_path /tmp/NOTICES
|
||||
|
||||
# 2. Compile preloaded pipeline samples
|
||||
FROM python:3.8 as compiler
|
||||
FROM python:3.9 AS compiler
|
||||
RUN apt-get update -y && apt-get install --no-install-recommends -y -q default-jdk python3-setuptools python3-dev jq
|
||||
RUN wget https://bootstrap.pypa.io/get-pip.py && python3 get-pip.py
|
||||
COPY backend/requirements.txt .
|
||||
RUN python3 -m pip install -r requirements.txt --no-cache-dir
|
||||
|
||||
# Downloading Argo CLI so that the samples are validated
|
||||
ENV ARGO_VERSION v3.4.16
|
||||
ENV ARGO_VERSION=v3.6.7
|
||||
RUN curl -sLO https://github.com/argoproj/argo-workflows/releases/download/${ARGO_VERSION}/argo-linux-amd64.gz && \
|
||||
gunzip argo-linux-amd64.gz && \
|
||||
chmod +x argo-linux-amd64 && \
|
||||
|
@ -47,9 +48,9 @@ COPY backend/src/apiserver/config/sample_config.json /samples/
|
|||
# Compiling the preloaded samples.
|
||||
# The default image is replaced with the GCR-hosted python image.
|
||||
RUN set -e; \
|
||||
< /samples/sample_config.json jq .[].file --raw-output | while read pipeline_yaml; do \
|
||||
< /samples/sample_config.json jq ".pipelines[].file" --raw-output | while read pipeline_yaml; do \
|
||||
pipeline_py="${pipeline_yaml%.yaml}"; \
|
||||
python3 "$pipeline_py"; \
|
||||
echo "Compiling: \"$pipeline_py\"" && python3 "$pipeline_py" && echo -n "Output: " && ls "$pipeline_py.yaml"; \
|
||||
done
|
||||
|
||||
# 3. Start api web server
|
||||
|
@ -59,22 +60,19 @@ ARG COMMIT_SHA=unknown
|
|||
ENV COMMIT_SHA=${COMMIT_SHA}
|
||||
ARG TAG_NAME=unknown
|
||||
ENV TAG_NAME=${TAG_NAME}
|
||||
ENV LOG_LEVEL info
|
||||
ENV LOG_LEVEL=info
|
||||
|
||||
WORKDIR /bin
|
||||
|
||||
# Adding CA certificate so API server can download pipeline through URL and wget is used for liveness/readiness probe command
|
||||
RUN apt-get update && apt-get install -y ca-certificates wget
|
||||
|
||||
COPY backend/src/apiserver/config/ /config
|
||||
COPY --from=builder /bin/apiserver /bin/apiserver
|
||||
|
||||
# Copy licenses and notices.
|
||||
COPY --from=builder /tmp/licenses.csv /third_party/licenses.csv
|
||||
COPY --from=builder /tmp/NOTICES /third_party/NOTICES
|
||||
COPY --from=compiler /samples/ /samples/
|
||||
RUN chmod +x /bin/apiserver
|
||||
|
||||
# Adding CA certificate so API server can download pipeline through URL and wget is used for liveness/readiness probe command
|
||||
RUN apt-get update && apt-get install -y ca-certificates wget
|
||||
|
||||
# Pin sample doc links to the commit that built the backend image
|
||||
RUN sed -E "s#/(blob|tree)/master/#/\1/${COMMIT_SHA}/#g" -i /config/sample_config.json && \
|
||||
sed -E "s/%252Fmaster/%252F${COMMIT_SHA}/#g" -i /config/sample_config.json
|
||||
|
|
|
@ -13,25 +13,25 @@
|
|||
# limitations under the License.
|
||||
|
||||
# Dockerfile for building the source code of cache_server
|
||||
FROM golang:1.21.7-alpine3.19 as builder
|
||||
FROM golang:1.24-alpine as builder
|
||||
|
||||
RUN apk update && apk upgrade && \
|
||||
apk add --no-cache bash git openssh gcc musl-dev
|
||||
|
||||
WORKDIR /go/src/github.com/kubeflow/pipelines
|
||||
|
||||
COPY ./go.mod ./
|
||||
COPY ./go.sum ./
|
||||
COPY ./kubernetes_platform/go.mod ./kubernetes_platform/go.mod
|
||||
COPY ./api/go.mod ./api/go.mod
|
||||
|
||||
RUN GO111MODULE=on go mod download
|
||||
|
||||
COPY . .
|
||||
|
||||
RUN GO111MODULE=on go build -o /bin/cache_server backend/src/cache/*.go
|
||||
|
||||
# Check licenses and comply with license terms.
|
||||
RUN ./hack/install-go-licenses.sh
|
||||
# First, make sure there's no forbidden license.
|
||||
RUN go-licenses check ./backend/src/cache
|
||||
RUN go-licenses csv ./backend/src/cache > /tmp/licenses.csv && \
|
||||
diff /tmp/licenses.csv backend/third_party_licenses/cache_server.csv && \
|
||||
go-licenses save ./backend/src/cache --save_path /tmp/NOTICES
|
||||
|
||||
FROM alpine:3.19
|
||||
FROM alpine
|
||||
|
||||
RUN adduser -S appuser
|
||||
USER appuser
|
||||
|
@ -39,8 +39,5 @@ USER appuser
|
|||
WORKDIR /bin
|
||||
|
||||
COPY --from=builder /bin/cache_server /bin/cache_server
|
||||
# Copy licenses and notices.
|
||||
COPY --from=builder /tmp/licenses.csv /third_party/licenses.csv
|
||||
COPY --from=builder /tmp/NOTICES /third_party/NOTICES
|
||||
|
||||
ENTRYPOINT [ "/bin/cache_server" ]
|
||||
|
|
|
@ -13,12 +13,18 @@
|
|||
# limitations under the License.
|
||||
|
||||
# Dockerfile for building the source code of conformance tests
|
||||
FROM golang:1.21.7-alpine3.19 as builder
|
||||
FROM golang:1.24-alpine as builder
|
||||
|
||||
RUN apk update && apk upgrade && \
|
||||
apk add --no-cache bash git openssh gcc musl-dev
|
||||
|
||||
WORKDIR /go/src/github.com/kubeflow/pipelines
|
||||
|
||||
COPY ./go.mod ./
|
||||
COPY ./go.sum ./
|
||||
|
||||
RUN GO111MODULE=on go mod download
|
||||
|
||||
COPY . .
|
||||
|
||||
# Compile the test
|
||||
|
@ -34,7 +40,7 @@ RUN chmod +x /test/integration/run.sh
|
|||
RUN tar -czvf /test.tar.gz /test
|
||||
|
||||
|
||||
FROM alpine:3.8
|
||||
FROM alpine:3.9
|
||||
|
||||
COPY --from=builder /test.tar.gz /
|
||||
RUN tar -xzvf /test.tar.gz
|
||||
|
|
|
@ -12,20 +12,22 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
FROM golang:1.21.7-alpine3.19 as builder
|
||||
FROM golang:1.24-alpine AS builder
|
||||
|
||||
ARG GCFLAGS=""
|
||||
|
||||
WORKDIR /go/src/github.com/kubeflow/pipelines
|
||||
|
||||
COPY ./go.mod ./
|
||||
COPY ./go.sum ./
|
||||
COPY ./kubernetes_platform/go.mod ./kubernetes_platform/go.mod
|
||||
COPY ./api/go.mod ./api/go.mod
|
||||
|
||||
RUN GO111MODULE=on go mod download
|
||||
|
||||
COPY . .
|
||||
|
||||
RUN GO111MODULE=on CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -tags netgo -ldflags '-extldflags "-static"' -o /bin/driver ./backend/src/v2/cmd/driver/*.go
|
||||
|
||||
# Check licenses and comply with license terms.
|
||||
RUN ./hack/install-go-licenses.sh
|
||||
# First, make sure there's no forbidden license.
|
||||
RUN go-licenses check ./backend/src/v2/cmd/driver
|
||||
RUN go-licenses csv ./backend/src/v2/cmd/driver > /tmp/licenses.csv && \
|
||||
diff /tmp/licenses.csv backend/third_party_licenses/driver.csv && \
|
||||
go-licenses save ./backend/src/v2/cmd/driver --save_path /tmp/NOTICES
|
||||
RUN GO111MODULE=on CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -tags netgo -gcflags="${GCFLAGS}" -ldflags '-extldflags "-static"' -o /bin/driver ./backend/src/v2/cmd/driver/*.go
|
||||
|
||||
FROM alpine:3.19
|
||||
|
||||
|
@ -35,8 +37,5 @@ USER appuser
|
|||
WORKDIR /bin
|
||||
|
||||
COPY --from=builder /bin/driver /bin/driver
|
||||
# Copy licenses and notices.
|
||||
COPY --from=builder /tmp/licenses.csv /third_party/licenses.csv
|
||||
COPY --from=builder /tmp/NOTICES /third_party/NOTICES
|
||||
|
||||
ENTRYPOINT [ "/bin/driver" ]
|
||||
ENTRYPOINT [ "/bin/driver" ]
|
||||
|
|
|
@ -12,21 +12,21 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
FROM golang:1.21.7-alpine3.19 as builder
|
||||
FROM golang:1.24-alpine AS builder
|
||||
|
||||
WORKDIR /go/src/github.com/kubeflow/pipelines
|
||||
|
||||
COPY ./go.mod ./
|
||||
COPY ./go.sum ./
|
||||
COPY ./kubernetes_platform/go.mod ./kubernetes_platform/go.mod
|
||||
COPY ./api/go.mod ./api/go.mod
|
||||
|
||||
RUN GO111MODULE=on go mod download
|
||||
|
||||
COPY . .
|
||||
|
||||
RUN GO111MODULE=on CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -tags netgo -ldflags '-extldflags "-static"' -o /bin/launcher-v2 ./backend/src/v2/cmd/launcher-v2/*.go
|
||||
|
||||
# Check licenses and comply with license terms.
|
||||
RUN ./hack/install-go-licenses.sh
|
||||
# First, make sure there's no forbidden license.
|
||||
RUN go-licenses check ./backend/src/v2/cmd/launcher-v2
|
||||
RUN go-licenses csv ./backend/src/v2/cmd/launcher-v2 > /tmp/licenses.csv && \
|
||||
diff /tmp/licenses.csv backend/third_party_licenses/launcher.csv && \
|
||||
go-licenses save ./backend/src/v2/cmd/launcher-v2 --save_path /tmp/NOTICES
|
||||
|
||||
FROM alpine:3.19
|
||||
|
||||
RUN adduser -S appuser
|
||||
|
@ -35,8 +35,5 @@ USER appuser
|
|||
WORKDIR /bin
|
||||
|
||||
COPY --from=builder /bin/launcher-v2 /bin/launcher-v2
|
||||
# Copy licenses and notices.
|
||||
COPY --from=builder /tmp/licenses.csv /third_party/licenses.csv
|
||||
COPY --from=builder /tmp/NOTICES /third_party/NOTICES
|
||||
|
||||
ENTRYPOINT [ "/bin/launcher-v2" ]
|
||||
ENTRYPOINT [ "/bin/launcher-v2" ]
|
||||
|
|
|
@ -12,23 +12,24 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
FROM golang:1.21.7-alpine3.19 as builder
|
||||
|
||||
WORKDIR /go/src/github.com/kubeflow/pipelines
|
||||
COPY . .
|
||||
FROM golang:1.24-alpine AS builder
|
||||
|
||||
# Needed musl-dev for github.com/mattn/go-sqlite3
|
||||
RUN apk update && apk upgrade && \
|
||||
apk add --no-cache bash git openssh gcc musl-dev
|
||||
|
||||
WORKDIR /go/src/github.com/kubeflow/pipelines
|
||||
|
||||
COPY ./go.mod ./
|
||||
COPY ./go.sum ./
|
||||
COPY ./kubernetes_platform/go.mod ./kubernetes_platform/go.mod
|
||||
COPY ./api/go.mod ./api/go.mod
|
||||
|
||||
RUN GO111MODULE=on go mod download
|
||||
|
||||
COPY . .
|
||||
|
||||
RUN GO111MODULE=on go build -o /bin/persistence_agent backend/src/agent/persistence/*.go
|
||||
# Check licenses and comply with license terms.
|
||||
RUN ./hack/install-go-licenses.sh
|
||||
# First, make sure there's no forbidden license.
|
||||
RUN go-licenses check ./backend/src/agent/persistence
|
||||
RUN go-licenses csv ./backend/src/agent/persistence > /tmp/licenses.csv && \
|
||||
diff /tmp/licenses.csv backend/third_party_licenses/persistence_agent.csv && \
|
||||
go-licenses save ./backend/src/agent/persistence --save_path /tmp/NOTICES
|
||||
|
||||
FROM alpine:3.19
|
||||
|
||||
|
@ -38,19 +39,15 @@ USER appuser
|
|||
WORKDIR /bin
|
||||
|
||||
COPY --from=builder /bin/persistence_agent /bin/persistence_agent
|
||||
# Copy licenses and notices.
|
||||
COPY --from=builder /tmp/licenses.csv /third_party/licenses.csv
|
||||
COPY --from=builder /tmp/NOTICES /third_party/NOTICES
|
||||
|
||||
ENV NAMESPACE ""
|
||||
ENV NAMESPACE=""
|
||||
|
||||
# Set Workflow TTL to 1 day. The way to use a different value for a particular Kubeflow Pipelines deployment is demonstrated in manifests/kustomize/base/pipeline/ml-pipeline-persistenceagent-deployment.yaml
|
||||
ENV TTL_SECONDS_AFTER_WORKFLOW_FINISH 86400
|
||||
ENV TTL_SECONDS_AFTER_WORKFLOW_FINISH=86400
|
||||
|
||||
# NUM_WORKERS indicates now many worker goroutines
|
||||
ENV NUM_WORKERS 2
|
||||
ENV LOG_LEVEL info
|
||||
ENV NUM_WORKERS=2
|
||||
ENV LOG_LEVEL=info
|
||||
|
||||
ENV EXECUTIONTYPE Workflow
|
||||
ENV EXECUTIONTYPE=Workflow
|
||||
|
||||
CMD persistence_agent --logtostderr=true --namespace=${NAMESPACE} --ttlSecondsAfterWorkflowFinish=${TTL_SECONDS_AFTER_WORKFLOW_FINISH} --numWorker ${NUM_WORKERS} --executionType ${EXECUTIONTYPE} --logLevel=${LOG_LEVEL}
|
||||
CMD persistence_agent --logtostderr=true --namespace=${NAMESPACE} --ttlSecondsAfterWorkflowFinish=${TTL_SECONDS_AFTER_WORKFLOW_FINISH} --numWorker ${NUM_WORKERS} --executionType ${EXECUTIONTYPE} --logLevel=${LOG_LEVEL}
|
||||
|
|
|
@ -12,23 +12,24 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
FROM golang:1.21.7-alpine3.19 as builder
|
||||
|
||||
WORKDIR /go/src/github.com/kubeflow/pipelines
|
||||
COPY . .
|
||||
FROM golang:1.24-alpine AS builder
|
||||
|
||||
# Needed musl-dev for github.com/mattn/go-sqlite3
|
||||
RUN apk update && apk upgrade && \
|
||||
apk add --no-cache bash git openssh gcc musl-dev
|
||||
|
||||
WORKDIR /go/src/github.com/kubeflow/pipelines
|
||||
|
||||
COPY ./go.mod ./
|
||||
COPY ./go.sum ./
|
||||
COPY ./kubernetes_platform/go.mod ./kubernetes_platform/go.mod
|
||||
COPY ./api/go.mod ./api/go.mod
|
||||
|
||||
RUN GO111MODULE=on go mod download
|
||||
|
||||
COPY . .
|
||||
|
||||
RUN GO111MODULE=on go build -o /bin/controller backend/src/crd/controller/scheduledworkflow/*.go
|
||||
# Check licenses and comply with license terms.
|
||||
RUN ./hack/install-go-licenses.sh
|
||||
# First, make sure there's no forbidden license.
|
||||
RUN go-licenses check ./backend/src/crd/controller/scheduledworkflow
|
||||
RUN go-licenses csv ./backend/src/crd/controller/scheduledworkflow > /tmp/licenses.csv && \
|
||||
diff /tmp/licenses.csv backend/third_party_licenses/swf.csv && \
|
||||
go-licenses save ./backend/src/crd/controller/scheduledworkflow --save_path /tmp/NOTICES
|
||||
|
||||
FROM alpine:3.19
|
||||
|
||||
|
@ -40,11 +41,7 @@ USER appuser
|
|||
WORKDIR /bin
|
||||
|
||||
COPY --from=builder /bin/controller /bin/controller
|
||||
# Copy licenses and notices.
|
||||
COPY --from=builder /tmp/licenses.csv /third_party/licenses.csv
|
||||
COPY --from=builder /tmp/NOTICES /third_party/NOTICES
|
||||
|
||||
ENV NAMESPACE ""
|
||||
ENV LOG_LEVEL info
|
||||
ENV NAMESPACE=""
|
||||
ENV LOG_LEVEL=info
|
||||
|
||||
CMD /bin/controller --logtostderr=true --namespace=${NAMESPACE} --logLevel=${LOG_LEVEL}
|
||||
|
|
|
@ -12,22 +12,23 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
FROM golang:1.21.7-alpine3.19 as builder
|
||||
FROM golang:1.24-alpine as builder
|
||||
|
||||
RUN apk update && apk upgrade
|
||||
RUN apk add --no-cache git gcc musl-dev
|
||||
|
||||
WORKDIR /src/github.com/kubeflow/pipelines
|
||||
|
||||
COPY ./go.mod ./
|
||||
COPY ./go.sum ./
|
||||
COPY ./kubernetes_platform/go.mod ./kubernetes_platform/go.mod
|
||||
COPY ./api/go.mod ./api/go.mod
|
||||
|
||||
RUN GO111MODULE=on go mod download
|
||||
|
||||
COPY . .
|
||||
|
||||
RUN GO111MODULE=on go build -o /bin/controller backend/src/crd/controller/viewer/*.go
|
||||
# Check licenses and comply with license terms.
|
||||
RUN ./hack/install-go-licenses.sh
|
||||
# First, make sure there's no forbidden license.
|
||||
RUN go-licenses check ./backend/src/crd/controller/viewer
|
||||
RUN go-licenses csv ./backend/src/crd/controller/viewer > /tmp/licenses.csv && \
|
||||
diff /tmp/licenses.csv backend/third_party_licenses/viewer.csv && \
|
||||
go-licenses save ./backend/src/crd/controller/viewer --save_path /tmp/NOTICES
|
||||
|
||||
FROM alpine
|
||||
WORKDIR /bin
|
||||
|
@ -35,9 +36,6 @@ WORKDIR /bin
|
|||
COPY --from=builder /bin/controller /bin/controller
|
||||
RUN chmod +x /bin/controller
|
||||
|
||||
# Copy licenses and notices.
|
||||
COPY --from=builder /tmp/licenses.csv /third_party/licenses.csv
|
||||
COPY --from=builder /tmp/NOTICES /third_party/NOTICES
|
||||
|
||||
ENV MAX_NUM_VIEWERS "50"
|
||||
ENV NAMESPACE "kubeflow"
|
||||
|
|
|
@ -21,12 +21,12 @@
|
|||
FROM tensorflow/tensorflow:2.10.1
|
||||
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y wget curl tar openssl
|
||||
&& apt-get install -y wget curl tar openssl \
|
||||
&& curl https://dl.google.com/dl/cloudsdk/release/google-cloud-sdk.tar.gz > /tmp/google-cloud-sdk.tar.gz \
|
||||
&& mkdir -p /usr/local/gcloud \
|
||||
&& tar -C /usr/local/gcloud -xf /tmp/google-cloud-sdk.tar.gz \
|
||||
&& /usr/local/gcloud/google-cloud-sdk/install.sh
|
||||
|
||||
RUN curl https://dl.google.com/dl/cloudsdk/release/google-cloud-sdk.tar.gz > /tmp/google-cloud-sdk.tar.gz
|
||||
RUN mkdir -p /usr/local/gcloud
|
||||
RUN tar -C /usr/local/gcloud -xf /tmp/google-cloud-sdk.tar.gz
|
||||
RUN /usr/local/gcloud/google-cloud-sdk/install.sh
|
||||
ENV PATH $PATH:/usr/local/gcloud/google-cloud-sdk/bin
|
||||
|
||||
WORKDIR /src
|
||||
|
|
155
backend/Makefile
155
backend/Makefile
|
@ -1,82 +1,139 @@
|
|||
BUILD=build
|
||||
MOD_ROOT=..
|
||||
CSV_PATH=backend/third_party_licenses
|
||||
KIND_NAME ?= dev-pipelines-api
|
||||
|
||||
CERT_MANAGER_VERSION ?= v1.16.2
|
||||
|
||||
# Container Build Params
|
||||
CONTAINER_ENGINE ?= docker
|
||||
CONTAINER_ENGINE ?= $(shell \
|
||||
if command -v docker >/dev/null 2>&1; then \
|
||||
echo docker; \
|
||||
elif command -v podman >/dev/null 2>&1; then \
|
||||
echo podman; \
|
||||
fi \
|
||||
)
|
||||
|
||||
# IMG_REGISTRY can be used to automatically prepend registry details. e.g. "quay.io/kubeflow/"
|
||||
IMG_REGISTRY ?=
|
||||
IMG_TAG_APISERVER ?= apiserver
|
||||
IMG_TAG_PERSISTENCEAGENT ?= persistence-agent
|
||||
IMG_TAG_CACHESERVER ?= cache-server
|
||||
IMG_TAG_SCHEDULEDWORKFLOW ?= scheduledworkflow
|
||||
IMG_TAG_VIEWERCONTROLLER ?= viewercontroller
|
||||
IMG_TAG_VISUALIZATION ?= visualization
|
||||
IMG_TAG_DRIVER ?= kfp-driver
|
||||
IMG_TAG_LAUNCHER ?= kfp-launcher
|
||||
IMG_TAG_WEBHOOK_PROXY ?= domain.local/kfp/webhook-proxy:latest
|
||||
|
||||
# Whenever build command for any of the binaries change, we should update them both here and in backend/Dockerfiles.
|
||||
|
||||
.PHONY: all
|
||||
all: license_apiserver license_persistence_agent license_cache_server license_swf license_viewer license_driver license_launcher
|
||||
all: image_all
|
||||
|
||||
.PHONY: clean
|
||||
clean:
|
||||
rm -rf $(BUILD)
|
||||
|
||||
$(BUILD)/apiserver:
|
||||
GO111MODULE=on go build -o $(BUILD)/apiserver github.com/kubeflow/pipelines/backend/src/apiserver
|
||||
$(BUILD)/persistence_agent:
|
||||
GO111MODULE=on go build -o $(BUILD)/persistence_agent github.com/kubeflow/pipelines/backend/src/agent/persistence
|
||||
$(BUILD)/cache_server:
|
||||
GO111MODULE=on go build -o $(BUILD)/cache_server github.com/kubeflow/pipelines/backend/src/cache
|
||||
$(BUILD)/swf:
|
||||
GO111MODULE=on go build -o $(BUILD)/swf github.com/kubeflow/pipelines/backend/src/crd/controller/scheduledworkflow
|
||||
$(BUILD)/viewer:
|
||||
GO111MODULE=on go build -o $(BUILD)/viewer github.com/kubeflow/pipelines/backend/src/crd/controller/viewer
|
||||
$(BUILD)/driver:
|
||||
GO111MODULE=on go build -o $(BUILD)/driver github.com/kubeflow/pipelines/backend/src/v2/cmd/driver
|
||||
$(BUILD)/launcher:
|
||||
GO111MODULE=on go build -o $(BUILD)/launcher github.com/kubeflow/pipelines/backend/src/v2/cmd/launcher-v2
|
||||
|
||||
# Update licenses info after dependencies changed.
|
||||
# See README.md#updating-licenses-info section for more details.
|
||||
.PHONY: license_apiserver
|
||||
license_apiserver: $(BUILD)/apiserver
|
||||
cd $(MOD_ROOT) && go-licenses csv ./backend/src/apiserver > $(CSV_PATH)/apiserver.csv
|
||||
.PHONY: license_persistence_agent
|
||||
license_persistence_agent: $(BUILD)/persistence_agent
|
||||
cd $(MOD_ROOT) && go-licenses csv ./backend/src/agent/persistence > $(CSV_PATH)/persistence_agent.csv
|
||||
.PHONY: license_cache_server
|
||||
license_cache_server: $(BUILD)/cache_server
|
||||
cd $(MOD_ROOT) && go-licenses csv ./backend/src/cache > $(CSV_PATH)/cache_server.csv
|
||||
.PHONY: license_swf
|
||||
license_swf: $(BUILD)/swf
|
||||
cd $(MOD_ROOT) && go-licenses csv ./backend/src/crd/controller/scheduledworkflow > $(CSV_PATH)/swf.csv
|
||||
.PHONY: license_viewer
|
||||
license_viewer: $(BUILD)/viewer
|
||||
cd $(MOD_ROOT) && go-licenses csv ./backend/src/crd/controller/viewer > $(CSV_PATH)/viewer.csv
|
||||
.PHONY: license_driver
|
||||
license_driver: $(BUILD)/driver
|
||||
cd $(MOD_ROOT) && go-licenses csv ./backend/src/v2/cmd/driver > $(CSV_PATH)/driver.csv
|
||||
.PHONY: license_launcher
|
||||
license_launcher: $(BUILD)/launcher
|
||||
cd $(MOD_ROOT) && go-licenses csv ./backend/src/v2/cmd/launcher-v2 > $(CSV_PATH)/launcher.csv
|
||||
|
||||
.PHONY: image_all
|
||||
image_all: image_apiserver image_persistence_agent image_cache image_swf image_viewer image_visualization
|
||||
image_all: image_apiserver image_persistence_agent image_cache image_swf image_viewer image_visualization image_driver image_launcher
|
||||
|
||||
.PHONY: image_apiserver
|
||||
image_apiserver:
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build -t ${IMG_TAG_APISERVER} -f backend/Dockerfile .
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build --platform linux/amd64 -t ${IMG_REGISTRY}${IMG_TAG_APISERVER} -f backend/Dockerfile .
|
||||
.PHONY: image_persistence_agent
|
||||
image_persistence_agent:
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build -t ${IMG_TAG_PERSISTENCEAGENT} -f backend/Dockerfile.persistenceagent .
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build --platform linux/amd64 -t ${IMG_REGISTRY}${IMG_TAG_PERSISTENCEAGENT} -f backend/Dockerfile.persistenceagent .
|
||||
.PHONY: image_cache
|
||||
image_cache:
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build -t ${IMG_TAG_CACHESERVER} -f backend/Dockerfile.cacheserver .
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build --platform linux/amd64 -t ${IMG_REGISTRY}${IMG_TAG_CACHESERVER} -f backend/Dockerfile.cacheserver .
|
||||
.PHONY: image_swf
|
||||
image_swf:
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build -t ${IMG_TAG_SCHEDULEDWORKFLOW} -f backend/Dockerfile.scheduledworkflow .
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build --platform linux/amd64 -t ${IMG_REGISTRY}${IMG_TAG_SCHEDULEDWORKFLOW} -f backend/Dockerfile.scheduledworkflow .
|
||||
.PHONY: image_viewer
|
||||
image_viewer:
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build -t ${IMG_TAG_VIEWERCONTROLLER} -f backend/Dockerfile.viewercontroller .
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build --platform linux/amd64 -t ${IMG_REGISTRY}${IMG_TAG_VIEWERCONTROLLER} -f backend/Dockerfile.viewercontroller .
|
||||
.PHONY: image_visualization
|
||||
image_visualization:
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build -t ${IMG_TAG_VISUALIZATION} -f backend/Dockerfile.visualization .
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build --platform linux/amd64 -t ${IMG_REGISTRY}${IMG_TAG_VISUALIZATION} -f backend/Dockerfile.visualization .
|
||||
.PHONY: image_driver
|
||||
image_driver:
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build --platform linux/amd64 -t ${IMG_REGISTRY}${IMG_TAG_DRIVER} -f backend/Dockerfile.driver .
|
||||
.PHONY: image_driver_debug
|
||||
image_driver_debug:
|
||||
cd $(MOD_ROOT) && sed -e '/RUN .*go mod download/a\
|
||||
RUN go install github.com/go-delve/delve/cmd/dlv@latest' \
|
||||
-e '/COPY .*\/bin\/driver \/bin\/driver/a\
|
||||
COPY . \/go\/src\/github.com\/kubeflow\/pipelines\
|
||||
COPY --from=builder /go/bin/dlv /bin/dlv\
|
||||
EXPOSE 2345' \
|
||||
backend/Dockerfile.driver > backend/Dockerfile.driver-debug
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build --platform linux/amd64 --build-arg GCFLAGS="all=-N -l" -t ${IMG_REGISTRY}${IMG_TAG_DRIVER}:debug -f backend/Dockerfile.driver-debug .
|
||||
.PHONY: image_launcher
|
||||
image_launcher:
|
||||
cd $(MOD_ROOT) && ${CONTAINER_ENGINE} build --platform linux/amd64 -t ${IMG_REGISTRY}${IMG_TAG_LAUNCHER} -f backend/Dockerfile.launcher .
|
||||
|
||||
.PHONY: install-cert-manager
|
||||
install-cert-manager:
|
||||
kubectl apply -f https://github.com/cert-manager/cert-manager/releases/download/$(CERT_MANAGER_VERSION)/cert-manager.yaml
|
||||
kubectl wait deployment -n cert-manager cert-manager --for condition=Available=True --timeout=180s
|
||||
kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=cert-manager -n cert-manager --timeout=180s
|
||||
|
||||
# Creates a Kind cluster and Deploys a standalone KFP instance
|
||||
# In the Kubeflow namespace.
|
||||
.PHONY: kind-cluster-agnostic
|
||||
kind-cluster-agnostic:
|
||||
# Deploy Kind Cluster
|
||||
kind create cluster --name $(KIND_NAME)
|
||||
kubectl config use-context kind-$(KIND_NAME)
|
||||
kind get kubeconfig --name $(KIND_NAME) > $(CURDIR)/../kubeconfig_$(KIND_NAME)
|
||||
# Deploy cluster resources required by KFP
|
||||
kubectl apply -k $(CURDIR)/../manifests/kustomize/cluster-scoped-resources
|
||||
kubectl wait --for condition=established --timeout=1m crd/applications.app.k8s.io
|
||||
# Deploy KFP
|
||||
kubectl apply -k $(CURDIR)/../manifests/kustomize/env/platform-agnostic
|
||||
kubectl -n kubeflow wait --for condition=Available --timeout=10m deployment/mysql
|
||||
kubectl -n kubeflow wait --for condition=Available --timeout=3m deployment/metadata-grpc-deployment
|
||||
kubectl -n kubeflow wait --for condition=Available --timeout=3m deployment/ml-pipeline
|
||||
# Switch to Kubeflow namespace context
|
||||
kubectl config set-context --current --namespace=kubeflow
|
||||
|
||||
.PHONY: dev-kind-cluster
|
||||
dev-kind-cluster:
|
||||
${CONTAINER_ENGINE} build -t ${IMG_TAG_WEBHOOK_PROXY} -f $(CURDIR)/../tools/kind/Dockerfile.webhook-proxy $(CURDIR)/../tools/kind
|
||||
-kind create cluster --name $(KIND_NAME) --config $(CURDIR)/../tools/kind/kind-config.yaml
|
||||
kubectl config use-context kind-$(KIND_NAME)
|
||||
kind get kubeconfig --name $(KIND_NAME) > $(CURDIR)/../kubeconfig_$(KIND_NAME)
|
||||
@if [ "${CONTAINER_ENGINE}" = "docker" ]; then \
|
||||
kind --name ${KIND_NAME} load docker-image ${IMG_TAG_WEBHOOK_PROXY}; \
|
||||
else \
|
||||
bash -c "kind load --name ${KIND_NAME} image-archive <( ${CONTAINER_ENGINE} save ${IMG_TAG_WEBHOOK_PROXY})"; \
|
||||
fi
|
||||
$(MAKE) install-cert-manager
|
||||
kubectl apply -k $(CURDIR)/../manifests/kustomize/cluster-scoped-resources
|
||||
kubectl wait --for condition=established --timeout=1m crd/applications.app.k8s.io
|
||||
kubectl apply -k $(CURDIR)/../manifests/kustomize/env/dev-kind
|
||||
kubectl apply -f $(CURDIR)/../tools/kind/webhook-proxy.yaml
|
||||
kubectl -n kubeflow wait --for condition=Available --timeout=10m deployment/mysql
|
||||
kubectl -n kubeflow wait --for condition=Available --timeout=3m deployment/metadata-grpc-deployment
|
||||
|
||||
.PHONY: kind-load-driver-debug
|
||||
kind-load-driver-debug:
|
||||
@if [ "${CONTAINER_ENGINE}" = "docker" ]; then \
|
||||
kind --name ${KIND_NAME} load docker-image ${IMG_TAG_DRIVER}:debug
|
||||
else \
|
||||
bash -c "kind load --name ${KIND_NAME} image-archive <( ${CONTAINER_ENGINE} save ${IMG_TAG_DRIVER})"; \
|
||||
fi
|
||||
|
||||
.PHONY: kind-build-and-load-driver-debug
|
||||
kind-build-and-load-driver-debug: image_driver_debug kind-load-driver-debug
|
||||
|
||||
.PHONY: lint-and-format
|
||||
lint-and-format: lint format
|
||||
|
||||
.PHONY: lint
|
||||
lint:
|
||||
golangci-lint run --new-from-rev HEAD --fix
|
||||
|
||||
.PHONY: format
|
||||
format:
|
||||
golangci-lint fmt
|
|
@ -1,8 +1,14 @@
|
|||
approvers:
|
||||
- chensun
|
||||
- Tomcli
|
||||
- rimolive
|
||||
- hbelmiro
|
||||
- mprahl
|
||||
reviewers:
|
||||
- chensun
|
||||
- Tomcli
|
||||
- hbelmiro
|
||||
- HumairAK
|
||||
- rimolive
|
||||
- mprahl
|
||||
- gmfrasca
|
||||
- droctothorpe
|
||||
- zazulam
|
||||
|
|
|
@ -1,7 +1,20 @@
|
|||
# Kubeflow Pipelines Backend
|
||||
|
||||
## Overview
|
||||
|
||||
This directory contains code for the components that comprise the Kubeflow
|
||||
Pipelines backend.
|
||||
|
||||
## Building & Testing
|
||||
This README will help you set up your coding environment in order to build and run the Kubeflow Pipelines backend. The KFP backend powers the core functionality of the KFP platform, handling API requests, workflow management, and data persistence.
|
||||
|
||||
## Prerequisites
|
||||
Before you begin, ensure you have:
|
||||
- [Go installed](https://go.dev/doc/install)
|
||||
- Docker or Podman installed (for building container images)
|
||||
|
||||
Note that you may need to restart your shell after installing these resources in order for the changes to take effect.
|
||||
|
||||
## Testing
|
||||
|
||||
To run all unittests for backend:
|
||||
|
||||
|
@ -9,72 +22,335 @@ To run all unittests for backend:
|
|||
go test -v -cover ./backend/...
|
||||
```
|
||||
|
||||
If running a [local API server](#run-the-kfp-backend-locally-with-a-kind-cluster), you can run the integration tests
|
||||
with:
|
||||
|
||||
```bash
|
||||
LOCAL_API_SERVER=true go test -v ./backend/test/v2/integration/... -namespace kubeflow -args -runIntegrationTests=true
|
||||
```
|
||||
|
||||
To run a specific test, you can use the `-run` flag. For example, to run the `TestCacheSingleRun` test in the
|
||||
`TestCache` suite, you can use the `-run 'TestCache/TestCacheSingleRun'` flag to the above command.
|
||||
|
||||
## Build
|
||||
|
||||
The API server itself can be built using:
|
||||
|
||||
```
|
||||
go build -o /tmp/apiserver backend/src/apiserver/*.go
|
||||
```
|
||||
|
||||
## Code Style
|
||||
The API server image can be built from the root folder of the repo using:
|
||||
```
|
||||
export API_SERVER_IMAGE=api_server
|
||||
docker build -f backend/Dockerfile . --tag $API_SERVER_IMAGE
|
||||
```
|
||||
### Deploying the APIServer (from the image you built) on Kubernetes
|
||||
|
||||
First, push your image to a registry that is accessible from your Kubernetes cluster.
|
||||
|
||||
Then, run:
|
||||
```
|
||||
kubectl edit deployment.v1.apps/ml-pipeline -n kubeflow
|
||||
```
|
||||
You'll see the field reference the api server container image (`spec.containers[0].image: gcr.io/ml-pipeline/api-server:<image-version>`).
|
||||
Change it to point to your own build, after saving and closing the file, apiserver will restart with your change.
|
||||
|
||||
### Building client library and swagger files
|
||||
|
||||
After making changes to proto files, the Go client libraries, Python client libraries and swagger files
|
||||
need to be regenerated and checked-in. Refer to [backend/api](./api/README.md) for details.
|
||||
|
||||
### Updating python dependencies
|
||||
|
||||
[pip-tools](https://github.com/jazzband/pip-tools) is used to manage python
|
||||
dependencies. To update dependencies, edit [requirements.in](requirements.in)
|
||||
and run `./update_requirements.sh` to update and pin the transitive
|
||||
dependencies.
|
||||
|
||||
|
||||
### Building conformance tests (WIP)
|
||||
|
||||
Run
|
||||
```
|
||||
docker build . -f backend/Dockerfile.conformance -t <tag>
|
||||
```
|
||||
|
||||
## API Server Development
|
||||
|
||||
### Run the KFP Backend Locally With a Kind Cluster
|
||||
|
||||
This deploys a local Kubernetes cluster leveraging [kind](https://kind.sigs.k8s.io/), with all the components required
|
||||
to run the Kubeflow Pipelines API server. Note that the `ml-pipeline` `Deployment` (API server) has its replicas set to
|
||||
0 so that the API server can be run locally for debugging and faster development. The local API server is available by
|
||||
pods on the cluster using the `ml-pipeline` `Service`.
|
||||
|
||||
#### Prerequisites
|
||||
|
||||
* The [kind CLI](https://kind.sigs.k8s.io/docs/user/quick-start/#installation) is installed.
|
||||
* The following ports are available on your localhost: 3000, 3306, 8080, 9000, and 8889. If these are unavailable,
|
||||
modify [kind-config.yaml](../tools/kind/kind-config.yaml) and configure the API server with alternative ports when
|
||||
running locally.
|
||||
* If using a Mac, you will need to modify the
|
||||
[Endpoints](../manifests/kustomize/env/dev-kind/forward-local-api-endpoint.yaml) manifest to leverage the bridge
|
||||
network interface through Docker/Podman Desktop. See
|
||||
[kind #1200](https://github.com/kubernetes-sigs/kind/issues/1200#issuecomment-1304855791) for an example manifest.
|
||||
* Optional: VSCode is installed to leverage a sample `launch.json` file.
|
||||
* This relies on dlv: (go install -v github.com/go-delve/delve/cmd/dlv@latest)
|
||||
|
||||
#### Provisioning the Cluster
|
||||
|
||||
To provision the kind cluster, run the following from the Git repository's root directory:
|
||||
|
||||
```bash
|
||||
make -C backend dev-kind-cluster
|
||||
```
|
||||
|
||||
This may take several minutes since there are many pods. Note that many pods will be in "CrashLoopBackOff" status until
|
||||
all the pods have started.
|
||||
|
||||
> [!NOTE]
|
||||
> The config in the `make` command above sets the `ml-pipeline` `Deployment` (api server) to have 0 replicas. The intent is to replace it with a locally running API server for debugging and faster development. See the following steps to run the API server locally, and connect it to the KFP backend on your Kind cluster. Note that other backend components (for example, the persistence agent) may show errors until the API server is brought up and connected to the cluster.
|
||||
|
||||
#### Launching the API Server With VSCode
|
||||
|
||||
After the cluster is provisioned, you may leverage the following sample `.vscode/launch.json` file to run the API
|
||||
server locally:
|
||||
|
||||
```json
|
||||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Launch API Server (Kind)",
|
||||
"type": "go",
|
||||
"request": "launch",
|
||||
"mode": "debug",
|
||||
"program": "${workspaceFolder}/backend/src/apiserver",
|
||||
"env": {
|
||||
"POD_NAMESPACE": "kubeflow",
|
||||
"DBCONFIG_MYSQLCONFIG_HOST": "localhost",
|
||||
"MINIO_SERVICE_SERVICE_HOST": "localhost",
|
||||
"MINIO_SERVICE_SERVICE_PORT": "9000",
|
||||
"METADATA_GRPC_SERVICE_SERVICE_HOST": "localhost",
|
||||
"METADATA_GRPC_SERVICE_SERVICE_PORT": "8080",
|
||||
"ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST": "localhost",
|
||||
"ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_PORT": "8888",
|
||||
"V2_LAUNCHER_IMAGE": "ghcr.io/kubeflow/kfp-launcher:master",
|
||||
"V2_DRIVER_IMAGE": "ghcr.io/kubeflow/kfp-driver:master"
|
||||
},
|
||||
"args": [
|
||||
"--config",
|
||||
"${workspaceFolder}/backend/src/apiserver/config",
|
||||
"-logtostderr=true"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
#### Using the Environment
|
||||
|
||||
Once the cluster is provisioned and the API server is running, you can access the API server at
|
||||
[http://localhost:8888](http://localhost:8888)
|
||||
(e.g. [http://localhost:8888/apis/v2beta1/pipelines](http://localhost:8888/apis/v2beta1/pipelines)).
|
||||
|
||||
You can also access the Kubeflow Pipelines web interface at [http://localhost:3000](http://localhost:3000).
|
||||
|
||||
You can also directly connect to the MariaDB database server with:
|
||||
|
||||
```bash
|
||||
mysql -h 127.0.0.1 -u root
|
||||
```
|
||||
|
||||
### Scheduled Workflow Development
|
||||
|
||||
If you also want to run the Scheduled Workflow controller locally, stop the controller on the cluster with:
|
||||
|
||||
```bash
|
||||
kubectl -n kubeflow scale deployment ml-pipeline-scheduledworkflow --replicas=0
|
||||
```
|
||||
|
||||
Then you may leverage the following sample `.vscode/launch.json` file to run the Scheduled Workflow controller locally:
|
||||
|
||||
```json
|
||||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Launch Scheduled Workflow controller (Kind)",
|
||||
"type": "go",
|
||||
"request": "launch",
|
||||
"mode": "debug",
|
||||
"program": "${workspaceFolder}/backend/src/crd/controller/scheduledworkflow",
|
||||
"env": {
|
||||
"CRON_SCHEDULE_TIMEZONE": "UTC"
|
||||
},
|
||||
"args": [
|
||||
"-namespace=kubeflow",
|
||||
"-kubeconfig=${workspaceFolder}/kubeconfig_dev-pipelines-api",
|
||||
"-mlPipelineAPIServerName=localhost"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### Remote Debug the Driver
|
||||
|
||||
These instructions assume you are leveraging the Kind cluster in the
|
||||
[Run Locally With a Kind Cluster](#run-locally-with-a-kind-cluster) section.
|
||||
|
||||
#### Build the Driver Image With Debug Prerequisites
|
||||
|
||||
Run the following to create the `backend/Dockerfile.driver-debug` file and build the container image
|
||||
tagged as `kfp-driver:debug`. This container image is based on `backend/Dockerfile.driver` but installs
|
||||
[Delve](https://github.com/go-delve/delve), builds the binary without compiler optimizations so the binary matches the
|
||||
source code (via `GCFLAGS="all=-N -l"`), and copies the source code to the destination container for the debugger.
|
||||
Any changes to the Driver code will require rebuilding this container image.
|
||||
|
||||
```bash
|
||||
make -C backend image_driver_debug
|
||||
```
|
||||
|
||||
Then load the container image in the Kind cluster.
|
||||
|
||||
```bash
|
||||
make -C backend kind-load-driver-debug
|
||||
```
|
||||
|
||||
Alternatively, you can use this Make target that does both.
|
||||
|
||||
```bash
|
||||
make -C backend kind-build-and-load-driver-debug
|
||||
```
|
||||
|
||||
#### Run the API Server With Debug Configuration
|
||||
|
||||
You may use the following VS Code `launch.json` file to run the API server which overrides the Driver
|
||||
command to use Delve and the Driver image to use the debug image built previously.
|
||||
|
||||
VSCode configuration:
|
||||
```json
|
||||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Launch API server (Kind) (Debug Driver)",
|
||||
"type": "go",
|
||||
"request": "launch",
|
||||
"mode": "debug",
|
||||
"program": "${workspaceFolder}/backend/src/apiserver",
|
||||
"env": {
|
||||
"POD_NAMESPACE": "kubeflow",
|
||||
"DBCONFIG_MYSQLCONFIG_HOST": "localhost",
|
||||
"MINIO_SERVICE_SERVICE_HOST": "localhost",
|
||||
"MINIO_SERVICE_SERVICE_PORT": "9000",
|
||||
"METADATA_GRPC_SERVICE_SERVICE_HOST": "localhost",
|
||||
"METADATA_GRPC_SERVICE_SERVICE_PORT": "8080",
|
||||
"ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST": "localhost",
|
||||
"ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_PORT": "8888",
|
||||
"V2_LAUNCHER_IMAGE": "ghcr.io/kubeflow/kfp-launcher:master",
|
||||
"V2_DRIVER_IMAGE": "kfp-driver:debug",
|
||||
"V2_DRIVER_COMMAND": "dlv exec --listen=:2345 --headless=true --api-version=2 --log /bin/driver --"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
GoLand configuration:
|
||||
1. Create a new Go Build configuration
|
||||
2. Set **Run Kind** to Directory and set **Directory** to /backend/src/apiserver absolute path
|
||||
3. Set the following environment variables
|
||||
|
||||
| Environment Variable                          | Value     |
|
||||
|----------------------------------------------|-----------|
|
||||
| POD_NAMESPACE | kubeflow |
|
||||
| DBCONFIG_MYSQLCONFIG_HOST | localhost |
|
||||
| MINIO_SERVICE_SERVICE_HOST | localhost |
|
||||
| MINIO_SERVICE_SERVICE_PORT | 9000 |
|
||||
| METADATA_GRPC_SERVICE_SERVICE_HOST | localhost |
|
||||
| METADATA_GRPC_SERVICE_SERVICE_PORT | 8080 |
|
||||
| ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST | localhost |
|
||||
| ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_PORT | 8888 |
|
||||
| V2_LAUNCHER_IMAGE                             | ghcr.io/kubeflow/kfp-launcher:master |
|
||||
| V2_DRIVER_IMAGE                               | kfp-driver:debug |
|
||||
| V2_DRIVER_COMMAND | dlv --listen=:2345 --headless=true --api-version=2 --accept-multiclient exec /bin/driver -- |
|
||||
4. Set the following program arguments: --config ./backend/src/apiserver/config -logtostderr=true --sampleconfig ./backend/src/apiserver/config/test_sample_config.json
|
||||
|
||||
#### Starting a Remote Debug Session
|
||||
|
||||
Start by launching a pipeline. This will eventually create a Driver pod that is waiting for a remote debug connection.
|
||||
|
||||
You can see the pods with the following command.
|
||||
|
||||
```bash
|
||||
kubectl -n kubeflow get pods -w
|
||||
```
|
||||
|
||||
Once you see a pod with `-driver` in the name such as `hello-world-clph9-system-dag-driver-10974850`, port forward
|
||||
the Delve port in the pod to your localhost (replace `<driver pod name>` with the actual name).
|
||||
|
||||
```bash
|
||||
kubectl -n kubeflow port-forward <driver pod name> 2345:2345
|
||||
```
|
||||
|
||||
Set a breakpoint on the Driver code in VS Code. Then remotely connect to the Delve debug session with the following VS
|
||||
Code `launch.json` file:
|
||||
|
||||
VSCode configuration:
|
||||
```json
|
||||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Connect to remote driver",
|
||||
"type": "go",
|
||||
"request": "attach",
|
||||
"mode": "remote",
|
||||
"remotePath": "/go/src/github.com/kubeflow/pipelines",
|
||||
"port": 2345,
|
||||
"host": "127.0.0.1",
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
GoLand configuration:
|
||||
1. Create a new Go Remote configuration and title it "Delve debug session"
|
||||
2. Set **Host** to localhost
|
||||
3. Set **Port** to 2345
|
||||
|
||||
Once the Driver pod succeeds, the remote debug session will close. Then repeat the process of forwarding the port
|
||||
of subsequent Driver pods and starting remote debug sessions in VS Code until the pipeline completes.
|
||||
|
||||
For debugging a specific Driver pod, you'll need to continuously port forward and connect to the remote debug session
|
||||
without a breakpoint so that Delve will continue execution until the Driver pod you are interested in starts up. At that
|
||||
point, you can set a break point, port forward, and connect to the remote debug session to debug that specific Driver
|
||||
pod.
|
||||
|
||||
### Using a Webhook Proxy for Local Development in a Kind Cluster
|
||||
|
||||
The Kubeflow Pipelines API server typically runs over HTTPS when deployed in a Kubernetes cluster. However, during local development, it operates over HTTP, which Kubernetes admission webhooks do not support (they require HTTPS). This incompatibility prevents webhooks from functioning correctly in a local Kind cluster.
|
||||
|
||||
To resolve this, a webhook proxy acts as a bridge, allowing webhooks to communicate with the API server even when it runs over HTTP.
|
||||
|
||||
This is used by default when using the `dev-kind-cluster` Make target.
|
||||
|
||||
### Deleting the Kind Cluster
|
||||
|
||||
Run the following to delete the cluster (once you are finished):
|
||||
|
||||
```bash
|
||||
kind delete clusters dev-pipelines-api
|
||||
```
|
||||
|
||||
## Contributing
|
||||
### Code Style
|
||||
|
||||
Backend codebase follows the [Google's Go Style Guide](https://google.github.io/styleguide/go/). Please, take time to get familiar with the [best practices](https://google.github.io/styleguide/go/best-practices). It is not intended to be exhaustive, but it often helps minimize guesswork among developers and keeps the codebase uniform and consistent.
|
||||
|
||||
We use [golangci-lint](https://golangci-lint.run/) tool that can catch common mistakes locally (see detailed configuration [here](https://github.com/kubeflow/pipelines/blob/master/.golangci.yaml)). It can be [conveniently integrated](https://golangci-lint.run/usage/integrations/) with multiple popular IDEs such as VS Code or Vim.
|
||||
|
||||
Finally, it is advised to install [pre-commit](https://pre-commit.com/) in order to automate linter checks (see configuration [here](https://github.com/kubeflow/pipelines/blob/master/.pre-commit-config.yaml))
|
||||
|
||||
## Building APIServer image locally
|
||||
|
||||
The API server image can be built from the root folder of the repo using:
|
||||
```
|
||||
export API_SERVER_IMAGE=api_server
|
||||
docker build -f backend/Dockerfile . --tag $API_SERVER_IMAGE
|
||||
```
|
||||
## Deploy the APIServer with the image you built
|
||||
|
||||
Run
|
||||
```
|
||||
kubectl edit deployment.v1.apps/ml-pipeline -n kubeflow
|
||||
```
|
||||
You'll see the field that references the API server Docker image.
|
||||
Change it to point to your own build, after saving and closing the file, apiserver will restart with your change.
|
||||
|
||||
## Building client library and swagger files
|
||||
|
||||
After making changes to proto files, the Go client libraries, Python client libraries and swagger files
|
||||
need to be regenerated and checked-in. Refer to [backend/api](./api/README.md) for details.
|
||||
|
||||
## Updating licenses info
|
||||
|
||||
1. [Install go-licenses tool](../hack/install-go-licenses.sh) and refer to [its documentation](https://github.com/google/go-licenses) for how to use it.
|
||||
|
||||
|
||||
2. Run the tool to update all licenses:
|
||||
|
||||
```bash
|
||||
make all
|
||||
```
|
||||
|
||||
## Updating python dependencies
|
||||
|
||||
[pip-tools](https://github.com/jazzband/pip-tools) is used to manage python
|
||||
dependencies. To update dependencies, edit [requirements.in](requirements.in)
|
||||
and run `./update_requirements.sh` to update and pin the transitive
|
||||
dependencies.
|
||||
|
||||
# Visualization Server Instructions
|
||||
|
||||
## Updating python dependencies
|
||||
|
||||
[pip-tools](https://github.com/jazzband/pip-tools) is used to manage python
|
||||
dependencies. To update dependencies, edit [requirements.in](requirements.in)
|
||||
and run `./update_requirements.sh` to update and pin the transitive
|
||||
dependencies.
|
||||
|
||||
|
||||
## Building conformance tests (WIP)
|
||||
|
||||
Run
|
||||
```
|
||||
docker build . -f backend/Dockerfile.conformance -t <tag>
|
||||
```
|
||||
|
|
|
@ -13,42 +13,71 @@
|
|||
# limitations under the License.
|
||||
|
||||
# Generate client code (go & json) from API protocol buffers
|
||||
FROM golang:1.20 as generator
|
||||
ENV GRPC_GATEWAY_VERSION v1.9.6
|
||||
ENV GO_SWAGGER_VERSION v0.18.0
|
||||
ENV GOLANG_PROTOBUF_VERSION v1.5.1
|
||||
ENV GRPC_VERSION v1.23.0
|
||||
ENV PROTOC_VERSION 3.17.3
|
||||
FROM golang:1.24 as generator
|
||||
ENV GRPC_GATEWAY_VERSION v2.27.1
|
||||
ENV GO_SWAGGER_VERSION v0.32.3
|
||||
ENV GRPC_VERSION v1.73.0
|
||||
ENV PROTOC_VERSION 31.1
|
||||
ENV GOBIN=/go/bin
|
||||
# The googleapis repo doesn't use GitHub releases or version tags,
|
||||
# so we pin a specific commit to make the clone reproducible.
|
||||
ENV GOOGLEAPIS_COMMIT 68d5196a529174df97c28c70622ffc1c3721815f
|
||||
|
||||
# **Note** that protoc-gen-go-grpc is packaged with grpc-go but is versioned
|
||||
# separately. You can find the releases for protoc-gen-go-grpc here:
|
||||
# https://github.com/grpc/grpc-go/releases
|
||||
# **Note** that these also include releases for grpc-go which is the grpc Go
|
||||
# runtime package. protoc-gen-go-grpc is the package used for generating
|
||||
# Go GRPC code from .proto files.
|
||||
# to list recent protoc-gen-go-grpc versions you can also do:
|
||||
# go list -m -versions google.golang.org/grpc/cmd/protoc-gen-go-grpc
|
||||
# PROTOC_GEN_GO_GRPC & PROTOBUF_GO versions should match reasonably close to each other.
|
||||
# You can check the protobuf in the go.mod for protoc-gen-go-grpc, like here:
|
||||
# https://github.com/grpc/grpc-go/blob/cmd/protoc-gen-go-grpc/v1.5.1/cmd/protoc-gen-go-grpc/go.mod#L7
|
||||
# **Note** That BOTH PROTOC_GEN_GO_GRPC & PROTOBUF_GO here are used for
|
||||
# Generating GO Code. These versions should be identical to the
|
||||
# runtime Go packages (in the project go.mod)
|
||||
ENV PROTOC_GEN_GO_GRPC v1.5.1
|
||||
ENV PROTOBUF_GO=v1.36.6
|
||||
|
||||
# Install protoc.
|
||||
RUN apt-get update -y && apt-get install -y jq sed unzip
|
||||
RUN curl -L -o protoc.zip https://github.com/protocolbuffers/protobuf/releases/download/v${PROTOC_VERSION}/protoc-${PROTOC_VERSION}-linux-x86_64.zip
|
||||
RUN unzip -o protoc.zip -d /usr/ bin/protoc
|
||||
RUN unzip -o protoc.zip -d /usr/ 'include/*'
|
||||
RUN unzip -o protoc.zip -d /tmp/protoc && \
|
||||
mv /tmp/protoc/bin/protoc /usr/bin/protoc && \
|
||||
chmod +x /usr/bin/protoc
|
||||
RUN unzip -o protoc.zip 'include/*' -d /tmp/protoc && \
|
||||
mv /tmp/protoc/include/* /usr/include
|
||||
RUN rm -f protoc.zip
|
||||
ENV PROTOCCOMPILER /usr/bin/protoc
|
||||
ENV PROTOCINCLUDE /usr/include/google/protobuf
|
||||
|
||||
# Need grpc-gateway source code for -I in protoc command.
|
||||
WORKDIR /go/src/github.com
|
||||
RUN mkdir grpc-ecosystem && cd grpc-ecosystem && git clone --depth 1 --branch $GRPC_GATEWAY_VERSION https://github.com/grpc-ecosystem/grpc-gateway.git
|
||||
RUN mkdir grpc && git clone --depth 1 --branch $GRPC_VERSION https://github.com/grpc/grpc-go
|
||||
|
||||
# Install protoc-gen-rpc-gateway && protoc-gen-swagger.
|
||||
RUN cd grpc-ecosystem/grpc-gateway && GO111MODULE=on go mod vendor
|
||||
RUN go install github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway@latest
|
||||
RUN go install github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger@latest
|
||||
RUN go install github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-grpc-gateway@${GRPC_GATEWAY_VERSION}
|
||||
RUN go install github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2@${GRPC_GATEWAY_VERSION}
|
||||
|
||||
# Need to explicitly provide the googleapis protos and the OpenAPI options that were previously present in the grpc-gateway repo.
|
||||
RUN git init /googleapis && \
|
||||
cd /googleapis && \
|
||||
git remote add origin https://github.com/googleapis/googleapis.git && \
|
||||
git fetch --depth 1 origin ${GOOGLEAPIS_COMMIT} && \
|
||||
git checkout FETCH_HEAD
|
||||
RUN mkdir -p /protoc-gen-openapiv2 && \
|
||||
cp -r /go/pkg/mod/github.com/grpc-ecosystem/grpc-gateway/v2@${GRPC_GATEWAY_VERSION}/protoc-gen-openapiv2/options /protoc-gen-openapiv2/options
|
||||
|
||||
# Download go-swagger binary.
|
||||
RUN curl -LO "https://github.com/go-swagger/go-swagger/releases/download/${GO_SWAGGER_VERSION}/swagger_linux_amd64"
|
||||
RUN chmod +x swagger_linux_amd64 && mv swagger_linux_amd64 /usr/bin/swagger
|
||||
|
||||
# Need protobuf source code for -I in protoc command.
|
||||
RUN mkdir golang && cd golang && git clone --depth 1 --branch $GOLANG_PROTOBUF_VERSION https://github.com/golang/protobuf.git
|
||||
# Install protoc-gen-go.
|
||||
RUN cd golang/protobuf && GO111MODULE=on go mod vendor
|
||||
RUN go install github.com/golang/protobuf/protoc-gen-go@latest
|
||||
RUN go install google.golang.org/protobuf/cmd/protoc-gen-go@${PROTOBUF_GO}
|
||||
RUN go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@${PROTOC_GEN_GO_GRPC}
|
||||
|
||||
# Needed for building python packages requiring protoc
|
||||
RUN apt-get update && apt-get install -y python3-pip
|
||||
RUN pip3 install --upgrade pip setuptools wheel --break-system-packages
|
||||
|
||||
# WORKAROUND: https://github.com/docker-library/golang/issues/225#issuecomment-403170792
|
||||
ENV XDG_CACHE_HOME /tmp/.cache
|
||||
|
|
|
@ -15,12 +15,14 @@
|
|||
# Makefile to generate KFP api clients from proto.
|
||||
|
||||
IMAGE_TAG=kfp-api-generator
|
||||
# Contact chensun or zijianjoy if this remote image needs an update.
|
||||
REMOTE_IMAGE=gcr.io/ml-pipeline-test/api-generator
|
||||
# Image generated by https://github.com/kubeflow/pipelines/pull/10580
|
||||
# Keep in sync with the version used in test/release/Dockerfile.release
|
||||
PREBUILT_REMOTE_IMAGE=gcr.io/ml-pipeline-test/api-generator@sha256:41fd3e60ba40430a4c3d87e03be817c5f63b2dfed23059ec9d6bca62ce0cc39c
|
||||
# Contact chensun or HumairAK if this remote image needs an update.
|
||||
REMOTE_IMAGE=ghcr.io/kubeflow/kfp-api-generator
|
||||
# Assume the latest API version by default.
|
||||
API_VERSION ?= v2beta1
|
||||
|
||||
# Keep in sync with the version used in test/release/Dockerfile.release
|
||||
PREBUILT_REMOTE_IMAGE=ghcr.io/kubeflow/kfp-api-generator:1.2
|
||||
RELEASE_IMAGE=ghcr.io/kubeflow/kfp-release:1.2.1
|
||||
CONTAINER_ENGINE ?= docker
|
||||
|
||||
# Generate clients using a pre-built api-generator image.
|
||||
|
@ -32,11 +34,21 @@ generate: fetch-dependencies hack/generator.sh $(API_VERSION)/*.proto
|
|||
--mount type=bind,source="$$(pwd)/../..",target=/go/src/github.com/kubeflow/pipelines \
|
||||
$(PREBUILT_REMOTE_IMAGE) /go/src/github.com/kubeflow/pipelines/backend/api/hack/generator.sh
|
||||
|
||||
# Use the release image since it has some additional dependencies
|
||||
# required by kfp-server-api package generation
|
||||
.PHONY: generate-kfp-server-api-package
|
||||
generate-kfp-server-api-package:
|
||||
${CONTAINER_ENGINE} run --interactive --rm \
|
||||
-e API_VERSION=$(API_VERSION) \
|
||||
--user $$(id -u):$$(id -g) \
|
||||
--mount type=bind,source="$$(pwd)/../..",target=/go/src/github.com/kubeflow/pipelines \
|
||||
$(RELEASE_IMAGE) /go/src/github.com/kubeflow/pipelines/backend/api/build_kfp_server_api_python_package.sh
|
||||
|
||||
|
||||
# Fetch dependency proto
|
||||
.PHONY: fetch-dependencies
|
||||
fetch-dependencies: v2beta1/google/rpc/status.proto
|
||||
|
||||
# TODO(gkcalat): add this as a submodule?
|
||||
v2beta1/google/rpc/status.proto:
|
||||
mkdir -p v2beta1/google/rpc
|
||||
wget -O v2beta1/google/rpc/status.proto https://raw.githubusercontent.com/googleapis/googleapis/047d3a8ac7f75383855df0166144f891d7af08d9/google/rpc/status.proto
|
||||
|
|
|
@ -12,7 +12,7 @@ Tools needed:
|
|||
Set the environment variable `API_VERSION` to the version that you want to generate. We use `v1beta1` as example here.
|
||||
|
||||
```bash
|
||||
export API_VERSION="v1beta1"
|
||||
export API_VERSION="v2beta1"
|
||||
```
|
||||
|
||||
## Compiling `.proto` files to Go client and swagger definitions
|
||||
|
@ -81,7 +81,9 @@ API definitions in this folder are used to generate [`v1beta1`](https://www.kube
|
|||
|
||||
API generator image is defined in [Dockerfile](`./Dockerfile`). If you need to update the container, follow these steps:
|
||||
|
||||
1. Update the [Dockerfile](`./Dockerfile`) and build the image by running `docker build -t gcr.io/ml-pipeline-test/api-generator:latest .`
|
||||
1. Push the new container by running `docker push gcr.io/ml-pipeline-test/api-generator:latest` (requires to be [authenticated](https://cloud.google.com/container-registry/docs/advanced-authentication)).
|
||||
1. Login to GHCR container registry: `echo "<PAT>" | docker login ghcr.io -u <USERNAME> --password-stdin`
|
||||
* Replace `<PAT>` with a GitHub Personal Access Token (PAT) with the write:packages and `read:packages` scopes, as well as `delete:packages` if needed.
|
||||
1. Update the [Dockerfile](`./Dockerfile`) and build the image by running `docker build -t ghcr.io/kubeflow/kfp-api-generator:$VERSION .`
|
||||
1. Push the new container by running `docker push ghcr.io/kubeflow/kfp-api-generator:$VERSION`.
|
||||
1. Update the `PREBUILT_REMOTE_IMAGE` variable in the [Makefile](./Makefile) to point to your new image.
|
||||
1. Similarly, push a new version of the release tools image to `gcr.io/ml-pipeline-test/release:latest` and run `make push` in [test/release/Makefile](../../test/release/Makefile).
|
||||
1. Similarly, push a new version of the release tools image to `ghcr.io/kubeflow/kfp-release:$VERSION` and run `make push` in [test/release/Makefile](../../test/release/Makefile).
|
||||
|
|
|
@ -58,6 +58,10 @@ java -jar "$codegen_file" generate -g python -t "$CURRENT_DIR/$API_VERSION/pytho
|
|||
"packageUrl": "https://github.com/kubeflow/pipelines"
|
||||
}')
|
||||
|
||||
echo "Removing unnecessary GitLab and TravisCI generated files"
|
||||
rm $CURRENT_DIR/$API_VERSION/python_http_client/.gitlab-ci.yml
|
||||
rm $CURRENT_DIR/$API_VERSION/python_http_client/.travis.yml
|
||||
|
||||
echo "Copying LICENSE to $DIR"
|
||||
cp "$CURRENT_DIR/../../LICENSE" "$DIR"
|
||||
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue