From b28c2cbca699cacc1073c6c5fd2c17bbb917c8b1 Mon Sep 17 00:00:00 2001 From: kfp-tekton-bot <65624628+kfp-tekton-bot@users.noreply.github.com> Date: Mon, 3 Aug 2020 20:38:18 -0700 Subject: [PATCH] KFP 1.0 Rebase (#251) --- .cloudbuild.yaml | 38 +- .github/PULL_REQUEST_TEMPLATE.md | 20 + .gitignore | 4 + .release.cloudbuild.yaml | 47 +- .travis.yml | 10 +- CONTRIBUTING.md | 82 +- VERSION | 2 +- backend/Dockerfile | 29 +- .../build_kfp_server_api_python_package.sh | 43 +- backend/api/experiment.proto | 27 +- backend/api/generate_api.sh | 18 +- .../archive_experiment_parameters.go | 5 +- .../experiment_service_client.go | 8 +- .../list_experiment_parameters.go | 16 +- .../unarchive_experiment_parameters.go | 5 +- .../experiment_model/api_experiment.go | 2 +- .../job_service/list_jobs_parameters.go | 16 +- .../job_model/api_list_jobs_response.go | 4 +- .../delete_pipeline_parameters.go | 5 +- .../delete_pipeline_version_parameters.go | 5 +- .../get_pipeline_parameters.go | 5 +- .../get_pipeline_version_parameters.go | 5 +- ...et_pipeline_version_template_parameters.go | 5 +- .../get_template_parameters.go | 5 +- .../list_pipeline_versions_parameters.go | 16 +- .../list_pipelines_parameters.go | 16 +- .../pipeline_service_client.go | 14 +- .../api_get_template_response.go | 3 +- .../api_list_pipeline_versions_response.go | 4 +- .../api_list_pipelines_response.go | 4 +- .../go_http_client/pipeline_model/api_url.go | 2 +- .../run_service/archive_run_parameters.go | 5 +- .../run_service/delete_run_parameters.go | 5 +- .../run_service/get_run_parameters.go | 5 +- .../run_service/list_runs_parameters.go | 18 +- .../run_service/retry_run_parameters.go | 5 +- .../run_service/run_service_client.go | 2 +- .../run_service/terminate_run_parameters.go | 5 +- .../run_service/unarchive_run_parameters.go | 5 +- .../run_model/api_list_runs_response.go | 4 +- .../api/go_http_client/run_model/api_run.go | 4 +- backend/api/job.proto | 27 +- backend/api/pipeline.proto | 57 +- backend/api/python_http_client/.gitignore | 66 + backend/api/python_http_client/.gitlab-ci.yml | 47 + .../.openapi-generator-ignore | 23 + .../.openapi-generator/VERSION | 1 + backend/api/python_http_client/.travis.yml | 31 + backend/api/python_http_client/LICENSE | 201 ++ backend/api/python_http_client/README.md | 188 ++ .../docs/ApiCronSchedule.md | 12 + .../python_http_client/docs/ApiExperiment.md | 15 + .../docs/ApiGetTemplateResponse.md | 10 + backend/api/python_http_client/docs/ApiJob.md | 24 + .../docs/ApiListExperimentsResponse.md | 12 + .../docs/ApiListJobsResponse.md | 12 + .../docs/ApiListPipelineVersionsResponse.md | 12 + .../docs/ApiListPipelinesResponse.md | 12 + .../docs/ApiListRunsResponse.md | 12 + .../python_http_client/docs/ApiParameter.md | 11 + .../docs/ApiPeriodicSchedule.md | 12 + .../python_http_client/docs/ApiPipeline.md | 17 + .../docs/ApiPipelineRuntime.md | 11 + .../docs/ApiPipelineSpec.md | 14 + .../docs/ApiPipelineVersion.md | 16 + .../docs/ApiReadArtifactResponse.md | 10 + .../docs/ApiRelationship.md | 9 + .../docs/ApiReportRunMetricsRequest.md | 11 + .../docs/ApiReportRunMetricsResponse.md | 10 + .../python_http_client/docs/ApiResourceKey.md | 11 + .../docs/ApiResourceReference.md | 12 + .../docs/ApiResourceType.md | 9 + backend/api/python_http_client/docs/ApiRun.md | 22 + .../python_http_client/docs/ApiRunDetail.md | 11 + .../python_http_client/docs/ApiRunMetric.md | 13 + .../api/python_http_client/docs/ApiStatus.md | 12 + .../api/python_http_client/docs/ApiTrigger.md | 12 + 
backend/api/python_http_client/docs/ApiUrl.md | 10 + .../docs/ExperimentServiceApi.md | 480 ++++ .../docs/ExperimentStorageState.md | 9 + .../api/python_http_client/docs/JobMode.md | 10 + .../python_http_client/docs/JobServiceApi.md | 480 ++++ .../docs/PipelineServiceApi.md | 794 ++++++ .../docs/PipelineUploadServiceApi.md | 168 ++ .../python_http_client/docs/ProtobufAny.md | 12 + ...RunMetricsResponseReportRunMetricResult.md | 13 + ...ricsResponseReportRunMetricResultStatus.md | 10 + .../docs/RunMetricFormat.md | 10 + .../python_http_client/docs/RunServiceApi.md | 794 ++++++ .../docs/RunStorageState.md | 9 + backend/api/python_http_client/git_push.sh | 72 + .../kfp_server_api/__init__.py | 84 + .../kfp_server_api/api/__init__.py | 24 + .../api/experiment_service_api.py | 818 ++++++ .../kfp_server_api/api/job_service_api.py | 818 ++++++ .../api/pipeline_service_api.py | 1327 ++++++++++ .../api/pipeline_upload_service_api.py | 331 +++ .../kfp_server_api/api/run_service_api.py | 1343 ++++++++++ .../kfp_server_api/api_client.py | 680 +++++ .../kfp_server_api/configuration.py | 417 +++ .../kfp_server_api/exceptions.py | 134 + .../kfp_server_api/models/__init__.py | 65 + .../models/api_cron_schedule.py | 186 ++ .../kfp_server_api/models/api_experiment.py | 272 ++ .../models/api_get_template_response.py | 136 + .../kfp_server_api/models/api_job.py | 516 ++++ .../models/api_list_experiments_response.py | 192 ++ .../models/api_list_jobs_response.py | 192 ++ .../api_list_pipeline_versions_response.py | 190 ++ .../models/api_list_pipelines_response.py | 190 ++ .../models/api_list_runs_response.py | 190 ++ .../kfp_server_api/models/api_parameter.py | 160 ++ .../models/api_periodic_schedule.py | 186 ++ .../kfp_server_api/models/api_pipeline.py | 328 +++ .../models/api_pipeline_runtime.py | 164 ++ .../models/api_pipeline_spec.py | 248 ++ .../models/api_pipeline_version.py | 302 +++ .../models/api_read_artifact_response.py | 139 + .../kfp_server_api/models/api_relationship.py | 115 + .../models/api_report_run_metrics_request.py | 164 ++ .../models/api_report_run_metrics_response.py | 134 + .../kfp_server_api/models/api_resource_key.py | 162 ++ .../models/api_resource_reference.py | 188 ++ .../models/api_resource_type.py | 118 + .../kfp_server_api/models/api_run.py | 464 ++++ .../kfp_server_api/models/api_run_detail.py | 160 ++ .../kfp_server_api/models/api_run_metric.py | 218 ++ .../kfp_server_api/models/api_status.py | 186 ++ .../kfp_server_api/models/api_trigger.py | 160 ++ .../kfp_server_api/models/api_url.py | 136 + .../models/experiment_storage_state.py | 115 + .../kfp_server_api/models/job_mode.py | 115 + .../kfp_server_api/models/protobuf_any.py | 167 ++ ...trics_response_report_run_metric_result.py | 218 ++ ...esponse_report_run_metric_result_status.py | 117 + .../models/run_metric_format.py | 115 + .../models/run_storage_state.py | 114 + .../python_http_client/kfp_server_api/rest.py | 305 +++ .../api/python_http_client/requirements.txt | 6 + backend/api/python_http_client/setup.cfg | 2 + backend/api/python_http_client/setup.py | 55 + .../python_http_client/test-requirements.txt | 3 + .../api/python_http_client/test/__init__.py | 14 + .../test/test_api_cron_schedule.py | 68 + .../test/test_api_experiment.py | 78 + .../test/test_api_get_template_response.py | 66 + .../python_http_client/test/test_api_job.py | 104 + .../test_api_list_experiments_response.py | 83 + .../test/test_api_list_jobs_response.py | 109 + ...est_api_list_pipeline_versions_response.py | 89 + 
.../test/test_api_list_pipelines_response.py | 97 + .../test/test_api_list_runs_response.py | 105 + .../test/test_api_parameter.py | 67 + .../test/test_api_periodic_schedule.py | 68 + .../test/test_api_pipeline.py | 97 + .../test/test_api_pipeline_runtime.py | 67 + .../test/test_api_pipeline_spec.py | 74 + .../test/test_api_pipeline_version.py | 84 + .../test/test_api_read_artifact_response.py | 66 + .../test/test_api_relationship.py | 65 + .../test_api_report_run_metrics_request.py | 73 + .../test_api_report_run_metrics_response.py | 72 + .../test/test_api_resource_key.py | 67 + .../test/test_api_resource_reference.py | 70 + .../test/test_api_resource_type.py | 65 + .../python_http_client/test/test_api_run.py | 100 + .../test/test_api_run_detail.py | 104 + .../test/test_api_run_metric.py | 69 + .../test/test_api_status.py | 72 + .../test/test_api_trigger.py | 73 + .../python_http_client/test/test_api_url.py | 66 + .../test/test_experiment_service_api.py | 89 + .../test/test_experiment_storage_state.py | 65 + .../python_http_client/test/test_job_mode.py | 65 + .../test/test_job_service_api.py | 89 + .../test/test_pipeline_service_api.py | 117 + .../test/test_pipeline_upload_service_api.py | 59 + .../test/test_protobuf_any.py | 67 + ...trics_response_report_run_metric_result.py | 69 + ...esponse_report_run_metric_result_status.py | 65 + .../test/test_run_metric_format.py | 65 + .../test/test_run_service_api.py | 117 + .../test/test_run_storage_state.py | 65 + backend/api/python_http_client/tox.ini | 9 + .../api/python_http_client_template/README.md | 5 + .../python_http_client_template/api.mustache | 270 ++ .../model.mustache | 246 ++ .../partial_header.mustache | 14 + backend/api/run.proto | 43 +- backend/api/swagger/experiment.swagger.json | 21 +- backend/api/swagger/job.swagger.json | 18 +- .../swagger/kfp_api_single_file.swagger.json | 136 +- backend/api/swagger/pipeline.swagger.json | 45 +- backend/api/swagger/run.swagger.json | 39 +- backend/metadata_writer/requirements.in | 2 +- backend/metadata_writer/requirements.txt | 2 +- .../metadata_writer/src/metadata_helpers.py | 13 +- .../metadata_writer/src/metadata_writer.py | 22 +- backend/requirements.in | 2 +- backend/requirements.txt | 172 +- backend/src/apiserver/main.go | 2 + backend/src/apiserver/model/BUILD.bazel | 1 + backend/src/apiserver/model/run.go | 1 + backend/src/apiserver/model/run_test.go | 51 + .../src/apiserver/server/auth_server_test.go | 23 + .../src/apiserver/server/pipeline_server.go | 7 +- .../apiserver/server/pipeline_server_test.go | 22 + backend/src/apiserver/server/util.go | 19 +- backend/src/apiserver/server/util_test.go | 28 + .../src/apiserver/storage/experiment_store.go | 2 +- .../src/apiserver/storage/pipeline_store.go | 8 +- .../apiserver/storage/pipeline_store_test.go | 83 + .../apiserver/visualization/requirements.txt | 2 + .../visualization/third_party_licenses.csv | 2 +- .../cache/deployer/deploy-cache-service.sh | 34 +- backend/src/common/util/workflow.go | 18 +- backend/test/integration/upgrade_test.go | 21 +- components/XGBoost/Predict/component.py | 45 + components/XGBoost/Predict/component.yaml | 106 + components/XGBoost/Train/component.py | 79 + components/XGBoost/Train/component.yaml | 209 ++ .../XGBoost/_samples/sample_pipeline.py | 33 + .../ApacheParquet/_samples/sample_pipeline.py | 37 + .../from_ApacheArrowFeather/component.py | 27 + .../from_ApacheArrowFeather/component.yaml | 74 + .../ApacheParquet/from_CSV/component.py | 26 + .../ApacheParquet/from_CSV/component.yaml | 72 + 
.../ApacheParquet/from_TSV/component.py | 26 + .../ApacheParquet/from_TSV/component.yaml | 72 + .../to_ApacheArrowFeather/component.py | 27 + .../to_ApacheArrowFeather/component.yaml | 75 + components/aws/sagemaker/.gitignore | 2 + components/aws/sagemaker/README.md | 45 + .../batch_transform/src/batch_transform.py | 12 +- .../codebuild/integration-test.buildspec.yml | 16 +- .../scripts/construct_environment_array.sh | 10 + .../codebuild/unit-test.buildspec.yml | 3 + components/aws/sagemaker/common/_utils.py | 48 +- components/aws/sagemaker/deploy/README.md | 2 +- components/aws/sagemaker/deploy/src/deploy.py | 18 +- .../aws/sagemaker/ground_truth/component.yaml | 4 +- .../ground_truth/src/ground_truth.py | 5 +- .../sagemaker/hyperparameter_tuning/README.md | 2 +- .../src/hyperparameter_tuning.py | 14 +- .../aws/sagemaker/model/src/create_model.py | 5 +- .../tests/integration_tests/.env.example | 15 + .../tests/integration_tests/Dockerfile | 43 + .../tests/integration_tests/README.md | 55 +- .../test_groundtruth_component.py | 87 + .../component_tests/test_train_component.py | 3 +- .../test_workteam_component.py | 84 + .../tests/integration_tests/conftest.py | 62 +- .../tests/integration_tests/environment.yml | 1 + .../tests/integration_tests/pytest.ini | 4 +- .../config/create-workteam/config.yaml | 10 + .../config/fsx-mnist-training/config.yaml | 36 + .../config.yaml | 22 + .../config/kmeans-mnist-endpoint/config.yaml | 1 + .../definition/create_endpoint_pipeline.py | 4 +- .../definition/create_model_pipeline.py | 2 +- .../definition/groundtruth_pipeline.py | 59 + .../resources/definition/hpo_pipeline.py | 2 +- .../resources/definition/training_pipeline.py | 6 +- .../definition/transform_job_pipeline.py | 4 +- .../resources/definition/workteam_pipeline.py | 36 + .../tests/integration_tests/scripts/fsx_setup | 79 + .../scripts/generate_iam_role | 68 + .../scripts/generate_trust_policy | 39 + .../scripts/run_integration_tests | 206 ++ .../tests/integration_tests/utils/__init__.py | 17 + .../utils/kfp_client_utils.py | 22 +- .../utils/sagemaker_utils.py | 43 + .../unit_tests/tests/test_batch_transform.py | 143 +- .../tests/unit_tests/tests/test_deploy.py | 161 +- .../unit_tests/tests/test_ground_truth.py | 157 +- .../tests/unit_tests/tests/test_hpo.py | 345 ++- .../tests/unit_tests/tests/test_model.py | 102 +- .../tests/unit_tests/tests/test_train.py | 44 +- .../tests/unit_tests/tests/test_workteam.py | 71 +- components/aws/sagemaker/train/README.md | 6 +- components/aws/sagemaker/train/src/train.py | 6 +- .../aws/sagemaker/workteam/src/workteam.py | 5 +- .../Chicago_Taxi_Trips/component.yaml | 41 + .../tfx/Evaluator/component.py | 0 .../tfx/Evaluator/component.yaml | 0 .../tfx/Evaluator/with_URI_IO/component.py | 0 .../tfx/Evaluator/with_URI_IO/component.yaml | 0 .../BigQueryExampleGen/component.py | 0 .../BigQueryExampleGen/component.yaml | 0 .../with_URI_IO/component.py | 0 .../with_URI_IO/component.yaml | 0 .../tfx/ExampleGen/CsvExampleGen/component.py | 0 .../ExampleGen/CsvExampleGen/component.yaml | 0 .../CsvExampleGen/with_URI_IO/component.py | 0 .../CsvExampleGen/with_URI_IO/component.yaml | 0 .../ExampleGen/ImportExampleGen/component.py | 0 .../ImportExampleGen/component.yaml | 0 .../ImportExampleGen/with_URI_IO/component.py | 0 .../with_URI_IO/component.yaml | 0 .../tfx/ExampleValidator/component.py | 0 .../tfx/ExampleValidator/component.yaml | 0 .../ExampleValidator/with_URI_IO/component.py | 0 .../with_URI_IO/component.yaml | 0 components/{ => deprecated}/tfx/README.md | 0 
.../tfx/SchemaGen/component.py | 0 .../tfx/SchemaGen/component.yaml | 0 .../tfx/SchemaGen/with_URI_IO/component.py | 0 .../tfx/SchemaGen/with_URI_IO/component.yaml | 0 .../tfx/StatisticsGen/component.py | 0 .../tfx/StatisticsGen/component.yaml | 0 .../StatisticsGen/with_URI_IO/component.py | 0 .../StatisticsGen/with_URI_IO/component.yaml | 0 .../{ => deprecated}/tfx/Trainer/component.py | 0 .../tfx/Trainer/component.yaml | 0 .../tfx/Trainer/with_URI_IO/component.py | 0 .../tfx/Trainer/with_URI_IO/component.yaml | 0 .../tfx/Transform/component.py | 0 .../tfx/Transform/component.yaml | 0 .../tfx/Transform/with_URI_IO/component.py | 0 .../tfx/Transform/with_URI_IO/component.yaml | 0 .../tfx/_samples/TFX_Dataflow_pipeline.ipynb | 0 .../tfx/_samples/TFX_pipeline.ipynb | 0 .../diagnostics/diagnose_me/component.yaml | 3 + components/gcp/bigquery/query/README.md | 2 +- components/gcp/bigquery/query/component.yaml | 2 +- components/gcp/bigquery/query/sample.ipynb | 2 +- .../kfp_component/google/dataflow/_client.py | 2 +- .../google/ml_engine/_create_job.py | 18 +- .../kfp_component/google/ml_engine/_train.py | 6 +- .../container/component_sdk/python/setup.py | 2 +- .../google/ml_engine/test__create_job.py | 21 + .../tests/google/ml_engine/test__train.py | 7 +- .../gcp/dataflow/launch_python/README.md | 2 +- .../gcp/dataflow/launch_python/component.yaml | 2 +- .../gcp/dataflow/launch_python/sample.ipynb | 2 +- .../gcp/dataflow/launch_template/README.md | 2 +- .../dataflow/launch_template/component.yaml | 2 +- .../gcp/dataflow/launch_template/sample.ipynb | 2 +- .../gcp/dataproc/create_cluster/README.md | 2 +- .../dataproc/create_cluster/component.yaml | 2 +- .../gcp/dataproc/create_cluster/sample.ipynb | 2 +- .../gcp/dataproc/delete_cluster/README.md | 2 +- .../dataproc/delete_cluster/component.yaml | 2 +- .../gcp/dataproc/delete_cluster/sample.ipynb | 2 +- .../gcp/dataproc/submit_hadoop_job/README.md | 2 +- .../dataproc/submit_hadoop_job/component.yaml | 2 +- .../dataproc/submit_hadoop_job/sample.ipynb | 2 +- .../gcp/dataproc/submit_hive_job/README.md | 2 +- .../dataproc/submit_hive_job/component.yaml | 2 +- .../gcp/dataproc/submit_hive_job/sample.ipynb | 2 +- .../gcp/dataproc/submit_pig_job/README.md | 2 +- .../dataproc/submit_pig_job/component.yaml | 2 +- .../gcp/dataproc/submit_pig_job/sample.ipynb | 2 +- .../gcp/dataproc/submit_pyspark_job/README.md | 2 +- .../submit_pyspark_job/component.yaml | 2 +- .../dataproc/submit_pyspark_job/sample.ipynb | 2 +- .../gcp/dataproc/submit_spark_job/README.md | 2 +- .../dataproc/submit_spark_job/component.yaml | 2 +- .../dataproc/submit_spark_job/sample.ipynb | 2 +- .../dataproc/submit_sparksql_job/README.md | 2 +- .../submit_sparksql_job/component.yaml | 2 +- .../dataproc/submit_sparksql_job/sample.ipynb | 2 +- .../gcp/ml_engine/batch_predict/README.md | 2 +- .../ml_engine/batch_predict/component.yaml | 2 +- .../gcp/ml_engine/batch_predict/sample.ipynb | 2 +- components/gcp/ml_engine/deploy/README.md | 2 +- .../gcp/ml_engine/deploy/component.yaml | 2 +- components/gcp/ml_engine/deploy/sample.ipynb | 2 +- components/gcp/ml_engine/train/README.md | 5 +- components/gcp/ml_engine/train/component.yaml | 9 +- components/gcp/ml_engine/train/sample.ipynb | 9 +- components/git/clone/component.yaml | 3 + .../google-cloud/storage/list/component.yaml | 3 + components/kubeflow/deployer/component.yaml | 2 +- components/kubeflow/dnntrainer/component.yaml | 2 +- components/kubeflow/kfserving/Dockerfile | 2 +- components/kubeflow/kfserving/component.yaml | 4 +- 
components/kubeflow/kfserving/src/app.py | 35 +- .../kfserving/src/kfservingdeployer.py | 301 ++- .../local/confusion_matrix/component.yaml | 2 +- components/local/roc/component.yaml | 2 +- components/pipeline_component_repository.yaml | 2 + components/release-in-place.sh | 76 + components/release.sh | 0 components/test_load_all_components.sh | 7 +- components/third_party_licenses.csv | 6 +- docs/source/kfp.client.rst | 8 + docs/source/kfp.dsl.rst | 4 + docs/source/kfp.server_api.rst | 48 + frontend/global-setup.js | 6 + frontend/mock-backend/mock-api-middleware.ts | 6 +- frontend/package-lock.json | 20 +- frontend/package.json | 1 + frontend/server/handlers/pod-logs.ts | 13 +- frontend/src/Css.tsx | 2 + frontend/src/components/ArtifactLink.tsx | 9 +- frontend/src/components/Banner.test.tsx | 29 +- frontend/src/components/Banner.tsx | 6 +- frontend/src/components/ExperimentList.tsx | 2 +- frontend/src/components/PodYaml.test.tsx | 4 +- frontend/src/components/PodYaml.tsx | 4 +- frontend/src/components/Trigger.test.tsx | 95 +- frontend/src/components/Trigger.tsx | 67 +- .../__snapshots__/Banner.test.tsx.snap | 18 + .../__snapshots__/Trigger.test.tsx.snap | 10 +- .../components/viewers/ConfusionMatrix.tsx | 48 +- .../ConfusionMatrix.test.tsx.snap | 805 ++---- .../config/sample_config_from_backend.json | 4 +- frontend/src/lib/Apis.test.ts | 6 +- frontend/src/lib/Apis.ts | 2 +- frontend/src/lib/CompareUtils.ts | 2 +- frontend/src/lib/OutputArtifactLoader.test.ts | 17 +- frontend/src/lib/OutputArtifactLoader.ts | 7 +- frontend/src/lib/TriggerUtils.test.ts | 40 + frontend/src/lib/TriggerUtils.ts | 110 +- frontend/src/lib/Utils.test.ts | 9 + frontend/src/lib/Utils.tsx | 20 + frontend/src/lib/WorkflowParser.ts | 2 +- frontend/src/pages/ExperimentList.tsx | 2 +- frontend/src/pages/GettingStarted.test.tsx | 52 +- frontend/src/pages/GettingStarted.tsx | 11 +- frontend/src/pages/NewRun.test.tsx | 49 + frontend/src/pages/NewRun.tsx | 16 +- frontend/src/pages/RunDetails.test.tsx | 147 +- frontend/src/pages/RunDetails.tsx | 214 +- frontend/src/pages/Status.tsx | 4 +- .../GettingStarted.test.tsx.snap | 32 +- .../pages/__snapshots__/NewRun.test.tsx.snap | 14 + .../__snapshots__/RunDetails.test.tsx.snap | 18 +- go.mod | 2 + go.sum | 2 + hack/check-release-needed-tools.sh | 38 + hack/release-imp.sh | 48 + hack/release.sh | 49 + .../templates/application.yaml | 59 +- .../kubeflow-pipelines/templates/argo.yaml | 2 +- .../templates/gcp_secret.yaml | 11 - .../kubeflow-pipelines/templates/minio.yaml | 10 +- .../kubeflow-pipelines/templates/mysql.yaml | 5 - .../templates/pipeline.yaml | 22 +- .../chart/kubeflow-pipelines/values.yaml | 12 +- manifests/gcp_marketplace/hack/release.sh | 30 + manifests/gcp_marketplace/schema.yaml | 100 +- .../kustomize/base/argo/kustomization.yaml | 1 - .../base/argo/minio-artifact-secret.yaml | 8 - .../argo/workflow-controller-configmap.yaml | 6 +- .../cache-deployer-clusterrolebinding.yaml | 2 +- .../cluster-scoped/cache-deployer-sa.yaml | 4 + .../cluster-scoped/kustomization.yaml | 4 + .../base/cache-deployer/kustomization.yaml | 12 +- .../base/cache/cache-deployment.yaml | 12 +- .../kustomize/base/cache/kustomization.yaml | 14 +- manifests/kustomize/base/kustomization.yaml | 125 +- .../base/metadata/kustomization.yaml | 18 +- .../metadata/metadata-envoy-deployment.yaml | 2 +- ...gmap.yaml => metadata-grpc-configmap.yaml} | 0 .../metadata/metadata-grpc-deployment.yaml | 12 +- .../kustomize/base/mysql/kustomization.yaml | 5 - .../kustomize/base/mysql/mysql-configmap.yaml | 10 - 
manifests/kustomize/base/params.env | 2 +- manifests/kustomize/base/params.yaml | 16 - .../kustomize/base/pipeline-application.yaml | 4 +- .../base/pipeline/kustomization.yaml | 18 +- .../metadata-writer/kustomization.yaml | 10 + .../metadata-writer-deployment.yaml | 0 .../metadata-writer-role.yaml | 0 .../metadata-writer-rolebinding.yaml | 0 .../metadata-writer}/metadata-writer-sa.yaml | 0 .../ml-pipeline-apiserver-deployment.yaml | 27 +- .../ml-pipeline-scheduledworkflow-role.yaml | 9 +- .../pipeline/ml-pipeline-ui-configmap.yaml | 11 + .../pipeline/ml-pipeline-ui-deployment.yaml | 20 + .../base/pipeline/ml-pipeline-ui-role.yaml | 2 - .../base/pipeline/ml-pipeline-ui-service.yaml | 6 +- .../viewer-sa.yaml} | 2 +- .../kustomization.yaml | 35 +- .../cluster-scoped-resources/namespace.yaml | 2 +- .../cluster-scoped-resources/params.env | 1 - .../cluster-scoped-resources/params.yaml | 2 - .../cloudsql-proxy-deployment.yaml | 9 +- .../gcp/cloudsql-proxy/cloudsql-proxy-sa.yaml | 4 + .../env/gcp/cloudsql-proxy/kustomization.yaml | 3 +- .../env/gcp/gcp-configurations-patch.yaml | 13 +- .../gcp-default-configmap.yaml | 8 - .../gcp/gcp-default-config/kustomization.yaml | 5 - .../env/gcp/inverse-proxy/kustomization.yaml | 4 +- .../kustomize/env/gcp/kustomization.yaml | 17 - .../gcp/minio-gcs-gateway/kustomization.yaml | 10 +- .../minio-artifact-secret.env | 2 + .../minio-gcs-gateway-deployment.yaml | 18 +- .../minio-gcs-gateway-sa.yaml | 4 + .../minio/kustomization.yaml | 7 + .../minio/minio-artifact-secret.env | 2 + .../minio/minio-deployment.yaml | 12 +- .../minio/minio-service.yaml | 5 +- .../mysql/mysql-service.yaml | 7 +- .../kustomize/gcp-workload-identity-setup.sh | 123 +- manifests/kustomize/hack/format.sh | 39 + manifests/kustomize/hack/release.sh | 42 + manifests/kustomize/sample/README.md | 21 +- .../kustomization.yaml | 10 +- .../cluster-scoped-resources/params.env | 1 - manifests/kustomize/sample/kustomization.yaml | 2 +- package-lock.json | 2283 +++++++++++++++++ package.json | 72 + proxy/attempt-register-vm-on-proxy.sh | 17 +- release/RELEASE.md | 12 - sdk/python/README.md | 2 +- sdk/python/kfp_tekton/__init__.py | 2 +- sdk/python/kfp_tekton/compiler/compiler.py | 25 +- sdk/python/kfp_tekton/compiler/main.py | 11 +- sdk/python/requirements.in | 2 +- sdk/python/requirements.txt | 2 +- sdk/python/setup.py | 6 +- sdk/python/tests/README.md | 6 +- .../tests/compiler/testdata/affinity.yaml | 2 - .../compiler/testdata/basic_no_decorator.yaml | 2 - .../compiler/testdata/big_data_passing.yaml | 166 +- .../tests/compiler/testdata/compose.yaml | 2 - .../tests/compiler/testdata/condition.yaml | 2 - .../tests/compiler/testdata/exit_handler.yaml | 2 - .../compiler/testdata/hidden_output_file.yaml | 2 - .../compiler/testdata/imagepullsecrets.yaml | 2 - .../compiler/testdata/init_container.yaml | 2 - .../testdata/input_artifact_raw_value.yaml | 2 - sdk/python/tests/compiler/testdata/katib.yaml | 2 - .../compiler/testdata/load_from_yaml.yaml | 2 - .../tests/compiler/testdata/loop_static.yaml | 2 - .../compiler/testdata/node_selector.yaml | 2 - .../compiler/testdata/parallel_join.yaml | 2 - .../parallel_join_with_argo_vars.yaml | 2 - .../parallel_join_with_artifacts.yaml | 2 - .../testdata/pipeline_transformers.yaml | 2 - .../compiler/testdata/pipelineparams.yaml | 2 - .../compiler/testdata/resourceop_basic.yaml | 2 - sdk/python/tests/compiler/testdata/retry.yaml | 2 - .../tests/compiler/testdata/sequential.yaml | 2 - .../tests/compiler/testdata/sidecar.yaml | 2 - 
.../tests/compiler/testdata/timeout.yaml | 2 - .../tests/compiler/testdata/tolerations.yaml | 2 - .../tests/compiler/testdata/volume.yaml | 2 - .../tests/compiler/testdata/volume_op.yaml | 2 - .../compiler/testdata/volume_snapshot_op.yaml | 2 - .../compiler/testdata/withitem_nested.yaml | 2 - sdk/python/tests/test_kfp_samples.sh | 4 +- test/cloudbuild/batch_build.yaml | 5 + test/deploy-cluster.sh | 2 +- test/deploy-pipeline-lite.sh | 3 +- ...stsubmit-tests-with-pipeline-deployment.sh | 5 +- test/presubmit-backend-test.sh | 27 + test/presubmit-tests.gke.sh | 65 - test/sample-test/requirements.in | 2 +- test/sample-test/requirements.txt | 168 +- test/sample-test/sample_test_launcher.py | 10 +- tools/bazel_builder/BUILD | 17 + tools/benchmarks/pipeline_service_api.ipynb | 16 +- tools/benchmarks/run_service_api.ipynb | 20 +- 557 files changed, 29674 insertions(+), 2362 deletions(-) create mode 100644 backend/api/python_http_client/.gitignore create mode 100644 backend/api/python_http_client/.gitlab-ci.yml create mode 100644 backend/api/python_http_client/.openapi-generator-ignore create mode 100644 backend/api/python_http_client/.openapi-generator/VERSION create mode 100644 backend/api/python_http_client/.travis.yml create mode 100644 backend/api/python_http_client/LICENSE create mode 100644 backend/api/python_http_client/README.md create mode 100644 backend/api/python_http_client/docs/ApiCronSchedule.md create mode 100644 backend/api/python_http_client/docs/ApiExperiment.md create mode 100644 backend/api/python_http_client/docs/ApiGetTemplateResponse.md create mode 100644 backend/api/python_http_client/docs/ApiJob.md create mode 100644 backend/api/python_http_client/docs/ApiListExperimentsResponse.md create mode 100644 backend/api/python_http_client/docs/ApiListJobsResponse.md create mode 100644 backend/api/python_http_client/docs/ApiListPipelineVersionsResponse.md create mode 100644 backend/api/python_http_client/docs/ApiListPipelinesResponse.md create mode 100644 backend/api/python_http_client/docs/ApiListRunsResponse.md create mode 100644 backend/api/python_http_client/docs/ApiParameter.md create mode 100644 backend/api/python_http_client/docs/ApiPeriodicSchedule.md create mode 100644 backend/api/python_http_client/docs/ApiPipeline.md create mode 100644 backend/api/python_http_client/docs/ApiPipelineRuntime.md create mode 100644 backend/api/python_http_client/docs/ApiPipelineSpec.md create mode 100644 backend/api/python_http_client/docs/ApiPipelineVersion.md create mode 100644 backend/api/python_http_client/docs/ApiReadArtifactResponse.md create mode 100644 backend/api/python_http_client/docs/ApiRelationship.md create mode 100644 backend/api/python_http_client/docs/ApiReportRunMetricsRequest.md create mode 100644 backend/api/python_http_client/docs/ApiReportRunMetricsResponse.md create mode 100644 backend/api/python_http_client/docs/ApiResourceKey.md create mode 100644 backend/api/python_http_client/docs/ApiResourceReference.md create mode 100644 backend/api/python_http_client/docs/ApiResourceType.md create mode 100644 backend/api/python_http_client/docs/ApiRun.md create mode 100644 backend/api/python_http_client/docs/ApiRunDetail.md create mode 100644 backend/api/python_http_client/docs/ApiRunMetric.md create mode 100644 backend/api/python_http_client/docs/ApiStatus.md create mode 100644 backend/api/python_http_client/docs/ApiTrigger.md create mode 100644 backend/api/python_http_client/docs/ApiUrl.md create mode 100644 backend/api/python_http_client/docs/ExperimentServiceApi.md create 
mode 100644 backend/api/python_http_client/docs/ExperimentStorageState.md create mode 100644 backend/api/python_http_client/docs/JobMode.md create mode 100644 backend/api/python_http_client/docs/JobServiceApi.md create mode 100644 backend/api/python_http_client/docs/PipelineServiceApi.md create mode 100644 backend/api/python_http_client/docs/PipelineUploadServiceApi.md create mode 100644 backend/api/python_http_client/docs/ProtobufAny.md create mode 100644 backend/api/python_http_client/docs/ReportRunMetricsResponseReportRunMetricResult.md create mode 100644 backend/api/python_http_client/docs/ReportRunMetricsResponseReportRunMetricResultStatus.md create mode 100644 backend/api/python_http_client/docs/RunMetricFormat.md create mode 100644 backend/api/python_http_client/docs/RunServiceApi.md create mode 100644 backend/api/python_http_client/docs/RunStorageState.md create mode 100644 backend/api/python_http_client/git_push.sh create mode 100644 backend/api/python_http_client/kfp_server_api/__init__.py create mode 100644 backend/api/python_http_client/kfp_server_api/api/__init__.py create mode 100644 backend/api/python_http_client/kfp_server_api/api/experiment_service_api.py create mode 100644 backend/api/python_http_client/kfp_server_api/api/job_service_api.py create mode 100644 backend/api/python_http_client/kfp_server_api/api/pipeline_service_api.py create mode 100644 backend/api/python_http_client/kfp_server_api/api/pipeline_upload_service_api.py create mode 100644 backend/api/python_http_client/kfp_server_api/api/run_service_api.py create mode 100644 backend/api/python_http_client/kfp_server_api/api_client.py create mode 100644 backend/api/python_http_client/kfp_server_api/configuration.py create mode 100644 backend/api/python_http_client/kfp_server_api/exceptions.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/__init__.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_cron_schedule.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_experiment.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_get_template_response.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_job.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_list_experiments_response.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_list_jobs_response.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_list_pipeline_versions_response.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_list_pipelines_response.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_list_runs_response.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_parameter.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_periodic_schedule.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_pipeline.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_pipeline_runtime.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_pipeline_spec.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_pipeline_version.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_read_artifact_response.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_relationship.py create mode 100644 
backend/api/python_http_client/kfp_server_api/models/api_report_run_metrics_request.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_report_run_metrics_response.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_resource_key.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_resource_reference.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_resource_type.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_run.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_run_detail.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_run_metric.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_status.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_trigger.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/api_url.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/experiment_storage_state.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/job_mode.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/protobuf_any.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/report_run_metrics_response_report_run_metric_result.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/report_run_metrics_response_report_run_metric_result_status.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/run_metric_format.py create mode 100644 backend/api/python_http_client/kfp_server_api/models/run_storage_state.py create mode 100644 backend/api/python_http_client/kfp_server_api/rest.py create mode 100644 backend/api/python_http_client/requirements.txt create mode 100644 backend/api/python_http_client/setup.cfg create mode 100644 backend/api/python_http_client/setup.py create mode 100644 backend/api/python_http_client/test-requirements.txt create mode 100644 backend/api/python_http_client/test/__init__.py create mode 100644 backend/api/python_http_client/test/test_api_cron_schedule.py create mode 100644 backend/api/python_http_client/test/test_api_experiment.py create mode 100644 backend/api/python_http_client/test/test_api_get_template_response.py create mode 100644 backend/api/python_http_client/test/test_api_job.py create mode 100644 backend/api/python_http_client/test/test_api_list_experiments_response.py create mode 100644 backend/api/python_http_client/test/test_api_list_jobs_response.py create mode 100644 backend/api/python_http_client/test/test_api_list_pipeline_versions_response.py create mode 100644 backend/api/python_http_client/test/test_api_list_pipelines_response.py create mode 100644 backend/api/python_http_client/test/test_api_list_runs_response.py create mode 100644 backend/api/python_http_client/test/test_api_parameter.py create mode 100644 backend/api/python_http_client/test/test_api_periodic_schedule.py create mode 100644 backend/api/python_http_client/test/test_api_pipeline.py create mode 100644 backend/api/python_http_client/test/test_api_pipeline_runtime.py create mode 100644 backend/api/python_http_client/test/test_api_pipeline_spec.py create mode 100644 backend/api/python_http_client/test/test_api_pipeline_version.py create mode 100644 backend/api/python_http_client/test/test_api_read_artifact_response.py create mode 100644 backend/api/python_http_client/test/test_api_relationship.py create 
mode 100644 backend/api/python_http_client/test/test_api_report_run_metrics_request.py create mode 100644 backend/api/python_http_client/test/test_api_report_run_metrics_response.py create mode 100644 backend/api/python_http_client/test/test_api_resource_key.py create mode 100644 backend/api/python_http_client/test/test_api_resource_reference.py create mode 100644 backend/api/python_http_client/test/test_api_resource_type.py create mode 100644 backend/api/python_http_client/test/test_api_run.py create mode 100644 backend/api/python_http_client/test/test_api_run_detail.py create mode 100644 backend/api/python_http_client/test/test_api_run_metric.py create mode 100644 backend/api/python_http_client/test/test_api_status.py create mode 100644 backend/api/python_http_client/test/test_api_trigger.py create mode 100644 backend/api/python_http_client/test/test_api_url.py create mode 100644 backend/api/python_http_client/test/test_experiment_service_api.py create mode 100644 backend/api/python_http_client/test/test_experiment_storage_state.py create mode 100644 backend/api/python_http_client/test/test_job_mode.py create mode 100644 backend/api/python_http_client/test/test_job_service_api.py create mode 100644 backend/api/python_http_client/test/test_pipeline_service_api.py create mode 100644 backend/api/python_http_client/test/test_pipeline_upload_service_api.py create mode 100644 backend/api/python_http_client/test/test_protobuf_any.py create mode 100644 backend/api/python_http_client/test/test_report_run_metrics_response_report_run_metric_result.py create mode 100644 backend/api/python_http_client/test/test_report_run_metrics_response_report_run_metric_result_status.py create mode 100644 backend/api/python_http_client/test/test_run_metric_format.py create mode 100644 backend/api/python_http_client/test/test_run_service_api.py create mode 100644 backend/api/python_http_client/test/test_run_storage_state.py create mode 100644 backend/api/python_http_client/tox.ini create mode 100644 backend/api/python_http_client_template/README.md create mode 100644 backend/api/python_http_client_template/api.mustache create mode 100644 backend/api/python_http_client_template/model.mustache create mode 100644 backend/api/python_http_client_template/partial_header.mustache create mode 100644 backend/src/apiserver/model/run_test.go create mode 100644 components/XGBoost/Predict/component.py create mode 100644 components/XGBoost/Predict/component.yaml create mode 100644 components/XGBoost/Train/component.py create mode 100644 components/XGBoost/Train/component.yaml create mode 100644 components/XGBoost/_samples/sample_pipeline.py create mode 100644 components/_converters/ApacheParquet/_samples/sample_pipeline.py create mode 100644 components/_converters/ApacheParquet/from_ApacheArrowFeather/component.py create mode 100644 components/_converters/ApacheParquet/from_ApacheArrowFeather/component.yaml create mode 100644 components/_converters/ApacheParquet/from_CSV/component.py create mode 100644 components/_converters/ApacheParquet/from_CSV/component.yaml create mode 100644 components/_converters/ApacheParquet/from_TSV/component.py create mode 100644 components/_converters/ApacheParquet/from_TSV/component.yaml create mode 100644 components/_converters/ApacheParquet/to_ApacheArrowFeather/component.py create mode 100644 components/_converters/ApacheParquet/to_ApacheArrowFeather/component.yaml create mode 100644 components/aws/sagemaker/.gitignore create mode 100644 components/aws/sagemaker/README.md create mode 100755 
components/aws/sagemaker/codebuild/scripts/construct_environment_array.sh create mode 100644 components/aws/sagemaker/tests/integration_tests/.env.example create mode 100644 components/aws/sagemaker/tests/integration_tests/Dockerfile create mode 100644 components/aws/sagemaker/tests/integration_tests/component_tests/test_groundtruth_component.py create mode 100644 components/aws/sagemaker/tests/integration_tests/component_tests/test_workteam_component.py create mode 100644 components/aws/sagemaker/tests/integration_tests/resources/config/create-workteam/config.yaml create mode 100644 components/aws/sagemaker/tests/integration_tests/resources/config/fsx-mnist-training/config.yaml create mode 100644 components/aws/sagemaker/tests/integration_tests/resources/config/image-classification-groundtruth/config.yaml create mode 100644 components/aws/sagemaker/tests/integration_tests/resources/definition/groundtruth_pipeline.py create mode 100644 components/aws/sagemaker/tests/integration_tests/resources/definition/workteam_pipeline.py create mode 100755 components/aws/sagemaker/tests/integration_tests/scripts/fsx_setup create mode 100755 components/aws/sagemaker/tests/integration_tests/scripts/generate_iam_role create mode 100755 components/aws/sagemaker/tests/integration_tests/scripts/generate_trust_policy create mode 100755 components/aws/sagemaker/tests/integration_tests/scripts/run_integration_tests create mode 100644 components/datasets/Chicago_Taxi_Trips/component.yaml rename components/{ => deprecated}/tfx/Evaluator/component.py (100%) rename components/{ => deprecated}/tfx/Evaluator/component.yaml (100%) rename components/{ => deprecated}/tfx/Evaluator/with_URI_IO/component.py (100%) rename components/{ => deprecated}/tfx/Evaluator/with_URI_IO/component.yaml (100%) rename components/{ => deprecated}/tfx/ExampleGen/BigQueryExampleGen/component.py (100%) rename components/{ => deprecated}/tfx/ExampleGen/BigQueryExampleGen/component.yaml (100%) rename components/{ => deprecated}/tfx/ExampleGen/BigQueryExampleGen/with_URI_IO/component.py (100%) rename components/{ => deprecated}/tfx/ExampleGen/BigQueryExampleGen/with_URI_IO/component.yaml (100%) rename components/{ => deprecated}/tfx/ExampleGen/CsvExampleGen/component.py (100%) rename components/{ => deprecated}/tfx/ExampleGen/CsvExampleGen/component.yaml (100%) rename components/{ => deprecated}/tfx/ExampleGen/CsvExampleGen/with_URI_IO/component.py (100%) rename components/{ => deprecated}/tfx/ExampleGen/CsvExampleGen/with_URI_IO/component.yaml (100%) rename components/{ => deprecated}/tfx/ExampleGen/ImportExampleGen/component.py (100%) rename components/{ => deprecated}/tfx/ExampleGen/ImportExampleGen/component.yaml (100%) rename components/{ => deprecated}/tfx/ExampleGen/ImportExampleGen/with_URI_IO/component.py (100%) rename components/{ => deprecated}/tfx/ExampleGen/ImportExampleGen/with_URI_IO/component.yaml (100%) rename components/{ => deprecated}/tfx/ExampleValidator/component.py (100%) rename components/{ => deprecated}/tfx/ExampleValidator/component.yaml (100%) rename components/{ => deprecated}/tfx/ExampleValidator/with_URI_IO/component.py (100%) rename components/{ => deprecated}/tfx/ExampleValidator/with_URI_IO/component.yaml (100%) rename components/{ => deprecated}/tfx/README.md (100%) rename components/{ => deprecated}/tfx/SchemaGen/component.py (100%) rename components/{ => deprecated}/tfx/SchemaGen/component.yaml (100%) rename components/{ => deprecated}/tfx/SchemaGen/with_URI_IO/component.py (100%) rename components/{ => 
deprecated}/tfx/SchemaGen/with_URI_IO/component.yaml (100%) rename components/{ => deprecated}/tfx/StatisticsGen/component.py (100%) rename components/{ => deprecated}/tfx/StatisticsGen/component.yaml (100%) rename components/{ => deprecated}/tfx/StatisticsGen/with_URI_IO/component.py (100%) rename components/{ => deprecated}/tfx/StatisticsGen/with_URI_IO/component.yaml (100%) rename components/{ => deprecated}/tfx/Trainer/component.py (100%) rename components/{ => deprecated}/tfx/Trainer/component.yaml (100%) rename components/{ => deprecated}/tfx/Trainer/with_URI_IO/component.py (100%) rename components/{ => deprecated}/tfx/Trainer/with_URI_IO/component.yaml (100%) rename components/{ => deprecated}/tfx/Transform/component.py (100%) rename components/{ => deprecated}/tfx/Transform/component.yaml (100%) rename components/{ => deprecated}/tfx/Transform/with_URI_IO/component.py (100%) rename components/{ => deprecated}/tfx/Transform/with_URI_IO/component.yaml (100%) rename components/{ => deprecated}/tfx/_samples/TFX_Dataflow_pipeline.ipynb (100%) rename components/{ => deprecated}/tfx/_samples/TFX_pipeline.ipynb (100%) create mode 100644 components/pipeline_component_repository.yaml create mode 100755 components/release-in-place.sh mode change 100755 => 100644 components/release.sh create mode 100644 docs/source/kfp.server_api.rst create mode 100644 frontend/global-setup.js create mode 100755 hack/check-release-needed-tools.sh create mode 100755 hack/release-imp.sh create mode 100755 hack/release.sh delete mode 100644 manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/gcp_secret.yaml create mode 100755 manifests/gcp_marketplace/hack/release.sh delete mode 100644 manifests/kustomize/base/argo/minio-artifact-secret.yaml create mode 100644 manifests/kustomize/base/cache-deployer/cluster-scoped/cache-deployer-sa.yaml rename manifests/kustomize/base/metadata/{metadata-configmap.yaml => metadata-grpc-configmap.yaml} (100%) delete mode 100644 manifests/kustomize/base/mysql/kustomization.yaml delete mode 100644 manifests/kustomize/base/mysql/mysql-configmap.yaml create mode 100644 manifests/kustomize/base/pipeline/metadata-writer/kustomization.yaml rename manifests/kustomize/base/{metadata => pipeline/metadata-writer}/metadata-writer-deployment.yaml (100%) rename manifests/kustomize/base/{metadata => pipeline/metadata-writer}/metadata-writer-role.yaml (100%) rename manifests/kustomize/base/{metadata => pipeline/metadata-writer}/metadata-writer-rolebinding.yaml (100%) rename manifests/kustomize/base/{metadata => pipeline/metadata-writer}/metadata-writer-sa.yaml (100%) create mode 100644 manifests/kustomize/base/pipeline/ml-pipeline-ui-configmap.yaml rename manifests/kustomize/base/{cache-deployer/cache-deployer-sa.yaml => pipeline/viewer-sa.yaml} (51%) delete mode 100644 manifests/kustomize/cluster-scoped-resources/params.env create mode 100644 manifests/kustomize/env/gcp/cloudsql-proxy/cloudsql-proxy-sa.yaml delete mode 100644 manifests/kustomize/env/gcp/gcp-default-config/gcp-default-configmap.yaml delete mode 100644 manifests/kustomize/env/gcp/gcp-default-config/kustomization.yaml create mode 100644 manifests/kustomize/env/gcp/minio-gcs-gateway/minio-artifact-secret.env create mode 100644 manifests/kustomize/env/gcp/minio-gcs-gateway/minio-gcs-gateway-sa.yaml create mode 100644 manifests/kustomize/env/platform-agnostic/minio/minio-artifact-secret.env create mode 100755 manifests/kustomize/hack/format.sh create mode 100755 manifests/kustomize/hack/release.sh delete mode 100644 
manifests/kustomize/sample/cluster-scoped-resources/params.env create mode 100644 package-lock.json create mode 100644 package.json delete mode 100644 release/RELEASE.md create mode 100755 test/presubmit-backend-test.sh delete mode 100755 test/presubmit-tests.gke.sh create mode 100644 tools/bazel_builder/BUILD diff --git a/.cloudbuild.yaml b/.cloudbuild.yaml index 9acdb403e..7d9ebf3c0 100644 --- a/.cloudbuild.yaml +++ b/.cloudbuild.yaml @@ -48,23 +48,28 @@ steps: waitFor: ['preparePythonComponentSDK'] # Build the pipeline system images -- name: 'debian' - entrypoint: '/bin/bash' - args: ['-c', 'sed -i -e "s/ARG DATE/ENV DATE \"$(date -u)\"/" /workspace/frontend/Dockerfile'] - id: 'prepareFrontend' - waitFor: ["-"] - name: 'gcr.io/cloud-builders/docker' - args: ['build', '-t', 'gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA', - '--build-arg', 'COMMIT_HASH=$COMMIT_SHA', - '--build-arg', 'TAG_NAME=$TAG_NAME', - '-f', '/workspace/frontend/Dockerfile', '/workspace'] + entrypoint: /bin/bash + args: + - -ceux + - | + sed -i -e "s/ARG DATE/ENV DATE \"$(date -u)\"/" /workspace/frontend/Dockerfile + docker build -t gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA \ + --build-arg COMMIT_HASH=$COMMIT_SHA \ + --build-arg TAG_NAME="$(cat /workspace/VERSION)" \ + -f /workspace/frontend/Dockerfile \ + /workspace id: 'buildFrontend' - waitFor: ['prepareFrontend'] + waitFor: ['-'] - name: 'gcr.io/cloud-builders/docker' - args: ['build', '-t', 'gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA', - '--build-arg', 'COMMIT_SHA=$COMMIT_SHA', - '--build-arg', 'TAG_NAME=$TAG_NAME', - '-f', '/workspace/backend/Dockerfile', '/workspace'] + entrypoint: /bin/bash + args: + - -ceux + - | + docker build -t gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA \ + --build-arg COMMIT_SHA=$COMMIT_SHA \ + --build-arg TAG_NAME="$(cat /workspace/VERSION)" \ + -f /workspace/backend/Dockerfile /workspace id: 'buildApiServer' waitFor: ['copyPythonSDK'] @@ -228,7 +233,8 @@ steps: - -ceux - | # Parse major minor version and save to a file for reusing in other steps. - cat /workspace/VERSION | sed -e "s#[^0-9]*\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)#\1.\2#" > /workspace/mm.ver + # e.g. 1.0.0-rc.1 and 1.0.1 are parsed as 1.0 + cat /workspace/VERSION | sed -e "s#\([0-9]\+[.][0-9]\+\)[.].*#\1#" > /workspace/mm.ver # Tag for Hosted - Tag to hosted folder with MKP friendly name - id: 'tagForHosted' @@ -288,3 +294,5 @@ timeout: '3600s' options: diskSizeGb: 300 machineType: 'N1_HIGHCPU_8' +tags: +- build-each-commit diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index bd177f445..f768b5804 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -9,3 +9,23 @@ Resolves # * Tekton Version (use `tkn version`): * Kubernetes Version (use `kubectl version`): * OS (e.g. from `/etc/os-release`): + +**Checklist:** +- [ ] The title for your pull request (PR) should follow our title convention. [Learn more about the pull request title convention used in this repository](https://github.com/kubeflow/pipelines/blob/master/CONTRIBUTING.md#pull-request-title-convention). + + PR titles examples: + * `fix(frontend): fixes empty page. Fixes #1234` + Use `fix` to indicate that this PR fixes a bug. + * `feat(backend): configurable service account. Fixes #1234, fixes #1235` + Use `feat` to indicate that this PR adds a new feature. + * `chore: set up changelog generation tools` + Use `chore` to indicate that this PR makes some changes that users don't need to know. + * `test: fix CI failure. 
Part of #1234` + Use `part of` to indicate that a PR is working on an issue, but shouldn't close the issue when merged. + +- [ ] Do you want this pull request (PR) cherry-picked into the current release branch? + + If yes, use one of the following options: + + * **(Recommended.)** Ask the PR approver to add the `cherrypick-approved` label to this PR. The release manager adds this PR to the release branch in a batch update. + * After this PR is merged, create a cherry-pick PR to add these changes to the release branch. (For more information about creating a cherry-pick PR, see the [Kubeflow Pipelines release guide](https://github.com/kubeflow/pipelines/blob/master/RELEASE.md#option--git-cherry-pick).) diff --git a/.gitignore b/.gitignore index 7169c3041..ee2814caf 100644 --- a/.gitignore +++ b/.gitignore @@ -94,3 +94,7 @@ _artifacts # Generated Python SDK documentation docs/_build + +# sed backups +*.bak + diff --git a/.release.cloudbuild.yaml b/.release.cloudbuild.yaml index 886109c20..43a91aa43 100644 --- a/.release.cloudbuild.yaml +++ b/.release.cloudbuild.yaml @@ -22,7 +22,40 @@ steps: - -ceux - | # Parse major minor version and save to a file for reusing in other steps. - echo $TAG_NAME | sed -e "s#[^0-9]*\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)#\1.\2#" > /workspace/mm.ver + # e.g. 1.0.0-rc.1 and 1.0.1 are parsed as 1.0 + echo $TAG_NAME | sed -e "s#\([0-9]\+[.][0-9]\+\)[.].*#\1#" > /workspace/mm.ver + +# Pull and retag images for pipeline components +- id: 'retagComponentImages' + name: 'gcr.io/cloud-builders/docker' + entrypoint: bash + waitFor: ['-'] + args: + - -ceux + - | + images=( + "ml-pipeline-kubeflow-deployer" + "ml-pipeline-kubeflow-tf-trainer" + "ml-pipeline-kubeflow-tf-trainer-gpu" + "ml-pipeline-kubeflow-tfjob" + "ml-pipeline-dataproc-analyze" + "ml-pipeline-dataproc-create-cluster" + "ml-pipeline-dataproc-delete-cluster" + "ml-pipeline-dataproc-predict" + "ml-pipeline-dataproc-transform" + "ml-pipeline-dataproc-train" + "ml-pipeline-local-confusion-matrix" + "ml-pipeline-local-roc" + "ml-pipeline-gcp" + ) + for image in "${images[@]}" + do + from_image="gcr.io/$PROJECT_ID/$image:$COMMIT_SHA" + target_image="gcr.io/ml-pipeline/$image:$TAG_NAME" + docker pull $from_image + docker tag $from_image $target_image + docker push $target_image + done # Pull and retag the images for the pipeline system - name: 'gcr.io/cloud-builders/docker' @@ -348,6 +381,14 @@ steps: - name: 'gcr.io/cloud-builders/docker' args: ['pull', 'gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA'] id: 'pullMetadataEnvoy' +- id: 'tagMetadataEnvoyVersionNumber' + name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA', 'gcr.io/ml-pipeline/metadata-envoy:$TAG_NAME'] + waitFor: ['pullMetadataEnvoy'] +- id: 'tagMetadataEnvoyCommitSHA' + name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA', 'gcr.io/ml-pipeline/metadata-envoy:$COMMIT_SHA'] + waitFor: ['pullMetadataEnvoy'] - name: 'gcr.io/cloud-builders/docker' args: ['tag', 'gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/metadataenvoy:$TAG_NAME'] id: 'tagMetadataEnvoyForMarketplace' @@ -565,6 +606,8 @@ images: - 'gcr.io/ml-pipeline/inverse-proxy-agent:$COMMIT_SHA' - 'gcr.io/ml-pipeline/visualization-server:$TAG_NAME' - 'gcr.io/ml-pipeline/visualization-server:$COMMIT_SHA' +- 'gcr.io/ml-pipeline/metadata-envoy:$TAG_NAME' +- 'gcr.io/ml-pipeline/metadata-envoy:$COMMIT_SHA' - 'gcr.io/ml-pipeline/metadata-writer:$TAG_NAME' - 
'gcr.io/ml-pipeline/metadata-writer:$COMMIT_SHA' - 'gcr.io/ml-pipeline/cache-server:$TAG_NAME' @@ -606,3 +649,5 @@ images: - 'gcr.io/ml-pipeline/google/pipelines-test/deployer:$TAG_NAME' - 'gcr.io/ml-pipeline/google/pipelines-test:$TAG_NAME' timeout: '1200s' +tags: +- release-on-tag diff --git a/.travis.yml b/.travis.yml index 228987c52..a40cea4e5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -80,12 +80,15 @@ matrix: # # Visualization test dependencies # - cd $TRAVIS_BUILD_DIR/backend/src/apiserver/visualization # - pip3 install -r requirements-test.txt +# # Using Argo to lint all compiled workflows +# - export LOCAL_BIN="${HOME}/.local/bin" +# - mkdir -p "$LOCAL_BIN" +# - export PATH="${PATH}:$LOCAL_BIN" # Unnecessary - Travis already has it in PATH +# - wget --quiet -O "${LOCAL_BIN}/argo" https://github.com/argoproj/argo/releases/download/v2.4.3/argo-linux-amd64 && chmod +x "${LOCAL_BIN}/argo" # script: &1 # DSL tests # - cd $TRAVIS_BUILD_DIR/sdk/python # - python3 -m pip install -e . # - cd $TRAVIS_BUILD_DIR # Changing the current directory to the repo root for correct coverall paths -# - coverage run --source=kfp --append sdk/python/tests/dsl/main.py -# - coverage run --source=kfp --append sdk/python/tests/compiler/main.py # - coverage run --source=kfp --append -m unittest discover --verbose --start-dir sdk/python/tests --top-level-directory=sdk/python # #- coveralls # @@ -101,6 +104,9 @@ matrix: # - git clone https://github.com/tensorflow/tfx.git # - cd $TRAVIS_BUILD_DIR/tfx # - pip3 install --upgrade pip +# - pip3 install --upgrade 'numpy>=1.16,<1.17' +# # Specify transitive dependency to get around: https://github.com/pypa/pip/issues/8583 +# - pip3 install --upgrade 'google-auth>=1.18.0' # - set -x # - set -e # - python3 setup.py bdist_wheel diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 5a8a58d22..437aad932 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -12,6 +12,11 @@ just a few small guidelines you need to follow. - [Development Guidelines](#development-guidelines) - [Coding Style](#coding-style) - [Code Reviews](#code-reviews) + - [Pull Requests](#pull-requests) + - [Pull Request Title Convention](#pull-request-title-convention) + - [PR Title Structure](#pr-title-structure) + - [PR Type](#pr-type) + - [PR Scope](#pr-scope) - [Get Involved](#get-involved) @@ -52,7 +57,9 @@ use GitHub pull requests for this purpose. Consult [GitHub Help](https://help.github.com/articles/about-pull-requests/) for more information on using pull requests. -The following should be viewed as Best Practices unless you know better ones +## Pull Requests + +The following should be viewed as _Best Practices_ unless you know better ones (please submit a guidelines PR). | Practice | Rationale | @@ -62,6 +69,79 @@ The following should be viewed as Best Practices unless you know better ones | Refine commit messages | Your commit messages should be in the imperative tense and clearly describe your feature upon first glance. See [this article](https://chris.beams.io/posts/git-commit/) for guidelines. | Reference an issue | Issues are a great way to gather design feedback from the community. To save yourself time on a controversial PR, first cut an issue for any major feature work. | +## Pull Request Title Convention + +We enforce a pull request (PR) title convention to quickly indicate the type and scope of a PR. +PR titles become commit messages when PRs are merged. We also parse PR titles to generate the changelog. + +PR titles should: +* Provide a user-friendly description of the change. 
+* Follow the [Conventional Commits specification](https://www.conventionalcommits.org/en/v1.0.0/).
+* Specify the issue(s) fixed or worked on at the end of the title.
+
+Examples:
+* `fix(ui): fixes empty page. Fixes #1234`
+* `feat(backend): configurable service account. Fixes #1234, fixes #1235`
+* `chore: refactor some files`
+* `test: fix CI failure. Part of #1234`
+
+The following sections describe the details of the PR title convention.
+
+### PR Title Structure
+
+PR titles should use the following structure:
+```
+<type>[optional scope]: <description> [ Fixes #<issue-number>]
+```
+
+Replace the following:
+
+* **`<type>`**: The PR type describes the reason for the change, such as `fix` to indicate that the PR fixes a bug. More information about PR types is available in the next section.
+* **`[optional scope]`**: (Optional.) The PR scope describes the part of Kubeflow Pipelines that this PR changes, such as `frontend` to indicate that the change affects the user interface. Choose a scope according to the [PR Scope section](#pr-scope).
+* **`<description>`**: A user-friendly description of this change.
+* **`[ Fixes #<issue-number>]`**: (Optional.) Specifies the issue(s) fixed by this PR.
+
+### PR Type
+
+Type can be one of the following:
+* **feat**: A new feature.
+* **fix**: A bug fix. However, a PR that fixes test infrastructure is not user facing, so it should use the **test** type instead.
+* **docs**: Documentation changes.
+* **chore**: Anything else that does not need to be user facing.
+* **test**: Adding or updating tests only. Please note, **feat** and **fix** PRs should have related tests too.
+* **refactor**: A code change that neither fixes a bug nor adds a feature.
+* **perf**: A code change that improves performance.
+
+Note that only **feat**, **fix** and **perf** type PRs are included in the CHANGELOG, because they are user facing.
+
+If you think the PR contains multiple types, you can choose the major one or
+split the PR into focused sub-PRs.
+
+If you are not sure which type your PR is and it does not have user impact,
+use `chore` as the fallback.
+
+### PR Scope
+
+Scope is optional; it can be one of the following:
+* **frontend**: user interface or frontend server related, folder `frontend`, `frontend/server`
+* **backend**: Backend, folder `backend`
+* **sdk**: `kfp` python package, folder `sdk`
+* **sdk/client**: `kfp-server-api` python package, folder `backend/api/python_http_client`
+* **components**: Pipeline components, folder `components`
+* **deployment**: Kustomize or GCP Marketplace manifests, folder `manifests`
+* **metadata**: Related to machine learning metadata (MLMD), folder `backend/metadata_writer`
+* **cache**: Caching, folder `backend/src/cache`
+* **swf**: Scheduled workflow, folder `backend/src/crd/controller/scheduledworkflow`
+* **viewer**: Tensorboard viewer, folder `backend/src/crd/controller/viewer`
+
+If you think the PR is related to multiple scopes, you can choose the major one or
+split the PR into focused sub-PRs. Note that splitting large PRs that affect multiple
+scopes can make it easier to get your PR reviewed, since different scopes
+usually have different reviewers.
+
+If you are not sure, or the PR doesn't fit into the above scopes, you can either
+omit the scope (it is optional) or propose an additional scope here.
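As an aside, the convention above is regular enough to check mechanically. The following is a minimal sketch of such a check; it is not part of this patch or the repository's tooling. The type list is copied from the PR Type section above, while the scope pattern and the sample titles are assumptions for illustration only.

```python
import re

# Hypothetical checker for the PR title convention described above; not part of this patch.
# Types come from the "PR Type" section; the scope pattern is a loose assumption.
TITLE_RE = re.compile(
    r"^(feat|fix|docs|chore|test|refactor|perf)"  # <type>
    r"(\([a-z0-9/_-]+\))?"                        # [optional scope], e.g. (frontend)
    r": .+"                                       # <description> [ Fixes #<issue-number>]
)

def follows_convention(title: str) -> bool:
    return TITLE_RE.match(title) is not None

assert follows_convention("feat(backend): configurable service account. Fixes #1234")
assert follows_convention("chore: set up changelog generation tools")
assert not follows_convention("Fixed the empty page bug")
```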
+ ## Get Involved * [Slack](http://kubeflow.slack.com/) diff --git a/VERSION b/VERSION index 4b9fcbec1..0d91a54c7 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -0.5.1 +0.3.0 diff --git a/backend/Dockerfile b/backend/Dockerfile index 7595f05c6..b51a0dc41 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -32,7 +32,7 @@ RUN go build -o /go/src/github.com/kubeflow/pipelines/bazel-bin/backend/src/apis # Compile FROM python:3.5 as compiler RUN apt-get update -y && \ - apt-get install --no-install-recommends -y -q default-jdk python3-setuptools python3-dev + apt-get install --no-install-recommends -y -q default-jdk python3-setuptools python3-dev jq RUN wget https://bootstrap.pypa.io/get-pip.py && python3 get-pip.py COPY backend/requirements.txt . RUN python3 -m pip install -r requirements.txt @@ -47,23 +47,18 @@ COPY sdk sdk WORKDIR /go/src/github.com/kubeflow/pipelines/sdk/python RUN python3 setup.py install -WORKDIR /samples -COPY ./samples . +WORKDIR / +COPY ./samples /samples +COPY backend/src/apiserver/config/sample_config.json /samples/ -# We need to check that all samples have been compiled without error. -# For find program, the -exec argument is a filter predicate just like -name. It -# only affects whether the file is "found", not the find's exit code. -# One way to solve this problem is to check whether we have any python pipelines -# that cannot compile. Here the exit code is the number of such files: -# RUN bash -e -c 'exit $(find . -maxdepth 2 -name "*.py" ! -exec dsl-compile --py {} --output {}.tar.gz \; -print | wc -l)' -# I think it's better to just use a shell loop though. -# RUN for pipeline in $(find . -maxdepth 2 -name '*.py' -type f); do dsl-compile --py "$pipeline" --output "$pipeline.tar.gz"; done -# The "for" loop breaks on all whitespace, so we either need to override IFS or -# use the "read" command instead. -RUN line="import kfp;kfp.components.default_base_image_or_builder='gcr.io/google-appengine/python:2020-03-31-141326";\ - set -e; find flip-coin -maxdepth 2 -name '*.py' -type f | while read pipeline; do \ - awk -v text="$line" '!/^#/ && !p {print text; p=1} 1' "$pipeline" && \ - python3 "$pipeline"; \ +# Compiling the preloaded samples. +# The default image is replaced with the GCR-hosted python image. +RUN set -e; \ + < /samples/sample_config.json jq .[].file --raw-output | while read pipeline_yaml; do \ + pipeline_py="${pipeline_yaml%.yaml}.py"; \ + mv "$pipeline_py" "${pipeline_py}.tmp"; \ + echo 'import kfp; kfp.components.default_base_image_or_builder="gcr.io/google-appengine/python:2020-03-31-141326"' | cat - "${pipeline_py}.tmp" > "$pipeline_py"; \ + python3 "$pipeline_py"; \ done FROM golang:1.13.0 diff --git a/backend/api/build_kfp_server_api_python_package.sh b/backend/api/build_kfp_server_api_python_package.sh index 44f73c704..92d73ba89 100755 --- a/backend/api/build_kfp_server_api_python_package.sh +++ b/backend/api/build_kfp_server_api_python_package.sh @@ -1,6 +1,6 @@ #!/bin/bash -e # -# Copyright 2018 Google LLC +# Copyright 2018-2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -27,48 +27,53 @@ # brew cask install caskroom/versions/java8 # brew install jq -VERSION="$1" - +DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null && pwd)" +REPO_ROOT="$DIR/../.." 
+VERSION="$(cat $REPO_ROOT/VERSION)" if [ -z "$VERSION" ]; then - echo "Usage: build_kfp_server_api_python_package.sh " + echo "ERROR: $REPO_ROOT/VERSION is empty" exit 1 fi -codegen_file=/tmp/swagger-codegen-cli.jar -# Browse all versions in: https://repo1.maven.org/maven2/io/swagger/swagger-codegen-cli/2.4.7/ -codegen_uri=https://repo1.maven.org/maven2/io/swagger/swagger-codegen-cli/2.4.7/swagger-codegen-cli-2.4.7.jar +codegen_file=/tmp/openapi-generator-cli.jar +# Browse all versions in: https://repo1.maven.org/maven2/org/openapitools/openapi-generator-cli/ +codegen_uri="https://repo1.maven.org/maven2/org/openapitools/openapi-generator-cli/4.3.1/openapi-generator-cli-4.3.1.jar" if ! [ -f "$codegen_file" ]; then - wget --no-verbose "$codegen_uri" -O "$codegen_file" + curl -L "$codegen_uri" -o "$codegen_file" fi pushd "$(dirname "$0")" -DIR=$(mktemp -d) +CURRENT_DIR="$(pwd)" +DIR="$CURRENT_DIR/python_http_client" +swagger_file="$CURRENT_DIR/swagger/kfp_api_single_file.swagger.json" -swagger_file=$(mktemp) - -echo "Merging all Swagger API definitions to $swagger_file." -jq -s ' - reduce .[] as $item ({}; . * $item) | - .info.title = "KF Pipelines API" | - .info.description = "Generated python client for the KF Pipelines server API" -' ./swagger/{run,job,pipeline,experiment,pipeline.upload}.swagger.json > "$swagger_file" +echo "Removing old content in DIR first." +rm -rf "$DIR" echo "Generating python code from swagger json in $DIR." -java -jar "$codegen_file" generate -l python -i "$swagger_file" -o "$DIR" -c <(echo '{ +java -jar "$codegen_file" generate -g python -t "$CURRENT_DIR/python_http_client_template" -i "$swagger_file" -o "$DIR" -c <(echo '{ "packageName": "kfp_server_api", - "projectName": "kfp-server-api", "packageVersion": "'"$VERSION"'", "packageUrl": "https://github.com/kubeflow/pipelines" }') +echo "Copying LICENSE to $DIR" +cp "$CURRENT_DIR/../../LICENSE" "$DIR" + echo "Building the python package in $DIR." pushd "$DIR" python3 setup.py --quiet sdist popd +echo "Adding license header for generated python files in $DIR." +go get -u github.com/google/addlicense +addlicense "$DIR" + echo "Run the following commands to update the package on PyPI" echo "python3 -m pip install twine" echo "python3 -m twine upload --username kubeflow-pipelines $DIR/dist/*" +echo "Please also push local changes to github.com/kubeflow/pipelines" + popd diff --git a/backend/api/experiment.proto b/backend/api/experiment.proto index 0ca43bbbc..d570a00fb 100644 --- a/backend/api/experiment.proto +++ b/backend/api/experiment.proto @@ -59,7 +59,7 @@ option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { }; service ExperimentService { - //Create a new experiment. + // Creates a new experiment. rpc CreateExperiment(CreateExperimentRequest) returns (Experiment) { option (google.api.http) = { post: "/apis/v1beta1/experiments" @@ -67,35 +67,38 @@ service ExperimentService { }; } - //Find a specific experiment by ID. + // Finds a specific experiment by ID. rpc GetExperiment(GetExperimentRequest) returns (Experiment) { option (google.api.http) = { get: "/apis/v1beta1/experiments/{id}" }; } - //Find all experiments. + // Finds all experiments. Supports pagination, and sorting on certain fields. rpc ListExperiment(ListExperimentsRequest) returns (ListExperimentsResponse) { option (google.api.http) = { get: "/apis/v1beta1/experiments" }; } - //Delete an experiment. + // Deletes an experiment without deleting the experiment's runs and jobs. 
To + // avoid unexpected behaviors, delete an experiment's runs and jobs before + // deleting the experiment. rpc DeleteExperiment(DeleteExperimentRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/apis/v1beta1/experiments/{id}" }; } - //Archive an experiment. + // Archives an experiment and the experiment's runs and jobs. rpc ArchiveExperiment(ArchiveExperimentRequest) returns (google.protobuf.Empty) { option (google.api.http) = { post: "/apis/v1beta1/experiments/{id}:archive" }; } - //Restore an archived experiment. + // Restores an archived experiment. The experiment's archived runs and jobs + // will stay archived. rpc UnarchiveExperiment(UnarchiveExperimentRequest) returns (google.protobuf.Empty) { option (google.api.http) = { post: "/apis/v1beta1/experiments/{id}:unarchive" @@ -114,10 +117,17 @@ message GetExperimentRequest { } message ListExperimentsRequest { + // A page token to request the next page of results. The token is acquried + // from the nextPageToken field of the response from the previous + // ListExperiment call or can be omitted when fetching the first page. string page_token = 1; + + // The number of experiments to be listed per page. If there are more + // experiments than this number, the response message will contain a + // nextPageToken field you can use to fetch the next page. int32 page_size = 2; - // Can be format of "field_name", "field_name asc" or "field_name des" + // Can be format of "field_name", "field_name asc" or "field_name desc" // Ascending by default. string sort_by = 3; @@ -171,13 +181,16 @@ message Experiment { STORAGESTATE_ARCHIVED = 2; } + // Output. Specifies whether this experiment is in archived or available state. StorageState storage_state = 6; } message ArchiveExperimentRequest { + // The ID of the experiment to be archived. string id = 1; } message UnarchiveExperimentRequest { + // The ID of the experiment to be restored. string id = 1; } diff --git a/backend/api/generate_api.sh b/backend/api/generate_api.sh index 8937e235a..c2c42f134 100755 --- a/backend/api/generate_api.sh +++ b/backend/api/generate_api.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2018 Google LLC +# Copyright 2018-2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -21,8 +21,15 @@ set -ex -BAZEL_BINDIR=$(bazel info bazel-bin) DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null && pwd)" +REPO_ROOT="$DIR/../.." +VERSION="$(cat $REPO_ROOT/VERSION)" +if [ -z "$VERSION" ]; then + echo "ERROR: $REPO_ROOT/VERSION is empty" + exit 1 +fi + +BAZEL_BINDIR=$(bazel info bazel-bin) SWAGGER_CMD=${DIR}/../../bazel-bin/external/com_github_go_swagger/cmd/swagger/*stripped/swagger AUTOGEN_CMD="${DIR}/../../bazel-bin/external/com_github_mbrukman_autogen/autogen_tool" GENERATED_GO_PROTO_FILES="${BAZEL_BINDIR}/backend/api/api_generated_go_sources/src/github.com/kubeflow/pipelines/backend/api/go_client/*.go" @@ -40,7 +47,6 @@ bazel build @com_github_go_swagger//cmd/swagger # Build .pb.go and .gw.pb.go files from the proto sources. bazel build //backend/api:api_generated_go_sources -set -x # Copy the generated files into the source tree and add license. for f in $GENERATED_GO_PROTO_FILES; do target=${DIR}/go_client/$(basename ${f}) @@ -57,7 +63,9 @@ jq -s ' reduce .[] as $item ({}; . * $item) | .info.title = "Kubeflow Pipelines API" | .info.description = "This file contains REST API specification for Kubeflow Pipelines. 
The file is autogenerated from the swagger definition." | - .info.version = "0.1.38" + .info.version = "'$VERSION'" | + .info.contact = { "name": "google", "email": "kubeflow-pipelines@google.com", "url": "https://www.google.com" } | + .info.license = { "name": "Apache 2.0", "url": "https://raw.githubusercontent.com/kubeflow/pipelines/master/LICENSE" } ' ${DIR}/swagger/{run,job,pipeline,experiment,pipeline.upload}.swagger.json > "${DIR}/swagger/kfp_api_single_file.swagger.json" # Generate Go HTTP client from the swagger files. @@ -124,3 +132,5 @@ find ${DIR}/go_http_client/ -name "*.go" -exec ${AUTOGEN_CMD} -i --no-tlc -c "Go # Finally, run gazelle to add BUILD files for the generated code. bazel run //:gazelle +# HACK: remove unnecessary BUILD.bazels +rm -f "$REPO_ROOT/sdk/python/kfp/components/structures/BUILD.bazel" "$REPO_ROOT/tools/metadatastore-upgrade/BUILD.bazel" diff --git a/backend/api/go_http_client/experiment_client/experiment_service/archive_experiment_parameters.go b/backend/api/go_http_client/experiment_client/experiment_service/archive_experiment_parameters.go index 471e6a5bf..276ca5340 100644 --- a/backend/api/go_http_client/experiment_client/experiment_service/archive_experiment_parameters.go +++ b/backend/api/go_http_client/experiment_client/experiment_service/archive_experiment_parameters.go @@ -75,7 +75,10 @@ for the archive experiment operation typically these are written to a http.Reque */ type ArchiveExperimentParams struct { - /*ID*/ + /*ID + The ID of the experiment to be archived. + + */ ID string timeout time.Duration diff --git a/backend/api/go_http_client/experiment_client/experiment_service/experiment_service_client.go b/backend/api/go_http_client/experiment_client/experiment_service/experiment_service_client.go index 8544a2030..423ddcffe 100644 --- a/backend/api/go_http_client/experiment_client/experiment_service/experiment_service_client.go +++ b/backend/api/go_http_client/experiment_client/experiment_service/experiment_service_client.go @@ -39,7 +39,7 @@ type Client struct { } /* -ArchiveExperiment archives an experiment +ArchiveExperiment archives an experiment and the experiment s runs and jobs */ func (a *Client) ArchiveExperiment(params *ArchiveExperimentParams, authInfo runtime.ClientAuthInfoWriter) (*ArchiveExperimentOK, error) { // TODO: Validate the params before sending @@ -97,7 +97,7 @@ func (a *Client) CreateExperiment(params *CreateExperimentParams, authInfo runti } /* -DeleteExperiment deletes an experiment +DeleteExperiment deletes an experiment without deleting the experiment s runs and jobs to avoid unexpected behaviors delete an experiment s runs and jobs before deleting the experiment */ func (a *Client) DeleteExperiment(params *DeleteExperimentParams, authInfo runtime.ClientAuthInfoWriter) (*DeleteExperimentOK, error) { // TODO: Validate the params before sending @@ -155,7 +155,7 @@ func (a *Client) GetExperiment(params *GetExperimentParams, authInfo runtime.Cli } /* -ListExperiment finds all experiments +ListExperiment finds all experiments supports pagination and sorting on certain fields */ func (a *Client) ListExperiment(params *ListExperimentParams, authInfo runtime.ClientAuthInfoWriter) (*ListExperimentOK, error) { // TODO: Validate the params before sending @@ -184,7 +184,7 @@ func (a *Client) ListExperiment(params *ListExperimentParams, authInfo runtime.C } /* -UnarchiveExperiment restores an archived experiment +UnarchiveExperiment restores an archived experiment the experiment s archived runs and jobs will stay archived */ func 
(a *Client) UnarchiveExperiment(params *UnarchiveExperimentParams, authInfo runtime.ClientAuthInfoWriter) (*UnarchiveExperimentOK, error) { // TODO: Validate the params before sending diff --git a/backend/api/go_http_client/experiment_client/experiment_service/list_experiment_parameters.go b/backend/api/go_http_client/experiment_client/experiment_service/list_experiment_parameters.go index e724148ca..5ad3a5ae0 100644 --- a/backend/api/go_http_client/experiment_client/experiment_service/list_experiment_parameters.go +++ b/backend/api/go_http_client/experiment_client/experiment_service/list_experiment_parameters.go @@ -95,9 +95,19 @@ type ListExperimentParams struct { */ Filter *string - /*PageSize*/ + /*PageSize + The number of experiments to be listed per page. If there are more + experiments than this number, the response message will contain a + nextPageToken field you can use to fetch the next page. + + */ PageSize *int32 - /*PageToken*/ + /*PageToken + A page token to request the next page of results. The token is acquried + from the nextPageToken field of the response from the previous + ListExperiment call or can be omitted when fetching the first page. + + */ PageToken *string /*ResourceReferenceKeyID The ID of the resource that referred to. @@ -110,7 +120,7 @@ type ListExperimentParams struct { */ ResourceReferenceKeyType *string /*SortBy - Can be format of "field_name", "field_name asc" or "field_name des" + Can be format of "field_name", "field_name asc" or "field_name desc" Ascending by default. */ diff --git a/backend/api/go_http_client/experiment_client/experiment_service/unarchive_experiment_parameters.go b/backend/api/go_http_client/experiment_client/experiment_service/unarchive_experiment_parameters.go index 297a2959e..b8260fa46 100644 --- a/backend/api/go_http_client/experiment_client/experiment_service/unarchive_experiment_parameters.go +++ b/backend/api/go_http_client/experiment_client/experiment_service/unarchive_experiment_parameters.go @@ -75,7 +75,10 @@ for the unarchive experiment operation typically these are written to a http.Req */ type UnarchiveExperimentParams struct { - /*ID*/ + /*ID + The ID of the experiment to be restored. + + */ ID string timeout time.Duration diff --git a/backend/api/go_http_client/experiment_model/api_experiment.go b/backend/api/go_http_client/experiment_model/api_experiment.go index 3f7e75205..cf4cdb627 100644 --- a/backend/api/go_http_client/experiment_model/api_experiment.go +++ b/backend/api/go_http_client/experiment_model/api_experiment.go @@ -50,7 +50,7 @@ type APIExperiment struct { // For Experiment, the only valid resource reference is a single Namespace. ResourceReferences []*APIResourceReference `json:"resource_references"` - // storage state + // Output. Specifies whether this experiment is in archived or available state. StorageState ExperimentStorageState `json:"storage_state,omitempty"` } diff --git a/backend/api/go_http_client/job_client/job_service/list_jobs_parameters.go b/backend/api/go_http_client/job_client/job_service/list_jobs_parameters.go index 41f4d6911..0b2a6cd11 100644 --- a/backend/api/go_http_client/job_client/job_service/list_jobs_parameters.go +++ b/backend/api/go_http_client/job_client/job_service/list_jobs_parameters.go @@ -95,9 +95,19 @@ type ListJobsParams struct { */ Filter *string - /*PageSize*/ + /*PageSize + The number of jobs to be listed per page. If there are more jobs than this + number, the response message will contain a nextPageToken field you can use + to fetch the next page. 
+ + */ PageSize *int32 - /*PageToken*/ + /*PageToken + A page token to request the next page of results. The token is acquried + from the nextPageToken field of the response from the previous + ListJobs call or can be omitted when fetching the first page. + + */ PageToken *string /*ResourceReferenceKeyID The ID of the resource that referred to. @@ -110,7 +120,7 @@ type ListJobsParams struct { */ ResourceReferenceKeyType *string /*SortBy - Can be format of "field_name", "field_name asc" or "field_name des". + Can be format of "field_name", "field_name asc" or "field_name desc". Ascending by default. */ diff --git a/backend/api/go_http_client/job_model/api_list_jobs_response.go b/backend/api/go_http_client/job_model/api_list_jobs_response.go index 4af64499c..64464d46b 100644 --- a/backend/api/go_http_client/job_model/api_list_jobs_response.go +++ b/backend/api/go_http_client/job_model/api_list_jobs_response.go @@ -35,10 +35,10 @@ type APIListJobsResponse struct { // A list of jobs returned. Jobs []*APIJob `json:"jobs"` - // next page token + // The token to list the next page of jobs. NextPageToken string `json:"next_page_token,omitempty"` - // total size + // The total number of jobs for the given query. TotalSize int32 `json:"total_size,omitempty"` } diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_parameters.go index 1c919efca..7da5fa3f8 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_parameters.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_parameters.go @@ -75,7 +75,10 @@ for the delete pipeline operation typically these are written to a http.Request */ type DeletePipelineParams struct { - /*ID*/ + /*ID + The ID of the pipeline to be deleted. + + */ ID string timeout time.Duration diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_version_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_version_parameters.go index 6fbb9b4a2..5b38996ac 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_version_parameters.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_version_parameters.go @@ -75,7 +75,10 @@ for the delete pipeline version operation typically these are written to a http. */ type DeletePipelineVersionParams struct { - /*VersionID*/ + /*VersionID + The ID of the pipeline version to be deleted. + + */ VersionID string timeout time.Duration diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_parameters.go index b58421251..b48781282 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_parameters.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_parameters.go @@ -75,7 +75,10 @@ for the get pipeline operation typically these are written to a http.Request */ type GetPipelineParams struct { - /*ID*/ + /*ID + The ID of the pipeline to be retrieved. 
+ + */ ID string timeout time.Duration diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_parameters.go index 3ac3c316f..4c450131b 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_parameters.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_parameters.go @@ -75,7 +75,10 @@ for the get pipeline version operation typically these are written to a http.Req */ type GetPipelineVersionParams struct { - /*VersionID*/ + /*VersionID + The ID of the pipeline version to be retrieved. + + */ VersionID string timeout time.Duration diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_template_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_template_parameters.go index 818718fd8..e925887db 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_template_parameters.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_template_parameters.go @@ -75,7 +75,10 @@ for the get pipeline version template operation typically these are written to a */ type GetPipelineVersionTemplateParams struct { - /*VersionID*/ + /*VersionID + The ID of the pipeline version whose template is to be retrieved. + + */ VersionID string timeout time.Duration diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/get_template_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/get_template_parameters.go index aa9ca0f34..2f130281b 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/get_template_parameters.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/get_template_parameters.go @@ -75,7 +75,10 @@ for the get template operation typically these are written to a http.Request */ type GetTemplateParams struct { - /*ID*/ + /*ID + The ID of the pipeline whose template is to be retrieved. + + */ ID string timeout time.Duration diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipeline_versions_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipeline_versions_parameters.go index 840d9ad01..84ef0ff89 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipeline_versions_parameters.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipeline_versions_parameters.go @@ -94,9 +94,19 @@ type ListPipelineVersionsParams struct { */ Filter *string - /*PageSize*/ + /*PageSize + The number of pipeline versions to be listed per page. If there are more + pipeline versions than this number, the response message will contain a + nextPageToken field you can use to fetch the next page. + + */ PageSize *int32 - /*PageToken*/ + /*PageToken + A page token to request the next page of results. The token is acquried + from the nextPageToken field of the response from the previous + ListPipelineVersions call or can be omitted when fetching the first page. + + */ PageToken *string /*ResourceKeyID The ID of the resource that referred to. @@ -109,7 +119,7 @@ type ListPipelineVersionsParams struct { */ ResourceKeyType *string /*SortBy - Can be format of "field_name", "field_name asc" or "field_name des" + Can be format of "field_name", "field_name asc" or "field_name desc" Ascending by default. 
*/ diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipelines_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipelines_parameters.go index d8ad913f5..acd96a999 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipelines_parameters.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipelines_parameters.go @@ -83,12 +83,22 @@ type ListPipelinesParams struct { */ Filter *string - /*PageSize*/ + /*PageSize + The number of pipelines to be listed per page. If there are more pipelines + than this number, the response message will contain a valid value in the + nextPageToken field. + + */ PageSize *int32 - /*PageToken*/ + /*PageToken + A page token to request the next page of results. The token is acquried + from the nextPageToken field of the response from the previous + ListPipelines call. + + */ PageToken *string /*SortBy - Can be format of "field_name", "field_name asc" or "field_name des" + Can be format of "field_name", "field_name asc" or "field_name desc" Ascending by default. */ diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/pipeline_service_client.go b/backend/api/go_http_client/pipeline_client/pipeline_service/pipeline_service_client.go index 3f041cfd4..a516dcfa6 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/pipeline_service_client.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/pipeline_service_client.go @@ -39,7 +39,7 @@ type Client struct { } /* -CreatePipeline adds a pipeline +CreatePipeline creates a pipeline */ func (a *Client) CreatePipeline(params *CreatePipelineParams, authInfo runtime.ClientAuthInfoWriter) (*CreatePipelineOK, error) { // TODO: Validate the params before sending @@ -68,7 +68,7 @@ func (a *Client) CreatePipeline(params *CreatePipelineParams, authInfo runtime.C } /* -CreatePipelineVersion create pipeline version API +CreatePipelineVersion adds a pipeline version to the specified pipeline */ func (a *Client) CreatePipelineVersion(params *CreatePipelineVersionParams, authInfo runtime.ClientAuthInfoWriter) (*CreatePipelineVersionOK, error) { // TODO: Validate the params before sending @@ -97,7 +97,7 @@ func (a *Client) CreatePipelineVersion(params *CreatePipelineVersionParams, auth } /* -DeletePipeline deletes a pipeline +DeletePipeline deletes a pipeline and its pipeline versions */ func (a *Client) DeletePipeline(params *DeletePipelineParams, authInfo runtime.ClientAuthInfoWriter) (*DeletePipelineOK, error) { // TODO: Validate the params before sending @@ -126,7 +126,7 @@ func (a *Client) DeletePipeline(params *DeletePipelineParams, authInfo runtime.C } /* -DeletePipelineVersion delete pipeline version API +DeletePipelineVersion deletes a pipeline version by pipeline version ID if the deleted pipeline version is the default pipeline version the pipeline s default version changes to the pipeline s most recent pipeline version if there are no remaining pipeline versions the pipeline will have no default version examines the run service api ipynb notebook to learn more about creating a run using a pipeline version https github com kubeflow pipelines blob master tools benchmarks run service api ipynb */ func (a *Client) DeletePipelineVersion(params *DeletePipelineVersionParams, authInfo runtime.ClientAuthInfoWriter) (*DeletePipelineVersionOK, error) { // TODO: Validate the params before sending @@ -184,7 +184,7 @@ func (a *Client) GetPipeline(params *GetPipelineParams, authInfo 
runtime.ClientA } /* -GetPipelineVersion get pipeline version API +GetPipelineVersion gets a pipeline version by pipeline version ID */ func (a *Client) GetPipelineVersion(params *GetPipelineVersionParams, authInfo runtime.ClientAuthInfoWriter) (*GetPipelineVersionOK, error) { // TODO: Validate the params before sending @@ -213,7 +213,7 @@ func (a *Client) GetPipelineVersion(params *GetPipelineVersionParams, authInfo r } /* -GetPipelineVersionTemplate get pipeline version template API +GetPipelineVersionTemplate returns a y a m l template that contains the specified pipeline version s description parameters and metadata */ func (a *Client) GetPipelineVersionTemplate(params *GetPipelineVersionTemplateParams, authInfo runtime.ClientAuthInfoWriter) (*GetPipelineVersionTemplateOK, error) { // TODO: Validate the params before sending @@ -271,7 +271,7 @@ func (a *Client) GetTemplate(params *GetTemplateParams, authInfo runtime.ClientA } /* -ListPipelineVersions list pipeline versions API +ListPipelineVersions lists all pipeline versions of a given pipeline */ func (a *Client) ListPipelineVersions(params *ListPipelineVersionsParams, authInfo runtime.ClientAuthInfoWriter) (*ListPipelineVersionsOK, error) { // TODO: Validate the params before sending diff --git a/backend/api/go_http_client/pipeline_model/api_get_template_response.go b/backend/api/go_http_client/pipeline_model/api_get_template_response.go index b1ddc6420..4a1eeca86 100644 --- a/backend/api/go_http_client/pipeline_model/api_get_template_response.go +++ b/backend/api/go_http_client/pipeline_model/api_get_template_response.go @@ -29,7 +29,8 @@ import ( // swagger:model apiGetTemplateResponse type APIGetTemplateResponse struct { - // template + // The template of the pipeline specified in a GetTemplate request, or of a + // pipeline version specified in a GetPipelinesVersionTemplate request. Template string `json:"template,omitempty"` } diff --git a/backend/api/go_http_client/pipeline_model/api_list_pipeline_versions_response.go b/backend/api/go_http_client/pipeline_model/api_list_pipeline_versions_response.go index a741068ea..bbec05938 100644 --- a/backend/api/go_http_client/pipeline_model/api_list_pipeline_versions_response.go +++ b/backend/api/go_http_client/pipeline_model/api_list_pipeline_versions_response.go @@ -32,10 +32,10 @@ import ( // swagger:model apiListPipelineVersionsResponse type APIListPipelineVersionsResponse struct { - // next page token + // The token to list the next page of pipeline versions. NextPageToken string `json:"next_page_token,omitempty"` - // total size + // The total number of pipeline versions for the given query. TotalSize int32 `json:"total_size,omitempty"` // versions diff --git a/backend/api/go_http_client/pipeline_model/api_list_pipelines_response.go b/backend/api/go_http_client/pipeline_model/api_list_pipelines_response.go index a0b430061..89176d21d 100644 --- a/backend/api/go_http_client/pipeline_model/api_list_pipelines_response.go +++ b/backend/api/go_http_client/pipeline_model/api_list_pipelines_response.go @@ -32,13 +32,13 @@ import ( // swagger:model apiListPipelinesResponse type APIListPipelinesResponse struct { - // next page token + // The token to list the next page of pipelines. NextPageToken string `json:"next_page_token,omitempty"` // pipelines Pipelines []*APIPipeline `json:"pipelines"` - // total size + // The total number of pipelines for the given query. 
TotalSize int32 `json:"total_size,omitempty"` } diff --git a/backend/api/go_http_client/pipeline_model/api_url.go b/backend/api/go_http_client/pipeline_model/api_url.go index f2b59f4cf..d48a8ab07 100644 --- a/backend/api/go_http_client/pipeline_model/api_url.go +++ b/backend/api/go_http_client/pipeline_model/api_url.go @@ -29,7 +29,7 @@ import ( // swagger:model apiUrl type APIURL struct { - // pipeline url + // URL of the pipeline definition or the pipeline version definition. PipelineURL string `json:"pipeline_url,omitempty"` } diff --git a/backend/api/go_http_client/run_client/run_service/archive_run_parameters.go b/backend/api/go_http_client/run_client/run_service/archive_run_parameters.go index 6199fd7e4..4b7220bf0 100644 --- a/backend/api/go_http_client/run_client/run_service/archive_run_parameters.go +++ b/backend/api/go_http_client/run_client/run_service/archive_run_parameters.go @@ -75,7 +75,10 @@ for the archive run operation typically these are written to a http.Request */ type ArchiveRunParams struct { - /*ID*/ + /*ID + The ID of the run to be archived. + + */ ID string timeout time.Duration diff --git a/backend/api/go_http_client/run_client/run_service/delete_run_parameters.go b/backend/api/go_http_client/run_client/run_service/delete_run_parameters.go index a69d318fc..2083d995f 100644 --- a/backend/api/go_http_client/run_client/run_service/delete_run_parameters.go +++ b/backend/api/go_http_client/run_client/run_service/delete_run_parameters.go @@ -75,7 +75,10 @@ for the delete run operation typically these are written to a http.Request */ type DeleteRunParams struct { - /*ID*/ + /*ID + The ID of the run to be deleted. + + */ ID string timeout time.Duration diff --git a/backend/api/go_http_client/run_client/run_service/get_run_parameters.go b/backend/api/go_http_client/run_client/run_service/get_run_parameters.go index d25095d8a..2deb3990f 100644 --- a/backend/api/go_http_client/run_client/run_service/get_run_parameters.go +++ b/backend/api/go_http_client/run_client/run_service/get_run_parameters.go @@ -75,7 +75,10 @@ for the get run operation typically these are written to a http.Request */ type GetRunParams struct { - /*RunID*/ + /*RunID + The ID of the run to be retrieved. + + */ RunID string timeout time.Duration diff --git a/backend/api/go_http_client/run_client/run_service/list_runs_parameters.go b/backend/api/go_http_client/run_client/run_service/list_runs_parameters.go index 6494854ae..54f316640 100644 --- a/backend/api/go_http_client/run_client/run_service/list_runs_parameters.go +++ b/backend/api/go_http_client/run_client/run_service/list_runs_parameters.go @@ -95,9 +95,19 @@ type ListRunsParams struct { */ Filter *string - /*PageSize*/ + /*PageSize + The number of runs to be listed per page. If there are more runs than this + number, the response message will contain a nextPageToken field you can use + to fetch the next page. + + */ PageSize *int32 - /*PageToken*/ + /*PageToken + A page token to request the next page of results. The token is acquried + from the nextPageToken field of the response from the previous + ListRuns call or can be omitted when fetching the first page. + + */ PageToken *string /*ResourceReferenceKeyID The ID of the resource that referred to. @@ -110,8 +120,8 @@ type ListRunsParams struct { */ ResourceReferenceKeyType *string /*SortBy - Can be format of "field_name", "field_name asc" or "field_name des" - (Example, "name asc" or "id des"). Ascending by default. 
+ Can be format of "field_name", "field_name asc" or "field_name desc" + (Example, "name asc" or "id desc"). Ascending by default. */ SortBy *string diff --git a/backend/api/go_http_client/run_client/run_service/retry_run_parameters.go b/backend/api/go_http_client/run_client/run_service/retry_run_parameters.go index ebe257f8b..c3b9da1c1 100644 --- a/backend/api/go_http_client/run_client/run_service/retry_run_parameters.go +++ b/backend/api/go_http_client/run_client/run_service/retry_run_parameters.go @@ -75,7 +75,10 @@ for the retry run operation typically these are written to a http.Request */ type RetryRunParams struct { - /*RunID*/ + /*RunID + The ID of the run to be retried. + + */ RunID string timeout time.Duration diff --git a/backend/api/go_http_client/run_client/run_service/run_service_client.go b/backend/api/go_http_client/run_client/run_service/run_service_client.go index 215317ce1..f482d72d9 100644 --- a/backend/api/go_http_client/run_client/run_service/run_service_client.go +++ b/backend/api/go_http_client/run_client/run_service/run_service_client.go @@ -242,7 +242,7 @@ func (a *Client) ReportRunMetrics(params *ReportRunMetricsParams, authInfo runti } /* -RetryRun res initiate a failed or terminated run +RetryRun res initiates a failed or terminated run */ func (a *Client) RetryRun(params *RetryRunParams, authInfo runtime.ClientAuthInfoWriter) (*RetryRunOK, error) { // TODO: Validate the params before sending diff --git a/backend/api/go_http_client/run_client/run_service/terminate_run_parameters.go b/backend/api/go_http_client/run_client/run_service/terminate_run_parameters.go index 8ed4662e9..ff67c3256 100644 --- a/backend/api/go_http_client/run_client/run_service/terminate_run_parameters.go +++ b/backend/api/go_http_client/run_client/run_service/terminate_run_parameters.go @@ -75,7 +75,10 @@ for the terminate run operation typically these are written to a http.Request */ type TerminateRunParams struct { - /*RunID*/ + /*RunID + The ID of the run to be terminated. + + */ RunID string timeout time.Duration diff --git a/backend/api/go_http_client/run_client/run_service/unarchive_run_parameters.go b/backend/api/go_http_client/run_client/run_service/unarchive_run_parameters.go index f97c8bae9..88bc3e2bf 100644 --- a/backend/api/go_http_client/run_client/run_service/unarchive_run_parameters.go +++ b/backend/api/go_http_client/run_client/run_service/unarchive_run_parameters.go @@ -75,7 +75,10 @@ for the unarchive run operation typically these are written to a http.Request */ type UnarchiveRunParams struct { - /*ID*/ + /*ID + The ID of the run to be restored. + + */ ID string timeout time.Duration diff --git a/backend/api/go_http_client/run_model/api_list_runs_response.go b/backend/api/go_http_client/run_model/api_list_runs_response.go index 7b929b8d8..a122cbf5f 100644 --- a/backend/api/go_http_client/run_model/api_list_runs_response.go +++ b/backend/api/go_http_client/run_model/api_list_runs_response.go @@ -32,13 +32,13 @@ import ( // swagger:model apiListRunsResponse type APIListRunsResponse struct { - // next page token + // The token to list the next page of runs. NextPageToken string `json:"next_page_token,omitempty"` // runs Runs []*APIRun `json:"runs"` - // total size + // The total number of runs for the given query. 
TotalSize int32 `json:"total_size,omitempty"` } diff --git a/backend/api/go_http_client/run_model/api_run.go b/backend/api/go_http_client/run_model/api_run.go index 069cbdb84..cc6402ce8 100644 --- a/backend/api/go_http_client/run_model/api_run.go +++ b/backend/api/go_http_client/run_model/api_run.go @@ -65,6 +65,8 @@ type APIRun struct { PipelineSpec *APIPipelineSpec `json:"pipeline_spec,omitempty"` // Optional input field. Specify which resource this run belongs to. + // When creating a run from a particular pipeline version, the pipeline + // version can be specified here. ResourceReferences []*APIResourceReference `json:"resource_references"` // Output. When this run is scheduled to run. This could be different from @@ -81,7 +83,7 @@ type APIRun struct { // One of [Pending, Running, Succeeded, Skipped, Failed, Error] Status string `json:"status,omitempty"` - // storage state + // Output. Specify whether this run is in archived or available mode. StorageState RunStorageState `json:"storage_state,omitempty"` } diff --git a/backend/api/job.proto b/backend/api/job.proto index 7910740a1..26fb5ae48 100644 --- a/backend/api/job.proto +++ b/backend/api/job.proto @@ -63,7 +63,7 @@ option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { service JobService { - //Create a new job. + // Creates a new job. rpc CreateJob(CreateJobRequest) returns (Job) { option (google.api.http) = { post: "/apis/v1beta1/jobs" @@ -71,35 +71,35 @@ service JobService { }; } - //Find a specific job by ID. + // Finds a specific job by ID. rpc GetJob(GetJobRequest) returns (Job) { option (google.api.http) = { get: "/apis/v1beta1/jobs/{id}" }; } - //Find all jobs. + // Finds all jobs. rpc ListJobs(ListJobsRequest) returns (ListJobsResponse) { option (google.api.http) = { get: "/apis/v1beta1/jobs" }; } - //Restarts a job that was previously stopped. All runs associated with the job will continue. + // Restarts a job that was previously stopped. All runs associated with the job will continue. rpc EnableJob(EnableJobRequest) returns (google.protobuf.Empty) { option (google.api.http) = { post: "/apis/v1beta1/jobs/{id}/enable" }; } - //Stops a job and all its associated runs. The job is not deleted. + // Stops a job and all its associated runs. The job is not deleted. rpc DisableJob(DisableJobRequest) returns (google.protobuf.Empty) { option (google.api.http) = { post: "/apis/v1beta1/jobs/{id}/disable" }; } - //Delete a job. + // Deletes a job. rpc DeleteJob(DeleteJobRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/apis/v1beta1/jobs/{id}" @@ -118,10 +118,17 @@ message GetJobRequest { } message ListJobsRequest { + // A page token to request the next page of results. The token is acquried + // from the nextPageToken field of the response from the previous + // ListJobs call or can be omitted when fetching the first page. string page_token = 1; + + // The number of jobs to be listed per page. If there are more jobs than this + // number, the response message will contain a nextPageToken field you can use + // to fetch the next page. int32 page_size = 2; - // Can be format of "field_name", "field_name asc" or "field_name des". + // Can be format of "field_name", "field_name asc" or "field_name desc". // Ascending by default. string sort_by = 3; @@ -139,7 +146,11 @@ message ListJobsRequest { message ListJobsResponse { // A list of jobs returned. repeated Job jobs = 1; + + // The total number of jobs for the given query. 
int32 total_size = 3; + + // The token to list the next page of jobs. string next_page_token = 2; } @@ -252,4 +263,4 @@ message Job { // If false, the job will catch up on each past interval. bool no_catchup = 17; } -// Next field number of Job will be 19 \ No newline at end of file +// Next field number of Job will be 19 diff --git a/backend/api/pipeline.proto b/backend/api/pipeline.proto index e00133202..07cd844ae 100644 --- a/backend/api/pipeline.proto +++ b/backend/api/pipeline.proto @@ -61,7 +61,7 @@ option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { }; service PipelineService { - //Add a pipeline. + // Creates a pipeline. rpc CreatePipeline(CreatePipelineRequest) returns (Pipeline) { option (google.api.http) = { post: "/apis/v1beta1/pipelines" @@ -69,34 +69,35 @@ service PipelineService { }; } - //Find a specific pipeline by ID. + // Finds a specific pipeline by ID. rpc GetPipeline(GetPipelineRequest) returns (Pipeline) { option (google.api.http) = { get: "/apis/v1beta1/pipelines/{id}" }; } - //Find all pipelines. + // Finds all pipelines. rpc ListPipelines(ListPipelinesRequest) returns (ListPipelinesResponse) { option (google.api.http) = { get: "/apis/v1beta1/pipelines" }; } - //Delete a pipeline. + // Deletes a pipeline and its pipeline versions. rpc DeletePipeline(DeletePipelineRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/apis/v1beta1/pipelines/{id}" }; } - //Returns a single YAML template that contains the description, parameters, and metadata associated with the pipeline provided. + // Returns a single YAML template that contains the description, parameters, and metadata associated with the pipeline provided. rpc GetTemplate(GetTemplateRequest) returns (GetTemplateResponse) { option (google.api.http) = { get: "/apis/v1beta1/pipelines/{id}/templates" }; } + // Adds a pipeline version to the specified pipeline. rpc CreatePipelineVersion(CreatePipelineVersionRequest) returns (PipelineVersion) { option (google.api.http) = { @@ -105,12 +106,14 @@ service PipelineService { }; } + // Gets a pipeline version by pipeline version ID. rpc GetPipelineVersion(GetPipelineVersionRequest) returns (PipelineVersion) { option (google.api.http) = { get: "/apis/v1beta1/pipeline_versions/{version_id}" }; } + // Lists all pipeline versions of a given pipeline. rpc ListPipelineVersions(ListPipelineVersionsRequest) returns (ListPipelineVersionsResponse) { option (google.api.http) = { @@ -118,6 +121,12 @@ service PipelineService { }; } + // Deletes a pipeline version by pipeline version ID. If the deleted pipeline + // version is the default pipeline version, the pipeline's default version + // changes to the pipeline's most recent pipeline version. If there are no + // remaining pipeline versions, the pipeline will have no default version. + // Examines the run_service_api.ipynb notebook to learn more about creating a + // run using a pipeline version (https://github.com/kubeflow/pipelines/blob/master/tools/benchmarks/run_service_api.ipynb). rpc DeletePipelineVersion(DeletePipelineVersionRequest) returns (google.protobuf.Empty) { option (google.api.http) = { @@ -125,6 +134,7 @@ service PipelineService { }; } + // Returns a YAML template that contains the specified pipeline version's description, parameters and metadata. 
rpc GetPipelineVersionTemplate(GetPipelineVersionTemplateRequest) returns (GetTemplateResponse) { option (google.api.http) = { get: "/apis/v1beta1/pipeline_versions/{version_id}/templates" @@ -133,6 +143,7 @@ service PipelineService { } message Url { + // URL of the pipeline definition or the pipeline version definition. string pipeline_url = 1; } @@ -144,13 +155,22 @@ message CreatePipelineRequest { } message GetPipelineRequest { + // The ID of the pipeline to be retrieved. string id = 1; } message ListPipelinesRequest { + // A page token to request the next page of results. The token is acquried + // from the nextPageToken field of the response from the previous + // ListPipelines call. string page_token = 1; + + // The number of pipelines to be listed per page. If there are more pipelines + // than this number, the response message will contain a valid value in the + // nextPageToken field. int32 page_size = 2; - // Can be format of "field_name", "field_name asc" or "field_name des" + + // Can be format of "field_name", "field_name asc" or "field_name desc" // Ascending by default. string sort_by = 3; @@ -162,23 +182,32 @@ message ListPipelinesRequest { message ListPipelinesResponse { repeated Pipeline pipelines = 1; + + // The total number of pipelines for the given query. int32 total_size = 3; + + // The token to list the next page of pipelines. string next_page_token = 2; } message DeletePipelineRequest { + // The ID of the pipeline to be deleted. string id = 1; } message GetTemplateRequest { + // The ID of the pipeline whose template is to be retrieved. string id = 1; } message GetTemplateResponse { + // The template of the pipeline specified in a GetTemplate request, or of a + // pipeline version specified in a GetPipelinesVersionTemplate request. string template = 1; } message GetPipelineVersionTemplateRequest { + // The ID of the pipeline version whose template is to be retrieved. string version_id = 1; } @@ -189,16 +218,25 @@ message CreatePipelineVersionRequest { } message GetPipelineVersionRequest { + // The ID of the pipeline version to be retrieved. string version_id = 1; } message ListPipelineVersionsRequest { // ResourceKey specifies the pipeline whose versions are to be listed. ResourceKey resource_key = 1; + + // The number of pipeline versions to be listed per page. If there are more + // pipeline versions than this number, the response message will contain a + // nextPageToken field you can use to fetch the next page. int32 page_size = 2; + + // A page token to request the next page of results. The token is acquried + // from the nextPageToken field of the response from the previous + // ListPipelineVersions call or can be omitted when fetching the first page. string page_token = 3; - // Can be format of "field_name", "field_name asc" or "field_name des" + // Can be format of "field_name", "field_name asc" or "field_name desc" // Ascending by default. string sort_by = 4; // A base-64 encoded, JSON-serialized Filter protocol buffer (see @@ -208,11 +246,16 @@ message ListPipelineVersionsRequest { message ListPipelineVersionsResponse { repeated PipelineVersion versions = 1; + + // The token to list the next page of pipeline versions. string next_page_token = 2; + + // The total number of pipeline versions for the given query. int32 total_size = 3; } message DeletePipelineVersionRequest { + // The ID of the pipeline version to be deleted. 
string version_id = 1; } diff --git a/backend/api/python_http_client/.gitignore b/backend/api/python_http_client/.gitignore new file mode 100644 index 000000000..43995bd42 --- /dev/null +++ b/backend/api/python_http_client/.gitignore @@ -0,0 +1,66 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*,cover +.hypothesis/ +venv/ +.venv/ +.python-version +.pytest_cache + +# Translations +*.mo +*.pot + +# Django stuff: +*.log + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +#Ipython Notebook +.ipynb_checkpoints diff --git a/backend/api/python_http_client/.gitlab-ci.yml b/backend/api/python_http_client/.gitlab-ci.yml new file mode 100644 index 000000000..50f737306 --- /dev/null +++ b/backend/api/python_http_client/.gitlab-ci.yml @@ -0,0 +1,47 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# ref: https://docs.gitlab.com/ee/ci/README.html + +stages: + - test + +.nosetest: + stage: test + script: + - pip install -r requirements.txt + - pip install -r test-requirements.txt + - pytest --cov=kfp_server_api + +nosetest-2.7: + extends: .nosetest + image: python:2.7-alpine +nosetest-3.3: + extends: .nosetest + image: python:3.3-alpine +nosetest-3.4: + extends: .nosetest + image: python:3.4-alpine +nosetest-3.5: + extends: .nosetest + image: python:3.5-alpine +nosetest-3.6: + extends: .nosetest + image: python:3.6-alpine +nosetest-3.7: + extends: .nosetest + image: python:3.7-alpine +nosetest-3.8: + extends: .nosetest + image: python:3.8-alpine diff --git a/backend/api/python_http_client/.openapi-generator-ignore b/backend/api/python_http_client/.openapi-generator-ignore new file mode 100644 index 000000000..7484ee590 --- /dev/null +++ b/backend/api/python_http_client/.openapi-generator-ignore @@ -0,0 +1,23 @@ +# OpenAPI Generator Ignore +# Generated by openapi-generator https://github.com/openapitools/openapi-generator + +# Use this file to prevent files from being overwritten by the generator. +# The patterns follow closely to .gitignore or .dockerignore. + +# As an example, the C# client generator defines ApiClient.cs. 
+# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line: +#ApiClient.cs + +# You can match any string of characters against a directory, file or extension with a single asterisk (*): +#foo/*/qux +# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux + +# You can recursively match patterns against a directory, file or extension with a double asterisk (**): +#foo/**/qux +# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux + +# You can also negate patterns with an exclamation (!). +# For example, you can ignore all files in a docs folder with the file extension .md: +#docs/*.md +# Then explicitly reverse the ignore rule for a single file: +#!docs/README.md diff --git a/backend/api/python_http_client/.openapi-generator/VERSION b/backend/api/python_http_client/.openapi-generator/VERSION new file mode 100644 index 000000000..ecedc98d1 --- /dev/null +++ b/backend/api/python_http_client/.openapi-generator/VERSION @@ -0,0 +1 @@ +4.3.1 \ No newline at end of file diff --git a/backend/api/python_http_client/.travis.yml b/backend/api/python_http_client/.travis.yml new file mode 100644 index 000000000..f754bfbb9 --- /dev/null +++ b/backend/api/python_http_client/.travis.yml @@ -0,0 +1,31 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# ref: https://docs.travis-ci.com/user/languages/python +language: python +python: + - "2.7" + - "3.2" + - "3.3" + - "3.4" + - "3.5" + - "3.6" + - "3.7" + - "3.8" +# command to install dependencies +install: + - "pip install -r requirements.txt" + - "pip install -r test-requirements.txt" +# command to run tests +script: pytest --cov=kfp_server_api diff --git a/backend/api/python_http_client/LICENSE b/backend/api/python_http_client/LICENSE new file mode 100644 index 000000000..261eeb9e9 --- /dev/null +++ b/backend/api/python_http_client/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/api/python_http_client/README.md b/backend/api/python_http_client/README.md new file mode 100644 index 000000000..d5f7e4957 --- /dev/null +++ b/backend/api/python_http_client/README.md @@ -0,0 +1,188 @@ +# kfp-server-api +This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ +This Python package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project: + +- API version: 1.0.0 +- Package version: 1.0.0 +- Build package: org.openapitools.codegen.languages.PythonClientCodegen +For more information, please visit [https://www.google.com](https://www.google.com) + +## Requirements. + +Python 2.7 and 3.4+ + +## Installation & Usage +### pip install + +If the python package is hosted on a repository, you can install directly using: + +```sh +pip install git+https://github.com/GIT_USER_ID/GIT_REPO_ID.git +``` +(you may need to run `pip` with root permission: `sudo pip install git+https://github.com/GIT_USER_ID/GIT_REPO_ID.git`) + +Then import the package: +```python +import kfp_server_api +``` + +### Setuptools + +Install via [Setuptools](http://pypi.python.org/pypi/setuptools). + +```sh +python setup.py install --user +``` +(or `sudo python setup.py install` to install the package for all users) + +Then import the package: +```python +import kfp_server_api +``` + +## Getting Started + +Please follow the [installation procedure](#installation--usage) and then run the following: + +```python +from __future__ import print_function + +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.ExperimentServiceApi(api_client) + id = 'id_example' # str | The ID of the experiment to be archived. + + try: + # Archives an experiment and the experiment's runs and jobs. + api_response = api_instance.archive_experiment(id) + pprint(api_response) + except ApiException as e: + print("Exception when calling ExperimentServiceApi->archive_experiment: %s\n" % e) + +``` + +## Documentation for API Endpoints + +All URIs are relative to *http://localhost* + +Class | Method | HTTP request | Description +------------ | ------------- | ------------- | ------------- +*ExperimentServiceApi* | [**archive_experiment**](docs/ExperimentServiceApi.md#archive_experiment) | **POST** /apis/v1beta1/experiments/{id}:archive | Archives an experiment and the experiment's runs and jobs. +*ExperimentServiceApi* | [**create_experiment**](docs/ExperimentServiceApi.md#create_experiment) | **POST** /apis/v1beta1/experiments | Creates a new experiment. +*ExperimentServiceApi* | [**delete_experiment**](docs/ExperimentServiceApi.md#delete_experiment) | **DELETE** /apis/v1beta1/experiments/{id} | Deletes an experiment without deleting the experiment's runs and jobs. To avoid unexpected behaviors, delete an experiment's runs and jobs before deleting the experiment. 
+*ExperimentServiceApi* | [**get_experiment**](docs/ExperimentServiceApi.md#get_experiment) | **GET** /apis/v1beta1/experiments/{id} | Finds a specific experiment by ID.
+*ExperimentServiceApi* | [**list_experiment**](docs/ExperimentServiceApi.md#list_experiment) | **GET** /apis/v1beta1/experiments | Finds all experiments. Supports pagination, and sorting on certain fields.
+*ExperimentServiceApi* | [**unarchive_experiment**](docs/ExperimentServiceApi.md#unarchive_experiment) | **POST** /apis/v1beta1/experiments/{id}:unarchive | Restores an archived experiment. The experiment's archived runs and jobs will stay archived.
+*JobServiceApi* | [**create_job**](docs/JobServiceApi.md#create_job) | **POST** /apis/v1beta1/jobs | Creates a new job.
+*JobServiceApi* | [**delete_job**](docs/JobServiceApi.md#delete_job) | **DELETE** /apis/v1beta1/jobs/{id} | Deletes a job.
+*JobServiceApi* | [**disable_job**](docs/JobServiceApi.md#disable_job) | **POST** /apis/v1beta1/jobs/{id}/disable | Stops a job and all its associated runs. The job is not deleted.
+*JobServiceApi* | [**enable_job**](docs/JobServiceApi.md#enable_job) | **POST** /apis/v1beta1/jobs/{id}/enable | Restarts a job that was previously stopped. All runs associated with the job will continue.
+*JobServiceApi* | [**get_job**](docs/JobServiceApi.md#get_job) | **GET** /apis/v1beta1/jobs/{id} | Finds a specific job by ID.
+*JobServiceApi* | [**list_jobs**](docs/JobServiceApi.md#list_jobs) | **GET** /apis/v1beta1/jobs | Finds all jobs.
+*PipelineServiceApi* | [**create_pipeline**](docs/PipelineServiceApi.md#create_pipeline) | **POST** /apis/v1beta1/pipelines | Creates a pipeline.
+*PipelineServiceApi* | [**create_pipeline_version**](docs/PipelineServiceApi.md#create_pipeline_version) | **POST** /apis/v1beta1/pipeline_versions | Adds a pipeline version to the specified pipeline.
+*PipelineServiceApi* | [**delete_pipeline**](docs/PipelineServiceApi.md#delete_pipeline) | **DELETE** /apis/v1beta1/pipelines/{id} | Deletes a pipeline and its pipeline versions.
+*PipelineServiceApi* | [**delete_pipeline_version**](docs/PipelineServiceApi.md#delete_pipeline_version) | **DELETE** /apis/v1beta1/pipeline_versions/{version_id} | Deletes a pipeline version by pipeline version ID. If the deleted pipeline version is the default pipeline version, the pipeline's default version changes to the pipeline's most recent pipeline version. If there are no remaining pipeline versions, the pipeline will have no default version. See the run_service_api.ipynb notebook to learn more about creating a run using a pipeline version (https://github.com/kubeflow/pipelines/blob/master/tools/benchmarks/run_service_api.ipynb).
+*PipelineServiceApi* | [**get_pipeline**](docs/PipelineServiceApi.md#get_pipeline) | **GET** /apis/v1beta1/pipelines/{id} | Finds a specific pipeline by ID.
+*PipelineServiceApi* | [**get_pipeline_version**](docs/PipelineServiceApi.md#get_pipeline_version) | **GET** /apis/v1beta1/pipeline_versions/{version_id} | Gets a pipeline version by pipeline version ID.
+*PipelineServiceApi* | [**get_pipeline_version_template**](docs/PipelineServiceApi.md#get_pipeline_version_template) | **GET** /apis/v1beta1/pipeline_versions/{version_id}/templates | Returns a YAML template that contains the specified pipeline version's description, parameters and metadata.
+*PipelineServiceApi* | [**get_template**](docs/PipelineServiceApi.md#get_template) | **GET** /apis/v1beta1/pipelines/{id}/templates | Returns a single YAML template that contains the description, parameters, and metadata associated with the pipeline provided. +*PipelineServiceApi* | [**list_pipeline_versions**](docs/PipelineServiceApi.md#list_pipeline_versions) | **GET** /apis/v1beta1/pipeline_versions | Lists all pipeline versions of a given pipeline. +*PipelineServiceApi* | [**list_pipelines**](docs/PipelineServiceApi.md#list_pipelines) | **GET** /apis/v1beta1/pipelines | Finds all pipelines. +*PipelineUploadServiceApi* | [**upload_pipeline**](docs/PipelineUploadServiceApi.md#upload_pipeline) | **POST** /apis/v1beta1/pipelines/upload | +*PipelineUploadServiceApi* | [**upload_pipeline_version**](docs/PipelineUploadServiceApi.md#upload_pipeline_version) | **POST** /apis/v1beta1/pipelines/upload_version | +*RunServiceApi* | [**archive_run**](docs/RunServiceApi.md#archive_run) | **POST** /apis/v1beta1/runs/{id}:archive | Archives a run. +*RunServiceApi* | [**create_run**](docs/RunServiceApi.md#create_run) | **POST** /apis/v1beta1/runs | Creates a new run. +*RunServiceApi* | [**delete_run**](docs/RunServiceApi.md#delete_run) | **DELETE** /apis/v1beta1/runs/{id} | Deletes a run. +*RunServiceApi* | [**get_run**](docs/RunServiceApi.md#get_run) | **GET** /apis/v1beta1/runs/{run_id} | Finds a specific run by ID. +*RunServiceApi* | [**list_runs**](docs/RunServiceApi.md#list_runs) | **GET** /apis/v1beta1/runs | Finds all runs. +*RunServiceApi* | [**read_artifact**](docs/RunServiceApi.md#read_artifact) | **GET** /apis/v1beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read | Finds a run's artifact data. +*RunServiceApi* | [**report_run_metrics**](docs/RunServiceApi.md#report_run_metrics) | **POST** /apis/v1beta1/runs/{run_id}:reportMetrics | ReportRunMetrics reports metrics of a run. Each metric is reported in its own transaction, so this API accepts partial failures. Metric can be uniquely identified by (run_id, node_id, name). Duplicate reporting will be ignored by the API. First reporting wins. +*RunServiceApi* | [**retry_run**](docs/RunServiceApi.md#retry_run) | **POST** /apis/v1beta1/runs/{run_id}/retry | Re-initiates a failed or terminated run. +*RunServiceApi* | [**terminate_run**](docs/RunServiceApi.md#terminate_run) | **POST** /apis/v1beta1/runs/{run_id}/terminate | Terminates an active run. +*RunServiceApi* | [**unarchive_run**](docs/RunServiceApi.md#unarchive_run) | **POST** /apis/v1beta1/runs/{id}:unarchive | Restores an archived run. 
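+
+All of the list endpoints above (for example `list_experiment`, `list_jobs`, `list_pipelines`, `list_pipeline_versions`, and `list_runs`) are paginated: each response carries a `next_page_token` that can be passed back as `page_token` to fetch the following page. The snippet below is a minimal sketch, not part of the generated client, of paging through all experiments with `ExperimentServiceApi.list_experiment`; the host and page size are placeholder values and authentication is omitted.
+
+```python
+import kfp_server_api
+
+configuration = kfp_server_api.Configuration(host="http://localhost")
+
+with kfp_server_api.ApiClient(configuration) as api_client:
+    experiment_api = kfp_server_api.ExperimentServiceApi(api_client)
+
+    experiments = []
+    # The page token may be omitted when fetching the first page.
+    response = experiment_api.list_experiment(page_size=20)
+    experiments.extend(response.experiments or [])
+    # Keep following next_page_token; an empty token is assumed to mean the last page.
+    while response.next_page_token:
+        response = experiment_api.list_experiment(
+            page_size=20, page_token=response.next_page_token)
+        experiments.extend(response.experiments or [])
+
+    print("Fetched %d experiments in total." % len(experiments))
+```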
+ + +## Documentation For Models + + - [ApiCronSchedule](docs/ApiCronSchedule.md) + - [ApiExperiment](docs/ApiExperiment.md) + - [ApiGetTemplateResponse](docs/ApiGetTemplateResponse.md) + - [ApiJob](docs/ApiJob.md) + - [ApiListExperimentsResponse](docs/ApiListExperimentsResponse.md) + - [ApiListJobsResponse](docs/ApiListJobsResponse.md) + - [ApiListPipelineVersionsResponse](docs/ApiListPipelineVersionsResponse.md) + - [ApiListPipelinesResponse](docs/ApiListPipelinesResponse.md) + - [ApiListRunsResponse](docs/ApiListRunsResponse.md) + - [ApiParameter](docs/ApiParameter.md) + - [ApiPeriodicSchedule](docs/ApiPeriodicSchedule.md) + - [ApiPipeline](docs/ApiPipeline.md) + - [ApiPipelineRuntime](docs/ApiPipelineRuntime.md) + - [ApiPipelineSpec](docs/ApiPipelineSpec.md) + - [ApiPipelineVersion](docs/ApiPipelineVersion.md) + - [ApiReadArtifactResponse](docs/ApiReadArtifactResponse.md) + - [ApiRelationship](docs/ApiRelationship.md) + - [ApiReportRunMetricsRequest](docs/ApiReportRunMetricsRequest.md) + - [ApiReportRunMetricsResponse](docs/ApiReportRunMetricsResponse.md) + - [ApiResourceKey](docs/ApiResourceKey.md) + - [ApiResourceReference](docs/ApiResourceReference.md) + - [ApiResourceType](docs/ApiResourceType.md) + - [ApiRun](docs/ApiRun.md) + - [ApiRunDetail](docs/ApiRunDetail.md) + - [ApiRunMetric](docs/ApiRunMetric.md) + - [ApiStatus](docs/ApiStatus.md) + - [ApiTrigger](docs/ApiTrigger.md) + - [ApiUrl](docs/ApiUrl.md) + - [ExperimentStorageState](docs/ExperimentStorageState.md) + - [JobMode](docs/JobMode.md) + - [ProtobufAny](docs/ProtobufAny.md) + - [ReportRunMetricsResponseReportRunMetricResult](docs/ReportRunMetricsResponseReportRunMetricResult.md) + - [ReportRunMetricsResponseReportRunMetricResultStatus](docs/ReportRunMetricsResponseReportRunMetricResultStatus.md) + - [RunMetricFormat](docs/RunMetricFormat.md) + - [RunStorageState](docs/RunStorageState.md) + + +## Documentation For Authorization + + +## Bearer + +- **Type**: API key +- **API key parameter name**: authorization +- **Location**: HTTP header + + +## Author + +kubeflow-pipelines@google.com + + diff --git a/backend/api/python_http_client/docs/ApiCronSchedule.md b/backend/api/python_http_client/docs/ApiCronSchedule.md new file mode 100644 index 000000000..140139b47 --- /dev/null +++ b/backend/api/python_http_client/docs/ApiCronSchedule.md @@ -0,0 +1,12 @@ +# ApiCronSchedule + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**start_time** | **datetime** | | [optional] +**end_time** | **datetime** | | [optional] +**cron** | **str** | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/ApiExperiment.md b/backend/api/python_http_client/docs/ApiExperiment.md new file mode 100644 index 000000000..c481c73cc --- /dev/null +++ b/backend/api/python_http_client/docs/ApiExperiment.md @@ -0,0 +1,15 @@ +# ApiExperiment + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Output. Unique experiment ID. Generated by API server. | [optional] +**name** | **str** | Required input field. Unique experiment name provided by user. | [optional] +**description** | **str** | | [optional] +**created_at** | **datetime** | Output. The time that the experiment created. 
| [optional] +**resource_references** | [**list[ApiResourceReference]**](ApiResourceReference.md) | Optional input field. Specify which resource this run belongs to. For Experiment, the only valid resource reference is a single Namespace. | [optional] +**storage_state** | [**ExperimentStorageState**](ExperimentStorageState.md) | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/ApiGetTemplateResponse.md b/backend/api/python_http_client/docs/ApiGetTemplateResponse.md new file mode 100644 index 000000000..478c48166 --- /dev/null +++ b/backend/api/python_http_client/docs/ApiGetTemplateResponse.md @@ -0,0 +1,10 @@ +# ApiGetTemplateResponse + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**template** | **str** | The template of the pipeline specified in a GetTemplate request, or of a pipeline version specified in a GetPipelinesVersionTemplate request. | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/ApiJob.md b/backend/api/python_http_client/docs/ApiJob.md new file mode 100644 index 000000000..6f4b0cbd4 --- /dev/null +++ b/backend/api/python_http_client/docs/ApiJob.md @@ -0,0 +1,24 @@ +# ApiJob + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Output. Unique run ID. Generated by API server. | [optional] +**name** | **str** | Required input field. Job name provided by user. Not unique. | [optional] +**description** | **str** | | [optional] +**pipeline_spec** | [**ApiPipelineSpec**](ApiPipelineSpec.md) | | [optional] +**resource_references** | [**list[ApiResourceReference]**](ApiResourceReference.md) | Optional input field. Specify which resource this job belongs to. | [optional] +**service_account** | **str** | Optional input field. Specify which Kubernetes service account this job uses. | [optional] +**max_concurrency** | **str** | | [optional] +**trigger** | [**ApiTrigger**](ApiTrigger.md) | | [optional] +**mode** | [**JobMode**](JobMode.md) | | [optional] +**created_at** | **datetime** | Output. The time this job is created. | [optional] +**updated_at** | **datetime** | Output. The last time this job is updated. | [optional] +**status** | **str** | | [optional] +**error** | **str** | In case any error happens retrieving a job field, only job ID and the error message is returned. Client has the flexibility of choosing how to handle error. This is especially useful during listing call. | [optional] +**enabled** | **bool** | Input. Whether the job is enabled or not. | [optional] +**no_catchup** | **bool** | Optional input field. Whether the job should catch up if behind schedule. If true, the job will only schedule the latest interval if behind schedule. If false, the job will catch up on each past interval. 
| [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/ApiListExperimentsResponse.md b/backend/api/python_http_client/docs/ApiListExperimentsResponse.md new file mode 100644 index 000000000..0f6529e36 --- /dev/null +++ b/backend/api/python_http_client/docs/ApiListExperimentsResponse.md @@ -0,0 +1,12 @@ +# ApiListExperimentsResponse + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**experiments** | [**list[ApiExperiment]**](ApiExperiment.md) | A list of experiments returned. | [optional] +**total_size** | **int** | The total number of experiments for the given query. | [optional] +**next_page_token** | **str** | The token to list the next page of experiments. | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/ApiListJobsResponse.md b/backend/api/python_http_client/docs/ApiListJobsResponse.md new file mode 100644 index 000000000..8a94f7cc0 --- /dev/null +++ b/backend/api/python_http_client/docs/ApiListJobsResponse.md @@ -0,0 +1,12 @@ +# ApiListJobsResponse + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**jobs** | [**list[ApiJob]**](ApiJob.md) | A list of jobs returned. | [optional] +**total_size** | **int** | The total number of jobs for the given query. | [optional] +**next_page_token** | **str** | The token to list the next page of jobs. | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/ApiListPipelineVersionsResponse.md b/backend/api/python_http_client/docs/ApiListPipelineVersionsResponse.md new file mode 100644 index 000000000..9ab2eacb2 --- /dev/null +++ b/backend/api/python_http_client/docs/ApiListPipelineVersionsResponse.md @@ -0,0 +1,12 @@ +# ApiListPipelineVersionsResponse + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**versions** | [**list[ApiPipelineVersion]**](ApiPipelineVersion.md) | | [optional] +**next_page_token** | **str** | The token to list the next page of pipeline versions. | [optional] +**total_size** | **int** | The total number of pipeline versions for the given query. | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/ApiListPipelinesResponse.md b/backend/api/python_http_client/docs/ApiListPipelinesResponse.md new file mode 100644 index 000000000..cb9d319b7 --- /dev/null +++ b/backend/api/python_http_client/docs/ApiListPipelinesResponse.md @@ -0,0 +1,12 @@ +# ApiListPipelinesResponse + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**pipelines** | [**list[ApiPipeline]**](ApiPipeline.md) | | [optional] +**total_size** | **int** | The total number of pipelines for the given query. | [optional] +**next_page_token** | **str** | The token to list the next page of pipelines. 
| [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/ApiListRunsResponse.md b/backend/api/python_http_client/docs/ApiListRunsResponse.md new file mode 100644 index 000000000..4b93b22d1 --- /dev/null +++ b/backend/api/python_http_client/docs/ApiListRunsResponse.md @@ -0,0 +1,12 @@ +# ApiListRunsResponse + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**runs** | [**list[ApiRun]**](ApiRun.md) | | [optional] +**total_size** | **int** | The total number of runs for the given query. | [optional] +**next_page_token** | **str** | The token to list the next page of runs. | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/ApiParameter.md b/backend/api/python_http_client/docs/ApiParameter.md new file mode 100644 index 000000000..c13ddecea --- /dev/null +++ b/backend/api/python_http_client/docs/ApiParameter.md @@ -0,0 +1,11 @@ +# ApiParameter + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | | [optional] +**value** | **str** | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/ApiPeriodicSchedule.md b/backend/api/python_http_client/docs/ApiPeriodicSchedule.md new file mode 100644 index 000000000..0c382cbd8 --- /dev/null +++ b/backend/api/python_http_client/docs/ApiPeriodicSchedule.md @@ -0,0 +1,12 @@ +# ApiPeriodicSchedule + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**start_time** | **datetime** | | [optional] +**end_time** | **datetime** | | [optional] +**interval_second** | **str** | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/ApiPipeline.md b/backend/api/python_http_client/docs/ApiPipeline.md new file mode 100644 index 000000000..00495bcaf --- /dev/null +++ b/backend/api/python_http_client/docs/ApiPipeline.md @@ -0,0 +1,17 @@ +# ApiPipeline + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Output. Unique pipeline ID. Generated by API server. | [optional] +**created_at** | **datetime** | Output. The time this pipeline is created. | [optional] +**name** | **str** | Optional input field. Pipeline name provided by user. If not specified, file name is used as pipeline name. | [optional] +**description** | **str** | Optional input field. Describing the purpose of the job. | [optional] +**parameters** | [**list[ApiParameter]**](ApiParameter.md) | Output. The input parameters for this pipeline. TODO(jingzhang36): replace this parameters field with the parameters field inside PipelineVersion when all usage of the former has been changed to use the latter. 
| [optional]
+**url** | [**ApiUrl**](ApiUrl.md) |  | [optional] 
+**error** | **str** | In case any error happens retrieving a pipeline field, only pipeline ID and the error message is returned. Client has the flexibility of choosing how to handle error. This is especially useful during listing call. | [optional] 
+**default_version** | [**ApiPipelineVersion**](ApiPipelineVersion.md) |  | [optional] 
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/backend/api/python_http_client/docs/ApiPipelineRuntime.md b/backend/api/python_http_client/docs/ApiPipelineRuntime.md
new file mode 100644
index 000000000..c32e26cee
--- /dev/null
+++ b/backend/api/python_http_client/docs/ApiPipelineRuntime.md
@@ -0,0 +1,11 @@
+# ApiPipelineRuntime
+
+## Properties
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**pipeline_manifest** | **str** | Output. The runtime JSON manifest of the pipeline, including the status of pipeline steps and the fields needed for UI visualization, etc. | [optional] 
+**workflow_manifest** | **str** | Output. The runtime JSON manifest of the argo workflow. This is deprecated after pipeline_runtime_manifest is in use. | [optional] 
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/backend/api/python_http_client/docs/ApiPipelineSpec.md b/backend/api/python_http_client/docs/ApiPipelineSpec.md
new file mode 100644
index 000000000..b0e2f2c4d
--- /dev/null
+++ b/backend/api/python_http_client/docs/ApiPipelineSpec.md
@@ -0,0 +1,14 @@
+# ApiPipelineSpec
+
+## Properties
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**pipeline_id** | **str** | Optional input field. The ID of the pipeline that the user uploaded before. | [optional] 
+**pipeline_name** | **str** | Optional output field. The name of the pipeline. Not empty if the pipeline id is not empty. | [optional] 
+**workflow_manifest** | **str** | Optional input field. The marshalled raw argo JSON workflow. This will be deprecated when pipeline_manifest is in use. | [optional] 
+**pipeline_manifest** | **str** | Optional input field. The raw pipeline JSON spec. | [optional] 
+**parameters** | [**list[ApiParameter]**](ApiParameter.md) | The parameters the user provides to inject into the pipeline JSON. If a default value of a parameter exists in the JSON, the value provided here will replace it. | [optional] 
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/backend/api/python_http_client/docs/ApiPipelineVersion.md b/backend/api/python_http_client/docs/ApiPipelineVersion.md
new file mode 100644
index 000000000..fea8886aa
--- /dev/null
+++ b/backend/api/python_http_client/docs/ApiPipelineVersion.md
@@ -0,0 +1,16 @@
+# ApiPipelineVersion
+
+## Properties
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**id** | **str** | Output. Unique version ID. Generated by API server. | [optional] 
+**name** | **str** | Optional input field. Version name provided by user. | [optional] 
+**created_at** | **datetime** | Output. The time this pipeline version is created. | [optional] 
+**parameters** | [**list[ApiParameter]**](ApiParameter.md) | Output.
The input parameters for this pipeline. | [optional] +**code_source_url** | **str** | Input. Optional. Pipeline version code source. | [optional] +**package_url** | [**ApiUrl**](ApiUrl.md) | | [optional] +**resource_references** | [**list[ApiResourceReference]**](ApiResourceReference.md) | Input. Required. E.g., specify which pipeline this pipeline version belongs to. | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/ApiReadArtifactResponse.md b/backend/api/python_http_client/docs/ApiReadArtifactResponse.md new file mode 100644 index 000000000..f2e187c4b --- /dev/null +++ b/backend/api/python_http_client/docs/ApiReadArtifactResponse.md @@ -0,0 +1,10 @@ +# ApiReadArtifactResponse + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**data** | **str** | The bytes of the artifact content. | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/ApiRelationship.md b/backend/api/python_http_client/docs/ApiRelationship.md new file mode 100644 index 000000000..3e6ef6777 --- /dev/null +++ b/backend/api/python_http_client/docs/ApiRelationship.md @@ -0,0 +1,9 @@ +# ApiRelationship + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/ApiReportRunMetricsRequest.md b/backend/api/python_http_client/docs/ApiReportRunMetricsRequest.md new file mode 100644 index 000000000..2a7027eba --- /dev/null +++ b/backend/api/python_http_client/docs/ApiReportRunMetricsRequest.md @@ -0,0 +1,11 @@ +# ApiReportRunMetricsRequest + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**run_id** | **str** | Required. The parent run ID of the metric. | [optional] +**metrics** | [**list[ApiRunMetric]**](ApiRunMetric.md) | List of metrics to report. 
| [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/ApiReportRunMetricsResponse.md b/backend/api/python_http_client/docs/ApiReportRunMetricsResponse.md new file mode 100644 index 000000000..b1b7e188b --- /dev/null +++ b/backend/api/python_http_client/docs/ApiReportRunMetricsResponse.md @@ -0,0 +1,10 @@ +# ApiReportRunMetricsResponse + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**results** | [**list[ReportRunMetricsResponseReportRunMetricResult]**](ReportRunMetricsResponseReportRunMetricResult.md) | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/ApiResourceKey.md b/backend/api/python_http_client/docs/ApiResourceKey.md new file mode 100644 index 000000000..f9ea3dd7b --- /dev/null +++ b/backend/api/python_http_client/docs/ApiResourceKey.md @@ -0,0 +1,11 @@ +# ApiResourceKey + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**type** | [**ApiResourceType**](ApiResourceType.md) | | [optional] +**id** | **str** | The ID of the resource that referred to. | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/ApiResourceReference.md b/backend/api/python_http_client/docs/ApiResourceReference.md new file mode 100644 index 000000000..acbb24ed2 --- /dev/null +++ b/backend/api/python_http_client/docs/ApiResourceReference.md @@ -0,0 +1,12 @@ +# ApiResourceReference + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**key** | [**ApiResourceKey**](ApiResourceKey.md) | | [optional] +**name** | **str** | The name of the resource that referred to. | [optional] +**relationship** | [**ApiRelationship**](ApiRelationship.md) | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/ApiResourceType.md b/backend/api/python_http_client/docs/ApiResourceType.md new file mode 100644 index 000000000..0d3f76887 --- /dev/null +++ b/backend/api/python_http_client/docs/ApiResourceType.md @@ -0,0 +1,9 @@ +# ApiResourceType + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/ApiRun.md b/backend/api/python_http_client/docs/ApiRun.md new file mode 100644 index 000000000..adef1ebfd --- /dev/null +++ b/backend/api/python_http_client/docs/ApiRun.md @@ -0,0 +1,22 @@ +# ApiRun + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Output. Unique run ID. Generated by API server. | [optional] +**name** | **str** | Required input field. 
Name provided by user, or auto-generated if the run is created by a scheduled job. Not unique. | [optional] 
+**storage_state** | [**RunStorageState**](RunStorageState.md) |  | [optional] 
+**description** | **str** |  | [optional] 
+**pipeline_spec** | [**ApiPipelineSpec**](ApiPipelineSpec.md) |  | [optional] 
+**resource_references** | [**list[ApiResourceReference]**](ApiResourceReference.md) | Optional input field. Specify which resource this run belongs to. When creating a run from a particular pipeline version, the pipeline version can be specified here. | [optional] 
+**service_account** | **str** | Optional input field. Specify which Kubernetes service account this run uses. | [optional] 
+**created_at** | **datetime** | Output. The time that the run was created. | [optional] 
+**scheduled_at** | **datetime** | Output. When this run is scheduled to run. This could be different from created_at. For example, if a run is from a backfilling job that was supposed to run 2 months ago, the scheduled_at is 2 months ago, whereas created_at is the current time. | [optional] 
+**finished_at** | **datetime** | Output. The time this run is finished. | [optional] 
+**status** | **str** |  | [optional] 
+**error** | **str** | In case any error happens retrieving a run field, only run ID and the error message is returned. Client has the flexibility of choosing how to handle error. This is especially useful during listing call. | [optional] 
+**metrics** | [**list[ApiRunMetric]**](ApiRunMetric.md) | Output. The metrics of the run. The metrics are reported by ReportMetrics API. | [optional] 
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/backend/api/python_http_client/docs/ApiRunDetail.md b/backend/api/python_http_client/docs/ApiRunDetail.md
new file mode 100644
index 000000000..2d4be03b1
--- /dev/null
+++ b/backend/api/python_http_client/docs/ApiRunDetail.md
@@ -0,0 +1,11 @@
+# ApiRunDetail
+
+## Properties
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**run** | [**ApiRun**](ApiRun.md) |  | [optional] 
+**pipeline_runtime** | [**ApiPipelineRuntime**](ApiPipelineRuntime.md) |  | [optional] 
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/backend/api/python_http_client/docs/ApiRunMetric.md b/backend/api/python_http_client/docs/ApiRunMetric.md
new file mode 100644
index 000000000..03f6e5e1a
--- /dev/null
+++ b/backend/api/python_http_client/docs/ApiRunMetric.md
@@ -0,0 +1,13 @@
+# ApiRunMetric
+
+## Properties
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**name** | **str** | Required. The user defined name of the metric. It must be between 1 and 63 characters long and must conform to the following regular expression: `[a-z]([-a-z0-9]*[a-z0-9])?`. | [optional] 
+**node_id** | **str** | Required. The runtime node ID which reports the metric. The node ID can be found in the RunDetail.workflow.Status. Metrics with the same (node_id, name) are considered duplicates. Only the first reporting will be recorded. Max length is 128. | [optional] 
+**number_value** | **float** | The number value of the metric.
| [optional] +**format** | [**RunMetricFormat**](RunMetricFormat.md) | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/ApiStatus.md b/backend/api/python_http_client/docs/ApiStatus.md new file mode 100644 index 000000000..6a3feb57f --- /dev/null +++ b/backend/api/python_http_client/docs/ApiStatus.md @@ -0,0 +1,12 @@ +# ApiStatus + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**error** | **str** | | [optional] +**code** | **int** | | [optional] +**details** | [**list[ProtobufAny]**](ProtobufAny.md) | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/ApiTrigger.md b/backend/api/python_http_client/docs/ApiTrigger.md new file mode 100644 index 000000000..ff6b3c271 --- /dev/null +++ b/backend/api/python_http_client/docs/ApiTrigger.md @@ -0,0 +1,12 @@ +# ApiTrigger + +Trigger defines what starts a pipeline run. +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**cron_schedule** | [**ApiCronSchedule**](ApiCronSchedule.md) | | [optional] +**periodic_schedule** | [**ApiPeriodicSchedule**](ApiPeriodicSchedule.md) | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/ApiUrl.md b/backend/api/python_http_client/docs/ApiUrl.md new file mode 100644 index 000000000..3b112d2b4 --- /dev/null +++ b/backend/api/python_http_client/docs/ApiUrl.md @@ -0,0 +1,10 @@ +# ApiUrl + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**pipeline_url** | **str** | URL of the pipeline definition or the pipeline version definition. | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/ExperimentServiceApi.md b/backend/api/python_http_client/docs/ExperimentServiceApi.md new file mode 100644 index 000000000..d1de9b8f5 --- /dev/null +++ b/backend/api/python_http_client/docs/ExperimentServiceApi.md @@ -0,0 +1,480 @@ +# kfp_server_api.ExperimentServiceApi + +All URIs are relative to *http://localhost* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**archive_experiment**](ExperimentServiceApi.md#archive_experiment) | **POST** /apis/v1beta1/experiments/{id}:archive | Archives an experiment and the experiment's runs and jobs. +[**create_experiment**](ExperimentServiceApi.md#create_experiment) | **POST** /apis/v1beta1/experiments | Creates a new experiment. +[**delete_experiment**](ExperimentServiceApi.md#delete_experiment) | **DELETE** /apis/v1beta1/experiments/{id} | Deletes an experiment without deleting the experiment's runs and jobs. To avoid unexpected behaviors, delete an experiment's runs and jobs before deleting the experiment. +[**get_experiment**](ExperimentServiceApi.md#get_experiment) | **GET** /apis/v1beta1/experiments/{id} | Finds a specific experiment by ID. 
+[**list_experiment**](ExperimentServiceApi.md#list_experiment) | **GET** /apis/v1beta1/experiments | Finds all experiments. Supports pagination, and sorting on certain fields. +[**unarchive_experiment**](ExperimentServiceApi.md#unarchive_experiment) | **POST** /apis/v1beta1/experiments/{id}:unarchive | Restores an archived experiment. The experiment's archived runs and jobs will stay archived. + + +# **archive_experiment** +> object archive_experiment(id) + +Archives an experiment and the experiment's runs and jobs. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.ExperimentServiceApi(api_client) + id = 'id_example' # str | The ID of the experiment to be archived. + + try: + # Archives an experiment and the experiment's runs and jobs. + api_response = api_instance.archive_experiment(id) + pprint(api_response) + except ApiException as e: + print("Exception when calling ExperimentServiceApi->archive_experiment: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **id** | **str**| The ID of the experiment to be archived. | + +### Return type + +**object** + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **create_experiment** +> ApiExperiment create_experiment(body) + +Creates a new experiment. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. 
+# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.ExperimentServiceApi(api_client) + body = kfp_server_api.ApiExperiment() # ApiExperiment | The experiment to be created. + + try: + # Creates a new experiment. + api_response = api_instance.create_experiment(body) + pprint(api_response) + except ApiException as e: + print("Exception when calling ExperimentServiceApi->create_experiment: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **body** | [**ApiExperiment**](ApiExperiment.md)| The experiment to be created. | + +### Return type + +[**ApiExperiment**](ApiExperiment.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **delete_experiment** +> object delete_experiment(id) + +Deletes an experiment without deleting the experiment's runs and jobs. To avoid unexpected behaviors, delete an experiment's runs and jobs before deleting the experiment. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.ExperimentServiceApi(api_client) + id = 'id_example' # str | The ID of the experiment to be deleted. + + try: + # Deletes an experiment without deleting the experiment's runs and jobs. To avoid unexpected behaviors, delete an experiment's runs and jobs before deleting the experiment. 
+ api_response = api_instance.delete_experiment(id) + pprint(api_response) + except ApiException as e: + print("Exception when calling ExperimentServiceApi->delete_experiment: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **id** | **str**| The ID of the experiment to be deleted. | + +### Return type + +**object** + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_experiment** +> ApiExperiment get_experiment(id) + +Finds a specific experiment by ID. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.ExperimentServiceApi(api_client) + id = 'id_example' # str | The ID of the experiment to be retrieved. + + try: + # Finds a specific experiment by ID. + api_response = api_instance.get_experiment(id) + pprint(api_response) + except ApiException as e: + print("Exception when calling ExperimentServiceApi->get_experiment: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **id** | **str**| The ID of the experiment to be retrieved. | + +### Return type + +[**ApiExperiment**](ApiExperiment.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. 
| - |
+**0** | | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **list_experiment**
+> ApiListExperimentsResponse list_experiment(page_token=page_token, page_size=page_size, sort_by=sort_by, filter=filter, resource_reference_key_type=resource_reference_key_type, resource_reference_key_id=resource_reference_key_id)
+
+Finds all experiments. Supports pagination and sorting on certain fields.
+
+### Example
+
+* Api Key Authentication (Bearer):
+```python
+from __future__ import print_function
+import time
+import kfp_server_api
+from kfp_server_api.rest import ApiException
+from pprint import pprint
+# Defining the host is optional and defaults to http://localhost
+# See configuration.py for a list of all supported configuration parameters.
+configuration = kfp_server_api.Configuration(
+    host = "http://localhost"
+)
+
+# The client must configure the authentication and authorization parameters
+# in accordance with the API server security policy.
+# Examples for each auth method are provided below, use the example that
+# satisfies your auth use case.
+
+# Configure API key authorization: Bearer
+configuration = kfp_server_api.Configuration(
+    host = "http://localhost",
+    api_key = {
+        'authorization': 'YOUR_API_KEY'
+    }
+)
+# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
+# configuration.api_key_prefix['authorization'] = 'Bearer'
+
+# Enter a context with an instance of the API client
+with kfp_server_api.ApiClient(configuration) as api_client:
+    # Create an instance of the API class
+    api_instance = kfp_server_api.ExperimentServiceApi(api_client)
+    page_token = 'page_token_example' # str | A page token to request the next page of results. The token is acquired from the nextPageToken field of the response from the previous ListExperiment call or can be omitted when fetching the first page. (optional)
+    page_size = 56 # int | The number of experiments to be listed per page. If there are more experiments than this number, the response message will contain a nextPageToken field you can use to fetch the next page. (optional)
+    sort_by = 'sort_by_example' # str | Can be in the form of \"field_name\", \"field_name asc\" or \"field_name desc\". Ascending by default. (optional)
+    filter = 'filter_example' # str | A url-encoded, JSON-serialized Filter protocol buffer (see [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). (optional)
+    resource_reference_key_type = 'UNKNOWN_RESOURCE_TYPE' # str | The type of the resource that is referred to. (optional) (default to 'UNKNOWN_RESOURCE_TYPE')
+    resource_reference_key_id = 'resource_reference_key_id_example' # str | The ID of the resource that is referred to. (optional)
+
+    try:
+        # Finds all experiments. Supports pagination and sorting on certain fields.
+        api_response = api_instance.list_experiment(page_token=page_token, page_size=page_size, sort_by=sort_by, filter=filter, resource_reference_key_type=resource_reference_key_type, resource_reference_key_id=resource_reference_key_id)
+        pprint(api_response)
+    except ApiException as e:
+        print("Exception when calling ExperimentServiceApi->list_experiment: %s\n" % e)
+```
+
+### Parameters
+
+Name | Type | Description | Notes
+------------- | ------------- | ------------- | -------------
+ **page_token** | **str**| A page token to request the next page of results. The token is acquired from the nextPageToken field of the response from the previous ListExperiment call or can be omitted when fetching the first page. | [optional]
+ **page_size** | **int**| The number of experiments to be listed per page. If there are more experiments than this number, the response message will contain a nextPageToken field you can use to fetch the next page. | [optional]
+ **sort_by** | **str**| Can be in the form of \"field_name\", \"field_name asc\" or \"field_name desc\". Ascending by default. | [optional]
+ **filter** | **str**| A url-encoded, JSON-serialized Filter protocol buffer (see [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). | [optional]
+ **resource_reference_key_type** | **str**| The type of the resource that is referred to. | [optional] [default to 'UNKNOWN_RESOURCE_TYPE']
+ **resource_reference_key_id** | **str**| The ID of the resource that is referred to. | [optional]
+
+### Return type
+
+[**ApiListExperimentsResponse**](ApiListExperimentsResponse.md)
+
+### Authorization
+
+[Bearer](../README.md#Bearer)
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | A successful response. | - |
+**0** | | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **unarchive_experiment**
+> object unarchive_experiment(id)
+
+Restores an archived experiment. The experiment's archived runs and jobs will stay archived.
+
+### Example
+
+* Api Key Authentication (Bearer):
+```python
+from __future__ import print_function
+import time
+import kfp_server_api
+from kfp_server_api.rest import ApiException
+from pprint import pprint
+# Defining the host is optional and defaults to http://localhost
+# See configuration.py for a list of all supported configuration parameters.
+configuration = kfp_server_api.Configuration(
+    host = "http://localhost"
+)
+
+# The client must configure the authentication and authorization parameters
+# in accordance with the API server security policy.
+# Examples for each auth method are provided below, use the example that
+# satisfies your auth use case.
+
+# Configure API key authorization: Bearer
+configuration = kfp_server_api.Configuration(
+    host = "http://localhost",
+    api_key = {
+        'authorization': 'YOUR_API_KEY'
+    }
+)
+# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
+# configuration.api_key_prefix['authorization'] = 'Bearer'
+
+# Enter a context with an instance of the API client
+with kfp_server_api.ApiClient(configuration) as api_client:
+    # Create an instance of the API class
+    api_instance = kfp_server_api.ExperimentServiceApi(api_client)
+    id = 'id_example' # str | The ID of the experiment to be restored.
+
+    try:
+        # Restores an archived experiment. The experiment's archived runs and jobs will stay archived.
+        api_response = api_instance.unarchive_experiment(id)
+        pprint(api_response)
+    except ApiException as e:
+        print("Exception when calling ExperimentServiceApi->unarchive_experiment: %s\n" % e)
+```
+
+### Parameters
+
+Name | Type | Description | Notes
+------------- | ------------- | ------------- | -------------
+ **id** | **str**| The ID of the experiment to be restored. 
| + +### Return type + +**object** + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/backend/api/python_http_client/docs/ExperimentStorageState.md b/backend/api/python_http_client/docs/ExperimentStorageState.md new file mode 100644 index 000000000..ffb7e705b --- /dev/null +++ b/backend/api/python_http_client/docs/ExperimentStorageState.md @@ -0,0 +1,9 @@ +# ExperimentStorageState + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/JobMode.md b/backend/api/python_http_client/docs/JobMode.md new file mode 100644 index 000000000..b2e238c19 --- /dev/null +++ b/backend/api/python_http_client/docs/JobMode.md @@ -0,0 +1,10 @@ +# JobMode + +Required input. - DISABLED: The job won't schedule any run if disabled. +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/JobServiceApi.md b/backend/api/python_http_client/docs/JobServiceApi.md new file mode 100644 index 000000000..761a38a49 --- /dev/null +++ b/backend/api/python_http_client/docs/JobServiceApi.md @@ -0,0 +1,480 @@ +# kfp_server_api.JobServiceApi + +All URIs are relative to *http://localhost* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**create_job**](JobServiceApi.md#create_job) | **POST** /apis/v1beta1/jobs | Creates a new job. +[**delete_job**](JobServiceApi.md#delete_job) | **DELETE** /apis/v1beta1/jobs/{id} | Deletes a job. +[**disable_job**](JobServiceApi.md#disable_job) | **POST** /apis/v1beta1/jobs/{id}/disable | Stops a job and all its associated runs. The job is not deleted. +[**enable_job**](JobServiceApi.md#enable_job) | **POST** /apis/v1beta1/jobs/{id}/enable | Restarts a job that was previously stopped. All runs associated with the job will continue. +[**get_job**](JobServiceApi.md#get_job) | **GET** /apis/v1beta1/jobs/{id} | Finds a specific job by ID. +[**list_jobs**](JobServiceApi.md#list_jobs) | **GET** /apis/v1beta1/jobs | Finds all jobs. + + +# **create_job** +> ApiJob create_job(body) + +Creates a new job. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. 
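+# (Hedged note, not part of the generated text: when this client runs inside the
+# same Kubernetes cluster as Kubeflow Pipelines, the host is typically the API
+# server service rather than localhost, for example
+# "http://ml-pipeline.kubeflow.svc.cluster.local:8888" -- adjust to your deployment.)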
+configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.JobServiceApi(api_client) + body = kfp_server_api.ApiJob() # ApiJob | The job to be created + + try: + # Creates a new job. + api_response = api_instance.create_job(body) + pprint(api_response) + except ApiException as e: + print("Exception when calling JobServiceApi->create_job: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **body** | [**ApiJob**](ApiJob.md)| The job to be created | + +### Return type + +[**ApiJob**](ApiJob.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **delete_job** +> object delete_job(id) + +Deletes a job. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.JobServiceApi(api_client) + id = 'id_example' # str | The ID of the job to be deleted + + try: + # Deletes a job. 
+ api_response = api_instance.delete_job(id) + pprint(api_response) + except ApiException as e: + print("Exception when calling JobServiceApi->delete_job: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **id** | **str**| The ID of the job to be deleted | + +### Return type + +**object** + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **disable_job** +> object disable_job(id) + +Stops a job and all its associated runs. The job is not deleted. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.JobServiceApi(api_client) + id = 'id_example' # str | The ID of the job to be disabled + + try: + # Stops a job and all its associated runs. The job is not deleted. + api_response = api_instance.disable_job(id) + pprint(api_response) + except ApiException as e: + print("Exception when calling JobServiceApi->disable_job: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **id** | **str**| The ID of the job to be disabled | + +### Return type + +**object** + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **enable_job** +> object enable_job(id) + +Restarts a job that was previously stopped. All runs associated with the job will continue. 
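+
+As a supplement to the generated example below, here is a minimal sketch of a disable/enable round trip. It assumes the API server is reachable at `http://localhost`, that any required auth has been configured as shown elsewhere in this document, and that the generated `ApiJob` model exposes an `enabled` flag (see [ApiJob.md](ApiJob.md)); the job ID is a placeholder.
+
+```python
+import kfp_server_api
+
+# Hypothetical placeholders -- substitute your own host, auth, and job ID.
+configuration = kfp_server_api.Configuration(host="http://localhost")
+job_id = "your-job-id"
+
+with kfp_server_api.ApiClient(configuration) as api_client:
+    jobs_api = kfp_server_api.JobServiceApi(api_client)
+    jobs_api.disable_job(job_id)              # pause the recurring job
+    jobs_api.enable_job(job_id)               # resume it again
+    refreshed = jobs_api.get_job(job_id)      # re-fetch to confirm the change
+    print("enabled:", getattr(refreshed, "enabled", None))
+```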
+ +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.JobServiceApi(api_client) + id = 'id_example' # str | The ID of the job to be enabled + + try: + # Restarts a job that was previously stopped. All runs associated with the job will continue. + api_response = api_instance.enable_job(id) + pprint(api_response) + except ApiException as e: + print("Exception when calling JobServiceApi->enable_job: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **id** | **str**| The ID of the job to be enabled | + +### Return type + +**object** + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_job** +> ApiJob get_job(id) + +Finds a specific job by ID. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed
+# configuration.api_key_prefix['authorization'] = 'Bearer'
+
+# Enter a context with an instance of the API client
+with kfp_server_api.ApiClient(configuration) as api_client:
+    # Create an instance of the API class
+    api_instance = kfp_server_api.JobServiceApi(api_client)
+    id = 'id_example' # str | The ID of the job to be retrieved
+
+    try:
+        # Finds a specific job by ID.
+        api_response = api_instance.get_job(id)
+        pprint(api_response)
+    except ApiException as e:
+        print("Exception when calling JobServiceApi->get_job: %s\n" % e)
+```
+
+### Parameters
+
+Name | Type | Description | Notes
+------------- | ------------- | ------------- | -------------
+ **id** | **str**| The ID of the job to be retrieved |
+
+### Return type
+
+[**ApiJob**](ApiJob.md)
+
+### Authorization
+
+[Bearer](../README.md#Bearer)
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | A successful response. | - |
+**0** | | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **list_jobs**
+> ApiListJobsResponse list_jobs(page_token=page_token, page_size=page_size, sort_by=sort_by, resource_reference_key_type=resource_reference_key_type, resource_reference_key_id=resource_reference_key_id, filter=filter)
+
+Finds all jobs.
+
+### Example
+
+* Api Key Authentication (Bearer):
+```python
+from __future__ import print_function
+import time
+import kfp_server_api
+from kfp_server_api.rest import ApiException
+from pprint import pprint
+# Defining the host is optional and defaults to http://localhost
+# See configuration.py for a list of all supported configuration parameters.
+configuration = kfp_server_api.Configuration(
+    host = "http://localhost"
+)
+
+# The client must configure the authentication and authorization parameters
+# in accordance with the API server security policy.
+# Examples for each auth method are provided below, use the example that
+# satisfies your auth use case.
+
+# Configure API key authorization: Bearer
+configuration = kfp_server_api.Configuration(
+    host = "http://localhost",
+    api_key = {
+        'authorization': 'YOUR_API_KEY'
+    }
+)
+# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
+# configuration.api_key_prefix['authorization'] = 'Bearer'
+
+# Enter a context with an instance of the API client
+with kfp_server_api.ApiClient(configuration) as api_client:
+    # Create an instance of the API class
+    api_instance = kfp_server_api.JobServiceApi(api_client)
+    page_token = 'page_token_example' # str | A page token to request the next page of results. The token is acquired from the nextPageToken field of the response from the previous ListJobs call or can be omitted when fetching the first page. (optional)
+    page_size = 56 # int | The number of jobs to be listed per page. If there are more jobs than this number, the response message will contain a nextPageToken field you can use to fetch the next page. (optional)
+    sort_by = 'sort_by_example' # str | Can be in the form of \"field_name\", \"field_name asc\" or \"field_name desc\". Ascending by default. (optional)
+    resource_reference_key_type = 'UNKNOWN_RESOURCE_TYPE' # str | The type of the resource that is referred to. (optional) (default to 'UNKNOWN_RESOURCE_TYPE')
+    resource_reference_key_id = 'resource_reference_key_id_example' # str | The ID of the resource that is referred to. (optional)
+    filter = 'filter_example' # str | A url-encoded, JSON-serialized Filter protocol buffer (see [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). (optional)
+
+    try:
+        # Finds all jobs.
+        api_response = api_instance.list_jobs(page_token=page_token, page_size=page_size, sort_by=sort_by, resource_reference_key_type=resource_reference_key_type, resource_reference_key_id=resource_reference_key_id, filter=filter)
+        pprint(api_response)
+    except ApiException as e:
+        print("Exception when calling JobServiceApi->list_jobs: %s\n" % e)
+```
+
+### Parameters
+
+Name | Type | Description | Notes
+------------- | ------------- | ------------- | -------------
+ **page_token** | **str**| A page token to request the next page of results. The token is acquired from the nextPageToken field of the response from the previous ListJobs call or can be omitted when fetching the first page. | [optional]
+ **page_size** | **int**| The number of jobs to be listed per page. If there are more jobs than this number, the response message will contain a nextPageToken field you can use to fetch the next page. | [optional]
+ **sort_by** | **str**| Can be in the form of \"field_name\", \"field_name asc\" or \"field_name desc\". Ascending by default. | [optional]
+ **resource_reference_key_type** | **str**| The type of the resource that is referred to. | [optional] [default to 'UNKNOWN_RESOURCE_TYPE']
+ **resource_reference_key_id** | **str**| The ID of the resource that is referred to. | [optional]
+ **filter** | **str**| A url-encoded, JSON-serialized Filter protocol buffer (see [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). | [optional]
+
+### Return type
+
+[**ApiListJobsResponse**](ApiListJobsResponse.md)
+
+### Authorization
+
+[Bearer](../README.md#Bearer)
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | A successful response. | - |
+**0** | | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
diff --git a/backend/api/python_http_client/docs/PipelineServiceApi.md b/backend/api/python_http_client/docs/PipelineServiceApi.md
new file mode 100644
index 000000000..f89fda29a
--- /dev/null
+++ b/backend/api/python_http_client/docs/PipelineServiceApi.md
@@ -0,0 +1,794 @@
+# kfp_server_api.PipelineServiceApi
+
+All URIs are relative to *http://localhost*
+
+Method | HTTP request | Description
+------------- | ------------- | -------------
+[**create_pipeline**](PipelineServiceApi.md#create_pipeline) | **POST** /apis/v1beta1/pipelines | Creates a pipeline.
+[**create_pipeline_version**](PipelineServiceApi.md#create_pipeline_version) | **POST** /apis/v1beta1/pipeline_versions | Adds a pipeline version to the specified pipeline.
+[**delete_pipeline**](PipelineServiceApi.md#delete_pipeline) | **DELETE** /apis/v1beta1/pipelines/{id} | Deletes a pipeline and its pipeline versions.
+[**delete_pipeline_version**](PipelineServiceApi.md#delete_pipeline_version) | **DELETE** /apis/v1beta1/pipeline_versions/{version_id} | Deletes a pipeline version by pipeline version ID. If the deleted pipeline version is the default pipeline version, the pipeline's default version changes to the pipeline's most recent pipeline version. If there are no remaining pipeline versions, the pipeline will have no default version. Examines the run_service_api.ipynb notebook to learn more about creating a run using a pipeline version (https://github.com/kubeflow/pipelines/blob/master/tools/benchmarks/run_service_api.ipynb). +[**get_pipeline**](PipelineServiceApi.md#get_pipeline) | **GET** /apis/v1beta1/pipelines/{id} | Finds a specific pipeline by ID. +[**get_pipeline_version**](PipelineServiceApi.md#get_pipeline_version) | **GET** /apis/v1beta1/pipeline_versions/{version_id} | Gets a pipeline version by pipeline version ID. +[**get_pipeline_version_template**](PipelineServiceApi.md#get_pipeline_version_template) | **GET** /apis/v1beta1/pipeline_versions/{version_id}/templates | Returns a YAML template that contains the specified pipeline version's description, parameters and metadata. +[**get_template**](PipelineServiceApi.md#get_template) | **GET** /apis/v1beta1/pipelines/{id}/templates | Returns a single YAML template that contains the description, parameters, and metadata associated with the pipeline provided. +[**list_pipeline_versions**](PipelineServiceApi.md#list_pipeline_versions) | **GET** /apis/v1beta1/pipeline_versions | Lists all pipeline versions of a given pipeline. +[**list_pipelines**](PipelineServiceApi.md#list_pipelines) | **GET** /apis/v1beta1/pipelines | Finds all pipelines. + + +# **create_pipeline** +> ApiPipeline create_pipeline(body) + +Creates a pipeline. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.PipelineServiceApi(api_client) + body = kfp_server_api.ApiPipeline() # ApiPipeline | + + try: + # Creates a pipeline. 
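+        # Illustrative only: the ApiPipeline body typically points at a pipeline
+        # package through its `url` field, e.g.
+        #   kfp_server_api.ApiPipeline(name='my-pipeline',
+        #       url=kfp_server_api.ApiUrl(pipeline_url='https://example.com/pipeline.yaml'))
+        # where the name and URL are placeholders for your own values.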
+ api_response = api_instance.create_pipeline(body) + pprint(api_response) + except ApiException as e: + print("Exception when calling PipelineServiceApi->create_pipeline: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **body** | [**ApiPipeline**](ApiPipeline.md)| | + +### Return type + +[**ApiPipeline**](ApiPipeline.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **create_pipeline_version** +> ApiPipelineVersion create_pipeline_version(body) + +Adds a pipeline version to the specified pipeline. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.PipelineServiceApi(api_client) + body = kfp_server_api.ApiPipelineVersion() # ApiPipelineVersion | ResourceReference inside PipelineVersion specifies the pipeline that this version belongs to. + + try: + # Adds a pipeline version to the specified pipeline. + api_response = api_instance.create_pipeline_version(body) + pprint(api_response) + except ApiException as e: + print("Exception when calling PipelineServiceApi->create_pipeline_version: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **body** | [**ApiPipelineVersion**](ApiPipelineVersion.md)| ResourceReference inside PipelineVersion specifies the pipeline that this version belongs to. | + +### Return type + +[**ApiPipelineVersion**](ApiPipelineVersion.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. 
| - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **delete_pipeline** +> object delete_pipeline(id) + +Deletes a pipeline and its pipeline versions. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.PipelineServiceApi(api_client) + id = 'id_example' # str | The ID of the pipeline to be deleted. + + try: + # Deletes a pipeline and its pipeline versions. + api_response = api_instance.delete_pipeline(id) + pprint(api_response) + except ApiException as e: + print("Exception when calling PipelineServiceApi->delete_pipeline: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **id** | **str**| The ID of the pipeline to be deleted. | + +### Return type + +**object** + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **delete_pipeline_version** +> object delete_pipeline_version(version_id) + +Deletes a pipeline version by pipeline version ID. If the deleted pipeline version is the default pipeline version, the pipeline's default version changes to the pipeline's most recent pipeline version. If there are no remaining pipeline versions, the pipeline will have no default version. Examines the run_service_api.ipynb notebook to learn more about creating a run using a pipeline version (https://github.com/kubeflow/pipelines/blob/master/tools/benchmarks/run_service_api.ipynb). + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. 
+configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.PipelineServiceApi(api_client) + version_id = 'version_id_example' # str | The ID of the pipeline version to be deleted. + + try: + # Deletes a pipeline version by pipeline version ID. If the deleted pipeline version is the default pipeline version, the pipeline's default version changes to the pipeline's most recent pipeline version. If there are no remaining pipeline versions, the pipeline will have no default version. Examines the run_service_api.ipynb notebook to learn more about creating a run using a pipeline version (https://github.com/kubeflow/pipelines/blob/master/tools/benchmarks/run_service_api.ipynb). + api_response = api_instance.delete_pipeline_version(version_id) + pprint(api_response) + except ApiException as e: + print("Exception when calling PipelineServiceApi->delete_pipeline_version: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **version_id** | **str**| The ID of the pipeline version to be deleted. | + +### Return type + +**object** + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_pipeline** +> ApiPipeline get_pipeline(id) + +Finds a specific pipeline by ID. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.PipelineServiceApi(api_client) + id = 'id_example' # str | The ID of the pipeline to be retrieved. + + try: + # Finds a specific pipeline by ID. + api_response = api_instance.get_pipeline(id) + pprint(api_response) + except ApiException as e: + print("Exception when calling PipelineServiceApi->get_pipeline: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **id** | **str**| The ID of the pipeline to be retrieved. | + +### Return type + +[**ApiPipeline**](ApiPipeline.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_pipeline_version** +> ApiPipelineVersion get_pipeline_version(version_id) + +Gets a pipeline version by pipeline version ID. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.PipelineServiceApi(api_client) + version_id = 'version_id_example' # str | The ID of the pipeline version to be retrieved. + + try: + # Gets a pipeline version by pipeline version ID. + api_response = api_instance.get_pipeline_version(version_id) + pprint(api_response) + except ApiException as e: + print("Exception when calling PipelineServiceApi->get_pipeline_version: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **version_id** | **str**| The ID of the pipeline version to be retrieved. 
| + +### Return type + +[**ApiPipelineVersion**](ApiPipelineVersion.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_pipeline_version_template** +> ApiGetTemplateResponse get_pipeline_version_template(version_id) + +Returns a YAML template that contains the specified pipeline version's description, parameters and metadata. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.PipelineServiceApi(api_client) + version_id = 'version_id_example' # str | The ID of the pipeline version whose template is to be retrieved. + + try: + # Returns a YAML template that contains the specified pipeline version's description, parameters and metadata. + api_response = api_instance.get_pipeline_version_template(version_id) + pprint(api_response) + except ApiException as e: + print("Exception when calling PipelineServiceApi->get_pipeline_version_template: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **version_id** | **str**| The ID of the pipeline version whose template is to be retrieved. | + +### Return type + +[**ApiGetTemplateResponse**](ApiGetTemplateResponse.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_template** +> ApiGetTemplateResponse get_template(id) + +Returns a single YAML template that contains the description, parameters, and metadata associated with the pipeline provided. 
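+
+As a supplement to the generated example below, the sketch here shows one way the returned template might be inspected. It assumes the response carries the raw workflow YAML in its `template` field (see [ApiGetTemplateResponse.md](ApiGetTemplateResponse.md)) and that PyYAML is installed separately; the host and pipeline ID are placeholders.
+
+```python
+import kfp_server_api
+import yaml  # PyYAML; not a dependency of this client, install it separately
+
+configuration = kfp_server_api.Configuration(host="http://localhost")
+pipeline_id = "your-pipeline-id"  # placeholder
+
+with kfp_server_api.ApiClient(configuration) as api_client:
+    pipelines_api = kfp_server_api.PipelineServiceApi(api_client)
+    response = pipelines_api.get_template(pipeline_id)
+    workflow = yaml.safe_load(response.template)   # parse the YAML text
+    print(sorted(workflow))                        # e.g. top-level workflow keys
+```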
+ +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.PipelineServiceApi(api_client) + id = 'id_example' # str | The ID of the pipeline whose template is to be retrieved. + + try: + # Returns a single YAML template that contains the description, parameters, and metadata associated with the pipeline provided. + api_response = api_instance.get_template(id) + pprint(api_response) + except ApiException as e: + print("Exception when calling PipelineServiceApi->get_template: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **id** | **str**| The ID of the pipeline whose template is to be retrieved. | + +### Return type + +[**ApiGetTemplateResponse**](ApiGetTemplateResponse.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **list_pipeline_versions** +> ApiListPipelineVersionsResponse list_pipeline_versions(resource_key_type=resource_key_type, resource_key_id=resource_key_id, page_size=page_size, page_token=page_token, sort_by=sort_by, filter=filter) + +Lists all pipeline versions of a given pipeline. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. 
+
+# Configure API key authorization: Bearer
+configuration = kfp_server_api.Configuration(
+    host = "http://localhost",
+    api_key = {
+        'authorization': 'YOUR_API_KEY'
+    }
+)
+# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
+# configuration.api_key_prefix['authorization'] = 'Bearer'
+
+# Enter a context with an instance of the API client
+with kfp_server_api.ApiClient(configuration) as api_client:
+    # Create an instance of the API class
+    api_instance = kfp_server_api.PipelineServiceApi(api_client)
+    resource_key_type = 'UNKNOWN_RESOURCE_TYPE' # str | The type of the resource that is referred to. (optional) (default to 'UNKNOWN_RESOURCE_TYPE')
+    resource_key_id = 'resource_key_id_example' # str | The ID of the resource that is referred to. (optional)
+    page_size = 56 # int | The number of pipeline versions to be listed per page. If there are more pipeline versions than this number, the response message will contain a nextPageToken field you can use to fetch the next page. (optional)
+    page_token = 'page_token_example' # str | A page token to request the next page of results. The token is acquired from the nextPageToken field of the response from the previous ListPipelineVersions call or can be omitted when fetching the first page. (optional)
+    sort_by = 'sort_by_example' # str | Can be in the form of \"field_name\", \"field_name asc\" or \"field_name desc\". Ascending by default. (optional)
+    filter = 'filter_example' # str | A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). (optional)
+
+    try:
+        # Lists all pipeline versions of a given pipeline.
+        api_response = api_instance.list_pipeline_versions(resource_key_type=resource_key_type, resource_key_id=resource_key_id, page_size=page_size, page_token=page_token, sort_by=sort_by, filter=filter)
+        pprint(api_response)
+    except ApiException as e:
+        print("Exception when calling PipelineServiceApi->list_pipeline_versions: %s\n" % e)
+```
+
+### Parameters
+
+Name | Type | Description | Notes
+------------- | ------------- | ------------- | -------------
+ **resource_key_type** | **str**| The type of the resource that is referred to. | [optional] [default to 'UNKNOWN_RESOURCE_TYPE']
+ **resource_key_id** | **str**| The ID of the resource that is referred to. | [optional]
+ **page_size** | **int**| The number of pipeline versions to be listed per page. If there are more pipeline versions than this number, the response message will contain a nextPageToken field you can use to fetch the next page. | [optional]
+ **page_token** | **str**| A page token to request the next page of results. The token is acquired from the nextPageToken field of the response from the previous ListPipelineVersions call or can be omitted when fetching the first page. | [optional]
+ **sort_by** | **str**| Can be in the form of \"field_name\", \"field_name asc\" or \"field_name desc\". Ascending by default. | [optional]
+ **filter** | **str**| A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). | [optional]
+
+### Return type
+
+[**ApiListPipelineVersionsResponse**](ApiListPipelineVersionsResponse.md)
+
+### Authorization
+
+[Bearer](../README.md#Bearer)
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | A successful response. | - |
+**0** | | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **list_pipelines**
+> ApiListPipelinesResponse list_pipelines(page_token=page_token, page_size=page_size, sort_by=sort_by, filter=filter)
+
+Finds all pipelines.
+
+### Example
+
+* Api Key Authentication (Bearer):
+```python
+from __future__ import print_function
+import time
+import kfp_server_api
+from kfp_server_api.rest import ApiException
+from pprint import pprint
+# Defining the host is optional and defaults to http://localhost
+# See configuration.py for a list of all supported configuration parameters.
+configuration = kfp_server_api.Configuration(
+    host = "http://localhost"
+)
+
+# The client must configure the authentication and authorization parameters
+# in accordance with the API server security policy.
+# Examples for each auth method are provided below, use the example that
+# satisfies your auth use case.
+
+# Configure API key authorization: Bearer
+configuration = kfp_server_api.Configuration(
+    host = "http://localhost",
+    api_key = {
+        'authorization': 'YOUR_API_KEY'
+    }
+)
+# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
+# configuration.api_key_prefix['authorization'] = 'Bearer'
+
+# Enter a context with an instance of the API client
+with kfp_server_api.ApiClient(configuration) as api_client:
+    # Create an instance of the API class
+    api_instance = kfp_server_api.PipelineServiceApi(api_client)
+    page_token = 'page_token_example' # str | A page token to request the next page of results. The token is acquired from the nextPageToken field of the response from the previous ListPipelines call. (optional)
+    page_size = 56 # int | The number of pipelines to be listed per page. If there are more pipelines than this number, the response message will contain a valid value in the nextPageToken field. (optional)
+    sort_by = 'sort_by_example' # str | Can be in the form of \"field_name\", \"field_name asc\" or \"field_name desc\". Ascending by default. (optional)
+    filter = 'filter_example' # str | A url-encoded, JSON-serialized Filter protocol buffer (see [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). (optional)
+
+    try:
+        # Finds all pipelines.
+        api_response = api_instance.list_pipelines(page_token=page_token, page_size=page_size, sort_by=sort_by, filter=filter)
+        pprint(api_response)
+    except ApiException as e:
+        print("Exception when calling PipelineServiceApi->list_pipelines: %s\n" % e)
+```
+
+### Parameters
+
+Name | Type | Description | Notes
+------------- | ------------- | ------------- | -------------
+ **page_token** | **str**| A page token to request the next page of results. The token is acquired from the nextPageToken field of the response from the previous ListPipelines call. | [optional]
+ **page_size** | **int**| The number of pipelines to be listed per page. If there are more pipelines than this number, the response message will contain a valid value in the nextPageToken field. | [optional]
+ **sort_by** | **str**| Can be in the form of \"field_name\", \"field_name asc\" or \"field_name desc\". Ascending by default. | [optional]
+ **filter** | **str**| A url-encoded, JSON-serialized Filter protocol buffer (see [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). 
| [optional] + +### Return type + +[**ApiListPipelinesResponse**](ApiListPipelinesResponse.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/backend/api/python_http_client/docs/PipelineUploadServiceApi.md b/backend/api/python_http_client/docs/PipelineUploadServiceApi.md new file mode 100644 index 000000000..634d39f91 --- /dev/null +++ b/backend/api/python_http_client/docs/PipelineUploadServiceApi.md @@ -0,0 +1,168 @@ +# kfp_server_api.PipelineUploadServiceApi + +All URIs are relative to *http://localhost* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**upload_pipeline**](PipelineUploadServiceApi.md#upload_pipeline) | **POST** /apis/v1beta1/pipelines/upload | +[**upload_pipeline_version**](PipelineUploadServiceApi.md#upload_pipeline_version) | **POST** /apis/v1beta1/pipelines/upload_version | + + +# **upload_pipeline** +> ApiPipeline upload_pipeline(uploadfile, name=name, description=description) + + + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.PipelineUploadServiceApi(api_client) + uploadfile = '/path/to/file' # file | The pipeline to upload. Maximum size of 32MB is supported. +name = 'name_example' # str | (optional) +description = 'description_example' # str | (optional) + + try: + api_response = api_instance.upload_pipeline(uploadfile, name=name, description=description) + pprint(api_response) + except ApiException as e: + print("Exception when calling PipelineUploadServiceApi->upload_pipeline: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **uploadfile** | **file**| The pipeline to upload. Maximum size of 32MB is supported. 
| + **name** | **str**| | [optional] + **description** | **str**| | [optional] + +### Return type + +[**ApiPipeline**](ApiPipeline.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: multipart/form-data + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **upload_pipeline_version** +> ApiPipelineVersion upload_pipeline_version(uploadfile, name=name, pipelineid=pipelineid) + + + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.PipelineUploadServiceApi(api_client) + uploadfile = '/path/to/file' # file | The pipeline to upload. Maximum size of 32MB is supported. +name = 'name_example' # str | (optional) +pipelineid = 'pipelineid_example' # str | (optional) + + try: + api_response = api_instance.upload_pipeline_version(uploadfile, name=name, pipelineid=pipelineid) + pprint(api_response) + except ApiException as e: + print("Exception when calling PipelineUploadServiceApi->upload_pipeline_version: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **uploadfile** | **file**| The pipeline to upload. Maximum size of 32MB is supported. 
| + **name** | **str**| | [optional] + **pipelineid** | **str**| | [optional] + +### Return type + +[**ApiPipelineVersion**](ApiPipelineVersion.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: multipart/form-data + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/backend/api/python_http_client/docs/ProtobufAny.md b/backend/api/python_http_client/docs/ProtobufAny.md new file mode 100644 index 000000000..463a60dcf --- /dev/null +++ b/backend/api/python_http_client/docs/ProtobufAny.md @@ -0,0 +1,12 @@ +# ProtobufAny + +`Any` contains an arbitrary serialized protocol buffer message along with a URL that describes the type of the serialized message. Protobuf library provides support to pack/unpack Any values in the form of utility functions or additional generated methods of the Any type. Example 1: Pack and unpack a message in C++. Foo foo = ...; Any any; any.PackFrom(foo); ... if (any.UnpackTo(&foo)) { ... } Example 2: Pack and unpack a message in Java. Foo foo = ...; Any any = Any.pack(foo); ... if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } Example 3: Pack and unpack a message in Python. foo = Foo(...) any = Any() any.Pack(foo) ... if any.Is(Foo.DESCRIPTOR): any.Unpack(foo) ... Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} any, err := ptypes.MarshalAny(foo) ... foo := &pb.Foo{} if err := ptypes.UnmarshalAny(any, foo); err != nil { ... } The pack methods provided by protobuf library will by default use 'type.googleapis.com/full.type.name' as the type URL and the unpack methods only use the fully qualified type name after the last '/' in the type URL, for example \"foo.bar.com/x/y.z\" will yield type name \"y.z\". JSON ==== The JSON representation of an `Any` value uses the regular representation of the deserialized, embedded message, with an additional field `@type` which contains the type URL. Example: package google.profile; message Person { string first_name = 1; string last_name = 2; } { \"@type\": \"type.googleapis.com/google.profile.Person\", \"firstName\": , \"lastName\": } If the embedded message type is well-known and has a custom JSON representation, that representation will be embedded adding a field `value` which holds the custom JSON in addition to the `@type` field. Example (for message [google.protobuf.Duration][]): { \"@type\": \"type.googleapis.com/google.protobuf.Duration\", \"value\": \"1.212s\" } +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**type_url** | **str** | A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. 
* An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. | [optional] +**value** | **str** | Must be a valid serialized protocol buffer of the above specified type. | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/ReportRunMetricsResponseReportRunMetricResult.md b/backend/api/python_http_client/docs/ReportRunMetricsResponseReportRunMetricResult.md new file mode 100644 index 000000000..cf376f687 --- /dev/null +++ b/backend/api/python_http_client/docs/ReportRunMetricsResponseReportRunMetricResult.md @@ -0,0 +1,13 @@ +# ReportRunMetricsResponseReportRunMetricResult + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**metric_name** | **str** | Output. The name of the metric. | [optional] +**metric_node_id** | **str** | Output. The ID of the node which reports the metric. | [optional] +**status** | [**ReportRunMetricsResponseReportRunMetricResultStatus**](ReportRunMetricsResponseReportRunMetricResultStatus.md) | | [optional] +**message** | **str** | Output. The detailed message of the error of the reporting. | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/ReportRunMetricsResponseReportRunMetricResultStatus.md b/backend/api/python_http_client/docs/ReportRunMetricsResponseReportRunMetricResultStatus.md new file mode 100644 index 000000000..843f3b818 --- /dev/null +++ b/backend/api/python_http_client/docs/ReportRunMetricsResponseReportRunMetricResultStatus.md @@ -0,0 +1,10 @@ +# ReportRunMetricsResponseReportRunMetricResultStatus + + - UNSPECIFIED: Default value if not present. - OK: Indicates successful reporting. - INVALID_ARGUMENT: Indicates that the payload of the metric is invalid. - DUPLICATE_REPORTING: Indicates that the metric has been reported before. - INTERNAL_ERROR: Indicates that something went wrong in the server. +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/RunMetricFormat.md b/backend/api/python_http_client/docs/RunMetricFormat.md new file mode 100644 index 000000000..deef5e06c --- /dev/null +++ b/backend/api/python_http_client/docs/RunMetricFormat.md @@ -0,0 +1,10 @@ +# RunMetricFormat + + - UNSPECIFIED: Default value if not present. - RAW: Display value as its raw format. - PERCENTAGE: Display value in percentage format. 
+## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/docs/RunServiceApi.md b/backend/api/python_http_client/docs/RunServiceApi.md new file mode 100644 index 000000000..5d0cae832 --- /dev/null +++ b/backend/api/python_http_client/docs/RunServiceApi.md @@ -0,0 +1,794 @@ +# kfp_server_api.RunServiceApi + +All URIs are relative to *http://localhost* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**archive_run**](RunServiceApi.md#archive_run) | **POST** /apis/v1beta1/runs/{id}:archive | Archives a run. +[**create_run**](RunServiceApi.md#create_run) | **POST** /apis/v1beta1/runs | Creates a new run. +[**delete_run**](RunServiceApi.md#delete_run) | **DELETE** /apis/v1beta1/runs/{id} | Deletes a run. +[**get_run**](RunServiceApi.md#get_run) | **GET** /apis/v1beta1/runs/{run_id} | Finds a specific run by ID. +[**list_runs**](RunServiceApi.md#list_runs) | **GET** /apis/v1beta1/runs | Finds all runs. +[**read_artifact**](RunServiceApi.md#read_artifact) | **GET** /apis/v1beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read | Finds a run's artifact data. +[**report_run_metrics**](RunServiceApi.md#report_run_metrics) | **POST** /apis/v1beta1/runs/{run_id}:reportMetrics | ReportRunMetrics reports metrics of a run. Each metric is reported in its own transaction, so this API accepts partial failures. Metric can be uniquely identified by (run_id, node_id, name). Duplicate reporting will be ignored by the API. First reporting wins. +[**retry_run**](RunServiceApi.md#retry_run) | **POST** /apis/v1beta1/runs/{run_id}/retry | Re-initiates a failed or terminated run. +[**terminate_run**](RunServiceApi.md#terminate_run) | **POST** /apis/v1beta1/runs/{run_id}/terminate | Terminates an active run. +[**unarchive_run**](RunServiceApi.md#unarchive_run) | **POST** /apis/v1beta1/runs/{id}:unarchive | Restores an archived run. + + +# **archive_run** +> object archive_run(id) + +Archives a run. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.RunServiceApi(api_client) + id = 'id_example' # str | The ID of the run to be archived. + + try: + # Archives a run. 
+ api_response = api_instance.archive_run(id) + pprint(api_response) + except ApiException as e: + print("Exception when calling RunServiceApi->archive_run: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **id** | **str**| The ID of the run to be archived. | + +### Return type + +**object** + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **create_run** +> ApiRunDetail create_run(body) + +Creates a new run. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.RunServiceApi(api_client) + body = kfp_server_api.ApiRun() # ApiRun | + + try: + # Creates a new run. + api_response = api_instance.create_run(body) + pprint(api_response) + except ApiException as e: + print("Exception when calling RunServiceApi->create_run: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **body** | [**ApiRun**](ApiRun.md)| | + +### Return type + +[**ApiRunDetail**](ApiRunDetail.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **delete_run** +> object delete_run(id) + +Deletes a run. 
+ +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.RunServiceApi(api_client) + id = 'id_example' # str | The ID of the run to be deleted. + + try: + # Deletes a run. + api_response = api_instance.delete_run(id) + pprint(api_response) + except ApiException as e: + print("Exception when calling RunServiceApi->delete_run: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **id** | **str**| The ID of the run to be deleted. | + +### Return type + +**object** + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_run** +> ApiRunDetail get_run(run_id) + +Finds a specific run by ID. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.RunServiceApi(api_client) + run_id = 'run_id_example' # str | The ID of the run to be retrieved. 
+ + try: + # Finds a specific run by ID. + api_response = api_instance.get_run(run_id) + pprint(api_response) + except ApiException as e: + print("Exception when calling RunServiceApi->get_run: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **run_id** | **str**| The ID of the run to be retrieved. | + +### Return type + +[**ApiRunDetail**](ApiRunDetail.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **list_runs** +> ApiListRunsResponse list_runs(page_token=page_token, page_size=page_size, sort_by=sort_by, resource_reference_key_type=resource_reference_key_type, resource_reference_key_id=resource_reference_key_id, filter=filter) + +Finds all runs. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.RunServiceApi(api_client) + page_token = 'page_token_example' # str | A page token to request the next page of results. The token is acquired from the nextPageToken field of the response from the previous ListRuns call or can be omitted when fetching the first page. (optional) +page_size = 56 # int | The number of runs to be listed per page. If there are more runs than this number, the response message will contain a nextPageToken field you can use to fetch the next page. (optional) +sort_by = 'sort_by_example' # str | Can be in the form of \"field_name\", \"field_name asc\" or \"field_name desc\" (for example, \"name asc\" or \"id desc\"). Ascending by default. (optional) +resource_reference_key_type = 'UNKNOWN_RESOURCE_TYPE' # str | The type of the resource that is referred to. (optional) (default to 'UNKNOWN_RESOURCE_TYPE') +resource_reference_key_id = 'resource_reference_key_id_example' # str | The ID of the resource that is referred to. (optional) +filter = 'filter_example' # str | A URL-encoded, JSON-serialized Filter protocol buffer (see [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). 
(optional) + + try: + # Finds all runs. + api_response = api_instance.list_runs(page_token=page_token, page_size=page_size, sort_by=sort_by, resource_reference_key_type=resource_reference_key_type, resource_reference_key_id=resource_reference_key_id, filter=filter) + pprint(api_response) + except ApiException as e: + print("Exception when calling RunServiceApi->list_runs: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **page_token** | **str**| A page token to request the next page of results. The token is acquired from the nextPageToken field of the response from the previous ListRuns call or can be omitted when fetching the first page. | [optional] + **page_size** | **int**| The number of runs to be listed per page. If there are more runs than this number, the response message will contain a nextPageToken field you can use to fetch the next page. | [optional] + **sort_by** | **str**| Can be in the form of \"field_name\", \"field_name asc\" or \"field_name desc\" (for example, \"name asc\" or \"id desc\"). Ascending by default. | [optional] + **resource_reference_key_type** | **str**| The type of the resource that is referred to. | [optional] [default to 'UNKNOWN_RESOURCE_TYPE'] + **resource_reference_key_id** | **str**| The ID of the resource that is referred to. | [optional] + **filter** | **str**| A URL-encoded, JSON-serialized Filter protocol buffer (see [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). | [optional] + +### Return type + +[**ApiListRunsResponse**](ApiListRunsResponse.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **read_artifact** +> ApiReadArtifactResponse read_artifact(run_id, node_id, artifact_name) + +Finds a run's artifact data. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.RunServiceApi(api_client) + run_id = 'run_id_example' # str | The ID of the run. 
+node_id = 'node_id_example' # str | The ID of the running node. +artifact_name = 'artifact_name_example' # str | The name of the artifact. + + try: + # Finds a run's artifact data. + api_response = api_instance.read_artifact(run_id, node_id, artifact_name) + pprint(api_response) + except ApiException as e: + print("Exception when calling RunServiceApi->read_artifact: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **run_id** | **str**| The ID of the run. | + **node_id** | **str**| The ID of the running node. | + **artifact_name** | **str**| The name of the artifact. | + +### Return type + +[**ApiReadArtifactResponse**](ApiReadArtifactResponse.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **report_run_metrics** +> ApiReportRunMetricsResponse report_run_metrics(run_id, body) + +ReportRunMetrics reports metrics of a run. Each metric is reported in its own transaction, so this API accepts partial failures. Metric can be uniquely identified by (run_id, node_id, name). Duplicate reporting will be ignored by the API. First reporting wins. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.RunServiceApi(api_client) + run_id = 'run_id_example' # str | Required. The parent run ID of the metric. +body = kfp_server_api.ApiReportRunMetricsRequest() # ApiReportRunMetricsRequest | + + try: + # ReportRunMetrics reports metrics of a run. Each metric is reported in its own transaction, so this API accepts partial failures. Metric can be uniquely identified by (run_id, node_id, name). Duplicate reporting will be ignored by the API. First reporting wins. 
+ api_response = api_instance.report_run_metrics(run_id, body) + pprint(api_response) + except ApiException as e: + print("Exception when calling RunServiceApi->report_run_metrics: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **run_id** | **str**| Required. The parent run ID of the metric. | + **body** | [**ApiReportRunMetricsRequest**](ApiReportRunMetricsRequest.md)| | + +### Return type + +[**ApiReportRunMetricsResponse**](ApiReportRunMetricsResponse.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **retry_run** +> object retry_run(run_id) + +Re-initiates a failed or terminated run. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.RunServiceApi(api_client) + run_id = 'run_id_example' # str | The ID of the run to be retried. + + try: + # Re-initiates a failed or terminated run. + api_response = api_instance.retry_run(run_id) + pprint(api_response) + except ApiException as e: + print("Exception when calling RunServiceApi->retry_run: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **run_id** | **str**| The ID of the run to be retried. | + +### Return type + +**object** + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **terminate_run** +> object terminate_run(run_id) + +Terminates an active run. 
+ +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.RunServiceApi(api_client) + run_id = 'run_id_example' # str | The ID of the run to be terminated. + + try: + # Terminates an active run. + api_response = api_instance.terminate_run(run_id) + pprint(api_response) + except ApiException as e: + print("Exception when calling RunServiceApi->terminate_run: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **run_id** | **str**| The ID of the run to be terminated. | + +### Return type + +**object** + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **unarchive_run** +> object unarchive_run(id) + +Restores an archived run. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.RunServiceApi(api_client) + id = 'id_example' # str | The ID of the run to be restored. + + try: + # Restores an archived run. + api_response = api_instance.unarchive_run(id) + pprint(api_response) + except ApiException as e: + print("Exception when calling RunServiceApi->unarchive_run: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **id** | **str**| The ID of the run to be restored. | + +### Return type + +**object** + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/backend/api/python_http_client/docs/RunStorageState.md b/backend/api/python_http_client/docs/RunStorageState.md new file mode 100644 index 000000000..958cd0142 --- /dev/null +++ b/backend/api/python_http_client/docs/RunStorageState.md @@ -0,0 +1,9 @@ +# RunStorageState + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/python_http_client/git_push.sh b/backend/api/python_http_client/git_push.sh new file mode 100644 index 000000000..594db2738 --- /dev/null +++ b/backend/api/python_http_client/git_push.sh @@ -0,0 +1,72 @@ +#!/bin/sh +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# ref: https://help.github.com/articles/adding-an-existing-project-to-github-using-the-command-line/ +# +# Usage example: /bin/sh ./git_push.sh wing328 openapi-petstore-perl "minor update" "gitlab.com" + +git_user_id=$1 +git_repo_id=$2 +release_note=$3 +git_host=$4 + +if [ "$git_host" = "" ]; then + git_host="github.com" + echo "[INFO] No command line input provided. Set \$git_host to $git_host" +fi + +if [ "$git_user_id" = "" ]; then + git_user_id="GIT_USER_ID" + echo "[INFO] No command line input provided. Set \$git_user_id to $git_user_id" +fi + +if [ "$git_repo_id" = "" ]; then + git_repo_id="GIT_REPO_ID" + echo "[INFO] No command line input provided. Set \$git_repo_id to $git_repo_id" +fi + +if [ "$release_note" = "" ]; then + release_note="Minor update" + echo "[INFO] No command line input provided. 
Set \$release_note to $release_note" +fi + +# Initialize the local directory as a Git repository +git init + +# Adds the files in the local repository and stages them for commit. +git add . + +# Commits the tracked changes and prepares them to be pushed to a remote repository. +git commit -m "$release_note" + +# Sets the new remote +git_remote=`git remote` +if [ "$git_remote" = "" ]; then # git remote not defined + + if [ "$GIT_TOKEN" = "" ]; then + echo "[INFO] \$GIT_TOKEN (environment variable) is not set. Using the git credential in your environment." + git remote add origin https://${git_host}/${git_user_id}/${git_repo_id}.git + else + git remote add origin https://${git_user_id}:${GIT_TOKEN}@${git_host}/${git_user_id}/${git_repo_id}.git + fi + +fi + +git pull origin master + +# Pushes (Forces) the changes in the local repository up to the remote repository +echo "Git pushing to https://${git_host}/${git_user_id}/${git_repo_id}.git" +git push origin master 2>&1 | grep -v 'To https' + diff --git a/backend/api/python_http_client/kfp_server_api/__init__.py b/backend/api/python_http_client/kfp_server_api/__init__.py new file mode 100644 index 000000000..bfea94019 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/__init__.py @@ -0,0 +1,84 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +# flake8: noqa + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +__version__ = "1.0.0" + +# import apis into sdk package +from kfp_server_api.api.experiment_service_api import ExperimentServiceApi +from kfp_server_api.api.job_service_api import JobServiceApi +from kfp_server_api.api.pipeline_service_api import PipelineServiceApi +from kfp_server_api.api.pipeline_upload_service_api import PipelineUploadServiceApi +from kfp_server_api.api.run_service_api import RunServiceApi + +# import ApiClient +from kfp_server_api.api_client import ApiClient +from kfp_server_api.configuration import Configuration +from kfp_server_api.exceptions import OpenApiException +from kfp_server_api.exceptions import ApiTypeError +from kfp_server_api.exceptions import ApiValueError +from kfp_server_api.exceptions import ApiKeyError +from kfp_server_api.exceptions import ApiException +# import models into sdk package +from kfp_server_api.models.api_cron_schedule import ApiCronSchedule +from kfp_server_api.models.api_experiment import ApiExperiment +from kfp_server_api.models.api_get_template_response import ApiGetTemplateResponse +from kfp_server_api.models.api_job import ApiJob +from kfp_server_api.models.api_list_experiments_response import ApiListExperimentsResponse +from kfp_server_api.models.api_list_jobs_response import ApiListJobsResponse +from kfp_server_api.models.api_list_pipeline_versions_response import ApiListPipelineVersionsResponse +from kfp_server_api.models.api_list_pipelines_response import ApiListPipelinesResponse +from kfp_server_api.models.api_list_runs_response import ApiListRunsResponse +from kfp_server_api.models.api_parameter import ApiParameter +from kfp_server_api.models.api_periodic_schedule import ApiPeriodicSchedule +from kfp_server_api.models.api_pipeline import ApiPipeline +from kfp_server_api.models.api_pipeline_runtime import ApiPipelineRuntime +from kfp_server_api.models.api_pipeline_spec import ApiPipelineSpec +from kfp_server_api.models.api_pipeline_version import ApiPipelineVersion +from kfp_server_api.models.api_read_artifact_response import ApiReadArtifactResponse +from kfp_server_api.models.api_relationship import ApiRelationship +from kfp_server_api.models.api_report_run_metrics_request import ApiReportRunMetricsRequest +from kfp_server_api.models.api_report_run_metrics_response import ApiReportRunMetricsResponse +from kfp_server_api.models.api_resource_key import ApiResourceKey +from kfp_server_api.models.api_resource_reference import ApiResourceReference +from kfp_server_api.models.api_resource_type import ApiResourceType +from kfp_server_api.models.api_run import ApiRun +from kfp_server_api.models.api_run_detail import ApiRunDetail +from kfp_server_api.models.api_run_metric import ApiRunMetric +from kfp_server_api.models.api_status import ApiStatus +from kfp_server_api.models.api_trigger import ApiTrigger +from kfp_server_api.models.api_url import ApiUrl +from kfp_server_api.models.experiment_storage_state import ExperimentStorageState +from kfp_server_api.models.job_mode import JobMode +from kfp_server_api.models.protobuf_any import ProtobufAny +from kfp_server_api.models.report_run_metrics_response_report_run_metric_result import ReportRunMetricsResponseReportRunMetricResult +from kfp_server_api.models.report_run_metrics_response_report_run_metric_result_status import ReportRunMetricsResponseReportRunMetricResultStatus +from kfp_server_api.models.run_metric_format import RunMetricFormat +from 
kfp_server_api.models.run_storage_state import RunStorageState + diff --git a/backend/api/python_http_client/kfp_server_api/api/__init__.py b/backend/api/python_http_client/kfp_server_api/api/__init__.py new file mode 100644 index 000000000..2e114c8e4 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/api/__init__.py @@ -0,0 +1,24 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +# flake8: noqa + +# import apis into api package +from kfp_server_api.api.experiment_service_api import ExperimentServiceApi +from kfp_server_api.api.job_service_api import JobServiceApi +from kfp_server_api.api.pipeline_service_api import PipelineServiceApi +from kfp_server_api.api.pipeline_upload_service_api import PipelineUploadServiceApi +from kfp_server_api.api.run_service_api import RunServiceApi diff --git a/backend/api/python_http_client/kfp_server_api/api/experiment_service_api.py b/backend/api/python_http_client/kfp_server_api/api/experiment_service_api.py new file mode 100644 index 000000000..42e9b227e --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/api/experiment_service_api.py @@ -0,0 +1,818 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +# python 2 and python 3 compatibility library +import six + +from kfp_server_api.api_client import ApiClient +from kfp_server_api.exceptions import ( # noqa: F401 + ApiTypeError, + ApiValueError +) + + +class ExperimentServiceApi(object): + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): + if api_client is None: + api_client = ApiClient() + self.api_client = api_client + + def archive_experiment(self, id, **kwargs): # noqa: E501 + """Archives an experiment and the experiment's runs and jobs. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.archive_experiment(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the experiment to be archived. 
(required) + :type id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: object + """ + kwargs['_return_http_data_only'] = True + return self.archive_experiment_with_http_info(id, **kwargs) # noqa: E501 + + def archive_experiment_with_http_info(self, id, **kwargs): # noqa: E501 + """Archives an experiment and the experiment's runs and jobs. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.archive_experiment_with_http_info(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the experiment to be archived. (required) + :type id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
+ :rtype: tuple(object, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'id' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method archive_experiment" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'id' is set + if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501 + local_var_params['id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `id` when calling `archive_experiment`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id' in local_var_params: + path_params['id'] = local_var_params['id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/experiments/{id}:archive', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='object', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def create_experiment(self, body, **kwargs): # noqa: E501 + """Creates a new experiment. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.create_experiment(body, async_req=True) + >>> result = thread.get() + + :param body: The experiment to be created. (required) + :type body: ApiExperiment + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: ApiExperiment + """ + kwargs['_return_http_data_only'] = True + return self.create_experiment_with_http_info(body, **kwargs) # noqa: E501 + + def create_experiment_with_http_info(self, body, **kwargs): # noqa: E501 + """Creates a new experiment. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.create_experiment_with_http_info(body, async_req=True) + >>> result = thread.get() + + :param body: The experiment to be created. (required) + :type body: ApiExperiment + :param async_req: Whether to execute the request asynchronously. 
+ :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(ApiExperiment, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'body' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method create_experiment" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'body' is set + if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 + local_var_params['body'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `body` when calling `create_experiment`") # noqa: E501 + + collection_formats = {} + + path_params = {} + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + if 'body' in local_var_params: + body_params = local_var_params['body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/experiments', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='ApiExperiment', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def delete_experiment(self, id, **kwargs): # noqa: E501 + """Deletes an experiment without deleting the experiment's runs and jobs. To avoid unexpected behaviors, delete an experiment's runs and jobs before deleting the experiment. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.delete_experiment(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the experiment to be deleted. (required) + :type id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: object + """ + kwargs['_return_http_data_only'] = True + return self.delete_experiment_with_http_info(id, **kwargs) # noqa: E501 + + def delete_experiment_with_http_info(self, id, **kwargs): # noqa: E501 + """Deletes an experiment without deleting the experiment's runs and jobs. To avoid unexpected behaviors, delete an experiment's runs and jobs before deleting the experiment. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.delete_experiment_with_http_info(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the experiment to be deleted. (required) + :type id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(object, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'id' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method delete_experiment" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'id' is set + if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501 + local_var_params['id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `id` when calling `delete_experiment`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id' in local_var_params: + path_params['id'] = local_var_params['id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/experiments/{id}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='object', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def get_experiment(self, id, **kwargs): # 
noqa: E501 + """Finds a specific experiment by ID. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.get_experiment(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the experiment to be retrieved. (required) + :type id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: ApiExperiment + """ + kwargs['_return_http_data_only'] = True + return self.get_experiment_with_http_info(id, **kwargs) # noqa: E501 + + def get_experiment_with_http_info(self, id, **kwargs): # noqa: E501 + """Finds a specific experiment by ID. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.get_experiment_with_http_info(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the experiment to be retrieved. (required) + :type id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
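For orientation, here is a minimal usage sketch for the experiment methods generated in this file, first synchronously and then with async_req=True as the docstrings describe. The server address and all IDs/names are illustrative assumptions, not values taken from this patch; it also assumes Configuration, ApiClient and the model classes are re-exported from the kfp_server_api package, as is usual for this generator.

    import kfp_server_api

    # Assumed endpoint; point this at your own Kubeflow Pipelines API server.
    config = kfp_server_api.Configuration(host="http://localhost:8888")
    client = kfp_server_api.ApiClient(config)
    experiment_api = kfp_server_api.ExperimentServiceApi(client)

    # Synchronous calls return the deserialized model object directly.
    created = experiment_api.create_experiment(
        body=kfp_server_api.ApiExperiment(name="demo", description="illustrative experiment"))
    fetched = experiment_api.get_experiment(id=created.id)
    print(fetched.name)

    # async_req=True returns a thread instead; .get() blocks for the result.
    thread = experiment_api.get_experiment(id=created.id, async_req=True)
    fetched = thread.get()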
+ :rtype: tuple(ApiExperiment, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'id' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method get_experiment" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'id' is set + if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501 + local_var_params['id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `id` when calling `get_experiment`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id' in local_var_params: + path_params['id'] = local_var_params['id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/experiments/{id}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='ApiExperiment', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_experiment(self, **kwargs): # noqa: E501 + """Finds all experiments. Supports pagination, and sorting on certain fields. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_experiment(async_req=True) + >>> result = thread.get() + + :param page_token: A page token to request the next page of results. The token is acquried from the nextPageToken field of the response from the previous ListExperiment call or can be omitted when fetching the first page. + :type page_token: str + :param page_size: The number of experiments to be listed per page. If there are more experiments than this number, the response message will contain a nextPageToken field you can use to fetch the next page. + :type page_size: int + :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\" Ascending by default. + :type sort_by: str + :param filter: A url-encoded, JSON-serialized Filter protocol buffer (see [filter.proto](https://github.com/kubeflow/pipelines/ blob/master/backend/api/filter.proto)). + :type filter: str + :param resource_reference_key_type: The type of the resource that referred to. + :type resource_reference_key_type: str + :param resource_reference_key_id: The ID of the resource that referred to. + :type resource_reference_key_id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: ApiListExperimentsResponse + """ + kwargs['_return_http_data_only'] = True + return self.list_experiment_with_http_info(**kwargs) # noqa: E501 + + def list_experiment_with_http_info(self, **kwargs): # noqa: E501 + """Finds all experiments. Supports pagination, and sorting on certain fields. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_experiment_with_http_info(async_req=True) + >>> result = thread.get() + + :param page_token: A page token to request the next page of results. The token is acquried from the nextPageToken field of the response from the previous ListExperiment call or can be omitted when fetching the first page. + :type page_token: str + :param page_size: The number of experiments to be listed per page. If there are more experiments than this number, the response message will contain a nextPageToken field you can use to fetch the next page. + :type page_size: int + :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\" Ascending by default. + :type sort_by: str + :param filter: A url-encoded, JSON-serialized Filter protocol buffer (see [filter.proto](https://github.com/kubeflow/pipelines/ blob/master/backend/api/filter.proto)). + :type filter: str + :param resource_reference_key_type: The type of the resource that referred to. + :type resource_reference_key_type: str + :param resource_reference_key_id: The ID of the resource that referred to. + :type resource_reference_key_id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
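The page_token, page_size, sort_by and filter parameters described here compose into a simple paging loop. A sketch, assuming experiment_api from the earlier snippet; the filter predicate is illustrative and only needs to follow the JSON form of backend/api/filter.proto.

    import json

    # Illustrative filter: experiments whose name contains "mnist".
    experiment_filter = json.dumps({
        "predicates": [{"op": "IS_SUBSTRING", "key": "name", "string_value": "mnist"}]
    })

    page_token = ""
    while True:
        resp = experiment_api.list_experiment(
            page_size=20,
            sort_by="created_at desc",
            filter=experiment_filter,
            page_token=page_token,
        )
        for exp in resp.experiments or []:
            print(exp.id, exp.name)
        page_token = resp.next_page_token
        if not page_token:
            break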
+ :rtype: tuple(ApiListExperimentsResponse, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'page_token', + 'page_size', + 'sort_by', + 'filter', + 'resource_reference_key_type', + 'resource_reference_key_id' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method list_experiment" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + + collection_formats = {} + + path_params = {} + + query_params = [] + if 'page_token' in local_var_params and local_var_params['page_token'] is not None: # noqa: E501 + query_params.append(('page_token', local_var_params['page_token'])) # noqa: E501 + if 'page_size' in local_var_params and local_var_params['page_size'] is not None: # noqa: E501 + query_params.append(('page_size', local_var_params['page_size'])) # noqa: E501 + if 'sort_by' in local_var_params and local_var_params['sort_by'] is not None: # noqa: E501 + query_params.append(('sort_by', local_var_params['sort_by'])) # noqa: E501 + if 'filter' in local_var_params and local_var_params['filter'] is not None: # noqa: E501 + query_params.append(('filter', local_var_params['filter'])) # noqa: E501 + if 'resource_reference_key_type' in local_var_params and local_var_params['resource_reference_key_type'] is not None: # noqa: E501 + query_params.append(('resource_reference_key.type', local_var_params['resource_reference_key_type'])) # noqa: E501 + if 'resource_reference_key_id' in local_var_params and local_var_params['resource_reference_key_id'] is not None: # noqa: E501 + query_params.append(('resource_reference_key.id', local_var_params['resource_reference_key_id'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/experiments', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='ApiListExperimentsResponse', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def unarchive_experiment(self, id, **kwargs): # noqa: E501 + """Restores an archived experiment. The experiment's archived runs and jobs will stay archived. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.unarchive_experiment(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the experiment to be restored. (required) + :type id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. 
+ :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: object + """ + kwargs['_return_http_data_only'] = True + return self.unarchive_experiment_with_http_info(id, **kwargs) # noqa: E501 + + def unarchive_experiment_with_http_info(self, id, **kwargs): # noqa: E501 + """Restores an archived experiment. The experiment's archived runs and jobs will stay archived. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.unarchive_experiment_with_http_info(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the experiment to be restored. (required) + :type id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(object, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'id' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method unarchive_experiment" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'id' is set + if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501 + local_var_params['id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `id` when calling `unarchive_experiment`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id' in local_var_params: + path_params['id'] = local_var_params['id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/experiments/{id}:unarchive', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='object', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) diff --git 
a/backend/api/python_http_client/kfp_server_api/api/job_service_api.py b/backend/api/python_http_client/kfp_server_api/api/job_service_api.py new file mode 100644 index 000000000..94d8979ac --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/api/job_service_api.py @@ -0,0 +1,818 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +# python 2 and python 3 compatibility library +import six + +from kfp_server_api.api_client import ApiClient +from kfp_server_api.exceptions import ( # noqa: F401 + ApiTypeError, + ApiValueError +) + + +class JobServiceApi(object): + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): + if api_client is None: + api_client = ApiClient() + self.api_client = api_client + + def create_job(self, body, **kwargs): # noqa: E501 + """Creates a new job. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.create_job(body, async_req=True) + >>> result = thread.get() + + :param body: The job to be created (required) + :type body: ApiJob + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: ApiJob + """ + kwargs['_return_http_data_only'] = True + return self.create_job_with_http_info(body, **kwargs) # noqa: E501 + + def create_job_with_http_info(self, body, **kwargs): # noqa: E501 + """Creates a new job. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.create_job_with_http_info(body, async_req=True) + >>> result = thread.get() + + :param body: The job to be created (required) + :type body: ApiJob + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. 
Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(ApiJob, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'body' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method create_job" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'body' is set + if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 + local_var_params['body'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `body` when calling `create_job`") # noqa: E501 + + collection_formats = {} + + path_params = {} + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + if 'body' in local_var_params: + body_params = local_var_params['body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/jobs', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='ApiJob', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def delete_job(self, id, **kwargs): # noqa: E501 + """Deletes a job. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.delete_job(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the job to be deleted (required) + :type id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: object + """ + kwargs['_return_http_data_only'] = True + return self.delete_job_with_http_info(id, **kwargs) # noqa: E501 + + def delete_job_with_http_info(self, id, **kwargs): # noqa: E501 + """Deletes a job. # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.delete_job_with_http_info(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the job to be deleted (required) + :type id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(object, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'id' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method delete_job" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'id' is set + if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501 + local_var_params['id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `id` when calling `delete_job`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id' in local_var_params: + path_params['id'] = local_var_params['id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/jobs/{id}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='object', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def disable_job(self, id, **kwargs): # noqa: E501 + """Stops a job and all its associated runs. The job is not deleted. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.disable_job(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the job to be disabled (required) + :type id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: object + """ + kwargs['_return_http_data_only'] = True + return self.disable_job_with_http_info(id, **kwargs) # noqa: E501 + + def disable_job_with_http_info(self, id, **kwargs): # noqa: E501 + """Stops a job and all its associated runs. The job is not deleted. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.disable_job_with_http_info(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the job to be disabled (required) + :type id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(object, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'id' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method disable_job" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'id' is set + if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501 + local_var_params['id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `id` when calling `disable_job`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id' in local_var_params: + path_params['id'] = local_var_params['id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/jobs/{id}/disable', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='object', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def enable_job(self, id, **kwargs): # noqa: E501 + """Restarts a job that was previously stopped. All runs associated with the job will continue. 
# noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.enable_job(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the job to be enabled (required) + :type id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: object + """ + kwargs['_return_http_data_only'] = True + return self.enable_job_with_http_info(id, **kwargs) # noqa: E501 + + def enable_job_with_http_info(self, id, **kwargs): # noqa: E501 + """Restarts a job that was previously stopped. All runs associated with the job will continue. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.enable_job_with_http_info(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the job to be enabled (required) + :type id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
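disable_job and enable_job are the pause/resume pair for a recurring job. A minimal sketch, assuming client is the ApiClient configured in the earlier snippet and the job ID is a placeholder.

    job_api = kfp_server_api.JobServiceApi(client)

    job_api.disable_job(id="JOB_ID")   # stop the job and its associated runs; the job is not deleted
    job_api.enable_job(id="JOB_ID")    # restart the stopped job; its runs continue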
+ :rtype: tuple(object, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'id' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method enable_job" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'id' is set + if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501 + local_var_params['id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `id` when calling `enable_job`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id' in local_var_params: + path_params['id'] = local_var_params['id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/jobs/{id}/enable', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='object', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def get_job(self, id, **kwargs): # noqa: E501 + """Finds a specific job by ID. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.get_job(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the job to be retrieved (required) + :type id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: ApiJob + """ + kwargs['_return_http_data_only'] = True + return self.get_job_with_http_info(id, **kwargs) # noqa: E501 + + def get_job_with_http_info(self, id, **kwargs): # noqa: E501 + """Finds a specific job by ID. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.get_job_with_http_info(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the job to be retrieved (required) + :type id: str + :param async_req: Whether to execute the request asynchronously. 
+ :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(ApiJob, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'id' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method get_job" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'id' is set + if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501 + local_var_params['id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `id` when calling `get_job`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id' in local_var_params: + path_params['id'] = local_var_params['id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/jobs/{id}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='ApiJob', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_jobs(self, **kwargs): # noqa: E501 + """Finds all jobs. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_jobs(async_req=True) + >>> result = thread.get() + + :param page_token: A page token to request the next page of results. The token is acquried from the nextPageToken field of the response from the previous ListJobs call or can be omitted when fetching the first page. + :type page_token: str + :param page_size: The number of jobs to be listed per page. If there are more jobs than this number, the response message will contain a nextPageToken field you can use to fetch the next page. + :type page_size: int + :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\". Ascending by default. + :type sort_by: str + :param resource_reference_key_type: The type of the resource that referred to. + :type resource_reference_key_type: str + :param resource_reference_key_id: The ID of the resource that referred to. 
+ :type resource_reference_key_id: str + :param filter: A url-encoded, JSON-serialized Filter protocol buffer (see [filter.proto](https://github.com/kubeflow/pipelines/ blob/master/backend/api/filter.proto)). + :type filter: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: ApiListJobsResponse + """ + kwargs['_return_http_data_only'] = True + return self.list_jobs_with_http_info(**kwargs) # noqa: E501 + + def list_jobs_with_http_info(self, **kwargs): # noqa: E501 + """Finds all jobs. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_jobs_with_http_info(async_req=True) + >>> result = thread.get() + + :param page_token: A page token to request the next page of results. The token is acquried from the nextPageToken field of the response from the previous ListJobs call or can be omitted when fetching the first page. + :type page_token: str + :param page_size: The number of jobs to be listed per page. If there are more jobs than this number, the response message will contain a nextPageToken field you can use to fetch the next page. + :type page_size: int + :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\". Ascending by default. + :type sort_by: str + :param resource_reference_key_type: The type of the resource that referred to. + :type resource_reference_key_type: str + :param resource_reference_key_id: The ID of the resource that referred to. + :type resource_reference_key_id: str + :param filter: A url-encoded, JSON-serialized Filter protocol buffer (see [filter.proto](https://github.com/kubeflow/pipelines/ blob/master/backend/api/filter.proto)). + :type filter: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
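The resource_reference_key_type / resource_reference_key_id pair documented here is how a listing call is scoped to a parent resource, most commonly an experiment. A sketch, assuming job_api from the previous snippet; the experiment ID is a placeholder and the type string follows the ApiResourceType enum names generated in this package.

    resp = job_api.list_jobs(
        page_size=50,
        sort_by="created_at desc",
        resource_reference_key_type="EXPERIMENT",
        resource_reference_key_id="EXPERIMENT_ID",
    )
    for job in resp.jobs or []:
        print(job.id, job.name, job.enabled)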
+ :rtype: tuple(ApiListJobsResponse, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'page_token', + 'page_size', + 'sort_by', + 'resource_reference_key_type', + 'resource_reference_key_id', + 'filter' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method list_jobs" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + + collection_formats = {} + + path_params = {} + + query_params = [] + if 'page_token' in local_var_params and local_var_params['page_token'] is not None: # noqa: E501 + query_params.append(('page_token', local_var_params['page_token'])) # noqa: E501 + if 'page_size' in local_var_params and local_var_params['page_size'] is not None: # noqa: E501 + query_params.append(('page_size', local_var_params['page_size'])) # noqa: E501 + if 'sort_by' in local_var_params and local_var_params['sort_by'] is not None: # noqa: E501 + query_params.append(('sort_by', local_var_params['sort_by'])) # noqa: E501 + if 'resource_reference_key_type' in local_var_params and local_var_params['resource_reference_key_type'] is not None: # noqa: E501 + query_params.append(('resource_reference_key.type', local_var_params['resource_reference_key_type'])) # noqa: E501 + if 'resource_reference_key_id' in local_var_params and local_var_params['resource_reference_key_id'] is not None: # noqa: E501 + query_params.append(('resource_reference_key.id', local_var_params['resource_reference_key_id'])) # noqa: E501 + if 'filter' in local_var_params and local_var_params['filter'] is not None: # noqa: E501 + query_params.append(('filter', local_var_params['filter'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/jobs', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='ApiListJobsResponse', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) diff --git a/backend/api/python_http_client/kfp_server_api/api/pipeline_service_api.py b/backend/api/python_http_client/kfp_server_api/api/pipeline_service_api.py new file mode 100644 index 000000000..d0d5bc0a0 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/api/pipeline_service_api.py @@ -0,0 +1,1327 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +# python 2 and python 3 compatibility library +import six + +from kfp_server_api.api_client import ApiClient +from kfp_server_api.exceptions import ( # noqa: F401 + ApiTypeError, + ApiValueError +) + + +class PipelineServiceApi(object): + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): + if api_client is None: + api_client = ApiClient() + self.api_client = api_client + + def create_pipeline(self, body, **kwargs): # noqa: E501 + """Creates a pipeline. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.create_pipeline(body, async_req=True) + >>> result = thread.get() + + :param body: (required) + :type body: ApiPipeline + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: ApiPipeline + """ + kwargs['_return_http_data_only'] = True + return self.create_pipeline_with_http_info(body, **kwargs) # noqa: E501 + + def create_pipeline_with_http_info(self, body, **kwargs): # noqa: E501 + """Creates a pipeline. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.create_pipeline_with_http_info(body, async_req=True) + >>> result = thread.get() + + :param body: (required) + :type body: ApiPipeline + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
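create_pipeline takes an ApiPipeline body whose url points at a pipeline package that the server can fetch. A minimal sketch, assuming client from the earlier snippets; the pipeline name and package URL are illustrative.

    pipeline_api = kfp_server_api.PipelineServiceApi(client)

    pipeline = pipeline_api.create_pipeline(
        body=kfp_server_api.ApiPipeline(
            name="my-pipeline",
            url=kfp_server_api.ApiUrl(pipeline_url="https://example.com/pipeline.tar.gz"),
        )
    )
    print(pipeline.id)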
+ :rtype: tuple(ApiPipeline, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'body' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method create_pipeline" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'body' is set + if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 + local_var_params['body'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `body` when calling `create_pipeline`") # noqa: E501 + + collection_formats = {} + + path_params = {} + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + if 'body' in local_var_params: + body_params = local_var_params['body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/pipelines', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='ApiPipeline', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def create_pipeline_version(self, body, **kwargs): # noqa: E501 + """Adds a pipeline version to the specified pipeline. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.create_pipeline_version(body, async_req=True) + >>> result = thread.get() + + :param body: ResourceReference inside PipelineVersion specifies the pipeline that this version belongs to. (required) + :type body: ApiPipelineVersion + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: ApiPipelineVersion + """ + kwargs['_return_http_data_only'] = True + return self.create_pipeline_version_with_http_info(body, **kwargs) # noqa: E501 + + def create_pipeline_version_with_http_info(self, body, **kwargs): # noqa: E501 + """Adds a pipeline version to the specified pipeline. # noqa: E501 + + This method makes a synchronous HTTP request by default. 
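As the parameter description above notes, the create_pipeline_version body must carry a resource reference naming the pipeline that the new version belongs to. A sketch using the resource-reference models generated in this package, assuming pipeline_api from the previous snippet; the IDs and package URL are placeholders.

    version = pipeline_api.create_pipeline_version(
        body=kfp_server_api.ApiPipelineVersion(
            name="my-pipeline-v2",
            package_url=kfp_server_api.ApiUrl(pipeline_url="https://example.com/pipeline-v2.tar.gz"),
            resource_references=[
                kfp_server_api.ApiResourceReference(
                    key=kfp_server_api.ApiResourceKey(
                        id="PIPELINE_ID",
                        type=kfp_server_api.ApiResourceType.PIPELINE,
                    ),
                    relationship=kfp_server_api.ApiRelationship.OWNER,
                )
            ],
        )
    )
    print(version.id)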
To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.create_pipeline_version_with_http_info(body, async_req=True) + >>> result = thread.get() + + :param body: ResourceReference inside PipelineVersion specifies the pipeline that this version belongs to. (required) + :type body: ApiPipelineVersion + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(ApiPipelineVersion, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'body' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method create_pipeline_version" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'body' is set + if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 + local_var_params['body'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `body` when calling `create_pipeline_version`") # noqa: E501 + + collection_formats = {} + + path_params = {} + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + if 'body' in local_var_params: + body_params = local_var_params['body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/pipeline_versions', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='ApiPipelineVersion', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def delete_pipeline(self, id, **kwargs): # noqa: E501 + """Deletes a pipeline and its pipeline versions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.delete_pipeline(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the pipeline to be deleted. 
(required) + :type id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: object + """ + kwargs['_return_http_data_only'] = True + return self.delete_pipeline_with_http_info(id, **kwargs) # noqa: E501 + + def delete_pipeline_with_http_info(self, id, **kwargs): # noqa: E501 + """Deletes a pipeline and its pipeline versions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.delete_pipeline_with_http_info(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the pipeline to be deleted. (required) + :type id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
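# A minimal usage sketch for delete_pipeline, assuming an unauthenticated,
# port-forwarded Kubeflow Pipelines API server; the host and the pipeline ID
# below are placeholders.
from kfp_server_api.api_client import ApiClient
from kfp_server_api.api.pipeline_service_api import PipelineServiceApi
from kfp_server_api.configuration import Configuration

config = Configuration()
config.host = "http://localhost:8888"  # placeholder API endpoint
pipeline_api = PipelineServiceApi(ApiClient(config))

# Deletes the pipeline together with all of its pipeline versions.
pipeline_api.delete_pipeline(id="my-pipeline-id")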
+ :rtype: tuple(object, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'id' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method delete_pipeline" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'id' is set + if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501 + local_var_params['id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `id` when calling `delete_pipeline`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id' in local_var_params: + path_params['id'] = local_var_params['id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/pipelines/{id}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='object', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def delete_pipeline_version(self, version_id, **kwargs): # noqa: E501 + """Deletes a pipeline version by pipeline version ID. If the deleted pipeline version is the default pipeline version, the pipeline's default version changes to the pipeline's most recent pipeline version. If there are no remaining pipeline versions, the pipeline will have no default version. Examines the run_service_api.ipynb notebook to learn more about creating a run using a pipeline version (https://github.com/kubeflow/pipelines/blob/master/tools/benchmarks/run_service_api.ipynb). # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.delete_pipeline_version(version_id, async_req=True) + >>> result = thread.get() + + :param version_id: The ID of the pipeline version to be deleted. (required) + :type version_id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
+ :rtype: object + """ + kwargs['_return_http_data_only'] = True + return self.delete_pipeline_version_with_http_info(version_id, **kwargs) # noqa: E501 + + def delete_pipeline_version_with_http_info(self, version_id, **kwargs): # noqa: E501 + """Deletes a pipeline version by pipeline version ID. If the deleted pipeline version is the default pipeline version, the pipeline's default version changes to the pipeline's most recent pipeline version. If there are no remaining pipeline versions, the pipeline will have no default version. Examines the run_service_api.ipynb notebook to learn more about creating a run using a pipeline version (https://github.com/kubeflow/pipelines/blob/master/tools/benchmarks/run_service_api.ipynb). # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.delete_pipeline_version_with_http_info(version_id, async_req=True) + >>> result = thread.get() + + :param version_id: The ID of the pipeline version to be deleted. (required) + :type version_id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
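# Sketch of delete_pipeline_version, reusing the pipeline_api client from the
# delete_pipeline sketch above; the version ID is a placeholder. Only this
# version is removed; the owning pipeline and its other versions remain.
pipeline_api.delete_pipeline_version(version_id="my-version-id")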
+ :rtype: tuple(object, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'version_id' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method delete_pipeline_version" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'version_id' is set + if self.api_client.client_side_validation and ('version_id' not in local_var_params or # noqa: E501 + local_var_params['version_id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `version_id` when calling `delete_pipeline_version`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'version_id' in local_var_params: + path_params['version_id'] = local_var_params['version_id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/pipeline_versions/{version_id}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='object', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def get_pipeline(self, id, **kwargs): # noqa: E501 + """Finds a specific pipeline by ID. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.get_pipeline(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the pipeline to be retrieved. (required) + :type id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: ApiPipeline + """ + kwargs['_return_http_data_only'] = True + return self.get_pipeline_with_http_info(id, **kwargs) # noqa: E501 + + def get_pipeline_with_http_info(self, id, **kwargs): # noqa: E501 + """Finds a specific pipeline by ID. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.get_pipeline_with_http_info(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the pipeline to be retrieved. (required) + :type id: str + :param async_req: Whether to execute the request asynchronously. 
+ :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(ApiPipeline, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'id' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method get_pipeline" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'id' is set + if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501 + local_var_params['id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `id` when calling `get_pipeline`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id' in local_var_params: + path_params['id'] = local_var_params['id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/pipelines/{id}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='ApiPipeline', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def get_pipeline_version(self, version_id, **kwargs): # noqa: E501 + """Gets a pipeline version by pipeline version ID. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.get_pipeline_version(version_id, async_req=True) + >>> result = thread.get() + + :param version_id: The ID of the pipeline version to be retrieved. (required) + :type version_id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
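# Sketch of get_pipeline with the same pipeline_api client; the ID is a
# placeholder. The call returns an ApiPipeline model whose fields mirror the
# REST response.
pipeline = pipeline_api.get_pipeline(id="my-pipeline-id")
print(pipeline.id, pipeline.name)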
+ :rtype: ApiPipelineVersion + """ + kwargs['_return_http_data_only'] = True + return self.get_pipeline_version_with_http_info(version_id, **kwargs) # noqa: E501 + + def get_pipeline_version_with_http_info(self, version_id, **kwargs): # noqa: E501 + """Gets a pipeline version by pipeline version ID. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.get_pipeline_version_with_http_info(version_id, async_req=True) + >>> result = thread.get() + + :param version_id: The ID of the pipeline version to be retrieved. (required) + :type version_id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(ApiPipelineVersion, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'version_id' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method get_pipeline_version" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'version_id' is set + if self.api_client.client_side_validation and ('version_id' not in local_var_params or # noqa: E501 + local_var_params['version_id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `version_id` when calling `get_pipeline_version`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'version_id' in local_var_params: + path_params['version_id'] = local_var_params['version_id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/pipeline_versions/{version_id}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='ApiPipelineVersion', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def get_pipeline_version_template(self, version_id, **kwargs): # noqa: E501 + """Returns a YAML template that contains the specified pipeline version's description, parameters and metadata. 
# noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.get_pipeline_version_template(version_id, async_req=True) + >>> result = thread.get() + + :param version_id: The ID of the pipeline version whose template is to be retrieved. (required) + :type version_id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: ApiGetTemplateResponse + """ + kwargs['_return_http_data_only'] = True + return self.get_pipeline_version_template_with_http_info(version_id, **kwargs) # noqa: E501 + + def get_pipeline_version_template_with_http_info(self, version_id, **kwargs): # noqa: E501 + """Returns a YAML template that contains the specified pipeline version's description, parameters and metadata. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.get_pipeline_version_template_with_http_info(version_id, async_req=True) + >>> result = thread.get() + + :param version_id: The ID of the pipeline version whose template is to be retrieved. (required) + :type version_id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
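# Sketch of get_pipeline_version_template: fetches the workflow YAML for one
# pipeline version and writes it to disk. The version ID and output path are
# placeholders; the `template` attribute is assumed from the
# ApiGetTemplateResponse model.
response = pipeline_api.get_pipeline_version_template(version_id="my-version-id")
with open("pipeline.yaml", "w") as f:
    f.write(response.template)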
+ :rtype: tuple(ApiGetTemplateResponse, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'version_id' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method get_pipeline_version_template" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'version_id' is set + if self.api_client.client_side_validation and ('version_id' not in local_var_params or # noqa: E501 + local_var_params['version_id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `version_id` when calling `get_pipeline_version_template`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'version_id' in local_var_params: + path_params['version_id'] = local_var_params['version_id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/pipeline_versions/{version_id}/templates', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='ApiGetTemplateResponse', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def get_template(self, id, **kwargs): # noqa: E501 + """Returns a single YAML template that contains the description, parameters, and metadata associated with the pipeline provided. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.get_template(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the pipeline whose template is to be retrieved. (required) + :type id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: ApiGetTemplateResponse + """ + kwargs['_return_http_data_only'] = True + return self.get_template_with_http_info(id, **kwargs) # noqa: E501 + + def get_template_with_http_info(self, id, **kwargs): # noqa: E501 + """Returns a single YAML template that contains the description, parameters, and metadata associated with the pipeline provided. # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.get_template_with_http_info(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the pipeline whose template is to be retrieved. (required) + :type id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(ApiGetTemplateResponse, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'id' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method get_template" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'id' is set + if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501 + local_var_params['id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `id` when calling `get_template`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id' in local_var_params: + path_params['id'] = local_var_params['id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/pipelines/{id}/templates', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='ApiGetTemplateResponse', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_pipeline_versions(self, **kwargs): # noqa: E501 + """Lists all pipeline versions of a given pipeline. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_pipeline_versions(async_req=True) + >>> result = thread.get() + + :param resource_key_type: The type of the resource that referred to. + :type resource_key_type: str + :param resource_key_id: The ID of the resource that referred to. + :type resource_key_id: str + :param page_size: The number of pipeline versions to be listed per page. 
If there are more pipeline versions than this number, the response message will contain a nextPageToken field you can use to fetch the next page. + :type page_size: int + :param page_token: A page token to request the next page of results. The token is acquried from the nextPageToken field of the response from the previous ListPipelineVersions call or can be omitted when fetching the first page. + :type page_token: str + :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\" Ascending by default. + :type sort_by: str + :param filter: A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). + :type filter: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: ApiListPipelineVersionsResponse + """ + kwargs['_return_http_data_only'] = True + return self.list_pipeline_versions_with_http_info(**kwargs) # noqa: E501 + + def list_pipeline_versions_with_http_info(self, **kwargs): # noqa: E501 + """Lists all pipeline versions of a given pipeline. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_pipeline_versions_with_http_info(async_req=True) + >>> result = thread.get() + + :param resource_key_type: The type of the resource that referred to. + :type resource_key_type: str + :param resource_key_id: The ID of the resource that referred to. + :type resource_key_id: str + :param page_size: The number of pipeline versions to be listed per page. If there are more pipeline versions than this number, the response message will contain a nextPageToken field you can use to fetch the next page. + :type page_size: int + :param page_token: A page token to request the next page of results. The token is acquried from the nextPageToken field of the response from the previous ListPipelineVersions call or can be omitted when fetching the first page. + :type page_token: str + :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\" Ascending by default. + :type sort_by: str + :param filter: A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). + :type filter: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
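# Sketch of list_pipeline_versions, scoped to one pipeline through the
# resource_key.* query parameters; the pipeline ID is a placeholder and the
# paging and sort values are only illustrative.
versions = pipeline_api.list_pipeline_versions(
    resource_key_type="PIPELINE",
    resource_key_id="my-pipeline-id",
    page_size=10,
    sort_by="created_at desc",
)
for v in versions.versions or []:
    print(v.id, v.name)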
+ :rtype: tuple(ApiListPipelineVersionsResponse, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'resource_key_type', + 'resource_key_id', + 'page_size', + 'page_token', + 'sort_by', + 'filter' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method list_pipeline_versions" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + + collection_formats = {} + + path_params = {} + + query_params = [] + if 'resource_key_type' in local_var_params and local_var_params['resource_key_type'] is not None: # noqa: E501 + query_params.append(('resource_key.type', local_var_params['resource_key_type'])) # noqa: E501 + if 'resource_key_id' in local_var_params and local_var_params['resource_key_id'] is not None: # noqa: E501 + query_params.append(('resource_key.id', local_var_params['resource_key_id'])) # noqa: E501 + if 'page_size' in local_var_params and local_var_params['page_size'] is not None: # noqa: E501 + query_params.append(('page_size', local_var_params['page_size'])) # noqa: E501 + if 'page_token' in local_var_params and local_var_params['page_token'] is not None: # noqa: E501 + query_params.append(('page_token', local_var_params['page_token'])) # noqa: E501 + if 'sort_by' in local_var_params and local_var_params['sort_by'] is not None: # noqa: E501 + query_params.append(('sort_by', local_var_params['sort_by'])) # noqa: E501 + if 'filter' in local_var_params and local_var_params['filter'] is not None: # noqa: E501 + query_params.append(('filter', local_var_params['filter'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/pipeline_versions', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='ApiListPipelineVersionsResponse', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_pipelines(self, **kwargs): # noqa: E501 + """Finds all pipelines. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_pipelines(async_req=True) + >>> result = thread.get() + + :param page_token: A page token to request the next page of results. The token is acquried from the nextPageToken field of the response from the previous ListPipelines call. + :type page_token: str + :param page_size: The number of pipelines to be listed per page. If there are more pipelines than this number, the response message will contain a valid value in the nextPageToken field. + :type page_size: int + :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\" Ascending by default. 
+ :type sort_by: str + :param filter: A url-encoded, JSON-serialized Filter protocol buffer (see [filter.proto](https://github.com/kubeflow/pipelines/ blob/master/backend/api/filter.proto)). + :type filter: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: ApiListPipelinesResponse + """ + kwargs['_return_http_data_only'] = True + return self.list_pipelines_with_http_info(**kwargs) # noqa: E501 + + def list_pipelines_with_http_info(self, **kwargs): # noqa: E501 + """Finds all pipelines. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_pipelines_with_http_info(async_req=True) + >>> result = thread.get() + + :param page_token: A page token to request the next page of results. The token is acquried from the nextPageToken field of the response from the previous ListPipelines call. + :type page_token: str + :param page_size: The number of pipelines to be listed per page. If there are more pipelines than this number, the response message will contain a valid value in the nextPageToken field. + :type page_size: int + :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\" Ascending by default. + :type sort_by: str + :param filter: A url-encoded, JSON-serialized Filter protocol buffer (see [filter.proto](https://github.com/kubeflow/pipelines/ blob/master/backend/api/filter.proto)). + :type filter: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
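# Sketch of list_pipelines with simple pagination: keep following
# next_page_token until the server stops returning one. Attribute names are
# taken from the ApiListPipelinesResponse model.
page_token = ""
while True:
    resp = pipeline_api.list_pipelines(page_token=page_token, page_size=20)
    for p in resp.pipelines or []:
        print(p.id, p.name)
    page_token = resp.next_page_token
    if not page_token:
        break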
+ :rtype: tuple(ApiListPipelinesResponse, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'page_token', + 'page_size', + 'sort_by', + 'filter' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method list_pipelines" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + + collection_formats = {} + + path_params = {} + + query_params = [] + if 'page_token' in local_var_params and local_var_params['page_token'] is not None: # noqa: E501 + query_params.append(('page_token', local_var_params['page_token'])) # noqa: E501 + if 'page_size' in local_var_params and local_var_params['page_size'] is not None: # noqa: E501 + query_params.append(('page_size', local_var_params['page_size'])) # noqa: E501 + if 'sort_by' in local_var_params and local_var_params['sort_by'] is not None: # noqa: E501 + query_params.append(('sort_by', local_var_params['sort_by'])) # noqa: E501 + if 'filter' in local_var_params and local_var_params['filter'] is not None: # noqa: E501 + query_params.append(('filter', local_var_params['filter'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/pipelines', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='ApiListPipelinesResponse', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) diff --git a/backend/api/python_http_client/kfp_server_api/api/pipeline_upload_service_api.py b/backend/api/python_http_client/kfp_server_api/api/pipeline_upload_service_api.py new file mode 100644 index 000000000..78636e610 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/api/pipeline_upload_service_api.py @@ -0,0 +1,331 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +# python 2 and python 3 compatibility library +import six + +from kfp_server_api.api_client import ApiClient +from kfp_server_api.exceptions import ( # noqa: F401 + ApiTypeError, + ApiValueError +) + + +class PipelineUploadServiceApi(object): + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): + if api_client is None: + api_client = ApiClient() + self.api_client = api_client + + def upload_pipeline(self, uploadfile, **kwargs): # noqa: E501 + """upload_pipeline # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.upload_pipeline(uploadfile, async_req=True) + >>> result = thread.get() + + :param uploadfile: The pipeline to upload. Maximum size of 32MB is supported. (required) + :type uploadfile: file + :param name: + :type name: str + :param description: + :type description: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: ApiPipeline + """ + kwargs['_return_http_data_only'] = True + return self.upload_pipeline_with_http_info(uploadfile, **kwargs) # noqa: E501 + + def upload_pipeline_with_http_info(self, uploadfile, **kwargs): # noqa: E501 + """upload_pipeline # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.upload_pipeline_with_http_info(uploadfile, async_req=True) + >>> result = thread.get() + + :param uploadfile: The pipeline to upload. Maximum size of 32MB is supported. (required) + :type uploadfile: file + :param name: + :type name: str + :param description: + :type description: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
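# Sketch of upload_pipeline from PipelineUploadServiceApi: `uploadfile` is
# passed as a local file path (max 32MB package), which the generated
# ApiClient reads and sends as multipart/form-data. The host, file path, and
# display name are placeholders.
from kfp_server_api.api_client import ApiClient
from kfp_server_api.api.pipeline_upload_service_api import PipelineUploadServiceApi
from kfp_server_api.configuration import Configuration

config = Configuration()
config.host = "http://localhost:8888"  # placeholder API endpoint
upload_api = PipelineUploadServiceApi(ApiClient(config))

uploaded = upload_api.upload_pipeline(
    uploadfile="my_pipeline.tar.gz",
    name="My pipeline",
    description="Uploaded through kfp_server_api",
)
print(uploaded.id)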
+ :rtype: tuple(ApiPipeline, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'uploadfile', + 'name', + 'description' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method upload_pipeline" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'uploadfile' is set + if self.api_client.client_side_validation and ('uploadfile' not in local_var_params or # noqa: E501 + local_var_params['uploadfile'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `uploadfile` when calling `upload_pipeline`") # noqa: E501 + + collection_formats = {} + + path_params = {} + + query_params = [] + if 'name' in local_var_params and local_var_params['name'] is not None: # noqa: E501 + query_params.append(('name', local_var_params['name'])) # noqa: E501 + if 'description' in local_var_params and local_var_params['description'] is not None: # noqa: E501 + query_params.append(('description', local_var_params['description'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + if 'uploadfile' in local_var_params: + local_var_files['uploadfile'] = local_var_params['uploadfile'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['multipart/form-data']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/pipelines/upload', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='ApiPipeline', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def upload_pipeline_version(self, uploadfile, **kwargs): # noqa: E501 + """upload_pipeline_version # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.upload_pipeline_version(uploadfile, async_req=True) + >>> result = thread.get() + + :param uploadfile: The pipeline to upload. Maximum size of 32MB is supported. (required) + :type uploadfile: file + :param name: + :type name: str + :param pipelineid: + :type pipelineid: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
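# Sketch of upload_pipeline_version, reusing upload_api from the sketch above:
# uploads a new version of an existing pipeline. The version name and the
# owning pipeline ID (`pipelineid`) are placeholders.
new_version = upload_api.upload_pipeline_version(
    uploadfile="my_pipeline_v2.tar.gz",
    name="My pipeline v2",
    pipelineid="my-pipeline-id",
)
print(new_version.id, new_version.name)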
+ :rtype: ApiPipelineVersion + """ + kwargs['_return_http_data_only'] = True + return self.upload_pipeline_version_with_http_info(uploadfile, **kwargs) # noqa: E501 + + def upload_pipeline_version_with_http_info(self, uploadfile, **kwargs): # noqa: E501 + """upload_pipeline_version # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.upload_pipeline_version_with_http_info(uploadfile, async_req=True) + >>> result = thread.get() + + :param uploadfile: The pipeline to upload. Maximum size of 32MB is supported. (required) + :type uploadfile: file + :param name: + :type name: str + :param pipelineid: + :type pipelineid: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(ApiPipelineVersion, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'uploadfile', + 'name', + 'pipelineid' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method upload_pipeline_version" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'uploadfile' is set + if self.api_client.client_side_validation and ('uploadfile' not in local_var_params or # noqa: E501 + local_var_params['uploadfile'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `uploadfile` when calling `upload_pipeline_version`") # noqa: E501 + + collection_formats = {} + + path_params = {} + + query_params = [] + if 'name' in local_var_params and local_var_params['name'] is not None: # noqa: E501 + query_params.append(('name', local_var_params['name'])) # noqa: E501 + if 'pipelineid' in local_var_params and local_var_params['pipelineid'] is not None: # noqa: E501 + query_params.append(('pipelineid', local_var_params['pipelineid'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + if 'uploadfile' in local_var_params: + local_var_files['uploadfile'] = local_var_params['uploadfile'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['multipart/form-data']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/pipelines/upload_version', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + 
response_type='ApiPipelineVersion', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) diff --git a/backend/api/python_http_client/kfp_server_api/api/run_service_api.py b/backend/api/python_http_client/kfp_server_api/api/run_service_api.py new file mode 100644 index 000000000..052997f77 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/api/run_service_api.py @@ -0,0 +1,1343 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +# python 2 and python 3 compatibility library +import six + +from kfp_server_api.api_client import ApiClient +from kfp_server_api.exceptions import ( # noqa: F401 + ApiTypeError, + ApiValueError +) + + +class RunServiceApi(object): + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): + if api_client is None: + api_client = ApiClient() + self.api_client = api_client + + def archive_run(self, id, **kwargs): # noqa: E501 + """Archives a run. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.archive_run(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the run to be archived. (required) + :type id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: object + """ + kwargs['_return_http_data_only'] = True + return self.archive_run_with_http_info(id, **kwargs) # noqa: E501 + + def archive_run_with_http_info(self, id, **kwargs): # noqa: E501 + """Archives a run. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.archive_run_with_http_info(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the run to be archived. 
(required) + :type id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(object, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'id' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method archive_run" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'id' is set + if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501 + local_var_params['id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `id` when calling `archive_run`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id' in local_var_params: + path_params['id'] = local_var_params['id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/runs/{id}:archive', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='object', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def create_run(self, body, **kwargs): # noqa: E501 + """Creates a new run. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.create_run(body, async_req=True) + >>> result = thread.get() + + :param body: (required) + :type body: ApiRun + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
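# Sketch of create_run from RunServiceApi: starts a run from an uploaded
# pipeline by ID. The ApiRun/ApiPipelineSpec/ApiParameter field names are
# assumed from the generated models; the host, IDs, and parameter values are
# placeholders.
from kfp_server_api.api_client import ApiClient
from kfp_server_api.api.run_service_api import RunServiceApi
from kfp_server_api.configuration import Configuration
from kfp_server_api.models.api_parameter import ApiParameter
from kfp_server_api.models.api_pipeline_spec import ApiPipelineSpec
from kfp_server_api.models.api_run import ApiRun

config = Configuration()
config.host = "http://localhost:8888"  # placeholder API endpoint
run_api = RunServiceApi(ApiClient(config))

run_body = ApiRun(
    name="my-run",
    pipeline_spec=ApiPipelineSpec(
        pipeline_id="my-pipeline-id",
        parameters=[ApiParameter(name="message", value="hello")],
    ),
)
run_detail = run_api.create_run(body=run_body)  # returns an ApiRunDetail
print(run_detail.run.id, run_detail.run.status)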
+ :rtype: ApiRunDetail + """ + kwargs['_return_http_data_only'] = True + return self.create_run_with_http_info(body, **kwargs) # noqa: E501 + + def create_run_with_http_info(self, body, **kwargs): # noqa: E501 + """Creates a new run. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.create_run_with_http_info(body, async_req=True) + >>> result = thread.get() + + :param body: (required) + :type body: ApiRun + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(ApiRunDetail, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'body' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method create_run" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'body' is set + if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 + local_var_params['body'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `body` when calling `create_run`") # noqa: E501 + + collection_formats = {} + + path_params = {} + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + if 'body' in local_var_params: + body_params = local_var_params['body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/runs', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='ApiRunDetail', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def delete_run(self, id, **kwargs): # noqa: E501 + """Deletes a run. # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.delete_run(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the run to be deleted. (required) + :type id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: object + """ + kwargs['_return_http_data_only'] = True + return self.delete_run_with_http_info(id, **kwargs) # noqa: E501 + + def delete_run_with_http_info(self, id, **kwargs): # noqa: E501 + """Deletes a run. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.delete_run_with_http_info(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the run to be deleted. (required) + :type id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
+ :rtype: tuple(object, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'id' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method delete_run" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'id' is set + if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501 + local_var_params['id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `id` when calling `delete_run`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id' in local_var_params: + path_params['id'] = local_var_params['id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/runs/{id}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='object', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def get_run(self, run_id, **kwargs): # noqa: E501 + """Finds a specific run by ID. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.get_run(run_id, async_req=True) + >>> result = thread.get() + + :param run_id: The ID of the run to be retrieved. (required) + :type run_id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: ApiRunDetail + """ + kwargs['_return_http_data_only'] = True + return self.get_run_with_http_info(run_id, **kwargs) # noqa: E501 + + def get_run_with_http_info(self, run_id, **kwargs): # noqa: E501 + """Finds a specific run by ID. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.get_run_with_http_info(run_id, async_req=True) + >>> result = thread.get() + + :param run_id: The ID of the run to be retrieved. (required) + :type run_id: str + :param async_req: Whether to execute the request asynchronously. 
:type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(ApiRunDetail, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'run_id' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method get_run" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'run_id' is set + if self.api_client.client_side_validation and ('run_id' not in local_var_params or # noqa: E501 + local_var_params['run_id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `run_id` when calling `get_run`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'run_id' in local_var_params: + path_params['run_id'] = local_var_params['run_id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/runs/{run_id}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='ApiRunDetail', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_runs(self, **kwargs): # noqa: E501 + """Finds all runs. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_runs(async_req=True) + >>> result = thread.get() + + :param page_token: A page token to request the next page of results. The token is acquired from the nextPageToken field of the response from the previous ListRuns call or can be omitted when fetching the first page. + :type page_token: str + :param page_size: The number of runs to be listed per page. If there are more runs than this number, the response message will contain a nextPageToken field you can use to fetch the next page. + :type page_size: int + :param sort_by: Can be in the form of \"field_name\", \"field_name asc\" or \"field_name desc\" (for example, \"name asc\" or \"id desc\"). Ascending by default. + :type sort_by: str + :param resource_reference_key_type: The type of the resource that is referred to.
:type resource_reference_key_type: str + :param resource_reference_key_id: The ID of the resource that is referred to. + :type resource_reference_key_id: str + :param filter: A url-encoded, JSON-serialized Filter protocol buffer (see [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). + :type filter: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: ApiListRunsResponse + """ + kwargs['_return_http_data_only'] = True + return self.list_runs_with_http_info(**kwargs) # noqa: E501 + + def list_runs_with_http_info(self, **kwargs): # noqa: E501 + """Finds all runs. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_runs_with_http_info(async_req=True) + >>> result = thread.get() + + :param page_token: A page token to request the next page of results. The token is acquired from the nextPageToken field of the response from the previous ListRuns call or can be omitted when fetching the first page. + :type page_token: str + :param page_size: The number of runs to be listed per page. If there are more runs than this number, the response message will contain a nextPageToken field you can use to fetch the next page. + :type page_size: int + :param sort_by: Can be in the form of \"field_name\", \"field_name asc\" or \"field_name desc\" (for example, \"name asc\" or \"id desc\"). Ascending by default. + :type sort_by: str + :param resource_reference_key_type: The type of the resource that is referred to. + :type resource_reference_key_type: str + :param resource_reference_key_id: The ID of the resource that is referred to. + :type resource_reference_key_id: str + :param filter: A url-encoded, JSON-serialized Filter protocol buffer (see [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). + :type filter: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread.
+ :rtype: tuple(ApiListRunsResponse, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'page_token', + 'page_size', + 'sort_by', + 'resource_reference_key_type', + 'resource_reference_key_id', + 'filter' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method list_runs" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + + collection_formats = {} + + path_params = {} + + query_params = [] + if 'page_token' in local_var_params and local_var_params['page_token'] is not None: # noqa: E501 + query_params.append(('page_token', local_var_params['page_token'])) # noqa: E501 + if 'page_size' in local_var_params and local_var_params['page_size'] is not None: # noqa: E501 + query_params.append(('page_size', local_var_params['page_size'])) # noqa: E501 + if 'sort_by' in local_var_params and local_var_params['sort_by'] is not None: # noqa: E501 + query_params.append(('sort_by', local_var_params['sort_by'])) # noqa: E501 + if 'resource_reference_key_type' in local_var_params and local_var_params['resource_reference_key_type'] is not None: # noqa: E501 + query_params.append(('resource_reference_key.type', local_var_params['resource_reference_key_type'])) # noqa: E501 + if 'resource_reference_key_id' in local_var_params and local_var_params['resource_reference_key_id'] is not None: # noqa: E501 + query_params.append(('resource_reference_key.id', local_var_params['resource_reference_key_id'])) # noqa: E501 + if 'filter' in local_var_params and local_var_params['filter'] is not None: # noqa: E501 + query_params.append(('filter', local_var_params['filter'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/runs', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='ApiListRunsResponse', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def read_artifact(self, run_id, node_id, artifact_name, **kwargs): # noqa: E501 + """Finds a run's artifact data. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.read_artifact(run_id, node_id, artifact_name, async_req=True) + >>> result = thread.get() + + :param run_id: The ID of the run. (required) + :type run_id: str + :param node_id: The ID of the running node. (required) + :type node_id: str + :param artifact_name: The name of the artifact. (required) + :type artifact_name: str + :param async_req: Whether to execute the request asynchronously. 
+ :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: ApiReadArtifactResponse + """ + kwargs['_return_http_data_only'] = True + return self.read_artifact_with_http_info(run_id, node_id, artifact_name, **kwargs) # noqa: E501 + + def read_artifact_with_http_info(self, run_id, node_id, artifact_name, **kwargs): # noqa: E501 + """Finds a run's artifact data. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.read_artifact_with_http_info(run_id, node_id, artifact_name, async_req=True) + >>> result = thread.get() + + :param run_id: The ID of the run. (required) + :type run_id: str + :param node_id: The ID of the running node. (required) + :type node_id: str + :param artifact_name: The name of the artifact. (required) + :type artifact_name: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
+ :rtype: tuple(ApiReadArtifactResponse, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'run_id', + 'node_id', + 'artifact_name' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method read_artifact" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'run_id' is set + if self.api_client.client_side_validation and ('run_id' not in local_var_params or # noqa: E501 + local_var_params['run_id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `run_id` when calling `read_artifact`") # noqa: E501 + # verify the required parameter 'node_id' is set + if self.api_client.client_side_validation and ('node_id' not in local_var_params or # noqa: E501 + local_var_params['node_id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `node_id` when calling `read_artifact`") # noqa: E501 + # verify the required parameter 'artifact_name' is set + if self.api_client.client_side_validation and ('artifact_name' not in local_var_params or # noqa: E501 + local_var_params['artifact_name'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `artifact_name` when calling `read_artifact`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'run_id' in local_var_params: + path_params['run_id'] = local_var_params['run_id'] # noqa: E501 + if 'node_id' in local_var_params: + path_params['node_id'] = local_var_params['node_id'] # noqa: E501 + if 'artifact_name' in local_var_params: + path_params['artifact_name'] = local_var_params['artifact_name'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='ApiReadArtifactResponse', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def report_run_metrics(self, run_id, body, **kwargs): # noqa: E501 + """ReportRunMetrics reports metrics of a run. Each metric is reported in its own transaction, so this API accepts partial failures. Metric can be uniquely identified by (run_id, node_id, name). Duplicate reporting will be ignored by the API. First reporting wins. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.report_run_metrics(run_id, body, async_req=True) + >>> result = thread.get() + + :param run_id: Required. The parent run ID of the metric. 
(required) + :type run_id: str + :param body: (required) + :type body: ApiReportRunMetricsRequest + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: ApiReportRunMetricsResponse + """ + kwargs['_return_http_data_only'] = True + return self.report_run_metrics_with_http_info(run_id, body, **kwargs) # noqa: E501 + + def report_run_metrics_with_http_info(self, run_id, body, **kwargs): # noqa: E501 + """ReportRunMetrics reports metrics of a run. Each metric is reported in its own transaction, so this API accepts partial failures. Metric can be uniquely identified by (run_id, node_id, name). Duplicate reporting will be ignored by the API. First reporting wins. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.report_run_metrics_with_http_info(run_id, body, async_req=True) + >>> result = thread.get() + + :param run_id: Required. The parent run ID of the metric. (required) + :type run_id: str + :param body: (required) + :type body: ApiReportRunMetricsRequest + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
+ :rtype: tuple(ApiReportRunMetricsResponse, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'run_id', + 'body' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method report_run_metrics" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'run_id' is set + if self.api_client.client_side_validation and ('run_id' not in local_var_params or # noqa: E501 + local_var_params['run_id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `run_id` when calling `report_run_metrics`") # noqa: E501 + # verify the required parameter 'body' is set + if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 + local_var_params['body'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `body` when calling `report_run_metrics`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'run_id' in local_var_params: + path_params['run_id'] = local_var_params['run_id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + if 'body' in local_var_params: + body_params = local_var_params['body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/runs/{run_id}:reportMetrics', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='ApiReportRunMetricsResponse', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def retry_run(self, run_id, **kwargs): # noqa: E501 + """Re-initiates a failed or terminated run. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.retry_run(run_id, async_req=True) + >>> result = thread.get() + + :param run_id: The ID of the run to be retried. (required) + :type run_id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
+ :rtype: object + """ + kwargs['_return_http_data_only'] = True + return self.retry_run_with_http_info(run_id, **kwargs) # noqa: E501 + + def retry_run_with_http_info(self, run_id, **kwargs): # noqa: E501 + """Re-initiates a failed or terminated run. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.retry_run_with_http_info(run_id, async_req=True) + >>> result = thread.get() + + :param run_id: The ID of the run to be retried. (required) + :type run_id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(object, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'run_id' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method retry_run" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'run_id' is set + if self.api_client.client_side_validation and ('run_id' not in local_var_params or # noqa: E501 + local_var_params['run_id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `run_id` when calling `retry_run`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'run_id' in local_var_params: + path_params['run_id'] = local_var_params['run_id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/runs/{run_id}/retry', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='object', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def terminate_run(self, run_id, **kwargs): # noqa: E501 + """Terminates an active run. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.terminate_run(run_id, async_req=True) + >>> result = thread.get() + + :param run_id: The ID of the run to be terminated. 
(required) + :type run_id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: object + """ + kwargs['_return_http_data_only'] = True + return self.terminate_run_with_http_info(run_id, **kwargs) # noqa: E501 + + def terminate_run_with_http_info(self, run_id, **kwargs): # noqa: E501 + """Terminates an active run. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.terminate_run_with_http_info(run_id, async_req=True) + >>> result = thread.get() + + :param run_id: The ID of the run to be terminated. (required) + :type run_id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
+ :rtype: tuple(object, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'run_id' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method terminate_run" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'run_id' is set + if self.api_client.client_side_validation and ('run_id' not in local_var_params or # noqa: E501 + local_var_params['run_id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `run_id` when calling `terminate_run`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'run_id' in local_var_params: + path_params['run_id'] = local_var_params['run_id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/runs/{run_id}/terminate', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='object', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def unarchive_run(self, id, **kwargs): # noqa: E501 + """Restores an archived run. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.unarchive_run(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the run to be restored. (required) + :type id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: object + """ + kwargs['_return_http_data_only'] = True + return self.unarchive_run_with_http_info(id, **kwargs) # noqa: E501 + + def unarchive_run_with_http_info(self, id, **kwargs): # noqa: E501 + """Restores an archived run. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.unarchive_run_with_http_info(id, async_req=True) + >>> result = thread.get() + + :param id: The ID of the run to be restored. (required) + :type id: str + :param async_req: Whether to execute the request asynchronously. 
+ :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(object, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'id' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method unarchive_run" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'id' is set + if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501 + local_var_params['id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `id` when calling `unarchive_run`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id' in local_var_params: + path_params['id'] = local_var_params['id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v1beta1/runs/{id}:unarchive', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='object', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) diff --git a/backend/api/python_http_client/kfp_server_api/api_client.py b/backend/api/python_http_client/kfp_server_api/api_client.py new file mode 100644 index 000000000..a5c4856a3 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/api_client.py @@ -0,0 +1,680 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + +from __future__ import absolute_import + +import atexit +import datetime +from dateutil.parser import parse +import json +import mimetypes +from multiprocessing.pool import ThreadPool +import os +import re +import tempfile + +# python 2 and python 3 compatibility library +import six +from six.moves.urllib.parse import quote + +from kfp_server_api.configuration import Configuration +import kfp_server_api.models +from kfp_server_api import rest +from kfp_server_api.exceptions import ApiValueError, ApiException + + +class ApiClient(object): + """Generic API client for OpenAPI client library builds. + + OpenAPI generic API client. This client handles the client- + server communication, and is invariant across implementations. Specifics of + the methods and models for each application are generated from the OpenAPI + templates. + + NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + Do not edit the class manually. + + :param configuration: .Configuration object for this client + :param header_name: a header to pass when making calls to the API. + :param header_value: a header value to pass when making calls to + the API. + :param cookie: a cookie to include in the header when making calls + to the API + :param pool_threads: The number of threads to use for async requests + to the API. More threads means more concurrent API requests. + """ + + PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types + NATIVE_TYPES_MAPPING = { + 'int': int, + 'long': int if six.PY3 else long, # noqa: F821 + 'float': float, + 'str': str, + 'bool': bool, + 'date': datetime.date, + 'datetime': datetime.datetime, + 'object': object, + } + _pool = None + + def __init__(self, configuration=None, header_name=None, header_value=None, + cookie=None, pool_threads=1): + if configuration is None: + configuration = Configuration.get_default_copy() + self.configuration = configuration + self.pool_threads = pool_threads + + self.rest_client = rest.RESTClientObject(configuration) + self.default_headers = {} + if header_name is not None: + self.default_headers[header_name] = header_value + self.cookie = cookie + # Set default User-Agent. + self.user_agent = 'OpenAPI-Generator/1.0.0/python' + self.client_side_validation = configuration.client_side_validation + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + + def close(self): + if self._pool: + self._pool.close() + self._pool.join() + self._pool = None + if hasattr(atexit, 'unregister'): + atexit.unregister(self.close) + + @property + def pool(self): + """Create thread pool on first request + avoids instantiating unused threadpool for blocking clients. 
+ """ + if self._pool is None: + atexit.register(self.close) + self._pool = ThreadPool(self.pool_threads) + return self._pool + + @property + def user_agent(self): + """User agent for this API client""" + return self.default_headers['User-Agent'] + + @user_agent.setter + def user_agent(self, value): + self.default_headers['User-Agent'] = value + + def set_default_header(self, header_name, header_value): + self.default_headers[header_name] = header_value + + def __call_api( + self, resource_path, method, path_params=None, + query_params=None, header_params=None, body=None, post_params=None, + files=None, response_type=None, auth_settings=None, + _return_http_data_only=None, collection_formats=None, + _preload_content=True, _request_timeout=None, _host=None): + + config = self.configuration + + # header parameters + header_params = header_params or {} + header_params.update(self.default_headers) + if self.cookie: + header_params['Cookie'] = self.cookie + if header_params: + header_params = self.sanitize_for_serialization(header_params) + header_params = dict(self.parameters_to_tuples(header_params, + collection_formats)) + + # path parameters + if path_params: + path_params = self.sanitize_for_serialization(path_params) + path_params = self.parameters_to_tuples(path_params, + collection_formats) + for k, v in path_params: + # specified safe chars, encode everything + resource_path = resource_path.replace( + '{%s}' % k, + quote(str(v), safe=config.safe_chars_for_path_param) + ) + + # query parameters + if query_params: + query_params = self.sanitize_for_serialization(query_params) + query_params = self.parameters_to_tuples(query_params, + collection_formats) + + # post parameters + if post_params or files: + post_params = post_params if post_params else [] + post_params = self.sanitize_for_serialization(post_params) + post_params = self.parameters_to_tuples(post_params, + collection_formats) + post_params.extend(self.files_parameters(files)) + + # auth setting + self.update_params_for_auth(header_params, query_params, auth_settings) + + # body + if body: + body = self.sanitize_for_serialization(body) + + # request url + if _host is None: + url = self.configuration.host + resource_path + else: + # use server/host defined in path or operation instead + url = _host + resource_path + + try: + # perform request and return response + response_data = self.request( + method, url, query_params=query_params, headers=header_params, + post_params=post_params, body=body, + _preload_content=_preload_content, + _request_timeout=_request_timeout) + except ApiException as e: + e.body = e.body.decode('utf-8') if six.PY3 else e.body + raise e + + content_type = response_data.getheader('content-type') + + self.last_response = response_data + + return_data = response_data + + if not _preload_content: + return return_data + + if six.PY3 and response_type not in ["file", "bytes"]: + match = None + if content_type is not None: + match = re.search(r"charset=([a-zA-Z\-\d]+)[\s\;]?", content_type) + encoding = match.group(1) if match else "utf-8" + response_data.data = response_data.data.decode(encoding) + + # deserialize response data + if response_type: + return_data = self.deserialize(response_data, response_type) + else: + return_data = None + + if _return_http_data_only: + return (return_data) + else: + return (return_data, response_data.status, + response_data.getheaders()) + + def sanitize_for_serialization(self, obj): + """Builds a JSON POST object. + + If obj is None, return None. 
+ If obj is str, int, long, float, bool, return directly. + If obj is datetime.datetime, datetime.date + convert to string in iso8601 format. + If obj is list, sanitize each element in the list. + If obj is dict, return the dict. + If obj is OpenAPI model, return the properties dict. + + :param obj: The data to serialize. + :return: The serialized form of data. + """ + if obj is None: + return None + elif isinstance(obj, self.PRIMITIVE_TYPES): + return obj + elif isinstance(obj, list): + return [self.sanitize_for_serialization(sub_obj) + for sub_obj in obj] + elif isinstance(obj, tuple): + return tuple(self.sanitize_for_serialization(sub_obj) + for sub_obj in obj) + elif isinstance(obj, (datetime.datetime, datetime.date)): + return obj.isoformat() + + if isinstance(obj, dict): + obj_dict = obj + else: + # Convert model obj to dict except + # attributes `openapi_types`, `attribute_map` + # and attributes which value is not None. + # Convert attribute name to json key in + # model definition for request. + obj_dict = {obj.attribute_map[attr]: getattr(obj, attr) + for attr, _ in six.iteritems(obj.openapi_types) + if getattr(obj, attr) is not None} + + return {key: self.sanitize_for_serialization(val) + for key, val in six.iteritems(obj_dict)} + + def deserialize(self, response, response_type): + """Deserializes response into an object. + + :param response: RESTResponse object to be deserialized. + :param response_type: class literal for + deserialized object, or string of class name. + + :return: deserialized object. + """ + # handle file downloading + # save response body into a tmp file and return the instance + if response_type == "file": + return self.__deserialize_file(response) + + # fetch data from response object + try: + data = json.loads(response.data) + except ValueError: + data = response.data + + return self.__deserialize(data, response_type) + + def __deserialize(self, data, klass): + """Deserializes dict, list, str into an object. + + :param data: dict, list or str. + :param klass: class literal, or string of class name. + + :return: object. + """ + if data is None: + return None + + if type(klass) == str: + if klass.startswith('list['): + sub_kls = re.match(r'list\[(.*)\]', klass).group(1) + return [self.__deserialize(sub_data, sub_kls) + for sub_data in data] + + if klass.startswith('dict('): + sub_kls = re.match(r'dict\(([^,]*), (.*)\)', klass).group(2) + return {k: self.__deserialize(v, sub_kls) + for k, v in six.iteritems(data)} + + # convert str to class + if klass in self.NATIVE_TYPES_MAPPING: + klass = self.NATIVE_TYPES_MAPPING[klass] + else: + klass = getattr(kfp_server_api.models, klass) + + if klass in self.PRIMITIVE_TYPES: + return self.__deserialize_primitive(data, klass) + elif klass == object: + return self.__deserialize_object(data) + elif klass == datetime.date: + return self.__deserialize_date(data) + elif klass == datetime.datetime: + return self.__deserialize_datetime(data) + else: + return self.__deserialize_model(data, klass) + + def call_api(self, resource_path, method, + path_params=None, query_params=None, header_params=None, + body=None, post_params=None, files=None, + response_type=None, auth_settings=None, async_req=None, + _return_http_data_only=None, collection_formats=None, + _preload_content=True, _request_timeout=None, _host=None): + """Makes the HTTP request (synchronous) and returns deserialized data. + + To make an async_req request, set the async_req parameter. + + :param resource_path: Path to method endpoint. 
+ :param method: Method to call. + :param path_params: Path parameters in the url. + :param query_params: Query parameters in the url. + :param header_params: Header parameters to be + placed in the request header. + :param body: Request body. + :param post_params dict: Request post form parameters, + for `application/x-www-form-urlencoded`, `multipart/form-data`. + :param auth_settings list: Auth Settings names for the request. + :param response: Response data type. + :param files dict: key -> filename, value -> filepath, + for `multipart/form-data`. + :param async_req bool: execute request asynchronously + :param _return_http_data_only: response data without head status code + and headers + :param collection_formats: dict of collection formats for path, query, + header, and post parameters. + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: + If async_req parameter is True, + the request will be called asynchronously. + The method will return the request thread. + If parameter async_req is False or missing, + then the method will return the response directly. + """ + if not async_req: + return self.__call_api(resource_path, method, + path_params, query_params, header_params, + body, post_params, files, + response_type, auth_settings, + _return_http_data_only, collection_formats, + _preload_content, _request_timeout, _host) + + return self.pool.apply_async(self.__call_api, (resource_path, + method, path_params, + query_params, + header_params, body, + post_params, files, + response_type, + auth_settings, + _return_http_data_only, + collection_formats, + _preload_content, + _request_timeout, + _host)) + + def request(self, method, url, query_params=None, headers=None, + post_params=None, body=None, _preload_content=True, + _request_timeout=None): + """Makes the HTTP request using RESTClient.""" + if method == "GET": + return self.rest_client.GET(url, + query_params=query_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + headers=headers) + elif method == "HEAD": + return self.rest_client.HEAD(url, + query_params=query_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + headers=headers) + elif method == "OPTIONS": + return self.rest_client.OPTIONS(url, + query_params=query_params, + headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout) + elif method == "POST": + return self.rest_client.POST(url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + elif method == "PUT": + return self.rest_client.PUT(url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + elif method == "PATCH": + return self.rest_client.PATCH(url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + elif method == "DELETE": + return self.rest_client.DELETE(url, + query_params=query_params, + headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + else: + 
raise ApiValueError( + "http method must be `GET`, `HEAD`, `OPTIONS`," + " `POST`, `PATCH`, `PUT` or `DELETE`." + ) + + def parameters_to_tuples(self, params, collection_formats): + """Get parameters as list of tuples, formatting collections. + + :param params: Parameters as dict or list of two-tuples + :param dict collection_formats: Parameter collection formats + :return: Parameters as list of tuples, collections formatted + """ + new_params = [] + if collection_formats is None: + collection_formats = {} + for k, v in six.iteritems(params) if isinstance(params, dict) else params: # noqa: E501 + if k in collection_formats: + collection_format = collection_formats[k] + if collection_format == 'multi': + new_params.extend((k, value) for value in v) + else: + if collection_format == 'ssv': + delimiter = ' ' + elif collection_format == 'tsv': + delimiter = '\t' + elif collection_format == 'pipes': + delimiter = '|' + else: # csv is the default + delimiter = ',' + new_params.append( + (k, delimiter.join(str(value) for value in v))) + else: + new_params.append((k, v)) + return new_params + + def files_parameters(self, files=None): + """Builds form parameters. + + :param files: File parameters. + :return: Form parameters with files. + """ + params = [] + + if files: + for k, v in six.iteritems(files): + if not v: + continue + file_names = v if type(v) is list else [v] + for n in file_names: + with open(n, 'rb') as f: + filename = os.path.basename(f.name) + filedata = f.read() + mimetype = (mimetypes.guess_type(filename)[0] or + 'application/octet-stream') + params.append( + tuple([k, tuple([filename, filedata, mimetype])])) + + return params + + def select_header_accept(self, accepts): + """Returns `Accept` based on an array of accepts provided. + + :param accepts: List of headers. + :return: Accept (e.g. application/json). + """ + if not accepts: + return + + accepts = [x.lower() for x in accepts] + + if 'application/json' in accepts: + return 'application/json' + else: + return ', '.join(accepts) + + def select_header_content_type(self, content_types): + """Returns `Content-Type` based on an array of content_types provided. + + :param content_types: List of content-types. + :return: Content-Type (e.g. application/json). + """ + if not content_types: + return 'application/json' + + content_types = [x.lower() for x in content_types] + + if 'application/json' in content_types or '*/*' in content_types: + return 'application/json' + else: + return content_types[0] + + def update_params_for_auth(self, headers, querys, auth_settings): + """Updates header and query params based on authentication setting. + + :param headers: Header parameters dict to be updated. + :param querys: Query parameters tuple list to be updated. + :param auth_settings: Authentication setting identifiers list. + """ + if not auth_settings: + return + + for auth in auth_settings: + auth_setting = self.configuration.auth_settings().get(auth) + if auth_setting: + if auth_setting['in'] == 'cookie': + headers['Cookie'] = auth_setting['value'] + elif auth_setting['in'] == 'header': + headers[auth_setting['key']] = auth_setting['value'] + elif auth_setting['in'] == 'query': + querys.append((auth_setting['key'], auth_setting['value'])) + else: + raise ApiValueError( + 'Authentication token must be in `query` or `header`' + ) + + def __deserialize_file(self, response): + """Deserializes body to file + + Saves response body into a file in a temporary folder, + using the filename from the `Content-Disposition` header if provided. 
+ + :param response: RESTResponse. + :return: file path. + """ + fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path) + os.close(fd) + os.remove(path) + + content_disposition = response.getheader("Content-Disposition") + if content_disposition: + filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?', + content_disposition).group(1) + path = os.path.join(os.path.dirname(path), filename) + + with open(path, "wb") as f: + f.write(response.data) + + return path + + def __deserialize_primitive(self, data, klass): + """Deserializes string to primitive type. + + :param data: str. + :param klass: class literal. + + :return: int, long, float, str, bool. + """ + try: + return klass(data) + except UnicodeEncodeError: + return six.text_type(data) + except TypeError: + return data + + def __deserialize_object(self, value): + """Return an original value. + + :return: object. + """ + return value + + def __deserialize_date(self, string): + """Deserializes string to date. + + :param string: str. + :return: date. + """ + try: + return parse(string).date() + except ImportError: + return string + except ValueError: + raise rest.ApiException( + status=0, + reason="Failed to parse `{0}` as date object".format(string) + ) + + def __deserialize_datetime(self, string): + """Deserializes string to datetime. + + The string should be in iso8601 datetime format. + + :param string: str. + :return: datetime. + """ + try: + return parse(string) + except ImportError: + return string + except ValueError: + raise rest.ApiException( + status=0, + reason=( + "Failed to parse `{0}` as datetime object" + .format(string) + ) + ) + + def __deserialize_model(self, data, klass): + """Deserializes list or dict to model. + + :param data: dict, list. + :param klass: class literal. + :return: model object. + """ + has_discriminator = False + if (hasattr(klass, 'get_real_child_model') + and klass.discriminator_value_class_map): + has_discriminator = True + + if not klass.openapi_types and has_discriminator is False: + return data + + kwargs = {} + if (data is not None and + klass.openapi_types is not None and + isinstance(data, (list, dict))): + for attr, attr_type in six.iteritems(klass.openapi_types): + if klass.attribute_map[attr] in data: + value = data[klass.attribute_map[attr]] + kwargs[attr] = self.__deserialize(value, attr_type) + + instance = klass(**kwargs) + + if has_discriminator: + klass_name = instance.get_real_child_model(data) + if klass_name: + instance = self.__deserialize(data, klass_name) + return instance diff --git a/backend/api/python_http_client/kfp_server_api/configuration.py b/backend/api/python_http_client/kfp_server_api/configuration.py new file mode 100644 index 000000000..462607707 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/configuration.py @@ -0,0 +1,417 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. 
The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import copy +import logging +import multiprocessing +import sys +import urllib3 + +import six +from six.moves import http_client as httplib + + +class Configuration(object): + """NOTE: This class is auto generated by OpenAPI Generator + + Ref: https://openapi-generator.tech + Do not edit the class manually. + + :param host: Base url + :param api_key: Dict to store API key(s). + Each entry in the dict specifies an API key. + The dict key is the name of the security scheme in the OAS specification. + The dict value is the API key secret. + :param api_key_prefix: Dict to store API prefix (e.g. Bearer) + The dict key is the name of the security scheme in the OAS specification. + The dict value is an API key prefix when generating the auth data. + :param username: Username for HTTP basic authentication + :param password: Password for HTTP basic authentication + :param discard_unknown_keys: Boolean value indicating whether to discard + unknown properties. A server may send a response that includes additional + properties that are not known by the client in the following scenarios: + 1. The OpenAPI document is incomplete, i.e. it does not match the server + implementation. + 2. The client was generated using an older version of the OpenAPI document + and the server has been upgraded since then. + If a schema in the OpenAPI document defines the additionalProperties attribute, + then all undeclared properties received by the server are injected into the + additional properties map. In that case, there are undeclared properties, and + nothing to discard. + + :Example: + + API Key Authentication Example. + Given the following security scheme in the OpenAPI specification: + components: + securitySchemes: + cookieAuth: # name for the security scheme + type: apiKey + in: cookie + name: JSESSIONID # cookie name + + You can programmatically set the cookie: + +conf = kfp_server_api.Configuration( + api_key={'cookieAuth': 'abc123'}, + api_key_prefix={'cookieAuth': 'JSESSIONID'} +) + + The following cookie will be added to the HTTP request: + Cookie: JSESSIONID abc123 + """ + + _default = None + + def __init__(self, host="http://localhost", + api_key=None, api_key_prefix=None, + username=None, password=None, + discard_unknown_keys=False, + ): + """Constructor + """ + self.host = host + """Default Base url + """ + self.temp_folder_path = None + """Temp file folder for downloading files + """ + # Authentication Settings + self.api_key = {} + if api_key: + self.api_key = api_key + """dict to store API key(s) + """ + self.api_key_prefix = {} + if api_key_prefix: + self.api_key_prefix = api_key_prefix + """dict to store API prefix (e.g.
Bearer) + """ + self.refresh_api_key_hook = None + """function hook to refresh API key if expired + """ + self.username = username + """Username for HTTP basic authentication + """ + self.password = password + """Password for HTTP basic authentication + """ + self.discard_unknown_keys = discard_unknown_keys + self.logger = {} + """Logging Settings + """ + self.logger["package_logger"] = logging.getLogger("kfp_server_api") + self.logger["urllib3_logger"] = logging.getLogger("urllib3") + self.logger_format = '%(asctime)s %(levelname)s %(message)s' + """Log format + """ + self.logger_stream_handler = None + """Log stream handler + """ + self.logger_file_handler = None + """Log file handler + """ + self.logger_file = None + """Debug file location + """ + self.debug = False + """Debug switch + """ + + self.verify_ssl = True + """SSL/TLS verification + Set this to false to skip verifying SSL certificate when calling API + from https server. + """ + self.ssl_ca_cert = None + """Set this to customize the certificate file to verify the peer. + """ + self.cert_file = None + """client certificate file + """ + self.key_file = None + """client key file + """ + self.assert_hostname = None + """Set this to True/False to enable/disable SSL hostname verification. + """ + + self.connection_pool_maxsize = multiprocessing.cpu_count() * 5 + """urllib3 connection pool's maximum number of connections saved + per pool. urllib3 uses 1 connection as default value, but this is + not the best value when you are making a lot of possibly parallel + requests to the same host, which is often the case here. + cpu_count * 5 is used as default value to increase performance. + """ + + self.proxy = None + """Proxy URL + """ + self.proxy_headers = None + """Proxy headers + """ + self.safe_chars_for_path_param = '' + """Safe chars for path_param + """ + self.retries = None + """Adding retries to override urllib3 default value 3 + """ + # Enable client side validation + self.client_side_validation = True + + def __deepcopy__(self, memo): + cls = self.__class__ + result = cls.__new__(cls) + memo[id(self)] = result + for k, v in self.__dict__.items(): + if k not in ('logger', 'logger_file_handler'): + setattr(result, k, copy.deepcopy(v, memo)) + # shallow copy of loggers + result.logger = copy.copy(self.logger) + # use setters to configure loggers + result.logger_file = self.logger_file + result.debug = self.debug + return result + + def __setattr__(self, name, value): + object.__setattr__(self, name, value) + + @classmethod + def set_default(cls, default): + """Set default instance of configuration. + + It stores default configuration, which can be + returned by get_default_copy method. + + :param default: object of Configuration + """ + cls._default = copy.deepcopy(default) + + @classmethod + def get_default_copy(cls): + """Return new instance of configuration. + + This method returns newly created, based on default constructor, + object of Configuration class or returns a copy of default + configuration passed by the set_default method. + + :return: The configuration object. + """ + if cls._default is not None: + return copy.deepcopy(cls._default) + return Configuration() + + @property + def logger_file(self): + """The logger file. + + If the logger_file is None, then add stream handler and remove file + handler. Otherwise, add file handler and remove stream handler. + + :param value: The logger_file path.
+ :type: str + """ + return self.__logger_file + + @logger_file.setter + def logger_file(self, value): + """The logger file. + + If the logger_file is None, then add stream handler and remove file + handler. Otherwise, add file handler and remove stream handler. + + :param value: The logger_file path. + :type: str + """ + self.__logger_file = value + if self.__logger_file: + # If set logging file, + # then add file handler and remove stream handler. + self.logger_file_handler = logging.FileHandler(self.__logger_file) + self.logger_file_handler.setFormatter(self.logger_formatter) + for _, logger in six.iteritems(self.logger): + logger.addHandler(self.logger_file_handler) + + @property + def debug(self): + """Debug status + + :param value: The debug status, True or False. + :type: bool + """ + return self.__debug + + @debug.setter + def debug(self, value): + """Debug status + + :param value: The debug status, True or False. + :type: bool + """ + self.__debug = value + if self.__debug: + # if debug status is True, turn on debug logging + for _, logger in six.iteritems(self.logger): + logger.setLevel(logging.DEBUG) + # turn on httplib debug + httplib.HTTPConnection.debuglevel = 1 + else: + # if debug status is False, turn off debug logging, + # setting log level to default `logging.WARNING` + for _, logger in six.iteritems(self.logger): + logger.setLevel(logging.WARNING) + # turn off httplib debug + httplib.HTTPConnection.debuglevel = 0 + + @property + def logger_format(self): + """The logger format. + + The logger_formatter will be updated when sets logger_format. + + :param value: The format string. + :type: str + """ + return self.__logger_format + + @logger_format.setter + def logger_format(self, value): + """The logger format. + + The logger_formatter will be updated when sets logger_format. + + :param value: The format string. + :type: str + """ + self.__logger_format = value + self.logger_formatter = logging.Formatter(self.__logger_format) + + def get_api_key_with_prefix(self, identifier): + """Gets API key (with prefix if set). + + :param identifier: The identifier of apiKey. + :return: The token for api key authentication. + """ + if self.refresh_api_key_hook is not None: + self.refresh_api_key_hook(self) + key = self.api_key.get(identifier) + if key: + prefix = self.api_key_prefix.get(identifier) + if prefix: + return "%s %s" % (prefix, key) + else: + return key + + def get_basic_auth_token(self): + """Gets HTTP basic authentication header (string). + + :return: The token for basic HTTP authentication. + """ + username = "" + if self.username is not None: + username = self.username + password = "" + if self.password is not None: + password = self.password + return urllib3.util.make_headers( + basic_auth=username + ':' + password + ).get('authorization') + + def auth_settings(self): + """Gets Auth Settings dict for api client. + + :return: The Auth Settings information dict. + """ + auth = {} + if 'authorization' in self.api_key: + auth['Bearer'] = { + 'type': 'api_key', + 'in': 'header', + 'key': 'authorization', + 'value': self.get_api_key_with_prefix('authorization') + } + return auth + + def to_debug_report(self): + """Gets the essential information for debugging. + + :return: The report for debugging. 
+ """ + return "Python SDK Debug Report:\n"\ + "OS: {env}\n"\ + "Python Version: {pyversion}\n"\ + "Version of the API: 1.0.0\n"\ + "SDK Package Version: 1.0.0".\ + format(env=sys.platform, pyversion=sys.version) + + def get_host_settings(self): + """Gets an array of host settings + + :return: An array of host settings + """ + return [ + { + 'url': "/", + 'description': "No description provided", + } + ] + + def get_host_from_settings(self, index, variables=None): + """Gets host URL based on the index and variables + :param index: array index of the host settings + :param variables: hash of variable and the corresponding value + :return: URL based on host settings + """ + variables = {} if variables is None else variables + servers = self.get_host_settings() + + try: + server = servers[index] + except IndexError: + raise ValueError( + "Invalid index {0} when selecting the host settings. " + "Must be less than {1}".format(index, len(servers))) + + url = server['url'] + + # go through variables and replace placeholders + for variable_name, variable in server['variables'].items(): + used_value = variables.get( + variable_name, variable['default_value']) + + if 'enum_values' in variable \ + and used_value not in variable['enum_values']: + raise ValueError( + "The variable `{0}` in the host URL has invalid value " + "{1}. Must be {2}.".format( + variable_name, variables[variable_name], + variable['enum_values'])) + + url = url.replace("{" + variable_name + "}", used_value) + + return url diff --git a/backend/api/python_http_client/kfp_server_api/exceptions.py b/backend/api/python_http_client/kfp_server_api/exceptions.py new file mode 100644 index 000000000..597a01a1b --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/exceptions.py @@ -0,0 +1,134 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import six + + +class OpenApiException(Exception): + """The base exception class for all OpenAPIExceptions""" + + +class ApiTypeError(OpenApiException, TypeError): + def __init__(self, msg, path_to_item=None, valid_classes=None, + key_type=None): + """ Raises an exception for TypeErrors + + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (list): a list of keys an indices to get to the + current_item + None if unset + valid_classes (tuple): the primitive classes that current item + should be an instance of + None if unset + key_type (bool): False if our value is a value in a dict + True if it is a key in a dict + False if our item is an item in a list + None if unset + """ + self.path_to_item = path_to_item + self.valid_classes = valid_classes + self.key_type = key_type + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiTypeError, self).__init__(full_msg) + + +class ApiValueError(OpenApiException, ValueError): + def __init__(self, msg, path_to_item=None): + """ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (list) the path to the exception in the + received_data dict. None if unset + """ + + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiValueError, self).__init__(full_msg) + + +class ApiKeyError(OpenApiException, KeyError): + def __init__(self, msg, path_to_item=None): + """ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (None/list) the path to the exception in the + received_data dict + """ + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiKeyError, self).__init__(full_msg) + + +class ApiException(OpenApiException): + + def __init__(self, status=None, reason=None, http_resp=None): + if http_resp: + self.status = http_resp.status + self.reason = http_resp.reason + self.body = http_resp.data + self.headers = http_resp.getheaders() + else: + self.status = status + self.reason = reason + self.body = None + self.headers = None + + def __str__(self): + """Custom error messages for exception""" + error_message = "({0})\n"\ + "Reason: {1}\n".format(self.status, self.reason) + if self.headers: + error_message += "HTTP response headers: {0}\n".format( + self.headers) + + if self.body: + error_message += "HTTP response body: {0}\n".format(self.body) + + return error_message + + +def render_path(path_to_item): + """Returns a string representation of a path""" + result = "" + for pth in path_to_item: + if isinstance(pth, six.integer_types): + result += "[{0}]".format(pth) + else: + result += "['{0}']".format(pth) + return result diff --git a/backend/api/python_http_client/kfp_server_api/models/__init__.py b/backend/api/python_http_client/kfp_server_api/models/__init__.py new file mode 100644 index 000000000..fabd16f3a --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/__init__.py @@ -0,0 +1,65 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +# flake8: noqa +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +# import models into model package +from kfp_server_api.models.api_cron_schedule import ApiCronSchedule +from kfp_server_api.models.api_experiment import ApiExperiment +from kfp_server_api.models.api_get_template_response import ApiGetTemplateResponse +from kfp_server_api.models.api_job import ApiJob +from kfp_server_api.models.api_list_experiments_response import ApiListExperimentsResponse +from kfp_server_api.models.api_list_jobs_response import ApiListJobsResponse +from kfp_server_api.models.api_list_pipeline_versions_response import ApiListPipelineVersionsResponse +from kfp_server_api.models.api_list_pipelines_response import ApiListPipelinesResponse +from kfp_server_api.models.api_list_runs_response import ApiListRunsResponse +from kfp_server_api.models.api_parameter import ApiParameter +from kfp_server_api.models.api_periodic_schedule import ApiPeriodicSchedule +from kfp_server_api.models.api_pipeline import ApiPipeline +from kfp_server_api.models.api_pipeline_runtime import ApiPipelineRuntime +from kfp_server_api.models.api_pipeline_spec import ApiPipelineSpec +from kfp_server_api.models.api_pipeline_version import ApiPipelineVersion +from kfp_server_api.models.api_read_artifact_response import ApiReadArtifactResponse +from kfp_server_api.models.api_relationship import ApiRelationship +from kfp_server_api.models.api_report_run_metrics_request import ApiReportRunMetricsRequest +from kfp_server_api.models.api_report_run_metrics_response import ApiReportRunMetricsResponse +from kfp_server_api.models.api_resource_key import ApiResourceKey +from kfp_server_api.models.api_resource_reference import ApiResourceReference +from kfp_server_api.models.api_resource_type import ApiResourceType +from kfp_server_api.models.api_run import ApiRun +from kfp_server_api.models.api_run_detail import ApiRunDetail +from kfp_server_api.models.api_run_metric import ApiRunMetric +from kfp_server_api.models.api_status import ApiStatus +from kfp_server_api.models.api_trigger import ApiTrigger +from kfp_server_api.models.api_url import ApiUrl +from kfp_server_api.models.experiment_storage_state import ExperimentStorageState +from kfp_server_api.models.job_mode import JobMode +from kfp_server_api.models.protobuf_any import ProtobufAny +from kfp_server_api.models.report_run_metrics_response_report_run_metric_result import ReportRunMetricsResponseReportRunMetricResult +from kfp_server_api.models.report_run_metrics_response_report_run_metric_result_status import ReportRunMetricsResponseReportRunMetricResultStatus +from kfp_server_api.models.run_metric_format import RunMetricFormat +from kfp_server_api.models.run_storage_state import RunStorageState diff --git a/backend/api/python_http_client/kfp_server_api/models/api_cron_schedule.py 
b/backend/api/python_http_client/kfp_server_api/models/api_cron_schedule.py new file mode 100644 index 000000000..955b4ad86 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_cron_schedule.py @@ -0,0 +1,186 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiCronSchedule(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'start_time': 'datetime', + 'end_time': 'datetime', + 'cron': 'str' + } + + attribute_map = { + 'start_time': 'start_time', + 'end_time': 'end_time', + 'cron': 'cron' + } + + def __init__(self, start_time=None, end_time=None, cron=None, local_vars_configuration=None): # noqa: E501 + """ApiCronSchedule - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._start_time = None + self._end_time = None + self._cron = None + self.discriminator = None + + if start_time is not None: + self.start_time = start_time + if end_time is not None: + self.end_time = end_time + if cron is not None: + self.cron = cron + + @property + def start_time(self): + """Gets the start_time of this ApiCronSchedule. # noqa: E501 + + + :return: The start_time of this ApiCronSchedule. # noqa: E501 + :rtype: datetime + """ + return self._start_time + + @start_time.setter + def start_time(self, start_time): + """Sets the start_time of this ApiCronSchedule. + + + :param start_time: The start_time of this ApiCronSchedule. # noqa: E501 + :type start_time: datetime + """ + + self._start_time = start_time + + @property + def end_time(self): + """Gets the end_time of this ApiCronSchedule. # noqa: E501 + + + :return: The end_time of this ApiCronSchedule. # noqa: E501 + :rtype: datetime + """ + return self._end_time + + @end_time.setter + def end_time(self, end_time): + """Sets the end_time of this ApiCronSchedule. + + + :param end_time: The end_time of this ApiCronSchedule. # noqa: E501 + :type end_time: datetime + """ + + self._end_time = end_time + + @property + def cron(self): + """Gets the cron of this ApiCronSchedule. # noqa: E501 + + + :return: The cron of this ApiCronSchedule. 
# noqa: E501 + :rtype: str + """ + return self._cron + + @cron.setter + def cron(self, cron): + """Sets the cron of this ApiCronSchedule. + + + :param cron: The cron of this ApiCronSchedule. # noqa: E501 + :type cron: str + """ + + self._cron = cron + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiCronSchedule): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiCronSchedule): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_experiment.py b/backend/api/python_http_client/kfp_server_api/models/api_experiment.py new file mode 100644 index 000000000..849243a2d --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_experiment.py @@ -0,0 +1,272 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiExperiment(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'id': 'str', + 'name': 'str', + 'description': 'str', + 'created_at': 'datetime', + 'resource_references': 'list[ApiResourceReference]', + 'storage_state': 'ExperimentStorageState' + } + + attribute_map = { + 'id': 'id', + 'name': 'name', + 'description': 'description', + 'created_at': 'created_at', + 'resource_references': 'resource_references', + 'storage_state': 'storage_state' + } + + def __init__(self, id=None, name=None, description=None, created_at=None, resource_references=None, storage_state=None, local_vars_configuration=None): # noqa: E501 + """ApiExperiment - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._id = None + self._name = None + self._description = None + self._created_at = None + self._resource_references = None + self._storage_state = None + self.discriminator = None + + if id is not None: + self.id = id + if name is not None: + self.name = name + if description is not None: + self.description = description + if created_at is not None: + self.created_at = created_at + if resource_references is not None: + self.resource_references = resource_references + if storage_state is not None: + self.storage_state = storage_state + + @property + def id(self): + """Gets the id of this ApiExperiment. # noqa: E501 + + Output. Unique experiment ID. Generated by API server. # noqa: E501 + + :return: The id of this ApiExperiment. # noqa: E501 + :rtype: str + """ + return self._id + + @id.setter + def id(self, id): + """Sets the id of this ApiExperiment. + + Output. Unique experiment ID. Generated by API server. # noqa: E501 + + :param id: The id of this ApiExperiment. # noqa: E501 + :type id: str + """ + + self._id = id + + @property + def name(self): + """Gets the name of this ApiExperiment. # noqa: E501 + + Required input field. Unique experiment name provided by user. # noqa: E501 + + :return: The name of this ApiExperiment. # noqa: E501 + :rtype: str + """ + return self._name + + @name.setter + def name(self, name): + """Sets the name of this ApiExperiment. + + Required input field. Unique experiment name provided by user. # noqa: E501 + + :param name: The name of this ApiExperiment. # noqa: E501 + :type name: str + """ + + self._name = name + + @property + def description(self): + """Gets the description of this ApiExperiment. # noqa: E501 + + + :return: The description of this ApiExperiment. # noqa: E501 + :rtype: str + """ + return self._description + + @description.setter + def description(self, description): + """Sets the description of this ApiExperiment. + + + :param description: The description of this ApiExperiment. # noqa: E501 + :type description: str + """ + + self._description = description + + @property + def created_at(self): + """Gets the created_at of this ApiExperiment. # noqa: E501 + + Output. The time that the experiment created. # noqa: E501 + + :return: The created_at of this ApiExperiment. # noqa: E501 + :rtype: datetime + """ + return self._created_at + + @created_at.setter + def created_at(self, created_at): + """Sets the created_at of this ApiExperiment. + + Output. The time that the experiment created. # noqa: E501 + + :param created_at: The created_at of this ApiExperiment. # noqa: E501 + :type created_at: datetime + """ + + self._created_at = created_at + + @property + def resource_references(self): + """Gets the resource_references of this ApiExperiment. 
# noqa: E501 + + Optional input field. Specify which resource this run belongs to. For Experiment, the only valid resource reference is a single Namespace. # noqa: E501 + + :return: The resource_references of this ApiExperiment. # noqa: E501 + :rtype: list[ApiResourceReference] + """ + return self._resource_references + + @resource_references.setter + def resource_references(self, resource_references): + """Sets the resource_references of this ApiExperiment. + + Optional input field. Specify which resource this run belongs to. For Experiment, the only valid resource reference is a single Namespace. # noqa: E501 + + :param resource_references: The resource_references of this ApiExperiment. # noqa: E501 + :type resource_references: list[ApiResourceReference] + """ + + self._resource_references = resource_references + + @property + def storage_state(self): + """Gets the storage_state of this ApiExperiment. # noqa: E501 + + + :return: The storage_state of this ApiExperiment. # noqa: E501 + :rtype: ExperimentStorageState + """ + return self._storage_state + + @storage_state.setter + def storage_state(self, storage_state): + """Sets the storage_state of this ApiExperiment. + + + :param storage_state: The storage_state of this ApiExperiment. # noqa: E501 + :type storage_state: ExperimentStorageState + """ + + self._storage_state = storage_state + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiExperiment): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiExperiment): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_get_template_response.py b/backend/api/python_http_client/kfp_server_api/models/api_get_template_response.py new file mode 100644 index 000000000..a9aab9e4b --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_get_template_response.py @@ -0,0 +1,136 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. 
The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiGetTemplateResponse(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'template': 'str' + } + + attribute_map = { + 'template': 'template' + } + + def __init__(self, template=None, local_vars_configuration=None): # noqa: E501 + """ApiGetTemplateResponse - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._template = None + self.discriminator = None + + if template is not None: + self.template = template + + @property + def template(self): + """Gets the template of this ApiGetTemplateResponse. # noqa: E501 + + The template of the pipeline specified in a GetTemplate request, or of a pipeline version specified in a GetPipelinesVersionTemplate request. # noqa: E501 + + :return: The template of this ApiGetTemplateResponse. # noqa: E501 + :rtype: str + """ + return self._template + + @template.setter + def template(self, template): + """Sets the template of this ApiGetTemplateResponse. + + The template of the pipeline specified in a GetTemplate request, or of a pipeline version specified in a GetPipelinesVersionTemplate request. # noqa: E501 + + :param template: The template of this ApiGetTemplateResponse. 
# noqa: E501 + :type template: str + """ + + self._template = template + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiGetTemplateResponse): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiGetTemplateResponse): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_job.py b/backend/api/python_http_client/kfp_server_api/models/api_job.py new file mode 100644 index 000000000..1fd859a9c --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_job.py @@ -0,0 +1,516 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiJob(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'id': 'str', + 'name': 'str', + 'description': 'str', + 'pipeline_spec': 'ApiPipelineSpec', + 'resource_references': 'list[ApiResourceReference]', + 'service_account': 'str', + 'max_concurrency': 'str', + 'trigger': 'ApiTrigger', + 'mode': 'JobMode', + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'status': 'str', + 'error': 'str', + 'enabled': 'bool', + 'no_catchup': 'bool' + } + + attribute_map = { + 'id': 'id', + 'name': 'name', + 'description': 'description', + 'pipeline_spec': 'pipeline_spec', + 'resource_references': 'resource_references', + 'service_account': 'service_account', + 'max_concurrency': 'max_concurrency', + 'trigger': 'trigger', + 'mode': 'mode', + 'created_at': 'created_at', + 'updated_at': 'updated_at', + 'status': 'status', + 'error': 'error', + 'enabled': 'enabled', + 'no_catchup': 'no_catchup' + } + + def __init__(self, id=None, name=None, description=None, pipeline_spec=None, resource_references=None, service_account=None, max_concurrency=None, trigger=None, mode=None, created_at=None, updated_at=None, status=None, error=None, enabled=None, no_catchup=None, local_vars_configuration=None): # noqa: E501 + """ApiJob - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._id = None + self._name = None + self._description = None + self._pipeline_spec = None + self._resource_references = None + self._service_account = None + self._max_concurrency = None + self._trigger = None + self._mode = None + self._created_at = None + self._updated_at = None + self._status = None + self._error = None + self._enabled = None + self._no_catchup = None + self.discriminator = None + + if id is not None: + self.id = id + if name is not None: + self.name = name + if description is not None: + self.description = description + if pipeline_spec is not None: + self.pipeline_spec = pipeline_spec + if resource_references is not None: + self.resource_references = resource_references + if service_account is not None: + self.service_account = service_account + if max_concurrency is not None: + self.max_concurrency = max_concurrency + if trigger is not None: + self.trigger = trigger + if mode is not None: + self.mode = mode + if created_at is not None: + self.created_at = created_at + if updated_at is not None: + self.updated_at = updated_at + if status is not None: + self.status = status + if error is not None: + self.error = error + if enabled is not None: + self.enabled = enabled + if no_catchup is not None: + self.no_catchup = no_catchup + + @property + def id(self): + """Gets the id of this ApiJob. # noqa: E501 + + Output. Unique run ID. Generated by API server. # noqa: E501 + + :return: The id of this ApiJob. # noqa: E501 + :rtype: str + """ + return self._id + + @id.setter + def id(self, id): + """Sets the id of this ApiJob. + + Output. Unique run ID. Generated by API server. # noqa: E501 + + :param id: The id of this ApiJob. # noqa: E501 + :type id: str + """ + + self._id = id + + @property + def name(self): + """Gets the name of this ApiJob. # noqa: E501 + + Required input field. Job name provided by user. Not unique. # noqa: E501 + + :return: The name of this ApiJob. # noqa: E501 + :rtype: str + """ + return self._name + + @name.setter + def name(self, name): + """Sets the name of this ApiJob. + + Required input field. Job name provided by user. Not unique. # noqa: E501 + + :param name: The name of this ApiJob. 
# noqa: E501 + :type name: str + """ + + self._name = name + + @property + def description(self): + """Gets the description of this ApiJob. # noqa: E501 + + + :return: The description of this ApiJob. # noqa: E501 + :rtype: str + """ + return self._description + + @description.setter + def description(self, description): + """Sets the description of this ApiJob. + + + :param description: The description of this ApiJob. # noqa: E501 + :type description: str + """ + + self._description = description + + @property + def pipeline_spec(self): + """Gets the pipeline_spec of this ApiJob. # noqa: E501 + + + :return: The pipeline_spec of this ApiJob. # noqa: E501 + :rtype: ApiPipelineSpec + """ + return self._pipeline_spec + + @pipeline_spec.setter + def pipeline_spec(self, pipeline_spec): + """Sets the pipeline_spec of this ApiJob. + + + :param pipeline_spec: The pipeline_spec of this ApiJob. # noqa: E501 + :type pipeline_spec: ApiPipelineSpec + """ + + self._pipeline_spec = pipeline_spec + + @property + def resource_references(self): + """Gets the resource_references of this ApiJob. # noqa: E501 + + Optional input field. Specify which resource this job belongs to. # noqa: E501 + + :return: The resource_references of this ApiJob. # noqa: E501 + :rtype: list[ApiResourceReference] + """ + return self._resource_references + + @resource_references.setter + def resource_references(self, resource_references): + """Sets the resource_references of this ApiJob. + + Optional input field. Specify which resource this job belongs to. # noqa: E501 + + :param resource_references: The resource_references of this ApiJob. # noqa: E501 + :type resource_references: list[ApiResourceReference] + """ + + self._resource_references = resource_references + + @property + def service_account(self): + """Gets the service_account of this ApiJob. # noqa: E501 + + Optional input field. Specify which Kubernetes service account this job uses. # noqa: E501 + + :return: The service_account of this ApiJob. # noqa: E501 + :rtype: str + """ + return self._service_account + + @service_account.setter + def service_account(self, service_account): + """Sets the service_account of this ApiJob. + + Optional input field. Specify which Kubernetes service account this job uses. # noqa: E501 + + :param service_account: The service_account of this ApiJob. # noqa: E501 + :type service_account: str + """ + + self._service_account = service_account + + @property + def max_concurrency(self): + """Gets the max_concurrency of this ApiJob. # noqa: E501 + + + :return: The max_concurrency of this ApiJob. # noqa: E501 + :rtype: str + """ + return self._max_concurrency + + @max_concurrency.setter + def max_concurrency(self, max_concurrency): + """Sets the max_concurrency of this ApiJob. + + + :param max_concurrency: The max_concurrency of this ApiJob. # noqa: E501 + :type max_concurrency: str + """ + + self._max_concurrency = max_concurrency + + @property + def trigger(self): + """Gets the trigger of this ApiJob. # noqa: E501 + + + :return: The trigger of this ApiJob. # noqa: E501 + :rtype: ApiTrigger + """ + return self._trigger + + @trigger.setter + def trigger(self, trigger): + """Sets the trigger of this ApiJob. + + + :param trigger: The trigger of this ApiJob. # noqa: E501 + :type trigger: ApiTrigger + """ + + self._trigger = trigger + + @property + def mode(self): + """Gets the mode of this ApiJob. # noqa: E501 + + + :return: The mode of this ApiJob. 
# noqa: E501 + :rtype: JobMode + """ + return self._mode + + @mode.setter + def mode(self, mode): + """Sets the mode of this ApiJob. + + + :param mode: The mode of this ApiJob. # noqa: E501 + :type mode: JobMode + """ + + self._mode = mode + + @property + def created_at(self): + """Gets the created_at of this ApiJob. # noqa: E501 + + Output. The time this job is created. # noqa: E501 + + :return: The created_at of this ApiJob. # noqa: E501 + :rtype: datetime + """ + return self._created_at + + @created_at.setter + def created_at(self, created_at): + """Sets the created_at of this ApiJob. + + Output. The time this job is created. # noqa: E501 + + :param created_at: The created_at of this ApiJob. # noqa: E501 + :type created_at: datetime + """ + + self._created_at = created_at + + @property + def updated_at(self): + """Gets the updated_at of this ApiJob. # noqa: E501 + + Output. The last time this job is updated. # noqa: E501 + + :return: The updated_at of this ApiJob. # noqa: E501 + :rtype: datetime + """ + return self._updated_at + + @updated_at.setter + def updated_at(self, updated_at): + """Sets the updated_at of this ApiJob. + + Output. The last time this job is updated. # noqa: E501 + + :param updated_at: The updated_at of this ApiJob. # noqa: E501 + :type updated_at: datetime + """ + + self._updated_at = updated_at + + @property + def status(self): + """Gets the status of this ApiJob. # noqa: E501 + + + :return: The status of this ApiJob. # noqa: E501 + :rtype: str + """ + return self._status + + @status.setter + def status(self, status): + """Sets the status of this ApiJob. + + + :param status: The status of this ApiJob. # noqa: E501 + :type status: str + """ + + self._status = status + + @property + def error(self): + """Gets the error of this ApiJob. # noqa: E501 + + In case any error happens retrieving a job field, only job ID and the error message is returned. Client has the flexibility of choosing how to handle error. This is especially useful during listing call. # noqa: E501 + + :return: The error of this ApiJob. # noqa: E501 + :rtype: str + """ + return self._error + + @error.setter + def error(self, error): + """Sets the error of this ApiJob. + + In case any error happens retrieving a job field, only job ID and the error message is returned. Client has the flexibility of choosing how to handle error. This is especially useful during listing call. # noqa: E501 + + :param error: The error of this ApiJob. # noqa: E501 + :type error: str + """ + + self._error = error + + @property + def enabled(self): + """Gets the enabled of this ApiJob. # noqa: E501 + + Input. Whether the job is enabled or not. # noqa: E501 + + :return: The enabled of this ApiJob. # noqa: E501 + :rtype: bool + """ + return self._enabled + + @enabled.setter + def enabled(self, enabled): + """Sets the enabled of this ApiJob. + + Input. Whether the job is enabled or not. # noqa: E501 + + :param enabled: The enabled of this ApiJob. # noqa: E501 + :type enabled: bool + """ + + self._enabled = enabled + + @property + def no_catchup(self): + """Gets the no_catchup of this ApiJob. # noqa: E501 + + Optional input field. Whether the job should catch up if behind schedule. If true, the job will only schedule the latest interval if behind schedule. If false, the job will catch up on each past interval. # noqa: E501 + + :return: The no_catchup of this ApiJob. # noqa: E501 + :rtype: bool + """ + return self._no_catchup + + @no_catchup.setter + def no_catchup(self, no_catchup): + """Sets the no_catchup of this ApiJob. 
+ + Optional input field. Whether the job should catch up if behind schedule. If true, the job will only schedule the latest interval if behind schedule. If false, the job will catch up on each past interval. # noqa: E501 + + :param no_catchup: The no_catchup of this ApiJob. # noqa: E501 + :type no_catchup: bool + """ + + self._no_catchup = no_catchup + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiJob): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiJob): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_list_experiments_response.py b/backend/api/python_http_client/kfp_server_api/models/api_list_experiments_response.py new file mode 100644 index 000000000..26b88d79e --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_list_experiments_response.py @@ -0,0 +1,192 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiListExperimentsResponse(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'experiments': 'list[ApiExperiment]', + 'total_size': 'int', + 'next_page_token': 'str' + } + + attribute_map = { + 'experiments': 'experiments', + 'total_size': 'total_size', + 'next_page_token': 'next_page_token' + } + + def __init__(self, experiments=None, total_size=None, next_page_token=None, local_vars_configuration=None): # noqa: E501 + """ApiListExperimentsResponse - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._experiments = None + self._total_size = None + self._next_page_token = None + self.discriminator = None + + if experiments is not None: + self.experiments = experiments + if total_size is not None: + self.total_size = total_size + if next_page_token is not None: + self.next_page_token = next_page_token + + @property + def experiments(self): + """Gets the experiments of this ApiListExperimentsResponse. # noqa: E501 + + A list of experiments returned. # noqa: E501 + + :return: The experiments of this ApiListExperimentsResponse. # noqa: E501 + :rtype: list[ApiExperiment] + """ + return self._experiments + + @experiments.setter + def experiments(self, experiments): + """Sets the experiments of this ApiListExperimentsResponse. + + A list of experiments returned. # noqa: E501 + + :param experiments: The experiments of this ApiListExperimentsResponse. # noqa: E501 + :type experiments: list[ApiExperiment] + """ + + self._experiments = experiments + + @property + def total_size(self): + """Gets the total_size of this ApiListExperimentsResponse. # noqa: E501 + + The total number of experiments for the given query. # noqa: E501 + + :return: The total_size of this ApiListExperimentsResponse. # noqa: E501 + :rtype: int + """ + return self._total_size + + @total_size.setter + def total_size(self, total_size): + """Sets the total_size of this ApiListExperimentsResponse. + + The total number of experiments for the given query. # noqa: E501 + + :param total_size: The total_size of this ApiListExperimentsResponse. # noqa: E501 + :type total_size: int + """ + + self._total_size = total_size + + @property + def next_page_token(self): + """Gets the next_page_token of this ApiListExperimentsResponse. # noqa: E501 + + The token to list the next page of experiments. # noqa: E501 + + :return: The next_page_token of this ApiListExperimentsResponse. # noqa: E501 + :rtype: str + """ + return self._next_page_token + + @next_page_token.setter + def next_page_token(self, next_page_token): + """Sets the next_page_token of this ApiListExperimentsResponse. + + The token to list the next page of experiments. # noqa: E501 + + :param next_page_token: The next_page_token of this ApiListExperimentsResponse. 
# noqa: E501 + :type next_page_token: str + """ + + self._next_page_token = next_page_token + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiListExperimentsResponse): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiListExperimentsResponse): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_list_jobs_response.py b/backend/api/python_http_client/kfp_server_api/models/api_list_jobs_response.py new file mode 100644 index 000000000..65f1ce6a9 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_list_jobs_response.py @@ -0,0 +1,192 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiListJobsResponse(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'jobs': 'list[ApiJob]', + 'total_size': 'int', + 'next_page_token': 'str' + } + + attribute_map = { + 'jobs': 'jobs', + 'total_size': 'total_size', + 'next_page_token': 'next_page_token' + } + + def __init__(self, jobs=None, total_size=None, next_page_token=None, local_vars_configuration=None): # noqa: E501 + """ApiListJobsResponse - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._jobs = None + self._total_size = None + self._next_page_token = None + self.discriminator = None + + if jobs is not None: + self.jobs = jobs + if total_size is not None: + self.total_size = total_size + if next_page_token is not None: + self.next_page_token = next_page_token + + @property + def jobs(self): + """Gets the jobs of this ApiListJobsResponse. # noqa: E501 + + A list of jobs returned. # noqa: E501 + + :return: The jobs of this ApiListJobsResponse. # noqa: E501 + :rtype: list[ApiJob] + """ + return self._jobs + + @jobs.setter + def jobs(self, jobs): + """Sets the jobs of this ApiListJobsResponse. + + A list of jobs returned. # noqa: E501 + + :param jobs: The jobs of this ApiListJobsResponse. # noqa: E501 + :type jobs: list[ApiJob] + """ + + self._jobs = jobs + + @property + def total_size(self): + """Gets the total_size of this ApiListJobsResponse. # noqa: E501 + + The total number of jobs for the given query. # noqa: E501 + + :return: The total_size of this ApiListJobsResponse. # noqa: E501 + :rtype: int + """ + return self._total_size + + @total_size.setter + def total_size(self, total_size): + """Sets the total_size of this ApiListJobsResponse. + + The total number of jobs for the given query. # noqa: E501 + + :param total_size: The total_size of this ApiListJobsResponse. # noqa: E501 + :type total_size: int + """ + + self._total_size = total_size + + @property + def next_page_token(self): + """Gets the next_page_token of this ApiListJobsResponse. # noqa: E501 + + The token to list the next page of jobs. # noqa: E501 + + :return: The next_page_token of this ApiListJobsResponse. # noqa: E501 + :rtype: str + """ + return self._next_page_token + + @next_page_token.setter + def next_page_token(self, next_page_token): + """Sets the next_page_token of this ApiListJobsResponse. + + The token to list the next page of jobs. # noqa: E501 + + :param next_page_token: The next_page_token of this ApiListJobsResponse. 
# noqa: E501 + :type next_page_token: str + """ + + self._next_page_token = next_page_token + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiListJobsResponse): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiListJobsResponse): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_list_pipeline_versions_response.py b/backend/api/python_http_client/kfp_server_api/models/api_list_pipeline_versions_response.py new file mode 100644 index 000000000..a9878dbd8 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_list_pipeline_versions_response.py @@ -0,0 +1,190 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiListPipelineVersionsResponse(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'versions': 'list[ApiPipelineVersion]', + 'next_page_token': 'str', + 'total_size': 'int' + } + + attribute_map = { + 'versions': 'versions', + 'next_page_token': 'next_page_token', + 'total_size': 'total_size' + } + + def __init__(self, versions=None, next_page_token=None, total_size=None, local_vars_configuration=None): # noqa: E501 + """ApiListPipelineVersionsResponse - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._versions = None + self._next_page_token = None + self._total_size = None + self.discriminator = None + + if versions is not None: + self.versions = versions + if next_page_token is not None: + self.next_page_token = next_page_token + if total_size is not None: + self.total_size = total_size + + @property + def versions(self): + """Gets the versions of this ApiListPipelineVersionsResponse. # noqa: E501 + + + :return: The versions of this ApiListPipelineVersionsResponse. # noqa: E501 + :rtype: list[ApiPipelineVersion] + """ + return self._versions + + @versions.setter + def versions(self, versions): + """Sets the versions of this ApiListPipelineVersionsResponse. + + + :param versions: The versions of this ApiListPipelineVersionsResponse. # noqa: E501 + :type versions: list[ApiPipelineVersion] + """ + + self._versions = versions + + @property + def next_page_token(self): + """Gets the next_page_token of this ApiListPipelineVersionsResponse. # noqa: E501 + + The token to list the next page of pipeline versions. # noqa: E501 + + :return: The next_page_token of this ApiListPipelineVersionsResponse. # noqa: E501 + :rtype: str + """ + return self._next_page_token + + @next_page_token.setter + def next_page_token(self, next_page_token): + """Sets the next_page_token of this ApiListPipelineVersionsResponse. + + The token to list the next page of pipeline versions. # noqa: E501 + + :param next_page_token: The next_page_token of this ApiListPipelineVersionsResponse. # noqa: E501 + :type next_page_token: str + """ + + self._next_page_token = next_page_token + + @property + def total_size(self): + """Gets the total_size of this ApiListPipelineVersionsResponse. # noqa: E501 + + The total number of pipeline versions for the given query. # noqa: E501 + + :return: The total_size of this ApiListPipelineVersionsResponse. # noqa: E501 + :rtype: int + """ + return self._total_size + + @total_size.setter + def total_size(self, total_size): + """Sets the total_size of this ApiListPipelineVersionsResponse. + + The total number of pipeline versions for the given query. # noqa: E501 + + :param total_size: The total_size of this ApiListPipelineVersionsResponse. 
# noqa: E501 + :type total_size: int + """ + + self._total_size = total_size + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiListPipelineVersionsResponse): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiListPipelineVersionsResponse): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_list_pipelines_response.py b/backend/api/python_http_client/kfp_server_api/models/api_list_pipelines_response.py new file mode 100644 index 000000000..5141dd70e --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_list_pipelines_response.py @@ -0,0 +1,190 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiListPipelinesResponse(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'pipelines': 'list[ApiPipeline]', + 'total_size': 'int', + 'next_page_token': 'str' + } + + attribute_map = { + 'pipelines': 'pipelines', + 'total_size': 'total_size', + 'next_page_token': 'next_page_token' + } + + def __init__(self, pipelines=None, total_size=None, next_page_token=None, local_vars_configuration=None): # noqa: E501 + """ApiListPipelinesResponse - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._pipelines = None + self._total_size = None + self._next_page_token = None + self.discriminator = None + + if pipelines is not None: + self.pipelines = pipelines + if total_size is not None: + self.total_size = total_size + if next_page_token is not None: + self.next_page_token = next_page_token + + @property + def pipelines(self): + """Gets the pipelines of this ApiListPipelinesResponse. # noqa: E501 + + + :return: The pipelines of this ApiListPipelinesResponse. # noqa: E501 + :rtype: list[ApiPipeline] + """ + return self._pipelines + + @pipelines.setter + def pipelines(self, pipelines): + """Sets the pipelines of this ApiListPipelinesResponse. + + + :param pipelines: The pipelines of this ApiListPipelinesResponse. # noqa: E501 + :type pipelines: list[ApiPipeline] + """ + + self._pipelines = pipelines + + @property + def total_size(self): + """Gets the total_size of this ApiListPipelinesResponse. # noqa: E501 + + The total number of pipelines for the given query. # noqa: E501 + + :return: The total_size of this ApiListPipelinesResponse. # noqa: E501 + :rtype: int + """ + return self._total_size + + @total_size.setter + def total_size(self, total_size): + """Sets the total_size of this ApiListPipelinesResponse. + + The total number of pipelines for the given query. # noqa: E501 + + :param total_size: The total_size of this ApiListPipelinesResponse. # noqa: E501 + :type total_size: int + """ + + self._total_size = total_size + + @property + def next_page_token(self): + """Gets the next_page_token of this ApiListPipelinesResponse. # noqa: E501 + + The token to list the next page of pipelines. # noqa: E501 + + :return: The next_page_token of this ApiListPipelinesResponse. # noqa: E501 + :rtype: str + """ + return self._next_page_token + + @next_page_token.setter + def next_page_token(self, next_page_token): + """Sets the next_page_token of this ApiListPipelinesResponse. + + The token to list the next page of pipelines. # noqa: E501 + + :param next_page_token: The next_page_token of this ApiListPipelinesResponse. 
# noqa: E501 + :type next_page_token: str + """ + + self._next_page_token = next_page_token + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiListPipelinesResponse): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiListPipelinesResponse): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_list_runs_response.py b/backend/api/python_http_client/kfp_server_api/models/api_list_runs_response.py new file mode 100644 index 000000000..57393719b --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_list_runs_response.py @@ -0,0 +1,190 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiListRunsResponse(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'runs': 'list[ApiRun]', + 'total_size': 'int', + 'next_page_token': 'str' + } + + attribute_map = { + 'runs': 'runs', + 'total_size': 'total_size', + 'next_page_token': 'next_page_token' + } + + def __init__(self, runs=None, total_size=None, next_page_token=None, local_vars_configuration=None): # noqa: E501 + """ApiListRunsResponse - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._runs = None + self._total_size = None + self._next_page_token = None + self.discriminator = None + + if runs is not None: + self.runs = runs + if total_size is not None: + self.total_size = total_size + if next_page_token is not None: + self.next_page_token = next_page_token + + @property + def runs(self): + """Gets the runs of this ApiListRunsResponse. # noqa: E501 + + + :return: The runs of this ApiListRunsResponse. # noqa: E501 + :rtype: list[ApiRun] + """ + return self._runs + + @runs.setter + def runs(self, runs): + """Sets the runs of this ApiListRunsResponse. + + + :param runs: The runs of this ApiListRunsResponse. # noqa: E501 + :type runs: list[ApiRun] + """ + + self._runs = runs + + @property + def total_size(self): + """Gets the total_size of this ApiListRunsResponse. # noqa: E501 + + The total number of runs for the given query. # noqa: E501 + + :return: The total_size of this ApiListRunsResponse. # noqa: E501 + :rtype: int + """ + return self._total_size + + @total_size.setter + def total_size(self, total_size): + """Sets the total_size of this ApiListRunsResponse. + + The total number of runs for the given query. # noqa: E501 + + :param total_size: The total_size of this ApiListRunsResponse. # noqa: E501 + :type total_size: int + """ + + self._total_size = total_size + + @property + def next_page_token(self): + """Gets the next_page_token of this ApiListRunsResponse. # noqa: E501 + + The token to list the next page of runs. # noqa: E501 + + :return: The next_page_token of this ApiListRunsResponse. # noqa: E501 + :rtype: str + """ + return self._next_page_token + + @next_page_token.setter + def next_page_token(self, next_page_token): + """Sets the next_page_token of this ApiListRunsResponse. + + The token to list the next page of runs. # noqa: E501 + + :param next_page_token: The next_page_token of this ApiListRunsResponse. 
# noqa: E501 + :type next_page_token: str + """ + + self._next_page_token = next_page_token + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiListRunsResponse): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiListRunsResponse): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_parameter.py b/backend/api/python_http_client/kfp_server_api/models/api_parameter.py new file mode 100644 index 000000000..c98089232 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_parameter.py @@ -0,0 +1,160 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiParameter(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'name': 'str', + 'value': 'str' + } + + attribute_map = { + 'name': 'name', + 'value': 'value' + } + + def __init__(self, name=None, value=None, local_vars_configuration=None): # noqa: E501 + """ApiParameter - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._name = None + self._value = None + self.discriminator = None + + if name is not None: + self.name = name + if value is not None: + self.value = value + + @property + def name(self): + """Gets the name of this ApiParameter. 
# noqa: E501 + + + :return: The name of this ApiParameter. # noqa: E501 + :rtype: str + """ + return self._name + + @name.setter + def name(self, name): + """Sets the name of this ApiParameter. + + + :param name: The name of this ApiParameter. # noqa: E501 + :type name: str + """ + + self._name = name + + @property + def value(self): + """Gets the value of this ApiParameter. # noqa: E501 + + + :return: The value of this ApiParameter. # noqa: E501 + :rtype: str + """ + return self._value + + @value.setter + def value(self, value): + """Sets the value of this ApiParameter. + + + :param value: The value of this ApiParameter. # noqa: E501 + :type value: str + """ + + self._value = value + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiParameter): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiParameter): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_periodic_schedule.py b/backend/api/python_http_client/kfp_server_api/models/api_periodic_schedule.py new file mode 100644 index 000000000..2c864e776 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_periodic_schedule.py @@ -0,0 +1,186 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiPeriodicSchedule(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'start_time': 'datetime', + 'end_time': 'datetime', + 'interval_second': 'str' + } + + attribute_map = { + 'start_time': 'start_time', + 'end_time': 'end_time', + 'interval_second': 'interval_second' + } + + def __init__(self, start_time=None, end_time=None, interval_second=None, local_vars_configuration=None): # noqa: E501 + """ApiPeriodicSchedule - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._start_time = None + self._end_time = None + self._interval_second = None + self.discriminator = None + + if start_time is not None: + self.start_time = start_time + if end_time is not None: + self.end_time = end_time + if interval_second is not None: + self.interval_second = interval_second + + @property + def start_time(self): + """Gets the start_time of this ApiPeriodicSchedule. # noqa: E501 + + + :return: The start_time of this ApiPeriodicSchedule. # noqa: E501 + :rtype: datetime + """ + return self._start_time + + @start_time.setter + def start_time(self, start_time): + """Sets the start_time of this ApiPeriodicSchedule. + + + :param start_time: The start_time of this ApiPeriodicSchedule. # noqa: E501 + :type start_time: datetime + """ + + self._start_time = start_time + + @property + def end_time(self): + """Gets the end_time of this ApiPeriodicSchedule. # noqa: E501 + + + :return: The end_time of this ApiPeriodicSchedule. # noqa: E501 + :rtype: datetime + """ + return self._end_time + + @end_time.setter + def end_time(self, end_time): + """Sets the end_time of this ApiPeriodicSchedule. + + + :param end_time: The end_time of this ApiPeriodicSchedule. # noqa: E501 + :type end_time: datetime + """ + + self._end_time = end_time + + @property + def interval_second(self): + """Gets the interval_second of this ApiPeriodicSchedule. # noqa: E501 + + + :return: The interval_second of this ApiPeriodicSchedule. # noqa: E501 + :rtype: str + """ + return self._interval_second + + @interval_second.setter + def interval_second(self, interval_second): + """Sets the interval_second of this ApiPeriodicSchedule. + + + :param interval_second: The interval_second of this ApiPeriodicSchedule. 
# noqa: E501 + :type interval_second: str + """ + + self._interval_second = interval_second + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiPeriodicSchedule): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiPeriodicSchedule): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_pipeline.py b/backend/api/python_http_client/kfp_server_api/models/api_pipeline.py new file mode 100644 index 000000000..4b875cf57 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_pipeline.py @@ -0,0 +1,328 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiPipeline(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'id': 'str', + 'created_at': 'datetime', + 'name': 'str', + 'description': 'str', + 'parameters': 'list[ApiParameter]', + 'url': 'ApiUrl', + 'error': 'str', + 'default_version': 'ApiPipelineVersion' + } + + attribute_map = { + 'id': 'id', + 'created_at': 'created_at', + 'name': 'name', + 'description': 'description', + 'parameters': 'parameters', + 'url': 'url', + 'error': 'error', + 'default_version': 'default_version' + } + + def __init__(self, id=None, created_at=None, name=None, description=None, parameters=None, url=None, error=None, default_version=None, local_vars_configuration=None): # noqa: E501 + """ApiPipeline - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._id = None + self._created_at = None + self._name = None + self._description = None + self._parameters = None + self._url = None + self._error = None + self._default_version = None + self.discriminator = None + + if id is not None: + self.id = id + if created_at is not None: + self.created_at = created_at + if name is not None: + self.name = name + if description is not None: + self.description = description + if parameters is not None: + self.parameters = parameters + if url is not None: + self.url = url + if error is not None: + self.error = error + if default_version is not None: + self.default_version = default_version + + @property + def id(self): + """Gets the id of this ApiPipeline. # noqa: E501 + + Output. Unique pipeline ID. Generated by API server. # noqa: E501 + + :return: The id of this ApiPipeline. # noqa: E501 + :rtype: str + """ + return self._id + + @id.setter + def id(self, id): + """Sets the id of this ApiPipeline. + + Output. Unique pipeline ID. Generated by API server. # noqa: E501 + + :param id: The id of this ApiPipeline. # noqa: E501 + :type id: str + """ + + self._id = id + + @property + def created_at(self): + """Gets the created_at of this ApiPipeline. # noqa: E501 + + Output. The time this pipeline is created. # noqa: E501 + + :return: The created_at of this ApiPipeline. # noqa: E501 + :rtype: datetime + """ + return self._created_at + + @created_at.setter + def created_at(self, created_at): + """Sets the created_at of this ApiPipeline. + + Output. The time this pipeline is created. # noqa: E501 + + :param created_at: The created_at of this ApiPipeline. # noqa: E501 + :type created_at: datetime + """ + + self._created_at = created_at + + @property + def name(self): + """Gets the name of this ApiPipeline. # noqa: E501 + + Optional input field. Pipeline name provided by user. If not specified, file name is used as pipeline name. # noqa: E501 + + :return: The name of this ApiPipeline. # noqa: E501 + :rtype: str + """ + return self._name + + @name.setter + def name(self, name): + """Sets the name of this ApiPipeline. + + Optional input field. Pipeline name provided by user. If not specified, file name is used as pipeline name. # noqa: E501 + + :param name: The name of this ApiPipeline. # noqa: E501 + :type name: str + """ + + self._name = name + + @property + def description(self): + """Gets the description of this ApiPipeline. # noqa: E501 + + Optional input field. Describing the purpose of the job. # noqa: E501 + + :return: The description of this ApiPipeline. # noqa: E501 + :rtype: str + """ + return self._description + + @description.setter + def description(self, description): + """Sets the description of this ApiPipeline. 
+ + Optional input field. Describing the purpose of the job. # noqa: E501 + + :param description: The description of this ApiPipeline. # noqa: E501 + :type description: str + """ + + self._description = description + + @property + def parameters(self): + """Gets the parameters of this ApiPipeline. # noqa: E501 + + Output. The input parameters for this pipeline. TODO(jingzhang36): replace this parameters field with the parameters field inside PipelineVersion when all usage of the former has been changed to use the latter. # noqa: E501 + + :return: The parameters of this ApiPipeline. # noqa: E501 + :rtype: list[ApiParameter] + """ + return self._parameters + + @parameters.setter + def parameters(self, parameters): + """Sets the parameters of this ApiPipeline. + + Output. The input parameters for this pipeline. TODO(jingzhang36): replace this parameters field with the parameters field inside PipelineVersion when all usage of the former has been changed to use the latter. # noqa: E501 + + :param parameters: The parameters of this ApiPipeline. # noqa: E501 + :type parameters: list[ApiParameter] + """ + + self._parameters = parameters + + @property + def url(self): + """Gets the url of this ApiPipeline. # noqa: E501 + + + :return: The url of this ApiPipeline. # noqa: E501 + :rtype: ApiUrl + """ + return self._url + + @url.setter + def url(self, url): + """Sets the url of this ApiPipeline. + + + :param url: The url of this ApiPipeline. # noqa: E501 + :type url: ApiUrl + """ + + self._url = url + + @property + def error(self): + """Gets the error of this ApiPipeline. # noqa: E501 + + In case any error happens retrieving a pipeline field, only pipeline ID and the error message is returned. Client has the flexibility of choosing how to handle error. This is especially useful during listing call. # noqa: E501 + + :return: The error of this ApiPipeline. # noqa: E501 + :rtype: str + """ + return self._error + + @error.setter + def error(self, error): + """Sets the error of this ApiPipeline. + + In case any error happens retrieving a pipeline field, only pipeline ID and the error message is returned. Client has the flexibility of choosing how to handle error. This is especially useful during listing call. # noqa: E501 + + :param error: The error of this ApiPipeline. # noqa: E501 + :type error: str + """ + + self._error = error + + @property + def default_version(self): + """Gets the default_version of this ApiPipeline. # noqa: E501 + + + :return: The default_version of this ApiPipeline. # noqa: E501 + :rtype: ApiPipelineVersion + """ + return self._default_version + + @default_version.setter + def default_version(self, default_version): + """Sets the default_version of this ApiPipeline. + + + :param default_version: The default_version of this ApiPipeline. 
# noqa: E501 + :type default_version: ApiPipelineVersion + """ + + self._default_version = default_version + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiPipeline): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiPipeline): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_pipeline_runtime.py b/backend/api/python_http_client/kfp_server_api/models/api_pipeline_runtime.py new file mode 100644 index 000000000..010356702 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_pipeline_runtime.py @@ -0,0 +1,164 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiPipelineRuntime(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'pipeline_manifest': 'str', + 'workflow_manifest': 'str' + } + + attribute_map = { + 'pipeline_manifest': 'pipeline_manifest', + 'workflow_manifest': 'workflow_manifest' + } + + def __init__(self, pipeline_manifest=None, workflow_manifest=None, local_vars_configuration=None): # noqa: E501 + """ApiPipelineRuntime - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._pipeline_manifest = None + self._workflow_manifest = None + self.discriminator = None + + if pipeline_manifest is not None: + self.pipeline_manifest = pipeline_manifest + if workflow_manifest is not None: + self.workflow_manifest = workflow_manifest + + @property + def pipeline_manifest(self): + """Gets the pipeline_manifest of this ApiPipelineRuntime. # noqa: E501 + + Output. The runtime JSON manifest of the pipeline, including the status of pipeline steps and fields need for UI visualization etc. # noqa: E501 + + :return: The pipeline_manifest of this ApiPipelineRuntime. # noqa: E501 + :rtype: str + """ + return self._pipeline_manifest + + @pipeline_manifest.setter + def pipeline_manifest(self, pipeline_manifest): + """Sets the pipeline_manifest of this ApiPipelineRuntime. + + Output. The runtime JSON manifest of the pipeline, including the status of pipeline steps and fields need for UI visualization etc. # noqa: E501 + + :param pipeline_manifest: The pipeline_manifest of this ApiPipelineRuntime. # noqa: E501 + :type pipeline_manifest: str + """ + + self._pipeline_manifest = pipeline_manifest + + @property + def workflow_manifest(self): + """Gets the workflow_manifest of this ApiPipelineRuntime. # noqa: E501 + + Output. The runtime JSON manifest of the argo workflow. This is deprecated after pipeline_runtime_manifest is in use. # noqa: E501 + + :return: The workflow_manifest of this ApiPipelineRuntime. # noqa: E501 + :rtype: str + """ + return self._workflow_manifest + + @workflow_manifest.setter + def workflow_manifest(self, workflow_manifest): + """Sets the workflow_manifest of this ApiPipelineRuntime. + + Output. The runtime JSON manifest of the argo workflow. This is deprecated after pipeline_runtime_manifest is in use. # noqa: E501 + + :param workflow_manifest: The workflow_manifest of this ApiPipelineRuntime. 
# noqa: E501 + :type workflow_manifest: str + """ + + self._workflow_manifest = workflow_manifest + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiPipelineRuntime): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiPipelineRuntime): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_pipeline_spec.py b/backend/api/python_http_client/kfp_server_api/models/api_pipeline_spec.py new file mode 100644 index 000000000..b6120c178 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_pipeline_spec.py @@ -0,0 +1,248 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiPipelineSpec(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'pipeline_id': 'str', + 'pipeline_name': 'str', + 'workflow_manifest': 'str', + 'pipeline_manifest': 'str', + 'parameters': 'list[ApiParameter]' + } + + attribute_map = { + 'pipeline_id': 'pipeline_id', + 'pipeline_name': 'pipeline_name', + 'workflow_manifest': 'workflow_manifest', + 'pipeline_manifest': 'pipeline_manifest', + 'parameters': 'parameters' + } + + def __init__(self, pipeline_id=None, pipeline_name=None, workflow_manifest=None, pipeline_manifest=None, parameters=None, local_vars_configuration=None): # noqa: E501 + """ApiPipelineSpec - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._pipeline_id = None + self._pipeline_name = None + self._workflow_manifest = None + self._pipeline_manifest = None + self._parameters = None + self.discriminator = None + + if pipeline_id is not None: + self.pipeline_id = pipeline_id + if pipeline_name is not None: + self.pipeline_name = pipeline_name + if workflow_manifest is not None: + self.workflow_manifest = workflow_manifest + if pipeline_manifest is not None: + self.pipeline_manifest = pipeline_manifest + if parameters is not None: + self.parameters = parameters + + @property + def pipeline_id(self): + """Gets the pipeline_id of this ApiPipelineSpec. # noqa: E501 + + Optional input field. The ID of the pipeline user uploaded before. # noqa: E501 + + :return: The pipeline_id of this ApiPipelineSpec. # noqa: E501 + :rtype: str + """ + return self._pipeline_id + + @pipeline_id.setter + def pipeline_id(self, pipeline_id): + """Sets the pipeline_id of this ApiPipelineSpec. + + Optional input field. The ID of the pipeline user uploaded before. # noqa: E501 + + :param pipeline_id: The pipeline_id of this ApiPipelineSpec. # noqa: E501 + :type pipeline_id: str + """ + + self._pipeline_id = pipeline_id + + @property + def pipeline_name(self): + """Gets the pipeline_name of this ApiPipelineSpec. # noqa: E501 + + Optional output field. The name of the pipeline. Not empty if the pipeline id is not empty. # noqa: E501 + + :return: The pipeline_name of this ApiPipelineSpec. # noqa: E501 + :rtype: str + """ + return self._pipeline_name + + @pipeline_name.setter + def pipeline_name(self, pipeline_name): + """Sets the pipeline_name of this ApiPipelineSpec. + + Optional output field. The name of the pipeline. Not empty if the pipeline id is not empty. # noqa: E501 + + :param pipeline_name: The pipeline_name of this ApiPipelineSpec. # noqa: E501 + :type pipeline_name: str + """ + + self._pipeline_name = pipeline_name + + @property + def workflow_manifest(self): + """Gets the workflow_manifest of this ApiPipelineSpec. # noqa: E501 + + Optional input field. The marshalled raw argo JSON workflow. This will be deprecated when pipeline_manifest is in use. # noqa: E501 + + :return: The workflow_manifest of this ApiPipelineSpec. # noqa: E501 + :rtype: str + """ + return self._workflow_manifest + + @workflow_manifest.setter + def workflow_manifest(self, workflow_manifest): + """Sets the workflow_manifest of this ApiPipelineSpec. + + Optional input field. The marshalled raw argo JSON workflow. This will be deprecated when pipeline_manifest is in use. # noqa: E501 + + :param workflow_manifest: The workflow_manifest of this ApiPipelineSpec. 
# noqa: E501 + :type workflow_manifest: str + """ + + self._workflow_manifest = workflow_manifest + + @property + def pipeline_manifest(self): + """Gets the pipeline_manifest of this ApiPipelineSpec. # noqa: E501 + + Optional input field. The raw pipeline JSON spec. # noqa: E501 + + :return: The pipeline_manifest of this ApiPipelineSpec. # noqa: E501 + :rtype: str + """ + return self._pipeline_manifest + + @pipeline_manifest.setter + def pipeline_manifest(self, pipeline_manifest): + """Sets the pipeline_manifest of this ApiPipelineSpec. + + Optional input field. The raw pipeline JSON spec. # noqa: E501 + + :param pipeline_manifest: The pipeline_manifest of this ApiPipelineSpec. # noqa: E501 + :type pipeline_manifest: str + """ + + self._pipeline_manifest = pipeline_manifest + + @property + def parameters(self): + """Gets the parameters of this ApiPipelineSpec. # noqa: E501 + + The parameter user provide to inject to the pipeline JSON. If a default value of a parameter exist in the JSON, the value user provided here will replace. # noqa: E501 + + :return: The parameters of this ApiPipelineSpec. # noqa: E501 + :rtype: list[ApiParameter] + """ + return self._parameters + + @parameters.setter + def parameters(self, parameters): + """Sets the parameters of this ApiPipelineSpec. + + The parameter user provide to inject to the pipeline JSON. If a default value of a parameter exist in the JSON, the value user provided here will replace. # noqa: E501 + + :param parameters: The parameters of this ApiPipelineSpec. # noqa: E501 + :type parameters: list[ApiParameter] + """ + + self._parameters = parameters + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiPipelineSpec): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiPipelineSpec): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_pipeline_version.py b/backend/api/python_http_client/kfp_server_api/models/api_pipeline_version.py new file mode 100644 index 000000000..f5853758d --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_pipeline_version.py @@ -0,0 +1,302 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiPipelineVersion(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'id': 'str', + 'name': 'str', + 'created_at': 'datetime', + 'parameters': 'list[ApiParameter]', + 'code_source_url': 'str', + 'package_url': 'ApiUrl', + 'resource_references': 'list[ApiResourceReference]' + } + + attribute_map = { + 'id': 'id', + 'name': 'name', + 'created_at': 'created_at', + 'parameters': 'parameters', + 'code_source_url': 'code_source_url', + 'package_url': 'package_url', + 'resource_references': 'resource_references' + } + + def __init__(self, id=None, name=None, created_at=None, parameters=None, code_source_url=None, package_url=None, resource_references=None, local_vars_configuration=None): # noqa: E501 + """ApiPipelineVersion - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._id = None + self._name = None + self._created_at = None + self._parameters = None + self._code_source_url = None + self._package_url = None + self._resource_references = None + self.discriminator = None + + if id is not None: + self.id = id + if name is not None: + self.name = name + if created_at is not None: + self.created_at = created_at + if parameters is not None: + self.parameters = parameters + if code_source_url is not None: + self.code_source_url = code_source_url + if package_url is not None: + self.package_url = package_url + if resource_references is not None: + self.resource_references = resource_references + + @property + def id(self): + """Gets the id of this ApiPipelineVersion. # noqa: E501 + + Output. Unique version ID. Generated by API server. # noqa: E501 + + :return: The id of this ApiPipelineVersion. # noqa: E501 + :rtype: str + """ + return self._id + + @id.setter + def id(self, id): + """Sets the id of this ApiPipelineVersion. + + Output. Unique version ID. Generated by API server. # noqa: E501 + + :param id: The id of this ApiPipelineVersion. # noqa: E501 + :type id: str + """ + + self._id = id + + @property + def name(self): + """Gets the name of this ApiPipelineVersion. # noqa: E501 + + Optional input field. Version name provided by user. # noqa: E501 + + :return: The name of this ApiPipelineVersion. 
# noqa: E501 + :rtype: str + """ + return self._name + + @name.setter + def name(self, name): + """Sets the name of this ApiPipelineVersion. + + Optional input field. Version name provided by user. # noqa: E501 + + :param name: The name of this ApiPipelineVersion. # noqa: E501 + :type name: str + """ + + self._name = name + + @property + def created_at(self): + """Gets the created_at of this ApiPipelineVersion. # noqa: E501 + + Output. The time this pipeline version is created. # noqa: E501 + + :return: The created_at of this ApiPipelineVersion. # noqa: E501 + :rtype: datetime + """ + return self._created_at + + @created_at.setter + def created_at(self, created_at): + """Sets the created_at of this ApiPipelineVersion. + + Output. The time this pipeline version is created. # noqa: E501 + + :param created_at: The created_at of this ApiPipelineVersion. # noqa: E501 + :type created_at: datetime + """ + + self._created_at = created_at + + @property + def parameters(self): + """Gets the parameters of this ApiPipelineVersion. # noqa: E501 + + Output. The input parameters for this pipeline. # noqa: E501 + + :return: The parameters of this ApiPipelineVersion. # noqa: E501 + :rtype: list[ApiParameter] + """ + return self._parameters + + @parameters.setter + def parameters(self, parameters): + """Sets the parameters of this ApiPipelineVersion. + + Output. The input parameters for this pipeline. # noqa: E501 + + :param parameters: The parameters of this ApiPipelineVersion. # noqa: E501 + :type parameters: list[ApiParameter] + """ + + self._parameters = parameters + + @property + def code_source_url(self): + """Gets the code_source_url of this ApiPipelineVersion. # noqa: E501 + + Input. Optional. Pipeline version code source. # noqa: E501 + + :return: The code_source_url of this ApiPipelineVersion. # noqa: E501 + :rtype: str + """ + return self._code_source_url + + @code_source_url.setter + def code_source_url(self, code_source_url): + """Sets the code_source_url of this ApiPipelineVersion. + + Input. Optional. Pipeline version code source. # noqa: E501 + + :param code_source_url: The code_source_url of this ApiPipelineVersion. # noqa: E501 + :type code_source_url: str + """ + + self._code_source_url = code_source_url + + @property + def package_url(self): + """Gets the package_url of this ApiPipelineVersion. # noqa: E501 + + + :return: The package_url of this ApiPipelineVersion. # noqa: E501 + :rtype: ApiUrl + """ + return self._package_url + + @package_url.setter + def package_url(self, package_url): + """Sets the package_url of this ApiPipelineVersion. + + + :param package_url: The package_url of this ApiPipelineVersion. # noqa: E501 + :type package_url: ApiUrl + """ + + self._package_url = package_url + + @property + def resource_references(self): + """Gets the resource_references of this ApiPipelineVersion. # noqa: E501 + + Input. Required. E.g., specify which pipeline this pipeline version belongs to. # noqa: E501 + + :return: The resource_references of this ApiPipelineVersion. # noqa: E501 + :rtype: list[ApiResourceReference] + """ + return self._resource_references + + @resource_references.setter + def resource_references(self, resource_references): + """Sets the resource_references of this ApiPipelineVersion. + + Input. Required. E.g., specify which pipeline this pipeline version belongs to. # noqa: E501 + + :param resource_references: The resource_references of this ApiPipelineVersion. 
# noqa: E501 + :type resource_references: list[ApiResourceReference] + """ + + self._resource_references = resource_references + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiPipelineVersion): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiPipelineVersion): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_read_artifact_response.py b/backend/api/python_http_client/kfp_server_api/models/api_read_artifact_response.py new file mode 100644 index 000000000..a723b8c73 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_read_artifact_response.py @@ -0,0 +1,139 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiReadArtifactResponse(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'data': 'str' + } + + attribute_map = { + 'data': 'data' + } + + def __init__(self, data=None, local_vars_configuration=None): # noqa: E501 + """ApiReadArtifactResponse - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._data = None + self.discriminator = None + + if data is not None: + self.data = data + + @property + def data(self): + """Gets the data of this ApiReadArtifactResponse. 
# noqa: E501 + + The bytes of the artifact content. # noqa: E501 + + :return: The data of this ApiReadArtifactResponse. # noqa: E501 + :rtype: str + """ + return self._data + + @data.setter + def data(self, data): + """Sets the data of this ApiReadArtifactResponse. + + The bytes of the artifact content. # noqa: E501 + + :param data: The data of this ApiReadArtifactResponse. # noqa: E501 + :type data: str + """ + if (self.local_vars_configuration.client_side_validation and + data is not None and not re.search(r'^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$', data)): # noqa: E501 + raise ValueError(r"Invalid value for `data`, must be a follow pattern or equal to `/^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$/`") # noqa: E501 + + self._data = data + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiReadArtifactResponse): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiReadArtifactResponse): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_relationship.py b/backend/api/python_http_client/kfp_server_api/models/api_relationship.py new file mode 100644 index 000000000..dfba0c74f --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_relationship.py @@ -0,0 +1,115 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiRelationship(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
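Usage sketch (illustrative only, not part of the generated files): the `data` field of ApiReadArtifactResponse above is validated against a base64 pattern, so a caller is expected to base64-decode it to recover the artifact bytes. The sample string below is a placeholder.

    import base64

    from kfp_server_api.models.api_read_artifact_response import ApiReadArtifactResponse

    # "aGVsbG8=" is base64 for b"hello"; a real response carries the artifact content.
    response = ApiReadArtifactResponse(data="aGVsbG8=")
    artifact_bytes = base64.b64decode(response.data)
    print(artifact_bytes)  # b'hello'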
+ """ + + """ + allowed enum values + """ + UNKNOWN_RELATIONSHIP = "UNKNOWN_RELATIONSHIP" + OWNER = "OWNER" + CREATOR = "CREATOR" + + allowable_values = [UNKNOWN_RELATIONSHIP, OWNER, CREATOR] # noqa: E501 + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self, local_vars_configuration=None): # noqa: E501 + """ApiRelationship - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + self.discriminator = None + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiRelationship): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiRelationship): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_report_run_metrics_request.py b/backend/api/python_http_client/kfp_server_api/models/api_report_run_metrics_request.py new file mode 100644 index 000000000..5dee2a6df --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_report_run_metrics_request.py @@ -0,0 +1,164 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiReportRunMetricsRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. 
+ attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'run_id': 'str', + 'metrics': 'list[ApiRunMetric]' + } + + attribute_map = { + 'run_id': 'run_id', + 'metrics': 'metrics' + } + + def __init__(self, run_id=None, metrics=None, local_vars_configuration=None): # noqa: E501 + """ApiReportRunMetricsRequest - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._run_id = None + self._metrics = None + self.discriminator = None + + if run_id is not None: + self.run_id = run_id + if metrics is not None: + self.metrics = metrics + + @property + def run_id(self): + """Gets the run_id of this ApiReportRunMetricsRequest. # noqa: E501 + + Required. The parent run ID of the metric. # noqa: E501 + + :return: The run_id of this ApiReportRunMetricsRequest. # noqa: E501 + :rtype: str + """ + return self._run_id + + @run_id.setter + def run_id(self, run_id): + """Sets the run_id of this ApiReportRunMetricsRequest. + + Required. The parent run ID of the metric. # noqa: E501 + + :param run_id: The run_id of this ApiReportRunMetricsRequest. # noqa: E501 + :type run_id: str + """ + + self._run_id = run_id + + @property + def metrics(self): + """Gets the metrics of this ApiReportRunMetricsRequest. # noqa: E501 + + List of metrics to report. # noqa: E501 + + :return: The metrics of this ApiReportRunMetricsRequest. # noqa: E501 + :rtype: list[ApiRunMetric] + """ + return self._metrics + + @metrics.setter + def metrics(self, metrics): + """Sets the metrics of this ApiReportRunMetricsRequest. + + List of metrics to report. # noqa: E501 + + :param metrics: The metrics of this ApiReportRunMetricsRequest. 
# noqa: E501 + :type metrics: list[ApiRunMetric] + """ + + self._metrics = metrics + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiReportRunMetricsRequest): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiReportRunMetricsRequest): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_report_run_metrics_response.py b/backend/api/python_http_client/kfp_server_api/models/api_report_run_metrics_response.py new file mode 100644 index 000000000..cb3a2e294 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_report_run_metrics_response.py @@ -0,0 +1,134 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiReportRunMetricsResponse(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'results': 'list[ReportRunMetricsResponseReportRunMetricResult]' + } + + attribute_map = { + 'results': 'results' + } + + def __init__(self, results=None, local_vars_configuration=None): # noqa: E501 + """ApiReportRunMetricsResponse - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._results = None + self.discriminator = None + + if results is not None: + self.results = results + + @property + def results(self): + """Gets the results of this ApiReportRunMetricsResponse. # noqa: E501 + + + :return: The results of this ApiReportRunMetricsResponse. # noqa: E501 + :rtype: list[ReportRunMetricsResponseReportRunMetricResult] + """ + return self._results + + @results.setter + def results(self, results): + """Sets the results of this ApiReportRunMetricsResponse. + + + :param results: The results of this ApiReportRunMetricsResponse. # noqa: E501 + :type results: list[ReportRunMetricsResponseReportRunMetricResult] + """ + + self._results = results + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiReportRunMetricsResponse): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiReportRunMetricsResponse): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_resource_key.py b/backend/api/python_http_client/kfp_server_api/models/api_resource_key.py new file mode 100644 index 000000000..56064f432 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_resource_key.py @@ -0,0 +1,162 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiResourceKey(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'ApiResourceType', + 'id': 'str' + } + + attribute_map = { + 'type': 'type', + 'id': 'id' + } + + def __init__(self, type=None, id=None, local_vars_configuration=None): # noqa: E501 + """ApiResourceKey - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._type = None + self._id = None + self.discriminator = None + + if type is not None: + self.type = type + if id is not None: + self.id = id + + @property + def type(self): + """Gets the type of this ApiResourceKey. # noqa: E501 + + + :return: The type of this ApiResourceKey. # noqa: E501 + :rtype: ApiResourceType + """ + return self._type + + @type.setter + def type(self, type): + """Sets the type of this ApiResourceKey. + + + :param type: The type of this ApiResourceKey. # noqa: E501 + :type type: ApiResourceType + """ + + self._type = type + + @property + def id(self): + """Gets the id of this ApiResourceKey. # noqa: E501 + + The ID of the resource that referred to. # noqa: E501 + + :return: The id of this ApiResourceKey. # noqa: E501 + :rtype: str + """ + return self._id + + @id.setter + def id(self, id): + """Sets the id of this ApiResourceKey. + + The ID of the resource that referred to. # noqa: E501 + + :param id: The id of this ApiResourceKey. 
# noqa: E501 + :type id: str + """ + + self._id = id + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiResourceKey): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiResourceKey): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_resource_reference.py b/backend/api/python_http_client/kfp_server_api/models/api_resource_reference.py new file mode 100644 index 000000000..2179aab1c --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_resource_reference.py @@ -0,0 +1,188 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiResourceReference(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'key': 'ApiResourceKey', + 'name': 'str', + 'relationship': 'ApiRelationship' + } + + attribute_map = { + 'key': 'key', + 'name': 'name', + 'relationship': 'relationship' + } + + def __init__(self, key=None, name=None, relationship=None, local_vars_configuration=None): # noqa: E501 + """ApiResourceReference - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._key = None + self._name = None + self._relationship = None + self.discriminator = None + + if key is not None: + self.key = key + if name is not None: + self.name = name + if relationship is not None: + self.relationship = relationship + + @property + def key(self): + """Gets the key of this ApiResourceReference. # noqa: E501 + + + :return: The key of this ApiResourceReference. # noqa: E501 + :rtype: ApiResourceKey + """ + return self._key + + @key.setter + def key(self, key): + """Sets the key of this ApiResourceReference. + + + :param key: The key of this ApiResourceReference. # noqa: E501 + :type key: ApiResourceKey + """ + + self._key = key + + @property + def name(self): + """Gets the name of this ApiResourceReference. # noqa: E501 + + The name of the resource that referred to. # noqa: E501 + + :return: The name of this ApiResourceReference. # noqa: E501 + :rtype: str + """ + return self._name + + @name.setter + def name(self, name): + """Sets the name of this ApiResourceReference. + + The name of the resource that referred to. # noqa: E501 + + :param name: The name of this ApiResourceReference. # noqa: E501 + :type name: str + """ + + self._name = name + + @property + def relationship(self): + """Gets the relationship of this ApiResourceReference. # noqa: E501 + + + :return: The relationship of this ApiResourceReference. # noqa: E501 + :rtype: ApiRelationship + """ + return self._relationship + + @relationship.setter + def relationship(self, relationship): + """Sets the relationship of this ApiResourceReference. + + + :param relationship: The relationship of this ApiResourceReference. 
# noqa: E501 + :type relationship: ApiRelationship + """ + + self._relationship = relationship + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiResourceReference): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiResourceReference): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_resource_type.py b/backend/api/python_http_client/kfp_server_api/models/api_resource_type.py new file mode 100644 index 000000000..dffcbea3c --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_resource_type.py @@ -0,0 +1,118 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiResourceType(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + UNKNOWN_RESOURCE_TYPE = "UNKNOWN_RESOURCE_TYPE" + EXPERIMENT = "EXPERIMENT" + JOB = "JOB" + PIPELINE = "PIPELINE" + PIPELINE_VERSION = "PIPELINE_VERSION" + NAMESPACE = "NAMESPACE" + + allowable_values = [UNKNOWN_RESOURCE_TYPE, EXPERIMENT, JOB, PIPELINE, PIPELINE_VERSION, NAMESPACE] # noqa: E501 + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
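Usage sketch (illustrative only, not part of the generated files): ApiResourceKey, ApiResourceType, ApiRelationship, and ApiResourceReference work together to describe how one resource points at another. A minimal sketch of a reference that marks an experiment as the owner of the referring resource; the experiment ID is a placeholder.

    from kfp_server_api.models.api_relationship import ApiRelationship
    from kfp_server_api.models.api_resource_key import ApiResourceKey
    from kfp_server_api.models.api_resource_reference import ApiResourceReference
    from kfp_server_api.models.api_resource_type import ApiResourceType

    # Reference an experiment by ID and declare it the OWNER of the referring resource.
    experiment_ref = ApiResourceReference(
        key=ApiResourceKey(type=ApiResourceType.EXPERIMENT, id="<experiment-id>"),
        relationship=ApiRelationship.OWNER,
    )
    print(experiment_ref.to_dict())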
+ """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self, local_vars_configuration=None): # noqa: E501 + """ApiResourceType - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + self.discriminator = None + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiResourceType): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiResourceType): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_run.py b/backend/api/python_http_client/kfp_server_api/models/api_run.py new file mode 100644 index 000000000..fe945f6f6 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_run.py @@ -0,0 +1,464 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiRun(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'id': 'str', + 'name': 'str', + 'storage_state': 'RunStorageState', + 'description': 'str', + 'pipeline_spec': 'ApiPipelineSpec', + 'resource_references': 'list[ApiResourceReference]', + 'service_account': 'str', + 'created_at': 'datetime', + 'scheduled_at': 'datetime', + 'finished_at': 'datetime', + 'status': 'str', + 'error': 'str', + 'metrics': 'list[ApiRunMetric]' + } + + attribute_map = { + 'id': 'id', + 'name': 'name', + 'storage_state': 'storage_state', + 'description': 'description', + 'pipeline_spec': 'pipeline_spec', + 'resource_references': 'resource_references', + 'service_account': 'service_account', + 'created_at': 'created_at', + 'scheduled_at': 'scheduled_at', + 'finished_at': 'finished_at', + 'status': 'status', + 'error': 'error', + 'metrics': 'metrics' + } + + def __init__(self, id=None, name=None, storage_state=None, description=None, pipeline_spec=None, resource_references=None, service_account=None, created_at=None, scheduled_at=None, finished_at=None, status=None, error=None, metrics=None, local_vars_configuration=None): # noqa: E501 + """ApiRun - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._id = None + self._name = None + self._storage_state = None + self._description = None + self._pipeline_spec = None + self._resource_references = None + self._service_account = None + self._created_at = None + self._scheduled_at = None + self._finished_at = None + self._status = None + self._error = None + self._metrics = None + self.discriminator = None + + if id is not None: + self.id = id + if name is not None: + self.name = name + if storage_state is not None: + self.storage_state = storage_state + if description is not None: + self.description = description + if pipeline_spec is not None: + self.pipeline_spec = pipeline_spec + if resource_references is not None: + self.resource_references = resource_references + if service_account is not None: + self.service_account = service_account + if created_at is not None: + self.created_at = created_at + if scheduled_at is not None: + self.scheduled_at = scheduled_at + if finished_at is not None: + self.finished_at = finished_at + if status is not None: + self.status = status + if error is not None: + self.error = error + if metrics is not None: + self.metrics = metrics + + @property + def id(self): + """Gets the id of this ApiRun. # noqa: E501 + + Output. Unique run ID. Generated by API server. # noqa: E501 + + :return: The id of this ApiRun. # noqa: E501 + :rtype: str + """ + return self._id + + @id.setter + def id(self, id): + """Sets the id of this ApiRun. + + Output. Unique run ID. Generated by API server. # noqa: E501 + + :param id: The id of this ApiRun. # noqa: E501 + :type id: str + """ + + self._id = id + + @property + def name(self): + """Gets the name of this ApiRun. # noqa: E501 + + Required input field. Name provided by user, or auto generated if run is created by scheduled job. Not unique. # noqa: E501 + + :return: The name of this ApiRun. # noqa: E501 + :rtype: str + """ + return self._name + + @name.setter + def name(self, name): + """Sets the name of this ApiRun. + + Required input field. Name provided by user, or auto generated if run is created by scheduled job. Not unique. # noqa: E501 + + :param name: The name of this ApiRun. 
# noqa: E501 + :type name: str + """ + + self._name = name + + @property + def storage_state(self): + """Gets the storage_state of this ApiRun. # noqa: E501 + + + :return: The storage_state of this ApiRun. # noqa: E501 + :rtype: RunStorageState + """ + return self._storage_state + + @storage_state.setter + def storage_state(self, storage_state): + """Sets the storage_state of this ApiRun. + + + :param storage_state: The storage_state of this ApiRun. # noqa: E501 + :type storage_state: RunStorageState + """ + + self._storage_state = storage_state + + @property + def description(self): + """Gets the description of this ApiRun. # noqa: E501 + + + :return: The description of this ApiRun. # noqa: E501 + :rtype: str + """ + return self._description + + @description.setter + def description(self, description): + """Sets the description of this ApiRun. + + + :param description: The description of this ApiRun. # noqa: E501 + :type description: str + """ + + self._description = description + + @property + def pipeline_spec(self): + """Gets the pipeline_spec of this ApiRun. # noqa: E501 + + + :return: The pipeline_spec of this ApiRun. # noqa: E501 + :rtype: ApiPipelineSpec + """ + return self._pipeline_spec + + @pipeline_spec.setter + def pipeline_spec(self, pipeline_spec): + """Sets the pipeline_spec of this ApiRun. + + + :param pipeline_spec: The pipeline_spec of this ApiRun. # noqa: E501 + :type pipeline_spec: ApiPipelineSpec + """ + + self._pipeline_spec = pipeline_spec + + @property + def resource_references(self): + """Gets the resource_references of this ApiRun. # noqa: E501 + + Optional input field. Specify which resource this run belongs to. When creating a run from a particular pipeline version, the pipeline version can be specified here. # noqa: E501 + + :return: The resource_references of this ApiRun. # noqa: E501 + :rtype: list[ApiResourceReference] + """ + return self._resource_references + + @resource_references.setter + def resource_references(self, resource_references): + """Sets the resource_references of this ApiRun. + + Optional input field. Specify which resource this run belongs to. When creating a run from a particular pipeline version, the pipeline version can be specified here. # noqa: E501 + + :param resource_references: The resource_references of this ApiRun. # noqa: E501 + :type resource_references: list[ApiResourceReference] + """ + + self._resource_references = resource_references + + @property + def service_account(self): + """Gets the service_account of this ApiRun. # noqa: E501 + + Optional input field. Specify which Kubernetes service account this run uses. # noqa: E501 + + :return: The service_account of this ApiRun. # noqa: E501 + :rtype: str + """ + return self._service_account + + @service_account.setter + def service_account(self, service_account): + """Sets the service_account of this ApiRun. + + Optional input field. Specify which Kubernetes service account this run uses. # noqa: E501 + + :param service_account: The service_account of this ApiRun. # noqa: E501 + :type service_account: str + """ + + self._service_account = service_account + + @property + def created_at(self): + """Gets the created_at of this ApiRun. # noqa: E501 + + Output. The time that the run created. # noqa: E501 + + :return: The created_at of this ApiRun. # noqa: E501 + :rtype: datetime + """ + return self._created_at + + @created_at.setter + def created_at(self, created_at): + """Sets the created_at of this ApiRun. + + Output. The time that the run created. 
# noqa: E501 + + :param created_at: The created_at of this ApiRun. # noqa: E501 + :type created_at: datetime + """ + + self._created_at = created_at + + @property + def scheduled_at(self): + """Gets the scheduled_at of this ApiRun. # noqa: E501 + + Output. When this run is scheduled to run. This could be different from created_at. For example, if a run is from a backfilling job that was supposed to run 2 month ago, the scheduled_at is 2 month ago, v.s. created_at is the current time. # noqa: E501 + + :return: The scheduled_at of this ApiRun. # noqa: E501 + :rtype: datetime + """ + return self._scheduled_at + + @scheduled_at.setter + def scheduled_at(self, scheduled_at): + """Sets the scheduled_at of this ApiRun. + + Output. When this run is scheduled to run. This could be different from created_at. For example, if a run is from a backfilling job that was supposed to run 2 month ago, the scheduled_at is 2 month ago, v.s. created_at is the current time. # noqa: E501 + + :param scheduled_at: The scheduled_at of this ApiRun. # noqa: E501 + :type scheduled_at: datetime + """ + + self._scheduled_at = scheduled_at + + @property + def finished_at(self): + """Gets the finished_at of this ApiRun. # noqa: E501 + + Output. The time this run is finished. # noqa: E501 + + :return: The finished_at of this ApiRun. # noqa: E501 + :rtype: datetime + """ + return self._finished_at + + @finished_at.setter + def finished_at(self, finished_at): + """Sets the finished_at of this ApiRun. + + Output. The time this run is finished. # noqa: E501 + + :param finished_at: The finished_at of this ApiRun. # noqa: E501 + :type finished_at: datetime + """ + + self._finished_at = finished_at + + @property + def status(self): + """Gets the status of this ApiRun. # noqa: E501 + + + :return: The status of this ApiRun. # noqa: E501 + :rtype: str + """ + return self._status + + @status.setter + def status(self, status): + """Sets the status of this ApiRun. + + + :param status: The status of this ApiRun. # noqa: E501 + :type status: str + """ + + self._status = status + + @property + def error(self): + """Gets the error of this ApiRun. # noqa: E501 + + In case any error happens retrieving a run field, only run ID and the error message is returned. Client has the flexibility of choosing how to handle error. This is especially useful during listing call. # noqa: E501 + + :return: The error of this ApiRun. # noqa: E501 + :rtype: str + """ + return self._error + + @error.setter + def error(self, error): + """Sets the error of this ApiRun. + + In case any error happens retrieving a run field, only run ID and the error message is returned. Client has the flexibility of choosing how to handle error. This is especially useful during listing call. # noqa: E501 + + :param error: The error of this ApiRun. # noqa: E501 + :type error: str + """ + + self._error = error + + @property + def metrics(self): + """Gets the metrics of this ApiRun. # noqa: E501 + + Output. The metrics of the run. The metrics are reported by ReportMetrics API. # noqa: E501 + + :return: The metrics of this ApiRun. # noqa: E501 + :rtype: list[ApiRunMetric] + """ + return self._metrics + + @metrics.setter + def metrics(self, metrics): + """Sets the metrics of this ApiRun. + + Output. The metrics of the run. The metrics are reported by ReportMetrics API. # noqa: E501 + + :param metrics: The metrics of this ApiRun. 
# noqa: E501 + :type metrics: list[ApiRunMetric] + """ + + self._metrics = metrics + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiRun): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiRun): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_run_detail.py b/backend/api/python_http_client/kfp_server_api/models/api_run_detail.py new file mode 100644 index 000000000..e90e8a1e3 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_run_detail.py @@ -0,0 +1,160 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiRunDetail(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
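Usage sketch (illustrative only, not part of the generated files): ApiRun carries the nested models defined earlier in this package, such as ApiPipelineSpec and ApiResourceReference. A minimal construction sketch, assuming ApiPipelineSpec accepts the no-argument constructor that these generated models share; the manifest string is a placeholder.

    from kfp_server_api.models.api_pipeline_spec import ApiPipelineSpec
    from kfp_server_api.models.api_run import ApiRun

    spec = ApiPipelineSpec()
    spec.pipeline_manifest = '{"pipelineSpec": "..."}'  # raw pipeline JSON, abbreviated here

    run = ApiRun(
        name="nightly-training",
        description="illustrative run request",
        pipeline_spec=spec,
    )
    # to_dict() serializes nested models recursively, so the spec appears inline.
    print(run.to_dict())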
+ """ + openapi_types = { + 'run': 'ApiRun', + 'pipeline_runtime': 'ApiPipelineRuntime' + } + + attribute_map = { + 'run': 'run', + 'pipeline_runtime': 'pipeline_runtime' + } + + def __init__(self, run=None, pipeline_runtime=None, local_vars_configuration=None): # noqa: E501 + """ApiRunDetail - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._run = None + self._pipeline_runtime = None + self.discriminator = None + + if run is not None: + self.run = run + if pipeline_runtime is not None: + self.pipeline_runtime = pipeline_runtime + + @property + def run(self): + """Gets the run of this ApiRunDetail. # noqa: E501 + + + :return: The run of this ApiRunDetail. # noqa: E501 + :rtype: ApiRun + """ + return self._run + + @run.setter + def run(self, run): + """Sets the run of this ApiRunDetail. + + + :param run: The run of this ApiRunDetail. # noqa: E501 + :type run: ApiRun + """ + + self._run = run + + @property + def pipeline_runtime(self): + """Gets the pipeline_runtime of this ApiRunDetail. # noqa: E501 + + + :return: The pipeline_runtime of this ApiRunDetail. # noqa: E501 + :rtype: ApiPipelineRuntime + """ + return self._pipeline_runtime + + @pipeline_runtime.setter + def pipeline_runtime(self, pipeline_runtime): + """Sets the pipeline_runtime of this ApiRunDetail. + + + :param pipeline_runtime: The pipeline_runtime of this ApiRunDetail. # noqa: E501 + :type pipeline_runtime: ApiPipelineRuntime + """ + + self._pipeline_runtime = pipeline_runtime + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiRunDetail): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiRunDetail): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_run_metric.py b/backend/api/python_http_client/kfp_server_api/models/api_run_metric.py new file mode 100644 index 000000000..193a27e25 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_run_metric.py @@ -0,0 +1,218 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiRunMetric(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'name': 'str', + 'node_id': 'str', + 'number_value': 'float', + 'format': 'RunMetricFormat' + } + + attribute_map = { + 'name': 'name', + 'node_id': 'node_id', + 'number_value': 'number_value', + 'format': 'format' + } + + def __init__(self, name=None, node_id=None, number_value=None, format=None, local_vars_configuration=None): # noqa: E501 + """ApiRunMetric - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._name = None + self._node_id = None + self._number_value = None + self._format = None + self.discriminator = None + + if name is not None: + self.name = name + if node_id is not None: + self.node_id = node_id + if number_value is not None: + self.number_value = number_value + if format is not None: + self.format = format + + @property + def name(self): + """Gets the name of this ApiRunMetric. # noqa: E501 + + Required. The user defined name of the metric. It must between 1 and 63 characters long and must conform to the following regular expression: `[a-z]([-a-z0-9]*[a-z0-9])?`. # noqa: E501 + + :return: The name of this ApiRunMetric. # noqa: E501 + :rtype: str + """ + return self._name + + @name.setter + def name(self, name): + """Sets the name of this ApiRunMetric. + + Required. The user defined name of the metric. It must between 1 and 63 characters long and must conform to the following regular expression: `[a-z]([-a-z0-9]*[a-z0-9])?`. # noqa: E501 + + :param name: The name of this ApiRunMetric. # noqa: E501 + :type name: str + """ + + self._name = name + + @property + def node_id(self): + """Gets the node_id of this ApiRunMetric. # noqa: E501 + + Required. The runtime node ID which reports the metric. The node ID can be found in the RunDetail.workflow.Status. Metric with same (node_id, name) are considerd as duplicate. Only the first reporting will be recorded. Max length is 128. # noqa: E501 + + :return: The node_id of this ApiRunMetric. # noqa: E501 + :rtype: str + """ + return self._node_id + + @node_id.setter + def node_id(self, node_id): + """Sets the node_id of this ApiRunMetric. + + Required. The runtime node ID which reports the metric. The node ID can be found in the RunDetail.workflow.Status. Metric with same (node_id, name) are considerd as duplicate. Only the first reporting will be recorded. Max length is 128. # noqa: E501 + + :param node_id: The node_id of this ApiRunMetric. # noqa: E501 + :type node_id: str + """ + + self._node_id = node_id + + @property + def number_value(self): + """Gets the number_value of this ApiRunMetric. 
# noqa: E501 + + The number value of the metric. # noqa: E501 + + :return: The number_value of this ApiRunMetric. # noqa: E501 + :rtype: float + """ + return self._number_value + + @number_value.setter + def number_value(self, number_value): + """Sets the number_value of this ApiRunMetric. + + The number value of the metric. # noqa: E501 + + :param number_value: The number_value of this ApiRunMetric. # noqa: E501 + :type number_value: float + """ + + self._number_value = number_value + + @property + def format(self): + """Gets the format of this ApiRunMetric. # noqa: E501 + + + :return: The format of this ApiRunMetric. # noqa: E501 + :rtype: RunMetricFormat + """ + return self._format + + @format.setter + def format(self, format): + """Sets the format of this ApiRunMetric. + + + :param format: The format of this ApiRunMetric. # noqa: E501 + :type format: RunMetricFormat + """ + + self._format = format + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiRunMetric): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiRunMetric): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_status.py b/backend/api/python_http_client/kfp_server_api/models/api_status.py new file mode 100644 index 000000000..48093c5a2 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_status.py @@ -0,0 +1,186 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiStatus(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
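Usage sketch (illustrative only, not part of the generated files): ApiRunMetric is the element type carried by ApiReportRunMetricsRequest.metrics, defined earlier in this patch. The run and node IDs below are placeholders, and the metric name respects the `[a-z]([-a-z0-9]*[a-z0-9])?` constraint noted above.

    from kfp_server_api.models.api_report_run_metrics_request import ApiReportRunMetricsRequest
    from kfp_server_api.models.api_run_metric import ApiRunMetric

    metric = ApiRunMetric(
        name="accuracy-score",        # must match [a-z]([-a-z0-9]*[a-z0-9])?
        node_id="<workflow-node-id>",
        number_value=0.91,
    )
    request = ApiReportRunMetricsRequest(run_id="<run-id>", metrics=[metric])
    print(request.to_dict())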
+ """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'error': 'str', + 'code': 'int', + 'details': 'list[ProtobufAny]' + } + + attribute_map = { + 'error': 'error', + 'code': 'code', + 'details': 'details' + } + + def __init__(self, error=None, code=None, details=None, local_vars_configuration=None): # noqa: E501 + """ApiStatus - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._error = None + self._code = None + self._details = None + self.discriminator = None + + if error is not None: + self.error = error + if code is not None: + self.code = code + if details is not None: + self.details = details + + @property + def error(self): + """Gets the error of this ApiStatus. # noqa: E501 + + + :return: The error of this ApiStatus. # noqa: E501 + :rtype: str + """ + return self._error + + @error.setter + def error(self, error): + """Sets the error of this ApiStatus. + + + :param error: The error of this ApiStatus. # noqa: E501 + :type error: str + """ + + self._error = error + + @property + def code(self): + """Gets the code of this ApiStatus. # noqa: E501 + + + :return: The code of this ApiStatus. # noqa: E501 + :rtype: int + """ + return self._code + + @code.setter + def code(self, code): + """Sets the code of this ApiStatus. + + + :param code: The code of this ApiStatus. # noqa: E501 + :type code: int + """ + + self._code = code + + @property + def details(self): + """Gets the details of this ApiStatus. # noqa: E501 + + + :return: The details of this ApiStatus. # noqa: E501 + :rtype: list[ProtobufAny] + """ + return self._details + + @details.setter + def details(self, details): + """Sets the details of this ApiStatus. + + + :param details: The details of this ApiStatus. 
# noqa: E501 + :type details: list[ProtobufAny] + """ + + self._details = details + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiStatus): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiStatus): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_trigger.py b/backend/api/python_http_client/kfp_server_api/models/api_trigger.py new file mode 100644 index 000000000..b0950eff6 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_trigger.py @@ -0,0 +1,160 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiTrigger(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'cron_schedule': 'ApiCronSchedule', + 'periodic_schedule': 'ApiPeriodicSchedule' + } + + attribute_map = { + 'cron_schedule': 'cron_schedule', + 'periodic_schedule': 'periodic_schedule' + } + + def __init__(self, cron_schedule=None, periodic_schedule=None, local_vars_configuration=None): # noqa: E501 + """ApiTrigger - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._cron_schedule = None + self._periodic_schedule = None + self.discriminator = None + + if cron_schedule is not None: + self.cron_schedule = cron_schedule + if periodic_schedule is not None: + self.periodic_schedule = periodic_schedule + + @property + def cron_schedule(self): + """Gets the cron_schedule of this ApiTrigger. # noqa: E501 + + + :return: The cron_schedule of this ApiTrigger. # noqa: E501 + :rtype: ApiCronSchedule + """ + return self._cron_schedule + + @cron_schedule.setter + def cron_schedule(self, cron_schedule): + """Sets the cron_schedule of this ApiTrigger. + + + :param cron_schedule: The cron_schedule of this ApiTrigger. # noqa: E501 + :type cron_schedule: ApiCronSchedule + """ + + self._cron_schedule = cron_schedule + + @property + def periodic_schedule(self): + """Gets the periodic_schedule of this ApiTrigger. # noqa: E501 + + + :return: The periodic_schedule of this ApiTrigger. # noqa: E501 + :rtype: ApiPeriodicSchedule + """ + return self._periodic_schedule + + @periodic_schedule.setter + def periodic_schedule(self, periodic_schedule): + """Sets the periodic_schedule of this ApiTrigger. + + + :param periodic_schedule: The periodic_schedule of this ApiTrigger. # noqa: E501 + :type periodic_schedule: ApiPeriodicSchedule + """ + + self._periodic_schedule = periodic_schedule + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiTrigger): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiTrigger): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/api_url.py b/backend/api/python_http_client/kfp_server_api/models/api_url.py new file mode 100644 index 000000000..e54254540 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/api_url.py @@ -0,0 +1,136 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ApiUrl(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'pipeline_url': 'str' + } + + attribute_map = { + 'pipeline_url': 'pipeline_url' + } + + def __init__(self, pipeline_url=None, local_vars_configuration=None): # noqa: E501 + """ApiUrl - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._pipeline_url = None + self.discriminator = None + + if pipeline_url is not None: + self.pipeline_url = pipeline_url + + @property + def pipeline_url(self): + """Gets the pipeline_url of this ApiUrl. # noqa: E501 + + URL of the pipeline definition or the pipeline version definition. # noqa: E501 + + :return: The pipeline_url of this ApiUrl. # noqa: E501 + :rtype: str + """ + return self._pipeline_url + + @pipeline_url.setter + def pipeline_url(self, pipeline_url): + """Sets the pipeline_url of this ApiUrl. + + URL of the pipeline definition or the pipeline version definition. # noqa: E501 + + :param pipeline_url: The pipeline_url of this ApiUrl. 
# noqa: E501 + :type pipeline_url: str + """ + + self._pipeline_url = pipeline_url + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ApiUrl): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ApiUrl): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/experiment_storage_state.py b/backend/api/python_http_client/kfp_server_api/models/experiment_storage_state.py new file mode 100644 index 000000000..183f263bb --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/experiment_storage_state.py @@ -0,0 +1,115 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ExperimentStorageState(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + UNSPECIFIED = "STORAGESTATE_UNSPECIFIED" + AVAILABLE = "STORAGESTATE_AVAILABLE" + ARCHIVED = "STORAGESTATE_ARCHIVED" + + allowable_values = [UNSPECIFIED, AVAILABLE, ARCHIVED] # noqa: E501 + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self, local_vars_configuration=None): # noqa: E501 + """ExperimentStorageState - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + self.discriminator = None + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ExperimentStorageState): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ExperimentStorageState): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/job_mode.py b/backend/api/python_http_client/kfp_server_api/models/job_mode.py new file mode 100644 index 000000000..b8da2d381 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/job_mode.py @@ -0,0 +1,115 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class JobMode(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + UNKNOWN_MODE = "UNKNOWN_MODE" + ENABLED = "ENABLED" + DISABLED = "DISABLED" + + allowable_values = [UNKNOWN_MODE, ENABLED, DISABLED] # noqa: E501 + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self, local_vars_configuration=None): # noqa: E501 + """JobMode - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + self.discriminator = None + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, JobMode): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, JobMode): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/protobuf_any.py b/backend/api/python_http_client/kfp_server_api/models/protobuf_any.py new file mode 100644 index 000000000..448758207 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/protobuf_any.py @@ -0,0 +1,167 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ProtobufAny(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'type_url': 'str', + 'value': 'str' + } + + attribute_map = { + 'type_url': 'type_url', + 'value': 'value' + } + + def __init__(self, type_url=None, value=None, local_vars_configuration=None): # noqa: E501 + """ProtobufAny - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._type_url = None + self._value = None + self.discriminator = None + + if type_url is not None: + self.type_url = type_url + if value is not None: + self.value = value + + @property + def type_url(self): + """Gets the type_url of this ProtobufAny. # noqa: E501 + + A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. # noqa: E501 + + :return: The type_url of this ProtobufAny. # noqa: E501 + :rtype: str + """ + return self._type_url + + @type_url.setter + def type_url(self, type_url): + """Sets the type_url of this ProtobufAny. + + A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. # noqa: E501 + + :param type_url: The type_url of this ProtobufAny. 
# noqa: E501 + :type type_url: str + """ + + self._type_url = type_url + + @property + def value(self): + """Gets the value of this ProtobufAny. # noqa: E501 + + Must be a valid serialized protocol buffer of the above specified type. # noqa: E501 + + :return: The value of this ProtobufAny. # noqa: E501 + :rtype: str + """ + return self._value + + @value.setter + def value(self, value): + """Sets the value of this ProtobufAny. + + Must be a valid serialized protocol buffer of the above specified type. # noqa: E501 + + :param value: The value of this ProtobufAny. # noqa: E501 + :type value: str + """ + if (self.local_vars_configuration.client_side_validation and + value is not None and not re.search(r'^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$', value)): # noqa: E501 + raise ValueError(r"Invalid value for `value`, must be a follow pattern or equal to `/^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$/`") # noqa: E501 + + self._value = value + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ProtobufAny): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ProtobufAny): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/report_run_metrics_response_report_run_metric_result.py b/backend/api/python_http_client/kfp_server_api/models/report_run_metrics_response_report_run_metric_result.py new file mode 100644 index 000000000..6b89291fa --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/report_run_metrics_response_report_run_metric_result.py @@ -0,0 +1,218 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ReportRunMetricsResponseReportRunMetricResult(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'metric_name': 'str', + 'metric_node_id': 'str', + 'status': 'ReportRunMetricsResponseReportRunMetricResultStatus', + 'message': 'str' + } + + attribute_map = { + 'metric_name': 'metric_name', + 'metric_node_id': 'metric_node_id', + 'status': 'status', + 'message': 'message' + } + + def __init__(self, metric_name=None, metric_node_id=None, status=None, message=None, local_vars_configuration=None): # noqa: E501 + """ReportRunMetricsResponseReportRunMetricResult - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._metric_name = None + self._metric_node_id = None + self._status = None + self._message = None + self.discriminator = None + + if metric_name is not None: + self.metric_name = metric_name + if metric_node_id is not None: + self.metric_node_id = metric_node_id + if status is not None: + self.status = status + if message is not None: + self.message = message + + @property + def metric_name(self): + """Gets the metric_name of this ReportRunMetricsResponseReportRunMetricResult. # noqa: E501 + + Output. The name of the metric. # noqa: E501 + + :return: The metric_name of this ReportRunMetricsResponseReportRunMetricResult. # noqa: E501 + :rtype: str + """ + return self._metric_name + + @metric_name.setter + def metric_name(self, metric_name): + """Sets the metric_name of this ReportRunMetricsResponseReportRunMetricResult. + + Output. The name of the metric. # noqa: E501 + + :param metric_name: The metric_name of this ReportRunMetricsResponseReportRunMetricResult. # noqa: E501 + :type metric_name: str + """ + + self._metric_name = metric_name + + @property + def metric_node_id(self): + """Gets the metric_node_id of this ReportRunMetricsResponseReportRunMetricResult. # noqa: E501 + + Output. The ID of the node which reports the metric. # noqa: E501 + + :return: The metric_node_id of this ReportRunMetricsResponseReportRunMetricResult. # noqa: E501 + :rtype: str + """ + return self._metric_node_id + + @metric_node_id.setter + def metric_node_id(self, metric_node_id): + """Sets the metric_node_id of this ReportRunMetricsResponseReportRunMetricResult. + + Output. The ID of the node which reports the metric. # noqa: E501 + + :param metric_node_id: The metric_node_id of this ReportRunMetricsResponseReportRunMetricResult. # noqa: E501 + :type metric_node_id: str + """ + + self._metric_node_id = metric_node_id + + @property + def status(self): + """Gets the status of this ReportRunMetricsResponseReportRunMetricResult. # noqa: E501 + + + :return: The status of this ReportRunMetricsResponseReportRunMetricResult. 
# noqa: E501 + :rtype: ReportRunMetricsResponseReportRunMetricResultStatus + """ + return self._status + + @status.setter + def status(self, status): + """Sets the status of this ReportRunMetricsResponseReportRunMetricResult. + + + :param status: The status of this ReportRunMetricsResponseReportRunMetricResult. # noqa: E501 + :type status: ReportRunMetricsResponseReportRunMetricResultStatus + """ + + self._status = status + + @property + def message(self): + """Gets the message of this ReportRunMetricsResponseReportRunMetricResult. # noqa: E501 + + Output. The detailed message of the error of the reporting. # noqa: E501 + + :return: The message of this ReportRunMetricsResponseReportRunMetricResult. # noqa: E501 + :rtype: str + """ + return self._message + + @message.setter + def message(self, message): + """Sets the message of this ReportRunMetricsResponseReportRunMetricResult. + + Output. The detailed message of the error of the reporting. # noqa: E501 + + :param message: The message of this ReportRunMetricsResponseReportRunMetricResult. # noqa: E501 + :type message: str + """ + + self._message = message + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ReportRunMetricsResponseReportRunMetricResult): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ReportRunMetricsResponseReportRunMetricResult): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/report_run_metrics_response_report_run_metric_result_status.py b/backend/api/python_http_client/kfp_server_api/models/report_run_metrics_response_report_run_metric_result_status.py new file mode 100644 index 000000000..77133f18b --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/report_run_metrics_response_report_run_metric_result_status.py @@ -0,0 +1,117 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ReportRunMetricsResponseReportRunMetricResultStatus(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + UNSPECIFIED = "UNSPECIFIED" + OK = "OK" + INVALID_ARGUMENT = "INVALID_ARGUMENT" + DUPLICATE_REPORTING = "DUPLICATE_REPORTING" + INTERNAL_ERROR = "INTERNAL_ERROR" + + allowable_values = [UNSPECIFIED, OK, INVALID_ARGUMENT, DUPLICATE_REPORTING, INTERNAL_ERROR] # noqa: E501 + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self, local_vars_configuration=None): # noqa: E501 + """ReportRunMetricsResponseReportRunMetricResultStatus - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + self.discriminator = None + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ReportRunMetricsResponseReportRunMetricResultStatus): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ReportRunMetricsResponseReportRunMetricResultStatus): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/run_metric_format.py b/backend/api/python_http_client/kfp_server_api/models/run_metric_format.py new file mode 100644 index 000000000..3397d0f0d --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/run_metric_format.py @@ -0,0 +1,115 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
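The per-metric result model and its status enum above are what the run service hands back when metrics are reported. A small sketch (editorial illustration; `results` is assumed to be a list of ReportRunMetricsResponseReportRunMetricResult objects returned by the report call) of building a metric payload and filtering failed reports:

from kfp_server_api.models.api_run_metric import ApiRunMetric
from kfp_server_api.models.report_run_metrics_response_report_run_metric_result_status import (
    ReportRunMetricsResponseReportRunMetricResultStatus as MetricResultStatus,
)

# A metric payload as ApiRunMetric defines it: the name must match
# [a-z]([-a-z0-9]*[a-z0-9])? and node_id names the reporting workflow node.
metric = ApiRunMetric(name="accuracy", node_id="train-step-1234", number_value=0.92)

def failed_metric_names(results):
    """Names of metrics whose report result is neither OK nor a duplicate."""
    accepted = (MetricResultStatus.OK, MetricResultStatus.DUPLICATE_REPORTING)
    return [r.metric_name for r in results if r.status not in accepted]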
+ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class RunMetricFormat(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + UNSPECIFIED = "UNSPECIFIED" + RAW = "RAW" + PERCENTAGE = "PERCENTAGE" + + allowable_values = [UNSPECIFIED, RAW, PERCENTAGE] # noqa: E501 + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self, local_vars_configuration=None): # noqa: E501 + """RunMetricFormat - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + self.discriminator = None + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, RunMetricFormat): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, RunMetricFormat): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/models/run_storage_state.py b/backend/api/python_http_client/kfp_server_api/models/run_storage_state.py new file mode 100644 index 000000000..641e3d352 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/models/run_storage_state.py @@ -0,0 +1,114 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class RunStorageState(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + AVAILABLE = "STORAGESTATE_AVAILABLE" + ARCHIVED = "STORAGESTATE_ARCHIVED" + + allowable_values = [AVAILABLE, ARCHIVED] # noqa: E501 + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self, local_vars_configuration=None): # noqa: E501 + """RunStorageState - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + self.discriminator = None + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, RunStorageState): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, RunStorageState): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/python_http_client/kfp_server_api/rest.py b/backend/api/python_http_client/kfp_server_api/rest.py new file mode 100644 index 000000000..d5c87b026 --- /dev/null +++ b/backend/api/python_http_client/kfp_server_api/rest.py @@ -0,0 +1,305 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import io +import json +import logging +import re +import ssl + +import certifi +# python 2 and python 3 compatibility library +import six +from six.moves.urllib.parse import urlencode +import urllib3 + +from kfp_server_api.exceptions import ApiException, ApiValueError + + +logger = logging.getLogger(__name__) + + +class RESTResponse(io.IOBase): + + def __init__(self, resp): + self.urllib3_response = resp + self.status = resp.status + self.reason = resp.reason + self.data = resp.data + + def getheaders(self): + """Returns a dictionary of the response headers.""" + return self.urllib3_response.getheaders() + + def getheader(self, name, default=None): + """Returns a given response header.""" + return self.urllib3_response.getheader(name, default) + + +class RESTClientObject(object): + + def __init__(self, configuration, pools_size=4, maxsize=None): + # urllib3.PoolManager will pass all kw parameters to connectionpool + # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501 + # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501 + # maxsize is the number of requests to host that are allowed in parallel # noqa: E501 + # Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501 + + # cert_reqs + if configuration.verify_ssl: + cert_reqs = ssl.CERT_REQUIRED + else: + cert_reqs = ssl.CERT_NONE + + # ca_certs + if configuration.ssl_ca_cert: + ca_certs = configuration.ssl_ca_cert + else: + # if not set certificate file, use Mozilla's root certificates. + ca_certs = certifi.where() + + addition_pool_args = {} + if configuration.assert_hostname is not None: + addition_pool_args['assert_hostname'] = configuration.assert_hostname # noqa: E501 + + if configuration.retries is not None: + addition_pool_args['retries'] = configuration.retries + + if maxsize is None: + if configuration.connection_pool_maxsize is not None: + maxsize = configuration.connection_pool_maxsize + else: + maxsize = 4 + + # https pool manager + if configuration.proxy: + self.pool_manager = urllib3.ProxyManager( + num_pools=pools_size, + maxsize=maxsize, + cert_reqs=cert_reqs, + ca_certs=ca_certs, + cert_file=configuration.cert_file, + key_file=configuration.key_file, + proxy_url=configuration.proxy, + proxy_headers=configuration.proxy_headers, + **addition_pool_args + ) + else: + self.pool_manager = urllib3.PoolManager( + num_pools=pools_size, + maxsize=maxsize, + cert_reqs=cert_reqs, + ca_certs=ca_certs, + cert_file=configuration.cert_file, + key_file=configuration.key_file, + **addition_pool_args + ) + + def request(self, method, url, query_params=None, headers=None, + body=None, post_params=None, _preload_content=True, + _request_timeout=None): + """Perform requests. + + :param method: http request method + :param url: http request url + :param query_params: query parameters in the url + :param headers: http request headers + :param body: request json body, for `application/json` + :param post_params: request post parameters, + `application/x-www-form-urlencoded` + and `multipart/form-data` + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. 
+ :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + """ + method = method.upper() + assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT', + 'PATCH', 'OPTIONS'] + + if post_params and body: + raise ApiValueError( + "body parameter cannot be used with post_params parameter." + ) + + post_params = post_params or {} + headers = headers or {} + + timeout = None + if _request_timeout: + if isinstance(_request_timeout, (int, ) if six.PY3 else (int, long)): # noqa: E501,F821 + timeout = urllib3.Timeout(total=_request_timeout) + elif (isinstance(_request_timeout, tuple) and + len(_request_timeout) == 2): + timeout = urllib3.Timeout( + connect=_request_timeout[0], read=_request_timeout[1]) + + if 'Content-Type' not in headers: + headers['Content-Type'] = 'application/json' + + try: + # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE` + if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']: + if query_params: + url += '?' + urlencode(query_params) + if re.search('json', headers['Content-Type'], re.IGNORECASE): + request_body = None + if body is not None: + request_body = json.dumps(body) + r = self.pool_manager.request( + method, url, + body=request_body, + preload_content=_preload_content, + timeout=timeout, + headers=headers) + elif headers['Content-Type'] == 'application/x-www-form-urlencoded': # noqa: E501 + r = self.pool_manager.request( + method, url, + fields=post_params, + encode_multipart=False, + preload_content=_preload_content, + timeout=timeout, + headers=headers) + elif headers['Content-Type'] == 'multipart/form-data': + # must del headers['Content-Type'], or the correct + # Content-Type which generated by urllib3 will be + # overwritten. + del headers['Content-Type'] + r = self.pool_manager.request( + method, url, + fields=post_params, + encode_multipart=True, + preload_content=_preload_content, + timeout=timeout, + headers=headers) + # Pass a `string` parameter directly in the body to support + # other content types than Json when `body` argument is + # provided in serialized form + elif isinstance(body, str) or isinstance(body, bytes): + request_body = body + r = self.pool_manager.request( + method, url, + body=request_body, + preload_content=_preload_content, + timeout=timeout, + headers=headers) + else: + # Cannot generate the request from given parameters + msg = """Cannot prepare a request message for provided + arguments. 
Please check that your arguments match + declared content type.""" + raise ApiException(status=0, reason=msg) + # For `GET`, `HEAD` + else: + r = self.pool_manager.request(method, url, + fields=query_params, + preload_content=_preload_content, + timeout=timeout, + headers=headers) + except urllib3.exceptions.SSLError as e: + msg = "{0}\n{1}".format(type(e).__name__, str(e)) + raise ApiException(status=0, reason=msg) + + if _preload_content: + r = RESTResponse(r) + + # log response body + logger.debug("response body: %s", r.data) + + if not 200 <= r.status <= 299: + raise ApiException(http_resp=r) + + return r + + def GET(self, url, headers=None, query_params=None, _preload_content=True, + _request_timeout=None): + return self.request("GET", url, + headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + query_params=query_params) + + def HEAD(self, url, headers=None, query_params=None, _preload_content=True, + _request_timeout=None): + return self.request("HEAD", url, + headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + query_params=query_params) + + def OPTIONS(self, url, headers=None, query_params=None, post_params=None, + body=None, _preload_content=True, _request_timeout=None): + return self.request("OPTIONS", url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + + def DELETE(self, url, headers=None, query_params=None, body=None, + _preload_content=True, _request_timeout=None): + return self.request("DELETE", url, + headers=headers, + query_params=query_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + + def POST(self, url, headers=None, query_params=None, post_params=None, + body=None, _preload_content=True, _request_timeout=None): + return self.request("POST", url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + + def PUT(self, url, headers=None, query_params=None, post_params=None, + body=None, _preload_content=True, _request_timeout=None): + return self.request("PUT", url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + + def PATCH(self, url, headers=None, query_params=None, post_params=None, + body=None, _preload_content=True, _request_timeout=None): + return self.request("PATCH", url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) diff --git a/backend/api/python_http_client/requirements.txt b/backend/api/python_http_client/requirements.txt new file mode 100644 index 000000000..eb358efd5 --- /dev/null +++ b/backend/api/python_http_client/requirements.txt @@ -0,0 +1,6 @@ +certifi >= 14.05.14 +future; python_version<="2.7" +six >= 1.10 +python_dateutil >= 2.5.3 +setuptools >= 21.0.0 +urllib3 >= 1.15.1 diff --git a/backend/api/python_http_client/setup.cfg b/backend/api/python_http_client/setup.cfg new file mode 100644 index 000000000..11433ee87 --- /dev/null +++ b/backend/api/python_http_client/setup.cfg @@ -0,0 +1,2 @@ +[flake8] +max-line-length=99 diff --git a/backend/api/python_http_client/setup.py b/backend/api/python_http_client/setup.py new file mode 100644 index 000000000..b96ee2163 --- /dev/null +++ 
b/backend/api/python_http_client/setup.py @@ -0,0 +1,55 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from setuptools import setup, find_packages # noqa: H301 + +NAME = "kfp-server-api" +VERSION = "1.0.0" +# To install the library, run the following +# +# python setup.py install +# +# prerequisite: setuptools +# http://pypi.python.org/pypi/setuptools + +REQUIRES = ["urllib3 >= 1.15", "six >= 1.10", "certifi", "python-dateutil"] + +setup( + name=NAME, + version=VERSION, + description="Kubeflow Pipelines API", + author="google", + author_email="kubeflow-pipelines@google.com", + url="https://github.com/kubeflow/pipelines", + keywords=["OpenAPI", "OpenAPI-Generator", "Kubeflow Pipelines API"], + install_requires=REQUIRES, + packages=find_packages(exclude=["test", "tests"]), + include_package_data=True, + license="Apache 2.0", + long_description="""\ + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. # noqa: E501 + """ +) diff --git a/backend/api/python_http_client/test-requirements.txt b/backend/api/python_http_client/test-requirements.txt new file mode 100644 index 000000000..4ed3991cb --- /dev/null +++ b/backend/api/python_http_client/test-requirements.txt @@ -0,0 +1,3 @@ +pytest~=4.6.7 # needed for python 2.7+3.4 +pytest-cov>=2.8.1 +pytest-randomly==1.2.3 # needed for python 2.7+3.4 diff --git a/backend/api/python_http_client/test/__init__.py b/backend/api/python_http_client/test/__init__.py new file mode 100644 index 000000000..fc9b9eeb6 --- /dev/null +++ b/backend/api/python_http_client/test/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/backend/api/python_http_client/test/test_api_cron_schedule.py b/backend/api/python_http_client/test/test_api_cron_schedule.py new file mode 100644 index 000000000..e2a3f187f --- /dev/null +++ b/backend/api/python_http_client/test/test_api_cron_schedule.py @@ -0,0 +1,68 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
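The generated test stubs that follow all repeat one pattern: construct the model with and without optional fields, then rely on to_dict()-based equality. A condensed, self-contained version of that pattern (editorial sketch; the test class name is invented here) using the ApiUrl model defined earlier:

import unittest

from kfp_server_api.models.api_url import ApiUrl


class TestApiUrlRoundTrip(unittest.TestCase):
    """Condensed form of the generated round-trip test pattern."""

    def test_to_dict_and_equality(self):
        url = ApiUrl(pipeline_url="gs://bucket/pipeline.yaml")
        # to_dict() walks openapi_types, so only declared attributes appear.
        self.assertEqual(url.to_dict(), {"pipeline_url": "gs://bucket/pipeline.yaml"})
        # __eq__ is defined in terms of to_dict(), so empty instances compare equal.
        self.assertEqual(ApiUrl(), ApiUrl())


if __name__ == "__main__":
    unittest.main()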
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_cron_schedule import ApiCronSchedule # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiCronSchedule(unittest.TestCase): + """ApiCronSchedule unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiCronSchedule + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_cron_schedule.ApiCronSchedule() # noqa: E501 + if include_optional : + return ApiCronSchedule( + start_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + end_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + cron = '0' + ) + else : + return ApiCronSchedule( + ) + + def testApiCronSchedule(self): + """Test ApiCronSchedule""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_experiment.py b/backend/api/python_http_client/test/test_api_experiment.py new file mode 100644 index 000000000..3a071b8b8 --- /dev/null +++ b/backend/api/python_http_client/test/test_api_experiment.py @@ -0,0 +1,78 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_experiment import ApiExperiment # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiExperiment(unittest.TestCase): + """ApiExperiment unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiExperiment + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_experiment.ApiExperiment() # noqa: E501 + if include_optional : + return ApiExperiment( + id = '0', + name = '0', + description = '0', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + resource_references = [ + kfp_server_api.models.api_resource_reference.apiResourceReference( + key = kfp_server_api.models.api_resource_key.apiResourceKey( + type = 'UNKNOWN_RESOURCE_TYPE', + id = '0', ), + name = '0', + relationship = 'UNKNOWN_RELATIONSHIP', ) + ], + storage_state = 'STORAGESTATE_UNSPECIFIED' + ) + else : + return ApiExperiment( + ) + + def testApiExperiment(self): + """Test ApiExperiment""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_get_template_response.py b/backend/api/python_http_client/test/test_api_get_template_response.py new file mode 100644 index 000000000..827d7b3a0 --- /dev/null +++ b/backend/api/python_http_client/test/test_api_get_template_response.py @@ -0,0 +1,66 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_get_template_response import ApiGetTemplateResponse # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiGetTemplateResponse(unittest.TestCase): + """ApiGetTemplateResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiGetTemplateResponse + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_get_template_response.ApiGetTemplateResponse() # noqa: E501 + if include_optional : + return ApiGetTemplateResponse( + template = '0' + ) + else : + return ApiGetTemplateResponse( + ) + + def testApiGetTemplateResponse(self): + """Test ApiGetTemplateResponse""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_job.py b/backend/api/python_http_client/test/test_api_job.py new file mode 100644 index 000000000..0e3df28a5 --- /dev/null +++ b/backend/api/python_http_client/test/test_api_job.py @@ -0,0 +1,104 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_job import ApiJob # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiJob(unittest.TestCase): + """ApiJob unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiJob + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_job.ApiJob() # noqa: E501 + if include_optional : + return ApiJob( + id = '0', + name = '0', + description = '0', + pipeline_spec = kfp_server_api.models.api_pipeline_spec.apiPipelineSpec( + pipeline_id = '0', + pipeline_name = '0', + workflow_manifest = '0', + pipeline_manifest = '0', + parameters = [ + kfp_server_api.models.api_parameter.apiParameter( + name = '0', + value = '0', ) + ], ), + resource_references = [ + kfp_server_api.models.api_resource_reference.apiResourceReference( + key = kfp_server_api.models.api_resource_key.apiResourceKey( + type = 'UNKNOWN_RESOURCE_TYPE', + id = '0', ), + name = '0', + relationship = 'UNKNOWN_RELATIONSHIP', ) + ], + service_account = '0', + max_concurrency = '0', + trigger = kfp_server_api.models.api_trigger.apiTrigger( + cron_schedule = kfp_server_api.models.api_cron_schedule.ApiCronSchedule( + start_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + end_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + cron = '0', ), + periodic_schedule = kfp_server_api.models.api_periodic_schedule.ApiPeriodicSchedule( + start_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + end_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + interval_second = '0', ), ), + mode = 'UNKNOWN_MODE', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + updated_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + status = '0', + error = '0', + enabled = True, + no_catchup = True + ) + else : + return ApiJob( + ) + + def testApiJob(self): + """Test ApiJob""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_list_experiments_response.py b/backend/api/python_http_client/test/test_api_list_experiments_response.py new file mode 100644 index 000000000..3bcd79a5d --- /dev/null +++ b/backend/api/python_http_client/test/test_api_list_experiments_response.py @@ -0,0 +1,83 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
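Beyond the generated stub above, a short sketch of building the same ApiJob trigger directly with the schedule model classes from this package; the cron expression and interval are illustrative values only.

# Sketch: constructing a trigger with the generated schedule models.
import datetime

from kfp_server_api.models.api_cron_schedule import ApiCronSchedule
from kfp_server_api.models.api_periodic_schedule import ApiPeriodicSchedule
from kfp_server_api.models.api_trigger import ApiTrigger

start = datetime.datetime(2013, 10, 20, 19, 20, 30)

# A job triggered by a cron schedule (illustrative expression).
cron_trigger = ApiTrigger(
    cron_schedule=ApiCronSchedule(start_time=start, cron='0 0 * * * ?'),
)

# Or a job triggered on a fixed interval, expressed in seconds as a string,
# matching the int64-as-string convention used by the generated models.
periodic_trigger = ApiTrigger(
    periodic_schedule=ApiPeriodicSchedule(start_time=start, interval_second='3600'),
)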
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_list_experiments_response import ApiListExperimentsResponse # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiListExperimentsResponse(unittest.TestCase): + """ApiListExperimentsResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiListExperimentsResponse + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_list_experiments_response.ApiListExperimentsResponse() # noqa: E501 + if include_optional : + return ApiListExperimentsResponse( + experiments = [ + kfp_server_api.models.api_experiment.apiExperiment( + id = '0', + name = '0', + description = '0', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + resource_references = [ + kfp_server_api.models.api_resource_reference.apiResourceReference( + key = kfp_server_api.models.api_resource_key.apiResourceKey( + type = 'UNKNOWN_RESOURCE_TYPE', + id = '0', ), + name = '0', + relationship = 'UNKNOWN_RELATIONSHIP', ) + ], + storage_state = 'STORAGESTATE_UNSPECIFIED', ) + ], + total_size = 56, + next_page_token = '0' + ) + else : + return ApiListExperimentsResponse( + ) + + def testApiListExperimentsResponse(self): + """Test ApiListExperimentsResponse""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_list_jobs_response.py b/backend/api/python_http_client/test/test_api_list_jobs_response.py new file mode 100644 index 000000000..7d1963924 --- /dev/null +++ b/backend/api/python_http_client/test/test_api_list_jobs_response.py @@ -0,0 +1,109 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_list_jobs_response import ApiListJobsResponse # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiListJobsResponse(unittest.TestCase): + """ApiListJobsResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiListJobsResponse + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_list_jobs_response.ApiListJobsResponse() # noqa: E501 + if include_optional : + return ApiListJobsResponse( + jobs = [ + kfp_server_api.models.api_job.apiJob( + id = '0', + name = '0', + description = '0', + pipeline_spec = kfp_server_api.models.api_pipeline_spec.apiPipelineSpec( + pipeline_id = '0', + pipeline_name = '0', + workflow_manifest = '0', + pipeline_manifest = '0', + parameters = [ + kfp_server_api.models.api_parameter.apiParameter( + name = '0', + value = '0', ) + ], ), + resource_references = [ + kfp_server_api.models.api_resource_reference.apiResourceReference( + key = kfp_server_api.models.api_resource_key.apiResourceKey( + type = 'UNKNOWN_RESOURCE_TYPE', + id = '0', ), + name = '0', + relationship = 'UNKNOWN_RELATIONSHIP', ) + ], + service_account = '0', + max_concurrency = '0', + trigger = kfp_server_api.models.api_trigger.apiTrigger( + cron_schedule = kfp_server_api.models.api_cron_schedule.ApiCronSchedule( + start_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + end_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + cron = '0', ), + periodic_schedule = kfp_server_api.models.api_periodic_schedule.ApiPeriodicSchedule( + start_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + end_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + interval_second = '0', ), ), + mode = 'UNKNOWN_MODE', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + updated_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + status = '0', + error = '0', + enabled = True, + no_catchup = True, ) + ], + total_size = 56, + next_page_token = '0' + ) + else : + return ApiListJobsResponse( + ) + + def testApiListJobsResponse(self): + """Test ApiListJobsResponse""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_list_pipeline_versions_response.py b/backend/api/python_http_client/test/test_api_list_pipeline_versions_response.py new file mode 100644 index 000000000..b52f8808b --- /dev/null +++ b/backend/api/python_http_client/test/test_api_list_pipeline_versions_response.py @@ -0,0 +1,89 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_list_pipeline_versions_response import ApiListPipelineVersionsResponse # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiListPipelineVersionsResponse(unittest.TestCase): + """ApiListPipelineVersionsResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiListPipelineVersionsResponse + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_list_pipeline_versions_response.ApiListPipelineVersionsResponse() # noqa: E501 + if include_optional : + return ApiListPipelineVersionsResponse( + versions = [ + kfp_server_api.models.api_pipeline_version.apiPipelineVersion( + id = '0', + name = '0', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + parameters = [ + kfp_server_api.models.api_parameter.apiParameter( + name = '0', + value = '0', ) + ], + code_source_url = '0', + package_url = kfp_server_api.models.api_url.apiUrl( + pipeline_url = '0', ), + resource_references = [ + kfp_server_api.models.api_resource_reference.apiResourceReference( + key = kfp_server_api.models.api_resource_key.apiResourceKey( + type = 'UNKNOWN_RESOURCE_TYPE', + id = '0', ), + name = '0', + relationship = 'UNKNOWN_RELATIONSHIP', ) + ], ) + ], + next_page_token = '0', + total_size = 56 + ) + else : + return ApiListPipelineVersionsResponse( + ) + + def testApiListPipelineVersionsResponse(self): + """Test ApiListPipelineVersionsResponse""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_list_pipelines_response.py b/backend/api/python_http_client/test/test_api_list_pipelines_response.py new file mode 100644 index 000000000..eb811417a --- /dev/null +++ b/backend/api/python_http_client/test/test_api_list_pipelines_response.py @@ -0,0 +1,97 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
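The List*Response models above all share the same total_size / next_page_token pagination fields; a small helper sketch that walks pages of any generated list call. The page_size and page_token keyword names are assumptions about the generated list methods, not confirmed by this patch.

# Sketch: generic pagination over the generated list_* calls.
def iter_pages(list_fn, page_size=100, **kwargs):
    """Yield successive response pages from a generated list_* call.

    `list_fn` is any callable returning a response object with a
    `next_page_token` attribute, as the List*Response models in this patch do;
    the page_size/page_token keyword names are assumed.
    """
    page_token = ''
    while True:
        page = list_fn(page_size=page_size, page_token=page_token, **kwargs)
        yield page
        page_token = page.next_page_token
        if not page_token:
            break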
+ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_list_pipelines_response import ApiListPipelinesResponse # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiListPipelinesResponse(unittest.TestCase): + """ApiListPipelinesResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiListPipelinesResponse + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_list_pipelines_response.ApiListPipelinesResponse() # noqa: E501 + if include_optional : + return ApiListPipelinesResponse( + pipelines = [ + kfp_server_api.models.api_pipeline.apiPipeline( + id = '0', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + name = '0', + description = '0', + parameters = [ + kfp_server_api.models.api_parameter.apiParameter( + name = '0', + value = '0', ) + ], + url = kfp_server_api.models.api_url.apiUrl( + pipeline_url = '0', ), + error = '0', + default_version = kfp_server_api.models.api_pipeline_version.apiPipelineVersion( + id = '0', + name = '0', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + code_source_url = '0', + package_url = kfp_server_api.models.api_url.apiUrl( + pipeline_url = '0', ), + resource_references = [ + kfp_server_api.models.api_resource_reference.apiResourceReference( + key = kfp_server_api.models.api_resource_key.apiResourceKey( + type = 'UNKNOWN_RESOURCE_TYPE', + id = '0', ), + name = '0', + relationship = 'UNKNOWN_RELATIONSHIP', ) + ], ), ) + ], + total_size = 56, + next_page_token = '0' + ) + else : + return ApiListPipelinesResponse( + ) + + def testApiListPipelinesResponse(self): + """Test ApiListPipelinesResponse""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_list_runs_response.py b/backend/api/python_http_client/test/test_api_list_runs_response.py new file mode 100644 index 000000000..e2ed318d8 --- /dev/null +++ b/backend/api/python_http_client/test/test_api_list_runs_response.py @@ -0,0 +1,105 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_list_runs_response import ApiListRunsResponse # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiListRunsResponse(unittest.TestCase): + """ApiListRunsResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiListRunsResponse + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_list_runs_response.ApiListRunsResponse() # noqa: E501 + if include_optional : + return ApiListRunsResponse( + runs = [ + kfp_server_api.models.api_run.apiRun( + id = '0', + name = '0', + storage_state = 'STORAGESTATE_AVAILABLE', + description = '0', + pipeline_spec = kfp_server_api.models.api_pipeline_spec.apiPipelineSpec( + pipeline_id = '0', + pipeline_name = '0', + workflow_manifest = '0', + pipeline_manifest = '0', + parameters = [ + kfp_server_api.models.api_parameter.apiParameter( + name = '0', + value = '0', ) + ], ), + resource_references = [ + kfp_server_api.models.api_resource_reference.apiResourceReference( + key = kfp_server_api.models.api_resource_key.apiResourceKey( + type = 'UNKNOWN_RESOURCE_TYPE', + id = '0', ), + name = '0', + relationship = 'UNKNOWN_RELATIONSHIP', ) + ], + service_account = '0', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + scheduled_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + finished_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + status = '0', + error = '0', + metrics = [ + kfp_server_api.models.api_run_metric.apiRunMetric( + name = '0', + node_id = '0', + number_value = 1.337, + format = 'UNSPECIFIED', ) + ], ) + ], + total_size = 56, + next_page_token = '0' + ) + else : + return ApiListRunsResponse( + ) + + def testApiListRunsResponse(self): + """Test ApiListRunsResponse""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_parameter.py b/backend/api/python_http_client/test/test_api_parameter.py new file mode 100644 index 000000000..c3e6cdb08 --- /dev/null +++ b/backend/api/python_http_client/test/test_api_parameter.py @@ -0,0 +1,67 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_parameter import ApiParameter # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiParameter(unittest.TestCase): + """ApiParameter unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiParameter + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_parameter.ApiParameter() # noqa: E501 + if include_optional : + return ApiParameter( + name = '0', + value = '0' + ) + else : + return ApiParameter( + ) + + def testApiParameter(self): + """Test ApiParameter""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_periodic_schedule.py b/backend/api/python_http_client/test/test_api_periodic_schedule.py new file mode 100644 index 000000000..310da026d --- /dev/null +++ b/backend/api/python_http_client/test/test_api_periodic_schedule.py @@ -0,0 +1,68 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_periodic_schedule import ApiPeriodicSchedule # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiPeriodicSchedule(unittest.TestCase): + """ApiPeriodicSchedule unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiPeriodicSchedule + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_periodic_schedule.ApiPeriodicSchedule() # noqa: E501 + if include_optional : + return ApiPeriodicSchedule( + start_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + end_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + interval_second = '0' + ) + else : + return ApiPeriodicSchedule( + ) + + def testApiPeriodicSchedule(self): + """Test ApiPeriodicSchedule""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_pipeline.py b/backend/api/python_http_client/test/test_api_pipeline.py new file mode 100644 index 000000000..34c0dc7fa --- /dev/null +++ b/backend/api/python_http_client/test/test_api_pipeline.py @@ -0,0 +1,97 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_pipeline import ApiPipeline # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiPipeline(unittest.TestCase): + """ApiPipeline unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiPipeline + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_pipeline.ApiPipeline() # noqa: E501 + if include_optional : + return ApiPipeline( + id = '0', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + name = '0', + description = '0', + parameters = [ + kfp_server_api.models.api_parameter.apiParameter( + name = '0', + value = '0', ) + ], + url = kfp_server_api.models.api_url.apiUrl( + pipeline_url = '0', ), + error = '0', + default_version = kfp_server_api.models.api_pipeline_version.apiPipelineVersion( + id = '0', + name = '0', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + parameters = [ + kfp_server_api.models.api_parameter.apiParameter( + name = '0', + value = '0', ) + ], + code_source_url = '0', + package_url = kfp_server_api.models.api_url.apiUrl( + pipeline_url = '0', ), + resource_references = [ + kfp_server_api.models.api_resource_reference.apiResourceReference( + key = kfp_server_api.models.api_resource_key.apiResourceKey( + type = 'UNKNOWN_RESOURCE_TYPE', + id = '0', ), + name = '0', + relationship = 'UNKNOWN_RELATIONSHIP', ) + ], ) + ) + else : + return ApiPipeline( + ) + + def testApiPipeline(self): + """Test ApiPipeline""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_pipeline_runtime.py b/backend/api/python_http_client/test/test_api_pipeline_runtime.py new file mode 100644 index 000000000..cd534fe28 --- /dev/null +++ b/backend/api/python_http_client/test/test_api_pipeline_runtime.py @@ -0,0 +1,67 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_pipeline_runtime import ApiPipelineRuntime # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiPipelineRuntime(unittest.TestCase): + """ApiPipelineRuntime unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiPipelineRuntime + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_pipeline_runtime.ApiPipelineRuntime() # noqa: E501 + if include_optional : + return ApiPipelineRuntime( + pipeline_manifest = '0', + workflow_manifest = '0' + ) + else : + return ApiPipelineRuntime( + ) + + def testApiPipelineRuntime(self): + """Test ApiPipelineRuntime""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_pipeline_spec.py b/backend/api/python_http_client/test/test_api_pipeline_spec.py new file mode 100644 index 000000000..8359aee60 --- /dev/null +++ b/backend/api/python_http_client/test/test_api_pipeline_spec.py @@ -0,0 +1,74 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_pipeline_spec import ApiPipelineSpec # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiPipelineSpec(unittest.TestCase): + """ApiPipelineSpec unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiPipelineSpec + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_pipeline_spec.ApiPipelineSpec() # noqa: E501 + if include_optional : + return ApiPipelineSpec( + pipeline_id = '0', + pipeline_name = '0', + workflow_manifest = '0', + pipeline_manifest = '0', + parameters = [ + kfp_server_api.models.api_parameter.apiParameter( + name = '0', + value = '0', ) + ] + ) + else : + return ApiPipelineSpec( + ) + + def testApiPipelineSpec(self): + """Test ApiPipelineSpec""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_pipeline_version.py b/backend/api/python_http_client/test/test_api_pipeline_version.py new file mode 100644 index 000000000..1b5eaaa64 --- /dev/null +++ b/backend/api/python_http_client/test/test_api_pipeline_version.py @@ -0,0 +1,84 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_pipeline_version import ApiPipelineVersion # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiPipelineVersion(unittest.TestCase): + """ApiPipelineVersion unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiPipelineVersion + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_pipeline_version.ApiPipelineVersion() # noqa: E501 + if include_optional : + return ApiPipelineVersion( + id = '0', + name = '0', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + parameters = [ + kfp_server_api.models.api_parameter.apiParameter( + name = '0', + value = '0', ) + ], + code_source_url = '0', + package_url = kfp_server_api.models.api_url.apiUrl( + pipeline_url = '0', ), + resource_references = [ + kfp_server_api.models.api_resource_reference.apiResourceReference( + key = kfp_server_api.models.api_resource_key.apiResourceKey( + type = 'UNKNOWN_RESOURCE_TYPE', + id = '0', ), + name = '0', + relationship = 'UNKNOWN_RELATIONSHIP', ) + ] + ) + else : + return ApiPipelineVersion( + ) + + def testApiPipelineVersion(self): + """Test ApiPipelineVersion""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_read_artifact_response.py b/backend/api/python_http_client/test/test_api_read_artifact_response.py new file mode 100644 index 000000000..7be5b8bab --- /dev/null +++ b/backend/api/python_http_client/test/test_api_read_artifact_response.py @@ -0,0 +1,66 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_read_artifact_response import ApiReadArtifactResponse # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiReadArtifactResponse(unittest.TestCase): + """ApiReadArtifactResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiReadArtifactResponse + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_read_artifact_response.ApiReadArtifactResponse() # noqa: E501 + if include_optional : + return ApiReadArtifactResponse( + data = 'YQ==' + ) + else : + return ApiReadArtifactResponse( + ) + + def testApiReadArtifactResponse(self): + """Test ApiReadArtifactResponse""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_relationship.py b/backend/api/python_http_client/test/test_api_relationship.py new file mode 100644 index 000000000..7006ca1ed --- /dev/null +++ b/backend/api/python_http_client/test/test_api_relationship.py @@ -0,0 +1,65 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_relationship import ApiRelationship # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiRelationship(unittest.TestCase): + """ApiRelationship unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiRelationship + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_relationship.ApiRelationship() # noqa: E501 + if include_optional : + return ApiRelationship( + ) + else : + return ApiRelationship( + ) + + def testApiRelationship(self): + """Test ApiRelationship""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_report_run_metrics_request.py b/backend/api/python_http_client/test/test_api_report_run_metrics_request.py new file mode 100644 index 000000000..6f5061dd9 --- /dev/null +++ b/backend/api/python_http_client/test/test_api_report_run_metrics_request.py @@ -0,0 +1,73 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_report_run_metrics_request import ApiReportRunMetricsRequest # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiReportRunMetricsRequest(unittest.TestCase): + """ApiReportRunMetricsRequest unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiReportRunMetricsRequest + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_report_run_metrics_request.ApiReportRunMetricsRequest() # noqa: E501 + if include_optional : + return ApiReportRunMetricsRequest( + run_id = '0', + metrics = [ + kfp_server_api.models.api_run_metric.apiRunMetric( + name = '0', + node_id = '0', + number_value = 1.337, + format = 'UNSPECIFIED', ) + ] + ) + else : + return ApiReportRunMetricsRequest( + ) + + def testApiReportRunMetricsRequest(self): + """Test ApiReportRunMetricsRequest""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_report_run_metrics_response.py b/backend/api/python_http_client/test/test_api_report_run_metrics_response.py new file mode 100644 index 000000000..9a75b9839 --- /dev/null +++ b/backend/api/python_http_client/test/test_api_report_run_metrics_response.py @@ -0,0 +1,72 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_report_run_metrics_response import ApiReportRunMetricsResponse # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiReportRunMetricsResponse(unittest.TestCase): + """ApiReportRunMetricsResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiReportRunMetricsResponse + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_report_run_metrics_response.ApiReportRunMetricsResponse() # noqa: E501 + if include_optional : + return ApiReportRunMetricsResponse( + results = [ + kfp_server_api.models.report_run_metrics_response_report_run_metric_result.ReportRunMetricsResponseReportRunMetricResult( + metric_name = '0', + metric_node_id = '0', + status = 'UNSPECIFIED', + message = '0', ) + ] + ) + else : + return ApiReportRunMetricsResponse( + ) + + def testApiReportRunMetricsResponse(self): + """Test ApiReportRunMetricsResponse""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_resource_key.py b/backend/api/python_http_client/test/test_api_resource_key.py new file mode 100644 index 000000000..a05305575 --- /dev/null +++ b/backend/api/python_http_client/test/test_api_resource_key.py @@ -0,0 +1,67 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_resource_key import ApiResourceKey # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiResourceKey(unittest.TestCase): + """ApiResourceKey unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiResourceKey + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_resource_key.ApiResourceKey() # noqa: E501 + if include_optional : + return ApiResourceKey( + type = 'UNKNOWN_RESOURCE_TYPE', + id = '0' + ) + else : + return ApiResourceKey( + ) + + def testApiResourceKey(self): + """Test ApiResourceKey""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_resource_reference.py b/backend/api/python_http_client/test/test_api_resource_reference.py new file mode 100644 index 000000000..315085c0d --- /dev/null +++ b/backend/api/python_http_client/test/test_api_resource_reference.py @@ -0,0 +1,70 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_resource_reference import ApiResourceReference # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiResourceReference(unittest.TestCase): + """ApiResourceReference unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiResourceReference + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_resource_reference.ApiResourceReference() # noqa: E501 + if include_optional : + return ApiResourceReference( + key = kfp_server_api.models.api_resource_key.apiResourceKey( + type = 'UNKNOWN_RESOURCE_TYPE', + id = '0', ), + name = '0', + relationship = 'UNKNOWN_RELATIONSHIP' + ) + else : + return ApiResourceReference( + ) + + def testApiResourceReference(self): + """Test ApiResourceReference""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_resource_type.py b/backend/api/python_http_client/test/test_api_resource_type.py new file mode 100644 index 000000000..b97a1c66c --- /dev/null +++ b/backend/api/python_http_client/test/test_api_resource_type.py @@ -0,0 +1,65 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_resource_type import ApiResourceType # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiResourceType(unittest.TestCase): + """ApiResourceType unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiResourceType + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_resource_type.ApiResourceType() # noqa: E501 + if include_optional : + return ApiResourceType( + ) + else : + return ApiResourceType( + ) + + def testApiResourceType(self): + """Test ApiResourceType""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_run.py b/backend/api/python_http_client/test/test_api_run.py new file mode 100644 index 000000000..b824d8388 --- /dev/null +++ b/backend/api/python_http_client/test/test_api_run.py @@ -0,0 +1,100 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_run import ApiRun # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiRun(unittest.TestCase): + """ApiRun unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiRun + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_run.ApiRun() # noqa: E501 + if include_optional : + return ApiRun( + id = '0', + name = '0', + storage_state = 'STORAGESTATE_AVAILABLE', + description = '0', + pipeline_spec = kfp_server_api.models.api_pipeline_spec.apiPipelineSpec( + pipeline_id = '0', + pipeline_name = '0', + workflow_manifest = '0', + pipeline_manifest = '0', + parameters = [ + kfp_server_api.models.api_parameter.apiParameter( + name = '0', + value = '0', ) + ], ), + resource_references = [ + kfp_server_api.models.api_resource_reference.apiResourceReference( + key = kfp_server_api.models.api_resource_key.apiResourceKey( + type = 'UNKNOWN_RESOURCE_TYPE', + id = '0', ), + name = '0', + relationship = 'UNKNOWN_RELATIONSHIP', ) + ], + service_account = '0', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + scheduled_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + finished_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + status = '0', + error = '0', + metrics = [ + kfp_server_api.models.api_run_metric.apiRunMetric( + name = '0', + node_id = '0', + number_value = 1.337, + format = 'UNSPECIFIED', ) + ] + ) + else : + return ApiRun( + ) + + def testApiRun(self): + """Test ApiRun""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_run_detail.py b/backend/api/python_http_client/test/test_api_run_detail.py new file mode 100644 index 000000000..750dd390d --- /dev/null +++ b/backend/api/python_http_client/test/test_api_run_detail.py @@ -0,0 +1,104 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_run_detail import ApiRunDetail # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiRunDetail(unittest.TestCase): + """ApiRunDetail unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiRunDetail + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_run_detail.ApiRunDetail() # noqa: E501 + if include_optional : + return ApiRunDetail( + run = kfp_server_api.models.api_run.apiRun( + id = '0', + name = '0', + storage_state = 'STORAGESTATE_AVAILABLE', + description = '0', + pipeline_spec = kfp_server_api.models.api_pipeline_spec.apiPipelineSpec( + pipeline_id = '0', + pipeline_name = '0', + workflow_manifest = '0', + pipeline_manifest = '0', + parameters = [ + kfp_server_api.models.api_parameter.apiParameter( + name = '0', + value = '0', ) + ], ), + resource_references = [ + kfp_server_api.models.api_resource_reference.apiResourceReference( + key = kfp_server_api.models.api_resource_key.apiResourceKey( + type = 'UNKNOWN_RESOURCE_TYPE', + id = '0', ), + name = '0', + relationship = 'UNKNOWN_RELATIONSHIP', ) + ], + service_account = '0', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + scheduled_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + finished_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + status = '0', + error = '0', + metrics = [ + kfp_server_api.models.api_run_metric.apiRunMetric( + name = '0', + node_id = '0', + number_value = 1.337, + format = 'UNSPECIFIED', ) + ], ), + pipeline_runtime = kfp_server_api.models.api_pipeline_runtime.apiPipelineRuntime( + pipeline_manifest = '0', + workflow_manifest = '0', ) + ) + else : + return ApiRunDetail( + ) + + def testApiRunDetail(self): + """Test ApiRunDetail""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_run_metric.py b/backend/api/python_http_client/test/test_api_run_metric.py new file mode 100644 index 000000000..1767b377e --- /dev/null +++ b/backend/api/python_http_client/test/test_api_run_metric.py @@ -0,0 +1,69 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_run_metric import ApiRunMetric # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiRunMetric(unittest.TestCase): + """ApiRunMetric unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiRunMetric + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_run_metric.ApiRunMetric() # noqa: E501 + if include_optional : + return ApiRunMetric( + name = '0', + node_id = '0', + number_value = 1.337, + format = 'UNSPECIFIED' + ) + else : + return ApiRunMetric( + ) + + def testApiRunMetric(self): + """Test ApiRunMetric""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_status.py b/backend/api/python_http_client/test/test_api_status.py new file mode 100644 index 000000000..f039813fc --- /dev/null +++ b/backend/api/python_http_client/test/test_api_status.py @@ -0,0 +1,72 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_status import ApiStatus # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiStatus(unittest.TestCase): + """ApiStatus unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiStatus + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_status.ApiStatus() # noqa: E501 + if include_optional : + return ApiStatus( + error = '0', + code = 56, + details = [ + kfp_server_api.models.protobuf_any.protobufAny( + type_url = '0', + value = 'YQ==', ) + ] + ) + else : + return ApiStatus( + ) + + def testApiStatus(self): + """Test ApiStatus""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_trigger.py b/backend/api/python_http_client/test/test_api_trigger.py new file mode 100644 index 000000000..dab8700e1 --- /dev/null +++ b/backend/api/python_http_client/test/test_api_trigger.py @@ -0,0 +1,73 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_trigger import ApiTrigger # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiTrigger(unittest.TestCase): + """ApiTrigger unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiTrigger + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_trigger.ApiTrigger() # noqa: E501 + if include_optional : + return ApiTrigger( + cron_schedule = kfp_server_api.models.api_cron_schedule.ApiCronSchedule( + start_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + end_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + cron = '0', ), + periodic_schedule = kfp_server_api.models.api_periodic_schedule.ApiPeriodicSchedule( + start_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + end_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + interval_second = '0', ) + ) + else : + return ApiTrigger( + ) + + def testApiTrigger(self): + """Test ApiTrigger""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_api_url.py b/backend/api/python_http_client/test/test_api_url.py new file mode 100644 index 000000000..950075ef8 --- /dev/null +++ b/backend/api/python_http_client/test/test_api_url.py @@ -0,0 +1,66 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.api_url import ApiUrl # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestApiUrl(unittest.TestCase): + """ApiUrl unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ApiUrl + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.api_url.ApiUrl() # noqa: E501 + if include_optional : + return ApiUrl( + pipeline_url = '0' + ) + else : + return ApiUrl( + ) + + def testApiUrl(self): + """Test ApiUrl""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_experiment_service_api.py b/backend/api/python_http_client/test/test_experiment_service_api.py new file mode 100644 index 000000000..fff677f49 --- /dev/null +++ b/backend/api/python_http_client/test/test_experiment_service_api.py @@ -0,0 +1,89 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest + +import kfp_server_api +from kfp_server_api.api.experiment_service_api import ExperimentServiceApi # noqa: E501 +from kfp_server_api.rest import ApiException + + +class TestExperimentServiceApi(unittest.TestCase): + """ExperimentServiceApi unit test stubs""" + + def setUp(self): + self.api = kfp_server_api.api.experiment_service_api.ExperimentServiceApi() # noqa: E501 + + def tearDown(self): + pass + + def test_archive_experiment(self): + """Test case for archive_experiment + + Archives an experiment and the experiment's runs and jobs. # noqa: E501 + """ + pass + + def test_create_experiment(self): + """Test case for create_experiment + + Creates a new experiment. # noqa: E501 + """ + pass + + def test_delete_experiment(self): + """Test case for delete_experiment + + Deletes an experiment without deleting the experiment's runs and jobs. To avoid unexpected behaviors, delete an experiment's runs and jobs before deleting the experiment. # noqa: E501 + """ + pass + + def test_get_experiment(self): + """Test case for get_experiment + + Finds a specific experiment by ID. # noqa: E501 + """ + pass + + def test_list_experiment(self): + """Test case for list_experiment + + Finds all experiments. Supports pagination, and sorting on certain fields. 
# noqa: E501 + """ + pass + + def test_unarchive_experiment(self): + """Test case for unarchive_experiment + + Restores an archived experiment. The experiment's archived runs and jobs will stay archived. # noqa: E501 + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_experiment_storage_state.py b/backend/api/python_http_client/test/test_experiment_storage_state.py new file mode 100644 index 000000000..c22bc698a --- /dev/null +++ b/backend/api/python_http_client/test/test_experiment_storage_state.py @@ -0,0 +1,65 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.experiment_storage_state import ExperimentStorageState # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestExperimentStorageState(unittest.TestCase): + """ExperimentStorageState unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ExperimentStorageState + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.experiment_storage_state.ExperimentStorageState() # noqa: E501 + if include_optional : + return ExperimentStorageState( + ) + else : + return ExperimentStorageState( + ) + + def testExperimentStorageState(self): + """Test ExperimentStorageState""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_job_mode.py b/backend/api/python_http_client/test/test_job_mode.py new file mode 100644 index 000000000..5d3a3e12e --- /dev/null +++ b/backend/api/python_http_client/test/test_job_mode.py @@ -0,0 +1,65 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.job_mode import JobMode # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestJobMode(unittest.TestCase): + """JobMode unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test JobMode + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.job_mode.JobMode() # noqa: E501 + if include_optional : + return JobMode( + ) + else : + return JobMode( + ) + + def testJobMode(self): + """Test JobMode""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_job_service_api.py b/backend/api/python_http_client/test/test_job_service_api.py new file mode 100644 index 000000000..cf64cdd8e --- /dev/null +++ b/backend/api/python_http_client/test/test_job_service_api.py @@ -0,0 +1,89 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest + +import kfp_server_api +from kfp_server_api.api.job_service_api import JobServiceApi # noqa: E501 +from kfp_server_api.rest import ApiException + + +class TestJobServiceApi(unittest.TestCase): + """JobServiceApi unit test stubs""" + + def setUp(self): + self.api = kfp_server_api.api.job_service_api.JobServiceApi() # noqa: E501 + + def tearDown(self): + pass + + def test_create_job(self): + """Test case for create_job + + Creates a new job. # noqa: E501 + """ + pass + + def test_delete_job(self): + """Test case for delete_job + + Deletes a job. # noqa: E501 + """ + pass + + def test_disable_job(self): + """Test case for disable_job + + Stops a job and all its associated runs. The job is not deleted. # noqa: E501 + """ + pass + + def test_enable_job(self): + """Test case for enable_job + + Restarts a job that was previously stopped. All runs associated with the job will continue. # noqa: E501 + """ + pass + + def test_get_job(self): + """Test case for get_job + + Finds a specific job by ID. # noqa: E501 + """ + pass + + def test_list_jobs(self): + """Test case for list_jobs + + Finds all jobs. 
# noqa: E501 + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_pipeline_service_api.py b/backend/api/python_http_client/test/test_pipeline_service_api.py new file mode 100644 index 000000000..ff4adee76 --- /dev/null +++ b/backend/api/python_http_client/test/test_pipeline_service_api.py @@ -0,0 +1,117 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest + +import kfp_server_api +from kfp_server_api.api.pipeline_service_api import PipelineServiceApi # noqa: E501 +from kfp_server_api.rest import ApiException + + +class TestPipelineServiceApi(unittest.TestCase): + """PipelineServiceApi unit test stubs""" + + def setUp(self): + self.api = kfp_server_api.api.pipeline_service_api.PipelineServiceApi() # noqa: E501 + + def tearDown(self): + pass + + def test_create_pipeline(self): + """Test case for create_pipeline + + Creates a pipeline. # noqa: E501 + """ + pass + + def test_create_pipeline_version(self): + """Test case for create_pipeline_version + + Adds a pipeline version to the specified pipeline. # noqa: E501 + """ + pass + + def test_delete_pipeline(self): + """Test case for delete_pipeline + + Deletes a pipeline and its pipeline versions. # noqa: E501 + """ + pass + + def test_delete_pipeline_version(self): + """Test case for delete_pipeline_version + + Deletes a pipeline version by pipeline version ID. If the deleted pipeline version is the default pipeline version, the pipeline's default version changes to the pipeline's most recent pipeline version. If there are no remaining pipeline versions, the pipeline will have no default version. Examines the run_service_api.ipynb notebook to learn more about creating a run using a pipeline version (https://github.com/kubeflow/pipelines/blob/master/tools/benchmarks/run_service_api.ipynb). # noqa: E501 + """ + pass + + def test_get_pipeline(self): + """Test case for get_pipeline + + Finds a specific pipeline by ID. # noqa: E501 + """ + pass + + def test_get_pipeline_version(self): + """Test case for get_pipeline_version + + Gets a pipeline version by pipeline version ID. # noqa: E501 + """ + pass + + def test_get_pipeline_version_template(self): + """Test case for get_pipeline_version_template + + Returns a YAML template that contains the specified pipeline version's description, parameters and metadata. # noqa: E501 + """ + pass + + def test_get_template(self): + """Test case for get_template + + Returns a single YAML template that contains the description, parameters, and metadata associated with the pipeline provided. 
# noqa: E501 + """ + pass + + def test_list_pipeline_versions(self): + """Test case for list_pipeline_versions + + Lists all pipeline versions of a given pipeline. # noqa: E501 + """ + pass + + def test_list_pipelines(self): + """Test case for list_pipelines + + Finds all pipelines. # noqa: E501 + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_pipeline_upload_service_api.py b/backend/api/python_http_client/test/test_pipeline_upload_service_api.py new file mode 100644 index 000000000..2149ffaf8 --- /dev/null +++ b/backend/api/python_http_client/test/test_pipeline_upload_service_api.py @@ -0,0 +1,59 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest + +import kfp_server_api +from kfp_server_api.api.pipeline_upload_service_api import PipelineUploadServiceApi # noqa: E501 +from kfp_server_api.rest import ApiException + + +class TestPipelineUploadServiceApi(unittest.TestCase): + """PipelineUploadServiceApi unit test stubs""" + + def setUp(self): + self.api = kfp_server_api.api.pipeline_upload_service_api.PipelineUploadServiceApi() # noqa: E501 + + def tearDown(self): + pass + + def test_upload_pipeline(self): + """Test case for upload_pipeline + + """ + pass + + def test_upload_pipeline_version(self): + """Test case for upload_pipeline_version + + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_protobuf_any.py b/backend/api/python_http_client/test/test_protobuf_any.py new file mode 100644 index 000000000..71fb6a46e --- /dev/null +++ b/backend/api/python_http_client/test/test_protobuf_any.py @@ -0,0 +1,67 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.protobuf_any import ProtobufAny # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestProtobufAny(unittest.TestCase): + """ProtobufAny unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ProtobufAny + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.protobuf_any.ProtobufAny() # noqa: E501 + if include_optional : + return ProtobufAny( + type_url = '0', + value = 'YQ==' + ) + else : + return ProtobufAny( + ) + + def testProtobufAny(self): + """Test ProtobufAny""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_report_run_metrics_response_report_run_metric_result.py b/backend/api/python_http_client/test/test_report_run_metrics_response_report_run_metric_result.py new file mode 100644 index 000000000..3ee1418d4 --- /dev/null +++ b/backend/api/python_http_client/test/test_report_run_metrics_response_report_run_metric_result.py @@ -0,0 +1,69 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.report_run_metrics_response_report_run_metric_result import ReportRunMetricsResponseReportRunMetricResult # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestReportRunMetricsResponseReportRunMetricResult(unittest.TestCase): + """ReportRunMetricsResponseReportRunMetricResult unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ReportRunMetricsResponseReportRunMetricResult + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.report_run_metrics_response_report_run_metric_result.ReportRunMetricsResponseReportRunMetricResult() # noqa: E501 + if include_optional : + return ReportRunMetricsResponseReportRunMetricResult( + metric_name = '0', + metric_node_id = '0', + status = 'UNSPECIFIED', + message = '0' + ) + else : + return ReportRunMetricsResponseReportRunMetricResult( + ) + + def testReportRunMetricsResponseReportRunMetricResult(self): + """Test ReportRunMetricsResponseReportRunMetricResult""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_report_run_metrics_response_report_run_metric_result_status.py b/backend/api/python_http_client/test/test_report_run_metrics_response_report_run_metric_result_status.py new file mode 100644 index 000000000..7738f28fe --- /dev/null +++ b/backend/api/python_http_client/test/test_report_run_metrics_response_report_run_metric_result_status.py @@ -0,0 +1,65 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.report_run_metrics_response_report_run_metric_result_status import ReportRunMetricsResponseReportRunMetricResultStatus # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestReportRunMetricsResponseReportRunMetricResultStatus(unittest.TestCase): + """ReportRunMetricsResponseReportRunMetricResultStatus unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ReportRunMetricsResponseReportRunMetricResultStatus + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.report_run_metrics_response_report_run_metric_result_status.ReportRunMetricsResponseReportRunMetricResultStatus() # noqa: E501 + if include_optional : + return ReportRunMetricsResponseReportRunMetricResultStatus( + ) + else : + return ReportRunMetricsResponseReportRunMetricResultStatus( + ) + + def testReportRunMetricsResponseReportRunMetricResultStatus(self): + """Test ReportRunMetricsResponseReportRunMetricResultStatus""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_run_metric_format.py b/backend/api/python_http_client/test/test_run_metric_format.py new file mode 100644 index 000000000..6ef0bc0b6 --- /dev/null +++ b/backend/api/python_http_client/test/test_run_metric_format.py @@ -0,0 +1,65 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.run_metric_format import RunMetricFormat # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestRunMetricFormat(unittest.TestCase): + """RunMetricFormat unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test RunMetricFormat + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.run_metric_format.RunMetricFormat() # noqa: E501 + if include_optional : + return RunMetricFormat( + ) + else : + return RunMetricFormat( + ) + + def testRunMetricFormat(self): + """Test RunMetricFormat""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_run_service_api.py b/backend/api/python_http_client/test/test_run_service_api.py new file mode 100644 index 000000000..2c216880c --- /dev/null +++ b/backend/api/python_http_client/test/test_run_service_api.py @@ -0,0 +1,117 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest + +import kfp_server_api +from kfp_server_api.api.run_service_api import RunServiceApi # noqa: E501 +from kfp_server_api.rest import ApiException + + +class TestRunServiceApi(unittest.TestCase): + """RunServiceApi unit test stubs""" + + def setUp(self): + self.api = kfp_server_api.api.run_service_api.RunServiceApi() # noqa: E501 + + def tearDown(self): + pass + + def test_archive_run(self): + """Test case for archive_run + + Archives a run. # noqa: E501 + """ + pass + + def test_create_run(self): + """Test case for create_run + + Creates a new run. # noqa: E501 + """ + pass + + def test_delete_run(self): + """Test case for delete_run + + Deletes a run. # noqa: E501 + """ + pass + + def test_get_run(self): + """Test case for get_run + + Finds a specific run by ID. # noqa: E501 + """ + pass + + def test_list_runs(self): + """Test case for list_runs + + Finds all runs. # noqa: E501 + """ + pass + + def test_read_artifact(self): + """Test case for read_artifact + + Finds a run's artifact data. # noqa: E501 + """ + pass + + def test_report_run_metrics(self): + """Test case for report_run_metrics + + ReportRunMetrics reports metrics of a run. Each metric is reported in its own transaction, so this API accepts partial failures. 
Metric can be uniquely identified by (run_id, node_id, name). Duplicate reporting will be ignored by the API. First reporting wins. # noqa: E501 + """ + pass + + def test_retry_run(self): + """Test case for retry_run + + Re-initiates a failed or terminated run. # noqa: E501 + """ + pass + + def test_terminate_run(self): + """Test case for terminate_run + + Terminates an active run. # noqa: E501 + """ + pass + + def test_unarchive_run(self): + """Test case for unarchive_run + + Restores an archived run. # noqa: E501 + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/test/test_run_storage_state.py b/backend/api/python_http_client/test/test_run_storage_state.py new file mode 100644 index 000000000..8c3cd4dbe --- /dev/null +++ b/backend/api/python_http_client/test/test_run_storage_state.py @@ -0,0 +1,65 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.run_storage_state import RunStorageState # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestRunStorageState(unittest.TestCase): + """RunStorageState unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test RunStorageState + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.run_storage_state.RunStorageState() # noqa: E501 + if include_optional : + return RunStorageState( + ) + else : + return RunStorageState( + ) + + def testRunStorageState(self): + """Test RunStorageState""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/python_http_client/tox.ini b/backend/api/python_http_client/tox.ini new file mode 100644 index 000000000..25937a687 --- /dev/null +++ b/backend/api/python_http_client/tox.ini @@ -0,0 +1,9 @@ +[tox] +envlist = py27, py3 + +[testenv] +deps=-r{toxinidir}/requirements.txt + -r{toxinidir}/test-requirements.txt + +commands= + pytest --cov=kfp_server_api diff --git a/backend/api/python_http_client_template/README.md b/backend/api/python_http_client_template/README.md new file mode 100644 index 000000000..0a473ff18 --- /dev/null +++ b/backend/api/python_http_client_template/README.md @@ -0,0 +1,5 @@ +This folder contains overrides to openapi-generator python http client templates. 
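+
+The templates here (for example `api.mustache` and `model.mustache`) control the shape of every API and model class generated into `kfp_server_api`, including the `async_req`, `_preload_content` and `_request_timeout` keyword arguments documented on each generated method. As a rough sketch only, calling a client generated from these templates looks roughly like the snippet below; the host value is a placeholder and authentication is omitted, so adjust both for a real deployment:
+
+```python
+import kfp_server_api
+
+# Placeholder endpoint; point this at your own KFP API server.
+config = kfp_server_api.Configuration(host="http://localhost:8888")
+client = kfp_server_api.ApiClient(config)
+experiments = kfp_server_api.ExperimentServiceApi(client)
+
+# Synchronous call, as emitted by the api.mustache template.
+resp = experiments.list_experiment(page_size=10)
+print(resp.total_size)
+
+# Asynchronous variant: returns a request thread, resolved with .get().
+thread = experiments.list_experiment(page_size=10, async_req=True)
+resp = thread.get()
+```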
+ +Resources: +* Documentation for overriding templates: https://github.com/OpenAPITools/openapi-generator/tree/v4.3.1/modules/openapi-generator/src/main/resources/python. +* Original templates for the generator version we use: https://github.com/OpenAPITools/openapi-generator/tree/v4.3.1/modules/openapi-generator/src/main/resources/python diff --git a/backend/api/python_http_client_template/api.mustache b/backend/api/python_http_client_template/api.mustache new file mode 100644 index 000000000..122e816cb --- /dev/null +++ b/backend/api/python_http_client_template/api.mustache @@ -0,0 +1,270 @@ +# coding: utf-8 + +{{>partial_header}} + +from __future__ import absolute_import + +import re # noqa: F401 + +# python 2 and python 3 compatibility library +import six + +from {{packageName}}.api_client import ApiClient +from {{packageName}}.exceptions import ( # noqa: F401 + ApiTypeError, + ApiValueError +) + + +{{#operations}} +class {{classname}}(object): + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): + if api_client is None: + api_client = ApiClient() + self.api_client = api_client +{{#operation}} + + def {{operationId}}(self, {{#sortParamsByRequiredFlag}}{{#allParams}}{{#required}}{{paramName}}, {{/required}}{{/allParams}}{{/sortParamsByRequiredFlag}}**kwargs): # noqa: E501 + """{{#summary}}{{{.}}}{{/summary}}{{^summary}}{{operationId}}{{/summary}} # noqa: E501 + +{{#notes}} + {{{notes}}} # noqa: E501 +{{/notes}} + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + +{{#sortParamsByRequiredFlag}} + >>> thread = api.{{operationId}}({{#allParams}}{{#required}}{{paramName}}, {{/required}}{{/allParams}}async_req=True) +{{/sortParamsByRequiredFlag}} +{{^sortParamsByRequiredFlag}} + >>> thread = api.{{operationId}}({{#allParams}}{{#required}}{{paramName}}={{paramName}}_value, {{/required}}{{/allParams}}async_req=True) +{{/sortParamsByRequiredFlag}} + >>> result = thread.get() + +{{#allParams}} + :param {{paramName}}:{{#description}} {{{description}}}{{/description}}{{#required}} (required){{/required}}{{#optional}}(optional){{/optional}} + :type {{paramName}}: {{dataType}}{{#optional}}, optional{{/optional}} +{{/allParams}} + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
+ :rtype: {{#returnType}}{{returnType}}{{/returnType}}{{^returnType}}None{{/returnType}} + """ + kwargs['_return_http_data_only'] = True + return self.{{operationId}}_with_http_info({{#sortParamsByRequiredFlag}}{{#allParams}}{{#required}}{{paramName}}, {{/required}}{{/allParams}}{{/sortParamsByRequiredFlag}}**kwargs) # noqa: E501 + + def {{operationId}}_with_http_info(self, {{#sortParamsByRequiredFlag}}{{#allParams}}{{#required}}{{paramName}}, {{/required}}{{/allParams}}{{/sortParamsByRequiredFlag}}**kwargs): # noqa: E501 + """{{#summary}}{{{.}}}{{/summary}}{{^summary}}{{operationId}}{{/summary}} # noqa: E501 + +{{#notes}} + {{{notes}}} # noqa: E501 +{{/notes}} + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + +{{#sortParamsByRequiredFlag}} + >>> thread = api.{{operationId}}_with_http_info({{#allParams}}{{#required}}{{paramName}}, {{/required}}{{/allParams}}async_req=True) +{{/sortParamsByRequiredFlag}} +{{^sortParamsByRequiredFlag}} + >>> thread = api.{{operationId}}_with_http_info({{#allParams}}{{#required}}{{paramName}}={{paramName}}_value, {{/required}}{{/allParams}}async_req=True) +{{/sortParamsByRequiredFlag}} + >>> result = thread.get() + +{{#allParams}} + :param {{paramName}}:{{#description}} {{{description}}}{{/description}}{{#required}} (required){{/required}}{{#optional}}(optional){{/optional}} + :type {{paramName}}: {{dataType}}{{#optional}}, optional{{/optional}} +{{/allParams}} + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: {{#returnType}}tuple({{returnType}}, status_code(int), headers(HTTPHeaderDict)){{/returnType}}{{^returnType}}None{{/returnType}} + """ + + {{#servers.0}} + local_var_hosts = [ +{{#servers}} + '{{{url}}}'{{^-last}},{{/-last}} +{{/servers}} + ] + local_var_host = local_var_hosts[0] + if kwargs.get('_host_index'): + _host_index = int(kwargs.get('_host_index')) + if _host_index < 0 or _host_index >= len(local_var_hosts): + raise ApiValueError( + "Invalid host index. 
Must be 0 <= index < %s" + % len(local_var_host) + ) + local_var_host = local_var_hosts[_host_index] + {{/servers.0}} + local_var_params = locals() + + all_params = [ +{{#allParams}} + '{{paramName}}'{{#hasMore}},{{/hasMore}} +{{/allParams}} + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params{{#servers.0}} and key != "_host_index"{{/servers.0}}: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method {{operationId}}" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] +{{#allParams}} +{{^isNullable}} +{{#required}} + # verify the required parameter '{{paramName}}' is set + if self.api_client.client_side_validation and ('{{paramName}}' not in local_var_params or # noqa: E501 + local_var_params['{{paramName}}'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `{{paramName}}` when calling `{{operationId}}`") # noqa: E501 +{{/required}} +{{/isNullable}} +{{/allParams}} + +{{#allParams}} +{{#hasValidation}} + {{#maxLength}} + if self.api_client.client_side_validation and ('{{paramName}}' in local_var_params and # noqa: E501 + len(local_var_params['{{paramName}}']) > {{maxLength}}): # noqa: E501 + raise ApiValueError("Invalid value for parameter `{{paramName}}` when calling `{{operationId}}`, length must be less than or equal to `{{maxLength}}`") # noqa: E501 + {{/maxLength}} + {{#minLength}} + if self.api_client.client_side_validation and ('{{paramName}}' in local_var_params and # noqa: E501 + len(local_var_params['{{paramName}}']) < {{minLength}}): # noqa: E501 + raise ApiValueError("Invalid value for parameter `{{paramName}}` when calling `{{operationId}}`, length must be greater than or equal to `{{minLength}}`") # noqa: E501 + {{/minLength}} + {{#maximum}} + if self.api_client.client_side_validation and '{{paramName}}' in local_var_params and local_var_params['{{paramName}}'] >{{#exclusiveMaximum}}={{/exclusiveMaximum}} {{maximum}}: # noqa: E501 + raise ApiValueError("Invalid value for parameter `{{paramName}}` when calling `{{operationId}}`, must be a value less than {{^exclusiveMaximum}}or equal to {{/exclusiveMaximum}}`{{maximum}}`") # noqa: E501 + {{/maximum}} + {{#minimum}} + if self.api_client.client_side_validation and '{{paramName}}' in local_var_params and local_var_params['{{paramName}}'] <{{#exclusiveMinimum}}={{/exclusiveMinimum}} {{minimum}}: # noqa: E501 + raise ApiValueError("Invalid value for parameter `{{paramName}}` when calling `{{operationId}}`, must be a value greater than {{^exclusiveMinimum}}or equal to {{/exclusiveMinimum}}`{{minimum}}`") # noqa: E501 + {{/minimum}} + {{#pattern}} + if self.api_client.client_side_validation and '{{paramName}}' in local_var_params and not re.search(r'{{{vendorExtensions.x-regex}}}', local_var_params['{{paramName}}']{{#vendorExtensions.x-modifiers}}{{#-first}}, flags={{/-first}}re.{{.}}{{^-last}} | {{/-last}}{{/vendorExtensions.x-modifiers}}): # noqa: E501 + raise ApiValueError("Invalid value for parameter `{{paramName}}` when calling `{{operationId}}`, must conform to the pattern `{{{pattern}}}`") # noqa: E501 + {{/pattern}} + {{#maxItems}} + if self.api_client.client_side_validation and ('{{paramName}}' in local_var_params and # noqa: E501 + len(local_var_params['{{paramName}}']) > {{maxItems}}): # noqa: E501 + raise ApiValueError("Invalid value for parameter `{{paramName}}` when calling 
`{{operationId}}`, number of items must be less than or equal to `{{maxItems}}`") # noqa: E501 + {{/maxItems}} + {{#minItems}} + if self.api_client.client_side_validation and ('{{paramName}}' in local_var_params and # noqa: E501 + len(local_var_params['{{paramName}}']) < {{minItems}}): # noqa: E501 + raise ApiValueError("Invalid value for parameter `{{paramName}}` when calling `{{operationId}}`, number of items must be greater than or equal to `{{minItems}}`") # noqa: E501 + {{/minItems}} +{{/hasValidation}} +{{#-last}} +{{/-last}} +{{/allParams}} + collection_formats = {} + + path_params = {} +{{#pathParams}} + if '{{paramName}}' in local_var_params: + path_params['{{baseName}}'] = local_var_params['{{paramName}}']{{#isListContainer}} # noqa: E501 + collection_formats['{{baseName}}'] = '{{collectionFormat}}'{{/isListContainer}} # noqa: E501 +{{/pathParams}} + + query_params = [] +{{#queryParams}} + if '{{paramName}}' in local_var_params and local_var_params['{{paramName}}'] is not None: # noqa: E501 + query_params.append(('{{baseName}}', local_var_params['{{paramName}}'])){{#isListContainer}} # noqa: E501 + collection_formats['{{baseName}}'] = '{{collectionFormat}}'{{/isListContainer}} # noqa: E501 +{{/queryParams}} + + header_params = {} +{{#headerParams}} + if '{{paramName}}' in local_var_params: + header_params['{{baseName}}'] = local_var_params['{{paramName}}']{{#isListContainer}} # noqa: E501 + collection_formats['{{baseName}}'] = '{{collectionFormat}}'{{/isListContainer}} # noqa: E501 +{{/headerParams}} + + form_params = [] + local_var_files = {} +{{#formParams}} + if '{{paramName}}' in local_var_params: + {{^isFile}}form_params.append(('{{baseName}}', local_var_params['{{paramName}}'])){{/isFile}}{{#isFile}}local_var_files['{{baseName}}'] = local_var_params['{{paramName}}']{{/isFile}}{{#isListContainer}} # noqa: E501 + collection_formats['{{baseName}}'] = '{{collectionFormat}}'{{/isListContainer}} # noqa: E501 +{{/formParams}} + + body_params = None +{{#bodyParam}} + if '{{paramName}}' in local_var_params: + body_params = local_var_params['{{paramName}}'] +{{/bodyParam}} + {{#hasProduces}} + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + [{{#produces}}'{{{mediaType}}}'{{#hasMore}}, {{/hasMore}}{{/produces}}]) # noqa: E501 + + {{/hasProduces}} + {{#hasConsumes}} + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + [{{#consumes}}'{{{mediaType}}}'{{#hasMore}}, {{/hasMore}}{{/consumes}}]) # noqa: E501 + + {{/hasConsumes}} + # Authentication setting + auth_settings = [{{#authMethods}}'{{name}}'{{#hasMore}}, {{/hasMore}}{{/authMethods}}] # noqa: E501 + + return self.api_client.call_api( + '{{{path}}}', '{{httpMethod}}', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type={{#returnType}}'{{returnType}}'{{/returnType}}{{^returnType}}None{{/returnType}}, # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + {{#servers.0}} + _host=local_var_host, + {{/servers.0}} + collection_formats=collection_formats) +{{/operation}} +{{/operations}} diff --git a/backend/api/python_http_client_template/model.mustache 
b/backend/api/python_http_client_template/model.mustache new file mode 100644 index 000000000..2b3299a6f --- /dev/null +++ b/backend/api/python_http_client_template/model.mustache @@ -0,0 +1,246 @@ +# coding: utf-8 + +{{>partial_header}} + +import pprint +import re # noqa: F401 + +import six + +from {{packageName}}.configuration import Configuration + + +{{#models}} +{{#model}} +class {{classname}}(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """{{#allowableValues}} + + """ + allowed enum values + """ +{{#enumVars}} + {{name}} = {{{value}}}{{^-last}} +{{/-last}} +{{/enumVars}}{{/allowableValues}} + +{{#allowableValues}} + allowable_values = [{{#enumVars}}{{name}}{{^-last}}, {{/-last}}{{/enumVars}}] # noqa: E501 + +{{/allowableValues}} + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { +{{#vars}} + '{{name}}': '{{{dataType}}}'{{#hasMore}},{{/hasMore}} +{{/vars}} + } + + attribute_map = { +{{#vars}} + '{{name}}': '{{baseName}}'{{#hasMore}},{{/hasMore}} +{{/vars}} + } +{{#discriminator}} + + discriminator_value_class_map = { +{{#children}} + '{{^vendorExtensions.x-discriminator-value}}{{name}}{{/vendorExtensions.x-discriminator-value}}{{#vendorExtensions.x-discriminator-value}}{{{vendorExtensions.x-discriminator-value}}}{{/vendorExtensions.x-discriminator-value}}': '{{{classname}}}'{{^-last}},{{/-last}} +{{/children}} + } +{{/discriminator}} + + def __init__(self{{#vars}}, {{name}}={{#defaultValue}}{{{defaultValue}}}{{/defaultValue}}{{^defaultValue}}None{{/defaultValue}}{{/vars}}, local_vars_configuration=None): # noqa: E501 + """{{classname}} - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration +{{#vars}}{{#-first}} +{{/-first}} + self._{{name}} = None +{{/vars}} + self.discriminator = {{#discriminator}}'{{{discriminatorName}}}'{{/discriminator}}{{^discriminator}}None{{/discriminator}} +{{#vars}}{{#-first}} +{{/-first}} +{{#required}} + self.{{name}} = {{name}} +{{/required}} +{{^required}} +{{#isNullable}} + self.{{name}} = {{name}} +{{/isNullable}} +{{^isNullable}} + if {{name}} is not None: + self.{{name}} = {{name}} +{{/isNullable}} +{{/required}} +{{/vars}} + +{{#vars}} + @property + def {{name}}(self): + """Gets the {{name}} of this {{classname}}. # noqa: E501 + +{{#description}} + {{{description}}} # noqa: E501 +{{/description}} + + :return: The {{name}} of this {{classname}}. # noqa: E501 + :rtype: {{dataType}} + """ + return self._{{name}} + + @{{name}}.setter + def {{name}}(self, {{name}}): + """Sets the {{name}} of this {{classname}}. + +{{#description}} + {{{description}}} # noqa: E501 +{{/description}} + + :param {{name}}: The {{name}} of this {{classname}}. 
# noqa: E501 + :type {{name}}: {{dataType}} + """ +{{^isNullable}} +{{#required}} + if self.local_vars_configuration.client_side_validation and {{name}} is None: # noqa: E501 + raise ValueError("Invalid value for `{{name}}`, must not be `None`") # noqa: E501 +{{/required}} +{{/isNullable}} +{{#isEnum}} +{{#isContainer}} + allowed_values = [{{#isNullable}}None,{{/isNullable}}{{#allowableValues}}{{#values}}{{#items.isString}}"{{/items.isString}}{{{this}}}{{#items.isString}}"{{/items.isString}}{{^-last}}, {{/-last}}{{/values}}{{/allowableValues}}] # noqa: E501 +{{#isListContainer}} + if (self.local_vars_configuration.client_side_validation and + not set({{{name}}}).issubset(set(allowed_values))): # noqa: E501 + raise ValueError( + "Invalid values for `{{{name}}}` [{0}], must be a subset of [{1}]" # noqa: E501 + .format(", ".join(map(str, set({{{name}}}) - set(allowed_values))), # noqa: E501 + ", ".join(map(str, allowed_values))) + ) +{{/isListContainer}} +{{#isMapContainer}} + if (self.local_vars_configuration.client_side_validation and + not set({{{name}}}.keys()).issubset(set(allowed_values))): # noqa: E501 + raise ValueError( + "Invalid keys in `{{{name}}}` [{0}], must be a subset of [{1}]" # noqa: E501 + .format(", ".join(map(str, set({{{name}}}.keys()) - set(allowed_values))), # noqa: E501 + ", ".join(map(str, allowed_values))) + ) +{{/isMapContainer}} +{{/isContainer}} +{{^isContainer}} + allowed_values = [{{#isNullable}}None,{{/isNullable}}{{#allowableValues}}{{#values}}{{#isString}}"{{/isString}}{{{this}}}{{#isString}}"{{/isString}}{{^-last}}, {{/-last}}{{/values}}{{/allowableValues}}] # noqa: E501 + if self.local_vars_configuration.client_side_validation and {{{name}}} not in allowed_values: # noqa: E501 + raise ValueError( + "Invalid value for `{{{name}}}` ({0}), must be one of {1}" # noqa: E501 + .format({{{name}}}, allowed_values) + ) +{{/isContainer}} +{{/isEnum}} +{{^isEnum}} +{{#hasValidation}} +{{#maxLength}} + if (self.local_vars_configuration.client_side_validation and + {{name}} is not None and len({{name}}) > {{maxLength}}): + raise ValueError("Invalid value for `{{name}}`, length must be less than or equal to `{{maxLength}}`") # noqa: E501 +{{/maxLength}} +{{#minLength}} + if (self.local_vars_configuration.client_side_validation and + {{name}} is not None and len({{name}}) < {{minLength}}): + raise ValueError("Invalid value for `{{name}}`, length must be greater than or equal to `{{minLength}}`") # noqa: E501 +{{/minLength}} +{{#maximum}} + if (self.local_vars_configuration.client_side_validation and + {{name}} is not None and {{name}} >{{#exclusiveMaximum}}={{/exclusiveMaximum}} {{maximum}}): # noqa: E501 + raise ValueError("Invalid value for `{{name}}`, must be a value less than {{^exclusiveMaximum}}or equal to {{/exclusiveMaximum}}`{{maximum}}`") # noqa: E501 +{{/maximum}} +{{#minimum}} + if (self.local_vars_configuration.client_side_validation and + {{name}} is not None and {{name}} <{{#exclusiveMinimum}}={{/exclusiveMinimum}} {{minimum}}): # noqa: E501 + raise ValueError("Invalid value for `{{name}}`, must be a value greater than {{^exclusiveMinimum}}or equal to {{/exclusiveMinimum}}`{{minimum}}`") # noqa: E501 +{{/minimum}} +{{#pattern}} + if (self.local_vars_configuration.client_side_validation and + {{name}} is not None and not re.search(r'{{{vendorExtensions.x-regex}}}', {{name}}{{#vendorExtensions.x-modifiers}}{{#-first}}, flags={{/-first}}re.{{.}}{{^-last}} | {{/-last}}{{/vendorExtensions.x-modifiers}})): # noqa: E501 + raise ValueError(r"Invalid value for 
`{{name}}`, must be a follow pattern or equal to `{{{pattern}}}`") # noqa: E501 +{{/pattern}} +{{#maxItems}} + if (self.local_vars_configuration.client_side_validation and + {{name}} is not None and len({{name}}) > {{maxItems}}): + raise ValueError("Invalid value for `{{name}}`, number of items must be less than or equal to `{{maxItems}}`") # noqa: E501 +{{/maxItems}} +{{#minItems}} + if (self.local_vars_configuration.client_side_validation and + {{name}} is not None and len({{name}}) < {{minItems}}): + raise ValueError("Invalid value for `{{name}}`, number of items must be greater than or equal to `{{minItems}}`") # noqa: E501 +{{/minItems}} +{{/hasValidation}} +{{/isEnum}} + + self._{{name}} = {{name}} + +{{/vars}} +{{#discriminator}} + def get_real_child_model(self, data): + """Returns the real base class specified by the discriminator""" + discriminator_key = self.attribute_map[self.discriminator] + discriminator_value = data[discriminator_key] + return self.discriminator_value_class_map.get(discriminator_value) + +{{/discriminator}} + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, {{classname}}): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, {{classname}}): + return True + + return self.to_dict() != other.to_dict() +{{/model}} +{{/models}} diff --git a/backend/api/python_http_client_template/partial_header.mustache b/backend/api/python_http_client_template/partial_header.mustache new file mode 100644 index 000000000..6005af0e3 --- /dev/null +++ b/backend/api/python_http_client_template/partial_header.mustache @@ -0,0 +1,14 @@ +""" +{{#appName}} + {{{appName}}} +{{/appName}} + +{{#appDescription}} + {{{appDescription}}} +{{/appDescription}} + + {{#infoEmail}} + Contact: {{{infoEmail}}} + {{/infoEmail}} + Generated by: https://openapi-generator.tech +""" diff --git a/backend/api/run.proto b/backend/api/run.proto index 78698a368..e1a2bf123 100644 --- a/backend/api/run.proto +++ b/backend/api/run.proto @@ -61,7 +61,7 @@ option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { service RunService { - //Create a new run. + // Creates a new run. rpc CreateRun(CreateRunRequest) returns (RunDetail) { option (google.api.http) = { post: "/apis/v1beta1/runs" @@ -69,35 +69,35 @@ service RunService { }; } - //Find a specific run by ID. + // Finds a specific run by ID. rpc GetRun(GetRunRequest) returns (RunDetail) { option (google.api.http) = { get: "/apis/v1beta1/runs/{run_id}" }; } - //Find all runs. + // Finds all runs. 
rpc ListRuns(ListRunsRequest) returns (ListRunsResponse) { option (google.api.http) = { get: "/apis/v1beta1/runs" }; } - //Archive a run. + // Archives a run. rpc ArchiveRun(ArchiveRunRequest) returns (google.protobuf.Empty) { option (google.api.http) = { post: "/apis/v1beta1/runs/{id}:archive" }; } - //Restore an archived run. + // Restores an archived run. rpc UnarchiveRun(UnarchiveRunRequest) returns (google.protobuf.Empty) { option (google.api.http) = { post: "/apis/v1beta1/runs/{id}:unarchive" }; } - //Delete a run. + // Deletes a run. rpc DeleteRun(DeleteRunRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/apis/v1beta1/runs/{id}" @@ -116,21 +116,21 @@ service RunService { }; } - //Find a run's artifact data. + // Finds a run's artifact data. rpc ReadArtifact(ReadArtifactRequest) returns (ReadArtifactResponse) { option (google.api.http) = { get: "/apis/v1beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read" }; } - //Terminate an active run. + // Terminates an active run. rpc TerminateRun(TerminateRunRequest) returns (google.protobuf.Empty) { option (google.api.http) = { post: "/apis/v1beta1/runs/{run_id}/terminate" }; } - //Re-initiate a failed or terminated run. + // Re-initiates a failed or terminated run. rpc RetryRun(RetryRunRequest) returns (google.protobuf.Empty) { option (google.api.http) = { post: "/apis/v1beta1/runs/{run_id}/retry" @@ -143,14 +143,23 @@ message CreateRunRequest { } message GetRunRequest { + // The ID of the run to be retrieved. string run_id = 1; } message ListRunsRequest { + // A page token to request the next page of results. The token is acquried + // from the nextPageToken field of the response from the previous + // ListRuns call or can be omitted when fetching the first page. string page_token = 1; + + // The number of runs to be listed per page. If there are more runs than this + // number, the response message will contain a nextPageToken field you can use + // to fetch the next page. int32 page_size = 2; - // Can be format of "field_name", "field_name asc" or "field_name des" - // (Example, "name asc" or "id des"). Ascending by default. + + // Can be format of "field_name", "field_name asc" or "field_name desc" + // (Example, "name asc" or "id desc"). Ascending by default. string sort_by = 3; // What resource reference to filter on. @@ -165,28 +174,37 @@ message ListRunsRequest { } message TerminateRunRequest { + // The ID of the run to be terminated. string run_id = 1; } message RetryRunRequest { + // The ID of the run to be retried. string run_id = 1; } message ListRunsResponse { repeated Run runs = 1; + + // The total number of runs for the given query. int32 total_size = 3; + + // The token to list the next page of runs. string next_page_token = 2; } message ArchiveRunRequest { + // The ID of the run to be archived. string id = 1; } message UnarchiveRunRequest { + // The ID of the run to be restored. string id = 1; } message DeleteRunRequest { + // The ID of the run to be deleted. string id = 1; } @@ -203,6 +221,7 @@ message Run { STORAGESTATE_ARCHIVED = 1; } + // Output. Specify whether this run is in archived or available mode. StorageState storage_state = 10; // Optional input field. Describing the purpose of the run @@ -213,6 +232,8 @@ message Run { PipelineSpec pipeline_spec = 4; // Optional input field. Specify which resource this run belongs to. + // When creating a run from a particular pipeline version, the pipeline + // version can be specified here. 
repeated ResourceReference resource_references = 5; // Optional input field. Specify which Kubernetes service account this run uses. diff --git a/backend/api/swagger/experiment.swagger.json b/backend/api/swagger/experiment.swagger.json index 8c5976150..502b07044 100644 --- a/backend/api/swagger/experiment.swagger.json +++ b/backend/api/swagger/experiment.swagger.json @@ -17,7 +17,7 @@ "paths": { "/apis/v1beta1/experiments": { "get": { - "summary": "Find all experiments.", + "summary": "Finds all experiments. Supports pagination, and sorting on certain fields.", "operationId": "ListExperiment", "responses": { "200": { @@ -36,12 +36,14 @@ "parameters": [ { "name": "page_token", + "description": "A page token to request the next page of results. The token is acquried\nfrom the nextPageToken field of the response from the previous\nListExperiment call or can be omitted when fetching the first page.", "in": "query", "required": false, "type": "string" }, { "name": "page_size", + "description": "The number of experiments to be listed per page. If there are more\nexperiments than this number, the response message will contain a\nnextPageToken field you can use to fetch the next page.", "in": "query", "required": false, "type": "integer", @@ -49,7 +51,7 @@ }, { "name": "sort_by", - "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name des\"\nAscending by default.", + "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\"\nAscending by default.", "in": "query", "required": false, "type": "string" @@ -90,7 +92,7 @@ ] }, "post": { - "summary": "Create a new experiment.", + "summary": "Creates a new experiment.", "operationId": "CreateExperiment", "responses": { "200": { @@ -124,7 +126,7 @@ }, "/apis/v1beta1/experiments/{id}": { "get": { - "summary": "Find a specific experiment by ID.", + "summary": "Finds a specific experiment by ID.", "operationId": "GetExperiment", "responses": { "200": { @@ -154,7 +156,7 @@ ] }, "delete": { - "summary": "Delete an experiment.", + "summary": "Deletes an experiment without deleting the experiment's runs and jobs. To\navoid unexpected behaviors, delete an experiment's runs and jobs before\ndeleting the experiment.", "operationId": "DeleteExperiment", "responses": { "200": { @@ -186,7 +188,7 @@ }, "/apis/v1beta1/experiments/{id}:archive": { "post": { - "summary": "Archive an experiment.", + "summary": "Archives an experiment and the experiment's runs and jobs.", "operationId": "ArchiveExperiment", "responses": { "200": { @@ -205,6 +207,7 @@ "parameters": [ { "name": "id", + "description": "The ID of the experiment to be archived.", "in": "path", "required": true, "type": "string" @@ -217,7 +220,7 @@ }, "/apis/v1beta1/experiments/{id}:unarchive": { "post": { - "summary": "Restore an archived experiment.", + "summary": "Restores an archived experiment. The experiment's archived runs and jobs\nwill stay archived.", "operationId": "UnarchiveExperiment", "responses": { "200": { @@ -236,6 +239,7 @@ "parameters": [ { "name": "id", + "description": "The ID of the experiment to be restored.", "in": "path", "required": true, "type": "string" @@ -285,7 +289,8 @@ "description": "Optional input field. Specify which resource this run belongs to.\nFor Experiment, the only valid resource reference is a single Namespace." }, "storage_state": { - "$ref": "#/definitions/ExperimentStorageState" + "$ref": "#/definitions/ExperimentStorageState", + "description": "Output. 
Specifies whether this experiment is in archived or available state." } } }, diff --git a/backend/api/swagger/job.swagger.json b/backend/api/swagger/job.swagger.json index e71c7ffd9..34aadad76 100644 --- a/backend/api/swagger/job.swagger.json +++ b/backend/api/swagger/job.swagger.json @@ -17,7 +17,7 @@ "paths": { "/apis/v1beta1/jobs": { "get": { - "summary": "Find all jobs.", + "summary": "Finds all jobs.", "operationId": "ListJobs", "responses": { "200": { @@ -36,12 +36,14 @@ "parameters": [ { "name": "page_token", + "description": "A page token to request the next page of results. The token is acquried\nfrom the nextPageToken field of the response from the previous\nListJobs call or can be omitted when fetching the first page.", "in": "query", "required": false, "type": "string" }, { "name": "page_size", + "description": "The number of jobs to be listed per page. If there are more jobs than this\nnumber, the response message will contain a nextPageToken field you can use\nto fetch the next page.", "in": "query", "required": false, "type": "integer", @@ -49,7 +51,7 @@ }, { "name": "sort_by", - "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name des\".\nAscending by default.", + "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\".\nAscending by default.", "in": "query", "required": false, "type": "string" @@ -90,7 +92,7 @@ ] }, "post": { - "summary": "Create a new job.", + "summary": "Creates a new job.", "operationId": "CreateJob", "responses": { "200": { @@ -124,7 +126,7 @@ }, "/apis/v1beta1/jobs/{id}": { "get": { - "summary": "Find a specific job by ID.", + "summary": "Finds a specific job by ID.", "operationId": "GetJob", "responses": { "200": { @@ -154,7 +156,7 @@ ] }, "delete": { - "summary": "Delete a job.", + "summary": "Deletes a job.", "operationId": "DeleteJob", "responses": { "200": { @@ -364,10 +366,12 @@ }, "total_size": { "type": "integer", - "format": "int32" + "format": "int32", + "description": "The total number of jobs for the given query." }, "next_page_token": { - "type": "string" + "type": "string", + "description": "The token to list the next page of jobs." } } }, diff --git a/backend/api/swagger/kfp_api_single_file.swagger.json b/backend/api/swagger/kfp_api_single_file.swagger.json index 86f1df96f..8bb9131b0 100644 --- a/backend/api/swagger/kfp_api_single_file.swagger.json +++ b/backend/api/swagger/kfp_api_single_file.swagger.json @@ -2,8 +2,17 @@ "swagger": "2.0", "info": { "title": "Kubeflow Pipelines API", - "version": "0.1.38", - "description": "This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition." + "version": "1.0.0", + "description": "This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.", + "contact": { + "name": "google", + "email": "kubeflow-pipelines@google.com", + "url": "https://www.google.com" + }, + "license": { + "name": "Apache 2.0", + "url": "https://raw.githubusercontent.com/kubeflow/pipelines/master/LICENSE" + } }, "schemes": [ "http", @@ -18,7 +27,7 @@ "paths": { "/apis/v1beta1/runs": { "get": { - "summary": "Find all runs.", + "summary": "Finds all runs.", "operationId": "ListRuns", "responses": { "200": { @@ -37,12 +46,14 @@ "parameters": [ { "name": "page_token", + "description": "A page token to request the next page of results. 
The token is acquried\nfrom the nextPageToken field of the response from the previous\nListRuns call or can be omitted when fetching the first page.", "in": "query", "required": false, "type": "string" }, { "name": "page_size", + "description": "The number of runs to be listed per page. If there are more runs than this\nnumber, the response message will contain a nextPageToken field you can use\nto fetch the next page.", "in": "query", "required": false, "type": "integer", @@ -50,7 +61,7 @@ }, { "name": "sort_by", - "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name des\"\n(Example, \"name asc\" or \"id des\"). Ascending by default.", + "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\"\n(Example, \"name asc\" or \"id desc\"). Ascending by default.", "in": "query", "required": false, "type": "string" @@ -91,7 +102,7 @@ ] }, "post": { - "summary": "Create a new run.", + "summary": "Creates a new run.", "operationId": "CreateRun", "responses": { "200": { @@ -124,7 +135,7 @@ }, "/apis/v1beta1/runs/{id}": { "delete": { - "summary": "Delete a run.", + "summary": "Deletes a run.", "operationId": "DeleteRun", "responses": { "200": { @@ -143,6 +154,7 @@ "parameters": [ { "name": "id", + "description": "The ID of the run to be deleted.", "in": "path", "required": true, "type": "string" @@ -155,7 +167,7 @@ }, "/apis/v1beta1/runs/{id}:archive": { "post": { - "summary": "Archive a run.", + "summary": "Archives a run.", "operationId": "ArchiveRun", "responses": { "200": { @@ -174,6 +186,7 @@ "parameters": [ { "name": "id", + "description": "The ID of the run to be archived.", "in": "path", "required": true, "type": "string" @@ -186,7 +199,7 @@ }, "/apis/v1beta1/runs/{id}:unarchive": { "post": { - "summary": "Restore an archived run.", + "summary": "Restores an archived run.", "operationId": "UnarchiveRun", "responses": { "200": { @@ -205,6 +218,7 @@ "parameters": [ { "name": "id", + "description": "The ID of the run to be restored.", "in": "path", "required": true, "type": "string" @@ -217,7 +231,7 @@ }, "/apis/v1beta1/runs/{run_id}": { "get": { - "summary": "Find a specific run by ID.", + "summary": "Finds a specific run by ID.", "operationId": "GetRun", "responses": { "200": { @@ -236,6 +250,7 @@ "parameters": [ { "name": "run_id", + "description": "The ID of the run to be retrieved.", "in": "path", "required": true, "type": "string" @@ -248,7 +263,7 @@ }, "/apis/v1beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read": { "get": { - "summary": "Find a run's artifact data.", + "summary": "Finds a run's artifact data.", "operationId": "ReadArtifact", "responses": { "200": { @@ -294,7 +309,7 @@ }, "/apis/v1beta1/runs/{run_id}/retry": { "post": { - "summary": "Re-initiate a failed or terminated run.", + "summary": "Re-initiates a failed or terminated run.", "operationId": "RetryRun", "responses": { "200": { @@ -313,6 +328,7 @@ "parameters": [ { "name": "run_id", + "description": "The ID of the run to be retried.", "in": "path", "required": true, "type": "string" @@ -325,7 +341,7 @@ }, "/apis/v1beta1/runs/{run_id}/terminate": { "post": { - "summary": "Terminate an active run.", + "summary": "Terminates an active run.", "operationId": "TerminateRun", "responses": { "200": { @@ -344,6 +360,7 @@ "parameters": [ { "name": "run_id", + "description": "The ID of the run to be terminated.", "in": "path", "required": true, "type": "string" @@ -396,7 +413,7 @@ }, "/apis/v1beta1/jobs": { "get": { - "summary": "Find all jobs.", + 
"summary": "Finds all jobs.", "operationId": "ListJobs", "responses": { "200": { @@ -415,12 +432,14 @@ "parameters": [ { "name": "page_token", + "description": "A page token to request the next page of results. The token is acquried\nfrom the nextPageToken field of the response from the previous\nListJobs call or can be omitted when fetching the first page.", "in": "query", "required": false, "type": "string" }, { "name": "page_size", + "description": "The number of jobs to be listed per page. If there are more jobs than this\nnumber, the response message will contain a nextPageToken field you can use\nto fetch the next page.", "in": "query", "required": false, "type": "integer", @@ -428,7 +447,7 @@ }, { "name": "sort_by", - "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name des\".\nAscending by default.", + "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\".\nAscending by default.", "in": "query", "required": false, "type": "string" @@ -469,7 +488,7 @@ ] }, "post": { - "summary": "Create a new job.", + "summary": "Creates a new job.", "operationId": "CreateJob", "responses": { "200": { @@ -503,7 +522,7 @@ }, "/apis/v1beta1/jobs/{id}": { "get": { - "summary": "Find a specific job by ID.", + "summary": "Finds a specific job by ID.", "operationId": "GetJob", "responses": { "200": { @@ -533,7 +552,7 @@ ] }, "delete": { - "summary": "Delete a job.", + "summary": "Deletes a job.", "operationId": "DeleteJob", "responses": { "200": { @@ -629,6 +648,7 @@ }, "/apis/v1beta1/pipeline_versions": { "get": { + "summary": "Lists all pipeline versions of a given pipeline.", "operationId": "ListPipelineVersions", "responses": { "200": { @@ -670,6 +690,7 @@ }, { "name": "page_size", + "description": "The number of pipeline versions to be listed per page. If there are more\npipeline versions than this number, the response message will contain a\nnextPageToken field you can use to fetch the next page.", "in": "query", "required": false, "type": "integer", @@ -677,13 +698,14 @@ }, { "name": "page_token", + "description": "A page token to request the next page of results. The token is acquried\nfrom the nextPageToken field of the response from the previous\nListPipelineVersions call or can be omitted when fetching the first page.", "in": "query", "required": false, "type": "string" }, { "name": "sort_by", - "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name des\"\nAscending by default.", + "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\"\nAscending by default.", "in": "query", "required": false, "type": "string" @@ -701,6 +723,7 @@ ] }, "post": { + "summary": "Adds a pipeline version to the specified pipeline.", "operationId": "CreatePipelineVersion", "responses": { "200": { @@ -734,6 +757,7 @@ }, "/apis/v1beta1/pipeline_versions/{version_id}": { "get": { + "summary": "Gets a pipeline version by pipeline version ID.", "operationId": "GetPipelineVersion", "responses": { "200": { @@ -752,6 +776,7 @@ "parameters": [ { "name": "version_id", + "description": "The ID of the pipeline version to be retrieved.", "in": "path", "required": true, "type": "string" @@ -762,6 +787,7 @@ ] }, "delete": { + "summary": "Deletes a pipeline version by pipeline version ID. If the deleted pipeline\nversion is the default pipeline version, the pipeline's default version\nchanges to the pipeline's most recent pipeline version. 
If there are no\nremaining pipeline versions, the pipeline will have no default version.\nExamines the run_service_api.ipynb notebook to learn more about creating a\nrun using a pipeline version (https://github.com/kubeflow/pipelines/blob/master/tools/benchmarks/run_service_api.ipynb).", "operationId": "DeletePipelineVersion", "responses": { "200": { @@ -780,6 +806,7 @@ "parameters": [ { "name": "version_id", + "description": "The ID of the pipeline version to be deleted.", "in": "path", "required": true, "type": "string" @@ -792,6 +819,7 @@ }, "/apis/v1beta1/pipeline_versions/{version_id}/templates": { "get": { + "summary": "Returns a YAML template that contains the specified pipeline version's description, parameters and metadata.", "operationId": "GetPipelineVersionTemplate", "responses": { "200": { @@ -810,6 +838,7 @@ "parameters": [ { "name": "version_id", + "description": "The ID of the pipeline version whose template is to be retrieved.", "in": "path", "required": true, "type": "string" @@ -822,7 +851,7 @@ }, "/apis/v1beta1/pipelines": { "get": { - "summary": "Find all pipelines.", + "summary": "Finds all pipelines.", "operationId": "ListPipelines", "responses": { "200": { @@ -841,12 +870,14 @@ "parameters": [ { "name": "page_token", + "description": "A page token to request the next page of results. The token is acquried\nfrom the nextPageToken field of the response from the previous\nListPipelines call.", "in": "query", "required": false, "type": "string" }, { "name": "page_size", + "description": "The number of pipelines to be listed per page. If there are more pipelines\nthan this number, the response message will contain a valid value in the\nnextPageToken field.", "in": "query", "required": false, "type": "integer", @@ -854,7 +885,7 @@ }, { "name": "sort_by", - "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name des\"\nAscending by default.", + "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\"\nAscending by default.", "in": "query", "required": false, "type": "string" @@ -872,7 +903,7 @@ ] }, "post": { - "summary": "Add a pipeline.", + "summary": "Creates a pipeline.", "operationId": "CreatePipeline", "responses": { "200": { @@ -905,7 +936,7 @@ }, "/apis/v1beta1/pipelines/{id}": { "get": { - "summary": "Find a specific pipeline by ID.", + "summary": "Finds a specific pipeline by ID.", "operationId": "GetPipeline", "responses": { "200": { @@ -924,6 +955,7 @@ "parameters": [ { "name": "id", + "description": "The ID of the pipeline to be retrieved.", "in": "path", "required": true, "type": "string" @@ -934,7 +966,7 @@ ] }, "delete": { - "summary": "Delete a pipeline.", + "summary": "Deletes a pipeline and its pipeline versions.", "operationId": "DeletePipeline", "responses": { "200": { @@ -953,6 +985,7 @@ "parameters": [ { "name": "id", + "description": "The ID of the pipeline to be deleted.", "in": "path", "required": true, "type": "string" @@ -984,6 +1017,7 @@ "parameters": [ { "name": "id", + "description": "The ID of the pipeline whose template is to be retrieved.", "in": "path", "required": true, "type": "string" @@ -996,7 +1030,7 @@ }, "/apis/v1beta1/experiments": { "get": { - "summary": "Find all experiments.", + "summary": "Finds all experiments. Supports pagination, and sorting on certain fields.", "operationId": "ListExperiment", "responses": { "200": { @@ -1015,12 +1049,14 @@ "parameters": [ { "name": "page_token", + "description": "A page token to request the next page of results. 
The token is acquried\nfrom the nextPageToken field of the response from the previous\nListExperiment call or can be omitted when fetching the first page.", "in": "query", "required": false, "type": "string" }, { "name": "page_size", + "description": "The number of experiments to be listed per page. If there are more\nexperiments than this number, the response message will contain a\nnextPageToken field you can use to fetch the next page.", "in": "query", "required": false, "type": "integer", @@ -1028,7 +1064,7 @@ }, { "name": "sort_by", - "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name des\"\nAscending by default.", + "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\"\nAscending by default.", "in": "query", "required": false, "type": "string" @@ -1069,7 +1105,7 @@ ] }, "post": { - "summary": "Create a new experiment.", + "summary": "Creates a new experiment.", "operationId": "CreateExperiment", "responses": { "200": { @@ -1103,7 +1139,7 @@ }, "/apis/v1beta1/experiments/{id}": { "get": { - "summary": "Find a specific experiment by ID.", + "summary": "Finds a specific experiment by ID.", "operationId": "GetExperiment", "responses": { "200": { @@ -1133,7 +1169,7 @@ ] }, "delete": { - "summary": "Delete an experiment.", + "summary": "Deletes an experiment without deleting the experiment's runs and jobs. To\navoid unexpected behaviors, delete an experiment's runs and jobs before\ndeleting the experiment.", "operationId": "DeleteExperiment", "responses": { "200": { @@ -1165,7 +1201,7 @@ }, "/apis/v1beta1/experiments/{id}:archive": { "post": { - "summary": "Archive an experiment.", + "summary": "Archives an experiment and the experiment's runs and jobs.", "operationId": "ArchiveExperiment", "responses": { "200": { @@ -1184,6 +1220,7 @@ "parameters": [ { "name": "id", + "description": "The ID of the experiment to be archived.", "in": "path", "required": true, "type": "string" @@ -1196,7 +1233,7 @@ }, "/apis/v1beta1/experiments/{id}:unarchive": { "post": { - "summary": "Restore an archived experiment.", + "summary": "Restores an archived experiment. The experiment's archived runs and jobs\nwill stay archived.", "operationId": "UnarchiveExperiment", "responses": { "200": { @@ -1215,6 +1252,7 @@ "parameters": [ { "name": "id", + "description": "The ID of the experiment to be restored.", "in": "path", "required": true, "type": "string" @@ -1387,10 +1425,12 @@ }, "total_size": { "type": "integer", - "format": "int32" + "format": "int32", + "description": "The total number of runs for the given query." }, "next_page_token": { - "type": "string" + "type": "string", + "description": "The token to list the next page of runs." } } }, @@ -1545,7 +1585,8 @@ "description": "Required input field. Name provided by user,\nor auto generated if run is created by scheduled job. Not unique." }, "storage_state": { - "$ref": "#/definitions/RunStorageState" + "$ref": "#/definitions/RunStorageState", + "description": "Output. Specify whether this run is in archived or available mode." }, "description": { "type": "string", @@ -1560,7 +1601,7 @@ "items": { "$ref": "#/definitions/apiResourceReference" }, - "description": "Optional input field. Specify which resource this run belongs to." + "description": "Optional input field. Specify which resource this run belongs to.\nWhen creating a run from a particular pipeline version, the pipeline\nversion can be specified here." 
}, "service_account": { "type": "string", @@ -1778,10 +1819,12 @@ }, "total_size": { "type": "integer", - "format": "int32" + "format": "int32", + "description": "The total number of jobs for the given query." }, "next_page_token": { - "type": "string" + "type": "string", + "description": "The token to list the next page of jobs." } } }, @@ -1822,7 +1865,8 @@ "type": "object", "properties": { "template": { - "type": "string" + "type": "string", + "description": "The template of the pipeline specified in a GetTemplate request, or of a\npipeline version specified in a GetPipelinesVersionTemplate request." } } }, @@ -1836,11 +1880,13 @@ } }, "next_page_token": { - "type": "string" + "type": "string", + "description": "The token to list the next page of pipeline versions." }, "total_size": { "type": "integer", - "format": "int32" + "format": "int32", + "description": "The total number of pipeline versions for the given query." } } }, @@ -1855,10 +1901,12 @@ }, "total_size": { "type": "integer", - "format": "int32" + "format": "int32", + "description": "The total number of pipelines for the given query." }, "next_page_token": { - "type": "string" + "type": "string", + "description": "The token to list the next page of pipelines." } } }, @@ -1948,7 +1996,8 @@ "type": "object", "properties": { "pipeline_url": { - "type": "string" + "type": "string", + "description": "URL of the pipeline definition or the pipeline version definition." } } }, @@ -1989,7 +2038,8 @@ "description": "Optional input field. Specify which resource this run belongs to.\nFor Experiment, the only valid resource reference is a single Namespace." }, "storage_state": { - "$ref": "#/definitions/ExperimentStorageState" + "$ref": "#/definitions/ExperimentStorageState", + "description": "Output. Specifies whether this experiment is in archived or available state." } } }, diff --git a/backend/api/swagger/pipeline.swagger.json b/backend/api/swagger/pipeline.swagger.json index f83c5f06d..5f383bef9 100644 --- a/backend/api/swagger/pipeline.swagger.json +++ b/backend/api/swagger/pipeline.swagger.json @@ -17,6 +17,7 @@ "paths": { "/apis/v1beta1/pipeline_versions": { "get": { + "summary": "Lists all pipeline versions of a given pipeline.", "operationId": "ListPipelineVersions", "responses": { "200": { @@ -58,6 +59,7 @@ }, { "name": "page_size", + "description": "The number of pipeline versions to be listed per page. If there are more\npipeline versions than this number, the response message will contain a\nnextPageToken field you can use to fetch the next page.", "in": "query", "required": false, "type": "integer", @@ -65,13 +67,14 @@ }, { "name": "page_token", + "description": "A page token to request the next page of results. 
The token is acquried\nfrom the nextPageToken field of the response from the previous\nListPipelineVersions call or can be omitted when fetching the first page.", "in": "query", "required": false, "type": "string" }, { "name": "sort_by", - "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name des\"\nAscending by default.", + "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\"\nAscending by default.", "in": "query", "required": false, "type": "string" @@ -89,6 +92,7 @@ ] }, "post": { + "summary": "Adds a pipeline version to the specified pipeline.", "operationId": "CreatePipelineVersion", "responses": { "200": { @@ -122,6 +126,7 @@ }, "/apis/v1beta1/pipeline_versions/{version_id}": { "get": { + "summary": "Gets a pipeline version by pipeline version ID.", "operationId": "GetPipelineVersion", "responses": { "200": { @@ -140,6 +145,7 @@ "parameters": [ { "name": "version_id", + "description": "The ID of the pipeline version to be retrieved.", "in": "path", "required": true, "type": "string" @@ -150,6 +156,7 @@ ] }, "delete": { + "summary": "Deletes a pipeline version by pipeline version ID. If the deleted pipeline\nversion is the default pipeline version, the pipeline's default version\nchanges to the pipeline's most recent pipeline version. If there are no\nremaining pipeline versions, the pipeline will have no default version.\nExamines the run_service_api.ipynb notebook to learn more about creating a\nrun using a pipeline version (https://github.com/kubeflow/pipelines/blob/master/tools/benchmarks/run_service_api.ipynb).", "operationId": "DeletePipelineVersion", "responses": { "200": { @@ -168,6 +175,7 @@ "parameters": [ { "name": "version_id", + "description": "The ID of the pipeline version to be deleted.", "in": "path", "required": true, "type": "string" @@ -180,6 +188,7 @@ }, "/apis/v1beta1/pipeline_versions/{version_id}/templates": { "get": { + "summary": "Returns a YAML template that contains the specified pipeline version's description, parameters and metadata.", "operationId": "GetPipelineVersionTemplate", "responses": { "200": { @@ -198,6 +207,7 @@ "parameters": [ { "name": "version_id", + "description": "The ID of the pipeline version whose template is to be retrieved.", "in": "path", "required": true, "type": "string" @@ -210,7 +220,7 @@ }, "/apis/v1beta1/pipelines": { "get": { - "summary": "Find all pipelines.", + "summary": "Finds all pipelines.", "operationId": "ListPipelines", "responses": { "200": { @@ -229,12 +239,14 @@ "parameters": [ { "name": "page_token", + "description": "A page token to request the next page of results. The token is acquried\nfrom the nextPageToken field of the response from the previous\nListPipelines call.", "in": "query", "required": false, "type": "string" }, { "name": "page_size", + "description": "The number of pipelines to be listed per page. 
If there are more pipelines\nthan this number, the response message will contain a valid value in the\nnextPageToken field.", "in": "query", "required": false, "type": "integer", @@ -242,7 +254,7 @@ }, { "name": "sort_by", - "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name des\"\nAscending by default.", + "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\"\nAscending by default.", "in": "query", "required": false, "type": "string" @@ -260,7 +272,7 @@ ] }, "post": { - "summary": "Add a pipeline.", + "summary": "Creates a pipeline.", "operationId": "CreatePipeline", "responses": { "200": { @@ -293,7 +305,7 @@ }, "/apis/v1beta1/pipelines/{id}": { "get": { - "summary": "Find a specific pipeline by ID.", + "summary": "Finds a specific pipeline by ID.", "operationId": "GetPipeline", "responses": { "200": { @@ -312,6 +324,7 @@ "parameters": [ { "name": "id", + "description": "The ID of the pipeline to be retrieved.", "in": "path", "required": true, "type": "string" @@ -322,7 +335,7 @@ ] }, "delete": { - "summary": "Delete a pipeline.", + "summary": "Deletes a pipeline and its pipeline versions.", "operationId": "DeletePipeline", "responses": { "200": { @@ -341,6 +354,7 @@ "parameters": [ { "name": "id", + "description": "The ID of the pipeline to be deleted.", "in": "path", "required": true, "type": "string" @@ -372,6 +386,7 @@ "parameters": [ { "name": "id", + "description": "The ID of the pipeline whose template is to be retrieved.", "in": "path", "required": true, "type": "string" @@ -388,7 +403,8 @@ "type": "object", "properties": { "template": { - "type": "string" + "type": "string", + "description": "The template of the pipeline specified in a GetTemplate request, or of a\npipeline version specified in a GetPipelinesVersionTemplate request." } } }, @@ -402,11 +418,13 @@ } }, "next_page_token": { - "type": "string" + "type": "string", + "description": "The token to list the next page of pipeline versions." }, "total_size": { "type": "integer", - "format": "int32" + "format": "int32", + "description": "The total number of pipeline versions for the given query." } } }, @@ -421,10 +439,12 @@ }, "total_size": { "type": "integer", - "format": "int32" + "format": "int32", + "description": "The total number of pipelines for the given query." }, "next_page_token": { - "type": "string" + "type": "string", + "description": "The token to list the next page of pipelines." } } }, @@ -593,7 +613,8 @@ "type": "object", "properties": { "pipeline_url": { - "type": "string" + "type": "string", + "description": "URL of the pipeline definition or the pipeline version definition." } } }, diff --git a/backend/api/swagger/run.swagger.json b/backend/api/swagger/run.swagger.json index f05ed108e..a3fb93c10 100644 --- a/backend/api/swagger/run.swagger.json +++ b/backend/api/swagger/run.swagger.json @@ -17,7 +17,7 @@ "paths": { "/apis/v1beta1/runs": { "get": { - "summary": "Find all runs.", + "summary": "Finds all runs.", "operationId": "ListRuns", "responses": { "200": { @@ -36,12 +36,14 @@ "parameters": [ { "name": "page_token", + "description": "A page token to request the next page of results. The token is acquried\nfrom the nextPageToken field of the response from the previous\nListRuns call or can be omitted when fetching the first page.", "in": "query", "required": false, "type": "string" }, { "name": "page_size", + "description": "The number of runs to be listed per page. 
If there are more runs than this\nnumber, the response message will contain a nextPageToken field you can use\nto fetch the next page.", "in": "query", "required": false, "type": "integer", @@ -49,7 +51,7 @@ }, { "name": "sort_by", - "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name des\"\n(Example, \"name asc\" or \"id des\"). Ascending by default.", + "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\"\n(Example, \"name asc\" or \"id desc\"). Ascending by default.", "in": "query", "required": false, "type": "string" @@ -90,7 +92,7 @@ ] }, "post": { - "summary": "Create a new run.", + "summary": "Creates a new run.", "operationId": "CreateRun", "responses": { "200": { @@ -123,7 +125,7 @@ }, "/apis/v1beta1/runs/{id}": { "delete": { - "summary": "Delete a run.", + "summary": "Deletes a run.", "operationId": "DeleteRun", "responses": { "200": { @@ -142,6 +144,7 @@ "parameters": [ { "name": "id", + "description": "The ID of the run to be deleted.", "in": "path", "required": true, "type": "string" @@ -154,7 +157,7 @@ }, "/apis/v1beta1/runs/{id}:archive": { "post": { - "summary": "Archive a run.", + "summary": "Archives a run.", "operationId": "ArchiveRun", "responses": { "200": { @@ -173,6 +176,7 @@ "parameters": [ { "name": "id", + "description": "The ID of the run to be archived.", "in": "path", "required": true, "type": "string" @@ -185,7 +189,7 @@ }, "/apis/v1beta1/runs/{id}:unarchive": { "post": { - "summary": "Restore an archived run.", + "summary": "Restores an archived run.", "operationId": "UnarchiveRun", "responses": { "200": { @@ -204,6 +208,7 @@ "parameters": [ { "name": "id", + "description": "The ID of the run to be restored.", "in": "path", "required": true, "type": "string" @@ -216,7 +221,7 @@ }, "/apis/v1beta1/runs/{run_id}": { "get": { - "summary": "Find a specific run by ID.", + "summary": "Finds a specific run by ID.", "operationId": "GetRun", "responses": { "200": { @@ -235,6 +240,7 @@ "parameters": [ { "name": "run_id", + "description": "The ID of the run to be retrieved.", "in": "path", "required": true, "type": "string" @@ -247,7 +253,7 @@ }, "/apis/v1beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read": { "get": { - "summary": "Find a run's artifact data.", + "summary": "Finds a run's artifact data.", "operationId": "ReadArtifact", "responses": { "200": { @@ -293,7 +299,7 @@ }, "/apis/v1beta1/runs/{run_id}/retry": { "post": { - "summary": "Re-initiate a failed or terminated run.", + "summary": "Re-initiates a failed or terminated run.", "operationId": "RetryRun", "responses": { "200": { @@ -312,6 +318,7 @@ "parameters": [ { "name": "run_id", + "description": "The ID of the run to be retried.", "in": "path", "required": true, "type": "string" @@ -324,7 +331,7 @@ }, "/apis/v1beta1/runs/{run_id}/terminate": { "post": { - "summary": "Terminate an active run.", + "summary": "Terminates an active run.", "operationId": "TerminateRun", "responses": { "200": { @@ -343,6 +350,7 @@ "parameters": [ { "name": "run_id", + "description": "The ID of the run to be terminated.", "in": "path", "required": true, "type": "string" @@ -457,10 +465,12 @@ }, "total_size": { "type": "integer", - "format": "int32" + "format": "int32", + "description": "The total number of runs for the given query." }, "next_page_token": { - "type": "string" + "type": "string", + "description": "The token to list the next page of runs." } } }, @@ -615,7 +625,8 @@ "description": "Required input field. 
Name provided by user,\nor auto generated if run is created by scheduled job. Not unique." }, "storage_state": { - "$ref": "#/definitions/RunStorageState" + "$ref": "#/definitions/RunStorageState", + "description": "Output. Specify whether this run is in archived or available mode." }, "description": { "type": "string", @@ -630,7 +641,7 @@ "items": { "$ref": "#/definitions/apiResourceReference" }, - "description": "Optional input field. Specify which resource this run belongs to." + "description": "Optional input field. Specify which resource this run belongs to.\nWhen creating a run from a particular pipeline version, the pipeline\nversion can be specified here." }, "service_account": { "type": "string", diff --git a/backend/metadata_writer/requirements.in b/backend/metadata_writer/requirements.in index f067dc8d0..ca0eb00a1 100644 --- a/backend/metadata_writer/requirements.in +++ b/backend/metadata_writer/requirements.in @@ -1,2 +1,2 @@ kubernetes>=8.0.0,<11.0.0 -ml-metadata==0.21.2 +ml-metadata==0.22.0 \ No newline at end of file diff --git a/backend/metadata_writer/requirements.txt b/backend/metadata_writer/requirements.txt index 537737461..95caa9e8e 100644 --- a/backend/metadata_writer/requirements.txt +++ b/backend/metadata_writer/requirements.txt @@ -20,7 +20,7 @@ keras-applications==1.0.8 # via tensorflow keras-preprocessing==1.1.0 # via tensorflow kubernetes==10.1.0 # via -r requirements.in (line 1) markdown==3.2.1 # via tensorboard -ml-metadata==0.21.2 # via -r requirements.in (line 2) +ml-metadata==0.22.0 # via -r requirements.in (line 2) numpy==1.18.2 # via h5py, keras-applications, keras-preprocessing, opt-einsum, scipy, tensorboard, tensorflow oauthlib==3.1.0 # via requests-oauthlib opt-einsum==3.2.0 # via tensorflow diff --git a/backend/metadata_writer/src/metadata_helpers.py b/backend/metadata_writer/src/metadata_helpers.py index 2078fa704..5f2b25b49 100644 --- a/backend/metadata_writer/src/metadata_helpers.py +++ b/backend/metadata_writer/src/metadata_helpers.py @@ -379,13 +379,12 @@ def create_new_output_artifact( run_id: str = None, argo_artifact: dict = None, ) -> metadata_store_pb2.Artifact: - properties = { + custom_properties = { ARTIFACT_IO_NAME_PROPERTY_NAME: metadata_store_pb2.Value(string_value=output_name), } - custom_properties = {} if run_id: - properties[ARTIFACT_PIPELINE_NAME_PROPERTY_NAME] = metadata_store_pb2.Value(string_value=str(run_id)) - properties[ARTIFACT_RUN_ID_PROPERTY_NAME] = metadata_store_pb2.Value(string_value=str(run_id)) + custom_properties[ARTIFACT_PIPELINE_NAME_PROPERTY_NAME] = metadata_store_pb2.Value(string_value=str(run_id)) + custom_properties[ARTIFACT_RUN_ID_PROPERTY_NAME] = metadata_store_pb2.Value(string_value=str(run_id)) if argo_artifact: custom_properties[ARTIFACT_ARGO_ARTIFACT_PROPERTY_NAME] = metadata_store_pb2.Value(string_value=json.dumps(argo_artifact, sort_keys=True)) return create_new_artifact_event_and_attribution( @@ -403,12 +402,6 @@ def create_new_output_artifact( ), ] ), - properties=properties, - artifact_type_properties={ - ARTIFACT_IO_NAME_PROPERTY_NAME: metadata_store_pb2.STRING, - ARTIFACT_PIPELINE_NAME_PROPERTY_NAME: metadata_store_pb2.STRING, - ARTIFACT_RUN_ID_PROPERTY_NAME: metadata_store_pb2.STRING, - }, custom_properties=custom_properties, #milliseconds_since_epoch=int(datetime.now(timezone.utc).timestamp() * 1000), # Happens automatically ) diff --git a/backend/metadata_writer/src/metadata_writer.py b/backend/metadata_writer/src/metadata_writer.py index ae6f3adf5..bba733c83 100644 --- 
a/backend/metadata_writer/src/metadata_writer.py +++ b/backend/metadata_writer/src/metadata_writer.py @@ -1,4 +1,4 @@ -# Copyright 2019 Google LLC +# Copyright 2020 kubeflow.org # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -16,6 +16,7 @@ import json import hashlib import os import sys +import re import kubernetes import yaml from time import sleep @@ -89,13 +90,26 @@ PIPELINE_LABEL_KEY = TEKTON_PIPELINERUN_LABEL_KEY if PIPELINE_RUNTIME == "tekton def output_name_to_argo(name: str) -> str: import re - return re.sub('-+', '-', re.sub('[^-0-9a-z]+', '-', name.lower())).strip('-') + # This sanitization code should be kept in sync with the code in the DSL compiler. + # See https://github.com/kubeflow/pipelines/blob/39975e3cde7ba4dcea2bca835b92d0fe40b1ae3c/sdk/python/kfp/compiler/_k8s_helper.py#L33 + return re.sub('-+', '-', re.sub('[^-_0-9A-Za-z]+', '-', name)).strip('-') + +def is_s3_endpoint(endpoint: str) -> bool: + return re.search('^.*s3.*amazonaws.com.*$', endpoint) + +def get_object_store_provider(endpoint: str) -> bool: + if is_s3_endpoint(endpoint): + return 's3' + else: + return 'minio' def artifact_to_uri(artifact: dict) -> str: + # s3 here means s3 compatible object storage. not AWS S3. if 's3' in artifact: s3_artifact = artifact['s3'] - return 'minio://{bucket}/{key}'.format( + return '{provider}://{bucket}/{key}'.format( + provider=get_object_store_provider(s3_artifact.get('endpoint', 'minio')), bucket=s3_artifact.get('bucket', ''), key=s3_artifact.get('key', ''), ) @@ -186,6 +200,8 @@ while True: k8s_api.list_namespaced_pod, namespace=namespace_to_watch, label_selector=PIPELINE_LABEL_KEY, + timeout_seconds=1800, # Sometimes watch gets stuck + _request_timeout=2000, # Sometimes HTTP GET gets stuck ): try: obj = event['object'] diff --git a/backend/requirements.in b/backend/requirements.in index 14f829bb1..f45e174e7 100644 --- a/backend/requirements.in +++ b/backend/requirements.in @@ -1 +1 @@ -tfx==0.21.2 +tfx==0.22.0 diff --git a/backend/requirements.txt b/backend/requirements.txt index 130b3a5b9..381c131b1 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -2,132 +2,148 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile requirements.in +# pip-compile --output-file=- - # absl-py==0.8.1 # via ml-metadata, tensorboard, tensorflow, tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx, tfx-bsl -apache-beam[gcp]==2.17.0 # via tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx, tfx-bsl -astor==0.8.1 # via tensorflow +apache-beam[gcp]==2.22.0 # via tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx, tfx-bsl +astunparse==1.6.3 # via tensorflow attrs==19.3.0 # via jsonschema -avro-python3==1.9.1 # via apache-beam, tensorflow-data-validation, tensorflow-model-analysis, tfx-bsl -backcall==0.1.0 # via ipython -bleach==3.1.1 # via nbconvert +avro-python3==1.9.2.1 # via apache-beam +backcall==0.2.0 # via ipython +bleach==3.1.5 # via nbconvert cachetools==3.1.1 # via apache-beam, google-auth -certifi==2019.11.28 # via requests +certifi==2020.6.20 # via kubernetes, requests chardet==3.0.4 # via requests -click==7.0 # via tfx +click==7.1.2 # via tfx +colorama==0.4.3 # via keras-tuner crcmod==1.7 # via apache-beam -decorator==4.4.1 # via ipython, traitlets +decorator==4.4.2 # via ipython, traitlets defusedxml==0.6.0 # via nbconvert -dill==0.3.0 # via apache-beam 
-docker==4.2.0 # via tfx +dill==0.3.1.1 # via apache-beam +docker==4.2.2 # via tfx docopt==0.6.2 # via hdfs entrypoints==0.3 # via nbconvert -fastavro==0.21.24 # via apache-beam +fastavro==0.23.5 # via apache-beam fasteners==0.15 # via google-apitools -future==0.18.2 # via apache-beam -gast==0.2.2 # via tensorflow -google-api-core[grpc]==1.16.0 # via google-cloud-bigtable, google-cloud-core, google-cloud-datastore, google-cloud-pubsub -google-api-python-client==1.7.11 # via tfx -google-apitools==0.5.28 # via apache-beam +future==0.18.2 # via apache-beam, keras-tuner +gast==0.3.3 # via tensorflow +google-api-core[grpc,grpcgcp]==1.21.0 # via google-api-python-client, google-cloud-bigquery, google-cloud-bigtable, google-cloud-core, google-cloud-datastore, google-cloud-dlp, google-cloud-language, google-cloud-pubsub, google-cloud-spanner, google-cloud-videointelligence, google-cloud-vision +google-api-python-client==1.9.3 # via tfx, tfx-bsl +google-apitools==0.5.31 # via apache-beam google-auth-httplib2==0.0.3 # via google-api-python-client google-auth-oauthlib==0.4.1 # via tensorboard -google-auth==1.11.2 # via google-api-core, google-api-python-client, google-auth-httplib2, google-auth-oauthlib, tensorboard -google-cloud-bigquery==1.17.1 # via apache-beam +google-auth==1.18.0 # via google-api-core, google-api-python-client, google-auth-httplib2, google-auth-oauthlib, google-cloud-bigquery, kubernetes, tensorboard +google-cloud-bigquery==1.24.0 # via apache-beam google-cloud-bigtable==1.0.0 # via apache-beam -google-cloud-core==1.3.0 # via apache-beam, google-cloud-bigquery, google-cloud-bigtable, google-cloud-datastore +google-cloud-core==1.3.0 # via apache-beam, google-cloud-bigquery, google-cloud-bigtable, google-cloud-datastore, google-cloud-spanner google-cloud-datastore==1.7.4 # via apache-beam +google-cloud-dlp==0.13.0 # via apache-beam +google-cloud-language==1.3.0 # via apache-beam google-cloud-pubsub==1.0.2 # via apache-beam -google-pasta==0.1.8 # via tensorflow -google-resumable-media==0.4.1 # via google-cloud-bigquery -googleapis-common-protos[grpc]==1.51.0 # via google-api-core, grpc-google-iam-v1, tensorflow-metadata -grpc-google-iam-v1==0.12.3 # via google-cloud-bigtable, google-cloud-pubsub -grpcio==1.27.1 # via apache-beam, google-api-core, googleapis-common-protos, grpc-google-iam-v1, tensorboard, tensorflow, tensorflow-serving-api, tfx -h5py==2.10.0 # via keras-applications +google-cloud-spanner==1.13.0 # via apache-beam +google-cloud-videointelligence==1.13.0 # via apache-beam +google-cloud-vision==0.42.0 # via apache-beam +google-pasta==0.2.0 # via tensorflow +google-resumable-media==0.5.1 # via google-cloud-bigquery +googleapis-common-protos[grpc]==1.52.0 # via google-api-core, grpc-google-iam-v1, tensorflow-metadata +grpc-google-iam-v1==0.12.3 # via google-cloud-bigtable, google-cloud-pubsub, google-cloud-spanner +grpcio-gcp==0.2.2 # via apache-beam, google-api-core +grpcio==1.30.0 # via apache-beam, google-api-core, googleapis-common-protos, grpc-google-iam-v1, grpcio-gcp, tensorboard, tensorflow, tensorflow-serving-api, tfx +h5py==2.10.0 # via tensorflow hdfs==2.5.8 # via apache-beam -httplib2==0.12.0 # via apache-beam, google-api-python-client, google-apitools, google-auth-httplib2, oauth2client -idna==2.9 # via requests -importlib-metadata==1.5.0 # via jsonschema -ipykernel==5.1.4 # via ipywidgets, jupyter, jupyter-console, notebook, qtconsole +httplib2==0.17.4 # via apache-beam, google-api-python-client, google-apitools, google-auth-httplib2, oauth2client 
+idna==2.10 # via requests +importlib-metadata==1.7.0 # via jsonschema, markdown +ipykernel==5.3.0 # via ipywidgets, jupyter, jupyter-console, notebook, qtconsole ipython-genutils==0.2.0 # via nbformat, notebook, qtconsole, traitlets -ipython==7.9.0 # via ipykernel, ipywidgets, jupyter-console, tensorflow-data-validation +ipython==7.9.0 # via ipykernel, ipywidgets, jupyter-console ipywidgets==7.5.1 # via jupyter, tensorflow-model-analysis -jedi==0.16.0 # via ipython -jinja2==2.11.1 # via nbconvert, notebook, tfx +jedi==0.17.1 # via ipython +jinja2==2.11.2 # via nbconvert, notebook, tfx joblib==0.14.1 # via scikit-learn, tensorflow-data-validation jsonschema==3.2.0 # via nbformat -jupyter-client==6.0.0 # via ipykernel, jupyter-console, notebook, qtconsole +jupyter-client==6.1.5 # via ipykernel, jupyter-console, notebook, qtconsole jupyter-console==6.1.0 # via jupyter jupyter-core==4.6.3 # via jupyter-client, nbconvert, nbformat, notebook, qtconsole jupyter==1.0.0 # via tensorflow-model-analysis -keras-applications==1.0.8 # via tensorflow -keras-preprocessing==1.1.0 # via tensorflow -markdown==3.2.1 # via tensorboard +keras-preprocessing==1.1.2 # via tensorflow +keras-tuner==1.0.1 # via tfx +kubernetes==11.0.0 # via tfx +markdown==3.2.2 # via tensorboard markupsafe==1.1.1 # via jinja2 mistune==0.8.4 # via nbconvert -ml-metadata==0.21.2 # via tfx +ml-metadata==0.22.1 # via tfx mock==2.0.0 # via apache-beam monotonic==1.5 # via fasteners nbconvert==5.6.1 # via jupyter, notebook -nbformat==5.0.4 # via ipywidgets, nbconvert, notebook +nbformat==5.0.7 # via ipywidgets, nbconvert, notebook notebook==6.0.3 # via jupyter, widgetsnbextension -numpy==1.18.1 # via h5py, keras-applications, keras-preprocessing, opt-einsum, pandas, pyarrow, scikit-learn, scipy, tensorboard, tensorflow, tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx-bsl +numpy==1.18.5 # via apache-beam, h5py, keras-preprocessing, keras-tuner, opt-einsum, pandas, pyarrow, scikit-learn, scipy, tensorboard, tensorflow, tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx-bsl oauth2client==3.0.0 # via apache-beam, google-apitools oauthlib==3.1.0 # via requests-oauthlib -opt-einsum==3.1.0 # via tensorflow -pandas==0.25.3 # via tensorflow-data-validation, tensorflow-model-analysis +opt-einsum==3.2.1 # via tensorflow +packaging==20.4 # via bleach +pandas==0.25.3 # via tensorflow-data-validation, tensorflow-model-analysis, tfx-bsl pandocfilters==1.4.2 # via nbconvert -parso==0.6.1 # via jedi -pbr==5.4.4 # via mock +parso==0.7.0 # via jedi +pbr==5.4.5 # via mock pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -prometheus-client==0.7.1 # via notebook +prometheus-client==0.8.0 # via notebook prompt-toolkit==2.0.10 # via ipython, jupyter-console -protobuf==3.11.3 # via apache-beam, google-api-core, google-cloud-bigquery, googleapis-common-protos, ml-metadata, tensorboard, tensorflow, tensorflow-data-validation, tensorflow-metadata, tensorflow-model-analysis, tensorflow-serving-api, tensorflow-transform, tfx, tfx-bsl +protobuf==3.12.2 # via apache-beam, google-api-core, google-cloud-bigquery, googleapis-common-protos, ml-metadata, tensorboard, tensorflow, tensorflow-data-validation, tensorflow-metadata, tensorflow-model-analysis, tensorflow-serving-api, tensorflow-transform, tfx, tfx-bsl ptyprocess==0.6.0 # via pexpect, terminado -pyarrow==0.15.1 # via apache-beam, tensorflow-data-validation, tensorflow-model-analysis, tfx, tfx-bsl +pyarrow==0.16.0 # via apache-beam, 
tensorflow-data-validation, tensorflow-model-analysis, tfx, tfx-bsl pyasn1-modules==0.2.8 # via google-auth, oauth2client pyasn1==0.4.8 # via oauth2client, pyasn1-modules, rsa pydot==1.4.1 # via apache-beam, tensorflow-transform -pygments==2.5.2 # via ipython, jupyter-console, nbconvert, qtconsole +pygments==2.6.1 # via ipython, jupyter-console, nbconvert, qtconsole pymongo==3.10.1 # via apache-beam -pyparsing==2.4.6 # via pydot -pyrsistent==0.15.7 # via jsonschema -python-dateutil==2.8.1 # via apache-beam, jupyter-client, pandas -pytz==2019.3 # via apache-beam, google-api-core, pandas -pyyaml==5.3 # via tfx -pyzmq==19.0.0 # via jupyter-client, notebook -qtconsole==4.6.0 # via jupyter -requests-oauthlib==1.3.0 # via google-auth-oauthlib -requests==2.23.0 # via docker, google-api-core, hdfs, requests-oauthlib, tensorboard -rsa==4.0 # via google-auth, oauth2client -scikit-learn==0.21.3 # via tensorflow-data-validation -scipy==1.4.1 # via scikit-learn, tensorflow, tensorflow-model-analysis +pyparsing==2.4.7 # via packaging, pydot +pyrsistent==0.16.0 # via jsonschema +python-dateutil==2.8.1 # via apache-beam, jupyter-client, kubernetes, pandas +pytz==2020.1 # via apache-beam, fastavro, google-api-core, pandas +pyyaml==5.3.1 # via kubernetes, tfx +pyzmq==19.0.1 # via jupyter-client, notebook, qtconsole +qtconsole==4.7.5 # via jupyter +qtpy==1.9.0 # via qtconsole +requests-oauthlib==1.3.0 # via google-auth-oauthlib, kubernetes +requests==2.24.0 # via docker, google-api-core, hdfs, keras-tuner, kubernetes, requests-oauthlib, tensorboard +rsa==4.6 # via google-auth, oauth2client +scikit-learn==0.22.2.post1 # via keras-tuner +scipy==1.4.1 # via keras-tuner, scikit-learn, tensorflow, tensorflow-model-analysis send2trash==1.5.0 # via notebook -six==1.14.0 # via absl-py, bleach, docker, fasteners, google-api-core, google-api-python-client, google-apitools, google-auth, google-pasta, google-resumable-media, grpcio, h5py, hdfs, jsonschema, keras-preprocessing, ml-metadata, mock, oauth2client, prompt-toolkit, protobuf, pyarrow, pyrsistent, python-dateutil, tensorboard, tensorflow, tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx, tfx-bsl, traitlets, websocket-client -tensorboard==2.1.0 # via tensorflow -tensorflow-data-validation==0.21.4 # via tfx -tensorflow-estimator==2.1.0 # via tensorflow -tensorflow-metadata==0.21.1 # via tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx-bsl -tensorflow-model-analysis==0.21.5 # via -r requirements.in (line 2), tfx -tensorflow-serving-api==2.1.0 # via tfx, tfx-bsl -tensorflow-transform==0.21.2 # via tensorflow-data-validation, tfx -tensorflow==2.1.0 # via ml-metadata, tensorflow-data-validation, tensorflow-model-analysis, tensorflow-serving-api, tensorflow-transform, tfx, tfx-bsl +six==1.15.0 # via absl-py, astunparse, bleach, docker, fasteners, google-api-core, google-api-python-client, google-apitools, google-auth, google-cloud-bigquery, google-pasta, google-resumable-media, grpcio, h5py, hdfs, jsonschema, keras-preprocessing, kubernetes, ml-metadata, mock, oauth2client, packaging, prompt-toolkit, protobuf, pyarrow, pyrsistent, python-dateutil, tensorboard, tensorflow, tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx, tfx-bsl, traitlets, websocket-client +tabulate==0.8.7 # via keras-tuner +tensorboard-plugin-wit==1.7.0 # via tensorboard +tensorboard==2.2.2 # via tensorflow +tensorflow-data-validation==0.22.2 # via tfx +tensorflow-estimator==2.2.0 # via tensorflow 
+tensorflow-metadata==0.22.2 # via tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx-bsl +tensorflow-model-analysis==0.22.2 # via tfx +tensorflow-serving-api==2.2.0 # via tfx, tfx-bsl +tensorflow-transform==0.22.0 # via tensorflow-data-validation, tfx +tensorflow==2.2.0 # via ml-metadata, tensorflow-data-validation, tensorflow-model-analysis, tensorflow-serving-api, tensorflow-transform, tfx, tfx-bsl termcolor==1.1.0 # via tensorflow terminado==0.8.3 # via notebook +terminaltables==3.1.0 # via keras-tuner testpath==0.4.4 # via nbconvert -tfx-bsl==0.21.3 # via tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx -tfx==0.21.2 # via -r requirements.in (line 1) -tornado==6.0.3 # via ipykernel, jupyter-client, notebook, terminado +tfx-bsl==0.22.1 # via tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx +tfx==0.22.0 # via -r - +tornado==6.0.4 # via ipykernel, jupyter-client, notebook, terminado +tqdm==4.47.0 # via keras-tuner traitlets==4.3.3 # via ipykernel, ipython, ipywidgets, jupyter-client, jupyter-core, nbconvert, nbformat, notebook, qtconsole +typing-extensions==3.7.4.2 # via apache-beam +typing==3.7.4.1 # via apache-beam uritemplate==3.0.1 # via google-api-python-client -urllib3==1.25.8 # via requests -wcwidth==0.1.8 # via prompt-toolkit +urllib3==1.25.9 # via kubernetes, requests +wcwidth==0.2.5 # via prompt-toolkit webencodings==0.5.1 # via bleach -websocket-client==0.57.0 # via docker -werkzeug==1.0.0 # via tensorboard -wheel==0.34.2 # via tensorboard, tensorflow +websocket-client==0.57.0 # via docker, kubernetes +werkzeug==1.0.1 # via tensorboard +wheel==0.34.2 # via astunparse, tensorboard, tensorflow widgetsnbextension==3.5.1 # via ipywidgets -wrapt==1.12.0 # via tensorflow +wrapt==1.12.1 # via tensorflow zipp==1.2.0 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: diff --git a/backend/src/apiserver/main.go b/backend/src/apiserver/main.go index 67c1baf66..96bc147fb 100644 --- a/backend/src/apiserver/main.go +++ b/backend/src/apiserver/main.go @@ -216,6 +216,8 @@ func initConfig() { replacer := strings.NewReplacer(".", "_") viper.SetEnvKeyReplacer(replacer) viper.AutomaticEnv() + // We need empty string env var for e.g. KUBEFLOW_USERID_PREFIX. + viper.AllowEmptyEnv(true) // Set configuration file name. The format is auto detected in this case. viper.SetConfigName("config") diff --git a/backend/src/apiserver/model/BUILD.bazel b/backend/src/apiserver/model/BUILD.bazel index 6e9794102..b3c43935c 100644 --- a/backend/src/apiserver/model/BUILD.bazel +++ b/backend/src/apiserver/model/BUILD.bazel @@ -24,6 +24,7 @@ go_test( srcs = [ "pipeline_version_test.go", "resource_reference_test.go", + "run_test.go", ], embed = [":go_default_library"], importpath = "github.com/kubeflow/pipelines/backend/src/apiserver/model", diff --git a/backend/src/apiserver/model/run.go b/backend/src/apiserver/model/run.go index 5de92b207..079e69602 100644 --- a/backend/src/apiserver/model/run.go +++ b/backend/src/apiserver/model/run.go @@ -77,6 +77,7 @@ var runAPIToModelFieldMap = map[string]string{ "description": "Description", "scheduled_at": "ScheduledAtInSec", "storage_state": "StorageState", + "status": "Conditions", } // APIToModelFieldMap returns a map from API names to field names for model Run. 
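Note on the run.go hunk above: mapping the API field "status" to the model column "Conditions" is what allows ListRuns filters on run status to be translated into SQL; the new run_test.go that follows verifies the generated WHERE clause. For the client-facing side, a minimal sketch using the generated kfp_server_api package introduced by this PR might look like the code below. The host value, the JSON encoding of api.Filter (field and enum spellings), and the exact "filter" keyword argument on list_runs are assumptions for illustration, not part of this change.

# Hypothetical usage sketch (not part of this patch): list only succeeded runs by
# filtering on "status", which the server now maps to the Conditions column.
import json

import kfp_server_api
from kfp_server_api.api.run_service_api import RunServiceApi

# Assumed endpoint; point this at wherever the ml-pipeline API server is reachable.
configuration = kfp_server_api.Configuration(host='http://localhost:8888')
api_client = kfp_server_api.ApiClient(configuration)
run_api = RunServiceApi(api_client)

# The filter is a JSON-serialized api.Filter; the predicate shape below is assumed
# to follow the proto JSON mapping accepted by the API server.
status_filter = json.dumps({
    'predicates': [{
        'key': 'status',
        'op': 'EQUALS',
        'string_value': 'Succeeded',
    }]
})

response = run_api.list_runs(page_size=10, filter=status_filter)
for run in response.runs or []:
    print(run.name, run.status)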
diff --git a/backend/src/apiserver/model/run_test.go b/backend/src/apiserver/model/run_test.go new file mode 100644 index 000000000..fb379352f --- /dev/null +++ b/backend/src/apiserver/model/run_test.go @@ -0,0 +1,51 @@ +package model + +import ( + "testing" + + sq "github.com/Masterminds/squirrel" + api "github.com/kubeflow/pipelines/backend/api/go_client" + "github.com/kubeflow/pipelines/backend/src/apiserver/list" + "github.com/stretchr/testify/assert" +) + +// Test model name usage in sorting clause +func TestAddStatusFilterToSelect(t *testing.T) { + listable := &Run{ + UUID: "run_id_1", + CreatedAtInSec: 1, + Name: "run_name_1", + Conditions: "Succeeded", + } + protoFilter := &api.Filter{} + protoFilter.Predicates = []*api.Predicate{ + { + Key: "status", + Op: api.Predicate_EQUALS, + Value: &api.Predicate_StringValue{StringValue: "Succeeded"}, + }, + } + listableOptions, err := list.NewOptions(listable, 10, "name", protoFilter) + assert.Nil(t, err) + sqlBuilder := sq.Select("*").From("run_details") + sql, args, err := listableOptions.AddFilterToSelect(sqlBuilder).ToSql() + assert.Nil(t, err) + assert.Contains(t, sql, "WHERE Conditions = ?") // filtering on status, aka Conditions in db + assert.Contains(t, args, "Succeeded") + + notEqualProtoFilter := &api.Filter{} + notEqualProtoFilter.Predicates = []*api.Predicate{ + { + Key: "status", + Op: api.Predicate_NOT_EQUALS, + Value: &api.Predicate_StringValue{StringValue: "somevalue"}, + }, + } + listableOptions, err = list.NewOptions(listable, 10, "name", notEqualProtoFilter) + assert.Nil(t, err) + sqlBuilder = sq.Select("*").From("run_details") + sql, args, err = listableOptions.AddFilterToSelect(sqlBuilder).ToSql() + assert.Nil(t, err) + assert.Contains(t, sql, "WHERE Conditions <> ?") // filtering on status, aka Conditions in db + assert.Contains(t, args, "somevalue") +} diff --git a/backend/src/apiserver/server/auth_server_test.go b/backend/src/apiserver/server/auth_server_test.go index b6797d28b..9e67817b7 100644 --- a/backend/src/apiserver/server/auth_server_test.go +++ b/backend/src/apiserver/server/auth_server_test.go @@ -96,3 +96,26 @@ func TestAuthorizeRequest_Unauthorized(t *testing.T) { assert.Error(t, err) assert.EqualError(t, err, "Failed to authorize the request: Failed to authorize namespace: BadRequestError: Unauthorized access for user@google.com to namespace ns1: Unauthorized access for user@google.com to namespace ns1") } + +func TestAuthorizeRequest_EmptyUserIdPrefix(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + viper.Set(common.KubeflowUserIDPrefix, "") + defer viper.Set(common.KubeflowUserIDPrefix, common.GoogleIAPUserIdentityPrefix) + + clients, manager, _ := initWithExperiment(t) + defer clients.Close() + authServer := AuthServer{resourceManager: manager} + + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: "user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + + request := &api.AuthorizeRequest{ + Namespace: "ns1", + Resources: api.AuthorizeRequest_VIEWERS, + Verb: api.AuthorizeRequest_GET, + } + + _, err := authServer.Authorize(ctx, request) + assert.Nil(t, err) +} diff --git a/backend/src/apiserver/server/pipeline_server.go b/backend/src/apiserver/server/pipeline_server.go index 635b5e0b3..5faf11ce9 100644 --- a/backend/src/apiserver/server/pipeline_server.go +++ b/backend/src/apiserver/server/pipeline_server.go @@ -54,7 +54,7 @@ func (s *PipelineServer) CreatePipeline(ctx context.Context, request 
*api.Create return nil, util.Wrap(err, "Invalid pipeline name.") } - pipeline, err := s.resourceManager.CreatePipeline(pipelineName, "", pipelineFile) + pipeline, err := s.resourceManager.CreatePipeline(pipelineName, request.Pipeline.Description, pipelineFile) if err != nil { return nil, util.Wrap(err, "Create pipeline failed.") } @@ -166,6 +166,11 @@ func (s *PipelineServer) ListPipelineVersions(ctx context.Context, request *api. return nil, util.Wrap(err, "Failed to create list options") } + //Ensure resourceKey has been set + if request.ResourceKey == nil { + return nil, util.NewInvalidInputError("ResourceKey must be set in the input") + } + pipelineVersions, total_size, nextPageToken, err := s.resourceManager.ListPipelineVersions(request.ResourceKey.Id, opts) if err != nil { diff --git a/backend/src/apiserver/server/pipeline_server_test.go b/backend/src/apiserver/server/pipeline_server_test.go index 62679969f..bcaa8cdad 100644 --- a/backend/src/apiserver/server/pipeline_server_test.go +++ b/backend/src/apiserver/server/pipeline_server_test.go @@ -29,6 +29,7 @@ func TestCreatePipeline_YAML(t *testing.T) { Pipeline: &api.Pipeline{ Url: &api.Url{PipelineUrl: httpServer.URL + "/arguments-parameters.yaml"}, Name: "argument-parameters", + Description: "pipeline description", }}) assert.Nil(t, err) @@ -41,6 +42,7 @@ func TestCreatePipeline_YAML(t *testing.T) { err = json.Unmarshal([]byte(newPipeline.Parameters), ¶ms) assert.Nil(t, err) assert.Equal(t, []api.Parameter{{Name: "param1", Value: "hello"}, {Name: "param2"}}, params) + assert.Equal(t, "pipeline description", newPipeline.Description) } func TestCreatePipeline_Tarball(t *testing.T) { @@ -56,6 +58,7 @@ func TestCreatePipeline_Tarball(t *testing.T) { Pipeline: &api.Pipeline{ Url: &api.Url{PipelineUrl: httpServer.URL + "/arguments_tarball/arguments.tar.gz"}, Name: "argument-parameters", + Description: "pipeline description", }}) assert.Nil(t, err) @@ -68,6 +71,7 @@ func TestCreatePipeline_Tarball(t *testing.T) { err = json.Unmarshal([]byte(newPipeline.Parameters), ¶ms) assert.Nil(t, err) assert.Equal(t, []api.Parameter{{Name: "param1", Value: "hello"}, {Name: "param2"}}, params) + assert.Equal(t, "pipeline description", newPipeline.Description) } func TestCreatePipeline_InvalidYAML(t *testing.T) { @@ -240,6 +244,24 @@ func TestCreatePipelineVersion_InvalidURL(t *testing.T) { assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode()) } +func TestListPipelineVersion_NoResourceKey(t *testing.T){ + httpServer := getMockServer(t) + // Close the server when test finishes + defer httpServer.Close() + + clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + resourceManager := resource.NewResourceManager(clientManager) + + pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client()} + + + _, err := pipelineServer.ListPipelineVersions(context.Background(), &api.ListPipelineVersionsRequest{ + ResourceKey: nil, + PageSize: 20, + }) + assert.Equal(t, "Invalid input error: ResourceKey must be set in the input", err.Error()) +} + func getMockServer(t *testing.T) *httptest.Server { httpServer := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { // Send response to be tested diff --git a/backend/src/apiserver/server/util.go b/backend/src/apiserver/server/util.go index 7450ab216..cce90f1e7 100644 --- a/backend/src/apiserver/server/util.go +++ b/backend/src/apiserver/server/util.go @@ -275,6 +275,18 @@ func 
CheckPipelineVersionReference(resourceManager *resource.ResourceManager, re return &pipelineVersionId, nil } +func getUserIdentityFromHeader(userIdentityHeader, prefix string) (string, error) { + if len(userIdentityHeader) > len(prefix) && userIdentityHeader[:len(prefix)] == prefix { + return userIdentityHeader[len(prefix):], nil + } + return "", util.NewBadRequestError( + errors.New("Request header error: user identity value is incorrectly formatted"), + "Request header error: user identity value is incorrectly formatted. Expected prefix '%s', but got the header '%s'", + prefix, + userIdentityHeader, + ) +} + func getUserIdentity(ctx context.Context) (string, error) { if ctx == nil { return "", util.NewBadRequestError(errors.New("Request error: context is nil"), "Request error: context is nil.") @@ -287,12 +299,7 @@ func getUserIdentity(ctx context.Context) (string, error) { return "", util.NewBadRequestError(errors.New("Request header error: unexpected number of user identity header. Expect 1 got "+strconv.Itoa(len(userIdentityHeader))), "Request header error: unexpected number of user identity header. Expect 1 got "+strconv.Itoa(len(userIdentityHeader))) } - userIdentityHeaderFields := strings.Split(userIdentityHeader[0], ":") - if len(userIdentityHeaderFields) != 2 { - return "", util.NewBadRequestError(errors.New("Request header error: user identity value is incorrectly formatted"), - "Request header error: user identity value is incorrectly formatted") - } - return userIdentityHeaderFields[1], nil + return getUserIdentityFromHeader(userIdentityHeader[0], common.GetKubeflowUserIDPrefix()) } return "", util.NewBadRequestError(errors.New("Request header error: there is no user identity header."), "Request header error: there is no user identity header.") } diff --git a/backend/src/apiserver/server/util_test.go b/backend/src/apiserver/server/util_test.go index 34dd5010a..fce21a9bb 100644 --- a/backend/src/apiserver/server/util_test.go +++ b/backend/src/apiserver/server/util_test.go @@ -345,6 +345,34 @@ func TestGetUserIdentity(t *testing.T) { assert.Equal(t, "user@google.com", userIdentity) } +func TestGetUserIdentityError(t *testing.T) { + md := metadata.New(map[string]string{"no-identity-header": "user"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + _, err := getUserIdentity(ctx) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "Request header error: there is no user identity header.") +} + +func TestGetUserIdentityFromHeaderGoogle(t *testing.T) { + userIdentity, err := getUserIdentityFromHeader(common.GoogleIAPUserIdentityPrefix+"user@google.com", common.GoogleIAPUserIdentityPrefix) + assert.Nil(t, err) + assert.Equal(t, "user@google.com", userIdentity) +} + +func TestGetUserIdentityFromHeaderNonGoogle(t *testing.T) { + prefix := "" + userIdentity, err := getUserIdentityFromHeader(prefix+"user", prefix) + assert.Nil(t, err) + assert.Equal(t, "user", userIdentity) +} + +func TestGetUserIdentityFromHeaderError(t *testing.T) { + prefix := "expected-prefix" + _, err := getUserIdentityFromHeader("user", prefix) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "Request header error: user identity value is incorrectly formatted") +} + func TestCanAccessNamespaceInResourceReferences_Unauthorized(t *testing.T) { viper.Set(common.MultiUserMode, "true") defer viper.Set(common.MultiUserMode, "false") diff --git a/backend/src/apiserver/storage/experiment_store.go b/backend/src/apiserver/storage/experiment_store.go index bc3ed2b1d..e0aa60e32 100644 --- 
a/backend/src/apiserver/storage/experiment_store.go +++ b/backend/src/apiserver/storage/experiment_store.go @@ -194,7 +194,7 @@ func (s *ExperimentStore) CreateExperiment(experiment *model.Experiment) (*model _, err = s.db.Exec(sql, args...) if err != nil { if s.db.IsDuplicateError(err) { - return nil, util.NewInvalidInputError( + return nil, util.NewAlreadyExistError( "Failed to create a new experiment. The name %v already exists. Please specify a new name.", experiment.Name) } return nil, util.NewInternalServerError(err, "Failed to add experiment to experiment table: %v", diff --git a/backend/src/apiserver/storage/pipeline_store.go b/backend/src/apiserver/storage/pipeline_store.go index 7662d17e5..6839887ef 100644 --- a/backend/src/apiserver/storage/pipeline_store.go +++ b/backend/src/apiserver/storage/pipeline_store.go @@ -331,7 +331,7 @@ func (s *PipelineStore) CreatePipeline(p *model.Pipeline) (*model.Pipeline, erro if err != nil { if s.db.IsDuplicateError(err) { tx.Rollback() - return nil, util.NewInvalidInputError( + return nil, util.NewAlreadyExistError( "Failed to create a new pipeline. The name %v already exist. Please specify a new name.", p.Name) } tx.Rollback() @@ -342,7 +342,7 @@ func (s *PipelineStore) CreatePipeline(p *model.Pipeline) (*model.Pipeline, erro if err != nil { if s.db.IsDuplicateError(err) { tx.Rollback() - return nil, util.NewInvalidInputError( + return nil, util.NewAlreadyExistError( `Failed to create a new pipeline version. The name %v already exist. Please specify a new name.`, p.DefaultVersion.Name) } @@ -500,7 +500,7 @@ func (s *PipelineStore) CreatePipelineVersion(v *model.PipelineVersion) (*model. if err != nil { tx.Rollback() if s.db.IsDuplicateError(err) { - return nil, util.NewInvalidInputError( + return nil, util.NewAlreadyExistError( "Failed to create a new pipeline version. The name %v already exist. Please specify a new name.", v.Name) } return nil, util.NewInternalServerError(err, "Failed to add version to pipeline version table: %v", @@ -603,7 +603,7 @@ func (s *PipelineStore) ListPipelineVersions(pipelineId string, opts *list.Optio } buildQuery := func(sqlBuilder sq.SelectBuilder) sq.SelectBuilder { - return sqlBuilder. + return opts.AddFilterToSelect(sqlBuilder). From("pipeline_versions"). Where(sq.And{sq.Eq{"PipelineId": pipelineId}, sq.Eq{"status": model.PipelineVersionReady}}) } diff --git a/backend/src/apiserver/storage/pipeline_store_test.go b/backend/src/apiserver/storage/pipeline_store_test.go index b0f65ec4d..5a22b5786 100644 --- a/backend/src/apiserver/storage/pipeline_store_test.go +++ b/backend/src/apiserver/storage/pipeline_store_test.go @@ -1195,6 +1195,89 @@ func TestListPipelineVersions_Pagination_LessThanPageSize(t *testing.T) { }) } +func TestListPipelineVersions_WithFilter(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + pipelineStore := NewPipelineStore( + db, + util.NewFakeTimeForEpoch(), + util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil)) + + // Create a pipeline. + pipelineStore.CreatePipeline( + &model.Pipeline{ + Name: "pipeline_1", + Parameters: `[{"Name": "param1"}]`, + Status: model.PipelineReady, + }) + + // Create "version_1" with fakeUUIDTwo. + pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDTwo, nil) + pipelineStore.CreatePipelineVersion( + &model.PipelineVersion{ + Name: "pipeline_version_1", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionReady, + }) + + // Create "version_2" with fakeUUIDThree. 
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDThree, nil) + pipelineStore.CreatePipelineVersion( + &model.PipelineVersion{ + Name: "pipeline_version_2", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionReady, + }) + + // Filter for name being equal to pipeline_version_1 + equalFilterProto := &api.Filter{ + Predicates: []*api.Predicate{ + &api.Predicate{ + Key: "name", + Op: api.Predicate_EQUALS, + Value: &api.Predicate_StringValue{StringValue: "pipeline_version_1"}, + }, + }, + } + + // Filter for name prefix being pipeline_version + prefixFilterProto := &api.Filter{ + Predicates: []*api.Predicate{ + &api.Predicate{ + Key: "name", + Op: api.Predicate_IS_SUBSTRING, + Value: &api.Predicate_StringValue{StringValue: "pipeline_version"}, + }, + }, + } + + // Only return 1 pipeline version with equal filter. + opts, err := list.NewOptions(&model.PipelineVersion{}, 10, "id", equalFilterProto) + assert.Nil(t, err) + _, totalSize, nextPageToken, err := pipelineStore.ListPipelineVersions(fakeUUID, opts) + assert.Nil(t, err) + assert.Equal(t, "", nextPageToken) + assert.Equal(t, 1, totalSize) + + // Return 2 pipeline versions without filter. + opts, err = list.NewOptions(&model.PipelineVersion{}, 10, "id", nil) + assert.Nil(t, err) + _, totalSize, nextPageToken, err = pipelineStore.ListPipelineVersions(fakeUUID, opts) + assert.Nil(t, err) + assert.Equal(t, "", nextPageToken) + assert.Equal(t, 2, totalSize) + + // Return 2 pipeline versions with prefix filter. + opts, err = list.NewOptions(&model.PipelineVersion{}, 10, "id", prefixFilterProto) + assert.Nil(t, err) + _, totalSize, nextPageToken, err = pipelineStore.ListPipelineVersions(fakeUUID, opts) + assert.Nil(t, err) + assert.Equal(t, "", nextPageToken) + assert.Equal(t, 2, totalSize) +} + func TestListPipelineVersionsError(t *testing.T) { db := NewFakeDbOrFatal() defer db.Close() diff --git a/backend/src/apiserver/visualization/requirements.txt b/backend/src/apiserver/visualization/requirements.txt index ae27d4dd0..715bf7f1c 100644 --- a/backend/src/apiserver/visualization/requirements.txt +++ b/backend/src/apiserver/visualization/requirements.txt @@ -14,3 +14,5 @@ tensorflow-metadata==0.21.1 tensorflow-model-analysis==0.21.5 tensorflow-data-validation==0.21.1 tornado==6.0.2 +# Increase google-api-core to some release AFTER 1.21.0 when it becomes available +google-api-core==1.16.0 diff --git a/backend/src/apiserver/visualization/third_party_licenses.csv b/backend/src/apiserver/visualization/third_party_licenses.csv index 6463f4a09..00f733f12 100644 --- a/backend/src/apiserver/visualization/third_party_licenses.csv +++ b/backend/src/apiserver/visualization/third_party_licenses.csv @@ -54,7 +54,7 @@ grpcio-gcp,https://raw.githubusercontent.com/GoogleCloudPlatform/grpc-gcp-python h5py,https://raw.githubusercontent.com/h5py/h5py/master/LICENSE,BSD-3 hdfs,https://raw.githubusercontent.com/mtth/hdfs/master/LICENSE,MIT httplib2,https://raw.githubusercontent.com/httplib2/httplib2/master/LICENSE,MIT -idna,https://raw.githubusercontent.com/kjd/idna/master/LICENSE.rst,BSD-3 +idna,https://raw.githubusercontent.com/kjd/idna/master/LICENSE.md,BSD-3 importlib-metadata,https://gitlab.com/python-devs/importlib_metadata/master/LICENSE,Apache 2.0 ipykernel,https://raw.githubusercontent.com/ipython/ipykernel/master/COPYING.md,BSD-3 ipython,https://raw.githubusercontent.com/ipython/ipython/master/LICENSE,BSD-3 diff --git a/backend/src/cache/deployer/deploy-cache-service.sh 
b/backend/src/cache/deployer/deploy-cache-service.sh index 268006201..155039321 100755 --- a/backend/src/cache/deployer/deploy-cache-service.sh +++ b/backend/src/cache/deployer/deploy-cache-service.sh @@ -15,7 +15,7 @@ # limitations under the License. # This script is for deploying cache service to an existing cluster. -# Prerequisite: config kubectl to talk to your cluster. See ref below: +# Prerequisite: config kubectl to talk to your cluster. See ref below: # https://cloud.google.com/kubernetes-engine/docs/how-to/cluster-access-for-kubectl set -ex @@ -24,25 +24,47 @@ echo "Start deploying cache service to existing cluster:" NAMESPACE=${NAMESPACE_TO_WATCH:-kubeflow} MUTATING_WEBHOOK_CONFIGURATION_NAME="cache-webhook-${NAMESPACE}" +WEBHOOK_SECRET_NAME=webhook-server-tls # This should fail if there are connectivity problems # Gotcha: Listing all objects requires list permission, # but when listing a single oblect kubecttl will fail if it's not found # unless --ignore-not-found is specified. kubectl get mutatingwebhookconfigurations "${MUTATING_WEBHOOK_CONFIGURATION_NAME}" --namespace "${NAMESPACE}" --ignore-not-found >webhooks.txt +kubectl get secrets "${WEBHOOK_SECRET_NAME}" --namespace "${NAMESPACE}" --ignore-not-found >cache_secret.txt +webhook_config_exists=false if grep "${MUTATING_WEBHOOK_CONFIGURATION_NAME}" -w webhooks.txt @@ -65,5 +87,5 @@ while true; do else echo "Webhook is not visible yet. Waiting a bit." sleep 10s - fi + fi done diff --git a/backend/src/common/util/workflow.go b/backend/src/common/util/workflow.go index b10e5d589..c2ffbe712 100644 --- a/backend/src/common/util/workflow.go +++ b/backend/src/common/util/workflow.go @@ -1,4 +1,4 @@ -// Copyright 2018 Google LLC +// Copyright 2020 kubeflow.org // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -258,25 +258,15 @@ func (w *Workflow) FindObjectStoreArtifactKeyOrEmpty(nodeID string, artifactName if w.Status.PipelineRunStatusFields.TaskRuns == nil { return "" } - node, found := w.Status.PipelineRunStatusFields.TaskRuns[nodeID] - if !found { - return "" - } - if node.Status == nil || node.Status.TaskRunResults == nil { - return "" - } var s3Key string - for _, artifact := range node.Status.TaskRunResults { - if artifact.Name != artifactName { - continue - } - s3Key = "artifacts/" + w.ObjectMeta.Name + "/" + nodeID + "/" + artifactName + ".tgz" - } + s3Key = "artifacts/" + w.ObjectMeta.Name + "/" + nodeID + "/" + artifactName + ".tgz" return s3Key } // IsInFinalState whether the workflow is in a final state. func (w *Workflow) IsInFinalState() bool { + // Workflows in the statuses other than pending or running are considered final. + if len(w.Status.Status.Conditions) > 0 { finalConditions := map[string]int{ "Succeeded": 1, diff --git a/backend/test/integration/upgrade_test.go b/backend/test/integration/upgrade_test.go index 4245f229c..a732f09c9 100644 --- a/backend/test/integration/upgrade_test.go +++ b/backend/test/integration/upgrade_test.go @@ -2,6 +2,7 @@ package integration import ( "io/ioutil" + "sort" "testing" "time" @@ -60,12 +61,8 @@ func (s *UpgradeTests) TestPrepare() { func (s *UpgradeTests) TestVerify() { s.VerifyExperiments() s.VerifyPipelines() - // TODO(jingzhang36): temporarily comment out the verification of runs and - // jobs since this PR changes the API response and hence a diff between the - // response from previous release and that from this PR is expected. 
- // Will put them back after the next release is cut. - // s.VerifyRuns() - // s.VerifyJobs() + s.VerifyRuns() + s.VerifyJobs() } // Check the namespace have ML job installed and ready @@ -341,6 +338,7 @@ func (s *UpgradeTests) VerifyJobs() { Name: "hello-world.yaml", Relationship: job_model.APIRelationshipCREATOR, }, }, + ServiceAccount: "pipeline-runner", MaxConcurrency: 10, NoCatchup: true, Enabled: true, @@ -350,6 +348,8 @@ func (s *UpgradeTests) VerifyJobs() { Trigger: &job_model.APITrigger{}, } + sort.Sort(JobResourceReferenceSorter(job.ResourceReferences)) + sort.Sort(JobResourceReferenceSorter(expectedJob.ResourceReferences)) assert.Equal(t, expectedJob, job) } @@ -377,10 +377,13 @@ func checkHelloWorldRunDetail(t *testing.T, runDetail *run_model.APIRunDetail) { Name: "hello-world.yaml", Relationship: run_model.APIRelationshipCREATOR, }, }, - CreatedAt: runDetail.Run.CreatedAt, - ScheduledAt: runDetail.Run.ScheduledAt, - FinishedAt: runDetail.Run.FinishedAt, + ServiceAccount: "pipeline-runner", + CreatedAt: runDetail.Run.CreatedAt, + ScheduledAt: runDetail.Run.ScheduledAt, + FinishedAt: runDetail.Run.FinishedAt, } + sort.Sort(RunResourceReferenceSorter(expectedRun.ResourceReferences)) + sort.Sort(RunResourceReferenceSorter(runDetail.Run.ResourceReferences)) assert.Equal(t, expectedRun, runDetail.Run) } diff --git a/components/XGBoost/Predict/component.py b/components/XGBoost/Predict/component.py new file mode 100644 index 000000000..c89e5ad70 --- /dev/null +++ b/components/XGBoost/Predict/component.py @@ -0,0 +1,45 @@ +from kfp.components import InputPath, OutputPath, create_component_from_func + +def xgboost_predict( + data_path: InputPath('CSV'), # Also supports LibSVM + model_path: InputPath('XGBoostModel'), + predictions_path: OutputPath('Text'), + label_column: int = None, +): + '''Make predictions using a trained XGBoost model. + + Args: + data_path: Path for the feature data in CSV format. + model_path: Path for the trained model in binary XGBoost format. + predictions_path: Output path for the predictions. + label_column: Column containing the label data. + + Annotations: + author: Alexey Volkov + ''' + from pathlib import Path + + import numpy + import xgboost + + csv_data_spec = data_path + '?format=csv' + # Only specifying the column if it's passed. + if label_column is not None: + csv_data_spec += '&label_column=' + str(label_column) + testing_data = xgboost.DMatrix(csv_data_spec) + + model = xgboost.Booster(model_file=model_path) + + predictions = model.predict(testing_data) + + Path(predictions_path).parent.mkdir(parents=True, exist_ok=True) + numpy.savetxt(predictions_path, predictions) + + +if __name__ == '__main__': + create_component_from_func( + xgboost_predict, + output_component_file='component.yaml', + base_image='python:3.7', + packages_to_install=['xgboost==1.0.2'] + ) diff --git a/components/XGBoost/Predict/component.yaml b/components/XGBoost/Predict/component.yaml new file mode 100644 index 000000000..3ae8488c2 --- /dev/null +++ b/components/XGBoost/Predict/component.yaml @@ -0,0 +1,106 @@ +name: Xgboost predict +description: |- + Make predictions using a trained XGBoost model. + + Args: + data_path: Path for the feature data in CSV format. + model_path: Path for the trained model in binary XGBoost format. + predictions_path: Output path for the predictions. + label_column: Column containing the label data. 
+ + Annotations: + author: Alexey Volkov +inputs: +- {name: data, type: CSV} +- {name: model, type: XGBoostModel} +- {name: label_column, type: Integer, optional: true} +outputs: +- {name: predictions, type: Text} +implementation: + container: + image: python:3.7 + command: + - sh + - -c + - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location + 'xgboost==1.0.2' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet + --no-warn-script-location 'xgboost==1.0.2' --user) && "$0" "$@" + - python3 + - -u + - -c + - | + def _make_parent_dirs_and_return_path(file_path: str): + import os + os.makedirs(os.path.dirname(file_path), exist_ok=True) + return file_path + + def xgboost_predict( + data_path, # Also supports LibSVM + model_path, + predictions_path, + label_column = None, + ): + '''Make predictions using a trained XGBoost model. + + Args: + data_path: Path for the feature data in CSV format. + model_path: Path for the trained model in binary XGBoost format. + predictions_path: Output path for the predictions. + label_column: Column containing the label data. + + Annotations: + author: Alexey Volkov + ''' + from pathlib import Path + + import numpy + import xgboost + + csv_data_spec = data_path + '?format=csv' + # Only specifying the column if it's passed. + if label_column is not None: + csv_data_spec += '&label_column=' + str(label_column) + testing_data = xgboost.DMatrix(csv_data_spec) + + model = xgboost.Booster(model_file=model_path) + + predictions = model.predict(testing_data) + + Path(predictions_path).parent.mkdir(parents=True, exist_ok=True) + numpy.savetxt(predictions_path, predictions) + + import argparse + _parser = argparse.ArgumentParser(prog='Xgboost predict', description='Make predictions using a trained XGBoost model.\n\n Args:\n data_path: Path for the feature data in CSV format.\n model_path: Path for the trained model in binary XGBoost format.\n predictions_path: Output path for the predictions.\n label_column: Column containing the label data.\n\n Annotations:\n author: Alexey Volkov ') + _parser.add_argument("--data", dest="data_path", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--label-column", dest="label_column", type=int, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--predictions", dest="predictions_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = xgboost_predict(**_parsed_args) + + _output_serializers = [ + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --data + - {inputPath: data} + - --model + - {inputPath: model} + - if: + cond: {isPresent: label_column} + then: + - --label-column + - {inputValue: label_column} + - --predictions + - {outputPath: predictions} diff --git a/components/XGBoost/Train/component.py b/components/XGBoost/Train/component.py new file mode 100644 index 000000000..ba882ffec --- /dev/null +++ b/components/XGBoost/Train/component.py @@ -0,0 +1,79 @@ +from kfp.components import InputPath, OutputPath, create_component_from_func + +def xgboost_train( + training_data_path: InputPath('CSV'), # Also 
supports LibSVM + model_path: OutputPath('XGBoostModel'), + model_config_path: OutputPath('XGBoostModelConfig'), + starting_model_path: InputPath('XGBoostModel') = None, + + label_column: int = 0, + num_iterations: int = 10, + booster_params: dict = None, + + # Booster parameters + objective: str = 'reg:squarederror', + booster: str = 'gbtree', + learning_rate: float = 0.3, + min_split_loss: float = 0, + max_depth: int = 6, +): + '''Train an XGBoost model. + + Args: + training_data_path: Path for the training data in CSV format. + model_path: Output path for the trained model in binary XGBoost format. + model_config_path: Output path for the internal parameter configuration of Booster as a JSON string. + starting_model_path: Path for the existing trained model to start from. + label_column: Column containing the label data. + num_boost_rounds: Number of boosting iterations. + booster_params: Parameters for the booster. See https://xgboost.readthedocs.io/en/latest/parameter.html + objective: The learning task and the corresponding learning objective. + See https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters + The most common values are: + "reg:squarederror" - Regression with squared loss (default). + "reg:logistic" - Logistic regression. + "binary:logistic" - Logistic regression for binary classification, output probability. + "binary:logitraw" - Logistic regression for binary classification, output score before logistic transformation + "rank:pairwise" - Use LambdaMART to perform pairwise ranking where the pairwise loss is minimized + "rank:ndcg" - Use LambdaMART to perform list-wise ranking where Normalized Discounted Cumulative Gain (NDCG) is maximized + + Annotations: + author: Alexey Volkov + ''' + import xgboost + + csv_training_data_spec = training_data_path + '?format=csv&label_column=' + str(label_column) + training_data = xgboost.DMatrix(csv_training_data_spec) + booster_params = booster_params or {} + booster_params.setdefault('objective', objective) + booster_params.setdefault('booster', booster) + booster_params.setdefault('learning_rate', learning_rate) + booster_params.setdefault('min_split_loss', min_split_loss) + booster_params.setdefault('max_depth', max_depth) + + starting_model = None + if starting_model_path: + starting_model = xgboost.Booster(model_file=starting_model_path) + + model = xgboost.train( + params=booster_params, + dtrain=training_data, + num_boost_round=num_iterations, + xgb_model=starting_model + ) + + # Saving the model in binary format + model.save_model(model_path) + + model_config_str = model.save_config() + with open(model_config_path, 'w') as model_config_file: + model_config_file.write(model_config_str) + + +if __name__ == '__main__': + create_component_from_func( + xgboost_train, + output_component_file='component.yaml', + base_image='python:3.7', + packages_to_install=['xgboost==1.0.2'] + ) diff --git a/components/XGBoost/Train/component.yaml b/components/XGBoost/Train/component.yaml new file mode 100644 index 000000000..5958ded1e --- /dev/null +++ b/components/XGBoost/Train/component.yaml @@ -0,0 +1,209 @@ +name: Xgboost train +description: |- + Train an XGBoost model. + + Args: + training_data_path: Path for the training data in CSV format. + model_path: Output path for the trained model in binary XGBoost format. + model_config_path: Output path for the internal parameter configuration of Booster as a JSON string. + starting_model_path: Path for the existing trained model to start from. 
+ label_column: Column containing the label data. + num_boost_rounds: Number of boosting iterations. + booster_params: Parameters for the booster. See https://xgboost.readthedocs.io/en/latest/parameter.html + objective: The learning task and the corresponding learning objective. + See https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters + The most common values are: + "reg:squarederror" - Regression with squared loss (default). + "reg:logistic" - Logistic regression. + "binary:logistic" - Logistic regression for binary classification, output probability. + "binary:logitraw" - Logistic regression for binary classification, output score before logistic transformation + "rank:pairwise" - Use LambdaMART to perform pairwise ranking where the pairwise loss is minimized + "rank:ndcg" - Use LambdaMART to perform list-wise ranking where Normalized Discounted Cumulative Gain (NDCG) is maximized + + Annotations: + author: Alexey Volkov +inputs: +- {name: training_data, type: CSV} +- {name: starting_model, type: XGBoostModel, optional: true} +- {name: label_column, type: Integer, default: '0', optional: true} +- {name: num_iterations, type: Integer, default: '10', optional: true} +- {name: booster_params, type: JsonObject, optional: true} +- {name: objective, type: String, default: 'reg:squarederror', optional: true} +- {name: booster, type: String, default: gbtree, optional: true} +- {name: learning_rate, type: Float, default: '0.3', optional: true} +- {name: min_split_loss, type: Float, default: '0', optional: true} +- {name: max_depth, type: Integer, default: '6', optional: true} +outputs: +- {name: model, type: XGBoostModel} +- {name: model_config, type: XGBoostModelConfig} +implementation: + container: + image: python:3.7 + command: + - sh + - -c + - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location + 'xgboost==1.0.2' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet + --no-warn-script-location 'xgboost==1.0.2' --user) && "$0" "$@" + - python3 + - -u + - -c + - | + def _make_parent_dirs_and_return_path(file_path: str): + import os + os.makedirs(os.path.dirname(file_path), exist_ok=True) + return file_path + + def xgboost_train( + training_data_path, # Also supports LibSVM + model_path, + model_config_path, + starting_model_path = None, + + label_column = 0, + num_iterations = 10, + booster_params = None, + + # Booster parameters + objective = 'reg:squarederror', + booster = 'gbtree', + learning_rate = 0.3, + min_split_loss = 0, + max_depth = 6, + ): + '''Train an XGBoost model. + + Args: + training_data_path: Path for the training data in CSV format. + model_path: Output path for the trained model in binary XGBoost format. + model_config_path: Output path for the internal parameter configuration of Booster as a JSON string. + starting_model_path: Path for the existing trained model to start from. + label_column: Column containing the label data. + num_boost_rounds: Number of boosting iterations. + booster_params: Parameters for the booster. See https://xgboost.readthedocs.io/en/latest/parameter.html + objective: The learning task and the corresponding learning objective. + See https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters + The most common values are: + "reg:squarederror" - Regression with squared loss (default). + "reg:logistic" - Logistic regression. + "binary:logistic" - Logistic regression for binary classification, output probability. 
+ "binary:logitraw" - Logistic regression for binary classification, output score before logistic transformation + "rank:pairwise" - Use LambdaMART to perform pairwise ranking where the pairwise loss is minimized + "rank:ndcg" - Use LambdaMART to perform list-wise ranking where Normalized Discounted Cumulative Gain (NDCG) is maximized + + Annotations: + author: Alexey Volkov + ''' + import xgboost + + csv_training_data_spec = training_data_path + '?format=csv&label_column=' + str(label_column) + training_data = xgboost.DMatrix(csv_training_data_spec) + booster_params = booster_params or {} + booster_params.setdefault('objective', objective) + booster_params.setdefault('booster', booster) + booster_params.setdefault('learning_rate', learning_rate) + booster_params.setdefault('min_split_loss', min_split_loss) + booster_params.setdefault('max_depth', max_depth) + + starting_model = None + if starting_model_path: + starting_model = xgboost.Booster(model_file=starting_model_path) + + model = xgboost.train( + params=booster_params, + dtrain=training_data, + num_boost_round=num_iterations, + xgb_model=starting_model + ) + + # Saving the model in binary format + model.save_model(model_path) + + model_config_str = model.save_config() + with open(model_config_path, 'w') as model_config_file: + model_config_file.write(model_config_str) + + import json + import argparse + _parser = argparse.ArgumentParser(prog='Xgboost train', description='Train an XGBoost model.\n\n Args:\n training_data_path: Path for the training data in CSV format.\n model_path: Output path for the trained model in binary XGBoost format.\n model_config_path: Output path for the internal parameter configuration of Booster as a JSON string.\n starting_model_path: Path for the existing trained model to start from.\n label_column: Column containing the label data.\n num_boost_rounds: Number of boosting iterations.\n booster_params: Parameters for the booster. 
See https://xgboost.readthedocs.io/en/latest/parameter.html\n objective: The learning task and the corresponding learning objective.\n See https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters\n The most common values are:\n "reg:squarederror" - Regression with squared loss (default).\n "reg:logistic" - Logistic regression.\n "binary:logistic" - Logistic regression for binary classification, output probability.\n "binary:logitraw" - Logistic regression for binary classification, output score before logistic transformation\n "rank:pairwise" - Use LambdaMART to perform pairwise ranking where the pairwise loss is minimized\n "rank:ndcg" - Use LambdaMART to perform list-wise ranking where Normalized Discounted Cumulative Gain (NDCG) is maximized\n\n Annotations:\n author: Alexey Volkov ') + _parser.add_argument("--training-data", dest="training_data_path", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--starting-model", dest="starting_model_path", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--label-column", dest="label_column", type=int, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--num-iterations", dest="num_iterations", type=int, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--booster-params", dest="booster_params", type=json.loads, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--objective", dest="objective", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--booster", dest="booster", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--learning-rate", dest="learning_rate", type=float, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--min-split-loss", dest="min_split_loss", type=float, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--max-depth", dest="max_depth", type=int, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--model-config", dest="model_config_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = xgboost_train(**_parsed_args) + + _output_serializers = [ + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --training-data + - {inputPath: training_data} + - if: + cond: {isPresent: starting_model} + then: + - --starting-model + - {inputPath: starting_model} + - if: + cond: {isPresent: label_column} + then: + - --label-column + - {inputValue: label_column} + - if: + cond: {isPresent: num_iterations} + then: + - --num-iterations + - {inputValue: num_iterations} + - if: + cond: {isPresent: booster_params} + then: + - --booster-params + - {inputValue: booster_params} + - if: + cond: {isPresent: objective} + then: + - --objective + - {inputValue: objective} + - if: + cond: {isPresent: booster} + then: + - --booster + - {inputValue: booster} + - if: + cond: {isPresent: learning_rate} + then: + - --learning-rate + - {inputValue: learning_rate} + - if: + cond: {isPresent: min_split_loss} + then: + - --min-split-loss + - {inputValue: min_split_loss} + - if: + 
cond: {isPresent: max_depth} + then: + - --max-depth + - {inputValue: max_depth} + - --model + - {outputPath: model} + - --model-config + - {outputPath: model_config} diff --git a/components/XGBoost/_samples/sample_pipeline.py b/components/XGBoost/_samples/sample_pipeline.py new file mode 100644 index 000000000..1562a6f67 --- /dev/null +++ b/components/XGBoost/_samples/sample_pipeline.py @@ -0,0 +1,33 @@ +import kfp +from kfp import components + + +chicago_taxi_dataset_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/e3337b8bdcd63636934954e592d4b32c95b49129/components/datasets/Chicago%20Taxi/component.yaml') +xgboost_train_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/eb0e470be494c2f6415d543d7f37c61f1bf768dc/components/XGBoost/Train/component.yaml') +xgboost_predict_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/eb0e470be494c2f6415d543d7f37c61f1bf768dc/components/XGBoost/Predict/component.yaml') + + +def xgboost_pipeline(): + get_training_data_task = chicago_taxi_dataset_op( + where='trip_start_timestamp >= "2019-01-01" AND trip_start_timestamp < "2019-02-01"', + select='tips,trip_seconds,trip_miles,pickup_community_area,dropoff_community_area,fare,tolls,extras,trip_total', + limit=10000, + ) + + xgboost_train_task = xgboost_train_op( + training_data=get_training_data_task.output, + label_column=0, + objective='reg:squarederror', + num_iterations=200, + ) + + xgboost_predict_op( + data=get_training_data_task.output, + model=xgboost_train_task.outputs['model'], + label_column=0, + ) + + +if __name__ == '__main__': + kfp_endpoint=None + kfp.Client(host=kfp_endpoint).create_run_from_pipeline_func(xgboost_pipeline, arguments={}) diff --git a/components/_converters/ApacheParquet/_samples/sample_pipeline.py b/components/_converters/ApacheParquet/_samples/sample_pipeline.py new file mode 100644 index 000000000..6aa283ae1 --- /dev/null +++ b/components/_converters/ApacheParquet/_samples/sample_pipeline.py @@ -0,0 +1,37 @@ +import kfp +from kfp import components + +component_store = components.ComponentStore(url_search_prefixes=['https://raw.githubusercontent.com/kubeflow/pipelines/0d7d6f41c92bdc05c2825232afe2b47e5cb6c4b3/components/']) + +chicago_taxi_dataset_op = component_store.load_component(name='datasets/Chicago_Taxi_Trips') +convert_csv_to_apache_parquet_op = component_store.load_component(name='_converters/ApacheParquet/from_CSV') +convert_tsv_to_apache_parquet_op = component_store.load_component(name='_converters/ApacheParquet/from_TSV') +convert_apache_parquet_to_apache_arrow_feather_op = component_store.load_component(name='_converters/ApacheParquet/to_ApacheArrowFeather') +convert_apache_arrow_feather_to_apache_parquet_op = component_store.load_component(name='_converters/ApacheParquet/from_ApacheArrowFeather') + + +def parquet_pipeline(): + csv = chicago_taxi_dataset_op( + where='trip_start_timestamp >= "2019-01-01" AND trip_start_timestamp < "2019-02-01"', + select='tips,trip_seconds,trip_miles,pickup_community_area,dropoff_community_area,fare,tolls,extras,trip_total', + limit=10000, + ).output + + tsv = chicago_taxi_dataset_op( + where='trip_start_timestamp >= "2019-01-01" AND trip_start_timestamp < "2019-02-01"', + select='tips,trip_seconds,trip_miles,pickup_community_area,dropoff_community_area,fare,tolls,extras,trip_total', + limit=10000, + format='tsv', + ).output + + csv_parquet = convert_csv_to_apache_parquet_op(csv).output + 
csv_parquet_feather = convert_apache_parquet_to_apache_arrow_feather_op(csv_parquet).output + csv_parquet_feather_parquet = convert_apache_arrow_feather_to_apache_parquet_op(csv_parquet_feather).output + + tsv_parquet = convert_tsv_to_apache_parquet_op(tsv).output + tsv_parquet_feather = convert_apache_parquet_to_apache_arrow_feather_op(tsv_parquet).output + tsv_parquet_feather_parquet = convert_apache_arrow_feather_to_apache_parquet_op(tsv_parquet_feather).output + +if __name__ == '__main__': + kfp_endpoint = None + kfp.Client(host=kfp_endpoint).create_run_from_pipeline_func(parquet_pipeline, arguments={}) diff --git a/components/_converters/ApacheParquet/from_ApacheArrowFeather/component.py b/components/_converters/ApacheParquet/from_ApacheArrowFeather/component.py new file mode 100644 index 000000000..a6949b7ce --- /dev/null +++ b/components/_converters/ApacheParquet/from_ApacheArrowFeather/component.py @@ -0,0 +1,27 @@ +from kfp.components import InputPath, OutputPath, create_component_from_func + +def convert_apache_arrow_feather_to_apache_parquet( + data_path: InputPath('ApacheArrowFeather'), + output_data_path: OutputPath('ApacheParquet'), +): + '''Converts Apache Arrow Feather to Apache Parquet. + + [Apache Arrow Feather](https://arrow.apache.org/docs/python/feather.html) + [Apache Parquet](https://parquet.apache.org/) + + Annotations: + author: Alexey Volkov + ''' + from pyarrow import feather, parquet + + table = feather.read_table(data_path) + parquet.write_table(table, output_data_path) + + +if __name__ == '__main__': + create_component_from_func( + convert_apache_arrow_feather_to_apache_parquet, + output_component_file='component.yaml', + base_image='python:3.7', + packages_to_install=['pyarrow==0.17.1'] + ) diff --git a/components/_converters/ApacheParquet/from_ApacheArrowFeather/component.yaml b/components/_converters/ApacheParquet/from_ApacheArrowFeather/component.yaml new file mode 100644 index 000000000..5c14ed777 --- /dev/null +++ b/components/_converters/ApacheParquet/from_ApacheArrowFeather/component.yaml @@ -0,0 +1,74 @@ +name: Convert apache arrow feather to apache parquet +description: |- + Converts Apache Arrow Feather to Apache Parquet. + + [Apache Arrow Feather](https://arrow.apache.org/docs/python/feather.html) + [Apache Parquet](https://parquet.apache.org/) + + Annotations: + author: Alexey Volkov +inputs: +- {name: data, type: ApacheArrowFeather} +outputs: +- {name: output_data, type: ApacheParquet} +implementation: + container: + image: python:3.7 + command: + - sh + - -c + - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location + 'pyarrow==0.17.1' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install + --quiet --no-warn-script-location 'pyarrow==0.17.1' --user) && "$0" "$@" + - python3 + - -u + - -c + - | + def _make_parent_dirs_and_return_path(file_path: str): + import os + os.makedirs(os.path.dirname(file_path), exist_ok=True) + return file_path + + def convert_apache_arrow_feather_to_apache_parquet( + data_path, + output_data_path, + ): + '''Converts Apache Arrow Feather to Apache Parquet. 
+ + [Apache Arrow Feather](https://arrow.apache.org/docs/python/feather.html) + [Apache Parquet](https://parquet.apache.org/) + + Annotations: + author: Alexey Volkov + ''' + from pyarrow import feather, parquet + + table = feather.read_table(data_path) + parquet.write_table(table, output_data_path) + + import argparse + _parser = argparse.ArgumentParser(prog='Convert apache arrow feather to apache parquet', description='Converts Apache Arrow Feather to Apache Parquet.\n\n [Apache Arrow Feather](https://arrow.apache.org/docs/python/feather.html)\n [Apache Parquet](https://parquet.apache.org/)\n\n Annotations:\n author: Alexey Volkov ') + _parser.add_argument("--data", dest="data_path", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--output-data", dest="output_data_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = convert_apache_arrow_feather_to_apache_parquet(**_parsed_args) + + _output_serializers = [ + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --data + - {inputPath: data} + - --output-data + - {outputPath: output_data} diff --git a/components/_converters/ApacheParquet/from_CSV/component.py b/components/_converters/ApacheParquet/from_CSV/component.py new file mode 100644 index 000000000..101aa7831 --- /dev/null +++ b/components/_converters/ApacheParquet/from_CSV/component.py @@ -0,0 +1,26 @@ +from kfp.components import InputPath, OutputPath, create_component_from_func + +def convert_csv_to_apache_parquet( + data_path: InputPath('CSV'), + output_data_path: OutputPath('ApacheParquet'), +): + '''Converts CSV table to Apache Parquet. + + [Apache Parquet](https://parquet.apache.org/) + + Annotations: + author: Alexey Volkov + ''' + from pyarrow import csv, parquet + + table = csv.read_csv(data_path) + parquet.write_table(table, output_data_path) + + +if __name__ == '__main__': + create_component_from_func( + convert_csv_to_apache_parquet, + output_component_file='component.yaml', + base_image='python:3.7', + packages_to_install=['pyarrow==0.17.1'] + ) diff --git a/components/_converters/ApacheParquet/from_CSV/component.yaml b/components/_converters/ApacheParquet/from_CSV/component.yaml new file mode 100644 index 000000000..bb3ac32e2 --- /dev/null +++ b/components/_converters/ApacheParquet/from_CSV/component.yaml @@ -0,0 +1,72 @@ +name: Convert csv to apache parquet +description: |- + Converts CSV table to Apache Parquet. + + [Apache Parquet](https://parquet.apache.org/) + + Annotations: + author: Alexey Volkov +inputs: +- {name: data, type: CSV} +outputs: +- {name: output_data, type: ApacheParquet} +implementation: + container: + image: python:3.7 + command: + - sh + - -c + - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location + 'pyarrow==0.17.1' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install + --quiet --no-warn-script-location 'pyarrow==0.17.1' --user) && "$0" "$@" + - python3 + - -u + - -c + - | + def _make_parent_dirs_and_return_path(file_path: str): + import os + os.makedirs(os.path.dirname(file_path), exist_ok=True) + return file_path + + def convert_csv_to_apache_parquet( + data_path, + output_data_path, + ): + '''Converts CSV table to Apache Parquet. 
+ + [Apache Parquet](https://parquet.apache.org/) + + Annotations: + author: Alexey Volkov + ''' + from pyarrow import csv, parquet + + table = csv.read_csv(data_path) + parquet.write_table(table, output_data_path) + + import argparse + _parser = argparse.ArgumentParser(prog='Convert csv to apache parquet', description='Converts CSV table to Apache Parquet.\n\n [Apache Parquet](https://parquet.apache.org/)\n\n Annotations:\n author: Alexey Volkov ') + _parser.add_argument("--data", dest="data_path", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--output-data", dest="output_data_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = convert_csv_to_apache_parquet(**_parsed_args) + + _output_serializers = [ + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --data + - {inputPath: data} + - --output-data + - {outputPath: output_data} diff --git a/components/_converters/ApacheParquet/from_TSV/component.py b/components/_converters/ApacheParquet/from_TSV/component.py new file mode 100644 index 000000000..d297171a9 --- /dev/null +++ b/components/_converters/ApacheParquet/from_TSV/component.py @@ -0,0 +1,26 @@ +from kfp.components import InputPath, OutputPath, create_component_from_func + +def convert_tsv_to_apache_parquet( + data_path: InputPath('TSV'), + output_data_path: OutputPath('ApacheParquet'), +): + '''Converts TSV table to Apache Parquet. + + [Apache Parquet](https://parquet.apache.org/) + + Annotations: + author: Alexey Volkov + ''' + from pyarrow import csv, parquet + + table = csv.read_csv(data_path, parse_options=csv.ParseOptions(delimiter='\t')) + parquet.write_table(table, output_data_path) + + +if __name__ == '__main__': + create_component_from_func( + convert_tsv_to_apache_parquet, + output_component_file='component.yaml', + base_image='python:3.7', + packages_to_install=['pyarrow==0.17.1'] + ) diff --git a/components/_converters/ApacheParquet/from_TSV/component.yaml b/components/_converters/ApacheParquet/from_TSV/component.yaml new file mode 100644 index 000000000..499370281 --- /dev/null +++ b/components/_converters/ApacheParquet/from_TSV/component.yaml @@ -0,0 +1,72 @@ +name: Convert tsv to apache parquet +description: |- + Converts TSV table to Apache Parquet. + + [Apache Parquet](https://parquet.apache.org/) + + Annotations: + author: Alexey Volkov +inputs: +- {name: data, type: TSV} +outputs: +- {name: output_data, type: ApacheParquet} +implementation: + container: + image: python:3.7 + command: + - sh + - -c + - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location + 'pyarrow==0.17.1' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install + --quiet --no-warn-script-location 'pyarrow==0.17.1' --user) && "$0" "$@" + - python3 + - -u + - -c + - | + def _make_parent_dirs_and_return_path(file_path: str): + import os + os.makedirs(os.path.dirname(file_path), exist_ok=True) + return file_path + + def convert_tsv_to_apache_parquet( + data_path, + output_data_path, + ): + '''Converts TSV table to Apache Parquet. 
+ + [Apache Parquet](https://parquet.apache.org/) + + Annotations: + author: Alexey Volkov + ''' + from pyarrow import csv, parquet + + table = csv.read_csv(data_path, parse_options=csv.ParseOptions(delimiter='\t')) + parquet.write_table(table, output_data_path) + + import argparse + _parser = argparse.ArgumentParser(prog='Convert tsv to apache parquet', description='Converts TSV table to Apache Parquet.\n\n [Apache Parquet](https://parquet.apache.org/)\n\n Annotations:\n author: Alexey Volkov ') + _parser.add_argument("--data", dest="data_path", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--output-data", dest="output_data_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = convert_tsv_to_apache_parquet(**_parsed_args) + + _output_serializers = [ + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --data + - {inputPath: data} + - --output-data + - {outputPath: output_data} diff --git a/components/_converters/ApacheParquet/to_ApacheArrowFeather/component.py b/components/_converters/ApacheParquet/to_ApacheArrowFeather/component.py new file mode 100644 index 000000000..0129334ba --- /dev/null +++ b/components/_converters/ApacheParquet/to_ApacheArrowFeather/component.py @@ -0,0 +1,27 @@ +from kfp.components import InputPath, OutputPath, create_component_from_func + +def convert_apache_parquet_to_apache_arrow_feather( + data_path: InputPath('ApacheParquet'), + output_data_path: OutputPath('ApacheArrowFeather'), +): + '''Converts Apache Parquet to Apache Arrow Feather. + + [Apache Arrow Feather](https://arrow.apache.org/docs/python/feather.html) + [Apache Parquet](https://parquet.apache.org/) + + Annotations: + author: Alexey Volkov + ''' + from pyarrow import feather, parquet + + data_frame = parquet.read_pandas(data_path).to_pandas() + feather.write_feather(data_frame, output_data_path) + + +if __name__ == '__main__': + convert_apache_parquet_to_apache_arrow_feather_op = create_component_from_func( + convert_apache_parquet_to_apache_arrow_feather, + output_component_file='component.yaml', + base_image='python:3.7', + packages_to_install=['pyarrow==0.17.1', 'pandas==1.0.3'] + ) diff --git a/components/_converters/ApacheParquet/to_ApacheArrowFeather/component.yaml b/components/_converters/ApacheParquet/to_ApacheArrowFeather/component.yaml new file mode 100644 index 000000000..28f64056d --- /dev/null +++ b/components/_converters/ApacheParquet/to_ApacheArrowFeather/component.yaml @@ -0,0 +1,75 @@ +name: Convert apache parquet to apache arrow feather +description: |- + Converts Apache Parquet to Apache Arrow Feather. 
+ + [Apache Arrow Feather](https://arrow.apache.org/docs/python/feather.html) + [Apache Parquet](https://parquet.apache.org/) + + Annotations: + author: Alexey Volkov +inputs: +- {name: data, type: ApacheParquet} +outputs: +- {name: output_data, type: ApacheArrowFeather} +implementation: + container: + image: python:3.7 + command: + - sh + - -c + - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location + 'pyarrow==0.17.1' 'pandas==1.0.3' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 + -m pip install --quiet --no-warn-script-location 'pyarrow==0.17.1' 'pandas==1.0.3' + --user) && "$0" "$@" + - python3 + - -u + - -c + - | + def _make_parent_dirs_and_return_path(file_path: str): + import os + os.makedirs(os.path.dirname(file_path), exist_ok=True) + return file_path + + def convert_apache_parquet_to_apache_arrow_feather( + data_path, + output_data_path, + ): + '''Converts Apache Parquet to Apache Arrow Feather. + + [Apache Arrow Feather](https://arrow.apache.org/docs/python/feather.html) + [Apache Parquet](https://parquet.apache.org/) + + Annotations: + author: Alexey Volkov + ''' + from pyarrow import feather, parquet + + data_frame = parquet.read_pandas(data_path).to_pandas() + feather.write_feather(data_frame, output_data_path) + + import argparse + _parser = argparse.ArgumentParser(prog='Convert apache parquet to apache arrow feather', description='Converts Apache Parquet to Apache Arrow Feather.\n\n [Apache Arrow Feather](https://arrow.apache.org/docs/python/feather.html)\n [Apache Parquet](https://parquet.apache.org/)\n\n Annotations:\n author: Alexey Volkov ') + _parser.add_argument("--data", dest="data_path", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--output-data", dest="output_data_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = convert_apache_parquet_to_apache_arrow_feather(**_parsed_args) + + _output_serializers = [ + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --data + - {inputPath: data} + - --output-data + - {outputPath: output_data} diff --git a/components/aws/sagemaker/.gitignore b/components/aws/sagemaker/.gitignore new file mode 100644 index 000000000..58c9068fc --- /dev/null +++ b/components/aws/sagemaker/.gitignore @@ -0,0 +1,2 @@ +# Any environment variable files +**/*/.env \ No newline at end of file diff --git a/components/aws/sagemaker/README.md b/components/aws/sagemaker/README.md new file mode 100644 index 000000000..3e2ec03a0 --- /dev/null +++ b/components/aws/sagemaker/README.md @@ -0,0 +1,45 @@ +# Amazon SageMaker Components for Kubeflow Pipelines + +## Summary +With Amazon SageMaker Components for Kubeflow Pipelines (KFP), you can create and monitor training, tuning, endpoint deployment, and batch transform jobs in Amazon SageMaker. By running Kubeflow Pipeline jobs on Amazon SageMaker, you move data processing and training jobs from the Kubernetes cluster to Amazon SageMaker’s machine learning-optimized managed service. The job parameters, status, logs, and outputs from Amazon SageMaker are still accessible from the Kubeflow Pipelines UI. 
+ + ## Components +Amazon SageMaker Components for Kubeflow Pipelines offer an alternative to launching compute-intensive jobs in Kubernetes while retaining the orchestration benefits of Kubeflow Pipelines. The following Amazon SageMaker components have been created to integrate 6 key Amazon SageMaker features into your ML workflows. You can create a Kubeflow Pipeline built entirely using these components, or integrate individual components into your workflow as needed. + +There is no additional charge for using Amazon SageMaker Components for Kubeflow Pipelines. You incur charges for any Amazon SageMaker resources you use through these components. + +### Training components + +#### Training + +The Training component allows you to submit Amazon SageMaker Training jobs directly from a Kubeflow Pipelines workflow. For more information, see [SageMaker Training Kubeflow Pipelines component](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/train). + + +#### Hyperparameter Optimization + +The Hyperparameter Optimization component enables you to submit hyperparameter tuning jobs to Amazon SageMaker directly from a Kubeflow Pipelines workflow. For more information, see [SageMaker hyperparameter optimization Kubeflow Pipeline component](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/hyperparameter_tuning). + + +### Inference components + +#### Hosting Deploy + +The Deploy component enables you to deploy a model in Amazon SageMaker Hosting from a Kubeflow Pipelines workflow. For more information, see [SageMaker Hosting Services - Create Endpoint Kubeflow Pipeline component](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/deploy). + +#### Batch Transform + +The Batch Transform component enables you to run inference jobs for an entire dataset in Amazon SageMaker from a Kubeflow Pipelines workflow. For more information, see [SageMaker Batch Transform Kubeflow Pipeline component](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/batch_transform). + + +### Ground Truth components + +#### Ground Truth + +The Ground Truth component enables you to submit Amazon SageMaker Ground Truth labeling jobs directly from a Kubeflow Pipelines workflow. For more information, see [SageMaker Ground Truth Kubeflow Pipelines component](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/ground_truth). + +#### Workteam + +The Workteam component enables you to create Amazon SageMaker private workteams directly from a Kubeflow Pipelines workflow. For more information, see [SageMaker create private workteam Kubeflow Pipelines component](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/workteam). + + diff --git a/components/aws/sagemaker/batch_transform/src/batch_transform.py b/components/aws/sagemaker/batch_transform/src/batch_transform.py index b658dad73..45924170f 100644 --- a/components/aws/sagemaker/batch_transform/src/batch_transform.py +++ b/components/aws/sagemaker/batch_transform/src/batch_transform.py @@ -10,6 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License.
+import sys import argparse import logging from pathlib2 import Path @@ -44,9 +45,7 @@ def create_parser(): parser.add_argument('--input_filter', type=str, required=False, help='A JSONPath expression used to select a portion of the input data to pass to the algorithm.', default='') parser.add_argument('--output_filter', type=str, required=False, help='A JSONPath expression used to select a portion of the joined dataset to save in the output file for a batch transform job.', default='') parser.add_argument('--join_source', choices=['None', 'Input', ''], type=str, required=False, help='Specifies the source of the data to join with the transformed data.', default='None') - parser.add_argument('--instance_type', choices=['ml.m4.xlarge', 'ml.m4.2xlarge', 'ml.m4.4xlarge', 'ml.m4.10xlarge', 'ml.m4.16xlarge', 'ml.m5.large', 'ml.m5.xlarge', 'ml.m5.2xlarge', 'ml.m5.4xlarge', - 'ml.m5.12xlarge', 'ml.m5.24xlarge', 'ml.c4.xlarge', 'ml.c4.2xlarge', 'ml.c4.4xlarge', 'ml.c4.8xlarge', 'ml.p2.xlarge', 'ml.p2.8xlarge', 'ml.p2.16xlarge', 'ml.p3.2xlarge', 'ml.p3.8xlarge', 'ml.p3.16xlarge', - 'ml.c5.xlarge', 'ml.c5.2xlarge', 'ml.c5.4xlarge', 'ml.c5.9xlarge', 'ml.c5.18xlarge'], type=str, required=True, help='The ML compute instance type for the transform job.', default='ml.m4.xlarge') + parser.add_argument('--instance_type', type=str, required=False, help='The ML compute instance type for the transform job.', default='ml.m4.xlarge') parser.add_argument('--instance_count', type=int, required=False, help='The number of ML compute instances to use in the transform job.') parser.add_argument('--resource_encryption_key', type=str, required=False, help='The AWS KMS key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s).', default='') parser.add_argument('--tags', type=_utils.yaml_or_json_str, required=False, help='An array of key-value pairs, to categorize AWS resources.', default={}) @@ -56,7 +55,7 @@ def create_parser(): def main(argv=None): parser = create_parser() - args = parser.parse_args() + args = parser.parse_args(argv) logging.getLogger().setLevel(logging.INFO) client = _utils.get_sagemaker_client(args.region, args.endpoint_url) @@ -66,10 +65,11 @@ def main(argv=None): _utils.wait_for_transform_job(client, batch_job_name) Path(args.output_location_file).parent.mkdir(parents=True, exist_ok=True) - Path(args.output_location_file).write_text(unicode(args.output_location)) + with open(args.output_location_file, 'w') as f: + f.write(unicode(args.output_location)) logging.info('Batch Transformation creation completed.') if __name__== "__main__": - main() + main(sys.argv[1:]) diff --git a/components/aws/sagemaker/codebuild/integration-test.buildspec.yml b/components/aws/sagemaker/codebuild/integration-test.buildspec.yml index 0ca12b06c..d40afba2a 100644 --- a/components/aws/sagemaker/codebuild/integration-test.buildspec.yml +++ b/components/aws/sagemaker/codebuild/integration-test.buildspec.yml @@ -1,14 +1,24 @@ version: 0.2 + +env: + variables: + CONTAINER_VARIABLES: "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI EKS_PRIVATE_SUBNETS EKS_PUBLIC_SUBNETS PYTEST_ADDOPTS S3_DATA_BUCKET EKS_EXISTING_CLUSTER SAGEMAKER_EXECUTION_ROLE_ARN REGION SKIP_FSX_TESTS" + phases: build: commands: - cd components/aws - docker build . 
-f ./sagemaker/tests/integration_tests/Dockerfile -t amazon/integration-test-image --quiet + - cd sagemaker/codebuild/scripts && export CONTAINER_VARIABLE_ARGUMENTS="$(./construct_environment_array.sh)" + # Run the container and copy the results to /tmp - # Passes all host environment variables through to the container - - docker run --name integration-test-container $(env | cut -f1 -d= | sed 's/^/-e /') amazon/integration-test-image - - docker cp integration-test-container:/app/tests/integration_tests/integration_tests.log /tmp/results.xml + # Passes all listed host environment variables through to the container + - docker run --name integration-test-container $(echo $CONTAINER_VARIABLE_ARGUMENTS) amazon/integration-test-image + + post_build: + commands: + - docker cp integration-test-container:/tests/integration_tests/integration_tests.log /tmp/results.xml - docker rm -f integration-test-container reports: diff --git a/components/aws/sagemaker/codebuild/scripts/construct_environment_array.sh b/components/aws/sagemaker/codebuild/scripts/construct_environment_array.sh new file mode 100755 index 000000000..249108d8b --- /dev/null +++ b/components/aws/sagemaker/codebuild/scripts/construct_environment_array.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +# This script breaks up a string of environment variable names into a list of +# parameters that `docker run` accepts. This needs to be made into a script +# for CodeBuild because these commands do not run in dash - the default terminal +# on the CodeBuild standard images. + +IFS=' ' read -a variable_array <<< $CONTAINER_VARIABLES +printf -v CONTAINER_VARIABLE_ARGUMENTS -- "--env %s " "${variable_array[@]}" +echo $CONTAINER_VARIABLE_ARGUMENTS \ No newline at end of file diff --git a/components/aws/sagemaker/codebuild/unit-test.buildspec.yml b/components/aws/sagemaker/codebuild/unit-test.buildspec.yml index a366094bf..4d6884944 100644 --- a/components/aws/sagemaker/codebuild/unit-test.buildspec.yml +++ b/components/aws/sagemaker/codebuild/unit-test.buildspec.yml @@ -8,6 +8,9 @@ phases: # Run the container and copy the results to /tmp # Passes all host environment variables through to the container - docker run --name unit-test-container $(env | cut -f1 -d= | sed 's/^/-e /') amazon/unit-test-image + + post_build: + commands: - docker cp unit-test-container:/app/tests/unit_tests/unit_tests.log /tmp/results.xml - docker rm -f unit-test-container diff --git a/components/aws/sagemaker/common/_utils.py b/components/aws/sagemaker/common/_utils.py index 71d9ffe47..cd93ee5bb 100644 --- a/components/aws/sagemaker/common/_utils.py +++ b/components/aws/sagemaker/common/_utils.py @@ -254,21 +254,17 @@ def create_model_request(args): else: request['PrimaryContainer'].pop('ContainerHostname') - if (args['image'] or args['model_artifact_url']) and args['model_package']: - logging.error("Please specify an image AND model artifact url, OR a model package name.") - raise Exception("Could not make create model request.") - elif args['model_package']: + if args['model_package']: request['PrimaryContainer']['ModelPackageName'] = args['model_package'] request['PrimaryContainer'].pop('Image') request['PrimaryContainer'].pop('ModelDataUrl') + elif args['image'] and args['model_artifact_url']: + request['PrimaryContainer']['Image'] = args['image'] + request['PrimaryContainer']['ModelDataUrl'] = args['model_artifact_url'] + request['PrimaryContainer'].pop('ModelPackageName') else: - if args['image'] and args['model_artifact_url']: - request['PrimaryContainer']['Image'] = 
args['image'] - request['PrimaryContainer']['ModelDataUrl'] = args['model_artifact_url'] - request['PrimaryContainer'].pop('ModelPackageName') - else: - logging.error("Please specify an image AND model artifact url.") - raise Exception("Could not make create model request.") + logging.error("Please specify an image AND model artifact url, OR a model package name.") + raise Exception("Could not make create model request.") request['ExecutionRoleArn'] = args['role'] request['EnableNetworkIsolation'] = args['network_isolation'] @@ -296,6 +292,10 @@ def create_endpoint_config_request(args): with open(os.path.join(__cwd__, 'endpoint_config.template.yaml'), 'r') as f: request = yaml.safe_load(f) + if not args['model_name_1']: + logging.error("Must specify at least one model (model name) to host.") + raise Exception("Could not create endpoint config.") + endpoint_config_name = args['endpoint_config_name'] if args['endpoint_config_name'] else 'EndpointConfig' + args['model_name_1'][args['model_name_1'].index('-'):] request['EndpointConfigName'] = endpoint_config_name @@ -304,10 +304,6 @@ def create_endpoint_config_request(args): else: request.pop('KmsKeyId') - if not args['model_name_1']: - logging.error("Must specify at least one model (model name) to host.") - raise Exception("Could not create endpoint config.") - for i in range(len(request['ProductionVariants']), 0, -1): if args['model_name_' + str(i)]: request['ProductionVariants'][i-1]['ModelName'] = args['model_name_' + str(i)] @@ -377,14 +373,14 @@ def wait_for_endpoint_creation(client, endpoint_name): finally: resp = client.describe_endpoint(EndpointName=endpoint_name) status = resp['EndpointStatus'] - logging.info("Endpoint Arn: " + resp['EndpointArn']) - logging.info("Create endpoint ended with status: " + status) if status != 'InService': - message = client.describe_endpoint(EndpointName=endpoint_name)['FailureReason'] + message = resp['FailureReason'] logging.info('Create endpoint failed with the following error: {}'.format(message)) raise Exception('Endpoint creation did not succeed') + logging.info("Endpoint Arn: " + resp['EndpointArn']) + logging.info("Create endpoint ended with status: " + status) def create_transform_job_request(args): ### Documentation: https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/sagemaker.html#SageMaker.Client.create_transform_job @@ -462,7 +458,7 @@ def create_transform_job(client, args): raise Exception(e.response['Error']['Message']) -def wait_for_transform_job(client, batch_job_name): +def wait_for_transform_job(client, batch_job_name, poll_interval=30): ### Wait until the job finishes while(True): response = client.describe_transform_job(TransformJobName=batch_job_name) @@ -475,7 +471,7 @@ def wait_for_transform_job(client, batch_job_name): logging.info('Transform failed with the following error: {}'.format(message)) raise Exception('Transform job failed') logging.info("Transform job is still in status: " + status) - time.sleep(30) + time.sleep(poll_interval) def create_hyperparameter_tuning_job_request(args): @@ -592,7 +588,7 @@ def create_hyperparameter_tuning_job(client, args): """Create a Sagemaker HPO job""" request = create_hyperparameter_tuning_job_request(args) try: - job_arn = client.create_hyper_parameter_tuning_job(**request) + client.create_hyper_parameter_tuning_job(**request) hpo_job_name = request['HyperParameterTuningJobName'] logging.info("Created Hyperparameter Training Job with name: " + hpo_job_name) logging.info("HPO job in SageMaker: 
https://{}.console.aws.amazon.com/sagemaker/home?region={}#/hyper-tuning-jobs/{}" @@ -604,7 +600,7 @@ def create_hyperparameter_tuning_job(client, args): raise Exception(e.response['Error']['Message']) -def wait_for_hyperparameter_training_job(client, hpo_job_name): +def wait_for_hyperparameter_training_job(client, hpo_job_name, poll_interval=30): ### Wait until the job finishes while(True): response = client.describe_hyper_parameter_tuning_job(HyperParameterTuningJobName=hpo_job_name) @@ -617,7 +613,7 @@ def wait_for_hyperparameter_training_job(client, hpo_job_name): logging.error('Hyperparameter tuning failed with the following error: {}'.format(message)) raise Exception('Hyperparameter tuning job failed') logging.info("Hyperparameter tuning job is still in status: " + status) - time.sleep(30) + time.sleep(poll_interval) def get_best_training_job_and_hyperparameters(client, hpo_job_name): @@ -809,7 +805,7 @@ def create_labeling_job(client, args): raise Exception(e.response['Error']['Message']) -def wait_for_labeling_job(client, labeling_job_name): +def wait_for_labeling_job(client, labeling_job_name, poll_interval=30): ### Wait until the job finishes status = 'InProgress' while(status == 'InProgress'): @@ -820,7 +816,7 @@ def wait_for_labeling_job(client, labeling_job_name): logging.info('Labeling failed with the following error: {}'.format(message)) raise Exception('Labeling job failed') logging.info("Labeling job is still in status: " + status) - time.sleep(30) + time.sleep(poll_interval) if status == 'Completed': logging.info("Labeling job ended with status: " + status) @@ -880,4 +876,4 @@ def yaml_or_json_str(str): def str_to_bool(str): # This distutils function returns an integer representation of the boolean # rather than a True/False value. This simply hard casts it. - return bool(strtobool(str)) \ No newline at end of file + return bool(strtobool(str)) diff --git a/components/aws/sagemaker/deploy/README.md b/components/aws/sagemaker/deploy/README.md index 95c1c6865..c69525cea 100644 --- a/components/aws/sagemaker/deploy/README.md +++ b/components/aws/sagemaker/deploy/README.md @@ -31,7 +31,7 @@ Argument | Description | Optional (in pipeline definition :--- | :---------- | :---------- | :---------- | :----------| :---------- | :----------| model_name_[1, 3] | The name of the model that you want to host. 
This is the name that you specified when creating the model | No | No | String | | | variant_name_[1, 3] | The name of the production variant | Yes | Yes | String | | variant_name_[1, 3] | -instance_type_[1, 3] | The ML compute instance type | Yes | Yes | String | ml.m4.xlarge, ml.m4.2xlarge, ml.m4.4xlarge, ml.m4.10xlarge, ml.m4.16xlarge, ml.m5.large, ml.m5.xlarge, ml.m5.2xlarge, ml.m5.4xlarge, ml.m5.12xlarge, ml.m5.24xlarge, ml.c4.xlarge, ml.c4.2xlarge, ml.c4.4xlarge, ml.c4.8xlarge, ml.p2.xlarge, ml.p2.8xlarge, ml.p2.16xlarge, ml.p3.2xlarge, ml.p3.8xlarge, ml.p3.16xlarge, ml.c5.xlarge, ml.c5.2xlarge, ml.c5.4xlarge, ml.c5.9xlarge, ml.c5.18xlarge | ml.m4.xlarge | +instance_type_[1, 3] | The ML compute instance type | Yes | Yes | String | ml.m4.xlarge, ml.m4.2xlarge, ml.m4.4xlarge, ml.m4.10xlarge, ml.m4.16xlarge, ml.m5.large, ml.m5.xlarge, ml.m5.2xlarge, ml.m5.4xlarge, ml.m5.12xlarge, ml.m5.24xlarge, ml.c4.xlarge, ml.c4.2xlarge, ml.c4.4xlarge, ml.c4.8xlarge, ml.p2.xlarge, ml.p2.8xlarge, ml.p2.16xlarge, ml.p3.2xlarge, ml.p3.8xlarge, ml.p3.16xlarge, ml.c5.xlarge, ml.c5.2xlarge, ml.c5.4xlarge, ml.c5.9xlarge, ml.c5.18xlarge [and many more](https://aws.amazon.com/sagemaker/pricing/instance-types/)| ml.m4.xlarge | initial_instance_count_[1, 3] | Number of instances to launch initially | Yes | Yes | Integer | ≥ 1 | 1 | initial_variant_weight_[1, 3] | Determines initial traffic distribution among all of the models that you specify in the endpoint configuration. The traffic to a production variant is determined by the ratio of the VariantWeight to the sum of all VariantWeight values across all ProductionVariants. | Yes | Yes | Float | Minimum value of 0 | | accelerator_type_[1, 3] | The size of the Elastic Inference (EI) instance to use for the production variant | Yes | Yes | String| ml.eia1.medium, ml.eia1.large, ml.eia1.xlarge | | diff --git a/components/aws/sagemaker/deploy/src/deploy.py b/components/aws/sagemaker/deploy/src/deploy.py index 1888e1b45..a9c64b43c 100644 --- a/components/aws/sagemaker/deploy/src/deploy.py +++ b/components/aws/sagemaker/deploy/src/deploy.py @@ -10,6 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import sys import argparse import logging @@ -23,30 +24,23 @@ def create_parser(): parser.add_argument('--variant_name_1', type=str, required=False, help='The name of the production variant.', default='variant-name-1') parser.add_argument('--model_name_1', type=str, required=True, help='The model name used for endpoint deployment.') parser.add_argument('--initial_instance_count_1', type=int, required=False, help='Number of instances to launch initially.', default=1) - parser.add_argument('--instance_type_1', choices=['ml.m4.xlarge', 'ml.m4.2xlarge', 'ml.m4.4xlarge', 'ml.m4.10xlarge', 'ml.m4.16xlarge', 'ml.m5.large', 'ml.m5.xlarge', 'ml.m5.2xlarge', 'ml.m5.4xlarge', - 'ml.m5.12xlarge', 'ml.m5.24xlarge', 'ml.c4.xlarge', 'ml.c4.2xlarge', 'ml.c4.4xlarge', 'ml.c4.8xlarge', 'ml.p2.xlarge', 'ml.p2.8xlarge', 'ml.p2.16xlarge', 'ml.p3.2xlarge', 'ml.p3.8xlarge', 'ml.p3.16xlarge', - 'ml.c5.xlarge', 'ml.c5.2xlarge', 'ml.c5.4xlarge', 'ml.c5.9xlarge', 'ml.c5.18xlarge', ''], type=str, required=False, help='The ML compute instance type.', default='ml.m4.xlarge') + parser.add_argument('--instance_type_1', type=str, required=False, help='The ML compute instance type.', default='ml.m4.xlarge') parser.add_argument('--initial_variant_weight_1', type=float, required=False, help='Determines initial traffic distribution among all of the models that you specify in the endpoint configuration.', default=1.0) parser.add_argument('--accelerator_type_1', choices=['ml.eia1.medium', 'ml.eia1.large', 'ml.eia1.xlarge', ''], type=str, required=False, help='The size of the Elastic Inference (EI) instance to use for the production variant.', default='') parser.add_argument('--variant_name_2', type=str, required=False, help='The name of the production variant.', default='variant-name-2') parser.add_argument('--model_name_2', type=str, required=False, help='The model name used for endpoint deployment.', default='') parser.add_argument('--initial_instance_count_2', type=int, required=False, help='Number of instances to launch initially.', default=1) - parser.add_argument('--instance_type_2', choices=['ml.m4.xlarge', 'ml.m4.2xlarge', 'ml.m4.4xlarge', 'ml.m4.10xlarge', 'ml.m4.16xlarge', 'ml.m5.large', 'ml.m5.xlarge', 'ml.m5.2xlarge', 'ml.m5.4xlarge', - 'ml.m5.12xlarge', 'ml.m5.24xlarge', 'ml.c4.xlarge', 'ml.c4.2xlarge', 'ml.c4.4xlarge', 'ml.c4.8xlarge', 'ml.p2.xlarge', 'ml.p2.8xlarge', 'ml.p2.16xlarge', 'ml.p3.2xlarge', 'ml.p3.8xlarge', 'ml.p3.16xlarge', - 'ml.c5.xlarge', 'ml.c5.2xlarge', 'ml.c5.4xlarge', 'ml.c5.9xlarge', 'ml.c5.18xlarge', ''], type=str, required=False, help='The ML compute instance type.', default='ml.m4.xlarge') + parser.add_argument('--instance_type_2', type=str, required=False, help='The ML compute instance type.', default='ml.m4.xlarge') parser.add_argument('--initial_variant_weight_2', type=float, required=False, help='Determines initial traffic distribution among all of the models that you specify in the endpoint configuration.', default=1.0) parser.add_argument('--accelerator_type_2', choices=['ml.eia1.medium', 'ml.eia1.large', 'ml.eia1.xlarge', ''], type=str, required=False, help='The size of the Elastic Inference (EI) instance to use for the production variant.', default='') parser.add_argument('--variant_name_3', type=str, required=False, help='The name of the production variant.', default='variant-name-3') parser.add_argument('--model_name_3', type=str, required=False, help='The model name used for endpoint deployment.', default='') parser.add_argument('--initial_instance_count_3', type=int, 
required=False, help='Number of instances to launch initially.', default=1) - parser.add_argument('--instance_type_3', choices=['ml.m4.xlarge', 'ml.m4.2xlarge', 'ml.m4.4xlarge', 'ml.m4.10xlarge', 'ml.m4.16xlarge', 'ml.m5.large', 'ml.m5.xlarge', 'ml.m5.2xlarge', 'ml.m5.4xlarge', - 'ml.m5.12xlarge', 'ml.m5.24xlarge', 'ml.c4.xlarge', 'ml.c4.2xlarge', 'ml.c4.4xlarge', 'ml.c4.8xlarge', 'ml.p2.xlarge', 'ml.p2.8xlarge', 'ml.p2.16xlarge', 'ml.p3.2xlarge', 'ml.p3.8xlarge', 'ml.p3.16xlarge', - 'ml.c5.xlarge', 'ml.c5.2xlarge', 'ml.c5.4xlarge', 'ml.c5.9xlarge', 'ml.c5.18xlarge', ''], type=str, required=False, help='The ML compute instance type.', default='ml.m4.xlarge') + parser.add_argument('--instance_type_3', type=str, required=False, help='The ML compute instance type.', default='ml.m4.xlarge') parser.add_argument('--initial_variant_weight_3', type=float, required=False, help='Determines initial traffic distribution among all of the models that you specify in the endpoint configuration.', default=1.0) parser.add_argument('--accelerator_type_3', choices=['ml.eia1.medium', 'ml.eia1.large', 'ml.eia1.xlarge', ''], type=str, required=False, help='The size of the Elastic Inference (EI) instance to use for the production variant.', default='') parser.add_argument('--resource_encryption_key', type=str, required=False, help='The AWS KMS key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s).', default='') parser.add_argument('--endpoint_config_tags', type=_utils.yaml_or_json_str, required=False, help='An array of key-value pairs, to categorize AWS resources.', default={}) - parser.add_argument('--endpoint_name', type=str, required=False, help='The name of the endpoint.', default='') parser.add_argument('--endpoint_tags', type=_utils.yaml_or_json_str, required=False, help='An array of key-value pairs, to categorize AWS resources.', default={}) @@ -54,7 +48,7 @@ def create_parser(): def main(argv=None): parser = create_parser() - args = parser.parse_args() + args = parser.parse_args(argv) logging.getLogger().setLevel(logging.INFO) client = _utils.get_sagemaker_client(args.region, args.endpoint_url) @@ -70,4 +64,4 @@ def main(argv=None): if __name__== "__main__": - main() + main(sys.argv[1:]) diff --git a/components/aws/sagemaker/ground_truth/component.yaml b/components/aws/sagemaker/ground_truth/component.yaml index 3a143cc77..bb25b14a9 100644 --- a/components/aws/sagemaker/ground_truth/component.yaml +++ b/components/aws/sagemaker/ground_truth/component.yaml @@ -49,11 +49,11 @@ inputs: type: String - name: max_human_labeled_objects description: 'The maximum number of objects that can be labeled by human workers.' - default: '' + default: '0' type: Integer - name: max_percent_objects description: 'The maximum number of input data objects that should be labeled.' - default: '' + default: '0' type: Integer - name: enable_auto_labeling description: 'Enables auto-labeling, only for bounding box, text classification, and image classification.' diff --git a/components/aws/sagemaker/ground_truth/src/ground_truth.py b/components/aws/sagemaker/ground_truth/src/ground_truth.py index 68f7a557d..10532f3b4 100644 --- a/components/aws/sagemaker/ground_truth/src/ground_truth.py +++ b/components/aws/sagemaker/ground_truth/src/ground_truth.py @@ -10,6 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import sys import argparse import logging @@ -53,7 +54,7 @@ def create_parser(): def main(argv=None): parser = create_parser() - args = parser.parse_args() + args = parser.parse_args(argv) logging.getLogger().setLevel(logging.INFO) client = _utils.get_sagemaker_client(args.region, args.endpoint_url) @@ -72,4 +73,4 @@ def main(argv=None): if __name__== "__main__": - main() + main(sys.argv[1:]) diff --git a/components/aws/sagemaker/hyperparameter_tuning/README.md b/components/aws/sagemaker/hyperparameter_tuning/README.md index 8f719b10a..8718e5fae 100644 --- a/components/aws/sagemaker/hyperparameter_tuning/README.md +++ b/components/aws/sagemaker/hyperparameter_tuning/README.md @@ -28,7 +28,7 @@ categorical_parameters | The array of CategoricalParameterRange objects that spe channels | A list of dicts specifying the input channels (at least one); refer to [documentation](https://github.com/awsdocs/amazon-sagemaker-developer-guide/blob/master/doc_source/API_Channel.md) for parameters | No | No | List of Dicts | | | output_location | The Amazon S3 path where you want Amazon SageMaker to store the results of the transform job | No | No | String | | | output_encryption_key | The AWS KMS key that Amazon SageMaker uses to encrypt the model artifacts | Yes | Yes | String | | | -instance_type | The ML compute instance type | Yes | No | String | ml.m4.xlarge, ml.m4.2xlarge, ml.m4.4xlarge, ml.m4.10xlarge, ml.m4.16xlarge, ml.m5.large, ml.m5.xlarge, ml.m5.2xlarge, ml.m5.4xlarge, ml.m5.12xlarge, ml.m5.24xlarge, ml.c4.xlarge, ml.c4.2xlarge, ml.c4.4xlarge, ml.c4.8xlarge, ml.p2.xlarge, ml.p2.8xlarge, ml.p2.16xlarge, ml.p3.2xlarge, ml.p3.8xlarge, ml.p3.16xlarge, ml.c5.xlarge, ml.c5.2xlarge, ml.c5.4xlarge, ml.c5.9xlarge, ml.c5.18xlarge | ml.m4.xlarge | +instance_type | The ML compute instance type | Yes | No | String | ml.m4.xlarge, ml.m4.2xlarge, ml.m4.4xlarge, ml.m4.10xlarge, ml.m4.16xlarge, ml.m5.large, ml.m5.xlarge, ml.m5.2xlarge, ml.m5.4xlarge, ml.m5.12xlarge, ml.m5.24xlarge, ml.c4.xlarge, ml.c4.2xlarge, ml.c4.4xlarge, ml.c4.8xlarge, ml.p2.xlarge, ml.p2.8xlarge, ml.p2.16xlarge, ml.p3.2xlarge, ml.p3.8xlarge, ml.p3.16xlarge, ml.c5.xlarge, ml.c5.2xlarge, ml.c5.4xlarge, ml.c5.9xlarge, ml.c5.18xlarge [and many more](https://aws.amazon.com/sagemaker/pricing/instance-types/)| ml.m4.xlarge | instance_count | The number of ML compute instances to use in each training job | Yes | Yes | Int | ≥ 1 | 1 | volume_size | The size of the ML storage volume that you want to provision in GB | Yes | Yes | Int | ≥ 1 | 30 | max_num_jobs | The maximum number of training jobs that a hyperparameter tuning job can launch | No | No | Int | [1, 500] | | diff --git a/components/aws/sagemaker/hyperparameter_tuning/src/hyperparameter_tuning.py b/components/aws/sagemaker/hyperparameter_tuning/src/hyperparameter_tuning.py index df44a4098..068e52f5b 100644 --- a/components/aws/sagemaker/hyperparameter_tuning/src/hyperparameter_tuning.py +++ b/components/aws/sagemaker/hyperparameter_tuning/src/hyperparameter_tuning.py @@ -19,10 +19,10 @@ from common import _utils def create_parser(): parser = argparse.ArgumentParser(description='SageMaker Hyperparameter Tuning Job') _utils.add_default_client_arguments(parser) - + parser.add_argument('--job_name', type=str, required=False, help='The name of the tuning job. 
Must be unique within the same AWS account and AWS region.') parser.add_argument('--role', type=str, required=True, help='The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf.') - parser.add_argument('--image', type=str, required=True, help='The registry path of the Docker image that contains the training algorithm.', default='') + parser.add_argument('--image', type=str, required=False, help='The registry path of the Docker image that contains the training algorithm.', default='') parser.add_argument('--algorithm_name', type=str, required=False, help='The name of the resource algorithm to use for the hyperparameter tuning job.', default='') parser.add_argument('--training_input_mode', choices=['File', 'Pipe'], type=str, required=False, help='The input mode that the algorithm supports. File or Pipe.', default='File') parser.add_argument('--metric_definitions', type=_utils.yaml_or_json_str, required=False, help='The dictionary of name-regex pairs specify the metrics that the algorithm emits.', default={}) @@ -37,11 +37,9 @@ def create_parser(): parser.add_argument('--channels', type=_utils.yaml_or_json_str, required=True, help='A list of dicts specifying the input channels. Must have at least one.') parser.add_argument('--output_location', type=str, required=True, help='The Amazon S3 path where you want Amazon SageMaker to store the results of the transform job.') parser.add_argument('--output_encryption_key', type=str, required=False, help='The AWS KMS key that Amazon SageMaker uses to encrypt the model artifacts.', default='') - parser.add_argument('--instance_type', choices=['ml.m4.xlarge', 'ml.m4.2xlarge', 'ml.m4.4xlarge', 'ml.m4.10xlarge', 'ml.m4.16xlarge', 'ml.m5.large', 'ml.m5.xlarge', 'ml.m5.2xlarge', 'ml.m5.4xlarge', - 'ml.m5.12xlarge', 'ml.m5.24xlarge', 'ml.c4.xlarge', 'ml.c4.2xlarge', 'ml.c4.4xlarge', 'ml.c4.8xlarge', 'ml.p2.xlarge', 'ml.p2.8xlarge', 'ml.p2.16xlarge', 'ml.p3.2xlarge', 'ml.p3.8xlarge', 'ml.p3.16xlarge', - 'ml.c5.xlarge', 'ml.c5.2xlarge', 'ml.c5.4xlarge', 'ml.c5.9xlarge', 'ml.c5.18xlarge'], type=str, required=False, help='The ML compute instance type.', default='ml.m4.xlarge') + parser.add_argument('--instance_type', type=str, required=False, help='The ML compute instance type.', default='ml.m4.xlarge') parser.add_argument('--instance_count', type=int, required=False, help='The number of ML compute instances to use in each training job.', default=1) - parser.add_argument('--volume_size', type=int, required=False, help='The size of the ML storage volume that you want to provision.', default=1) + parser.add_argument('--volume_size', type=int, required=False, help='The size of the ML storage volume that you want to provision.', default=30) parser.add_argument('--max_num_jobs', type=int, required=True, help='The maximum number of training jobs that a hyperparameter tuning job can launch.') parser.add_argument('--max_parallel_jobs', type=int, required=True, help='The maximum number of concurrent training jobs that a hyperparameter tuning job can launch.') parser.add_argument('--max_run_time', type=int, required=False, help='The maximum run time in seconds per training job.', default=86400) @@ -65,7 +63,7 @@ def create_parser(): def main(argv=None): parser = create_parser() - args = parser.parse_args() + args = parser.parse_args(argv) logging.getLogger().setLevel(logging.INFO) client = _utils.get_sagemaker_client(args.region) @@ -92,4 +90,4 @@ def main(argv=None): if __name__== "__main__": - main() + main(sys.argv[1:]) diff --git 
a/components/aws/sagemaker/model/src/create_model.py b/components/aws/sagemaker/model/src/create_model.py index c6fcebd24..d44c09a93 100644 --- a/components/aws/sagemaker/model/src/create_model.py +++ b/components/aws/sagemaker/model/src/create_model.py @@ -10,6 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import sys import argparse import logging @@ -36,7 +37,7 @@ def create_parser(): def main(argv=None): parser = create_parser() - args = parser.parse_args() + args = parser.parse_args(argv) logging.getLogger().setLevel(logging.INFO) client = _utils.get_sagemaker_client(args.region, args.endpoint_url) @@ -50,4 +51,4 @@ def main(argv=None): if __name__== "__main__": - main() + main(sys.argv[1:]) diff --git a/components/aws/sagemaker/tests/integration_tests/.env.example b/components/aws/sagemaker/tests/integration_tests/.env.example new file mode 100644 index 000000000..b4c0aa92b --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/.env.example @@ -0,0 +1,15 @@ +# If you would like to override the credentials for the container +# AWS_ACCESS_KEY_ID= +# AWS_SECRET_ACCESS_KEY= +# AWS_SESSION_TOKEN= + +REGION=us-east-1 + +SAGEMAKER_EXECUTION_ROLE_ARN=arn:aws:iam::123456789012:role/service-role/AmazonSageMaker-ExecutionRole-Example +S3_DATA_BUCKET=my-data-bucket + +# If you hope to use an existing EKS cluster, rather than creating a new one. +# EKS_EXISTING_CLUSTER=my-eks-cluster + +# If you would like to skip the FSx set-up and tests +# SKIP_FSX_TESTS=true \ No newline at end of file diff --git a/components/aws/sagemaker/tests/integration_tests/Dockerfile b/components/aws/sagemaker/tests/integration_tests/Dockerfile new file mode 100644 index 000000000..75c66f8c1 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/Dockerfile @@ -0,0 +1,43 @@ +FROM continuumio/miniconda:4.7.12 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + curl \ + wget \ + git \ + jq + +# Install eksctl +RUN curl --location "https://github.com/weaveworks/eksctl/releases/download/0.19.0/eksctl_$(uname -s)_amd64.tar.gz" | tar xz -C /tmp \ + && mv /tmp/eksctl /usr/local/bin + +# Install aws-iam-authenticator +RUN curl -S -o /usr/local/bin/aws-iam-authenticator https://amazon-eks.s3.us-west-2.amazonaws.com/1.16.8/2020-04-16/bin/linux/amd64/aws-iam-authenticator \ + && chmod +x /usr/local/bin/aws-iam-authenticator + +# Install Kubectl +RUN curl -LO https://storage.googleapis.com/kubernetes-release/release/v1.18.0/bin/linux/amd64/kubectl \ + && chmod +x ./kubectl \ + && mv ./kubectl /usr/local/bin/kubectl + +# Install Argo CLI +RUN curl -sSL -o /usr/local/bin/argo https://github.com/argoproj/argo/releases/download/v2.8.0/argo-linux-amd64 \ + && chmod +x /usr/local/bin/argo + +# Copy conda environment early to avoid cache busting +COPY ./sagemaker/tests/integration_tests/environment.yml environment.yml + +# Create conda environment for running tests and set as start-up environment +RUN conda env create -f environment.yml +RUN echo "source activate kfp_test_env" > ~/.bashrc +ENV PATH "/opt/conda/envs/kfp_test_env/bin":$PATH + +# Environment variables to be used by tests +ENV REGION="us-west-2" +ENV SAGEMAKER_EXECUTION_ROLE_ARN="arn:aws:iam::1234567890:role/sagemaker-role" +ENV S3_DATA_BUCKET="kfp-test-data" +ENV MINIO_LOCAL_PORT=9000 +ENV KFP_NAMESPACE="kubeflow" + +COPY ./sagemaker/ . 
+ + ENTRYPOINT [ "/bin/bash", "./tests/integration_tests/scripts/run_integration_tests" ] \ No newline at end of file diff --git a/components/aws/sagemaker/tests/integration_tests/README.md b/components/aws/sagemaker/tests/integration_tests/README.md index 898d666d6..6076956ef 100644 --- a/components/aws/sagemaker/tests/integration_tests/README.md +++ b/components/aws/sagemaker/tests/integration_tests/README.md @@ -1,42 +1,25 @@ ## Requirements -1. [Conda](https://docs.conda.io/en/latest/miniconda.html) -1. [Kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/) -1. Argo CLI: [Mac](https://github.com/argoproj/homebrew-tap), [Linux](https://eksworkshop.com/advanced/410_batch/install/) -1. K8s cluster with Kubeflow pipelines > 0.4.0 installed -1. [IAM Role](https://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-roles.html) with a SageMakerFullAccess and S3FullAccess -1. IAM User credentials with SageMakerFullAccess permissions +1. [Docker](https://www.docker.com/) +1. [IAM Role](https://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-roles.html) with a SageMakerFullAccess and AmazonS3FullAccess +1. IAM User credentials with SageMakerFullAccess, AWSCloudFormationFullAccess, IAMFullAccess, AmazonEC2FullAccess, AmazonS3FullAccess permissions +2. The SageMaker WorkTeam and GroundTruth Component tests expect that at least one private workteam already exists in the region where you are running these tests. + ## Creating S3 buckets with datasets -Change the bucket name and run the python script `[s3_sample_data_creator.py](https://github.com/kubeflow/pipelines/tree/master/samples/contrib/aws-samples/mnist-kmeans-sagemaker#the-sample-dataset)` to create S3 buckets with mnist dataset in the region where you want to run the tests +1. In the following Python script, change the bucket name and run the [`s3_sample_data_creator.py`](https://github.com/kubeflow/pipelines/tree/master/samples/contrib/aws-samples/mnist-kmeans-sagemaker#the-sample-dataset) to create an S3 bucket with the sample mnist dataset in the region where you want to run the tests. +2. To prepare the dataset for the SageMaker GroundTruth Component test, follow the steps in the [GroundTruth Sample README](https://github.com/kubeflow/pipelines/tree/master/samples/contrib/aws-samples/ground_truth_pipeline_demo#prep-the-dataset-label-categories-and-ui-template). + ## Step to run integration tests -1. Configure AWS credentials with access to EKS cluster -1. Fetch kubeconfig to `~/.kube/config` or set `KUBECONFIG` environment variable to point to kubeconfig of the cluster -1. Create a [secret](https://kubernetes.io/docs/tasks/inject-data-application/distribute-credentials-secure/) named `aws-secret` in kubeflow namespace with credentials of IAM User for SageMakerFullAccess - ```yaml - apiVersion: v1 - kind: Secret - metadata: - name: aws-secret - namespace: kubeflow - type: Opaque - data: - AWS_ACCESS_KEY_ID: YOUR_BASE64_ACCESS_KEY - AWS_SECRET_ACCESS_KEY: YOUR_BASE64_SECRET_ACCESS - ``` - - > Note: To get base64 string, run `echo -n $AWS_ACCESS_KEY_ID | base64` -1. Create conda environment using environment.yml for running tests. Run `conda env create -f environment.yml` -1. Activate the conda environment `conda activate kfp_test_env` -1. Run port-forward to minio service in background. Example: `kubectl port-forward svc/minio-service 9000:9000 -n kubeflow &` -1. Provide the following arguments to pytest: - 1. `region`: AWS region where test will run. Default - us-west-2 - 1. `role-arn`: SageMaker execution IAM role ARN - 1.
`s3-data-bucket`: Regional S3 bucket in which test data is hosted - 1. `minio-service-port`: Localhost port to which minio service is mapped to. Default - 9000 - 1. `kfp-namespace`: Cluster namespace where kubeflow pipelines is installed. Default - Kubeflow -1. cd into this directory and run - ``` - pytest --region <> --role-arn <> --s3-data-bucket <> --minio-service-port <> --kfp-namespace <> - ``` +1. Copy the `.env.example` file to `.env` and in the following steps modify the fields of this new file: + 1. Configure the AWS credentials fields with those of your IAM User. + 1. Update the `SAGEMAKER_EXECUTION_ROLE_ARN` with that of your role created earlier. + 1. Update the `S3_DATA_BUCKET` parameter with the name of the bucket created earlier. + 1. (Optional) If you have already created an EKS cluster for testing, replace the `EKS_EXISTING_CLUSTER` field with its name. +1. Build the image by doing the following: + 1. Navigate to the `components/aws` directory. + 1. Run `docker build . -f sagemaker/tests/integration_tests/Dockerfile -t amazon/integration_test` +1. Run the image, injecting your environment variable file: + 1. Navigate to the `components/aws` directory. + 1. Run `docker run --env-file sagemaker/tests/integration_tests/.env amazon/integration_test` \ No newline at end of file diff --git a/components/aws/sagemaker/tests/integration_tests/component_tests/test_groundtruth_component.py b/components/aws/sagemaker/tests/integration_tests/component_tests/test_groundtruth_component.py new file mode 100644 index 000000000..18c0ad485 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/component_tests/test_groundtruth_component.py @@ -0,0 +1,87 @@ +import pytest +import os +import json +import utils +from utils import kfp_client_utils +from utils import sagemaker_utils +from test_workteam_component import create_workteamjob +import time + + +@pytest.mark.parametrize( + "test_file_dir", + [ + pytest.param( + "resources/config/image-classification-groundtruth", + marks=pytest.mark.canary_test, + ) + ], +) +def test_groundtruth_labeling_job( + kfp_client, experiment_id, region, sagemaker_client, test_file_dir +): + + download_dir = utils.mkdir(os.path.join(test_file_dir + "/generated")) + test_params = utils.load_params( + utils.replace_placeholders( + os.path.join(test_file_dir, "config.yaml"), + os.path.join(download_dir, "config.yaml"), + ) + ) + + # First create a workteam using a separate pipeline and get the name, arn of the workteam created. + workteam_name, _ = create_workteamjob( + kfp_client, + experiment_id, + region, + sagemaker_client, + "resources/config/create-workteam", + download_dir, + ) + + test_params["Arguments"][ + "workteam_arn" + ] = workteam_arn = sagemaker_utils.get_workteam_arn(sagemaker_client, workteam_name) + + # Generate the ground_truth_train_job_name based on the workteam which will be used for labeling. + test_params["Arguments"][ + "ground_truth_train_job_name" + ] = ground_truth_train_job_name = ( + test_params["Arguments"]["ground_truth_train_job_name"] + "-by-" + workteam_name + ) + + _ = kfp_client_utils.compile_run_monitor_pipeline( + kfp_client, + experiment_id, + test_params["PipelineDefinition"], + test_params["Arguments"], + download_dir, + test_params["TestName"], + test_params["Timeout"], + test_params["StatusToCheck"], + ) + + # Verify the GroundTruthJob was created in SageMaker and is InProgress. + # TODO: Add a bot to complete the labeling job and check for completion instead.
+ try: + response = sagemaker_utils.describe_labeling_job( + sagemaker_client, ground_truth_train_job_name + ) + assert response["LabelingJobStatus"] == "InProgress" + + # Verify that the workteam has the specified labeling job + labeling_jobs = sagemaker_utils.list_labeling_jobs_for_workteam( + sagemaker_client, workteam_arn + ) + assert len(labeling_jobs["LabelingJobSummaryList"]) == 1 + assert ( + labeling_jobs["LabelingJobSummaryList"][0]["LabelingJobName"] + == ground_truth_train_job_name + ) + finally: + # Cleanup the SageMaker Resources + sagemaker_utils.stop_labeling_job(sagemaker_client, ground_truth_train_job_name) + sagemaker_utils.delete_workteam(sagemaker_client, workteam_name) + + # Delete generated files + utils.remove_dir(download_dir) diff --git a/components/aws/sagemaker/tests/integration_tests/component_tests/test_train_component.py b/components/aws/sagemaker/tests/integration_tests/component_tests/test_train_component.py index 037350f0b..b4402d371 100644 --- a/components/aws/sagemaker/tests/integration_tests/component_tests/test_train_component.py +++ b/components/aws/sagemaker/tests/integration_tests/component_tests/test_train_component.py @@ -12,7 +12,8 @@ from utils import sagemaker_utils [ pytest.param( "resources/config/simple-mnist-training", marks=pytest.mark.canary_test - ) + ), + pytest.param("resources/config/fsx-mnist-training", marks=pytest.mark.fsx_test), ], ) def test_trainingjob( diff --git a/components/aws/sagemaker/tests/integration_tests/component_tests/test_workteam_component.py b/components/aws/sagemaker/tests/integration_tests/component_tests/test_workteam_component.py new file mode 100644 index 000000000..e263d4e11 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/component_tests/test_workteam_component.py @@ -0,0 +1,84 @@ +import pytest +import os +import json +import utils +from utils import kfp_client_utils +from utils import sagemaker_utils +from utils import minio_utils + + +def create_workteamjob( + kfp_client, experiment_id, region, sagemaker_client, test_file_dir, download_dir +): + + test_params = utils.load_params( + utils.replace_placeholders( + os.path.join(test_file_dir, "config.yaml"), + os.path.join(download_dir, "config.yaml"), + ) + ) + + # Get the account, region specific user_pool and client_id for the Sagemaker Workforce. 
+ ( + test_params["Arguments"]["user_pool"], + test_params["Arguments"]["client_id"], + test_params["Arguments"]["user_groups"], + ) = sagemaker_utils.get_cognito_member_definitions(sagemaker_client) + + # Generate random prefix for workteam_name to avoid errors if resources with same name exists + test_params["Arguments"]["team_name"] = workteam_name = ( + utils.generate_random_string(5) + "-" + test_params["Arguments"]["team_name"] + ) + + _, _, workflow_json = kfp_client_utils.compile_run_monitor_pipeline( + kfp_client, + experiment_id, + test_params["PipelineDefinition"], + test_params["Arguments"], + download_dir, + test_params["TestName"], + test_params["Timeout"], + ) + + return workteam_name, workflow_json + + +@pytest.mark.parametrize( + "test_file_dir", + [pytest.param("resources/config/create-workteam", marks=pytest.mark.canary_test)], +) +def test_workteamjob( + kfp_client, experiment_id, region, sagemaker_client, test_file_dir +): + + download_dir = utils.mkdir(os.path.join(test_file_dir + "/generated")) + workteam_name, workflow_json = create_workteamjob( + kfp_client, experiment_id, region, sagemaker_client, test_file_dir, download_dir + ) + + outputs = {"sagemaker-private-workforce": ["workteam_arn"]} + + try: + output_files = minio_utils.artifact_download_iterator( + workflow_json, outputs, download_dir + ) + + response = sagemaker_utils.describe_workteam(sagemaker_client, workteam_name) + + # Verify WorkTeam was created in SageMaker + assert response["Workteam"]["CreateDate"] is not None + assert response["Workteam"]["WorkteamName"] == workteam_name + + # Verify WorkTeam arn artifact was created in Minio and matches the one in SageMaker + workteam_arn = utils.read_from_file_in_tar( + output_files["sagemaker-private-workforce"]["workteam_arn"], + "workteam_arn.txt", + ) + assert response["Workteam"]["WorkteamArn"] == workteam_arn + + finally: + # Cleanup the SageMaker Resources + sagemaker_utils.delete_workteam(sagemaker_client, workteam_name) + + # Delete generated files only if the test is successful + utils.remove_dir(download_dir) diff --git a/components/aws/sagemaker/tests/integration_tests/conftest.py b/components/aws/sagemaker/tests/integration_tests/conftest.py index 47e6cb9ea..475d2edd4 100644 --- a/components/aws/sagemaker/tests/integration_tests/conftest.py +++ b/components/aws/sagemaker/tests/integration_tests/conftest.py @@ -5,6 +5,7 @@ import os import utils from datetime import datetime +from filelock import FileLock def pytest_addoption(parser): @@ -34,6 +35,24 @@ def pytest_addoption(parser): required=False, help="Cluster namespace where kubeflow pipelines is installed", ) + parser.addoption( + "--fsx-subnet", + required=False, + help="The subnet in which FSx is installed", + default="", + ) + parser.addoption( + "--fsx-security-group", + required=False, + help="The security group SageMaker should use when running the FSx test", + default="", + ) + parser.addoption( + "--fsx-id", + required=False, + help="The file system ID of the FSx instance", + default="", + ) @pytest.fixture(scope="session", autouse=True) @@ -66,6 +85,24 @@ def kfp_namespace(request): return request.config.getoption("--kfp-namespace") +@pytest.fixture(scope="session", autouse=True) +def fsx_subnet(request): + os.environ["FSX_SUBNET"] = request.config.getoption("--fsx-subnet") + return request.config.getoption("--fsx-subnet") + + +@pytest.fixture(scope="session", autouse=True) +def fsx_security_group(request): + os.environ["FSX_SECURITY_GROUP"] = 
request.config.getoption("--fsx-security-group") + return request.config.getoption("--fsx-security-group") + + +@pytest.fixture(scope="session", autouse=True) +def fsx_id(request): + os.environ["FSX_ID"] = request.config.getoption("--fsx-id") + return request.config.getoption("--fsx-id") + + @pytest.fixture(scope="session") def boto3_session(region): return boto3.Session(region_name=region) @@ -87,11 +124,30 @@ def kfp_client(): return kfp.Client(namespace=kfp_installed_namespace) -@pytest.fixture(scope="session") -def experiment_id(kfp_client): - exp_name = datetime.now().strftime("%Y-%m-%d") +def get_experiment_id(kfp_client): + exp_name = datetime.now().strftime("%Y-%m-%d-%H-%M") try: experiment = kfp_client.get_experiment(experiment_name=exp_name) except ValueError: experiment = kfp_client.create_experiment(name=exp_name) return experiment.id + + +@pytest.fixture(scope="session") +def experiment_id(kfp_client, tmp_path_factory, worker_id): + if not worker_id: + return get_experiment_id(kfp_client) + + # Locking taking as an example from + # https://github.com/pytest-dev/pytest-xdist#making-session-scoped-fixtures-execute-only-once + # get the temp directory shared by all workers + root_tmp_dir = tmp_path_factory.getbasetemp().parent + + fn = root_tmp_dir / "experiment_id" + with FileLock(str(fn) + ".lock"): + if fn.is_file(): + data = fn.read_text() + else: + data = get_experiment_id(kfp_client) + fn.write_text(data) + return data diff --git a/components/aws/sagemaker/tests/integration_tests/environment.yml b/components/aws/sagemaker/tests/integration_tests/environment.yml index 565777dc8..90c7645bc 100644 --- a/components/aws/sagemaker/tests/integration_tests/environment.yml +++ b/components/aws/sagemaker/tests/integration_tests/environment.yml @@ -12,6 +12,7 @@ dependencies: - pyyaml=5.3.* - flake8=3.7.* - flake8-black=0.1.* + - filelock=3.0.* - pip: - kubernetes==11.0.* - kfp==0.5.* diff --git a/components/aws/sagemaker/tests/integration_tests/pytest.ini b/components/aws/sagemaker/tests/integration_tests/pytest.ini index b8b25ae72..1aeed4a6a 100644 --- a/components/aws/sagemaker/tests/integration_tests/pytest.ini +++ b/components/aws/sagemaker/tests/integration_tests/pytest.ini @@ -1,4 +1,6 @@ [pytest] +junit_family = xunit2 addopts = -rA markers = - canary_test: test to be run as part of canaries. \ No newline at end of file + canary_test: test to be run as part of canaries. 
+ fsx_test: tests for FSx features \ No newline at end of file diff --git a/components/aws/sagemaker/tests/integration_tests/resources/config/create-workteam/config.yaml b/components/aws/sagemaker/tests/integration_tests/resources/config/create-workteam/config.yaml new file mode 100644 index 000000000..244148e6a --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/resources/config/create-workteam/config.yaml @@ -0,0 +1,10 @@ +PipelineDefinition: resources/definition/workteam_pipeline.py +TestName: create-workteam +Timeout: 3600 +Arguments: + region: ((REGION)) + team_name: 'test-workteam' + description: 'Team for GroundTruth Integ Test' + user_pool: 'user-pool' + user_groups: 'user-group' + client_id: 'client-id' diff --git a/components/aws/sagemaker/tests/integration_tests/resources/config/fsx-mnist-training/config.yaml b/components/aws/sagemaker/tests/integration_tests/resources/config/fsx-mnist-training/config.yaml new file mode 100644 index 000000000..48dafc6c5 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/resources/config/fsx-mnist-training/config.yaml @@ -0,0 +1,36 @@ +PipelineDefinition: resources/definition/training_pipeline.py +TestName: fsx-mnist-training +Timeout: 3600 +ExpectedTrainingImage: ((KMEANS_REGISTRY)).dkr.ecr.((REGION)).amazonaws.com/kmeans:1 +Arguments: + region: ((REGION)) + image: ((KMEANS_REGISTRY)).dkr.ecr.((REGION)).amazonaws.com/kmeans:1 + training_input_mode: File + hyperparameters: + k: "10" + feature_dim: "784" + channels: + - ChannelName: train + DataSource: + FileSystemDataSource: + FileSystemType: FSxLustre + FileSystemAccessMode: ro + FileSystemId: ((FSX_ID)) + DirectoryPath: /fsx/mnist_kmeans_example/input + CompressionType: None + ContentType: text/csv;label_size=0 + RecordWrapperType: None + InputMode: File + vpc_security_group_ids: ((FSX_SECURITY_GROUP)) + vpc_subnets: ((FSX_SUBNET)) + instance_type: ml.m5.xlarge + instance_count: 1 + volume_size: 50 + max_run_time: 3600 + model_artifact_path: s3://((DATA_BUCKET))/mnist_kmeans_example/output + network_isolation: "True" + traffic_encryption: "False" + spot_instance: "False" + max_wait_time: 3600 + checkpoint_config: "{}" + role: ((ROLE_ARN)) diff --git a/components/aws/sagemaker/tests/integration_tests/resources/config/image-classification-groundtruth/config.yaml b/components/aws/sagemaker/tests/integration_tests/resources/config/image-classification-groundtruth/config.yaml new file mode 100644 index 000000000..bd583c9e6 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/resources/config/image-classification-groundtruth/config.yaml @@ -0,0 +1,22 @@ +PipelineDefinition: resources/definition/groundtruth_pipeline.py +TestName: image-classification-groundtruth +Timeout: 10 +StatusToCheck: 'running' +Arguments: + region: ((REGION)) + role: ((ROLE_ARN)) + ground_truth_train_job_name: 'image-labeling' + ground_truth_label_attribute_name: 'category' + ground_truth_train_manifest_location: 's3://((DATA_BUCKET))/mini-image-classification/ground-truth-demo/train.manifest' + ground_truth_output_location: 's3://((DATA_BUCKET))/mini-image-classification/ground-truth-demo/output' + ground_truth_task_type: 'image classification' + ground_truth_worker_type: 'private' + ground_truth_label_category_config: 's3://((DATA_BUCKET))/mini-image-classification/ground-truth-demo/class_labels.json' + ground_truth_ui_template: 's3://((DATA_BUCKET))/mini-image-classification/ground-truth-demo/instructions.template' + ground_truth_title: 'Mini image classification' + 
ground_truth_description: 'Test for Ground Truth KFP component' + ground_truth_num_workers_per_object: '1' + ground_truth_time_limit: '30' + ground_truth_task_availibility: '3600' + ground_truth_max_concurrent_tasks: '20' + workteam_arn: 'workteam-arn' diff --git a/components/aws/sagemaker/tests/integration_tests/resources/config/kmeans-mnist-endpoint/config.yaml b/components/aws/sagemaker/tests/integration_tests/resources/config/kmeans-mnist-endpoint/config.yaml index e961a588b..d75320eb6 100644 --- a/components/aws/sagemaker/tests/integration_tests/resources/config/kmeans-mnist-endpoint/config.yaml +++ b/components/aws/sagemaker/tests/integration_tests/resources/config/kmeans-mnist-endpoint/config.yaml @@ -13,6 +13,7 @@ Arguments: image: ((KMEANS_REGISTRY)).dkr.ecr.((REGION)).amazonaws.com/kmeans:1 model_artifact_url: s3://((DATA_BUCKET))/mnist_kmeans_example/model/kmeans-mnist-model/model.tar.gz variant_name_1: variant-1 + initial_variant_weight_1: 1.0 instance_type_1: ml.m4.xlarge initial_instance_count_1: 1 network_isolation: "True" diff --git a/components/aws/sagemaker/tests/integration_tests/resources/definition/create_endpoint_pipeline.py b/components/aws/sagemaker/tests/integration_tests/resources/definition/create_endpoint_pipeline.py index 801b3458f..8b28e52ea 100644 --- a/components/aws/sagemaker/tests/integration_tests/resources/definition/create_endpoint_pipeline.py +++ b/components/aws/sagemaker/tests/integration_tests/resources/definition/create_endpoint_pipeline.py @@ -34,7 +34,7 @@ def create_endpoint_pipeline( model_artifact_url=model_artifact_url, network_isolation=network_isolation, role=role, - ).apply(use_aws_secret("aws-secret", "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY")) + ) sagemaker_deploy_op( region=region, @@ -46,7 +46,7 @@ def create_endpoint_pipeline( instance_type_1=instance_type_1, initial_instance_count_1=initial_instance_count_1, initial_variant_weight_1=initial_variant_weight_1, - ).apply(use_aws_secret("aws-secret", "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY")) + ) if __name__ == "__main__": diff --git a/components/aws/sagemaker/tests/integration_tests/resources/definition/create_model_pipeline.py b/components/aws/sagemaker/tests/integration_tests/resources/definition/create_model_pipeline.py index a7fa0afe0..75f4f6a26 100644 --- a/components/aws/sagemaker/tests/integration_tests/resources/definition/create_model_pipeline.py +++ b/components/aws/sagemaker/tests/integration_tests/resources/definition/create_model_pipeline.py @@ -26,7 +26,7 @@ def create_model_pipeline( model_artifact_url=model_artifact_url, network_isolation=network_isolation, role=role, - ).apply(use_aws_secret("aws-secret", "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY")) + ) if __name__ == "__main__": diff --git a/components/aws/sagemaker/tests/integration_tests/resources/definition/groundtruth_pipeline.py b/components/aws/sagemaker/tests/integration_tests/resources/definition/groundtruth_pipeline.py new file mode 100644 index 000000000..23456abf5 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/resources/definition/groundtruth_pipeline.py @@ -0,0 +1,59 @@ +import kfp +import json +import copy +from kfp import components +from kfp import dsl +from kfp.aws import use_aws_secret + +sagemaker_gt_op = components.load_component_from_file( + "../../ground_truth/component.yaml" +) + + +@dsl.pipeline( + name="SageMaker GroundTruth image classification test pipeline", + description="SageMaker GroundTruth image classification test pipeline", +) +def ground_truth_test( + 
region="", + ground_truth_train_job_name="", + ground_truth_label_attribute_name="", + ground_truth_train_manifest_location="", + ground_truth_output_location="", + ground_truth_task_type="", + ground_truth_worker_type="", + ground_truth_label_category_config="", + ground_truth_ui_template="", + ground_truth_title="", + ground_truth_description="", + ground_truth_num_workers_per_object="", + ground_truth_time_limit="", + ground_truth_task_availibility="", + ground_truth_max_concurrent_tasks="", + role="", + workteam_arn="", +): + + ground_truth_train = sagemaker_gt_op( + region=region, + role=role, + job_name=ground_truth_train_job_name, + label_attribute_name=ground_truth_label_attribute_name, + manifest_location=ground_truth_train_manifest_location, + output_location=ground_truth_output_location, + task_type=ground_truth_task_type, + worker_type=ground_truth_worker_type, + workteam_arn=workteam_arn, + label_category_config=ground_truth_label_category_config, + ui_template=ground_truth_ui_template, + title=ground_truth_title, + description=ground_truth_description, + num_workers_per_object=ground_truth_num_workers_per_object, + time_limit=ground_truth_time_limit, + task_availibility=ground_truth_task_availibility, + max_concurrent_tasks=ground_truth_max_concurrent_tasks, + ) + + +if __name__ == "__main__": + kfp.compiler.Compiler().compile(ground_truth_test, __file__ + ".yaml") diff --git a/components/aws/sagemaker/tests/integration_tests/resources/definition/hpo_pipeline.py b/components/aws/sagemaker/tests/integration_tests/resources/definition/hpo_pipeline.py index 721658355..cd1a50fb5 100644 --- a/components/aws/sagemaker/tests/integration_tests/resources/definition/hpo_pipeline.py +++ b/components/aws/sagemaker/tests/integration_tests/resources/definition/hpo_pipeline.py @@ -56,7 +56,7 @@ def hpo_pipeline( network_isolation=network_isolation, max_wait_time=max_wait_time, role=role, - ).apply(use_aws_secret("aws-secret", "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY")) + ) if __name__ == "__main__": diff --git a/components/aws/sagemaker/tests/integration_tests/resources/definition/training_pipeline.py b/components/aws/sagemaker/tests/integration_tests/resources/definition/training_pipeline.py index e69d103e5..16e490f78 100644 --- a/components/aws/sagemaker/tests/integration_tests/resources/definition/training_pipeline.py +++ b/components/aws/sagemaker/tests/integration_tests/resources/definition/training_pipeline.py @@ -25,6 +25,8 @@ def training_pipeline( spot_instance="", max_wait_time="", checkpoint_config="{}", + vpc_security_group_ids="", + vpc_subnets="", role="", ): sagemaker_train_op( @@ -45,8 +47,10 @@ def training_pipeline( spot_instance=spot_instance, max_wait_time=max_wait_time, checkpoint_config=checkpoint_config, + vpc_security_group_ids=vpc_security_group_ids, + vpc_subnets=vpc_subnets, role=role, - ).apply(use_aws_secret("aws-secret", "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY")) + ) if __name__ == "__main__": diff --git a/components/aws/sagemaker/tests/integration_tests/resources/definition/transform_job_pipeline.py b/components/aws/sagemaker/tests/integration_tests/resources/definition/transform_job_pipeline.py index 8ac879f81..e8b38697f 100644 --- a/components/aws/sagemaker/tests/integration_tests/resources/definition/transform_job_pipeline.py +++ b/components/aws/sagemaker/tests/integration_tests/resources/definition/transform_job_pipeline.py @@ -40,7 +40,7 @@ def batch_transform_pipeline( model_artifact_url=model_artifact_url, network_isolation=network_isolation, 
role=role, - ).apply(use_aws_secret("aws-secret", "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY")) + ) sagemaker_batch_transform_op( region=region, @@ -57,7 +57,7 @@ def batch_transform_pipeline( split_type=split_type, compression_type=compression_type, output_location=output_location, - ).apply(use_aws_secret("aws-secret", "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY")) + ) if __name__ == "__main__": diff --git a/components/aws/sagemaker/tests/integration_tests/resources/definition/workteam_pipeline.py b/components/aws/sagemaker/tests/integration_tests/resources/definition/workteam_pipeline.py new file mode 100644 index 000000000..3e2273301 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/resources/definition/workteam_pipeline.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python3 + +import kfp +import json +import copy +from kfp import components +from kfp import dsl +from kfp.aws import use_aws_secret + +sagemaker_workteam_op = components.load_component_from_file( + "../../workteam/component.yaml" +) + + +@dsl.pipeline( + name="SageMaker WorkTeam test pipeline", + description="SageMaker WorkTeam test pipeline", +) +def workteam_test( + region="", team_name="", description="", user_pool="", user_groups="", client_id="" +): + + workteam = sagemaker_workteam_op( + region=region, + team_name=team_name, + description=description, + user_pool=user_pool, + user_groups=user_groups, + client_id=client_id, + ) + + +if __name__ == "__main__": + kfp.compiler.Compiler().compile( + workteam_test, "SageMaker_WorkTeam_Pipelines" + ".yaml" + ) diff --git a/components/aws/sagemaker/tests/integration_tests/scripts/fsx_setup b/components/aws/sagemaker/tests/integration_tests/scripts/fsx_setup new file mode 100755 index 000000000..3319b4235 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/scripts/fsx_setup @@ -0,0 +1,79 @@ +#!/usr/bin/env bash + +# Helper script that provides a set of methods to configure VPC, EFS and FSx +# ready for the full suite of integration tests. + +function create_fsx_security_group() { + echo "[Creating FSx Security Group] Creating security group" + + IFS=',' read -r -a subnets_list <<< "$EKS_PRIVATE_SUBNETS" + local vpc_id="$(aws ec2 describe-subnets --subnet-ids "${subnets_list[0]}" \ + --output text --query "Subnets[0].VpcId" --region ${REGION})" + + local fsx_security_group="${DEPLOY_NAME}-fsx-sg" + FSX_SECURITY_GROUP_ID="$(aws ec2 create-security-group --region "${REGION}" \ + --vpc-id ${vpc_id} \ + --description "Security group for FSx in ${DEPLOY_NAME}" \ + --group-name "${fsx_security_group}" --output text --query "GroupId")" + + # Open FSx port to internal security group + aws ec2 authorize-security-group-ingress \ + --region "${REGION}" --group-id "${FSX_SECURITY_GROUP_ID}" \ + --protocol tcp --port 988 --source-group "${FSX_SECURITY_GROUP_ID}" + + echo "[Creating FSx Security Group] Created security group ${FSX_SECURITY_GROUP_ID}" +} + +function cleanup_fsx_security_group() { + if [ ! -z "${FSX_SECURITY_GROUP_ID}" ]; then + # You must remove any self-referencing ingress rules before deleting a SG + aws ec2 revoke-security-group-ingress --region "${REGION}" \ + --group-id "${FSX_SECURITY_GROUP_ID}" --protocol tcp --port 988 \ + --source-group "${FSX_SECURITY_GROUP_ID}" + + aws ec2 delete-security-group --group-id "${FSX_SECURITY_GROUP_ID}" --region "${REGION}" + fi +} + +# Creates a new FSX LUSTRE instance and automatically imports the data set from S3. 
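+# Expects REGION, S3_DATA_BUCKET, EKS_PRIVATE_SUBNETS and FSX_SECURITY_GROUP_ID to already
+# be set by the caller; sets FSX_ID to the new file system ID on success.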
+function create_fsx_instance() { + echo "[Creating FSx] Creating file system" + IFS=',' read -r -a subnets_list <<< "$EKS_PRIVATE_SUBNETS" + + local fs_id=$(aws fsx create-file-system \ + --file-system-type LUSTRE \ + --lustre-configuration ImportPath=s3://${S3_DATA_BUCKET}/mnist_kmeans_example \ + --storage-capacity 1200 \ + --subnet-ids "${subnets_list[0]}" \ + --security-group-ids "${FSX_SECURITY_GROUP_ID}" \ + --tags Key="Name",Value=fsx-integ-lustre \ + --region "${REGION}" \ + --output text \ + --query "FileSystem.FileSystemId") + + echo "[Creating FSx] Waiting for file system to be in state AVAILABLE" + + local fs_status="CREATING" + until [[ "${fs_status}" != "CREATING" ]]; do + fs_status="$(aws fsx describe-file-systems --region "${REGION}" --file-system-id ${fs_id} --output text --query "FileSystems[0].Lifecycle")" + sleep 10 + done + aws fsx --region "${REGION}" describe-file-systems --file-system-id ${fs_id} + + if [[ "${fs_status}" != "AVAILABLE" ]]; then + echo "[Creating FSx] FSx cluster never reached state 'Available'" + exit 1 + fi + + FSX_ID="${fs_id}" + + echo "[Creating FSx] File system now available as ${FSX_ID}" + + return 0 +} + +function delete_fsx_instance() { + if [ ! -z "${FSX_ID}" ]; then + aws fsx delete-file-system --file-system-id "${FSX_ID}" --region "${REGION}" + fi +} \ No newline at end of file diff --git a/components/aws/sagemaker/tests/integration_tests/scripts/generate_iam_role b/components/aws/sagemaker/tests/integration_tests/scripts/generate_iam_role new file mode 100755 index 000000000..7e4d1e9b1 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/scripts/generate_iam_role @@ -0,0 +1,68 @@ +#!/usr/bin/env bash + +# Helper script to generate an IAM Role needed to install role-based authentication to a KFP service account. +# +# Run as: +# $ ./generate_iam_role ${cluster_arn/cluster_name} ${role_name} ${cluster_region} [optional: ${service_namespace} ${service_account}] +# + +CLUSTER_ARN="${1}" +ROLE_NAME="${2}" +CLUSTER_REGION="${3:-us-east-1}" +SERVICE_NAMESPACE="${4:-kubeflow}" +SERVICE_ACCOUNT="${5:-pipeline-runner}" +aws_account=$(aws sts get-caller-identity --query Account --output text) +trustfile="trust.json" + +cwd=$(dirname $(realpath $0)) + +# if using an existing cluster, use the cluster arn to get the region and cluster name +# example, cluster_arn=arn:aws:eks:us-east-1:12345678910:cluster/test +cluster_name=$(echo ${CLUSTER_ARN} | cut -d'/' -f2) + +# A function to get the OIDC_ID associated with an EKS cluster +function get_oidc_id { + # TODO: Ideally this should be based on version compatibility instead of command failure + eksctl utils associate-iam-oidc-provider --cluster ${cluster_name} --region ${CLUSTER_REGION} --approve + if [[ $? -ge 1 ]]; then + eksctl utils associate-iam-oidc-provider --name ${cluster_name} --region ${CLUSTER_REGION} --approve + fi + + local oidc=$(aws eks describe-cluster --name ${cluster_name} --region ${CLUSTER_REGION} --query cluster.identity.oidc.issuer --output text) + oidc_id=$(echo ${oidc} | rev | cut -d'/' -f1 | rev) +} + +# A function that generates an IAM role for the given account, cluster, namespace, region +# Parameter: +# $1: Name of the trust file to generate. +function create_namespaced_iam_role { + local trustfile="${1}" + # Check if role already exists + aws iam get-role --role-name ${ROLE_NAME} + if [[ $? -eq 0 ]]; then + echo "A role for this cluster and namespace already exists in this account, assuming sagemaker access and proceeding." 
+ else + echo "IAM Role does not exist, creating a new Role for the cluster" + aws iam create-role --role-name ${ROLE_NAME} --assume-role-policy-document file://${trustfile} --output=text --query "Role.Arn" + aws iam attach-role-policy --role-name ${ROLE_NAME} --policy-arn arn:aws:iam::aws:policy/AmazonSageMakerFullAccess + fi +} + +# Remove the generated trust file +# Parameter: +# $1: Name of the trust file to delete. +function delete_generated_file { + rm "${1}" +} + +echo "Get the OIDC ID for the cluster" +get_oidc_id +echo "Delete the trust json file if it already exists" +delete_generated_file "${trustfile}" +echo "Generate a trust json" +"$cwd"/generate_trust_policy ${CLUSTER_REGION} ${aws_account} ${oidc_id} ${SERVICE_NAMESPACE} ${SERVICE_ACCOUNT} > "${trustfile}" +echo "Create the IAM Role using these values" +create_namespaced_iam_role "${trustfile}" +echo "Cleanup for the next run" +delete_generated_file "${trustfile}" + diff --git a/components/aws/sagemaker/tests/integration_tests/scripts/generate_trust_policy b/components/aws/sagemaker/tests/integration_tests/scripts/generate_trust_policy new file mode 100755 index 000000000..1c10fa10f --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/scripts/generate_trust_policy @@ -0,0 +1,39 @@ +#!/usr/bin/env bash + +# Helper script to generate trust the policy needed to assign role-based authentication to a KFP service account. +# +# Run as: +# $ ./generate_trust_policy ${EKS_CLUSTER_REGION} ${AWS_ACCOUNT_ID} ${OIDC_ID} ${SERVICE_NAMESPACE} ${SERVICE_ACCOUNT} > trust.json +# +# For example: +# $ ./generate_trust_policy us-west-2 123456789012 D48675832CA65BD10A532F597OIDCID > trust.json +# This will create a file `trust.json` containing a role policy that enables the KFP service runner in an EKS cluster to assume AWS roles. +# +# The SERVICE_NAMESPACE parameter is for when you want to run Kubeflow in a custom namespace other than "kubeflow". +# The SERVICE_ACCOUNT parameter is for when you want to give permissions to a service account other than the default "pipeline-runner". 
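+# The positional arguments read below are substituted into the trust policy document
+# printed at the end of this script.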
+ +cluster_region="$1" +account_number="$2" +oidc_id="$3" +service_namespace="${4}" +service_account="${5}" + +printf '{ + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Principal": { + "Federated": "arn:aws:iam::'"${account_number}"':oidc-provider/oidc.eks.'"${cluster_region}"'.amazonaws.com/id/'"${oidc_id}"'" + }, + "Action": "sts:AssumeRoleWithWebIdentity", + "Condition": { + "StringEquals": { + "oidc.eks.'"${cluster_region}"'.amazonaws.com/id/'"${oidc_id}"':aud": "sts.amazonaws.com", + "oidc.eks.'"${cluster_region}"'.amazonaws.com/id/'"${oidc_id}"':sub": "system:serviceaccount:'"${service_namespace}"':'"${service_account}"'" + } + } + } + ] +} +' \ No newline at end of file diff --git a/components/aws/sagemaker/tests/integration_tests/scripts/run_integration_tests b/components/aws/sagemaker/tests/integration_tests/scripts/run_integration_tests new file mode 100755 index 000000000..3dfbabb88 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/scripts/run_integration_tests @@ -0,0 +1,206 @@ +#!/usr/bin/env bash + +set -u +set -o pipefail + +usage(){ + echo "Usage: $0 -n [-r ]" + exit 1 +} + +cwd=$(dirname $(realpath $0)) +source "$cwd"/fsx_setup + +### Input parameters +DEPLOY_NAME="sagemaker-kfp-"$(date '+%Y-%m-%d-%H-%M-%S')"" # The name given to the entire deployment (tagging all resources) +REGION=${REGION:-"$(aws configure get region)"} # Deployment region + +### Configuration parameters +EKS_EXISTING_CLUSTER=${EKS_EXISTING_CLUSTER:-""} # Use an existing EKS cluster +EKS_CLUSTER_VERSION=${EKS_CLUSTER_VERSION:-"1.15"} # EKS cluster K8s version +EKS_NODE_COUNT=${EKS_NODE_COUNT:-"1"} # The initial node count of the EKS cluster +EKS_PUBLIC_SUBNETS=${EKS_PUBLIC_SUBNETS:-""} +EKS_PRIVATE_SUBNETS=${EKS_PRIVATE_SUBNETS:-""} + +### Testing parameters +MINIO_LOCAL_PORT=${MINIO_LOCAL_PORT:-9000} +KFP_NAMESPACE=${KFP_NAMESPACE:-"kubeflow"} +KFP_SERVICE_ACCOUNT=${KFP_SERVICE_ACCOUNT:-"pipeline-runner"} + +PYTEST_MARKER=${PYTEST_MARKER:-""} +S3_DATA_BUCKET=${S3_DATA_BUCKET:-""} +SAGEMAKER_EXECUTION_ROLE_ARN=${SAGEMAKER_EXECUTION_ROLE_ARN:-""} + +SKIP_FSX_TESTS=${SKIP_FSX_TESTS:-"false"} + +while getopts ":n:r:s:" opt; do + case $opt in + n) + DEPLOY_NAME="$OPTARG" + ;; + s) + S3_DATA_BUCKET="$OPTARG" + ;; + r) + REGION="$OPTARG" + ;; + \?) + echo "Invalid option: -$OPTARG" >&2 + exit 1 + ;; + :) + echo "Option -$OPTARG requires an argument." >&2 + exit 1 + ;; + esac +done + +# Ensure a deployment name was specified +if [ "$DEPLOY_NAME" == "" ]; then + echo "Missing deployment name" + usage + exit 1 +fi + +if [ "$S3_DATA_BUCKET" == "" ]; then + echo "Missing S3 data bucket name" + usage + exit 1 +fi + +if [[ "$SKIP_FSX_TESTS" == "false" && "$EKS_PRIVATE_SUBNETS" == "" ]]; then + echo "Missing EKS private subnets" + usage + exit 1 +fi + +function cleanup() { + set +e + + cleanup_kfp + delete_generated_role + + if [[ "${SKIP_FSX_TESTS}" == "false" ]]; then + delete_fsx_instance + # Sleep in order for the security group to detach before attempting to delete it + sleep 15s + cleanup_fsx_security_group + fi + + if [[ -z "${EKS_EXISTING_CLUSTER}" ]]; then + delete_eks + fi +} + +# Set the trap to clean up resources in the case of an error +trap cleanup EXIT +set -e + +function launch_eks() { + echo "[Creating EKS] Launching EKS cluster $EKS_CLUSTER_NAME" + + eksctl_args=( --managed --nodes "${EKS_NODE_COUNT}" --node-type=c5.xlarge --timeout=30m --region "${REGION}" --auto-kubeconfig --version "${EKS_CLUSTER_VERSION}" ) + [ ! 
-z "${EKS_PUBLIC_SUBNETS}" ] && eksctl_args+=( --vpc-public-subnets="${EKS_PUBLIC_SUBNETS}" ) + [ ! -z "${EKS_PRIVATE_SUBNETS}" ] && eksctl_args+=( --vpc-private-subnets="${EKS_PRIVATE_SUBNETS}" ) + + eksctl create cluster "${EKS_CLUSTER_NAME}" "${eksctl_args[@]}" + + aws eks update-kubeconfig --name "$EKS_CLUSTER_NAME" --region "$REGION" + + echo "[Creating EKS] $EKS_CLUSTER_NAME launched" +} + +function delete_eks() { + eksctl delete cluster --name "${EKS_CLUSTER_NAME}" --region "${REGION}" +} + +function install_kfp() { + echo "[Installing KFP] Applying KFP manifests" + + PIPELINE_VERSION=0.5.1 + kubectl apply -k github.com/kubeflow/pipelines/manifests/kustomize/cluster-scoped-resources?ref=$PIPELINE_VERSION + kubectl wait --for condition=established --timeout=60s crd/applications.app.k8s.io + kubectl apply -k github.com/kubeflow/pipelines/manifests/kustomize/env/dev?ref=$PIPELINE_VERSION + + echo "[Installing KFP] Port-forwarding Minio" + + kubectl wait --for=condition=ready -n "${KFP_NAMESPACE}" pod -l app=minio --timeout=5m + kubectl port-forward -n kubeflow svc/minio-service $MINIO_LOCAL_PORT:9000 & + MINIO_PID=$! + + echo "[Installing KFP] Minio port-forwarded to ${MINIO_LOCAL_PORT}" + + echo "[Installing KFP] Waiting for pods to stand up" + + kubectl wait --for=condition=ready -n "${KFP_NAMESPACE}" pod -l app=ml-pipeline --timeout=5m + + # TODO: Replace with calculated waits + # For the moment we don't know which pods will be slower, so we are just relying on a fixed interval + sleep 3m + + echo "[Installing KFP] Pipeline pods are ready" +} + +function generate_iam_role_name() { + OIDC_ROLE_NAME="$(echo "${DEPLOY_NAME}-kubeflow-role" | cut -c1-64)" + OIDC_ROLE_ARN="arn:aws:iam::$(aws sts get-caller-identity --query=Account --output=text):role/${OIDC_ROLE_NAME}" +} + +function install_generated_role() { + kubectl patch serviceaccount -n ${KFP_NAMESPACE} ${KFP_SERVICE_ACCOUNT} --patch '{"metadata": {"annotations": {"eks.amazonaws.com/role-arn": "'"${OIDC_ROLE_ARN}"'"}}}' +} + +function delete_generated_role() { + # Delete the role associated with the cluster thats being deleted + aws iam detach-role-policy --role-name "${OIDC_ROLE_NAME}" --policy-arn arn:aws:iam::aws:policy/AmazonSageMakerFullAccess + aws iam delete-role --role-name "${OIDC_ROLE_NAME}" +} + +function cleanup_kfp() { + # Clean up Minio + if [ ! 
-z "${MINIO_PID}" ]; then + kill -9 $MINIO_PID || true + fi +} + +if [[ -z "${EKS_EXISTING_CLUSTER}" ]]; then + # Launch all of these in parallel to reduce start-up time + EKS_CLUSTER_NAME="${DEPLOY_NAME}-eks-cluster" + launch_eks & + + if [[ "${SKIP_FSX_TESTS}" == "false" ]]; then + create_fsx_security_group + create_fsx_instance + fi + + wait +else + aws eks update-kubeconfig --name "${EKS_EXISTING_CLUSTER}" --region "$REGION" + EKS_CLUSTER_NAME="${EKS_EXISTING_CLUSTER}" + DEPLOY_NAME="${EKS_EXISTING_CLUSTER}" + + if [[ "${SKIP_FSX_TESTS}" == "false" ]]; then + create_fsx_security_group + create_fsx_instance + fi + wait +fi + +generate_iam_role_name +"$cwd"/generate_iam_role ${EKS_CLUSTER_NAME} ${OIDC_ROLE_NAME} ${REGION} ${KFP_NAMESPACE} ${KFP_SERVICE_ACCOUNT} +install_kfp +install_generated_role + +pytest_args=( --region "${REGION}" --role-arn "${SAGEMAKER_EXECUTION_ROLE_ARN}" --s3-data-bucket "${S3_DATA_BUCKET}" --minio-service-port "${MINIO_LOCAL_PORT}" --kfp-namespace "${KFP_NAMESPACE}" ) + +if [[ "${SKIP_FSX_TESTS}" == "true" ]]; then + pytest_args+=( -m "not fsx_test" ) +else + # Get the VPC arguments for the FSx test + IFS=',' read -r -a private_subnets <<< "$EKS_PRIVATE_SUBNETS" + pytest_args+=( --fsx-subnet "${private_subnets[0]}" --fsx-security-group "${FSX_SECURITY_GROUP_ID}" --fsx-id "${FSX_ID}" ) +fi + +[ ! -z "${PYTEST_MARKER}" ] && pytest_args+=( -m "${PYTEST_MARKER}" ) + +cd tests/integration_tests && python -m pytest "${pytest_args[@]}" --junitxml ./integration_tests.log -n $(nproc) \ No newline at end of file diff --git a/components/aws/sagemaker/tests/integration_tests/utils/__init__.py b/components/aws/sagemaker/tests/integration_tests/utils/__init__.py index 7b3be9448..7e3e71c65 100644 --- a/components/aws/sagemaker/tests/integration_tests/utils/__init__.py +++ b/components/aws/sagemaker/tests/integration_tests/utils/__init__.py @@ -30,6 +30,18 @@ def get_kfp_namespace(): return os.environ.get("NAMESPACE") +def get_fsx_subnet(): + return os.environ.get("FSX_SUBNET") + + +def get_fsx_security_group(): + return os.environ.get("FSX_SECURITY_GROUP") + + +def get_fsx_id(): + return os.environ.get("FSX_ID") + + def get_algorithm_image_registry(region, algorithm): return get_image_uri(region, algorithm).split(".")[0] @@ -61,12 +73,17 @@ def replace_placeholders(input_filename, output_filename): "((ROLE_ARN))": get_role_arn(), "((DATA_BUCKET))": get_s3_data_bucket(), "((KMEANS_REGISTRY))": get_algorithm_image_registry(region, "kmeans"), + "((FSX_ID))": get_fsx_id(), + "((FSX_SUBNET))": get_fsx_subnet(), + "((FSX_SECURITY_GROUP))": get_fsx_security_group(), } filedata = "" with open(input_filename, "r") as f: filedata = f.read() for replace_key, replace_value in variables_to_replace.items(): + if replace_value is None: + continue filedata = filedata.replace(replace_key, replace_value) with open(output_filename, "w") as f: diff --git a/components/aws/sagemaker/tests/integration_tests/utils/kfp_client_utils.py b/components/aws/sagemaker/tests/integration_tests/utils/kfp_client_utils.py index 9d3882d7b..a3ef367e2 100644 --- a/components/aws/sagemaker/tests/integration_tests/utils/kfp_client_utils.py +++ b/components/aws/sagemaker/tests/integration_tests/utils/kfp_client_utils.py @@ -1,6 +1,7 @@ import os import utils import pytest +import time from utils import argo_utils @@ -23,9 +24,23 @@ def compile_and_run_pipeline( return run.id -def wait_for_job_completion(client, run_id, timeout): +def wait_for_job_completion(client, run_id, timeout, status_to_check): response = 
client.wait_for_run_completion(run_id, timeout) - status = response.run.status.lower() == "succeeded" + status = None + if response.run.status: + status = response.run.status.lower() == status_to_check + return status + + +def wait_for_job_status(client, run_id, timeout, status_to_check="succeeded"): + if status_to_check == "succeeded": + status = wait_for_job_completion(client, run_id, timeout, status_to_check) + else: + time.sleep(timeout) + response = client.get_run(run_id) + status = None + if response.run.status: + status = response.run.status.lower() == status_to_check return status @@ -43,6 +58,7 @@ def compile_run_monitor_pipeline( output_file_dir, pipeline_name, timeout, + status_to_check="succeeded", check=True, ): run_id = compile_and_run_pipeline( @@ -53,7 +69,7 @@ def compile_run_monitor_pipeline( output_file_dir, pipeline_name, ) - status = wait_for_job_completion(client, run_id, timeout) + status = wait_for_job_status(client, run_id, timeout, status_to_check) workflow_json = get_workflow_json(client, run_id) if check and not status: diff --git a/components/aws/sagemaker/tests/integration_tests/utils/sagemaker_utils.py b/components/aws/sagemaker/tests/integration_tests/utils/sagemaker_utils.py index ad963b4eb..4898d89ad 100644 --- a/components/aws/sagemaker/tests/integration_tests/utils/sagemaker_utils.py +++ b/components/aws/sagemaker/tests/integration_tests/utils/sagemaker_utils.py @@ -24,3 +24,46 @@ def describe_hpo_job(client, job_name): def describe_transform_job(client, job_name): return client.describe_transform_job(TransformJobName=job_name) + + +def describe_workteam(client, workteam_name): + return client.describe_workteam(WorkteamName=workteam_name) + + +def list_workteams(client): + return client.list_workteams() + + +def get_cognito_member_definitions(client): + # This is one way to get the user_pool and client_id for the Sagemaker Workforce. + # An alternative would be to take these values as user input via params or a config file. + # The current mechanism expects that there exists atleast one private workteam in the region. 
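+    # list_workteams returns every workteam in the region; the first entry's
+    # CognitoMemberDefinition is reused when the test workteam is created.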
+ default_workteam = list_workteams(client)["Workteams"][0]["MemberDefinitions"][0][ + "CognitoMemberDefinition" + ] + return ( + default_workteam["UserPool"], + default_workteam["ClientId"], + default_workteam["UserGroup"], + ) + + +def list_labeling_jobs_for_workteam(client, workteam_arn): + return client.list_labeling_jobs_for_workteam(WorkteamArn=workteam_arn) + + +def describe_labeling_job(client, labeling_job_name): + return client.describe_labeling_job(LabelingJobName=labeling_job_name) + + +def get_workteam_arn(client, workteam_name): + response = describe_workteam(client, workteam_name) + return response["Workteam"]["WorkteamArn"] + + +def delete_workteam(client, workteam_name): + client.delete_workteam(WorkteamName=workteam_name) + + +def stop_labeling_job(client, labeling_job_name): + client.stop_labeling_job(LabelingJobName=labeling_job_name) diff --git a/components/aws/sagemaker/tests/unit_tests/tests/test_batch_transform.py b/components/aws/sagemaker/tests/unit_tests/tests/test_batch_transform.py index d1ced1efd..7a8e6dd70 100644 --- a/components/aws/sagemaker/tests/unit_tests/tests/test_batch_transform.py +++ b/components/aws/sagemaker/tests/unit_tests/tests/test_batch_transform.py @@ -1,7 +1,7 @@ import json import unittest -from unittest.mock import patch, Mock, MagicMock +from unittest.mock import patch, call, Mock, MagicMock, mock_open from botocore.exceptions import ClientError from datetime import datetime @@ -19,7 +19,7 @@ required_args = [ '--input_location', 's3://fake-bucket/data', '--output_location', 's3://fake-bucket/output', '--instance_type', 'ml.c5.18xlarge', - '--output_location_file', 'tmp/' + '--output_location_file', 'tmp/output.txt' ] class BatchTransformTestCase(unittest.TestCase): @@ -28,12 +28,135 @@ class BatchTransformTestCase(unittest.TestCase): parser = batch_transform.create_parser() cls.parser = parser - def test_sample(self): - args = self.parser.parse_args(required_args) - response = _utils.create_transform_job_request(vars(args)) - self.assertEqual(response['TransformOutput']['S3OutputPath'], 's3://fake-bucket/output') - def test_empty_string(self): - args = self.parser.parse_args(required_args) - response = _utils.create_transform_job_request(vars(args)) - test_utils.check_empty_string_values(response) \ No newline at end of file + def test_create_parser(self): + self.assertIsNotNone(self.parser) + + + def test_main(self): + # Mock out all of utils except parser + batch_transform._utils = MagicMock() + batch_transform._utils.add_default_client_arguments = _utils.add_default_client_arguments + + # Set some static returns + batch_transform._utils.create_transform_job.return_value = 'test-batch-job' + + with patch('builtins.open', mock_open()) as file_open: + batch_transform.main(required_args) + + # Check if correct requests were created and triggered + batch_transform._utils.create_transform_job.assert_called() + batch_transform._utils.wait_for_transform_job.assert_called() + + # Check the file outputs + file_open.assert_has_calls([ + call('tmp/output.txt', 'w') + ]) + + file_open().write.assert_has_calls([ + call('s3://fake-bucket/output') + ]) + + + def test_batch_transform(self): + mock_client = MagicMock() + mock_args = self.parser.parse_args(required_args + ['--job_name', 'test-batch-job']) + response = _utils.create_transform_job(mock_client, vars(mock_args)) + + mock_client.create_transform_job.assert_called_once_with( + DataProcessing={'InputFilter': '', 'OutputFilter': '', 'JoinSource': 'None'}, + Environment={}, + 
MaxConcurrentTransforms=0, + MaxPayloadInMB=6, + ModelName='model-test', + Tags=[], + TransformInput={'DataSource': {'S3DataSource': {'S3DataType': 'S3Prefix', 'S3Uri': 's3://fake-bucket/data'}}, + 'ContentType': '', 'CompressionType': 'None', 'SplitType': 'None'}, + TransformJobName='test-batch-job', + TransformOutput={'S3OutputPath': 's3://fake-bucket/output', 'Accept': None, 'KmsKeyId': ''}, + TransformResources={'InstanceType': 'ml.c5.18xlarge', 'InstanceCount': None, 'VolumeKmsKeyId': ''} + ) + + self.assertEqual(response, 'test-batch-job') + + + def test_pass_all_arguments(self): + mock_client = MagicMock() + mock_args = self.parser.parse_args(required_args + [ + '--job_name', 'test-batch-job', + '--max_concurrent', '5', + '--max_payload', '100', + '--batch_strategy', 'MultiRecord', + '--data_type', 'S3Prefix', + '--compression_type', 'Gzip', + '--split_type', 'RecordIO', + '--assemble_with', 'Line', + '--join_source', 'Input', + '--tags', '{"fake_key": "fake_value"}' + ]) + response = _utils.create_transform_job(mock_client, vars(mock_args)) + + mock_client.create_transform_job.assert_called_once_with( + BatchStrategy='MultiRecord', + DataProcessing={'InputFilter': '', 'OutputFilter': '', + 'JoinSource': 'Input'}, + Environment={}, + MaxConcurrentTransforms=5, + MaxPayloadInMB=100, + ModelName='model-test', + Tags=[{'Key': 'fake_key', 'Value': 'fake_value'}], + TransformInput={ + 'DataSource': {'S3DataSource': {'S3DataType': 'S3Prefix', + 'S3Uri': 's3://fake-bucket/data'}}, + 'ContentType': '', + 'CompressionType': 'Gzip', + 'SplitType': 'RecordIO', + }, + TransformJobName='test-batch-job', + TransformOutput={ + 'S3OutputPath': 's3://fake-bucket/output', + 'Accept': None, + 'AssembleWith': 'Line', + 'KmsKeyId': '', + }, + TransformResources={'InstanceType': 'ml.c5.18xlarge', + 'InstanceCount': None, 'VolumeKmsKeyId': ''} + ) + + + def test_sagemaker_exception_in_batch_transform(self): + mock_client = MagicMock() + mock_exception = ClientError({"Error": {"Message": "SageMaker broke"}}, "batch_transform") + mock_client.create_transform_job.side_effect = mock_exception + mock_args = self.parser.parse_args(required_args) + + with self.assertRaises(Exception): + _utils.create_transform_job(mock_client, vars(mock_args)) + + + def test_wait_for_transform_job_creation(self): + mock_client = MagicMock() + mock_client.describe_transform_job.side_effect = [ + {"TransformJobStatus": "InProgress"}, + {"TransformJobStatus": "Completed"}, + {"TransformJobStatus": "Should not be called"} + ] + + _utils.wait_for_transform_job(mock_client, 'test-batch', 0) + self.assertEqual(mock_client.describe_transform_job.call_count, 2) + + + def test_wait_for_failed_job(self): + mock_client = MagicMock() + mock_client.describe_transform_job.side_effect = [ + {"TransformJobStatus": "InProgress"}, + {"TransformJobStatus": "Failed", "FailureReason": "SYSTEM FAILURE"}, + {"TransformJobStatus": "Should not be called"} + ] + + with self.assertRaises(Exception): + _utils.wait_for_transform_job(mock_client, 'test-batch', 0) + + self.assertEqual(mock_client.describe_transform_job.call_count, 2) + + diff --git a/components/aws/sagemaker/tests/unit_tests/tests/test_deploy.py b/components/aws/sagemaker/tests/unit_tests/tests/test_deploy.py index 63c720561..aaf367034 100644 --- a/components/aws/sagemaker/tests/unit_tests/tests/test_deploy.py +++ b/components/aws/sagemaker/tests/unit_tests/tests/test_deploy.py @@ -1,7 +1,7 @@ import json import unittest -from unittest.mock import patch, Mock, MagicMock +from unittest.mock 
import patch, call, Mock, MagicMock, mock_open from botocore.exceptions import ClientError from datetime import datetime @@ -21,12 +21,155 @@ class DeployTestCase(unittest.TestCase): parser = deploy.create_parser() cls.parser = parser - def test_sample(self): - args = self.parser.parse_args(required_args) - response = _utils.create_endpoint_config_request(vars(args)) - self.assertEqual(response['EndpointConfigName'], 'EndpointConfig-test') + def test_create_parser(self): + self.assertIsNotNone(self.parser) + + def test_main(self): + # Mock out all of utils except parser + deploy._utils = MagicMock() + deploy._utils.add_default_client_arguments = _utils.add_default_client_arguments + + # Set some static returns + deploy._utils.deploy_model.return_value = 'test-endpoint-name' + + with patch('builtins.open', mock_open()) as file_open: + deploy.main(required_args) + + # Check if correct requests were created and triggered + deploy._utils.deploy_model.assert_called() + deploy._utils.wait_for_endpoint_creation.assert_called() + + # Check the file outputs + file_open.assert_has_calls([ + call('/tmp/endpoint_name.txt', 'w') + ]) + + file_open().write.assert_has_calls([ + call('test-endpoint-name') + ]) + + def test_deploy_model(self): + mock_client = MagicMock() + mock_args = self.parser.parse_args(required_args + ['--endpoint_name', 'test-endpoint-name', '--endpoint_config_name', 'test-endpoint-config-name']) + response = _utils.deploy_model(mock_client, vars(mock_args)) + + mock_client.create_endpoint_config.assert_called_once_with( + EndpointConfigName='test-endpoint-config-name', + ProductionVariants=[ + {'VariantName': 'variant-name-1', 'ModelName': 'model-test', 'InitialInstanceCount': 1, + 'InstanceType': 'ml.m4.xlarge', 'InitialVariantWeight': 1.0} + ], + Tags=[] + ) + + self.assertEqual(response, 'test-endpoint-name') + + def test_sagemaker_exception_in_deploy_model(self): + mock_client = MagicMock() + mock_exception = ClientError({"Error": {"Message": "SageMaker broke"}}, "deploy_model") + mock_client.create_endpoint_config.side_effect = mock_exception + mock_args = self.parser.parse_args(required_args) + + with self.assertRaises(Exception): + _utils.deploy_model(mock_client, vars(mock_args)) + + def test_model_name_exception(self): + mock_client = MagicMock() + mock_args = vars(self.parser.parse_args(required_args)) + mock_args['model_name_1'] = None + + with self.assertRaises(Exception): + _utils.create_endpoint_config_request(mock_args) + + def test_create_endpoint_exception(self): + mock_client = MagicMock() + mock_exception = ClientError({"Error": {"Message": "SageMaker broke"}}, "deploy_model") + mock_client.create_endpoint.side_effect = mock_exception + + with self.assertRaises(Exception): + _utils.create_endpoint(mock_client, 'us-east-1', 'fake-endpoint', 'fake-endpoint-config', {}) + + def test_wait_for_endpoint_creation(self): + mock_client = MagicMock() + mock_client.describe_endpoint.side_effect = [ + {"EndpointStatus": "Creating", "EndpointArn": "fake_arn"}, + {"EndpointStatus": "InService", "EndpointArn": "fake_arn"}, + {"EndpointStatus": "Should not be called", "EndpointArn": "fake_arn"} + ] + + _utils.wait_for_endpoint_creation(mock_client, 'test-endpoint') + self.assertEqual(mock_client.describe_endpoint.call_count, 2) + + def test_wait_for_failed_job(self): + mock_client = MagicMock() + mock_client.describe_endpoint.side_effect = [ + {"EndpointStatus": "Creating", "EndpointArn": "fake_arn"}, + {"EndpointStatus": "Failed", "FailureReason": "SYSTEM FAILURE"}, + 
{"EndpointStatus": "Should not be called"} + ] + + with self.assertRaises(Exception): + _utils.wait_for_endpoint_creation(mock_client, 'test-endpoint') + + self.assertEqual(mock_client.describe_endpoint.call_count, 2) + + def test_get_endpoint_name_from_job(self): + mock_client = MagicMock() + + # if we don't pass --endpoint_name argument then endpoint name is constructed using --model_name_1 + self.assertEqual(_utils.deploy_model(mock_client, vars(self.parser.parse_args(required_args))), 'Endpoint-test') + + def test_pass_most_args(self): + arguments = [ + '--region', 'us-west-2', + '--endpoint_url', 'fake-url', + '--model_name_1', 'model-test-1', + '--accelerator_type_1', 'ml.eia1.medium', + '--model_name_2', 'model-test-2', + '--accelerator_type_2', 'ml.eia1.medium', + '--model_name_3', 'model-test-3', + '--accelerator_type_3', 'ml.eia1.medium', + '--resource_encryption_key', 'fake-key', + '--endpoint_config_tags', '{"fake_config_key": "fake_config_value"}', + '--endpoint_tags', '{"fake_key": "fake_value"}' + ] + + response = _utils.create_endpoint_config_request(vars(self.parser.parse_args(arguments))) + self.assertEqual(response, {'EndpointConfigName': 'EndpointConfig-test-1', + 'KmsKeyId': 'fake-key', + 'ProductionVariants': [ + {'InitialInstanceCount': 1, + 'AcceleratorType': 'ml.eia1.medium', + 'InitialVariantWeight': 1.0, + 'InstanceType': 'ml.m4.xlarge', + 'ModelName': 'model-test-1', + 'VariantName': 'variant-name-1' + }, + {'InitialInstanceCount': 1, + 'AcceleratorType': 'ml.eia1.medium', + 'InitialVariantWeight': 1.0, + 'InstanceType': 'ml.m4.xlarge', + 'ModelName': 'model-test-2', + 'VariantName': 'variant-name-2' + }, + {'InitialInstanceCount': 1, + 'AcceleratorType': 'ml.eia1.medium', + 'InitialVariantWeight': 1.0, + 'InstanceType': 'ml.m4.xlarge', + 'ModelName': 'model-test-3', + 'VariantName': 'variant-name-3' + } + ], + 'Tags': [{'Key': 'fake_config_key', 'Value': 'fake_config_value'}] + }) + + def test_tag_in_create_endpoint(self): + mock_client = MagicMock() + _utils.create_endpoint(mock_client, 'us-east-1', 'fake-endpoint', 'fake-endpoint-config', {"fake_key": "fake_value"}) + + mock_client.create_endpoint.assert_called_once_with( + EndpointConfigName='fake-endpoint-config', + EndpointName='fake-endpoint', + Tags=[{'Key': 'fake_key', 'Value': 'fake_value'}] + ) - def test_empty_string(self): - args = self.parser.parse_args(required_args) - response = _utils.create_endpoint_config_request(vars(args)) - test_utils.check_empty_string_values(response) \ No newline at end of file diff --git a/components/aws/sagemaker/tests/unit_tests/tests/test_ground_truth.py b/components/aws/sagemaker/tests/unit_tests/tests/test_ground_truth.py index bf7d49d89..6ae47fa44 100644 --- a/components/aws/sagemaker/tests/unit_tests/tests/test_ground_truth.py +++ b/components/aws/sagemaker/tests/unit_tests/tests/test_ground_truth.py @@ -1,7 +1,7 @@ import json import unittest -from unittest.mock import patch, Mock, MagicMock +from unittest.mock import patch, call, Mock, MagicMock, mock_open from botocore.exceptions import ClientError from datetime import datetime @@ -31,12 +31,151 @@ class GroundTruthTestCase(unittest.TestCase): parser = ground_truth.create_parser() cls.parser = parser - def test_sample(self): - args = self.parser.parse_args(required_args) - response = _utils.create_labeling_job_request(vars(args)) - self.assertEqual(response['LabelingJobName'], 'test_job') + def test_create_parser(self): + self.assertIsNotNone(self.parser) + + def test_main(self): + # Mock out all of utils 
except parser + ground_truth._utils = MagicMock() + ground_truth._utils.add_default_client_arguments = _utils.add_default_client_arguments + + # Set some static returns + ground_truth._utils.get_labeling_job_outputs.return_value = ('s3://fake-bucket/output', 'arn:aws:sagemaker:us-east-1:999999999999:labeling-job') + + with patch('builtins.open', mock_open()) as file_open: + ground_truth.main(required_args) + + # Check if correct requests were created and triggered + ground_truth._utils.create_labeling_job.assert_called() + ground_truth._utils.wait_for_labeling_job.assert_called() + ground_truth._utils.get_labeling_job_outputs.assert_called() + + # Check the file outputs + file_open.assert_has_calls([ + call('/tmp/output_manifest_location.txt', 'w'), + call('/tmp/active_learning_model_arn.txt', 'w') + ], any_order=True) + + file_open().write.assert_has_calls([ + call('s3://fake-bucket/output'), + call('arn:aws:sagemaker:us-east-1:999999999999:labeling-job') + ], any_order=False) + + def test_ground_truth(self): + mock_client = MagicMock() + mock_args = self.parser.parse_args(required_args) + response = _utils.create_labeling_job(mock_client, vars(mock_args)) + + mock_client.create_labeling_job.assert_called_once_with( + HumanTaskConfig={'WorkteamArn': None, 'UiConfig': {'UiTemplateS3Uri': 's3://fake-bucket/ui_template'}, + 'PreHumanTaskLambdaArn': '', 'TaskTitle': 'fake-image-labelling-work', + 'TaskDescription': 'fake job', 'NumberOfHumanWorkersPerDataObject': 1, + 'TaskTimeLimitInSeconds': 180, + 'AnnotationConsolidationConfig': {'AnnotationConsolidationLambdaArn': ''}}, + InputConfig={'DataSource': {'S3DataSource': {'ManifestS3Uri': 's3://fake-bucket/manifest'}}}, + LabelAttributeName='test_job', LabelCategoryConfigS3Uri='', LabelingJobName='test_job', + OutputConfig={'S3OutputPath': 's3://fake-bucket/output', 'KmsKeyId': ''}, + RoleArn='arn:aws:iam::123456789012:user/Development/product_1234/*', Tags=[] + ) + + self.assertEqual(response, 'test_job') + + def test_sagemaker_exception_in_ground_truth(self): + mock_client = MagicMock() + mock_exception = ClientError({"Error": {"Message": "SageMaker broke"}}, "ground_truth") + mock_client.create_labeling_job.side_effect = mock_exception + mock_args = self.parser.parse_args(required_args) + + with self.assertRaises(Exception): + _utils.get_labeling_job_outputs(mock_client, vars(mock_args)) + + def test_wait_for_labeling_job_creation(self): + mock_client = MagicMock() + mock_client.describe_labeling_job.side_effect = [ + {"LabelingJobStatus": "InProgress"}, + {"LabelingJobStatus": "Completed"}, + {"LabelingJobStatus": "Should not be called"} + ] + + _utils.wait_for_labeling_job(mock_client, 'test-batch', 0) + self.assertEqual(mock_client.describe_labeling_job.call_count, 2) + + def test_wait_for_labeling_job_creation(self): + mock_client = MagicMock() + mock_client.describe_labeling_job.side_effect = [ + {"LabelingJobStatus": "InProgress"}, + {"LabelingJobStatus": "Failed"}, + {"LabelingJobStatus": "Should not be called"} + ] + + with self.assertRaises(Exception): + _utils.wait_for_labeling_job(mock_client, 'test-batch', 0) + self.assertEqual(mock_client.describe_labeling_job.call_count, 2) + + def test_get_labeling_job_output_from_job(self): + mock_client = MagicMock() + mock_client.describe_labeling_job.return_value = {"LabelingJobOutput": { + "OutputDatasetS3Uri": "s3://path/", + "FinalActiveLearningModelArn": "fake-arn" + }} + + output_manifest, active_learning_model_arn = _utils.get_labeling_job_outputs(mock_client, 'labeling-job', True) 
+ self.assertEqual(output_manifest, 's3://path/') + self.assertEqual(active_learning_model_arn, 'fake-arn') + + def test_pass_most_args(self): + required_args = [ + '--region', 'us-west-2', + '--role', 'arn:aws:iam::123456789012:user/Development/product_1234/*', + '--job_name', 'test_job', + '--manifest_location', 's3://fake-bucket/manifest', + '--output_location', 's3://fake-bucket/output', + '--task_type', 'image classification', + '--worker_type', 'fake_worker', + '--ui_template', 's3://fake-bucket/ui_template', + '--title', 'fake-image-labelling-work', + '--description', 'fake job', + '--num_workers_per_object', '1', + '--time_limit', '180', + ] + arguments = required_args + ['--label_attribute_name', 'fake-attribute', + '--max_human_labeled_objects', '10', + '--max_percent_objects', '50', + '--enable_auto_labeling', 'True', + '--initial_model_arn', 'fake-model-arn', + '--task_availibility', '30', + '--max_concurrent_tasks', '10', + '--task_keywords', 'fake-keyword', + '--worker_type', 'public', + '--no_adult_content', 'True', + '--no_ppi', 'True', + '--tags', '{"fake_key": "fake_value"}' + ] + response = _utils.create_labeling_job_request(vars(self.parser.parse_args(arguments))) + print(response) + self.assertEqual(response, {'LabelingJobName': 'test_job', + 'LabelAttributeName': 'fake-attribute', + 'InputConfig': {'DataSource': {'S3DataSource': {'ManifestS3Uri': 's3://fake-bucket/manifest'}}, + 'DataAttributes': {'ContentClassifiers': ['FreeOfAdultContent', 'FreeOfPersonallyIdentifiableInformation']}}, + 'OutputConfig': {'S3OutputPath': 's3://fake-bucket/output', 'KmsKeyId': ''}, + 'RoleArn': 'arn:aws:iam::123456789012:user/Development/product_1234/*', + 'LabelCategoryConfigS3Uri': '', + 'StoppingConditions': {'MaxHumanLabeledObjectCount': 10, 'MaxPercentageOfInputDatasetLabeled': 50}, + 'LabelingJobAlgorithmsConfig': {'LabelingJobAlgorithmSpecificationArn': 'arn:aws:sagemaker:us-west-2:027400017018:labeling-job-algorithm-specification/image-classification', + 'InitialActiveLearningModelArn': 'fake-model-arn', + 'LabelingJobResourceConfig': {'VolumeKmsKeyId': ''}}, + 'HumanTaskConfig': {'WorkteamArn': 'arn:aws:sagemaker:us-west-2:394669845002:workteam/public-crowd/default', + 'UiConfig': {'UiTemplateS3Uri': 's3://fake-bucket/ui_template'}, + 'PreHumanTaskLambdaArn': 'arn:aws:lambda:us-west-2:081040173940:function:PRE-ImageMultiClass', + 'TaskKeywords': ['fake-keyword'], + 'TaskTitle': 'fake-image-labelling-work', + 'TaskDescription': 'fake job', + 'NumberOfHumanWorkersPerDataObject': 1, + 'TaskTimeLimitInSeconds': 180, + 'TaskAvailabilityLifetimeInSeconds': 30, + 'MaxConcurrentTaskCount': 10, + 'AnnotationConsolidationConfig': {'AnnotationConsolidationLambdaArn': 'arn:aws:lambda:us-west-2:081040173940:function:ACS-ImageMultiClass'}, + 'PublicWorkforceTaskPrice': {'AmountInUsd': {'Dollars': 0, 'Cents': 0, 'TenthFractionsOfACent': 0}}}, + 'Tags': [{'Key': 'fake_key', 'Value': 'fake_value'}]} + ) - def test_empty_string(self): - args = self.parser.parse_args(required_args) - response = _utils.create_labeling_job_request(vars(args)) - test_utils.check_empty_string_values(response) diff --git a/components/aws/sagemaker/tests/unit_tests/tests/test_hpo.py b/components/aws/sagemaker/tests/unit_tests/tests/test_hpo.py index d37382a2d..d1424bdea 100644 --- a/components/aws/sagemaker/tests/unit_tests/tests/test_hpo.py +++ b/components/aws/sagemaker/tests/unit_tests/tests/test_hpo.py @@ -1,11 +1,12 @@ import json import unittest -from unittest.mock import patch, Mock, MagicMock +from 
unittest.mock import patch, call, Mock, MagicMock, mock_open from botocore.exceptions import ClientError from datetime import datetime from hyperparameter_tuning.src import hyperparameter_tuning as hpo +from train.src import train from common import _utils from . import test_utils @@ -28,6 +29,9 @@ class HyperparameterTestCase(unittest.TestCase): parser = hpo.create_parser() cls.parser = parser + def test_create_parser(self): + self.assertIsNotNone(self.parser) + def test_spot_bad_args(self): no_max_wait_args = self.parser.parse_args(required_args + ['--spot_instance', 'True']) no_checkpoint_args = self.parser.parse_args(required_args + ['--spot_instance', 'True', '--max_wait_time', '3600']) @@ -60,4 +64,341 @@ class HyperparameterTestCase(unittest.TestCase): required_args + ['--spot_instance', 'True', '--max_wait_time', '86400', '--checkpoint_config', '{"S3Uri": "s3://fake-uri/"}']) response = _utils.create_hyperparameter_tuning_job_request(vars(good_args)) - test_utils.check_empty_string_values(response) \ No newline at end of file + test_utils.check_empty_string_values(response) + + def test_main(self): + # Mock out all of utils except parser + hpo._utils = MagicMock() + hpo._utils.add_default_client_arguments = _utils.add_default_client_arguments + + # Set some static returns + hpo._utils.create_hyperparameter_tuning_job.return_value = 'job-name' + hpo._utils.get_best_training_job_and_hyperparameters.return_value = 'best_job', 'best_hyperparameters' + hpo._utils.get_image_from_job.return_value = 'training-image' + hpo._utils.get_model_artifacts_from_job.return_value = 'model-artifacts' + + with patch('builtins.open', mock_open()) as file_open: + hpo.main(required_args) + + # Check if correct requests were created and triggered + hpo._utils.create_hyperparameter_tuning_job.assert_called() + hpo._utils.wait_for_hyperparameter_training_job.assert_called() + + # Check the file outputs + file_open.assert_has_calls([ + call('/tmp/hpo_job_name.txt', 'w'), + call('/tmp/best_job_name.txt', 'w'), + call('/tmp/best_hyperparameters.txt', 'w'), + call('/tmp/model_artifact_url.txt', 'w'), + call('/tmp/training_image.txt', 'w') + ], any_order=True) + + file_open().write.assert_has_calls([ + call('job-name'), + call('best_job'), + call('"best_hyperparameters"'), + call('model-artifacts'), + call('training-image'), + ], any_order=False) + + def test_create_hyperparameter_tuning_job(self): + mock_client = MagicMock() + mock_args = self.parser.parse_args(required_args + ['--job_name', 'test-job']) + response = _utils.create_hyperparameter_tuning_job(mock_client, vars(mock_args)) + + mock_client.create_hyper_parameter_tuning_job.assert_called_once_with( + HyperParameterTuningJobConfig={'Strategy': 'Bayesian', + 'HyperParameterTuningJobObjective': {'Type': 'Maximize', 'MetricName': 'test-metric'}, + 'ResourceLimits': {'MaxNumberOfTrainingJobs': 5, 'MaxParallelTrainingJobs': 2}, + 'ParameterRanges': {'IntegerParameterRanges': [], 'ContinuousParameterRanges': [], 'CategoricalParameterRanges': []}, + 'TrainingJobEarlyStoppingType': 'Off' + }, + HyperParameterTuningJobName='test-job', + Tags=[], + TrainingJobDefinition={'StaticHyperParameters': {}, + 'AlgorithmSpecification': {'TrainingImage': 'test-image', 'TrainingInputMode': 'File'}, + 'RoleArn': 'arn:aws:iam::123456789012:user/Development/product_1234/*', + 'InputDataConfig': [{'ChannelName': 'train', + 'DataSource': {'S3DataSource': {'S3Uri': 's3://fake-bucket/data', + 'S3DataType': 'S3Prefix', + 'S3DataDistributionType': 'FullyReplicated'}}, + 
'ContentType': '', + 'CompressionType': 'None', + 'RecordWrapperType': 'None', + 'InputMode': 'File'}], + 'OutputDataConfig': {'KmsKeyId': '', 'S3OutputPath': 'test-output-location'}, + 'ResourceConfig': {'InstanceType': 'ml.m4.xlarge', 'InstanceCount': 1, 'VolumeSizeInGB': 30, 'VolumeKmsKeyId': ''}, + 'StoppingCondition': {'MaxRuntimeInSeconds': 86400}, + 'EnableNetworkIsolation': True, + 'EnableInterContainerTrafficEncryption': False, + 'EnableManagedSpotTraining': False} + ) + + self.assertEqual(response, 'test-job') + + + def test_sagemaker_exception_in_create_hyperparameter_tuning_job(self): + mock_client = MagicMock() + mock_exception = ClientError({"Error": {"Message": "SageMaker broke"}}, "create_hyperparameter_tuning_job") + mock_client.create_hyper_parameter_tuning_job.side_effect = mock_exception + mock_args = self.parser.parse_args(required_args) + + with self.assertRaises(Exception): + _utils.create_hyperparameter_tuning_job(mock_client, vars(mock_args)) + + def test_wait_for_hyperparameter_tuning_job(self): + mock_client = MagicMock() + mock_client.describe_hyper_parameter_tuning_job.side_effect = [ + {"HyperParameterTuningJobStatus": "InProgress"}, + {"HyperParameterTuningJobStatus": "Completed"}, + {"HyperParameterTuningJobStatus": "Should not be called"} + ] + + _utils.wait_for_hyperparameter_training_job(mock_client,'hyperparameter-tuning-job',0) + self.assertEqual(mock_client.describe_hyper_parameter_tuning_job.call_count, 2) + + def test_wait_for_failed_job(self): + mock_client = MagicMock() + mock_client.describe_hyper_parameter_tuning_job.side_effect = [ + {"HyperParameterTuningJobStatus": "InProgress"}, + {"HyperParameterTuningJobStatus": "Failed", "FailureReason": "Something broke lol"}, + {"HyperParameterTuningJobStatus": "Should not be called"} + ] + + with self.assertRaises(Exception): + _utils.wait_for_hyperparameter_training_job(mock_client, 'training-job', 0) + + self.assertEqual(mock_client.describe_hyper_parameter_tuning_job.call_count,2) + + def test_get_image_from_algorithm_job(self): + mock_client = MagicMock() + mock_client.describe_hyper_parameter_tuning_job.return_value = {"TrainingJobDefinition": {"AlgorithmSpecification": {"AlgorithmName": "my-algorithm"}}} + mock_client.describe_algorithm.return_value = {"TrainingSpecification": {"TrainingImage": "training-image-url"}} + + self.assertEqual(_utils.get_image_from_job(mock_client, 'training-job'), "training-image-url") + + def test_best_training_job(self): + mock_client = MagicMock() + mock_client.describe_hyper_parameter_tuning_job.return_value = {'BestTrainingJob': {'TrainingJobName': 'best_training_job'}} + mock_client.describe_training_job.return_value = {"HyperParameters": {"hp": "val", '_tuning_objective_metric': 'remove_me'}} + name, params =_utils.get_best_training_job_and_hyperparameters(mock_client, "mock-hpo-job") + self.assertEqual("best_training_job", name) + self.assertEqual("val", params["hp"]) + + def test_warm_start_and_parents_args(self): + # specifying both params + good_args = self.parser.parse_args(required_args + ['--warm_start_type', 'TransferLearning'] + ['--parent_hpo_jobs', 'A,B,C']) + response = _utils.create_hyperparameter_tuning_job_request(vars(good_args)) + self.assertIn('WarmStartConfig', response) + self.assertIn('ParentHyperParameterTuningJobs', response['WarmStartConfig']) + self.assertIn('WarmStartType', response['WarmStartConfig']) + self.assertEqual(response['WarmStartConfig']['ParentHyperParameterTuningJobs'][0]['HyperParameterTuningJobName'], 'A') + 
self.assertEqual(response['WarmStartConfig']['ParentHyperParameterTuningJobs'][1]['HyperParameterTuningJobName'], 'B') + self.assertEqual(response['WarmStartConfig']['ParentHyperParameterTuningJobs'][2]['HyperParameterTuningJobName'], 'C') + self.assertEqual(response['WarmStartConfig']['WarmStartType'], 'TransferLearning') + + def test_either_warm_start_or_parents_args(self): + # It will generate an exception if either warm_start_type or parent hpo jobs is being passed + missing_parent_hpo_jobs_args = self.parser.parse_args(required_args + ['--warm_start_type', 'TransferLearning']) + with self.assertRaises(Exception): + _utils.create_hyperparameter_tuning_job_request(vars(missing_parent_hpo_jobs_args)) + + missing_warm_start_type_args = self.parser.parse_args(required_args + ['--parent_hpo_jobs', 'A,B,C']) + with self.assertRaises(Exception): + _utils.create_hyperparameter_tuning_job_request(vars(missing_warm_start_type_args)) + + + def test_reasonable_required_args(self): + response = _utils.create_hyperparameter_tuning_job_request(vars(self.parser.parse_args(required_args))) + + # Ensure all of the optional arguments have reasonable default values + self.assertFalse(response['TrainingJobDefinition']['EnableManagedSpotTraining']) + self.assertDictEqual(response['TrainingJobDefinition']['StaticHyperParameters'], {}) + self.assertNotIn('VpcConfig', response['TrainingJobDefinition']) + self.assertNotIn('MetricDefinitions', response['TrainingJobDefinition']) + self.assertEqual(response['Tags'], []) + self.assertEqual(response['TrainingJobDefinition']['AlgorithmSpecification']['TrainingInputMode'], 'File') + self.assertEqual(response['TrainingJobDefinition']['OutputDataConfig']['S3OutputPath'], 'test-output-location') + + def test_metric_definitions(self): + metric_definition_args = self.parser.parse_args(required_args + ['--metric_definitions', '{"metric1": "regexval1", "metric2": "regexval2"}']) + response = _utils.create_hyperparameter_tuning_job_request(vars(metric_definition_args)) + + self.assertIn('MetricDefinitions', response['TrainingJobDefinition']['AlgorithmSpecification']) + response_metric_definitions = response['TrainingJobDefinition']['AlgorithmSpecification']['MetricDefinitions'] + + self.assertEqual(response_metric_definitions, [{ + 'Name': "metric1", + 'Regex': "regexval1" + }, { + 'Name': "metric2", + 'Regex': "regexval2" + }]) + + def test_no_defined_image(self): + # Pass the image to pass the parser + no_image_args = required_args.copy() + image_index = no_image_args.index('--image') + # Cut out --image and it's associated value + no_image_args = no_image_args[:image_index] + no_image_args[image_index+2:] + + parsed_args = self.parser.parse_args(no_image_args) + + with self.assertRaises(Exception): + _utils.create_hyperparameter_tuning_job_request(vars(parsed_args)) + + def test_first_party_algorithm(self): + algorithm_name_args = self.parser.parse_args(required_args + ['--algorithm_name', 'first-algorithm']) + + # Should not throw an exception + response = _utils.create_hyperparameter_tuning_job_request(vars(algorithm_name_args)) + self.assertIn('TrainingJobDefinition', response) + self.assertIn('TrainingImage', response['TrainingJobDefinition']['AlgorithmSpecification']) + self.assertNotIn('AlgorithmName', response['TrainingJobDefinition']['AlgorithmSpecification']) + + def test_known_algorithm_key(self): + # This passes an algorithm that is a known NAME of an algorithm + known_algorithm_args = required_args + ['--algorithm_name', 'seq2seq modeling'] + image_index = 
required_args.index('--image') + # Cut out --image and it's associated value + known_algorithm_args = known_algorithm_args[:image_index] + known_algorithm_args[image_index+2:] + + parsed_args = self.parser.parse_args(known_algorithm_args) + + # Patch get_image_uri + _utils.get_image_uri = MagicMock() + _utils.get_image_uri.return_value = "seq2seq-url" + + response = _utils.create_hyperparameter_tuning_job_request(vars(parsed_args)) + + _utils.get_image_uri.assert_called_with('us-west-2', 'seq2seq') + self.assertEqual(response['TrainingJobDefinition']['AlgorithmSpecification']['TrainingImage'], "seq2seq-url") + + + def test_known_algorithm_value(self): + # This passes an algorithm that is a known SageMaker algorithm name + known_algorithm_args = required_args + ['--algorithm_name', 'seq2seq'] + image_index = required_args.index('--image') + # Cut out --image and it's associated value + known_algorithm_args = known_algorithm_args[:image_index] + known_algorithm_args[image_index+2:] + + parsed_args = self.parser.parse_args(known_algorithm_args) + + # Patch get_image_uri + _utils.get_image_uri = MagicMock() + _utils.get_image_uri.return_value = "seq2seq-url" + + response = _utils.create_hyperparameter_tuning_job_request(vars(parsed_args)) + + _utils.get_image_uri.assert_called_with('us-west-2', 'seq2seq') + self.assertEqual(response['TrainingJobDefinition']['AlgorithmSpecification']['TrainingImage'], "seq2seq-url") + + + def test_unknown_algorithm(self): + known_algorithm_args = required_args + ['--algorithm_name', 'unknown algorithm'] + image_index = required_args.index('--image') + # Cut out --image and it's associated value + known_algorithm_args = known_algorithm_args[:image_index] + known_algorithm_args[image_index+2:] + + parsed_args = self.parser.parse_args(known_algorithm_args) + + # Patch get_image_uri + _utils.get_image_uri = MagicMock() + _utils.get_image_uri.return_value = "unknown-url" + + response = _utils.create_hyperparameter_tuning_job_request(vars(parsed_args)) + + # Should just place the algorithm name in regardless + _utils.get_image_uri.assert_not_called() + self.assertEqual(response['TrainingJobDefinition']['AlgorithmSpecification']['AlgorithmName'], "unknown algorithm") + + def test_no_channels(self): + no_channels_args = required_args.copy() + channels_index = required_args.index('--channels') + # Replace the value after the flag with an empty list + no_channels_args[channels_index + 1] = '[]' + parsed_args = self.parser.parse_args(no_channels_args) + + with self.assertRaises(Exception): + _utils.create_hyperparameter_tuning_job_request(vars(parsed_args)) + + def test_tags(self): + args = self.parser.parse_args(required_args + ['--tags', '{"key1": "val1", "key2": "val2"}']) + response = _utils.create_hyperparameter_tuning_job_request(vars(args)) + self.assertIn({'Key': 'key1', 'Value': 'val1'}, response['Tags']) + self.assertIn({'Key': 'key2', 'Value': 'val2'}, response['Tags']) + + def test_valid_hyperparameters(self): + hyperparameters_str = '{"hp1": "val1", "hp2": "val2", "hp3": "val3"}' + categorical_params = '[{"Name" : "categorical", "Values": ["A", "B"]}]' + integer_params = '[{"MaxValue": "integer_val1", "MinValue": "integer_val2", "Name": "integer", "ScalingType": "test_integer"}]' + continuous_params = '[{"MaxValue": "continuous_val1", "MinValue": "continuous_val2", "Name": "continuous", "ScalingType": "test_continuous"}]' + good_args = self.parser.parse_args(required_args + ['--static_parameters', hyperparameters_str] + ['--integer_parameters', 
integer_params] + ['--continuous_parameters', continuous_params] + ['--categorical_parameters', categorical_params]) + response = _utils.create_hyperparameter_tuning_job_request(vars(good_args)) + + self.assertIn('hp1', response['TrainingJobDefinition']['StaticHyperParameters']) + self.assertIn('hp2', response['TrainingJobDefinition']['StaticHyperParameters']) + self.assertIn('hp3', response['TrainingJobDefinition']['StaticHyperParameters']) + self.assertEqual(response['TrainingJobDefinition']['StaticHyperParameters']['hp1'], "val1") + self.assertEqual(response['TrainingJobDefinition']['StaticHyperParameters']['hp2'], "val2") + self.assertEqual(response['TrainingJobDefinition']['StaticHyperParameters']['hp3'], "val3") + + self.assertIn('ParameterRanges', response['HyperParameterTuningJobConfig']) + self.assertIn('IntegerParameterRanges', response['HyperParameterTuningJobConfig']['ParameterRanges']) + self.assertIn('ContinuousParameterRanges', response['HyperParameterTuningJobConfig']['ParameterRanges']) + self.assertIn('CategoricalParameterRanges', response['HyperParameterTuningJobConfig']['ParameterRanges']) + self.assertIn('Name', response['HyperParameterTuningJobConfig']['ParameterRanges']['CategoricalParameterRanges'][0]) + self.assertIn('Values', response['HyperParameterTuningJobConfig']['ParameterRanges']['CategoricalParameterRanges'][0]) + self.assertIn('MaxValue', response['HyperParameterTuningJobConfig']['ParameterRanges']['IntegerParameterRanges'][0]) + self.assertIn('MinValue', response['HyperParameterTuningJobConfig']['ParameterRanges']['IntegerParameterRanges'][0]) + self.assertIn('Name', response['HyperParameterTuningJobConfig']['ParameterRanges']['IntegerParameterRanges'][0]) + self.assertIn('ScalingType', response['HyperParameterTuningJobConfig']['ParameterRanges']['IntegerParameterRanges'][0]) + self.assertIn('MaxValue', response['HyperParameterTuningJobConfig']['ParameterRanges']['ContinuousParameterRanges'][0]) + self.assertIn('MinValue', response['HyperParameterTuningJobConfig']['ParameterRanges']['ContinuousParameterRanges'][0]) + self.assertIn('Name', response['HyperParameterTuningJobConfig']['ParameterRanges']['ContinuousParameterRanges'][0]) + self.assertIn('ScalingType', response['HyperParameterTuningJobConfig']['ParameterRanges']['ContinuousParameterRanges'][0]) + + self.assertEqual(response['HyperParameterTuningJobConfig']['ParameterRanges']['CategoricalParameterRanges'][0]['Name'], "categorical") + self.assertEqual(response['HyperParameterTuningJobConfig']['ParameterRanges']['CategoricalParameterRanges'][0]["Values"][0], "A") + self.assertEqual(response['HyperParameterTuningJobConfig']['ParameterRanges']['CategoricalParameterRanges'][0]["Values"][1], "B") + self.assertEqual(response['HyperParameterTuningJobConfig']['ParameterRanges']['IntegerParameterRanges'][0]['MaxValue'], "integer_val1") + self.assertEqual(response['HyperParameterTuningJobConfig']['ParameterRanges']['IntegerParameterRanges'][0]['MinValue'], "integer_val2") + self.assertEqual(response['HyperParameterTuningJobConfig']['ParameterRanges']['IntegerParameterRanges'][0]['Name'], "integer") + self.assertEqual(response['HyperParameterTuningJobConfig']['ParameterRanges']['IntegerParameterRanges'][0]['ScalingType'], "test_integer") + self.assertEqual(response['HyperParameterTuningJobConfig']['ParameterRanges']['ContinuousParameterRanges'][0]['MaxValue'], "continuous_val1") + self.assertEqual(response['HyperParameterTuningJobConfig']['ParameterRanges']['ContinuousParameterRanges'][0]['MinValue'], 
"continuous_val2") + self.assertEqual(response['HyperParameterTuningJobConfig']['ParameterRanges']['ContinuousParameterRanges'][0]['Name'], "continuous") + self.assertEqual(response['HyperParameterTuningJobConfig']['ParameterRanges']['ContinuousParameterRanges'][0]['ScalingType'], "test_continuous") + + + def test_empty_hyperparameters(self): + hyperparameters_str = '{}' + + good_args = self.parser.parse_args(required_args + ['--static_parameters', hyperparameters_str]) + response = _utils.create_hyperparameter_tuning_job_request(vars(good_args)) + + self.assertEqual(response['TrainingJobDefinition']['StaticHyperParameters'], {}) + + def test_object_hyperparameters(self): + hyperparameters_str = '{"hp1": {"innerkey": "innerval"}}' + + invalid_args = self.parser.parse_args(required_args + ['--static_parameters', hyperparameters_str]) + with self.assertRaises(Exception): + _utils.create_hyperparameter_tuning_job_request(vars(invalid_args)) + + def test_vpc_configuration(self): + required_vpc_args = self.parser.parse_args(required_args + ['--vpc_security_group_ids', 'sg1,sg2', '--vpc_subnets', 'subnet1,subnet2']) + response = _utils.create_hyperparameter_tuning_job_request(vars(required_vpc_args)) + + self.assertIn('TrainingJobDefinition', response) + self.assertIn('VpcConfig', response['TrainingJobDefinition']) + self.assertIn('sg1', response['TrainingJobDefinition']['VpcConfig']['SecurityGroupIds']) + self.assertIn('sg2', response['TrainingJobDefinition']['VpcConfig']['SecurityGroupIds']) + self.assertIn('subnet1', response['TrainingJobDefinition']['VpcConfig']['Subnets']) + self.assertIn('subnet2', response['TrainingJobDefinition']['VpcConfig']['Subnets']) + + def test_training_mode(self): + required_vpc_args = self.parser.parse_args(required_args + ['--training_input_mode', 'Pipe']) + response = _utils.create_hyperparameter_tuning_job_request(vars(required_vpc_args)) + + self.assertEqual(response['TrainingJobDefinition']['AlgorithmSpecification']['TrainingInputMode'], 'Pipe') diff --git a/components/aws/sagemaker/tests/unit_tests/tests/test_model.py b/components/aws/sagemaker/tests/unit_tests/tests/test_model.py index d4fc16758..1bb674933 100644 --- a/components/aws/sagemaker/tests/unit_tests/tests/test_model.py +++ b/components/aws/sagemaker/tests/unit_tests/tests/test_model.py @@ -1,7 +1,7 @@ import json import unittest -from unittest.mock import patch, Mock, MagicMock +from unittest.mock import patch, call, Mock, MagicMock, mock_open from botocore.exceptions import ClientError from datetime import datetime @@ -24,12 +24,96 @@ class ModelTestCase(unittest.TestCase): parser = create_model.create_parser() cls.parser = parser - def test_sample(self): - args = self.parser.parse_args(required_args) - response = _utils.create_model_request(vars(args)) - self.assertEqual(response['ModelName'], 'model_test') + def test_create_parser(self): + self.assertIsNotNone(self.parser) - def test_empty_string(self): - args = self.parser.parse_args(required_args) - response = _utils.create_model_request(vars(args)) - test_utils.check_empty_string_values(response) \ No newline at end of file + def test_main(self): + # Mock out all of utils except parser + create_model._utils = MagicMock() + create_model._utils.add_default_client_arguments = _utils.add_default_client_arguments + + # Set some static returns + create_model._utils.create_model.return_value = 'model_test' + + with patch('builtins.open', mock_open()) as file_open: + create_model.main(required_args) + + # Check if correct requests were created and 
triggered + create_model._utils.create_model.assert_called() + + # Check the file outputs + file_open.assert_has_calls([ + call('/tmp/model_name.txt', 'w') + ]) + + file_open().write.assert_has_calls([ + call('model_test') + ]) + + def test_create_model(self): + mock_client = MagicMock() + mock_args = self.parser.parse_args(required_args) + response = _utils.create_model(mock_client, vars(mock_args)) + + mock_client.create_model.assert_called_once_with( + EnableNetworkIsolation=True, + ExecutionRoleArn='arn:aws:iam::123456789012:user/Development/product_1234/*', + ModelName='model_test', + PrimaryContainer={'Image': 'test-image', 'ModelDataUrl': 's3://fake-bucket/model_artifact', 'Environment': {}}, + Tags=[] + ) + + + def test_sagemaker_exception_in_create_model(self): + mock_client = MagicMock() + mock_exception = ClientError({"Error": {"Message": "SageMaker broke"}}, "create_model") + mock_client.create_model.side_effect = mock_exception + mock_args = self.parser.parse_args(required_args) + + with self.assertRaises(Exception): + _utils.create_model(mock_client, vars(mock_args)) + + def test_secondary_containers(self): + arguments = required_args + ['--secondary_containers', '["fake-container"]'] + response = _utils.create_model_request(vars(self.parser.parse_args(arguments))) + + self.assertEqual(response['Containers'], ['fake-container']) + + def test_pass_most_arguments(self): + arguments = required_args + ['--container_host_name', 'fake-host', + '--tags', '{"fake_key": "fake_value"}', + '--vpc_security_group_ids', 'fake-ids', + '--vpc_subnets', 'fake-subnets'] + response = _utils.create_model_request(vars(self.parser.parse_args(arguments))) + print(response) + self.assertEqual(response, {'ModelName': 'model_test', + 'PrimaryContainer': {'ContainerHostname': 'fake-host', + 'Image': 'test-image', + 'ModelDataUrl': 's3://fake-bucket/model_artifact', + 'Environment': {} + }, + 'ExecutionRoleArn': 'arn:aws:iam::123456789012:user/Development/product_1234/*', + 'Tags': [{'Key': 'fake_key', 'Value': 'fake_value'}], + 'VpcConfig': {'SecurityGroupIds': ['fake-ids'], 'Subnets': ['fake-subnets']}, + 'EnableNetworkIsolation': True + }) + + def test_image_model_package(self): + arguments = [ '--region', 'us-west-2', + '--model_name', 'model_test', + '--role', 'arn:aws:iam::123456789012:user/Development/product_1234/*' + ] + + # does not error out + _utils.create_model_request(vars(self.parser.parse_args(arguments + ['--image', 'test-image', + '--model_artifact_url', 's3://fake-bucket/model_artifact' + ]))) + # does not error out + _utils.create_model_request(vars(self.parser.parse_args(arguments + ['--model_package', 'fake-package']))) + + with self.assertRaises(Exception): + _utils.create_model_request(vars(self.parser.parse_args(arguments))) + with self.assertRaises(Exception): + _utils.create_model_request(vars(self.parser.parse_args(arguments+ ['--image', 'test-image']))) + with self.assertRaises(Exception): + _utils.create_model_request(vars(self.parser.parse_args(arguments+ ['--model_artifact_url', 's3://fake-bucket/model_artifact']))) \ No newline at end of file diff --git a/components/aws/sagemaker/tests/unit_tests/tests/test_train.py b/components/aws/sagemaker/tests/unit_tests/tests/test_train.py index 1993c72d2..b247d6c85 100644 --- a/components/aws/sagemaker/tests/unit_tests/tests/test_train.py +++ b/components/aws/sagemaker/tests/unit_tests/tests/test_train.py @@ -65,15 +65,26 @@ class TrainTestCase(unittest.TestCase): mock_args = self.parser.parse_args(required_args + ['--job_name', 
'test-job']) response = _utils.create_training_job(mock_client, vars(mock_args)) - mock_client.create_training_job.assert_called_once_with(AlgorithmSpecification={'TrainingImage': 'test-image', - 'TrainingInputMode': 'File'}, EnableInterContainerTrafficEncryption=False, EnableManagedSpotTraining=False, - EnableNetworkIsolation=True, HyperParameters={}, InputDataConfig=[{'ChannelName': 'train', 'DataSource': - {'S3DataSource': {'S3Uri': 's3://fake-bucket/data', 'S3DataType': 'S3Prefix', 'S3DataDistributionType': - 'FullyReplicated'}}, 'ContentType': '', 'CompressionType': 'None', 'RecordWrapperType': 'None', 'InputMode': - 'File'}], OutputDataConfig={'KmsKeyId': '', 'S3OutputPath': 'test-path'}, ResourceConfig={'InstanceType': - 'ml.m4.xlarge', 'InstanceCount': 1, 'VolumeSizeInGB': 50, 'VolumeKmsKeyId': ''}, - RoleArn='arn:aws:iam::123456789012:user/Development/product_1234/*', StoppingCondition={'MaxRuntimeInSeconds': - 3600}, Tags=[], TrainingJobName='test-job') + mock_client.create_training_job.assert_called_once_with( + AlgorithmSpecification={'TrainingImage': 'test-image', 'TrainingInputMode': 'File'}, + EnableInterContainerTrafficEncryption=False, + EnableManagedSpotTraining=False, + EnableNetworkIsolation=True, + HyperParameters={}, + InputDataConfig=[{'ChannelName': 'train', + 'DataSource': {'S3DataSource': {'S3Uri': 's3://fake-bucket/data', 'S3DataType': 'S3Prefix', 'S3DataDistributionType': 'FullyReplicated'}}, + 'ContentType': '', + 'CompressionType': 'None', + 'RecordWrapperType': 'None', + 'InputMode': 'File' + }], + OutputDataConfig={'KmsKeyId': '', 'S3OutputPath': 'test-path'}, + ResourceConfig={'InstanceType': 'ml.m4.xlarge', 'InstanceCount': 1, 'VolumeSizeInGB': 50, 'VolumeKmsKeyId': ''}, + RoleArn='arn:aws:iam::123456789012:user/Development/product_1234/*', + StoppingCondition={'MaxRuntimeInSeconds': 3600}, + Tags=[], + TrainingJobName='test-job' + ) self.assertEqual(response, 'test-job') def test_sagemaker_exception_in_create_training_job(self): @@ -243,18 +254,13 @@ class TrainTestCase(unittest.TestCase): with self.assertRaises(Exception): _utils.create_training_job_request(vars(parsed_args)) - def test_invalid_instance_type(self): - invalid_instance_args = required_args + ['--instance_type', 'invalid-instance'] - - with self.assertRaises(SystemExit): - self.parser.parse_args(invalid_instance_args) def test_valid_hyperparameters(self): hyperparameters_str = '{"hp1": "val1", "hp2": "val2", "hp3": "val3"}' good_args = self.parser.parse_args(required_args + ['--hyperparameters', hyperparameters_str]) response = _utils.create_training_job_request(vars(good_args)) - + self.assertIn('hp1', response['HyperParameters']) self.assertIn('hp2', response['HyperParameters']) self.assertIn('hp3', response['HyperParameters']) @@ -267,7 +273,7 @@ class TrainTestCase(unittest.TestCase): good_args = self.parser.parse_args(required_args + ['--hyperparameters', hyperparameters_str]) response = _utils.create_training_job_request(vars(good_args)) - + self.assertEqual(response['HyperParameters'], {}) def test_object_hyperparameters(self): @@ -280,7 +286,7 @@ class TrainTestCase(unittest.TestCase): def test_vpc_configuration(self): required_vpc_args = self.parser.parse_args(required_args + ['--vpc_security_group_ids', 'sg1,sg2', '--vpc_subnets', 'subnet1,subnet2']) response = _utils.create_training_job_request(vars(required_vpc_args)) - + self.assertIn('VpcConfig', response) self.assertIn('sg1', response['VpcConfig']['SecurityGroupIds']) self.assertIn('sg2', 
response['VpcConfig']['SecurityGroupIds']) @@ -305,7 +311,7 @@ class TrainTestCase(unittest.TestCase): def test_spot_lesser_wait_time(self): args = self.parser.parse_args(required_args + ['--spot_instance', 'True', '--max_wait_time', '3599', '--checkpoint_config', '{"S3Uri": "s3://fake-uri/", "LocalPath": "local-path"}']) with self.assertRaises(Exception): - _utils.create_training_job_request(vars(args)) + _utils.create_training_job_request(vars(args)) def test_spot_good_args(self): good_args = self.parser.parse_args(required_args + ['--spot_instance', 'True', '--max_wait_time', '3600', '--checkpoint_config', '{"S3Uri": "s3://fake-uri/"}']) @@ -324,4 +330,4 @@ class TrainTestCase(unittest.TestCase): args = self.parser.parse_args(required_args + ['--tags', '{"key1": "val1", "key2": "val2"}']) response = _utils.create_training_job_request(vars(args)) self.assertIn({'Key': 'key1', 'Value': 'val1'}, response['Tags']) - self.assertIn({'Key': 'key2', 'Value': 'val2'}, response['Tags']) \ No newline at end of file + self.assertIn({'Key': 'key2', 'Value': 'val2'}, response['Tags']) diff --git a/components/aws/sagemaker/tests/unit_tests/tests/test_workteam.py b/components/aws/sagemaker/tests/unit_tests/tests/test_workteam.py index 7d5dcc7ae..1a741a9a4 100644 --- a/components/aws/sagemaker/tests/unit_tests/tests/test_workteam.py +++ b/components/aws/sagemaker/tests/unit_tests/tests/test_workteam.py @@ -1,7 +1,7 @@ import json import unittest -from unittest.mock import patch, Mock, MagicMock +from unittest.mock import patch, call, Mock, MagicMock, mock_open from botocore.exceptions import ClientError from datetime import datetime @@ -22,12 +22,65 @@ class WorkTeamTestCase(unittest.TestCase): parser = workteam.create_parser() cls.parser = parser - def test_sample(self): - args = self.parser.parse_args(required_args) - response = _utils.create_workteam_request(vars(args)) - self.assertEqual(response['WorkteamName'], 'test-team') + def test_create_parser(self): + self.assertIsNotNone(self.parser) - def test_empty_string(self): - args = self.parser.parse_args(required_args) - response = _utils.create_workteam_request(vars(args)) - test_utils.check_empty_string_values(response) \ No newline at end of file + def test_main(self): + # Mock out all of utils except parser + workteam._utils = MagicMock() + workteam._utils.add_default_client_arguments = _utils.add_default_client_arguments + + # Set some static returns + workteam._utils.create_workteam.return_value = 'arn:aws:sagemaker:us-east-1:999999999999:work-team' + + with patch('builtins.open', mock_open()) as file_open: + workteam.main(required_args) + + # Check if correct requests were created and triggered + workteam._utils.create_workteam.assert_called() + + # Check the file outputs + file_open.assert_has_calls([ + call('/tmp/workteam_arn.txt', 'w') + ]) + + file_open().write.assert_has_calls([ + call('arn:aws:sagemaker:us-east-1:999999999999:work-team') + ]) + + def test_workteam(self): + mock_client = MagicMock() + mock_args = self.parser.parse_args(required_args) + response = _utils.create_workteam(mock_client, vars(mock_args)) + + mock_client.create_workteam.assert_called_once_with( + Description='fake team', + MemberDefinitions=[{'CognitoMemberDefinition': {'UserPool': '', 'UserGroup': '', 'ClientId': ''}}], Tags=[], + WorkteamName='test-team' + ) + + def test_sagemaker_exception_in_workteam(self): + mock_client = MagicMock() + mock_exception = ClientError({"Error": {"Message": "SageMaker broke"}}, "workteam") + 
mock_client.create_workteam.side_effect = mock_exception + mock_args = self.parser.parse_args(required_args) + + with self.assertRaises(Exception): + _utils.create_workteam(mock_client, vars(mock_args)) + + def test_get_workteam_output_from_job(self): + mock_client = MagicMock() + mock_client.create_workteam.return_value = {"WorkteamArn": "fake-arn"} + + self.assertEqual(_utils.create_workteam(mock_client, vars(self.parser.parse_args(required_args))), 'fake-arn') + + def test_pass_most_arguments(self): + arguments = required_args + ['--sns_topic', 'fake-topic', '--tags', '{"fake_key": "fake_value"}'] + response = _utils.create_workteam_request(vars(self.parser.parse_args(arguments))) + + self.assertEqual(response, {'WorkteamName': 'test-team', + 'MemberDefinitions': [{'CognitoMemberDefinition': {'UserPool': '', 'UserGroup': '', 'ClientId': ''}}], + 'Description': 'fake team', + 'NotificationConfiguration' : {'NotificationTopicArn': 'fake-topic'}, + 'Tags': [{'Key': 'fake_key', 'Value': 'fake_value'}] + }) \ No newline at end of file diff --git a/components/aws/sagemaker/train/README.md b/components/aws/sagemaker/train/README.md index a84556586..e8437f6d3 100644 --- a/components/aws/sagemaker/train/README.md +++ b/components/aws/sagemaker/train/README.md @@ -20,8 +20,8 @@ algorithm_name | The name of the algorithm resource to use for the hyperparamete metric_definitions | The dictionary of name-regex pairs specify the metrics that the algorithm emits | Yes | Dict | | {} | put_mode | The input mode that the algorithm supports | No | String | File, Pipe | File | hyperparameters | Hyperparameters for the selected algorithm | No | Dict | [Depends on Algo](https://docs.aws.amazon.com/sagemaker/latest/dg/k-means-api-config.html)| | -channels | A list of dicts specifying the input channels (at least one); refer to [documentation](https://github.com/awsdocs/amazon-sagemaker-developer-guide/blob/master/doc_source/API_Channel.md) for parameters | No | No | List of Dicts | | | -instance_type | The ML compute instance type | Yes | No | String | ml.m4.xlarge, ml.m4.2xlarge, ml.m4.4xlarge, ml.m4.10xlarge, ml.m4.16xlarge, ml.m5.large, ml.m5.xlarge, ml.m5.2xlarge, ml.m5.4xlarge, ml.m5.12xlarge, ml.m5.24xlarge, ml.c4.xlarge, ml.c4.2xlarge, ml.c4.4xlarge, ml.c4.8xlarge, ml.p2.xlarge, ml.p2.8xlarge, ml.p2.16xlarge, ml.p3.2xlarge, ml.p3.8xlarge, ml.p3.16xlarge, ml.c5.xlarge, ml.c5.2xlarge, ml.c5.4xlarge, ml.c5.9xlarge, ml.c5.18xlarge | ml.m4.xlarge | +channels | A list of dicts specifying the input channels (at least one); refer to [documentation](https://github.com/awsdocs/amazon-sagemaker-developer-guide/blob/master/doc_source/API_Channel.md) for parameters | No | List of Dicts | | | +instance_type | The ML compute instance type | Yes | String | ml.m4.xlarge, ml.m4.2xlarge, ml.m4.4xlarge, ml.m4.10xlarge, ml.m4.16xlarge, ml.m5.large, ml.m5.xlarge, ml.m5.2xlarge, ml.m5.4xlarge, ml.m5.12xlarge, ml.m5.24xlarge, ml.c4.xlarge, ml.c4.2xlarge, ml.c4.4xlarge, ml.c4.8xlarge, ml.p2.xlarge, ml.p2.8xlarge, ml.p2.16xlarge, ml.p3.2xlarge, ml.p3.8xlarge, ml.p3.16xlarge, ml.c5.xlarge, ml.c5.2xlarge, ml.c5.4xlarge, ml.c5.9xlarge, ml.c5.18xlarge [and many more](https://aws.amazon.com/sagemaker/pricing/instance-types/) | ml.m4.xlarge | instance_count | The number of ML compute instances to use in each training job | Yes | Int | ≥ 1 | 1 | volume_size | The size of the ML storage volume that you want to provision in GB | Yes | Int | ≥ 1 | 30 | resource_encryption_key | The AWS KMS key that Amazon SageMaker uses to encrypt data on 
the storage volume attached to the ML compute instance(s) | Yes | String | | | @@ -42,7 +42,7 @@ tags | Key-value pairs to categorize AWS resources | Yes | Dict | | {} | Stores the Model in the s3 bucket you specified # Example code -Simple example pipeline with only Train component : [simple_train_pipeline](https://github.com/kubeflow/pipelines/tree/documents/samples/contrib/aws-samples/simple_train_pipeline) +Simple example pipeline with only Train component : [simple_train_pipeline](https://github.com/kubeflow/pipelines/tree/master/samples/contrib/aws-samples/simple_train_pipeline) # Resources * [Using Amazon built-in algorithms](https://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-algo-docker-registry-paths.html) diff --git a/components/aws/sagemaker/train/src/train.py b/components/aws/sagemaker/train/src/train.py index 2e01aaffb..d14d26282 100644 --- a/components/aws/sagemaker/train/src/train.py +++ b/components/aws/sagemaker/train/src/train.py @@ -28,11 +28,9 @@ def create_parser(): parser.add_argument('--training_input_mode', choices=['File', 'Pipe'], type=str, help='The input mode that the algorithm supports. File or Pipe.', default='File') parser.add_argument('--hyperparameters', type=_utils.yaml_or_json_str, help='Dictionary of hyperparameters for the the algorithm.', default={}) parser.add_argument('--channels', type=_utils.yaml_or_json_str, required=True, help='A list of dicts specifying the input channels. Must have at least one.') - parser.add_argument('--instance_type', required=True, choices=['ml.m4.xlarge', 'ml.m4.2xlarge', 'ml.m4.4xlarge', 'ml.m4.10xlarge', 'ml.m4.16xlarge', 'ml.m5.large', 'ml.m5.xlarge', 'ml.m5.2xlarge', 'ml.m5.4xlarge', - 'ml.m5.12xlarge', 'ml.m5.24xlarge', 'ml.c4.xlarge', 'ml.c4.2xlarge', 'ml.c4.4xlarge', 'ml.c4.8xlarge', 'ml.p2.xlarge', 'ml.p2.8xlarge', 'ml.p2.16xlarge', 'ml.p3.2xlarge', 'ml.p3.8xlarge', 'ml.p3.16xlarge', - 'ml.c5.xlarge', 'ml.c5.2xlarge', 'ml.c5.4xlarge', 'ml.c5.9xlarge', 'ml.c5.18xlarge'], type=str, help='The ML compute instance type.', default='ml.m4.xlarge') + parser.add_argument('--instance_type', required=False, type=str, help='The ML compute instance type.', default='ml.m4.xlarge') parser.add_argument('--instance_count', required=True, type=int, help='The registry path of the Docker image that contains the training algorithm.', default=1) - parser.add_argument('--volume_size', type=int, required=True, help='The size of the ML storage volume that you want to provision.', default=1) + parser.add_argument('--volume_size', type=int, required=True, help='The size of the ML storage volume that you want to provision.', default=30) parser.add_argument('--resource_encryption_key', type=str, required=False, help='The AWS KMS key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s).', default='') parser.add_argument('--max_run_time', type=int, required=True, help='The maximum run time in seconds for the training job.', default=86400) parser.add_argument('--model_artifact_path', type=str, required=True, help='Identifies the S3 path where you want Amazon SageMaker to store the model artifacts.') diff --git a/components/aws/sagemaker/workteam/src/workteam.py b/components/aws/sagemaker/workteam/src/workteam.py index 5f9b6caaf..29d78dd71 100644 --- a/components/aws/sagemaker/workteam/src/workteam.py +++ b/components/aws/sagemaker/workteam/src/workteam.py @@ -10,6 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
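To complement the Train README's "Example code" pointer above, here is a minimal, hedged sketch of a train-only pipeline. It assumes the component is loaded from the repository's `components/aws/sagemaker/train/component.yaml` and that the component's input names match the `train.py` parser flags shown above; the image URI, S3 paths, and role ARN are placeholders.

```python
# Sketch only: assumes kfp is installed and the input names match train.py's flags.
import kfp
from kfp import components

sagemaker_train_op = components.load_component_from_url(
    'https://raw.githubusercontent.com/kubeflow/pipelines/master/'
    'components/aws/sagemaker/train/component.yaml'  # assumed component location
)


@kfp.dsl.pipeline(name='simple-train', description='Train-only SageMaker example (sketch)')
def train_pipeline():
    sagemaker_train_op(
        region='us-west-2',
        role='arn:aws:iam::123456789012:role/SageMakerExecutionRole',  # placeholder
        image='123456789012.dkr.ecr.us-west-2.amazonaws.com/my-algorithm:latest',  # placeholder
        channels='[{"ChannelName": "train", '
                 '"DataSource": {"S3DataSource": {"S3Uri": "s3://my-bucket/data", '
                 '"S3DataType": "S3Prefix", "S3DataDistributionType": "FullyReplicated"}}, '
                 '"ContentType": "", "CompressionType": "None", "RecordWrapperType": "None", '
                 '"InputMode": "File"}]',
        instance_type='ml.m4.xlarge',
        instance_count=1,
        volume_size=30,
        max_run_time=3600,
        model_artifact_path='s3://my-bucket/models',
    )


if __name__ == '__main__':
    kfp.compiler.Compiler().compile(train_pipeline, 'train_pipeline.yaml')
```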
+import sys import argparse import logging @@ -31,7 +32,7 @@ def create_parser(): def main(argv=None): parser = create_parser() - args = parser.parse_args() + args = parser.parse_args(argv) logging.getLogger().setLevel(logging.INFO) client = _utils.get_sagemaker_client(args.region, args.endpoint_url) @@ -45,4 +46,4 @@ if __name__== "__main__": - main() + main(sys.argv[1:]) diff --git a/components/datasets/Chicago_Taxi_Trips/component.yaml b/components/datasets/Chicago_Taxi_Trips/component.yaml new file mode 100644 index 000000000..df2e85464 --- /dev/null +++ b/components/datasets/Chicago_Taxi_Trips/component.yaml @@ -0,0 +1,41 @@ +name: Chicago Taxi Trips dataset +description: | + City of Chicago Taxi Trips dataset: https://data.cityofchicago.org/Transportation/Taxi-Trips/wrvz-psew + + The input parameters configure the SQL query to the database. + The dataset is pretty big, so limit the number of results using the `Limit` or `Where` parameters. + Read [Socrata dev](https://dev.socrata.com/docs/queries/) for the advanced query syntax +metadata: + annotations: + author: Alexey Volkov +inputs: +- {name: Where, type: String, default: 'trip_start_timestamp>="1900-01-01" AND trip_start_timestamp<"2100-01-01"'} +- {name: Limit, type: Integer, default: '1000', description: 'Number of rows to return. The rows are randomly sampled.'} +- {name: Select, type: String, default: 'trip_id,taxi_id,trip_start_timestamp,trip_end_timestamp,trip_seconds,trip_miles,pickup_census_tract,dropoff_census_tract,pickup_community_area,dropoff_community_area,fare,tips,tolls,extras,trip_total,payment_type,company,pickup_centroid_latitude,pickup_centroid_longitude,pickup_centroid_location,dropoff_centroid_latitude,dropoff_centroid_longitude,dropoff_centroid_location'} +- {name: Format, type: String, default: 'csv', description: 'Output data format. Supports csv,tsv,xml,rdf,json'} +outputs: +- {name: Table, description: 'Result type depends on format.
CSV and TSV have header.'} +implementation: + container: + image: curlimages/curl + command: + - sh + - -c + - | + set -e -x -o pipefail + output_path="$0" + select="$1" + where="$2" + limit="$3" + format="$4" + mkdir -p "$(dirname "$output_path")" + curl --get 'https://data.cityofchicago.org/resource/wrvz-psew.'"${format}" \ + --data-urlencode '$limit='"${limit}" \ + --data-urlencode '$where='"${where}" \ + --data-urlencode '$select='"${select}" \ + | tr -d '"' > "$output_path" # Removing unneeded quotes around all numbers + - {outputPath: Table} + - {inputValue: Select} + - {inputValue: Where} + - {inputValue: Limit} + - {inputValue: Format} diff --git a/components/tfx/Evaluator/component.py b/components/deprecated/tfx/Evaluator/component.py similarity index 100% rename from components/tfx/Evaluator/component.py rename to components/deprecated/tfx/Evaluator/component.py diff --git a/components/tfx/Evaluator/component.yaml b/components/deprecated/tfx/Evaluator/component.yaml similarity index 100% rename from components/tfx/Evaluator/component.yaml rename to components/deprecated/tfx/Evaluator/component.yaml diff --git a/components/tfx/Evaluator/with_URI_IO/component.py b/components/deprecated/tfx/Evaluator/with_URI_IO/component.py similarity index 100% rename from components/tfx/Evaluator/with_URI_IO/component.py rename to components/deprecated/tfx/Evaluator/with_URI_IO/component.py diff --git a/components/tfx/Evaluator/with_URI_IO/component.yaml b/components/deprecated/tfx/Evaluator/with_URI_IO/component.yaml similarity index 100% rename from components/tfx/Evaluator/with_URI_IO/component.yaml rename to components/deprecated/tfx/Evaluator/with_URI_IO/component.yaml diff --git a/components/tfx/ExampleGen/BigQueryExampleGen/component.py b/components/deprecated/tfx/ExampleGen/BigQueryExampleGen/component.py similarity index 100% rename from components/tfx/ExampleGen/BigQueryExampleGen/component.py rename to components/deprecated/tfx/ExampleGen/BigQueryExampleGen/component.py diff --git a/components/tfx/ExampleGen/BigQueryExampleGen/component.yaml b/components/deprecated/tfx/ExampleGen/BigQueryExampleGen/component.yaml similarity index 100% rename from components/tfx/ExampleGen/BigQueryExampleGen/component.yaml rename to components/deprecated/tfx/ExampleGen/BigQueryExampleGen/component.yaml diff --git a/components/tfx/ExampleGen/BigQueryExampleGen/with_URI_IO/component.py b/components/deprecated/tfx/ExampleGen/BigQueryExampleGen/with_URI_IO/component.py similarity index 100% rename from components/tfx/ExampleGen/BigQueryExampleGen/with_URI_IO/component.py rename to components/deprecated/tfx/ExampleGen/BigQueryExampleGen/with_URI_IO/component.py diff --git a/components/tfx/ExampleGen/BigQueryExampleGen/with_URI_IO/component.yaml b/components/deprecated/tfx/ExampleGen/BigQueryExampleGen/with_URI_IO/component.yaml similarity index 100% rename from components/tfx/ExampleGen/BigQueryExampleGen/with_URI_IO/component.yaml rename to components/deprecated/tfx/ExampleGen/BigQueryExampleGen/with_URI_IO/component.yaml diff --git a/components/tfx/ExampleGen/CsvExampleGen/component.py b/components/deprecated/tfx/ExampleGen/CsvExampleGen/component.py similarity index 100% rename from components/tfx/ExampleGen/CsvExampleGen/component.py rename to components/deprecated/tfx/ExampleGen/CsvExampleGen/component.py diff --git a/components/tfx/ExampleGen/CsvExampleGen/component.yaml b/components/deprecated/tfx/ExampleGen/CsvExampleGen/component.yaml similarity index 100% rename from 
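A hedged usage sketch for the Chicago Taxi Trips dataset component added above. The raw GitHub URL and the pythonized input names (`where`, `select`, `limit`) are assumptions; point `load_component_from_url` at wherever the component.yaml is actually published.

```python
import kfp
from kfp import components

chicago_taxi_dataset_op = components.load_component_from_url(
    'https://raw.githubusercontent.com/kubeflow/pipelines/master/'
    'components/datasets/Chicago_Taxi_Trips/component.yaml'  # assumed location
)


@kfp.dsl.pipeline(name='chicago-taxi-sample')
def taxi_pipeline():
    # Keep the result small: narrow the date range and cap the row count.
    training_data = chicago_taxi_dataset_op(
        where='trip_start_timestamp >= "2019-01-01" AND trip_start_timestamp < "2019-02-01"',
        select='tips,trip_seconds,trip_miles,fare',
        limit=1000,
    )
    # Downstream steps would consume training_data.outputs['Table'] (CSV by default).


if __name__ == '__main__':
    kfp.compiler.Compiler().compile(taxi_pipeline, 'taxi_pipeline.yaml')
```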
components/tfx/ExampleGen/CsvExampleGen/component.yaml rename to components/deprecated/tfx/ExampleGen/CsvExampleGen/component.yaml diff --git a/components/tfx/ExampleGen/CsvExampleGen/with_URI_IO/component.py b/components/deprecated/tfx/ExampleGen/CsvExampleGen/with_URI_IO/component.py similarity index 100% rename from components/tfx/ExampleGen/CsvExampleGen/with_URI_IO/component.py rename to components/deprecated/tfx/ExampleGen/CsvExampleGen/with_URI_IO/component.py diff --git a/components/tfx/ExampleGen/CsvExampleGen/with_URI_IO/component.yaml b/components/deprecated/tfx/ExampleGen/CsvExampleGen/with_URI_IO/component.yaml similarity index 100% rename from components/tfx/ExampleGen/CsvExampleGen/with_URI_IO/component.yaml rename to components/deprecated/tfx/ExampleGen/CsvExampleGen/with_URI_IO/component.yaml diff --git a/components/tfx/ExampleGen/ImportExampleGen/component.py b/components/deprecated/tfx/ExampleGen/ImportExampleGen/component.py similarity index 100% rename from components/tfx/ExampleGen/ImportExampleGen/component.py rename to components/deprecated/tfx/ExampleGen/ImportExampleGen/component.py diff --git a/components/tfx/ExampleGen/ImportExampleGen/component.yaml b/components/deprecated/tfx/ExampleGen/ImportExampleGen/component.yaml similarity index 100% rename from components/tfx/ExampleGen/ImportExampleGen/component.yaml rename to components/deprecated/tfx/ExampleGen/ImportExampleGen/component.yaml diff --git a/components/tfx/ExampleGen/ImportExampleGen/with_URI_IO/component.py b/components/deprecated/tfx/ExampleGen/ImportExampleGen/with_URI_IO/component.py similarity index 100% rename from components/tfx/ExampleGen/ImportExampleGen/with_URI_IO/component.py rename to components/deprecated/tfx/ExampleGen/ImportExampleGen/with_URI_IO/component.py diff --git a/components/tfx/ExampleGen/ImportExampleGen/with_URI_IO/component.yaml b/components/deprecated/tfx/ExampleGen/ImportExampleGen/with_URI_IO/component.yaml similarity index 100% rename from components/tfx/ExampleGen/ImportExampleGen/with_URI_IO/component.yaml rename to components/deprecated/tfx/ExampleGen/ImportExampleGen/with_URI_IO/component.yaml diff --git a/components/tfx/ExampleValidator/component.py b/components/deprecated/tfx/ExampleValidator/component.py similarity index 100% rename from components/tfx/ExampleValidator/component.py rename to components/deprecated/tfx/ExampleValidator/component.py diff --git a/components/tfx/ExampleValidator/component.yaml b/components/deprecated/tfx/ExampleValidator/component.yaml similarity index 100% rename from components/tfx/ExampleValidator/component.yaml rename to components/deprecated/tfx/ExampleValidator/component.yaml diff --git a/components/tfx/ExampleValidator/with_URI_IO/component.py b/components/deprecated/tfx/ExampleValidator/with_URI_IO/component.py similarity index 100% rename from components/tfx/ExampleValidator/with_URI_IO/component.py rename to components/deprecated/tfx/ExampleValidator/with_URI_IO/component.py diff --git a/components/tfx/ExampleValidator/with_URI_IO/component.yaml b/components/deprecated/tfx/ExampleValidator/with_URI_IO/component.yaml similarity index 100% rename from components/tfx/ExampleValidator/with_URI_IO/component.yaml rename to components/deprecated/tfx/ExampleValidator/with_URI_IO/component.yaml diff --git a/components/tfx/README.md b/components/deprecated/tfx/README.md similarity index 100% rename from components/tfx/README.md rename to components/deprecated/tfx/README.md diff --git a/components/tfx/SchemaGen/component.py 
b/components/deprecated/tfx/SchemaGen/component.py similarity index 100% rename from components/tfx/SchemaGen/component.py rename to components/deprecated/tfx/SchemaGen/component.py diff --git a/components/tfx/SchemaGen/component.yaml b/components/deprecated/tfx/SchemaGen/component.yaml similarity index 100% rename from components/tfx/SchemaGen/component.yaml rename to components/deprecated/tfx/SchemaGen/component.yaml diff --git a/components/tfx/SchemaGen/with_URI_IO/component.py b/components/deprecated/tfx/SchemaGen/with_URI_IO/component.py similarity index 100% rename from components/tfx/SchemaGen/with_URI_IO/component.py rename to components/deprecated/tfx/SchemaGen/with_URI_IO/component.py diff --git a/components/tfx/SchemaGen/with_URI_IO/component.yaml b/components/deprecated/tfx/SchemaGen/with_URI_IO/component.yaml similarity index 100% rename from components/tfx/SchemaGen/with_URI_IO/component.yaml rename to components/deprecated/tfx/SchemaGen/with_URI_IO/component.yaml diff --git a/components/tfx/StatisticsGen/component.py b/components/deprecated/tfx/StatisticsGen/component.py similarity index 100% rename from components/tfx/StatisticsGen/component.py rename to components/deprecated/tfx/StatisticsGen/component.py diff --git a/components/tfx/StatisticsGen/component.yaml b/components/deprecated/tfx/StatisticsGen/component.yaml similarity index 100% rename from components/tfx/StatisticsGen/component.yaml rename to components/deprecated/tfx/StatisticsGen/component.yaml diff --git a/components/tfx/StatisticsGen/with_URI_IO/component.py b/components/deprecated/tfx/StatisticsGen/with_URI_IO/component.py similarity index 100% rename from components/tfx/StatisticsGen/with_URI_IO/component.py rename to components/deprecated/tfx/StatisticsGen/with_URI_IO/component.py diff --git a/components/tfx/StatisticsGen/with_URI_IO/component.yaml b/components/deprecated/tfx/StatisticsGen/with_URI_IO/component.yaml similarity index 100% rename from components/tfx/StatisticsGen/with_URI_IO/component.yaml rename to components/deprecated/tfx/StatisticsGen/with_URI_IO/component.yaml diff --git a/components/tfx/Trainer/component.py b/components/deprecated/tfx/Trainer/component.py similarity index 100% rename from components/tfx/Trainer/component.py rename to components/deprecated/tfx/Trainer/component.py diff --git a/components/tfx/Trainer/component.yaml b/components/deprecated/tfx/Trainer/component.yaml similarity index 100% rename from components/tfx/Trainer/component.yaml rename to components/deprecated/tfx/Trainer/component.yaml diff --git a/components/tfx/Trainer/with_URI_IO/component.py b/components/deprecated/tfx/Trainer/with_URI_IO/component.py similarity index 100% rename from components/tfx/Trainer/with_URI_IO/component.py rename to components/deprecated/tfx/Trainer/with_URI_IO/component.py diff --git a/components/tfx/Trainer/with_URI_IO/component.yaml b/components/deprecated/tfx/Trainer/with_URI_IO/component.yaml similarity index 100% rename from components/tfx/Trainer/with_URI_IO/component.yaml rename to components/deprecated/tfx/Trainer/with_URI_IO/component.yaml diff --git a/components/tfx/Transform/component.py b/components/deprecated/tfx/Transform/component.py similarity index 100% rename from components/tfx/Transform/component.py rename to components/deprecated/tfx/Transform/component.py diff --git a/components/tfx/Transform/component.yaml b/components/deprecated/tfx/Transform/component.yaml similarity index 100% rename from components/tfx/Transform/component.yaml rename to 
components/deprecated/tfx/Transform/component.yaml diff --git a/components/tfx/Transform/with_URI_IO/component.py b/components/deprecated/tfx/Transform/with_URI_IO/component.py similarity index 100% rename from components/tfx/Transform/with_URI_IO/component.py rename to components/deprecated/tfx/Transform/with_URI_IO/component.py diff --git a/components/tfx/Transform/with_URI_IO/component.yaml b/components/deprecated/tfx/Transform/with_URI_IO/component.yaml similarity index 100% rename from components/tfx/Transform/with_URI_IO/component.yaml rename to components/deprecated/tfx/Transform/with_URI_IO/component.yaml diff --git a/components/tfx/_samples/TFX_Dataflow_pipeline.ipynb b/components/deprecated/tfx/_samples/TFX_Dataflow_pipeline.ipynb similarity index 100% rename from components/tfx/_samples/TFX_Dataflow_pipeline.ipynb rename to components/deprecated/tfx/_samples/TFX_Dataflow_pipeline.ipynb diff --git a/components/tfx/_samples/TFX_pipeline.ipynb b/components/deprecated/tfx/_samples/TFX_pipeline.ipynb similarity index 100% rename from components/tfx/_samples/TFX_pipeline.ipynb rename to components/deprecated/tfx/_samples/TFX_pipeline.ipynb diff --git a/components/diagnostics/diagnose_me/component.yaml b/components/diagnostics/diagnose_me/component.yaml index 48538ab91..8966c5d1f 100644 --- a/components/diagnostics/diagnose_me/component.yaml +++ b/components/diagnostics/diagnose_me/component.yaml @@ -23,6 +23,9 @@ description: |- Raises: RuntimeError: If configuration is not setup properly and HALT_ON_ERROR flag is set. +metadata: + annotations: + volatile_component: "true" inputs: - name: bucket type: String diff --git a/components/gcp/bigquery/query/README.md b/components/gcp/bigquery/query/README.md index e5802eae1..60eec425a 100644 --- a/components/gcp/bigquery/query/README.md +++ b/components/gcp/bigquery/query/README.md @@ -85,7 +85,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar import kfp.components as comp bigquery_query_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/bigquery/query/component.yaml') + 'https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/bigquery/query/component.yaml') help(bigquery_query_op) ``` diff --git a/components/gcp/bigquery/query/component.yaml b/components/gcp/bigquery/query/component.yaml index a6c3a3462..af6798271 100644 --- a/components/gcp/bigquery/query/component.yaml +++ b/components/gcp/bigquery/query/component.yaml @@ -59,7 +59,7 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:1.0.0 args: [ --ui_metadata_path, {outputPath: MLPipeline UI metadata}, kfp_component.google.bigquery, query, diff --git a/components/gcp/bigquery/query/sample.ipynb b/components/gcp/bigquery/query/sample.ipynb index b1ad4c793..191cc5e77 100644 --- a/components/gcp/bigquery/query/sample.ipynb +++ b/components/gcp/bigquery/query/sample.ipynb @@ -104,7 +104,7 @@ "import kfp.components as comp\n", "\n", "bigquery_query_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/bigquery/query/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/bigquery/query/component.yaml')\n", "help(bigquery_query_op)" ] }, diff --git 
a/components/gcp/container/component_sdk/python/kfp_component/google/dataflow/_client.py b/components/gcp/container/component_sdk/python/kfp_component/google/dataflow/_client.py index 8d5783afb..c5e4828ee 100644 --- a/components/gcp/container/component_sdk/python/kfp_component/google/dataflow/_client.py +++ b/components/gcp/container/component_sdk/python/kfp_component/google/dataflow/_client.py @@ -21,7 +21,7 @@ class DataflowClient: def launch_template(self, project_id, gcs_path, location, validate_only, launch_parameters): - return self._df.projects().templates().launch( + return self._df.projects().locations().templates().launch( projectId = project_id, gcsPath = gcs_path, location = location, diff --git a/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_create_job.py b/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_create_job.py index 73f130d77..999e510b6 100644 --- a/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_create_job.py +++ b/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_create_job.py @@ -26,7 +26,8 @@ from kfp_component.core import KfpExecutionContext from ._client import MLEngineClient from .. import common as gcp_common -def create_job(project_id, job, job_id_prefix=None, wait_interval=30): +def create_job(project_id, job, job_id_prefix=None, job_id=None, + wait_interval=30): """Creates a MLEngine job. Args: @@ -34,6 +35,8 @@ def create_job(project_id, job, job_id_prefix=None, wait_interval=30): job: the payload of the job. Must have ``jobId`` and ``trainingInput`` or ``predictionInput`. job_id_prefix: the prefix of the generated job id. + job_id: the created job_id, takes precedence over generated job + id if set. wait_interval: optional wait interval between calls to get job status. Defaults to 30. @@ -42,15 +45,16 @@ def create_job(project_id, job, job_id_prefix=None, wait_interval=30): /tmp/kfp/output/ml_engine/job_id.txt: The ID of the created job. /tmp/kfp/output/ml_engine/job_dir.txt: The `jobDir` of the training job. 
""" - return CreateJobOp(project_id, job, job_id_prefix, - wait_interval).execute_and_wait() + return CreateJobOp(project_id, job, job_id_prefix, job_id, wait_interval + ).execute_and_wait() class CreateJobOp: - def __init__(self, project_id, job, job_id_prefix=None, wait_interval=30): + def __init__(self,project_id, job, job_id_prefix=None, job_id=None, + wait_interval=30): self._ml = MLEngineClient() self._project_id = project_id self._job_id_prefix = job_id_prefix - self._job_id = None + self._job_id = job_id self._job = job self._wait_interval = wait_interval @@ -61,7 +65,9 @@ class CreateJobOp: return wait_for_job_done(self._ml, self._project_id, self._job_id, self._wait_interval) def _set_job_id(self, context_id): - if self._job_id_prefix: + if self._job_id: + job_id = self._job_id + elif self._job_id_prefix: job_id = self._job_id_prefix + context_id[:16] else: job_id = 'job_' + context_id diff --git a/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_train.py b/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_train.py index ff6a44786..3632103e7 100644 --- a/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_train.py +++ b/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_train.py @@ -19,7 +19,7 @@ from ._create_job import create_job def train(project_id, python_module=None, package_uris=None, region=None, args=None, job_dir=None, python_version=None, runtime_version=None, master_image_uri=None, worker_image_uri=None, - training_input=None, job_id_prefix=None, wait_interval=30): + training_input=None, job_id_prefix=None, job_id=None, wait_interval=30): """Creates a MLEngine training job. Args: @@ -50,6 +50,8 @@ def train(project_id, python_module=None, package_uris=None, This image must be in Container Registry. training_input (dict): Input parameters to create a training job. job_id_prefix (str): the prefix of the generated job id. + job_id (str): the created job_id, takes precedence over generated job + id if set. wait_interval (int): optional wait interval between calls to get job status. Defaults to 30. 
""" @@ -80,4 +82,4 @@ def train(project_id, python_module=None, package_uris=None, job = { 'trainingInput': training_input } - return create_job(project_id, job, job_id_prefix, wait_interval) \ No newline at end of file + return create_job(project_id, job, job_id_prefix, job_id, wait_interval) \ No newline at end of file diff --git a/components/gcp/container/component_sdk/python/setup.py b/components/gcp/container/component_sdk/python/setup.py index aaf851259..b0bf28af9 100644 --- a/components/gcp/container/component_sdk/python/setup.py +++ b/components/gcp/container/component_sdk/python/setup.py @@ -15,7 +15,7 @@ from setuptools import setup PACKAGE_NAME = 'kfp-component' -VERSION = '0.4.0' +VERSION = '1.0.0' setup( name=PACKAGE_NAME, diff --git a/components/gcp/container/component_sdk/python/tests/google/ml_engine/test__create_job.py b/components/gcp/container/component_sdk/python/tests/google/ml_engine/test__create_job.py index 8fcd0b10c..f94824bb1 100644 --- a/components/gcp/container/component_sdk/python/tests/google/ml_engine/test__create_job.py +++ b/components/gcp/container/component_sdk/python/tests/google/ml_engine/test__create_job.py @@ -67,6 +67,27 @@ class TestCreateJob(unittest.TestCase): 'jobId': 'mock_job_ctx1' } ) + + def test_create_job_with_job_id_succeed(self, mock_mlengine_client, + mock_kfp_context, mock_dump_json, mock_display): + mock_kfp_context().__enter__().context_id.return_value = 'ctx1' + job = {} + returned_job = { + 'jobId': 'mock_job', + 'state': 'SUCCEEDED' + } + mock_mlengine_client().get_job.return_value = ( + returned_job) + + result = create_job('mock_project', job, job_id='mock_job') + + self.assertEqual(returned_job, result) + mock_mlengine_client().create_job.assert_called_with( + project_id = 'mock_project', + job = { + 'jobId': 'mock_job' + } + ) def test_execute_retry_job_success(self, mock_mlengine_client, mock_kfp_context, mock_dump_json, mock_display): diff --git a/components/gcp/container/component_sdk/python/tests/google/ml_engine/test__train.py b/components/gcp/container/component_sdk/python/tests/google/ml_engine/test__train.py index 9825b1aee..341b4ce2b 100644 --- a/components/gcp/container/component_sdk/python/tests/google/ml_engine/test__train.py +++ b/components/gcp/container/component_sdk/python/tests/google/ml_engine/test__train.py @@ -21,7 +21,7 @@ from kfp_component.google.ml_engine import train CREATE_JOB_MODULE = 'kfp_component.google.ml_engine._train' @mock.patch(CREATE_JOB_MODULE + '.create_job') -class TestCreateTraingingJob(unittest.TestCase): +class TestCreateTrainingJob(unittest.TestCase): def test_train_succeed(self, mock_create_job): train('proj-1', 'mock.module', ['gs://test/package'], @@ -29,7 +29,8 @@ class TestCreateTraingingJob(unittest.TestCase): training_input={ 'runtimeVersion': '1.10', 'pythonVersion': '2.7' - }, job_id_prefix='job-', master_image_uri='tensorflow:latest', + }, job_id_prefix='job-', job_id='job-1', + master_image_uri='tensorflow:latest', worker_image_uri='debian:latest') mock_create_job.assert_called_with('proj-1', { @@ -48,4 +49,4 @@ class TestCreateTraingingJob(unittest.TestCase): 'imageUri': 'debian:latest' } } - }, 'job-', 30) + }, 'job-', 'job-1', 30) diff --git a/components/gcp/dataflow/launch_python/README.md b/components/gcp/dataflow/launch_python/README.md index dd62d02ae..b1e928db5 100644 --- a/components/gcp/dataflow/launch_python/README.md +++ b/components/gcp/dataflow/launch_python/README.md @@ -91,7 +91,7 @@ The steps to use the component in a pipeline are: ```python import 
kfp.components as comp - dataflow_python_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataflow/launch_python/component.yaml') + dataflow_python_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/dataflow/launch_python/component.yaml') help(dataflow_python_op) ``` diff --git a/components/gcp/dataflow/launch_python/component.yaml b/components/gcp/dataflow/launch_python/component.yaml index 21fe7b8ba..794fbc1bb 100644 --- a/components/gcp/dataflow/launch_python/component.yaml +++ b/components/gcp/dataflow/launch_python/component.yaml @@ -53,7 +53,7 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:1.0.0 args: [ --ui_metadata_path, {outputPath: MLPipeline UI metadata}, kfp_component.google.dataflow, launch_python, diff --git a/components/gcp/dataflow/launch_python/sample.ipynb b/components/gcp/dataflow/launch_python/sample.ipynb index 745a3cb68..b6860094c 100644 --- a/components/gcp/dataflow/launch_python/sample.ipynb +++ b/components/gcp/dataflow/launch_python/sample.ipynb @@ -92,7 +92,7 @@ "import kfp.components as comp\n", "\n", "dataflow_python_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataflow/launch_python/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/dataflow/launch_python/component.yaml')\n", "help(dataflow_python_op)" ] }, diff --git a/components/gcp/dataflow/launch_template/README.md b/components/gcp/dataflow/launch_template/README.md index afb506743..004469372 100644 --- a/components/gcp/dataflow/launch_template/README.md +++ b/components/gcp/dataflow/launch_template/README.md @@ -64,7 +64,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar import kfp.components as comp dataflow_template_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataflow/launch_template/component.yaml') + 'https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/dataflow/launch_template/component.yaml') help(dataflow_template_op) ``` diff --git a/components/gcp/dataflow/launch_template/component.yaml b/components/gcp/dataflow/launch_template/component.yaml index ca86d350e..d6d288c29 100644 --- a/components/gcp/dataflow/launch_template/component.yaml +++ b/components/gcp/dataflow/launch_template/component.yaml @@ -63,7 +63,7 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:1.0.0 args: [ --ui_metadata_path, {outputPath: MLPipeline UI metadata}, kfp_component.google.dataflow, launch_template, diff --git a/components/gcp/dataflow/launch_template/sample.ipynb b/components/gcp/dataflow/launch_template/sample.ipynb index db2bb3e62..4af7ec925 100644 --- a/components/gcp/dataflow/launch_template/sample.ipynb +++ b/components/gcp/dataflow/launch_template/sample.ipynb @@ -82,7 +82,7 @@ "import kfp.components as comp\n", "\n", "dataflow_template_op = comp.load_component_from_url(\n", - " 
'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataflow/launch_template/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/dataflow/launch_template/component.yaml')\n", "help(dataflow_template_op)" ] }, diff --git a/components/gcp/dataproc/create_cluster/README.md b/components/gcp/dataproc/create_cluster/README.md index 48028a0e8..181f5fb55 100644 --- a/components/gcp/dataproc/create_cluster/README.md +++ b/components/gcp/dataproc/create_cluster/README.md @@ -88,7 +88,7 @@ Follow these steps to use the component in a pipeline: ```python import kfp.components as comp - dataproc_create_cluster_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/create_cluster/component.yaml') + dataproc_create_cluster_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/dataproc/create_cluster/component.yaml') help(dataproc_create_cluster_op) ``` diff --git a/components/gcp/dataproc/create_cluster/component.yaml b/components/gcp/dataproc/create_cluster/component.yaml index f2add9920..d44266739 100644 --- a/components/gcp/dataproc/create_cluster/component.yaml +++ b/components/gcp/dataproc/create_cluster/component.yaml @@ -70,7 +70,7 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:1.0.0 args: [ --ui_metadata_path, {outputPath: MLPipeline UI metadata}, kfp_component.google.dataproc, create_cluster, diff --git a/components/gcp/dataproc/create_cluster/sample.ipynb b/components/gcp/dataproc/create_cluster/sample.ipynb index 8baeda37a..b3dd2ce3f 100644 --- a/components/gcp/dataproc/create_cluster/sample.ipynb +++ b/components/gcp/dataproc/create_cluster/sample.ipynb @@ -88,7 +88,7 @@ "import kfp.components as comp\n", "\n", "dataproc_create_cluster_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/create_cluster/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/dataproc/create_cluster/component.yaml')\n", "help(dataproc_create_cluster_op)" ] }, diff --git a/components/gcp/dataproc/delete_cluster/README.md b/components/gcp/dataproc/delete_cluster/README.md index a26600b6a..eb4ed2288 100644 --- a/components/gcp/dataproc/delete_cluster/README.md +++ b/components/gcp/dataproc/delete_cluster/README.md @@ -66,7 +66,7 @@ Follow these steps to use the component in a pipeline: ```python import kfp.components as comp - dataproc_delete_cluster_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/delete_cluster/component.yaml') + dataproc_delete_cluster_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/dataproc/delete_cluster/component.yaml') help(dataproc_delete_cluster_op) ``` diff --git a/components/gcp/dataproc/delete_cluster/component.yaml b/components/gcp/dataproc/delete_cluster/component.yaml index 9b9282f55..615988b3b 100644 --- a/components/gcp/dataproc/delete_cluster/component.yaml +++ b/components/gcp/dataproc/delete_cluster/component.yaml @@ -36,7 +36,7 @@ inputs: type: Integer implementation: 
container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:1.0.0 args: [ kfp_component.google.dataproc, delete_cluster, --project_id, {inputValue: project_id}, diff --git a/components/gcp/dataproc/delete_cluster/sample.ipynb b/components/gcp/dataproc/delete_cluster/sample.ipynb index 3ffdd2bdf..8d727d2fb 100644 --- a/components/gcp/dataproc/delete_cluster/sample.ipynb +++ b/components/gcp/dataproc/delete_cluster/sample.ipynb @@ -71,7 +71,7 @@ "import kfp.components as comp\n", "\n", "dataproc_delete_cluster_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/delete_cluster/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/dataproc/delete_cluster/component.yaml')\n", "help(dataproc_delete_cluster_op)" ] }, diff --git a/components/gcp/dataproc/submit_hadoop_job/README.md b/components/gcp/dataproc/submit_hadoop_job/README.md index 52498b1b0..2faba4b03 100644 --- a/components/gcp/dataproc/submit_hadoop_job/README.md +++ b/components/gcp/dataproc/submit_hadoop_job/README.md @@ -83,7 +83,7 @@ Follow these steps to use the component in a pipeline: ```python import kfp.components as comp - dataproc_submit_hadoop_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/submit_hadoop_job/component.yaml') + dataproc_submit_hadoop_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/dataproc/submit_hadoop_job/component.yaml') help(dataproc_submit_hadoop_job_op) ``` diff --git a/components/gcp/dataproc/submit_hadoop_job/component.yaml b/components/gcp/dataproc/submit_hadoop_job/component.yaml index 451f3269c..80f43decd 100644 --- a/components/gcp/dataproc/submit_hadoop_job/component.yaml +++ b/components/gcp/dataproc/submit_hadoop_job/component.yaml @@ -80,7 +80,7 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:1.0.0 args: [ --ui_metadata_path, {outputPath: MLPipeline UI metadata}, kfp_component.google.dataproc, submit_hadoop_job, diff --git a/components/gcp/dataproc/submit_hadoop_job/sample.ipynb b/components/gcp/dataproc/submit_hadoop_job/sample.ipynb index aa5d464e8..facbd6ff4 100644 --- a/components/gcp/dataproc/submit_hadoop_job/sample.ipynb +++ b/components/gcp/dataproc/submit_hadoop_job/sample.ipynb @@ -86,7 +86,7 @@ "import kfp.components as comp\n", "\n", "dataproc_submit_hadoop_job_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/submit_hadoop_job/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/dataproc/submit_hadoop_job/component.yaml')\n", "help(dataproc_submit_hadoop_job_op)" ] }, diff --git a/components/gcp/dataproc/submit_hive_job/README.md b/components/gcp/dataproc/submit_hive_job/README.md index 1dd4e1840..2632bef13 100644 --- a/components/gcp/dataproc/submit_hive_job/README.md +++ b/components/gcp/dataproc/submit_hive_job/README.md @@ -73,7 +73,7 @@ Follow these steps to use the component in a pipeline: ```python import kfp.components as comp - dataproc_submit_hive_job_op = 
comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/submit_hive_job/component.yaml') + dataproc_submit_hive_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/dataproc/submit_hive_job/component.yaml') help(dataproc_submit_hive_job_op) ``` diff --git a/components/gcp/dataproc/submit_hive_job/component.yaml b/components/gcp/dataproc/submit_hive_job/component.yaml index 3de85d0c6..61fa0acf3 100644 --- a/components/gcp/dataproc/submit_hive_job/component.yaml +++ b/components/gcp/dataproc/submit_hive_job/component.yaml @@ -75,7 +75,7 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:1.0.0 args: [ --ui_metadata_path, {outputPath: MLPipeline UI metadata}, kfp_component.google.dataproc, submit_hive_job, diff --git a/components/gcp/dataproc/submit_hive_job/sample.ipynb b/components/gcp/dataproc/submit_hive_job/sample.ipynb index 3cd9a87a9..ce52cb803 100644 --- a/components/gcp/dataproc/submit_hive_job/sample.ipynb +++ b/components/gcp/dataproc/submit_hive_job/sample.ipynb @@ -77,7 +77,7 @@ "import kfp.components as comp\n", "\n", "dataproc_submit_hive_job_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/submit_hive_job/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/dataproc/submit_hive_job/component.yaml')\n", "help(dataproc_submit_hive_job_op)" ] }, diff --git a/components/gcp/dataproc/submit_pig_job/README.md b/components/gcp/dataproc/submit_pig_job/README.md index 9da02d3a8..32a03fa24 100644 --- a/components/gcp/dataproc/submit_pig_job/README.md +++ b/components/gcp/dataproc/submit_pig_job/README.md @@ -82,7 +82,7 @@ Follow these steps to use the component in a pipeline: ```python import kfp.components as comp - dataproc_submit_pig_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/submit_pig_job/component.yaml') + dataproc_submit_pig_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/dataproc/submit_pig_job/component.yaml') help(dataproc_submit_pig_job_op) ``` diff --git a/components/gcp/dataproc/submit_pig_job/component.yaml b/components/gcp/dataproc/submit_pig_job/component.yaml index a1e718a43..f29c5350a 100644 --- a/components/gcp/dataproc/submit_pig_job/component.yaml +++ b/components/gcp/dataproc/submit_pig_job/component.yaml @@ -75,7 +75,7 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:1.0.0 args: [ --ui_metadata_path, {outputPath: MLPipeline UI metadata}, kfp_component.google.dataproc, submit_pig_job, diff --git a/components/gcp/dataproc/submit_pig_job/sample.ipynb b/components/gcp/dataproc/submit_pig_job/sample.ipynb index 7fbf28831..042b3d601 100644 --- a/components/gcp/dataproc/submit_pig_job/sample.ipynb +++ b/components/gcp/dataproc/submit_pig_job/sample.ipynb @@ -80,7 +80,7 @@ "import kfp.components as comp\n", "\n", "dataproc_submit_pig_job_op = comp.load_component_from_url(\n", - " 
'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/submit_pig_job/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/dataproc/submit_pig_job/component.yaml')\n", "help(dataproc_submit_pig_job_op)" ] }, diff --git a/components/gcp/dataproc/submit_pyspark_job/README.md b/components/gcp/dataproc/submit_pyspark_job/README.md index 233b43e7e..1362cd7ef 100644 --- a/components/gcp/dataproc/submit_pyspark_job/README.md +++ b/components/gcp/dataproc/submit_pyspark_job/README.md @@ -79,7 +79,7 @@ Follow these steps to use the component in a pipeline: ```python import kfp.components as comp - dataproc_submit_pyspark_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/submit_pyspark_job/component.yaml') + dataproc_submit_pyspark_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/dataproc/submit_pyspark_job/component.yaml') help(dataproc_submit_pyspark_job_op) ``` diff --git a/components/gcp/dataproc/submit_pyspark_job/component.yaml b/components/gcp/dataproc/submit_pyspark_job/component.yaml index 5e9db428d..436c687a1 100644 --- a/components/gcp/dataproc/submit_pyspark_job/component.yaml +++ b/components/gcp/dataproc/submit_pyspark_job/component.yaml @@ -69,7 +69,7 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:1.0.0 args: [ --ui_metadata_path, {outputPath: MLPipeline UI metadata}, kfp_component.google.dataproc, submit_pyspark_job, diff --git a/components/gcp/dataproc/submit_pyspark_job/sample.ipynb b/components/gcp/dataproc/submit_pyspark_job/sample.ipynb index 8c96cfec7..7f91b7ad9 100644 --- a/components/gcp/dataproc/submit_pyspark_job/sample.ipynb +++ b/components/gcp/dataproc/submit_pyspark_job/sample.ipynb @@ -82,7 +82,7 @@ "import kfp.components as comp\n", "\n", "dataproc_submit_pyspark_job_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/submit_pyspark_job/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/dataproc/submit_pyspark_job/component.yaml')\n", "help(dataproc_submit_pyspark_job_op)" ] }, diff --git a/components/gcp/dataproc/submit_spark_job/README.md b/components/gcp/dataproc/submit_spark_job/README.md index 3b7320691..95c594dcd 100644 --- a/components/gcp/dataproc/submit_spark_job/README.md +++ b/components/gcp/dataproc/submit_spark_job/README.md @@ -95,7 +95,7 @@ Follow these steps to use the component in a pipeline: import kfp.components as comp dataproc_submit_spark_job_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/submit_spark_job/component.yaml') + 'https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/dataproc/submit_spark_job/component.yaml') help(dataproc_submit_spark_job_op) ``` diff --git a/components/gcp/dataproc/submit_spark_job/component.yaml b/components/gcp/dataproc/submit_spark_job/component.yaml index 4200f6a46..385036ecd 100644 --- a/components/gcp/dataproc/submit_spark_job/component.yaml +++ b/components/gcp/dataproc/submit_spark_job/component.yaml @@ -76,7 
+76,7 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:1.0.0 args: [ --ui_metadata_path, {outputPath: MLPipeline UI metadata}, kfp_component.google.dataproc, submit_spark_job, diff --git a/components/gcp/dataproc/submit_spark_job/sample.ipynb b/components/gcp/dataproc/submit_spark_job/sample.ipynb index 253201820..a9334f181 100644 --- a/components/gcp/dataproc/submit_spark_job/sample.ipynb +++ b/components/gcp/dataproc/submit_spark_job/sample.ipynb @@ -93,7 +93,7 @@ "import kfp.components as comp\n", "\n", "dataproc_submit_spark_job_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/submit_spark_job/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/dataproc/submit_spark_job/component.yaml')\n", "help(dataproc_submit_spark_job_op)" ] }, diff --git a/components/gcp/dataproc/submit_sparksql_job/README.md b/components/gcp/dataproc/submit_sparksql_job/README.md index 6393a01c4..5d29fca1c 100644 --- a/components/gcp/dataproc/submit_sparksql_job/README.md +++ b/components/gcp/dataproc/submit_sparksql_job/README.md @@ -74,7 +74,7 @@ Follow these steps to use the component in a pipeline: ```python import kfp.components as comp - dataproc_submit_sparksql_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/submit_sparksql_job/component.yaml') + dataproc_submit_sparksql_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/dataproc/submit_sparksql_job/component.yaml') help(dataproc_submit_sparksql_job_op) ``` diff --git a/components/gcp/dataproc/submit_sparksql_job/component.yaml b/components/gcp/dataproc/submit_sparksql_job/component.yaml index 3cffc8d5d..8287b5c50 100644 --- a/components/gcp/dataproc/submit_sparksql_job/component.yaml +++ b/components/gcp/dataproc/submit_sparksql_job/component.yaml @@ -75,7 +75,7 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:1.0.0 args: [ --ui_metadata_path, {outputPath: MLPipeline UI metadata}, kfp_component.google.dataproc, submit_sparksql_job, diff --git a/components/gcp/dataproc/submit_sparksql_job/sample.ipynb b/components/gcp/dataproc/submit_sparksql_job/sample.ipynb index 21de17a2c..7dcc987ba 100644 --- a/components/gcp/dataproc/submit_sparksql_job/sample.ipynb +++ b/components/gcp/dataproc/submit_sparksql_job/sample.ipynb @@ -78,7 +78,7 @@ "import kfp.components as comp\n", "\n", "dataproc_submit_sparksql_job_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/submit_sparksql_job/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/dataproc/submit_sparksql_job/component.yaml')\n", "help(dataproc_submit_sparksql_job_op)" ] }, diff --git a/components/gcp/ml_engine/batch_predict/README.md b/components/gcp/ml_engine/batch_predict/README.md index 690c0fae6..05eadc5b2 100644 --- a/components/gcp/ml_engine/batch_predict/README.md +++ b/components/gcp/ml_engine/batch_predict/README.md @@ -87,7 +87,7 @@ KFP_PACKAGE = 
'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar import kfp.components as comp mlengine_batch_predict_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/ml_engine/batch_predict/component.yaml') + 'https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/ml_engine/batch_predict/component.yaml') help(mlengine_batch_predict_op) ``` diff --git a/components/gcp/ml_engine/batch_predict/component.yaml b/components/gcp/ml_engine/batch_predict/component.yaml index a5f7e91e2..a24873034 100644 --- a/components/gcp/ml_engine/batch_predict/component.yaml +++ b/components/gcp/ml_engine/batch_predict/component.yaml @@ -69,7 +69,7 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:1.0.0 args: [ --ui_metadata_path, {outputPath: MLPipeline UI metadata}, kfp_component.google.ml_engine, batch_predict, diff --git a/components/gcp/ml_engine/batch_predict/sample.ipynb b/components/gcp/ml_engine/batch_predict/sample.ipynb index 793557488..e908a63bb 100644 --- a/components/gcp/ml_engine/batch_predict/sample.ipynb +++ b/components/gcp/ml_engine/batch_predict/sample.ipynb @@ -106,7 +106,7 @@ "import kfp.components as comp\n", "\n", "mlengine_batch_predict_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/ml_engine/batch_predict/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/ml_engine/batch_predict/component.yaml')\n", "help(mlengine_batch_predict_op)" ] }, diff --git a/components/gcp/ml_engine/deploy/README.md b/components/gcp/ml_engine/deploy/README.md index 84fd3b023..b8b72c554 100644 --- a/components/gcp/ml_engine/deploy/README.md +++ b/components/gcp/ml_engine/deploy/README.md @@ -104,7 +104,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar import kfp.components as comp mlengine_deploy_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/ml_engine/deploy/component.yaml') + 'https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/ml_engine/deploy/component.yaml') help(mlengine_deploy_op) ``` diff --git a/components/gcp/ml_engine/deploy/component.yaml b/components/gcp/ml_engine/deploy/component.yaml index e2e399fa6..9e86022a3 100644 --- a/components/gcp/ml_engine/deploy/component.yaml +++ b/components/gcp/ml_engine/deploy/component.yaml @@ -95,7 +95,7 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:1.0.0 args: [ --ui_metadata_path, {outputPath: MLPipeline UI metadata}, kfp_component.google.ml_engine, deploy, diff --git a/components/gcp/ml_engine/deploy/sample.ipynb b/components/gcp/ml_engine/deploy/sample.ipynb index 50f583cee..c1532b416 100644 --- a/components/gcp/ml_engine/deploy/sample.ipynb +++ b/components/gcp/ml_engine/deploy/sample.ipynb @@ -121,7 +121,7 @@ "import kfp.components as comp\n", "\n", "mlengine_deploy_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/ml_engine/deploy/component.yaml')\n", + " 
'https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/ml_engine/deploy/component.yaml')\n", "help(mlengine_deploy_op)" ] }, diff --git a/components/gcp/ml_engine/train/README.md b/components/gcp/ml_engine/train/README.md index 3fecc9f82..832244821 100644 --- a/components/gcp/ml_engine/train/README.md +++ b/components/gcp/ml_engine/train/README.md @@ -49,6 +49,7 @@ Use this component to submit a training job to AI Platform from a Kubeflow pipel | worker_image_uri | The Docker image to run on the worker replica. This image must be in Container Registry. | Yes | GCRPath |- | None | | training_input | The input parameters to create a training job. | Yes | Dict | [TrainingInput](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#TrainingInput) | None | | job_id_prefix | The prefix of the job ID that is generated. | Yes | String | - | None | +| job_id | The ID of the job to create, takes precedence over generated job id if set. | Yes | String | - | None | | wait_interval | The number of seconds to wait between API calls to get the status of the job. | Yes | Integer | - | 30 | @@ -99,7 +100,7 @@ The steps to use the component in a pipeline are: ```python import kfp.components as comp - mlengine_train_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/ml_engine/train/component.yaml') + mlengine_train_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/ml_engine/train/component.yaml') help(mlengine_train_op) ``` ### Sample @@ -179,6 +180,7 @@ def pipeline( worker_image_uri = '', training_input = '', job_id_prefix = '', + job_id = '', wait_interval = '30'): task = mlengine_train_op( project_id=project_id, @@ -193,6 +195,7 @@ def pipeline( worker_image_uri=worker_image_uri, training_input=training_input, job_id_prefix=job_id_prefix, + job_id=job_id, wait_interval=wait_interval) ``` diff --git a/components/gcp/ml_engine/train/component.yaml b/components/gcp/ml_engine/train/component.yaml index 5d26e1554..aa2ef3000 100644 --- a/components/gcp/ml_engine/train/component.yaml +++ b/components/gcp/ml_engine/train/component.yaml @@ -84,6 +84,12 @@ inputs: description: 'The prefix of the generated job id.' default: '' type: String + - name: job_id + description: >- + The ID of the job to create, takes precedence over generated + job id if set. + default: '' + type: String - name: wait_interval description: >- Optional. A time-interval to wait for between calls to get the job status. @@ -103,7 +109,7 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:1.0.0 args: [ --ui_metadata_path, {outputPath: MLPipeline UI metadata}, kfp_component.google.ml_engine, train, @@ -119,6 +125,7 @@ implementation: --worker_image_uri, {inputValue: worker_image_uri}, --training_input, {inputValue: training_input}, --job_id_prefix, {inputValue: job_id_prefix}, + --job_id, {inputValue: job_id}, --wait_interval, {inputValue: wait_interval}, ] env: diff --git a/components/gcp/ml_engine/train/sample.ipynb b/components/gcp/ml_engine/train/sample.ipynb index c9d5dfbc0..fb417fe1b 100644 --- a/components/gcp/ml_engine/train/sample.ipynb +++ b/components/gcp/ml_engine/train/sample.ipynb @@ -32,6 +32,7 @@ "| worker_image_uri | The Docker image to run on the worker replica. This image must be in Container Registry. 
| Yes | GCRPath | | None |\n", "| training_input | The input parameters to create a training job. | Yes | Dict | [TrainingInput](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#TrainingInput) | None |\n", "| job_id_prefix | The prefix of the job ID that is generated. | Yes | String | | None |\n", + "| job_id | The ID of the job to create, takes precedence over generated job id if set. | Yes | String | - | None |\n", "| wait_interval | The number of seconds to wait between API calls to get the status of the job. | Yes | Integer | | 30 |\n", "\n", "\n", @@ -99,7 +100,7 @@ "import kfp.components as comp\n", "\n", "mlengine_train_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/ml_engine/train/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/ml_engine/train/component.yaml')\n", "help(mlengine_train_op)" ] }, @@ -238,6 +239,7 @@ " worker_image_uri = '',\n", " training_input = '',\n", " job_id_prefix = '',\n", + " job_id = '',\n", " wait_interval = '30'):\n", " task = mlengine_train_op(\n", " project_id=project_id, \n", @@ -251,7 +253,8 @@ " master_image_uri=master_image_uri, \n", " worker_image_uri=worker_image_uri, \n", " training_input=training_input, \n", - " job_id_prefix=job_id_prefix, \n", + " job_id_prefix=job_id_prefix,\n", + " job_id=job_id,\n", " wait_interval=wait_interval)" ] }, @@ -354,4 +357,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/components/git/clone/component.yaml b/components/git/clone/component.yaml index 0e18ed64e..07964d8c6 100644 --- a/components/git/clone/component.yaml +++ b/components/git/clone/component.yaml @@ -1,5 +1,8 @@ name: Git clone description: Creates a shallow clone of the specified repo branch +metadata: + annotations: + volatile_component: "true" inputs: - {name: Repo URI, type: URI} - {name: Branch, type: String, default: master} diff --git a/components/google-cloud/storage/list/component.yaml b/components/google-cloud/storage/list/component.yaml index 1b7892fa9..5d8d25dd1 100644 --- a/components/google-cloud/storage/list/component.yaml +++ b/components/google-cloud/storage/list/component.yaml @@ -3,6 +3,9 @@ inputs: - {name: GCS path, type: URI, description: 'GCS path for listing. 
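The train component changes above surface `job_id` end to end, from the `component.yaml` input and README table down to the `create_job` call. A condensed sketch of wiring it into a pipeline, assuming the 1.0.0 component URL shown in the README; the project, trainer module, package URI, and job ID are placeholders:

```python
import kfp
import kfp.components as comp
import kfp.dsl as dsl

mlengine_train_op = comp.load_component_from_url(
    'https://raw.githubusercontent.com/kubeflow/pipelines/1.0.0/components/gcp/ml_engine/train/component.yaml')

@dsl.pipeline(name='CloudML train', description='Submit a training job with a fixed job ID')
def train_pipeline(project_id='my-project', job_id='my-training-job-001'):
    mlengine_train_op(
        project_id=project_id,
        python_module='trainer.task',                       # illustrative trainer module
        package_uris='["gs://my-bucket/trainer-0.1.tar.gz"]',
        region='us-central1',
        job_id=job_id)                                      # takes precedence over any generated job ID

if __name__ == '__main__':
    kfp.compiler.Compiler().compile(train_pipeline, 'train_pipeline.zip')
```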
For recursive listing use the "gs://bucket/path/**" syntax".'} outputs: - {name: Paths} +metadata: + annotations: + volatile_component: 'true' implementation: container: image: google/cloud-sdk diff --git a/components/kubeflow/deployer/component.yaml b/components/kubeflow/deployer/component.yaml index facbb394e..60d361094 100644 --- a/components/kubeflow/deployer/component.yaml +++ b/components/kubeflow/deployer/component.yaml @@ -11,7 +11,7 @@ inputs: # - {name: Endppoint URI, type: Serving URI, description: 'URI of the deployed prediction service..'} implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:ad9bd5648dd0453005225779f25d8cebebc7ca00 + image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:1.0.0 command: [/bin/deploy.sh] args: [ --model-export-path, {inputValue: Model dir}, diff --git a/components/kubeflow/dnntrainer/component.yaml b/components/kubeflow/dnntrainer/component.yaml index ecc787277..255c96ebe 100644 --- a/components/kubeflow/dnntrainer/component.yaml +++ b/components/kubeflow/dnntrainer/component.yaml @@ -16,7 +16,7 @@ outputs: - {name: MLPipeline UI metadata, type: UI metadata} implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:ad9bd5648dd0453005225779f25d8cebebc7ca00 + image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:1.0.0 command: [python2, -m, trainer.task] args: [ --transformed-data-dir, {inputValue: Transformed data dir}, diff --git a/components/kubeflow/kfserving/Dockerfile b/components/kubeflow/kfserving/Dockerfile index 82f655f0a..8021a63e1 100644 --- a/components/kubeflow/kfserving/Dockerfile +++ b/components/kubeflow/kfserving/Dockerfile @@ -1,6 +1,6 @@ FROM python:3.6-slim -RUN pip3 install kubernetes==10.0.1 kfserving==0.2.1 requests==2.22.0 Flask==1.1.1 flask-cors==3.0.8 +RUN pip3 install kubernetes==10.0.1 kfserving==0.3.0 requests==2.22.0 Flask==1.1.1 flask-cors==3.0.8 ENV APP_HOME /app COPY src $APP_HOME diff --git a/components/kubeflow/kfserving/component.yaml b/components/kubeflow/kfserving/component.yaml index ea59664ae..e6fa3cd2c 100644 --- a/components/kubeflow/kfserving/component.yaml +++ b/components/kubeflow/kfserving/component.yaml @@ -12,6 +12,7 @@ inputs: - {name: Canary Custom Model Spec, type: String, default: '{}', description: 'Custom runtime canary custom model container spec.'} - {name: Autoscaling Target, type: String, default: '0', description: 'Autoscaling Target Number'} - {name: KFServing Endpoint, type: String, default: '', description: 'KFServing remote deployer API endpoint'} + - {name: Service Account, type: String, default: '', description: 'Model Service Account'} outputs: - {name: Service Endpoint URI, type: String, description: 'URI of the deployed prediction service..'} implementation: @@ -31,5 +32,6 @@ implementation: --canary-custom-model-spec, {inputValue: Canary Custom Model Spec}, --kfserving-endpoint, {inputValue: KFServing Endpoint}, --autoscaling-target, {inputValue: Autoscaling Target}, - --output_path, {outputPath: Service Endpoint URI} + --service-account, {inputValue: Service Account}, + --output-path, {outputPath: Service Endpoint URI} ] diff --git a/components/kubeflow/kfserving/src/app.py b/components/kubeflow/kfserving/src/app.py index 21aa7f0d6..f0eaa71eb 100644 --- a/components/kubeflow/kfserving/src/app.py +++ b/components/kubeflow/kfserving/src/app.py @@ -23,33 +23,36 @@ app = Flask(__name__) CORS(app) -@app.route('/deploy-model', methods=['POST']) +@app.route("/deploy-model", methods=["POST"]) def deploy_model_post(): if 
not request.json: abort(400) - return json.dumps(deploy_model( - action=request.json['action'], - model_name=request.json['model_name'], - default_model_uri=request.json['default_model_uri'], - canary_model_uri=request.json['canary_model_uri'], - canary_model_traffic=request.json['canary_model_traffic'], - namespace=request.json['namespace'], - framework=request.json['framework'], - default_custom_model_spec=request.json['default_custom_model_spec'], - canary_custom_model_spec=request.json['canary_custom_model_spec'], - autoscaling_target=request.json['autoscaling_target'] - )) + return json.dumps( + deploy_model( + action=request.json["action"], + model_name=request.json["model_name"], + default_model_uri=request.json["default_model_uri"], + canary_model_uri=request.json["canary_model_uri"], + canary_model_traffic=request.json["canary_model_traffic"], + namespace=request.json["namespace"], + framework=request.json["framework"], + default_custom_model_spec=request.json["default_custom_model_spec"], + canary_custom_model_spec=request.json["canary_custom_model_spec"], + autoscaling_target=request.json["autoscaling_target"], + service_account=request.json["service_account"], + ) + ) -@app.route('/', methods=['GET']) +@app.route("/", methods=["GET"]) def root_get(): return 200 -@app.route('/', methods=['OPTIONS']) +@app.route("/", methods=["OPTIONS"]) def root_options(): return "200" if __name__ == "__main__": - app.run(debug=True, host='0.0.0.0', port=int(os.environ.get('PORT', 8080))) + app.run(debug=True, host="0.0.0.0", port=int(os.environ.get("PORT", 8080))) diff --git a/components/kubeflow/kfserving/src/kfservingdeployer.py b/components/kubeflow/kfserving/src/kfservingdeployer.py index 4803c85b8..08f8d9cf0 100644 --- a/components/kubeflow/kfserving/src/kfservingdeployer.py +++ b/components/kubeflow/kfserving/src/kfservingdeployer.py @@ -34,103 +34,236 @@ from kfserving import V1alpha2CustomSpec from kfserving import V1alpha2InferenceServiceSpec from kfserving import V1alpha2InferenceService -def EndpointSpec(framework, storage_uri): - if framework == 'tensorflow': - return V1alpha2EndpointSpec(predictor=V1alpha2PredictorSpec(tensorflow=V1alpha2TensorflowSpec(storage_uri=storage_uri))) - elif framework == 'pytorch': - return V1alpha2EndpointSpec(predictor=V1alpha2PredictorSpec(pytorch=V1alpha2PyTorchSpec(storage_uri=storage_uri))) - elif framework == 'sklearn': - return V1alpha2EndpointSpec(predictor=V1alpha2PredictorSpec(sklearn=V1alpha2SKLearnSpec(storage_uri=storage_uri))) - elif framework == 'xgboost': - return V1alpha2EndpointSpec(predictor=V1alpha2PredictorSpec(xgboost=V1alpha2XGBoostSpec(storage_uri=storage_uri))) - elif framework == 'onnx': - return V1alpha2EndpointSpec(predictor=V1alpha2PredictorSpec(onnx=V1alpha2ONNXSpec(storage_uri=storage_uri))) - elif framework == 'tensorrt': - return V1alpha2EndpointSpec(predictor=V1alpha2PredictorSpec(tensorrt=V1alpha2TensorRTSpec(storage_uri=storage_uri))) + +def EndpointSpec(framework, storage_uri, service_account): + if framework == "tensorflow": + return V1alpha2EndpointSpec( + predictor=V1alpha2PredictorSpec( + tensorflow=V1alpha2TensorflowSpec(storage_uri=storage_uri), + service_account_name=service_account, + ) + ) + elif framework == "pytorch": + return V1alpha2EndpointSpec( + predictor=V1alpha2PredictorSpec( + pytorch=V1alpha2PyTorchSpec(storage_uri=storage_uri), + service_account_name=service_account, + ) + ) + elif framework == "sklearn": + return V1alpha2EndpointSpec( + predictor=V1alpha2PredictorSpec( + 
sklearn=V1alpha2SKLearnSpec(storage_uri=storage_uri), + service_account_name=service_account, + ) + ) + elif framework == "xgboost": + return V1alpha2EndpointSpec( + predictor=V1alpha2PredictorSpec( + xgboost=V1alpha2XGBoostSpec(storage_uri=storage_uri), + service_account_name=service_account, + ) + ) + elif framework == "onnx": + return V1alpha2EndpointSpec( + predictor=V1alpha2PredictorSpec( + onnx=V1alpha2ONNXSpec(storage_uri=storage_uri), + service_account_name=service_account, + ) + ) + elif framework == "tensorrt": + return V1alpha2EndpointSpec( + predictor=V1alpha2PredictorSpec( + tensorrt=V1alpha2TensorRTSpec(storage_uri=storage_uri), + service_account_name=service_account, + ) + ) else: - raise("Error: No matching framework: " + framework) + raise ("Error: No matching framework: " + framework) -def customEndpointSpec(custom_model_spec): - env = [client.V1EnvVar(name=i['name'], value=i['value']) for i in custom_model_spec['env']] if custom_model_spec.get('env', '') else None - ports = [client.V1ContainerPort(container_port=int(custom_model_spec.get('port', '')))] if custom_model_spec.get('port', '') else None +def customEndpointSpec(custom_model_spec, service_account): + env = ( + [ + client.V1EnvVar(name=i["name"], value=i["value"]) + for i in custom_model_spec["env"] + ] + if custom_model_spec.get("env", "") + else None + ) + ports = ( + [client.V1ContainerPort(container_port=int(custom_model_spec.get("port", "")))] + if custom_model_spec.get("port", "") + else None + ) containerSpec = client.V1Container( - name=custom_model_spec.get('name', 'custom-container'), - image=custom_model_spec['image'], + name=custom_model_spec.get("name", "custom-container"), + image=custom_model_spec["image"], env=env, ports=ports, - command=custom_model_spec.get('command', None), - args=custom_model_spec.get('args', None), - image_pull_policy=custom_model_spec.get('image_pull_policy', None), - working_dir=custom_model_spec.get('working_dir', None) + command=custom_model_spec.get("command", None), + args=custom_model_spec.get("args", None), + image_pull_policy=custom_model_spec.get("image_pull_policy", None), + working_dir=custom_model_spec.get("working_dir", None), + ) + return V1alpha2EndpointSpec( + predictor=V1alpha2PredictorSpec( + custom=V1alpha2CustomSpec(container=containerSpec), + service_account_name=service_account, + ) ) - return V1alpha2EndpointSpec(predictor=V1alpha2PredictorSpec(custom=V1alpha2CustomSpec(container=containerSpec))) -def InferenceService(metadata, default_model_spec, canary_model_spec=None, canary_model_traffic=None): - return V1alpha2InferenceService(api_version=constants.KFSERVING_GROUP + '/' + constants.KFSERVING_VERSION, - kind=constants.KFSERVING_KIND, - metadata=metadata, - spec=V1alpha2InferenceServiceSpec(default=default_model_spec, - canary=canary_model_spec, - canary_traffic_percent=canary_model_traffic)) +def InferenceService( + metadata, default_model_spec, canary_model_spec=None, canary_model_traffic=None +): + return V1alpha2InferenceService( + api_version=constants.KFSERVING_GROUP + "/" + constants.KFSERVING_VERSION, + kind=constants.KFSERVING_KIND, + metadata=metadata, + spec=V1alpha2InferenceServiceSpec( + default=default_model_spec, + canary=canary_model_spec, + canary_traffic_percent=canary_model_traffic, + ), + ) -def deploy_model(action, model_name, default_model_uri, canary_model_uri, canary_model_traffic, namespace, framework, default_custom_model_spec, canary_custom_model_spec, autoscaling_target=0): +def deploy_model( + action, + model_name, + 
default_model_uri, + canary_model_uri, + canary_model_traffic, + namespace, + framework, + default_custom_model_spec, + canary_custom_model_spec, + service_account, + autoscaling_target=0, +): if int(autoscaling_target) != 0: annotations = {"autoscaling.knative.dev/target": str(autoscaling_target)} else: annotations = None - metadata = client.V1ObjectMeta(name=model_name, namespace=namespace, annotations=annotations) - + metadata = client.V1ObjectMeta( + name=model_name, namespace=namespace, annotations=annotations + ) + # Create Default deployment if default model uri is provided. - if framework != 'custom' and default_model_uri: - default_model_spec = EndpointSpec(framework, default_model_uri) - elif framework == 'custom' and default_custom_model_spec: - default_model_spec = customEndpointSpec(default_custom_model_spec) - + if framework != "custom" and default_model_uri: + default_model_spec = EndpointSpec(framework, default_model_uri, service_account) + elif framework == "custom" and default_custom_model_spec: + default_model_spec = customEndpointSpec( + default_custom_model_spec, service_account + ) + # Create Canary deployment if canary model uri is provided. - if framework != 'custom' and canary_model_uri: - canary_model_spec = EndpointSpec(framework, canary_model_uri) - kfsvc = InferenceService(metadata, default_model_spec, canary_model_spec, canary_model_traffic) - elif framework == 'custom' and canary_custom_model_spec: - canary_model_spec = customEndpointSpec(canary_custom_model_spec) - kfsvc = InferenceService(metadata, default_model_spec, canary_model_spec, canary_model_traffic) + if framework != "custom" and canary_model_uri: + canary_model_spec = EndpointSpec(framework, canary_model_uri, service_account) + kfsvc = InferenceService( + metadata, default_model_spec, canary_model_spec, canary_model_traffic + ) + elif framework == "custom" and canary_custom_model_spec: + canary_model_spec = customEndpointSpec( + canary_custom_model_spec, service_account + ) + kfsvc = InferenceService( + metadata, default_model_spec, canary_model_spec, canary_model_traffic + ) else: kfsvc = InferenceService(metadata, default_model_spec) KFServing = KFServingClient() - if action == 'create': + if action == "create": KFServing.create(kfsvc, watch=True, timeout_seconds=120) - elif action == 'update': + elif action == "update": KFServing.patch(model_name, kfsvc) - elif action == 'rollout': - KFServing.rollout_canary(model_name, canary=canary_model_spec, percent=canary_model_traffic, namespace=namespace, watch=True, timeout_seconds=120) - elif action == 'promote': - KFServing.promote(model_name, namespace=namespace, watch=True, timeout_seconds=120) - elif action == 'delete': + elif action == "rollout": + KFServing.rollout_canary( + model_name, + canary=canary_model_spec, + percent=canary_model_traffic, + namespace=namespace, + watch=True, + timeout_seconds=120, + ) + elif action == "promote": + KFServing.promote( + model_name, namespace=namespace, watch=True, timeout_seconds=120 + ) + elif action == "delete": KFServing.delete(model_name, namespace=namespace) else: - raise("Error: No matching action: " + action) + raise ("Error: No matching action: " + action) model_status = KFServing.get(model_name, namespace=namespace) return model_status + if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument('--action', type=str, help='Action to execute on KFServing', default='create') - parser.add_argument('--model-name', type=str, help='Name to give to the deployed model', default="") 
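The `kfservingdeployer.py` changes above thread the new service account through every predictor spec via `service_account_name`, so models stored in private S3/GCS buckets can be pulled with credentials bound to that account. A minimal sketch of the TensorFlow branch of `EndpointSpec` as reworked here; the storage URI and account name are placeholders normally supplied through `--default-model-uri` and the new `--service-account` flag:

```python
from kfserving import (V1alpha2EndpointSpec, V1alpha2PredictorSpec,
                       V1alpha2TensorflowSpec)

# Placeholder values for illustration only.
storage_uri = 'gs://my-bucket/models/flowers'
service_account = 'models-reader'

default_model_spec = V1alpha2EndpointSpec(
    predictor=V1alpha2PredictorSpec(
        # The framework-specific spec points at the model artifacts...
        tensorflow=V1alpha2TensorflowSpec(storage_uri=storage_uri),
        # ...and the predictor now also names the service account used to fetch them.
        service_account_name=service_account,
    )
)
```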
- parser.add_argument('--default-model-uri', type=str, help='Path of the S3, GCS or PVC directory containing default model.') - parser.add_argument('--canary-model-uri', type=str, help='Optional path of the S3, GCS or PVC directory containing canary model.', default="") - parser.add_argument('--canary-model-traffic', type=str, help='Optional Traffic to be sent to the default model', default='0') - parser.add_argument('--namespace', type=str, help='Kubernetes namespace where the KFServing service is deployed.', default='kubeflow') - parser.add_argument('--framework', type=str, help='Model Serving Framework', default='tensorflow') - parser.add_argument('--default-custom-model-spec', type=json.loads, help='Custom runtime default custom model container spec', default={}) - parser.add_argument('--canary-custom-model-spec', type=json.loads, help='Custom runtime canary custom model container spec', default={}) - parser.add_argument('--kfserving-endpoint', type=str, help='kfserving remote deployer api endpoint', default='') - parser.add_argument('--autoscaling-target', type=str, help='Autoscaling target number', default='0') - parser.add_argument('--output_path', type=str, help='Path to store URI output') + parser.add_argument( + "--action", type=str, help="Action to execute on KFServing", default="create" + ) + parser.add_argument( + "--model-name", type=str, help="Name to give to the deployed model", default="" + ) + parser.add_argument( + "--default-model-uri", + type=str, + help="Path of the S3, GCS or PVC directory containing default model.", + ) + parser.add_argument( + "--canary-model-uri", + type=str, + help="Optional path of the S3, GCS or PVC directory containing canary model.", + default="", + ) + parser.add_argument( + "--canary-model-traffic", + type=str, + help="Optional Traffic to be sent to the default model", + default="0", + ) + parser.add_argument( + "--namespace", + type=str, + help="Kubernetes namespace where the KFServing service is deployed.", + default="kubeflow", + ) + parser.add_argument( + "--framework", type=str, help="Model Serving Framework", default="tensorflow" + ) + parser.add_argument( + "--default-custom-model-spec", + type=json.loads, + help="Custom runtime default custom model container spec", + default={}, + ) + parser.add_argument( + "--canary-custom-model-spec", + type=json.loads, + help="Custom runtime canary custom model container spec", + default={}, + ) + parser.add_argument( + "--kfserving-endpoint", + type=str, + help="kfserving remote deployer api endpoint", + default="", + ) + parser.add_argument( + "--autoscaling-target", type=str, help="Autoscaling target number", default="0" + ) + parser.add_argument( + "--service-account", + type=str, + help="Service account containing s3 credentials", + default="", + ) + parser.add_argument("--output-path", type=str, help="Path to store URI output") args = parser.parse_args() url = re.compile(r"https?://") @@ -145,8 +278,9 @@ if __name__ == "__main__": output_path = args.output_path default_custom_model_spec = args.default_custom_model_spec canary_custom_model_spec = args.canary_custom_model_spec - kfserving_endpoint = url.sub('', args.kfserving_endpoint) + kfserving_endpoint = url.sub("", args.kfserving_endpoint) autoscaling_target = int(args.autoscaling_target) + service_account = args.service_account if kfserving_endpoint: formData = { @@ -159,9 +293,12 @@ if __name__ == "__main__": "framework": framework, "default_custom_model_spec": default_custom_model_spec, "canary_custom_model_spec": 
canary_custom_model_spec, - "autoscaling_target": autoscaling_target - } - response = requests.post("http://" + kfserving_endpoint + "/deploy-model", json=formData) + "autoscaling_target": autoscaling_target, + "service_account": service_account, + } + response = requests.post( + "http://" + kfserving_endpoint + "/deploy-model", json=formData + ) model_status = response.json() else: model_status = deploy_model( @@ -174,19 +311,29 @@ if __name__ == "__main__": framework=framework, default_custom_model_spec=default_custom_model_spec, canary_custom_model_spec=canary_custom_model_spec, - autoscaling_target=autoscaling_target + autoscaling_target=autoscaling_target, + service_account=service_account, ) print(model_status) try: - print(model_status['status']['url'] + ' is the knative domain header. $ISTIO_INGRESS_ENDPOINT are defined in the below commands') - print('Sample test commands: ') - print('# Note: If Istio Ingress gateway is not served with LoadBalancer, use $CLUSTER_NODE_IP:31380 as the ISTIO_INGRESS_ENDPOINT') - print('ISTIO_INGRESS_ENDPOINT=$(kubectl -n istio-system get service istio-ingressgateway -o jsonpath=\'{.status.loadBalancer.ingress[0].ip}\')') + print( + model_status["status"]["url"] + + " is the knative domain header. $ISTIO_INGRESS_ENDPOINT are defined in the below commands" + ) + print("Sample test commands: ") + print( + "# Note: If Istio Ingress gateway is not served with LoadBalancer, use $CLUSTER_NODE_IP:31380 as the ISTIO_INGRESS_ENDPOINT" + ) + print( + "ISTIO_INGRESS_ENDPOINT=$(kubectl -n istio-system get service istio-ingressgateway -o jsonpath='{.status.loadBalancer.ingress[0].ip}')" + ) # model_status['status']['url'] is like http://flowers-sample.kubeflow.example.com/v1/models/flowers-sample - host, path = url.sub('', model_status['status']['url']).split("/", 1) - print('curl -X GET -H "Host: ' + host + '" http://$ISTIO_INGRESS_ENDPOINT/' + path) + host, path = url.sub("", model_status["status"]["url"]).split("/", 1) + print( + 'curl -X GET -H "Host: ' + host + '" http://$ISTIO_INGRESS_ENDPOINT/' + path + ) except: - print('Model is not ready, check the logs for the Knative URL status.') + print("Model is not ready, check the logs for the Knative URL status.") if not os.path.exists(os.path.dirname(output_path)): os.makedirs(os.path.dirname(output_path)) with open(output_path, "w") as report: diff --git a/components/local/confusion_matrix/component.yaml b/components/local/confusion_matrix/component.yaml index 022819575..441f9620e 100644 --- a/components/local/confusion_matrix/component.yaml +++ b/components/local/confusion_matrix/component.yaml @@ -9,7 +9,7 @@ outputs: - {name: MLPipeline Metrics, type: Metrics} implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:ad9bd5648dd0453005225779f25d8cebebc7ca00 + image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:1.0.0 command: [python2, /ml/confusion_matrix.py] args: [ --predictions, {inputValue: Predictions}, diff --git a/components/local/roc/component.yaml b/components/local/roc/component.yaml index c5d622197..2bed42d3d 100644 --- a/components/local/roc/component.yaml +++ b/components/local/roc/component.yaml @@ -11,7 +11,7 @@ outputs: - {name: MLPipeline Metrics, type: Metrics} implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:ad9bd5648dd0453005225779f25d8cebebc7ca00 + image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:1.0.0 command: [python2, /ml/roc.py] args: [ --predictions, {inputValue: Predictions dir}, diff 
--git a/components/pipeline_component_repository.yaml b/components/pipeline_component_repository.yaml new file mode 100644 index 000000000..f1a102c84 --- /dev/null +++ b/components/pipeline_component_repository.yaml @@ -0,0 +1,2 @@ +# A marker file that marks the location of a repository of Kubeflow Pipelines components +# This marker file makes it easier to find public repositories online diff --git a/components/release-in-place.sh b/components/release-in-place.sh new file mode 100755 index 000000000..bbd1fe9f6 --- /dev/null +++ b/components/release-in-place.sh @@ -0,0 +1,76 @@ +#!/bin/bash +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This script automates the process to update the component images. +# To run it, find a good release candidate commit SHA from ml-pipeline-test project, +# and provide a full github COMMIT SHA to the script. E.g. +# ./update-for-release.sh 2118baf752d3d30a8e43141165e13573b20d85b8 +# The script copies the images from test to prod, and updates the local code. + +set -xe + +images=( + "ml-pipeline-kubeflow-deployer" + "ml-pipeline-kubeflow-tf-trainer" + "ml-pipeline-kubeflow-tf-trainer-gpu" + "ml-pipeline-kubeflow-tfjob" + "ml-pipeline-dataproc-analyze" + "ml-pipeline-dataproc-create-cluster" + "ml-pipeline-dataproc-delete-cluster" + "ml-pipeline-dataproc-predict" + "ml-pipeline-dataproc-transform" + "ml-pipeline-dataproc-train" + "ml-pipeline-local-confusion-matrix" + "ml-pipeline-local-roc" + "ml-pipeline-gcp" +) + +TAG_NAME=$1 +FROM_GCR_PREFIX='gcr.io/ml-pipeline-test/' +TO_GCR_PREFIX='gcr.io/ml-pipeline/' +DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null && pwd)" + +if [ -z "$TAG_NAME" ]; then + echo "Usage: release.sh " >&2 + exit 1 +fi + +# KFP repo root +pushd "$DIR/.." + +# Update setup.py VERSION +sed -i.bak -e "s|VERSION =.\+'|VERSION = '${TAG_NAME}'|g" "components/gcp/container/component_sdk/python/setup.py" + +# Updating components and samples. +for image in "${images[@]}" +do + TARGET_IMAGE_BASE=${TO_GCR_PREFIX}${image} + TARGET_IMAGE=${TARGET_IMAGE_BASE}:${TAG_NAME} + + # Update the code + find components samples -type f | while read file; do + sed -i -e "s|${TARGET_IMAGE_BASE}:\([a-zA-Z0-9_.-]\)\+|${TARGET_IMAGE}|g" "$file" + done +done + +# Updating the samples to use the updated components +git diff --name-only | while read component_file; do + echo $component_file + find components samples -type f | while read file; do + sed -i -E "s|(https://raw.githubusercontent.com/kubeflow/pipelines/)[^/]+(/$component_file)|\1${TAG_NAME}\2|g" "$file"; + done +done +popd diff --git a/components/release.sh b/components/release.sh old mode 100755 new mode 100644 diff --git a/components/test_load_all_components.sh b/components/test_load_all_components.sh index 5e733869c..0517db323 100755 --- a/components/test_load_all_components.sh +++ b/components/test_load_all_components.sh @@ -26,10 +26,11 @@ cd "$(dirname "$0")" PYTHONPATH="$PYTHONPATH:../sdk/python" echo "Testing loading all components" -python3 -c ' +find . 
-name component.yaml | python3 -c ' import sys import kfp -for component_file in sys.argv[1:]: +for component_file in sys.stdin: + component_file = component_file.rstrip("\n") print(component_file) kfp.components.load_component_from_file(component_file) -' $(find . -name component.yaml) +' diff --git a/components/third_party_licenses.csv b/components/third_party_licenses.csv index 6977f8ac8..c819b818b 100644 --- a/components/third_party_licenses.csv +++ b/components/third_party_licenses.csv @@ -40,7 +40,7 @@ pexpect,https://raw.githubusercontent.com/pexpect/pexpect/ab7d99a670794fc2b03654 setuptools,https://raw.githubusercontent.com/pypa/setuptools/master/LICENSE,MIT tornado,https://raw.githubusercontent.com/tornadoweb/tornado/stable/LICENSE,Apache Software License 2.0 traitlets,https://raw.githubusercontent.com/ipython/traitlets/master/COPYING.md,BSD -subprocess32,https://raw.githubusercontent.com/google/python-subprocess32/master/LICENSE,Python Software Foundation License +subprocess32,https://raw.githubusercontent.com/google/python-subprocess32/main/LICENSE,Python Software Foundation License jinja2,https://raw.githubusercontent.com/pallets/jinja/master/LICENSE.rst,BSD html5lib,https://raw.githubusercontent.com/html5lib/html5lib-python/master/LICENSE,MIT opencv-python,https://raw.githubusercontent.com/skvark/opencv-python/master/LICENSE.txt,MIT @@ -101,7 +101,7 @@ google-gax,https://raw.githubusercontent.com/googleapis/gax-python/master/LICENS googleapis-common-protos,https://raw.githubusercontent.com/googleapis/api-common-protos/master/LICENSE,Apache 2.0 googledatastore,https://raw.githubusercontent.com/GoogleCloudPlatform/google-cloud-datastore/master/LICENSE,Apache 2.0 hdfs,https://github.com/mtth/hdfs/blob/master/LICENSE,MIT -idna,https://raw.githubusercontent.com/kjd/idna/master/LICENSE.rst,BSD-like +idna,https://raw.githubusercontent.com/kjd/idna/master/LICENSE.md,BSD-3 Markdown,https://raw.githubusercontent.com/Python-Markdown/markdown/master/LICENSE.md,BSD monotonic,https://raw.githubusercontent.com/atdt/monotonic/master/LICENSE,Apache 2.0 oauth2client,https://raw.githubusercontent.com/google/oauth2client/master/LICENSE,Apache 2.0 @@ -167,7 +167,7 @@ simplegeneric,https://opensource.org/licenses/ZPL-2.0,ZPL 2.1 singledispatch,https://opensource.org/licenses/MIT,MIT tensorflow-model-analysis,https://github.com/tensorflow/model-analysis/blob/master/LICENSE,Apache 2.0 terminado,https://raw.githubusercontent.com/jupyter/terminado/master/LICENSE,BSD -wcwidth,https://raw.githubusercontent.com/jquast/wcwidth/master/LICENSE.txt,MIT +wcwidth,https://raw.githubusercontent.com/jquast/wcwidth/master/LICENSE,MIT widgetsnbextension,https://raw.githubusercontent.com/jupyter-widgets/ipywidgets/master/widgetsnbextension/LICENSE,BSD pandas,https://raw.githubusercontent.com/pandas-dev/pandas/master/LICENSE,3-Clause BSD scikit-learn,https://raw.githubusercontent.com/scikit-learn/scikit-learn/master/COPYING,BSD diff --git a/docs/source/kfp.client.rst b/docs/source/kfp.client.rst index a81b9538b..d979c353c 100644 --- a/docs/source/kfp.client.rst +++ b/docs/source/kfp.client.rst @@ -5,3 +5,11 @@ kfp.Client class :members: :undoc-members: :show-inheritance: + +Generated APIs +------------------ + +.. 
toctree:: + :maxdepth: 2 + + kfp.server_api diff --git a/docs/source/kfp.dsl.rst b/docs/source/kfp.dsl.rst index 0a20e8484..7dbc94d4d 100644 --- a/docs/source/kfp.dsl.rst +++ b/docs/source/kfp.dsl.rst @@ -8,6 +8,10 @@ kfp.dsl package :imported-members: :exclude-members: Pipeline, OpsGroup, match_serialized_pipelineparam + .. py:data:: RUN_ID_PLACEHOLDER + + .. py:data:: EXECUTION_ID_PLACEHOLDER + .. toctree:: diff --git a/docs/source/kfp.server_api.rst b/docs/source/kfp.server_api.rst new file mode 100644 index 000000000..87cc58302 --- /dev/null +++ b/docs/source/kfp.server_api.rst @@ -0,0 +1,48 @@ +kfp.Client().runs +----------------------------- + +.. autoclass:: kfp_server_api.api.run_service_api.RunServiceApi + :members: + :undoc-members: + :show-inheritance: + +kfp.Client().pipelines +----------------------------- + +.. autoclass:: kfp_server_api.api.pipeline_service_api.PipelineServiceApi + :members: + :undoc-members: + :show-inheritance: + +kfp.Client().experiments +----------------------------- + +.. autoclass:: kfp_server_api.api.experiment_service_api.ExperimentServiceApi + :members: + :undoc-members: + :show-inheritance: + +kfp.Client().jobs +----------------------------- + +.. autoclass:: kfp_server_api.api.job_service_api.JobServiceApi + :members: + :undoc-members: + :show-inheritance: + +kfp.Client().pipeline_uploads +----------------------------- + +.. autoclass:: kfp_server_api.api.pipeline_upload_service_api.PipelineUploadServiceApi + :members: + :undoc-members: + :show-inheritance: + +kfp.Client().models +----------------------------- + +.. automodule:: kfp_server_api.models + :members: + :undoc-members: + :imported-members: + diff --git a/frontend/global-setup.js b/frontend/global-setup.js new file mode 100644 index 000000000..0932e9d86 --- /dev/null +++ b/frontend/global-setup.js @@ -0,0 +1,6 @@ +export default () => { + // This lets unit tests run in UTC timezone consistently, despite developers' + // dev machine's timezone. 
+ // Reference: https://stackoverflow.com/a/56482581 + process.env.TZ = 'UTC'; +}; diff --git a/frontend/mock-backend/mock-api-middleware.ts b/frontend/mock-backend/mock-api-middleware.ts index c05f205b1..176929502 100644 --- a/frontend/mock-backend/mock-api-middleware.ts +++ b/frontend/mock-backend/mock-api-middleware.ts @@ -589,8 +589,12 @@ export default (app: express.Application) => { app.get('/k8s/pod/logs', (req, res) => { const podName = decodeURIComponent(req.query.podname); + if (podName === 'json-12abc') { + res.status(404).send('pod not found'); + return; + } if (podName === 'coinflip-recursive-q7dqb-3721646052') { - res.status(404).send('Failed to retrieve log'); + res.status(500).send('Failed to retrieve log'); return; } const shortLog = fs.readFileSync('./mock-backend/shortlog.txt', 'utf-8'); diff --git a/frontend/package-lock.json b/frontend/package-lock.json index b8a8d373a..ac5f6b4bd 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -2190,9 +2190,9 @@ }, "dependencies": { "google-protobuf": { - "version": "3.11.4", - "resolved": "https://registry.npmjs.org/google-protobuf/-/google-protobuf-3.11.4.tgz", - "integrity": "sha512-lL6b04rDirurUBOgsY2+LalI6Evq8eH5TcNzi7TYQ3BsIWelT0KSOQSBsXuavEkNf+odQU6c0lgz3UsZXeNX9Q==" + "version": "3.12.2", + "resolved": "https://registry.npmjs.org/google-protobuf/-/google-protobuf-3.12.2.tgz", + "integrity": "sha512-4CZhpuRr1d6HjlyrxoXoocoGFnRYgKULgMtikMddA9ztRyYR59Aondv2FioyxWVamRo0rF2XpYawkTCBEQOSkA==" } } }, @@ -10194,9 +10194,9 @@ "dev": true }, "grpc-web": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/grpc-web/-/grpc-web-1.0.7.tgz", - "integrity": "sha512-Fkbz1nyvvt6GC6ODcxh9Fen6LLB3OTCgGHzHwM2Eni44SUhzqPz1UQgFp9sfBEfInOhx3yBdwo9ZLjZAmJ+TtA==" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/grpc-web/-/grpc-web-1.1.0.tgz", + "integrity": "sha512-oPoS4/E/EO0TA2ZOSf3AxV2AbWDeabwfbAo+8oXNenOw87RmKz4hME8Sy4KDu2dUnqK8cuGfzdQlJPAEQEygNQ==" }, "gtoken": { "version": "4.1.3", @@ -19586,7 +19586,7 @@ "dependencies": { "node-notifier": { "version": "4.6.1", - "resolved": "http://registry.npmjs.org/node-notifier/-/node-notifier-4.6.1.tgz", + "resolved": "https://registry.npmjs.org/node-notifier/-/node-notifier-4.6.1.tgz", "integrity": "sha1-BW0UJE89zBzq3+aK+c/wxUc6M/M=", "dev": true, "requires": { @@ -20786,9 +20786,9 @@ } }, "websocket-extensions": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.3.tgz", - "integrity": "sha512-nqHUnMXmBzT0w570r2JpJxfiSD1IzoI+HGVdd3aZ0yNi3ngvQ4jv1dtHt5VGxfI2yj5yqImPhOK4vmIh2xMbGg==", + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.4.tgz", + "integrity": "sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg==", "dev": true }, "whatwg-encoding": { diff --git a/frontend/package.json b/frontend/package.json index 15960761b..1fe182a67 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -110,6 +110,7 @@ "!src/index.tsx", "!src/CSSReset.tsx" ], + "globalSetup": "./global-setup.js", "snapshotSerializers": [ "./src/__serializers__/mock-function", "snapshot-diff/serializer.js", diff --git a/frontend/server/handlers/pod-logs.ts b/frontend/server/handlers/pod-logs.ts index 8a49a9d4d..d9e6a6f01 100644 --- a/frontend/server/handlers/pod-logs.ts +++ b/frontend/server/handlers/pod-logs.ts @@ -62,7 +62,7 @@ export function getPodLogsHandler( return async (req, res) => { if 
(!req.query.podname) { - res.status(404).send('podname argument is required'); + res.status(400).send('podname argument is required'); return; } const podName = decodeURIComponent(req.query.podname); @@ -73,7 +73,16 @@ export function getPodLogsHandler( try { const stream = await getPodLogsStream(podName, podNamespace); - stream.on('error', err => res.status(500).send('Could not get main container logs: ' + err)); + stream.on('error', err => { + if ( + err?.message && + err.message?.indexOf('Unable to find pod log archive information') > -1 + ) { + res.status(404).send('pod not found'); + } else { + res.status(500).send('Could not get main container logs: ' + err); + } + }); stream.on('end', () => res.end()); stream.pipe(res); } catch (err) { diff --git a/frontend/src/Css.tsx b/frontend/src/Css.tsx index 4ba877533..37c2d0522 100644 --- a/frontend/src/Css.tsx +++ b/frontend/src/Css.tsx @@ -43,6 +43,8 @@ export const color = { themeDarker: '#0b59dc', warningBg: '#f9f9e1', warningText: '#ee8100', + infoBg: '#f3f4ff', + infoText: '#1a73e8', weak: '#9aa0a6', }; diff --git a/frontend/src/components/ArtifactLink.tsx b/frontend/src/components/ArtifactLink.tsx index 6f3ab9b55..e3e9be2c2 100644 --- a/frontend/src/components/ArtifactLink.tsx +++ b/frontend/src/components/ArtifactLink.tsx @@ -1,5 +1,9 @@ import * as React from 'react'; -import { generateGcsConsoleUri, generateMinioArtifactUrl } from '../lib/Utils'; +import { + generateGcsConsoleUri, + generateS3ArtifactUrl, + generateMinioArtifactUrl, +} from '../lib/Utils'; /** * A component that renders an artifact URL as clickable link if URL is correct @@ -12,6 +16,9 @@ export const ArtifactLink: React.FC<{ artifactUri?: string }> = ({ artifactUri } if (gcsConsoleUrl) { clickableUrl = gcsConsoleUrl; } + } + if (artifactUri.startsWith('s3:')) { + clickableUrl = generateS3ArtifactUrl(artifactUri); } else if (artifactUri.startsWith('http:') || artifactUri.startsWith('https:')) { clickableUrl = artifactUri; } else if (artifactUri.startsWith('minio:')) { diff --git a/frontend/src/components/Banner.test.tsx b/frontend/src/components/Banner.test.tsx index b682c3c89..739ba62fc 100644 --- a/frontend/src/components/Banner.test.tsx +++ b/frontend/src/components/Banner.test.tsx @@ -35,6 +35,11 @@ describe('Banner', () => { expect(tree).toMatchSnapshot(); }); + it('uses info mode when instructed', () => { + const tree = shallow(); + expect(tree).toMatchSnapshot(); + }); + it('shows "Details" button and has dialog when there is additional info', () => { const tree = shallow(); expect(tree).toMatchSnapshot(); @@ -52,8 +57,23 @@ describe('Banner', () => { expect(tree).toMatchSnapshot(); }); + it('does not show "Refresh" button if mode is "info"', () => { + const tree = shallow( + { + /* do nothing */ + }} + />, + ); + expect(tree.findWhere(el => el.text() === 'Refresh').exists()).toEqual(false); + }); + it('shows troubleshooting link instructed by prop', () => { - const tree = shallow(); + const tree = shallow( + , + ); expect(tree).toMatchInlineSnapshot(`
{ `); }); + it('does not show troubleshooting link if warning', () => { + const tree = shallow( + , + ); + expect(tree.findWhere(el => el.text() === 'Troubleshooting guide').exists()).toEqual(false); + }); + it('opens details dialog when button is clicked', () => { const tree = shallow(); tree diff --git a/frontend/src/components/Banner.tsx b/frontend/src/components/Banner.tsx index 34e60c7ac..6c8a39eb2 100644 --- a/frontend/src/components/Banner.tsx +++ b/frontend/src/components/Banner.tsx @@ -94,6 +94,8 @@ class Banner extends React.Component { }); let bannerIcon = ; let dialogTitle = 'An error occurred'; + let showTroubleshootingGuideLink = false; + let showRefreshButton = true; switch (this.props.mode) { case 'error': @@ -102,6 +104,7 @@ class Banner extends React.Component { }); bannerIcon = ; dialogTitle = 'An error occurred'; + showTroubleshootingGuideLink = this.props.showTroubleshootingGuideLink || false; break; case 'warning': bannerModeCss = stylesheet({ @@ -112,10 +115,11 @@ class Banner extends React.Component { break; case 'info': bannerModeCss = stylesheet({ - mode: { backgroundColor: color.background, color: color.success }, + mode: { backgroundColor: color.infoBg, color: color.infoText }, }); bannerIcon = ; dialogTitle = 'Info'; + showRefreshButton = false; break; default: // Already set defaults above. diff --git a/frontend/src/components/ExperimentList.tsx b/frontend/src/components/ExperimentList.tsx index 330e4488a..8823ec9b7 100644 --- a/frontend/src/components/ExperimentList.tsx +++ b/frontend/src/components/ExperimentList.tsx @@ -177,7 +177,7 @@ export class ExperimentList extends React.PureComponent null} {...this.props} - disablePaging={true} + disablePaging={false} noFilterBox={true} storageState={ this.props.storageState === ExperimentStorageState.ARCHIVED diff --git a/frontend/src/components/PodYaml.test.tsx b/frontend/src/components/PodYaml.test.tsx index 342003d8b..1736fa713 100644 --- a/frontend/src/components/PodYaml.test.tsx +++ b/frontend/src/components/PodYaml.test.tsx @@ -84,7 +84,7 @@ describe('PodInfo', () => { const { getByText } = render(); await act(TestUtils.flushPromises); getByText( - 'Warning: failed to retrieve pod info. Possible reasons include cluster autoscaling, pod preemption or pod cleaned up by time to live configuration', + 'Failed to retrieve pod info. Possible reasons include cluster autoscaling, pod preemption or pod cleaned up by time to live configuration', ); }); @@ -162,7 +162,7 @@ describe('PodEvents', () => { const { getByText } = render(); await act(TestUtils.flushPromises); getByText( - 'Warning: failed to retrieve pod events. Possible reasons include cluster autoscaling, pod preemption or pod cleaned up by time to live configuration', + 'Failed to retrieve pod events. 
Possible reasons include cluster autoscaling, pod preemption or pod cleaned up by time to live configuration', ); }); }); diff --git a/frontend/src/components/PodYaml.tsx b/frontend/src/components/PodYaml.tsx index 03a832911..27a005149 100644 --- a/frontend/src/components/PodYaml.tsx +++ b/frontend/src/components/PodYaml.tsx @@ -14,7 +14,7 @@ export const PodInfo: React.FC<{ name: string; namespace: string }> = ({ name, n ); @@ -32,7 +32,7 @@ export const PodEvents: React.FC<{ ); diff --git a/frontend/src/components/Trigger.test.tsx b/frontend/src/components/Trigger.test.tsx index b72ab6e2a..9a001b65a 100644 --- a/frontend/src/components/Trigger.test.tsx +++ b/frontend/src/components/Trigger.test.tsx @@ -30,20 +30,31 @@ const PERIODIC_DEFAULT = { }; const CRON_DEFAULT = { cron: '0 * * * * ?', end_time: undefined, start_time: undefined }; +beforeAll(() => { + process.env.TZ = 'UTC'; +}); + describe('Trigger', () => { // tslint:disable-next-line:variable-name const RealDate = Date; function mockDate(isoDate: any): void { (global as any).Date = class extends RealDate { - constructor() { + constructor(...args: any[]) { super(); - return new RealDate(isoDate); + if (args.length === 0) { + // Use mocked date when calling new Date() + return new RealDate(isoDate); + } else { + // Otherwise, use real Date constructor + return new (RealDate as any)(...args); + } } }; } - const testDate = new Date(2018, 11, 21, 7, 53); - mockDate(testDate); + const now = new Date(2018, 11, 21, 7, 53); + mockDate(now); + const oneWeekLater = new Date(2018, 11, 28, 7, 53); it('renders periodic schedule controls for initial render', () => { const tree = shallow(); @@ -113,7 +124,7 @@ describe('Trigger', () => { expect(spy).toHaveBeenLastCalledWith({ ...PARAMS_DEFAULT, trigger: { - periodic_schedule: { ...PERIODIC_DEFAULT, start_time: testDate }, + periodic_schedule: { ...PERIODIC_DEFAULT, start_time: now }, }, }); }); @@ -128,7 +139,7 @@ describe('Trigger', () => { target: { type: 'checkbox', checked: true }, }); (tree.instance() as Trigger).handleChange('startDate')({ target: { value: '2018-11-23' } }); - (tree.instance() as Trigger).handleChange('endTime')({ target: { value: '08:35' } }); + (tree.instance() as Trigger).handleChange('startTime')({ target: { value: '08:35' } }); expect(spy).toHaveBeenLastCalledWith({ ...PARAMS_DEFAULT, trigger: { @@ -193,7 +204,7 @@ describe('Trigger', () => { expect(spy).toHaveBeenLastCalledWith({ ...PARAMS_DEFAULT, trigger: { - periodic_schedule: { ...PERIODIC_DEFAULT, end_time: testDate, start_time: testDate }, + periodic_schedule: { ...PERIODIC_DEFAULT, end_time: oneWeekLater, start_time: now }, }, }); }); @@ -292,6 +303,38 @@ describe('Trigger', () => { }, }); }); + + it('inits with cloned initial props', () => { + const spy = jest.fn(); + const startTime = new Date('2020-01-01T23:53:00.000Z'); + shallow( + , + ); + expect(spy).toHaveBeenCalledTimes(1); + expect(spy).toHaveBeenLastCalledWith({ + catchup: false, + maxConcurrentRuns: '3', + trigger: { + periodic_schedule: { + end_time: undefined, + interval_second: '10800', + start_time: startTime, + }, + }, + }); + }); }); describe('cron', () => { @@ -318,7 +361,7 @@ describe('Trigger', () => { expect(spy).toHaveBeenLastCalledWith({ ...PARAMS_DEFAULT, trigger: { - cron_schedule: { ...CRON_DEFAULT, start_time: testDate }, + cron_schedule: { ...CRON_DEFAULT, start_time: new Date('2018-03-23T07:53:00.000Z') }, }, }); }); @@ -336,7 +379,7 @@ describe('Trigger', () => { expect(spy).toHaveBeenLastCalledWith({ ...PARAMS_DEFAULT, trigger: 
{ - cron_schedule: { ...CRON_DEFAULT, end_time: testDate, cron: '0 0 0 * * ?' }, + cron_schedule: { ...CRON_DEFAULT, end_time: oneWeekLater, cron: '0 0 0 * * ?' }, }, }); }); @@ -384,5 +427,39 @@ describe('Trigger', () => { }, }); }); + + it('inits with cloned initial props', () => { + const spy = jest.fn(); + const startTime = new Date('2020-01-01T00:00:00.000Z'); + const endTime = new Date('2020-01-02T01:02:00.000Z'); + shallow( + , + ); + expect(spy).toHaveBeenCalledTimes(1); + expect(spy).toHaveBeenLastCalledWith({ + catchup: true, + maxConcurrentRuns: '4', + trigger: { + cron_schedule: { + cron: '0 0 0 ? * 1,5,6', + start_time: startTime, + end_time: endTime, + }, + }, + }); + }); }); }); diff --git a/frontend/src/components/Trigger.tsx b/frontend/src/components/Trigger.tsx index fbfaeaae0..93b52d34e 100644 --- a/frontend/src/components/Trigger.tsx +++ b/frontend/src/components/Trigger.tsx @@ -33,9 +33,19 @@ import { pickersToDate, triggers, TriggerType, + parseTrigger, + ParsedTrigger, } from '../lib/TriggerUtils'; +import { logger } from 'src/lib/Utils'; + +type TriggerInitialProps = { + maxConcurrentRuns?: string; + catchup?: boolean; + trigger?: ApiTrigger; +}; interface TriggerProps { + initialProps?: TriggerInitialProps; onChange?: (config: { trigger?: ApiTrigger; maxConcurrentRuns?: string; @@ -67,33 +77,48 @@ const css = stylesheet({ }); export default class Trigger extends React.Component { - public state = (() => { - const now = new Date(); - const inAWeek = new Date( - now.getFullYear(), - now.getMonth(), - now.getDate() + 7, - now.getHours(), - now.getMinutes(), - ); - const [startDate, startTime] = dateToPickerFormat(now); - const [endDate, endTime] = dateToPickerFormat(inAWeek); + public state: TriggerState = (() => { + const { maxConcurrentRuns, catchup, trigger } = + this.props.initialProps || ({} as TriggerInitialProps); + let parsedTrigger: Partial = {}; + try { + if (trigger) { + parsedTrigger = parseTrigger(trigger); + } + } catch (err) { + logger.warn('Failed to parse original trigger: ', trigger); + logger.warn(err); + } + const startDateTime = parsedTrigger.startDateTime ?? new Date(); + const endDateTime = + parsedTrigger.endDateTime ?? + new Date( + startDateTime.getFullYear(), + startDateTime.getMonth(), + startDateTime.getDate() + 7, + startDateTime.getHours(), + startDateTime.getMinutes(), + ); + const [startDate, startTime] = dateToPickerFormat(startDateTime); + const [endDate, endTime] = dateToPickerFormat(endDateTime); return { - catchup: true, - cron: '', - editCron: false, + catchup: catchup ?? true, + maxConcurrentRuns: maxConcurrentRuns || '10', + hasEndDate: !!parsedTrigger?.endDateTime, endDate, endTime, - hasEndDate: false, - hasStartDate: false, - intervalCategory: PeriodicInterval.MINUTE, - intervalValue: 1, - maxConcurrentRuns: '10', - selectedDays: new Array(7).fill(true), + hasStartDate: !!parsedTrigger?.startDateTime, startDate, startTime, - type: TriggerType.INTERVALED, + selectedDays: new Array(7).fill(true), + type: parsedTrigger.type ?? TriggerType.INTERVALED, + // cron state + editCron: parsedTrigger.type === TriggerType.CRON, + cron: parsedTrigger.cron || '', + // interval state + intervalCategory: parsedTrigger.intervalCategory ?? PeriodicInterval.MINUTE, + intervalValue: parsedTrigger.intervalValue ?? 
1, }; })(); diff --git a/frontend/src/components/__snapshots__/Banner.test.tsx.snap b/frontend/src/components/__snapshots__/Banner.test.tsx.snap index 8dc20fcf3..17150b82d 100644 --- a/frontend/src/components/__snapshots__/Banner.test.tsx.snap +++ b/frontend/src/components/__snapshots__/Banner.test.tsx.snap @@ -107,6 +107,24 @@ exports[`Banner uses error mode when instructed 1`] = `
`; +exports[`Banner uses info mode when instructed 1`] = ` +
+
+ + Some message +
+
+
+`; + exports[`Banner uses warning mode when instructed 1`] = `
@@ -459,7 +459,7 @@ exports[`Trigger renders all week days enabled 1`] = ` } } type="date" - value="2018-12-21" + value="2018-12-28" variant="outlined" width={160} /> @@ -800,7 +800,7 @@ exports[`Trigger renders periodic schedule controls for initial render 1`] = ` } } type="date" - value="2018-12-21" + value="2018-12-28" variant="outlined" width={160} /> @@ -1039,7 +1039,7 @@ exports[`Trigger renders periodic schedule controls if the trigger type is CRON } } type="date" - value="2018-12-21" + value="2018-12-28" variant="outlined" width={160} /> @@ -1301,7 +1301,7 @@ exports[`Trigger renders week days if the trigger type is CRON and interval is w } } type="date" - value="2018-12-21" + value="2018-12-28" variant="outlined" width={160} /> diff --git a/frontend/src/components/viewers/ConfusionMatrix.tsx b/frontend/src/components/viewers/ConfusionMatrix.tsx index fd3ea98b0..2ef0642a7 100644 --- a/frontend/src/components/viewers/ConfusionMatrix.tsx +++ b/frontend/src/components/viewers/ConfusionMatrix.tsx @@ -55,6 +55,7 @@ class ConfusionMatrix extends Viewer ) - 1 : 0; private _shrinkThreshold = 600; + private _uiData: number[][] = []; private _css = stylesheet({ activeLabel: { @@ -154,8 +155,36 @@ class ConfusionMatrix extends Viewer this.state = { activeCell: [-1, -1], }; + // Raw data: + // [ + // [1, 2], + // [3, 4], + // ] + // converts to UI data: + // y-axis + // ^ + // | + // 1 [2, 4], + // | + // 0 [1, 3], + // | + // *---0--1---> x-axis + if (!this._config || !this._config.labels || !this._config.data) { + this._uiData = []; + } else { + const labelCount = this._config.labels.length; + const uiData: number[][] = new Array(labelCount) + .fill(undefined) + .map(() => new Array(labelCount)); + for (let i = 0; i < labelCount; ++i) { + for (let j = 0; j < labelCount; ++j) { + uiData[labelCount - 1 - j][i] = this._config.data[i]?.[j]; + } + } + this._uiData = uiData; + } - for (const i of this._config.data) { + for (const i of this._uiData) { const row = []; for (const j of i) { row.push(+j / this._max); @@ -186,7 +215,7 @@ class ConfusionMatrix extends Viewer {yAxisLabel} )} - {this._config.data.map((row, r) => ( + {this._uiData.map((row, r) => ( {!small && ( @@ -196,7 +225,11 @@ class ConfusionMatrix extends Viewer r === activeRow ? this._css.activeLabel : '', )} > - {this._config.labels[r]} + { + this._config.labels[ + this._config.labels.length - 1 - r + ] /* uiData's ith's row corresponds to the reverse ordered label */ + }
)} @@ -209,6 +242,15 @@ class ConfusionMatrix extends Viewer color: this._opacities[r][c] < 0.6 ? color.foreground : color.background, }} onMouseOver={() => this.setState({ activeCell: [r, c] })} + onMouseLeave={() => + this.setState(state => ({ + // Remove active cell if it's still the one active + activeCell: + state.activeCell[0] === r && state.activeCell[1] === c + ? [-1, -1] + : state.activeCell, + })) + } >
+ +
+ label2 +
+ + +
+ 4 + + +
+ 7 + + +
-
- 4 - - -
- 5 - - - - -
- label2 -
- - 6 - -
- 7 - - -
- 8 - + +
+ label2 +
+ + +
+ 1 + + +
+ 4 + + +
-
- 1 - - -
- 2 - - - - -
- label2 -
- - 3 - -
- 4 - - -
- 5 - - - - -
- - -
- 6 - - -
- 7 - - -
- 8 - +
+ 1 + + +
+ 4 + + + + -
- 1 - - -
- 2 - - - - 3 - -
- 4 - - -
- 5 - - - - -
- 6 - - -
- 7 - - -
- 8 - @@ -885,6 +632,61 @@ exports[`ConfusionMatrix renders only one of the given list of configs 1`] = ` + +
+ label2 +
+ + +
+ 1 + + +
+ 4 + + +
-
- 1 - - -
- 2 - - - - -
- label2 -
- - 3 - -
- 4 - - -
- 5 - - - - -
- - -
- 6 - - -
- 7 - - -
- 8 - { it('getPodLogs', async () => { const spy = fetchSpy('http://some/address'); - expect(await Apis.getPodLogs('some-pod-name')).toEqual('http://some/address'); - expect(spy).toHaveBeenCalledWith('k8s/pod/logs?podname=some-pod-name', { + expect(await Apis.getPodLogs('some-pod-name', 'ns')).toEqual('http://some/address'); + expect(spy).toHaveBeenCalledWith('k8s/pod/logs?podname=some-pod-name&podnamespace=ns', { credentials: 'same-origin', }); }); @@ -87,7 +87,7 @@ describe('Apis', () => { text: () => 'bad response', }), ); - expect(Apis.getPodLogs('some-pod-name')).rejects.toThrowError('bad response'); + expect(Apis.getPodLogs('some-pod-name', 'ns')).rejects.toThrowError('bad response'); expect(Apis.getPodLogs('some-pod-name', 'some-namespace-name')).rejects.toThrowError( 'bad response', ); diff --git a/frontend/src/lib/Apis.ts b/frontend/src/lib/Apis.ts index 54d4f3a2a..daa72fa92 100644 --- a/frontend/src/lib/Apis.ts +++ b/frontend/src/lib/Apis.ts @@ -98,7 +98,7 @@ export class Apis { /** * Get pod logs */ - public static getPodLogs(podName: string, podNamespace?: string): Promise { + public static getPodLogs(podName: string, podNamespace: string): Promise { let query = `k8s/pod/logs?podname=${encodeURIComponent(podName)}`; if (podNamespace) { query += `&podnamespace=${encodeURIComponent(podNamespace)}`; diff --git a/frontend/src/lib/CompareUtils.ts b/frontend/src/lib/CompareUtils.ts index d566a5f33..58e82c7af 100644 --- a/frontend/src/lib/CompareUtils.ts +++ b/frontend/src/lib/CompareUtils.ts @@ -118,7 +118,7 @@ export default class CompareUtils { xLabels = Array.from(namesToNodesToValues.keys()); rows = Array.from(nodeIds.keys()).map(nodeId => { - yLabels.push((workflow && workflow.status.nodes[nodeId].displayName) || nodeId); + yLabels.push(nodeId); return xLabels.map(metricName => namesToNodesToValues.get(metricName)!.get(nodeId) || ''); }); } diff --git a/frontend/src/lib/OutputArtifactLoader.test.ts b/frontend/src/lib/OutputArtifactLoader.test.ts index 41d9de5c9..1c9732438 100644 --- a/frontend/src/lib/OutputArtifactLoader.test.ts +++ b/frontend/src/lib/OutputArtifactLoader.test.ts @@ -208,24 +208,27 @@ describe('OutputArtifactLoader', () => { fileToRead = ''; const source = ` - field1,field1,1 - field1,field2,2 - field2,field1,3 - field2,field2,4 + label1,label1,1 + label1,label2,2 + label2,label1,3 + label2,label2,4 `; const expectedResult: ConfusionMatrixConfig = { axes: ['field1', 'field2'], data: [ - [1, 2], - [3, 4], + // Note, the data matrix's layout does not match how we show it in UI. 
+ // field1 is x-axis, field2 is y-axis + [1 /* field1=label1, field2=label1 */, 2 /* field1=label1, field2=label2 */], + [3 /* field1=label2, field2=label1 */, 4 /* field1=label2, field2=label2 */], ], - labels: ['field1', 'field2'], + labels: ['label1', 'label2'], type: PlotType.CONFUSION_MATRIX, }; const result = await OutputArtifactLoader.buildConfusionMatrixConfig( { ...basicMetadata, + labels: ['label1', 'label2'], storage: 'inline', source, } as any, diff --git a/frontend/src/lib/OutputArtifactLoader.ts b/frontend/src/lib/OutputArtifactLoader.ts index 7d91d48d3..52d849483 100644 --- a/frontend/src/lib/OutputArtifactLoader.ts +++ b/frontend/src/lib/OutputArtifactLoader.ts @@ -143,9 +143,10 @@ export class OutputArtifactLoader { } const data = Array.from(Array(labels.length), () => new Array(labels.length)); - csvRows.forEach(([target, predicted, count]) => { - const i = labelIndex[target.trim()]; - const j = labelIndex[predicted.trim()]; + csvRows.forEach(([labelX, labelY, count]) => { + const i = labelIndex[labelX.trim()]; + const j = labelIndex[labelY.trim()]; + // Note: data[i][j] means data(i, j) i on x-axis, j on y-axis data[i][j] = Number.parseInt(count, 10); }); diff --git a/frontend/src/lib/TriggerUtils.test.ts b/frontend/src/lib/TriggerUtils.test.ts index af6a7efd0..c6223099b 100644 --- a/frontend/src/lib/TriggerUtils.test.ts +++ b/frontend/src/lib/TriggerUtils.test.ts @@ -23,6 +23,7 @@ import { TriggerType, dateToPickerFormat, triggerDisplayString, + parseTrigger, } from './TriggerUtils'; import { ApiTrigger } from '../apis/job'; @@ -236,6 +237,45 @@ describe('TriggerUtils', () => { }); }); + describe('parseTrigger', () => { + it('throws on invalid trigger', () => { + expect(() => parseTrigger({})).toThrow('Invalid trigger: {}'); + }); + + it('parses periodic schedule', () => { + const startTime = new Date(1234); + const parsedTrigger = parseTrigger({ + periodic_schedule: { + start_time: startTime, + interval_second: '120', + }, + }); + expect(parsedTrigger).toEqual({ + type: TriggerType.INTERVALED, + startDateTime: startTime, + endDateTime: undefined, + intervalCategory: PeriodicInterval.MINUTE, + intervalValue: 2, + }); + }); + + it('parses cron schedule', () => { + const endTime = new Date(12345); + const parsedTrigger = parseTrigger({ + cron_schedule: { + end_time: endTime, + cron: '0 0 0 ? * 0,6', + }, + }); + expect(parsedTrigger).toEqual({ + type: TriggerType.CRON, + cron: '0 0 0 ? 
* 0,6', + startDateTime: undefined, + endDateTime: endTime, + }); + }); + }); + describe('dateToPickerFormat', () => { it('converts date to picker format date and time', () => { const testDate = new Date(2018, 11, 13, 11, 33); diff --git a/frontend/src/lib/TriggerUtils.ts b/frontend/src/lib/TriggerUtils.ts index 3168cab71..fd38c0d7e 100644 --- a/frontend/src/lib/TriggerUtils.ts +++ b/frontend/src/lib/TriggerUtils.ts @@ -28,6 +28,20 @@ export enum PeriodicInterval { WEEK = 'Week', MONTH = 'Month', } +const INTERVAL_SECONDS = { + [PeriodicInterval.MINUTE]: 60, + [PeriodicInterval.HOUR]: 60 * 60, + [PeriodicInterval.DAY]: 60 * 60 * 24, + [PeriodicInterval.WEEK]: 60 * 60 * 24 * 7, + [PeriodicInterval.MONTH]: 60 * 60 * 24 * 30, +}; +const PERIODIC_INTERVAL_DESCENDING = [ + PeriodicInterval.MONTH, + PeriodicInterval.WEEK, + PeriodicInterval.DAY, + PeriodicInterval.HOUR, + PeriodicInterval.MINUTE, +]; export const triggers = new Map([ [TriggerType.INTERVALED, { displayName: 'Periodic' }], @@ -35,28 +49,26 @@ export const triggers = new Map([ ]); export function getPeriodInSeconds(interval: PeriodicInterval, count: number): number { - let intervalSeconds = 0; - switch (interval) { - case PeriodicInterval.MINUTE: - intervalSeconds = 60; - break; - case PeriodicInterval.HOUR: - intervalSeconds = 60 * 60; - break; - case PeriodicInterval.DAY: - intervalSeconds = 60 * 60 * 24; - break; - case PeriodicInterval.WEEK: - intervalSeconds = 60 * 60 * 24 * 7; - break; - case PeriodicInterval.MONTH: - intervalSeconds = 60 * 60 * 24 * 30; - break; - default: - throw new Error('Invalid interval category: ' + interval); + const intervalSeconds = INTERVAL_SECONDS[interval]; + if (!intervalSeconds) { + throw new Error('Invalid interval category: ' + interval); } return intervalSeconds * count; } +export function parsePeriodFromSeconds( + seconds: number, +): { interval: PeriodicInterval; count: number } { + for (const interval of PERIODIC_INTERVAL_DESCENDING) { + const intervalSeconds = INTERVAL_SECONDS[interval]; + if (seconds % intervalSeconds === 0) { + return { + interval, + count: seconds / intervalSeconds, + }; + } + } + throw new Error('Invalid seconds: ' + seconds); +} export function buildCron( startDateTime: Date | undefined, @@ -174,6 +186,66 @@ export function buildTrigger( return trigger; } +export type ParsedTrigger = + | { + type: TriggerType.INTERVALED; + intervalCategory: PeriodicInterval; + intervalValue: number; + startDateTime?: Date; + endDateTime?: Date; + cron?: undefined; + } + | { + type: TriggerType.CRON; + intervalCategory?: undefined; + intervalValue?: undefined; + startDateTime?: Date; + endDateTime?: Date; + cron: string; + }; + +export function parseTrigger(trigger: ApiTrigger): ParsedTrigger { + if (trigger.periodic_schedule) { + const periodicSchedule = trigger.periodic_schedule; + const intervalSeconds = parseInt(periodicSchedule.interval_second || '', 10); + if (Number.isNaN(intervalSeconds)) { + throw new Error( + `Interval seconds is NaN: ${periodicSchedule.interval_second} for ${JSON.stringify( + trigger, + )}`, + ); + } + const { interval: intervalCategory, count: intervalValue } = parsePeriodFromSeconds( + intervalSeconds, + ); + return { + type: TriggerType.INTERVALED, + intervalCategory, + intervalValue, + // Generated client has a bug the fields will be string here instead, so + // we use new Date() to convert them to Date. + startDateTime: periodicSchedule.start_time + ? 
new Date(periodicSchedule.start_time as any) + : undefined, + endDateTime: periodicSchedule.end_time + ? new Date(periodicSchedule.end_time as any) + : undefined, + }; + } + if (trigger.cron_schedule) { + const { cron, start_time: startTime, end_time: endTime } = trigger.cron_schedule; + return { + type: TriggerType.CRON, + cron: cron || '', + // Generated client has a bug the fields will be string here instead, so + // we use new Date() to convert them to Date. + startDateTime: startTime ? new Date(startTime as any) : undefined, + endDateTime: endTime ? new Date(endTime as any) : undefined, + }; + } + throw new Error(`Invalid trigger: ${JSON.stringify(trigger)}`); +} + export function dateToPickerFormat(d: Date): [string, string] { const year = d.getFullYear(); const month = ('0' + (d.getMonth() + 1)).slice(-2); diff --git a/frontend/src/lib/Utils.test.ts b/frontend/src/lib/Utils.test.ts index 5de4a8942..8da86efbe 100644 --- a/frontend/src/lib/Utils.test.ts +++ b/frontend/src/lib/Utils.test.ts @@ -19,6 +19,7 @@ import { enabledDisplayString, formatDateString, generateMinioArtifactUrl, + generateS3ArtifactUrl, getRunDuration, getRunDurationFromWorkflow, logger, @@ -253,4 +254,12 @@ describe('Utils', () => { expect(generateMinioArtifactUrl('ZZZ://my-bucket/a/b/c')).toBe(undefined); }); }); + + describe('generateS3ArtifactUrl', () => { + it('handles s3:// URIs', () => { + expect(generateS3ArtifactUrl('s3://my-bucket/a/b/c')).toBe( + 'artifacts/get?source=s3&bucket=my-bucket&key=a%2Fb%2Fc', + ); + }); + }); }); diff --git a/frontend/src/lib/Utils.tsx b/frontend/src/lib/Utils.tsx index 2c3daa86e..e4b68e038 100644 --- a/frontend/src/lib/Utils.tsx +++ b/frontend/src/lib/Utils.tsx @@ -337,6 +337,26 @@ export function generateMinioArtifactUrl(minioUri: string, peek?: number): strin return generateArtifactUrl('minio', matches[1], matches[2], peek); } +const S3_URI_PREFIX = 's3://'; +/** + * Generates an HTTPS API URL from s3:// uri + * + * @param s3Uri S3 uri that starts with s3://, like s3://ml-pipeline/path/file + * @returns A URL that leads to the artifact data. Returns undefined when s3Uri is not valid. + */ +export function generateS3ArtifactUrl(s3Uri: string): string | undefined { + if (!s3Uri.startsWith(S3_URI_PREFIX)) { + return undefined; + } + + // eslint-disable-next-line no-useless-escape + const matches = s3Uri.match(/^s3:\/\/([^\/]+)\/(.+)$/); + if (matches == null) { + return undefined; + } + return generateArtifactUrl('s3', matches[1], matches[2]); +} + export function buildQuery(queriesMap: { [key: string]: string | number | undefined }): string { const queryContent = Object.entries(queriesMap) .filter((entry): entry is [string, string | number] => entry[1] != null) diff --git a/frontend/src/lib/WorkflowParser.ts b/frontend/src/lib/WorkflowParser.ts index e7c8a7215..7992e90e6 100644 --- a/frontend/src/lib/WorkflowParser.ts +++ b/frontend/src/lib/WorkflowParser.ts @@ -101,7 +101,7 @@ export default class WorkflowParser { for (const edge of edges || []) graph.setEdge(edge['parent'], edge['child']); - const status = this.getStatus(statusMap.get(task['name'])) + const status = this.getStatus(statusMap.get(task['name'])); const phase = statusToPhase(status); const statusColoring = exitHandlers.includes(task['name']) ? 
'#fef7f0' diff --git a/frontend/src/pages/ExperimentList.tsx b/frontend/src/pages/ExperimentList.tsx index cc192c406..4a6a6ce7f 100644 --- a/frontend/src/pages/ExperimentList.tsx +++ b/frontend/src/pages/ExperimentList.tsx @@ -278,7 +278,7 @@ export class ExperimentList extends Page<{ namespace?: string }, ExperimentListS experimentIdMask={experiment.id} onError={() => null} {...this.props} - disablePaging={true} + disablePaging={false} selectedIds={this.state.selectedIds} noFilterBox={true} storageState={RunStorageState.AVAILABLE} diff --git a/frontend/src/pages/GettingStarted.test.tsx b/frontend/src/pages/GettingStarted.test.tsx index c1820be51..c3da858ae 100644 --- a/frontend/src/pages/GettingStarted.test.tsx +++ b/frontend/src/pages/GettingStarted.test.tsx @@ -73,9 +73,24 @@ describe('GettingStarted page', () => {

Demos - Try an end-to-end demonstration pipeline.

@@ -1180,7 +1199,8 @@ describe('RunDetails', () => { it('keeps side pane open and on same tab when logs change after refresh', async () => { testRun.pipeline_runtime!.workflow_manifest = JSON.stringify({ - status: { nodes: { node1: { id: 'node1' } } }, + status: { nodes: { node1: { id: 'node1', phase: 'Succeeded' } } }, + metadata: { namespace: 'ns' }, }); tree = shallow(); await getRunSpy; @@ -1198,9 +1218,10 @@ describe('RunDetails', () => { expect(tree).toMatchSnapshot(); }); - it('dismisses log failure warning banner when logs can be fetched after refresh', async () => { + it('shows error banner if fetching logs failed not because pod has gone away', async () => { testRun.pipeline_runtime!.workflow_manifest = JSON.stringify({ - status: { nodes: { node1: { id: 'node1' } } }, + status: { nodes: { node1: { id: 'node1', phase: 'Succeeded' } } }, + metadata: { namespace: 'ns' }, }); TestUtils.makeErrorResponseOnce(getPodLogsSpy, 'getting logs failed'); tree = shallow(); @@ -1214,10 +1235,32 @@ describe('RunDetails', () => { await getPodLogsSpy; await TestUtils.flushPromises(); expect(tree.state()).toMatchObject({ - logsBannerAdditionalInfo: 'getting logs failed', - logsBannerMessage: - 'Warning: failed to retrieve pod logs. Possible reasons include cluster autoscaling or pod preemption', - logsBannerMode: 'warning', + logsBannerAdditionalInfo: 'Error response: getting logs failed', + logsBannerMessage: 'Failed to retrieve pod logs.', + logsBannerMode: 'error', + }); + }); + + it('dismisses log failure warning banner when logs can be fetched after refresh', async () => { + testRun.pipeline_runtime!.workflow_manifest = JSON.stringify({ + status: { nodes: { node1: { id: 'node1', phase: 'Failed' } } }, + metadata: { namespace: 'ns' }, + }); + TestUtils.makeErrorResponseOnce(getPodLogsSpy, 'getting logs failed'); + tree = shallow(); + await getRunSpy; + await TestUtils.flushPromises(); + clickGraphNode(tree, 'node1'); + tree + .find('MD2Tabs') + .at(1) + .simulate('switch', STEP_TABS.LOGS); + await getPodLogsSpy; + await TestUtils.flushPromises(); + expect(tree.state()).toMatchObject({ + logsBannerAdditionalInfo: 'Error response: getting logs failed', + logsBannerMessage: 'Failed to retrieve pod logs.', + logsBannerMode: 'error', }); testRun.run!.status = 'Failed'; @@ -1229,6 +1272,64 @@ describe('RunDetails', () => { }); }); + describe('pod tab', () => { + it('shows pod info', async () => { + testRun.pipeline_runtime!.workflow_manifest = JSON.stringify({ + status: { nodes: { node1: { id: 'node1', phase: 'Failed' } } }, + metadata: { namespace: 'ns' }, + }); + tree = shallow(); + await getRunSpy; + await TestUtils.flushPromises(); + clickGraphNode(tree, 'node1'); + tree + .find('MD2Tabs') + .at(1) + .simulate('switch', STEP_TABS.POD); + await getPodInfoSpy; + await TestUtils.flushPromises(); + + expect(tree.find(NODE_DETAILS_SELECTOR)).toMatchInlineSnapshot(` +
+
+ +
+
+ `); + }); + + it('does not show pod pane if selected node skipped', async () => { + testRun.pipeline_runtime!.workflow_manifest = JSON.stringify({ + status: { nodes: { node1: { id: 'node1', phase: 'Skipped' } } }, + metadata: { namespace: 'ns' }, + }); + tree = shallow(); + await getRunSpy; + await TestUtils.flushPromises(); + clickGraphNode(tree, 'node1'); + tree + .find('MD2Tabs') + .at(1) + .simulate('switch', STEP_TABS.POD); + await TestUtils.flushPromises(); + + expect(tree.find(NODE_DETAILS_SELECTOR)).toMatchInlineSnapshot(` +
+ `); + }); + }); + describe('auto refresh', () => { beforeEach(() => { testRun.run!.status = NodePhase.PENDING; diff --git a/frontend/src/pages/RunDetails.tsx b/frontend/src/pages/RunDetails.tsx index 410c9e0d6..69a2d97aa 100644 --- a/frontend/src/pages/RunDetails.tsx +++ b/frontend/src/pages/RunDetails.tsx @@ -87,8 +87,8 @@ enum SidePaneTab { interface SelectedNodeDetails { id: string; - mode?: Mode; logs?: string; + phase?: string; phaseMessage?: string; } @@ -124,6 +124,7 @@ interface RunDetailsState { runMetadata?: ApiRun; selectedTab: number; selectedNodeDetails: SelectedNodeDetails | null; + sidepanelBannerMode: Mode; sidepanelBusy: boolean; sidepanelSelectedTab: SidePaneTab; workflow?: any; @@ -173,6 +174,7 @@ class RunDetails extends Page { runFinished: false, selectedNodeDetails: null, selectedTab: 0, + sidepanelBannerMode: 'warning', sidepanelBusy: false, sidepanelSelectedTab: SidePaneTab.INPUT_OUTPUT, mlmdRunContext: undefined, @@ -232,6 +234,7 @@ class RunDetails extends Page { runFinished, runMetadata, selectedTab, + sidepanelBannerMode, selectedNodeDetails, sidepanelSelectedTab, workflow, @@ -303,7 +306,7 @@ class RunDetails extends Page { {!!selectedNodeDetails.phaseMessage && ( )} @@ -456,62 +459,66 @@ class RunDetails extends Page {
)} - {sidepanelSelectedTab === SidePaneTab.POD && ( -
- {selectedNodeId && namespace && ( - - )} -
- )} + {sidepanelSelectedTab === SidePaneTab.POD && + selectedNodeDetails.phase !== NodePhase.SKIPPED && ( +
+ {selectedNodeId && namespace && ( + + )} +
+ )} - {sidepanelSelectedTab === SidePaneTab.EVENTS && ( -
- {selectedNodeId && namespace && ( - - )} -
- )} + {sidepanelSelectedTab === SidePaneTab.EVENTS && + selectedNodeDetails.phase !== NodePhase.SKIPPED && ( +
+ {selectedNodeId && namespace && ( + + )} +
+ )} - {sidepanelSelectedTab === SidePaneTab.LOGS && ( -
- {this.state.logsBannerMessage && ( - - - - )} - {stackdriverK8sLogsUrl && ( - - )} - {!this.state.logsBannerMessage && - this.state.selectedNodeDetails && ( - // Overflow hidden here, because scroll is handled inside - // LogViewer. -
- + {this.state.logsBannerMessage && ( + + + + )} + {stackdriverK8sLogsUrl && ( +
+ Logs can also be viewed in{' '} + + Stackdriver Kubernetes Monitoring + + .
)} -
- )} + {!this.state.logsBannerMessage && + this.state.selectedNodeDetails && ( + // Overflow hidden here, because scroll is handled inside + // LogViewer. +
+ +
+ )} +
+ )}
@@ -833,7 +840,7 @@ class RunDetails extends Page { return !workflow.status ? [] : [ - ['Status', workflow.status.conditions[0].reason], + ['Status', workflow.status.conditions ? workflow.status.conditions[0].reason : 'Pending'], ['Description', runMetadata ? runMetadata!.description! : ''], [ 'Created at', @@ -855,7 +862,10 @@ class RunDetails extends Page { private async _loadSidePaneTab(tab: SidePaneTab): Promise { const workflow = this.state.workflow; const selectedNodeDetails = this.state.selectedNodeDetails; - if (workflow && workflow.status && workflow.status && selectedNodeDetails) { + + let sidepanelBannerMode: Mode = 'warning'; + + if (workflow && workflow.status && workflow.status.taskRuns && selectedNodeDetails) { let node: any; for (const podName of Object.getOwnPropertyNames(workflow.status.taskRuns)) { @@ -869,23 +879,41 @@ class RunDetails extends Page { } } - if (node && node.status && node.status.conditions[0].type !== 'Succeeded') { - selectedNodeDetails.phaseMessage = - node && node.status - ? `This step is in ${node.status.conditions[0].type} state with this message: ` + - node.status.conditions[0].message - : undefined; - } else if (node && node.status && node.status.conditions && node.conditionChecks) { - if (node.status.conditions[0].reason === 'Succeeded') { - selectedNodeDetails.mode = 'info'; - selectedNodeDetails.phaseMessage = 'All ConditionChecks have completed executing'; - } else selectedNodeDetails.phaseMessage = node.status.conditions[0].message; + if (node) { + selectedNodeDetails.phase = statusToPhase(node.status.conditions[0].reason); + + switch (selectedNodeDetails.phase) { + // TODO: make distinction between system and pipelines error clear + case NodePhase.ERROR: + case NodePhase.SKIPPED: + sidepanelBannerMode = 'warning'; + break; + case NodePhase.FAILED: + sidepanelBannerMode = 'error'; + break; + default: + sidepanelBannerMode = 'info'; + break; + } + + if (node.status.conditions[0].type !== 'Succeeded') { + selectedNodeDetails.phaseMessage = + node && node.status + ? 
`This step is in ${node.status.conditions[0].type} state with this message: ` + + node.status.conditions[0].message + : undefined; + } else if (node.status.conditions && node.conditionChecks) { + if (node.status.conditions[0].reason === 'Succeeded') { + selectedNodeDetails.phaseMessage = 'All ConditionChecks have completed executing'; + } else selectedNodeDetails.phaseMessage = node.status.conditions[0].message; + } } - this.setStateSafe({ selectedNodeDetails, sidepanelSelectedTab: tab }); + + this.setStateSafe({ selectedNodeDetails, sidepanelSelectedTab: tab, sidepanelBannerMode }); switch (tab) { case SidePaneTab.LOGS: - if (node.status.phase !== NodePhase.SKIPPED) { + if (node.status.phase !== NodePhase.PENDING && node.status.phase !== NodePhase.SKIPPED) { await this._loadSelectedNodeLogs(); } else { // Clear logs @@ -897,31 +925,43 @@ class RunDetails extends Page { private async _loadSelectedNodeLogs(): Promise { const selectedNodeDetails = this.state.selectedNodeDetails; - if (!selectedNodeDetails) { + const namespace = this.state.workflow?.metadata?.namespace; + if (!selectedNodeDetails || !namespace) { return; } this.setStateSafe({ sidepanelBusy: true }); + + let logsBannerMessage = ''; + let logsBannerAdditionalInfo = ''; + let logsBannerMode = '' as Mode; + try { - const logs = await Apis.getPodLogs( - selectedNodeDetails.id, - this.state.workflow?.metadata?.namespace, - ); - selectedNodeDetails.logs = logs; - this.setStateSafe({ - logsBannerAdditionalInfo: '', - logsBannerMessage: '', - selectedNodeDetails, - }); + selectedNodeDetails.logs = await Apis.getPodLogs(selectedNodeDetails.id, namespace); } catch (err) { - this.setStateSafe({ - logsBannerMessage: - 'Warning: failed to retrieve pod logs. Possible reasons include cluster autoscaling or pod preemption', - logsBannerAdditionalInfo: await errorToMessage(err), - logsBannerMode: 'warning', - }); - } finally { - this.setStateSafe({ sidepanelBusy: false }); + let errMsg = await errorToMessage(err); + logsBannerMessage = 'Failed to retrieve pod logs.'; + + if (errMsg === 'pod not found') { + logsBannerMessage += this.props.gkeMetadata.projectId + ? ' Use Stackdriver Kubernetes Monitoring to view them.' + : ''; + logsBannerMode = 'info'; + logsBannerAdditionalInfo = + 'Possible reasons include pod garbage collection, cluster autoscaling and pod preemption. 
'; + } else { + logsBannerMode = 'error'; + } + + logsBannerAdditionalInfo += 'Error response: ' + errMsg; } + + this.setStateSafe({ + sidepanelBusy: false, + logsBannerAdditionalInfo, + logsBannerMessage, + logsBannerMode, + selectedNodeDetails, + }); } private async _onGenerate( diff --git a/frontend/src/pages/Status.tsx b/frontend/src/pages/Status.tsx index a956f660d..9535cc6a2 100644 --- a/frontend/src/pages/Status.tsx +++ b/frontend/src/pages/Status.tsx @@ -111,7 +111,7 @@ export function statusToIcon( case NodePhase.PIPELINERUNCOULDNTCANCEL: IconComponent = TerminatedIcon; iconColor = color.terminated; - title = 'PipelineRun couldn\'t cancel'; + title = 'PipelineRun could not cancel'; break; case NodePhase.TASKRUNCANCELLED: IconComponent = TerminatedIcon; @@ -121,7 +121,7 @@ export function statusToIcon( case NodePhase.TASKRUNCOULDNTCANCEL: IconComponent = TerminatedIcon; iconColor = color.terminated; - title = 'TaskRun couldn\'t cancel'; + title = 'TaskRun could not cancel'; break; case NodePhase.UNKNOWN: break; diff --git a/frontend/src/pages/__snapshots__/GettingStarted.test.tsx.snap b/frontend/src/pages/__snapshots__/GettingStarted.test.tsx.snap index f70e0ec7e..8413a27a1 100644 --- a/frontend/src/pages/__snapshots__/GettingStarted.test.tsx.snap +++ b/frontend/src/pages/__snapshots__/GettingStarted.test.tsx.snap @@ -67,7 +67,29 @@ exports[`GettingStarted page initially renders documentation 1`] = ` class="link" href="#/pipelines" > - TFX pipeline demo + TFX pipeline demo with Keras + + + + +
  • + + TFX pipeline demo with Estimator
      @@ -140,7 +162,9 @@ exports[`GettingStarted page initially renders documentation 1`] = `
    • DSL - Control structures @@ -194,13 +218,13 @@ Array [ undefined, 10, undefined, - "%7B%22predicates%22%3A%5B%7B%22key%22%3A%22name%22%2C%22op%22%3A%22EQUALS%22%2C%22string_value%22%3A%22%5BTutorial%5D%20Data%20passing%20in%20python%20components%22%7D%5D%7D", + "%7B%22predicates%22%3A%5B%7B%22key%22%3A%22name%22%2C%22op%22%3A%22EQUALS%22%2C%22string_value%22%3A%22%5BDemo%5D%20TFX%20-%20Iris%20classification%20pipeline%22%7D%5D%7D", ], Array [ undefined, 10, undefined, - "%7B%22predicates%22%3A%5B%7B%22key%22%3A%22name%22%2C%22op%22%3A%22EQUALS%22%2C%22string_value%22%3A%22%5BTutorial%5D%20DSL%20-%20Control%20structures%22%7D%5D%7D", + "%7B%22predicates%22%3A%5B%7B%22key%22%3A%22name%22%2C%22op%22%3A%22EQUALS%22%2C%22string_value%22%3A%22%5BTutorial%5D%20Data%20passing%20in%20python%20components%22%7D%5D%7D", ], ] `; diff --git a/frontend/src/pages/__snapshots__/NewRun.test.tsx.snap b/frontend/src/pages/__snapshots__/NewRun.test.tsx.snap index 85d465f24..61aba1667 100644 --- a/frontend/src/pages/__snapshots__/NewRun.test.tsx.snap +++ b/frontend/src/pages/__snapshots__/NewRun.test.tsx.snap @@ -1482,6 +1482,13 @@ exports[`NewRun changes title and form if the new run will recur, based on the r Choose a method by which new runs will be triggered
  • -
    -
    - -
    -
    -
    + />
    /dev/null || (echo "node not found in PATH, recommend install via https://github.com/nvm-sh/nvm#installing-and-updating" && exit 1) +node -v | grep v12 || (echo "node not v12.x version" && exit 1) +echo "jq>=1.6" +which jq >/dev/null || (echo "jq not found in PATH" && exit 1) +echo "yq>=3.3" +which yq >/dev/null || (echo "yq not found in PATH" && exit 1) +yq -V | grep 3. || (echo "yq version 3.x should be used" && exit 1) +echo "java>=8" +which java >/dev/null || (echo "java not found in PATH" && exit 1) +echo "bazel==0.24.0" +which bazel >/dev/null || (echo "bazel not found in PATH" && exit 1) +bazel version | grep 0.24.0 || (echo "bazel not 0.24.0 version" && exit 1) +echo "python>3" +which python >/dev/null || (echo "python not found in PATH" && exit 1) +python -c "import setuptools" || (echo "setuptools should be installed in python" && exit 1) + +echo "All tools installed" +echo "Please add another needed tools if above list is not complete" diff --git a/hack/release-imp.sh b/hack/release-imp.sh new file mode 100755 index 000000000..7c49ed922 --- /dev/null +++ b/hack/release-imp.sh @@ -0,0 +1,48 @@ +#!/bin/bash +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -ex + +echo "Usage: update kubeflow/pipelines/VERSION to new version tag by" +echo '`echo -n "\$VERSION" > VERSION` first, then run this script.' +echo "Please use the above command to make sure the file doesn't have extra" +echo "line endings." + +DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null && pwd)" +REPO_ROOT="$DIR/.." +TAG_NAME="$(cat $REPO_ROOT/VERSION)" + +if [[ -z "$TAG_NAME" ]]; then + echo "ERROR: $REPO_ROOT/VERSION is empty" >&2 + exit 1 +fi + +"$DIR/check-release-needed-tools.sh" + +pushd "$REPO_ROOT" +npm ci +npm run changelog +popd +# Change github issue/PR references like #123 to real urls in markdown. +# The issues must have a " " or a "(" before it to avoid already converted issues like [\#123](url...). +sed -i.bak -e 's|\([ (]\)#\([0-9]\+\)|\1[\\#\2](https://github.com/kubeflow/pipelines/issues/\2)|g' "$REPO_ROOT/CHANGELOG.md" + +"$REPO_ROOT/components/release-in-place.sh" $TAG_NAME +"$REPO_ROOT/manifests/gcp_marketplace/hack/release.sh" $TAG_NAME +"$REPO_ROOT/manifests/kustomize/hack/release.sh" $TAG_NAME +"$REPO_ROOT/sdk/hack/release.sh" $TAG_NAME +"$REPO_ROOT/backend/api/generate_api.sh" +"$REPO_ROOT/backend/api/build_kfp_server_api_python_package.sh" diff --git a/hack/release.sh b/hack/release.sh new file mode 100755 index 000000000..ac31d9f6a --- /dev/null +++ b/hack/release.sh @@ -0,0 +1,49 @@ +#!/bin/bash +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -xe + +TAG_NAME=$1 +BRANCH=$2 +REPO=kubeflow/pipelines + +if [[ -z "$BRANCH" || -z "$TAG_NAME" ]]; then + echo "Usage: release.sh " >&2 + exit 1 +fi + +# Checking out the repo's release branch +clone_dir=$(mktemp -d) +git clone "git@github.com:${REPO}.git" "$clone_dir" +cd "$clone_dir" +git checkout "$BRANCH" + +echo -n "$TAG_NAME" > ./VERSION +# Run the release script in cloned repo +"hack/release-imp.sh" $TAG_NAME + +# Checking-in the component changes +git add --all +git commit --message "chore(release): bumped version to $TAG_NAME" +git tag -a "$TAG_NAME" -m "Kubeflow Pipelines $TAG_NAME release" + +# Pushing the changes upstream +read -p "Do you want to push the version change and tag $TAG_NAME tag to upstream? [y|n]" +if [ "$REPLY" != "y" ]; then + exit +fi +git push --set-upstream origin "$BRANCH" +git push origin "$TAG_NAME" diff --git a/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/application.yaml b/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/application.yaml index 4a9d3e719..217af28b3 100644 --- a/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/application.yaml +++ b/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/application.yaml @@ -6,46 +6,45 @@ metadata: annotations: kubernetes-engine.cloud.google.com/icon: >- data:image/png;base64,{{ .Files.Get "logo.png" | b64enc }} - marketplace.cloud.google.com/deploy-info: '{"partner_id": "google-cloud-ai-platform", "product_id": "kubeflow-pipelines", "partner_name": "Google Cloud AI Platform"}' + marketplace.cloud.google.com/deploy-info: '{"partner_id": "google-cloud-ai-platform", + "product_id": "kubeflow-pipelines", "partner_name": "Google Cloud AI Platform"}' labels: app.kubernetes.io/name: "{{ .Release.Name }}" spec: descriptor: type: Kubeflow Pipelines - version: '0.5.1' + version: 1.0.0 description: |- Reusable end-to-end ML workflow maintainers: - - name: Google Cloud AI Platform - url: https://cloud.google.com/ai-platform/ - - name: Kubeflow Pipelines - url: https://github.com/kubeflow/pipelines + - name: Google Cloud AI Platform + url: https://cloud.google.com/ai-platform/ + - name: Kubeflow Pipelines + url: https://github.com/kubeflow/pipelines links: - - description: 'Kubeflow Pipelines Documentation' - url: https://www.kubeflow.org/docs/pipelines/ + - description: 'Kubeflow Pipelines Documentation' + url: https://www.kubeflow.org/docs/pipelines/ notes: |- Please go to [Hosted Kubeflow Pipelines Console](https://console.cloud.google.com/ai-platform/pipelines/clusters). 
- info: - - name: Application Namespace - value: {{ .Release.Namespace }} - - name: Console - value: 'https://console.cloud.google.com/ai-platform/pipelines/clusters' + - name: Application Namespace + value: "{{ .Release.Namespace }}" + - name: Console + value: 'https://console.cloud.google.com/ai-platform/pipelines/clusters' componentKinds: - - group: v1 - kind: ServiceAccount - - group: rbac.authorization.k8s.io/v1 - kind: Role - - group: rbac.authorization.k8s.io/v1 - kind: RoleBinding - - group: v1 - kind: Service - - group: v1 - kind: PersistentVolumeClaim - - group: v1 - kind: ConfigMap - - group: v1 - kind: Secret - - group: apps/v1 - kind: Deployment - + - group: v1 + kind: ServiceAccount + - group: rbac.authorization.k8s.io/v1 + kind: Role + - group: rbac.authorization.k8s.io/v1 + kind: RoleBinding + - group: v1 + kind: Service + - group: v1 + kind: PersistentVolumeClaim + - group: v1 + kind: ConfigMap + - group: v1 + kind: Secret + - group: apps/v1 + kind: Deployment diff --git a/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/argo.yaml b/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/argo.yaml index e21da1050..8084e2cc2 100644 --- a/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/argo.yaml +++ b/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/argo.yaml @@ -146,7 +146,7 @@ data: artifactRepository: { s3: { - bucket: '{{ if .Values.managedstorage.enabled }}{{ tpl .Values.managedstorage.gcsBucketName . }}{{ else }}mlpipeline{{ end }}', + bucket: '{{ if .Values.managedstorage.enabled }}{{ .Values.managedstorage.gcsBucketName }}{{ else }}mlpipeline{{ end }}', keyPrefix: artifacts, endpoint: minio-service.{{ .Release.Namespace }}:9000, insecure: true, diff --git a/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/gcp_secret.yaml b/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/gcp_secret.yaml deleted file mode 100644 index 1070094e1..000000000 --- a/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/gcp_secret.yaml +++ /dev/null @@ -1,11 +0,0 @@ -apiVersion: v1 -kind: Secret -metadata: - name: {{ .Values.gcpSecretName }} - labels: - app: gcp-sa - app.kubernetes.io/name: {{ .Release.Name }} -type: Opaque -data: - application_default_credentials.json: {{ .Values.serviceAccountCredential | quote }} - user-gcp-sa.json: {{ .Values.serviceAccountCredential | quote }} diff --git a/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/minio.yaml b/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/minio.yaml index e3bb83049..a0dfbc388 100644 --- a/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/minio.yaml +++ b/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/minio.yaml @@ -43,7 +43,7 @@ spec: - name: PROJECT_ID valueFrom: configMapKeyRef: - name: {{ .Values.gcpDefaultConfigName}} + name: "{{ .Values.gcpDefaultConfigName }}" key: "project_id" - name: MINIO_ACCESS_KEY value: minio @@ -58,14 +58,6 @@ spec: name: minio ports: - containerPort: 9000 - volumeMounts: - - name: gcp-sa-token - mountPath: "/etc/credentials" - readOnly: true - volumes: - - name: gcp-sa-token - secret: - secretName: {{ .Values.gcpSecretName }} {{ end }} --- {{ if not .Values.managedstorage.enabled }} diff --git a/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/mysql.yaml b/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/mysql.yaml index 88bb2622d..dcd78bce7 100644 --- a/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/mysql.yaml +++ 
b/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/mysql.yaml @@ -60,14 +60,9 @@ spec: volumeMounts: - mountPath: /cloudsql name: cloudsql - - mountPath: /credentials - name: gcp-sa-token volumes: - name: cloudsql emptyDir: - - name: gcp-sa-token - secret: - secretName: {{ .Values.gcpSecretName }} --- apiVersion: v1 kind: Secret diff --git a/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/pipeline.yaml b/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/pipeline.yaml index b38993ec2..512f66211 100644 --- a/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/pipeline.yaml +++ b/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/pipeline.yaml @@ -112,6 +112,13 @@ rules: - get - list - watch + - apiGroups: + - '' + resources: + - events + verbs: + - create + - patch --- apiVersion: rbac.authorization.k8s.io/v1 kind: Role @@ -696,20 +703,27 @@ spec: spec: containers: - env: + {{ if .Values.managedstorage.enabled }} + - name: HAS_DEFAULT_BUCKET + value: "true" + - name: BUCKET_NAME + value: "{{ .Values.managedstorage.gcsBucketName }}" + {{ else }} - name: HAS_DEFAULT_BUCKET valueFrom: configMapKeyRef: - name: {{ .Values.gcpDefaultConfigName}} + name: "{{ .Values.gcpDefaultConfigName }}" key: "has_default_bucket" - name: BUCKET_NAME valueFrom: configMapKeyRef: - name: {{ .Values.gcpDefaultConfigName}} + name: "{{ .Values.gcpDefaultConfigName }}" key: "bucket_name" + {{ end }} - name: PROJECT_ID valueFrom: configMapKeyRef: - name: {{ .Values.gcpDefaultConfigName}} + name: "{{ .Values.gcpDefaultConfigName }}" key: "project_id" - name: POD_NAMESPACE valueFrom: @@ -721,7 +735,7 @@ spec: value: "false" {{ if .Values.managedstorage.enabled }} - name: OBJECTSTORECONFIG_BUCKETNAME - value: '{{ tpl .Values.managedstorage.gcsBucketName . }}' + value: "{{ .Values.managedstorage.gcsBucketName }}" - name: DBCONFIG_DBNAME {{ if .Values.managedstorage.databaseNamePrefix }} value: '{{ .Values.managedstorage.databaseNamePrefix }}_pipeline' diff --git a/manifests/gcp_marketplace/chart/kubeflow-pipelines/values.yaml b/manifests/gcp_marketplace/chart/kubeflow-pipelines/values.yaml index ef51501b0..a772dd4a6 100644 --- a/manifests/gcp_marketplace/chart/kubeflow-pipelines/values.yaml +++ b/manifests/gcp_marketplace/chart/kubeflow-pipelines/values.yaml @@ -17,23 +17,13 @@ images: cacheserver: gcr.io/ml-pipeline/google/pipelines/cacheserver:dummy cachedeployer: gcr.io/ml-pipeline/google/pipelines/cachedeployer:dummy -gcpSecretName: "user-gcp-sa" serviceAccountCredential: "" gcpDefaultConfigName: "gcp-default-config" managedstorage: enabled: false cloudsqlInstanceConnectionName: null - # gcsBucketName should be determined by cloudsqlInstanceConnectionName to make - # sure user is always using a valid pair of connection name + gcs bucket name. - # - # gcsBucketName is used in two places, so I wrote a template string here that - # can be evaluated in each place. 
- # - # Name pattern: - # If spedify databaseNamePrefix: %{cloudsqlInstanceConnectionName}-%{truncedDatabaseNamePrefix} - # else: %{cloudsqlInstanceConnectionName}-%{releaseName} - gcsBucketName: '{{ if .Values.managedstorage.databaseNamePrefix }}{{ printf "%s-%s" .Values.managedstorage.cloudsqlInstanceConnectionName .Values.managedstorage.databaseNamePrefix | replace ":" "-" | lower | trunc 60 }}{{ else }}{{ printf "%s-%s" .Values.managedstorage.cloudsqlInstanceConnectionName .Release.Name | replace ":" "-" | lower | trunc 60 }}{{ end }}' + gcsBucketName: null databaseNamePrefix: null dbUsername: 'root' dbPassword: '' diff --git a/manifests/gcp_marketplace/hack/release.sh b/manifests/gcp_marketplace/hack/release.sh new file mode 100755 index 000000000..52e68bf3d --- /dev/null +++ b/manifests/gcp_marketplace/hack/release.sh @@ -0,0 +1,30 @@ +#!/bin/bash +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -ex + +TAG_NAME=$1 +DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null && pwd)" + +if [[ -z "$TAG_NAME" ]]; then + echo "Usage: release.sh " >&2 + exit 1 +fi + +echo "This release script uses yq, it can be downloaded at https://github.com/mikefarah/yq/releases/tag/3.3.0" +yq w -i "$DIR/../schema.yaml" "x-google-marketplace.publishedVersion" "$TAG_NAME" +yq w -i "$DIR/../schema.yaml" "x-google-marketplace.publishedVersionMetadata.releaseNote" "Based on $TAG_NAME version." +yq w -i "$DIR/../chart/kubeflow-pipelines/templates/application.yaml" "spec.descriptor.version" "$TAG_NAME" diff --git a/manifests/gcp_marketplace/schema.yaml b/manifests/gcp_marketplace/schema.yaml index b347e7312..8ac1198ee 100644 --- a/manifests/gcp_marketplace/schema.yaml +++ b/manifests/gcp_marketplace/schema.yaml @@ -1,12 +1,11 @@ x-google-marketplace: schemaVersion: v2 applicationApiVersion: v1beta1 - publishedVersion: '0.5.1' + publishedVersion: 1.0.0 publishedVersionMetadata: - releaseNote: >- - Based on 0.5.1 version. + releaseNote: Based on 1.0.0 version. 
releaseTypes: - - Feature + - Feature recommended: false managedUpdates: kalmSupported: false @@ -87,12 +86,12 @@ x-google-marketplace: type: FULL deployerServiceAccount: roles: - - type: ClusterRole # This is a cluster-wide ClusterRole - rulesType: CUSTOM # We specify our own custom RBAC roles - rules: - - apiGroups: ['apiextensions.k8s.io', 'rbac.authorization.k8s.io'] - resources: ['customresourcedefinitions', 'clusterroles', 'clusterrolebindings'] - verbs: ['*'] + - type: ClusterRole # This is a cluster-wide ClusterRole + rulesType: CUSTOM # We specify our own custom RBAC roles + rules: + - apiGroups: ['apiextensions.k8s.io', 'rbac.authorization.k8s.io'] + resources: ['customresourcedefinitions', 'clusterroles', 'clusterrolebindings'] + verbs: ['*'] clusterConstraints: resources: - replicas: 3 @@ -105,8 +104,7 @@ x-google-marketplace: gcp: nodes: requiredOauthScopes: - - https://www.googleapis.com/auth/cloud-platform - + - https://www.googleapis.com/auth/cloud-platform properties: name: type: string @@ -120,49 +118,79 @@ properties: type: boolean title: Use managed storage description: |- - Use Cloud SQL and GCS for storing the data. - Using CloudSQL and GCS provides better reliability and performance, + Select this option to store pipeline artifacts and metadata using + Cloud SQL and Cloud Storage. Otherwise, pipeline artifacts and metadata + are stored on Compute Engine persistent disks. + + Cloud SQL and Cloud Storage provides better reliability and performance, as well as features such as data backup and usage monitoring. - This is the recommended option especially for production scenarios. - If false, the data will be stored in GCE Persistent Disk. + Using Cloud SQL and Cloud Storage is recommended for production + deployments. + + To configure your cluster to use managed storage, specify your Cloud + Storage bucket and Cloud SQL connection details below. To preserve your + data while reinstalling Kubeflow Pipelines, you must specify the same + managed storage options as your previous instance. + + If upgrading from Kubeflow Pipelines 0.5.1 with managed storage, the + Cloud Storage bucket was auto-generated in the same project. This bucket + should be named like "-". default: false + managedstorage.gcsBucketName: + type: string + title: Artifact storage Cloud Storage bucket (Managed storage only) + description: |- + If you are deploying Kubeflow Pipelines with managed storage, specify the + Cloud Storage bucket that you would like Kubeflow Pipelines to store + pipeline artifacts in. Learn more about creating a new bucket, + see https://cloud.google.com/storage/docs/creating-buckets. + + Warning, if the provided bucket doesn't exist, the deployer will + automatically create a bucket for you in us-central1, but you won't be + able to customize the bucket's parameters. managedstorage.cloudsqlInstanceConnectionName: type: string title: Cloud SQL instance connection name (Managed storage only) description: |- - This field must be specified if choose to use managed storage. - Provide the instance connection name for an existing Cloud SQL for MySQL instance. - The instance connection name can be found on the instance detail page in the Cloud SQL console. - The instance connection name uses the format project:zone:instance-name, for example,myproject:us-central1:myinstance. - For more details on how to create a new instance, see https://cloud.google.com/sql/docs/mysql/quickstart. 
+ If you are deploying Kubeflow Pipelines with managed storage, specify the + instance connection name for your MySQL instance on Cloud SQL. + The instance connection name can be found on the instance detail page in + the Cloud SQL console. The instance connection name uses the format + "::", for example: myproject:us-central1:myinstance. + Learn more about setting up your Cloud SQL instance, see https://cloud.google.com/sql/docs/mysql/quickstart. managedstorage.dbUsername: type: string title: Database username (Managed storage only) description: |- - The database username to use when connecting to the Cloud SQL instance. - If you leave this field empty, the deployment will use the default 'root' user account to connect. - For more details about MySQL users, see https://cloud.google.com/sql/docs/mysql/users. + If you are deploying Kubeflow Pipelines with managed storage, specify the + database username for Kubeflow Pipelines to use when connecting to your + MySQL instance on Cloud SQL. If you leave this field empty, this value + defaults to 'root'. Learn more about MySQL users, see https://cloud.google.com/sql/docs/mysql/users. managedstorage.dbPassword: type: string title: Database password (Managed storage only) x-google-marketplace: type: MASKED_FIELD description: |- - The database password to use when connecting to the Cloud SQL instance. - If you leave this field empty, the deployment will try to connect to the instance without providing a password. - This will fail if a password is required for the username you provided. + If you are deploying Kubeflow Pipelines with managed storage, specify the + database password for Kubeflow Pipelines to use when connecting to your + MySQL instance on Cloud SQL. If you leave this field empty, Kubeflow + Pipelines connects to your Cloud SQL instance without providing a + password. This will fail if a password is required for the username you + specified. managedstorage.databaseNamePrefix: type: string title: Database name prefix (Managed storage only) description: |- - The prefix of the database name. Kubeflow Pipelines will create two databases, - [prefix]_pipeline and [prefix]_metadata. - Use lowercase letters, numbers, and hyphens. Start with a letter. - If the prefix specified is same as an old deployment in the past, - the deployment will recover from an old deployment. - If this not specified, the app instance name will be used. + If you are deploying Kubeflow Pipelines with managed storage, specify the + database name prefix. The prefix value must contain only lowercase + letters, numbers and underscores. Additionally, the prefix must start with a letter. + During the deployment process, Kubeflow Pipelines creates two databases, + "_pipeline" and "_metadata". If the prefix specified + matches a previous deployment, this deployment will reuse the existing + databases. If this value is not specified, the application instance name + is used. 
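The managed-storage fields described above map to concrete GCP resources. As a rough sketch only (the project, bucket, and instance names below are invented placeholders, not values from this patch), they could be prepared before deployment like this:

```bash
# Placeholders - substitute your own project, bucket, and Cloud SQL instance names.
PROJECT=my-project
BUCKET=my-kfp-artifacts      # value for the "Artifact storage Cloud Storage bucket" field
INSTANCE=my-kfp-mysql        # existing Cloud SQL (MySQL) instance

# Create the artifact bucket yourself to control its location and parameters
# (otherwise the deployer creates one in us-central1 with defaults):
gsutil mb -p "$PROJECT" -l us-central1 "gs://$BUCKET"

# Look up the instance connection name (project:region:instance) expected by the
# "Cloud SQL instance connection name" field:
gcloud sql instances describe "$INSTANCE" --project "$PROJECT" \
  --format='value(connectionName)'
```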
required: - - name - - namespace - +- name +- namespace diff --git a/manifests/kustomize/base/argo/kustomization.yaml b/manifests/kustomize/base/argo/kustomization.yaml index 1a00e5777..890cb4cb3 100644 --- a/manifests/kustomize/base/argo/kustomization.yaml +++ b/manifests/kustomize/base/argo/kustomization.yaml @@ -2,7 +2,6 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization resources: -- minio-artifact-secret.yaml - workflow-controller-configmap.yaml - workflow-controller-deployment.yaml - workflow-controller-role.yaml diff --git a/manifests/kustomize/base/argo/minio-artifact-secret.yaml b/manifests/kustomize/base/argo/minio-artifact-secret.yaml deleted file mode 100644 index 3ae64f116..000000000 --- a/manifests/kustomize/base/argo/minio-artifact-secret.yaml +++ /dev/null @@ -1,8 +0,0 @@ -apiVersion: v1 -data: - accesskey: bWluaW8= - secretkey: bWluaW8xMjM= -kind: Secret -metadata: - name: mlpipeline-minio-artifact -type: Opaque diff --git a/manifests/kustomize/base/argo/workflow-controller-configmap.yaml b/manifests/kustomize/base/argo/workflow-controller-configmap.yaml index aafe839c7..ffb01c5fb 100644 --- a/manifests/kustomize/base/argo/workflow-controller-configmap.yaml +++ b/manifests/kustomize/base/argo/workflow-controller-configmap.yaml @@ -5,14 +5,14 @@ metadata: data: config: | { - namespace: $(NAMESPACE), + namespace: $(kfp-namespace), executorImage: gcr.io/ml-pipeline/argoexec:v2.7.5-license-compliance, artifactRepository: { s3: { - bucket: $(BUCKET_NAME), + bucket: $(kfp-artifact-bucket-name), keyPrefix: artifacts, - endpoint: minio-service.$(NAMESPACE):9000, + endpoint: minio-service.$(kfp-namespace):9000, insecure: true, accessKeySecret: { name: mlpipeline-minio-artifact, diff --git a/manifests/kustomize/base/cache-deployer/cluster-scoped/cache-deployer-clusterrolebinding.yaml b/manifests/kustomize/base/cache-deployer/cluster-scoped/cache-deployer-clusterrolebinding.yaml index 7e2d2dcf8..c0f19d757 100644 --- a/manifests/kustomize/base/cache-deployer/cluster-scoped/cache-deployer-clusterrolebinding.yaml +++ b/manifests/kustomize/base/cache-deployer/cluster-scoped/cache-deployer-clusterrolebinding.yaml @@ -9,4 +9,4 @@ roleRef: subjects: - kind: ServiceAccount name: kubeflow-pipelines-cache-deployer-sa - namespace: $(NAMESPACE) +# namespace will be added by kustomize automatically according to the namespace field in kustomization.yaml diff --git a/manifests/kustomize/base/cache-deployer/cluster-scoped/cache-deployer-sa.yaml b/manifests/kustomize/base/cache-deployer/cluster-scoped/cache-deployer-sa.yaml new file mode 100644 index 000000000..affada3d1 --- /dev/null +++ b/manifests/kustomize/base/cache-deployer/cluster-scoped/cache-deployer-sa.yaml @@ -0,0 +1,4 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + name: kubeflow-pipelines-cache-deployer-sa diff --git a/manifests/kustomize/base/cache-deployer/cluster-scoped/kustomization.yaml b/manifests/kustomize/base/cache-deployer/cluster-scoped/kustomization.yaml index 6e741ab8c..2b941ae3f 100644 --- a/manifests/kustomize/base/cache-deployer/cluster-scoped/kustomization.yaml +++ b/manifests/kustomize/base/cache-deployer/cluster-scoped/kustomization.yaml @@ -4,4 +4,8 @@ kind: Kustomization resources: - cache-deployer-clusterrole.yaml - cache-deployer-clusterrolebinding.yaml + # HACK: although a service account(SA) is not a cluster-scoped resource. + # Presence of a SA referred by a clusterrolebinding allows kustomize to auto-add + # namespace for the clusterrolebinding's SA ref. 
+ - cache-deployer-sa.yaml \ No newline at end of file diff --git a/manifests/kustomize/base/cache-deployer/kustomization.yaml b/manifests/kustomize/base/cache-deployer/kustomization.yaml index 89959036c..fadd6a383 100644 --- a/manifests/kustomize/base/cache-deployer/kustomization.yaml +++ b/manifests/kustomize/base/cache-deployer/kustomization.yaml @@ -1,9 +1,9 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - resources: - - cache-deployer-role.yaml - - cache-deployer-rolebinding.yaml - - cache-deployer-sa.yaml - - cache-deployer-deployment.yaml - \ No newline at end of file +- cache-deployer-role.yaml +- cache-deployer-rolebinding.yaml +- cache-deployer-deployment.yaml +images: +- name: gcr.io/ml-pipeline/cache-deployer + newTag: 1.0.0 diff --git a/manifests/kustomize/base/cache/cache-deployment.yaml b/manifests/kustomize/base/cache/cache-deployment.yaml index a0ac2ec84..e1ba16797 100644 --- a/manifests/kustomize/base/cache/cache-deployment.yaml +++ b/manifests/kustomize/base/cache/cache-deployment.yaml @@ -23,18 +23,18 @@ spec: - name: DBCONFIG_DB_NAME valueFrom: configMapKeyRef: - name: mysql-configmap - key: cache_db + name: pipeline-install-config + key: cacheDb - name: DBCONFIG_HOST_NAME valueFrom: configMapKeyRef: - name: mysql-configmap - key: host + name: pipeline-install-config + key: dbHost - name: DBCONFIG_PORT valueFrom: configMapKeyRef: - name: mysql-configmap - key: port + name: pipeline-install-config + key: dbPort - name: DBCONFIG_USER valueFrom: secretKeyRef: diff --git a/manifests/kustomize/base/cache/kustomization.yaml b/manifests/kustomize/base/cache/kustomization.yaml index 82d7cb1f2..a7d8c3dc0 100644 --- a/manifests/kustomize/base/cache/kustomization.yaml +++ b/manifests/kustomize/base/cache/kustomization.yaml @@ -1,9 +1,11 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - resources: - - cache-deployment.yaml - - cache-service.yaml - - cache-role.yaml - - cache-rolebinding.yaml - - cache-sa.yaml +- cache-deployment.yaml +- cache-service.yaml +- cache-role.yaml +- cache-rolebinding.yaml +- cache-sa.yaml +images: +- name: gcr.io/ml-pipeline/cache-server + newTag: 1.0.0 diff --git a/manifests/kustomize/base/kustomization.yaml b/manifests/kustomize/base/kustomization.yaml index e51281ec0..a5f4df51d 100644 --- a/manifests/kustomize/base/kustomization.yaml +++ b/manifests/kustomize/base/kustomization.yaml @@ -1,12 +1,11 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - +namespace: kubeflow bases: - application - argo - pipeline - metadata -- mysql - cache - cache-deployer - tekton @@ -17,100 +16,52 @@ resources: images: - name: gcr.io/ml-pipeline/api-server newName: docker.io/aipipeline/api-server - newTag: 0.5.1 + newTag: 1.0.0 - name: gcr.io/ml-pipeline/persistenceagent newName: docker.io/aipipeline/persistenceagent - newTag: 0.5.1 - - name: gcr.io/ml-pipeline/scheduledworkflow - newTag: 0.5.1 + newTag: 1.0.0 - name: gcr.io/ml-pipeline/frontend newName: docker.io/aipipeline/frontend - newTag: 0.5.1 - - name: gcr.io/ml-pipeline/viewer-crd-controller - newTag: 0.5.1 - - name: gcr.io/ml-pipeline/visualization-server - newTag: 0.5.1 + newTag: 1.0.0 - name: gcr.io/ml-pipeline/metadata-writer newName: docker.io/aipipeline/metadata-writer - newTag: 0.5.1 - - name: gcr.io/ml-pipeline/cache-server - newTag: 0.5.1 - - name: gcr.io/ml-pipeline/cache-deployer - newTag: 0.5.1 + newTag: 1.0.0 # Used by Kustomize configMapGenerator: - - name: pipeline-install-config - env: params.env - +- name: pipeline-install-config + env: 
params.env secretGenerator: - - name: mysql-secret - env: params-db-secret.env - +- name: mysql-secret + env: params-db-secret.env vars: - - name: NAMESPACE - objref: - kind: Deployment - apiVersion: apps/v1 - name: ml-pipeline - fieldref: - fieldpath: metadata.namespace - - name: APP_NAME - objref: - kind: ConfigMap - name: pipeline-install-config - apiVersion: v1 - fieldref: - fieldpath: data.appName - - name: APP_VERSION - objref: - kind: ConfigMap - name: pipeline-install-config - apiVersion: v1 - fieldref: - fieldpath: data.appVersion - - name: DBSERVICE_HOST - objref: - kind: ConfigMap - name: pipeline-install-config - apiVersion: v1 - fieldref: - fieldpath: data.dbHost - - name: DBSERVICE_PORT - objref: - kind: ConfigMap - name: pipeline-install-config - apiVersion: v1 - fieldref: - fieldpath: data.dbPort - - name: DBNAME_MLMD - objref: - kind: ConfigMap - name: pipeline-install-config - apiVersion: v1 - fieldref: - fieldpath: data.mlmdDb - - name: DBNAME_CACHE - objref: - kind: ConfigMap - name: pipeline-install-config - apiVersion: v1 - fieldref: - fieldpath: data.cacheDb - - name: DBNAME_PIPELINE - objref: - kind: ConfigMap - name: pipeline-install-config - apiVersion: v1 - fieldref: - fieldpath: data.pipelineDb - - name: BUCKET_NAME - objref: - kind: ConfigMap - name: pipeline-install-config - apiVersion: v1 - fieldref: - fieldpath: data.bucketName - +- name: kfp-namespace + objref: + kind: Deployment + apiVersion: apps/v1 + name: ml-pipeline + fieldref: + fieldpath: metadata.namespace +- name: kfp-app-name + objref: + kind: ConfigMap + name: pipeline-install-config + apiVersion: v1 + fieldref: + fieldpath: data.appName +- name: kfp-app-version + objref: + kind: ConfigMap + name: pipeline-install-config + apiVersion: v1 + fieldref: + fieldpath: data.appVersion +- name: kfp-artifact-bucket-name + objref: + kind: ConfigMap + name: pipeline-install-config + apiVersion: v1 + fieldref: + fieldpath: data.bucketName configurations: - - params.yaml +- params.yaml diff --git a/manifests/kustomize/base/metadata/kustomization.yaml b/manifests/kustomize/base/metadata/kustomization.yaml index a44ba147f..5bd67c255 100644 --- a/manifests/kustomize/base/metadata/kustomization.yaml +++ b/manifests/kustomize/base/metadata/kustomization.yaml @@ -1,13 +1,11 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - resources: - - metadata-configmap.yaml - - metadata-grpc-deployment.yaml - - metadata-grpc-service.yaml - - metadata-envoy-deployment.yaml - - metadata-envoy-service.yaml - - metadata-writer-deployment.yaml - - metadata-writer-role.yaml - - metadata-writer-rolebinding.yaml - - metadata-writer-sa.yaml +- metadata-grpc-configmap.yaml +- metadata-grpc-deployment.yaml +- metadata-grpc-service.yaml +- metadata-envoy-deployment.yaml +- metadata-envoy-service.yaml +images: +- name: gcr.io/ml-pipeline/metadata-envoy + newTag: 1.0.0 diff --git a/manifests/kustomize/base/metadata/metadata-envoy-deployment.yaml b/manifests/kustomize/base/metadata/metadata-envoy-deployment.yaml index 1ccf7122f..3de11bea0 100644 --- a/manifests/kustomize/base/metadata/metadata-envoy-deployment.yaml +++ b/manifests/kustomize/base/metadata/metadata-envoy-deployment.yaml @@ -16,7 +16,7 @@ spec: spec: containers: - name: container - image: gcr.io/ml-pipeline/envoy:metadata-grpc + image: gcr.io/ml-pipeline/metadata-envoy:dummy ports: - name: md-envoy containerPort: 9090 diff --git a/manifests/kustomize/base/metadata/metadata-configmap.yaml b/manifests/kustomize/base/metadata/metadata-grpc-configmap.yaml 
similarity index 100% rename from manifests/kustomize/base/metadata/metadata-configmap.yaml rename to manifests/kustomize/base/metadata/metadata-grpc-configmap.yaml diff --git a/manifests/kustomize/base/metadata/metadata-grpc-deployment.yaml b/manifests/kustomize/base/metadata/metadata-grpc-deployment.yaml index 45712f102..7e383d5ca 100644 --- a/manifests/kustomize/base/metadata/metadata-grpc-deployment.yaml +++ b/manifests/kustomize/base/metadata/metadata-grpc-deployment.yaml @@ -31,18 +31,18 @@ spec: - name: MYSQL_DATABASE valueFrom: configMapKeyRef: - name: mysql-configmap - key: mlmd_db + name: pipeline-install-config + key: mlmdDb - name: MYSQL_HOST valueFrom: configMapKeyRef: - name: mysql-configmap - key: host + name: pipeline-install-config + key: dbHost - name: MYSQL_PORT valueFrom: configMapKeyRef: - name: mysql-configmap - key: port + name: pipeline-install-config + key: dbPort command: ["/bin/metadata_store_server"] args: ["--grpc_port=8080", "--mysql_config_database=$(MYSQL_DATABASE)", diff --git a/manifests/kustomize/base/mysql/kustomization.yaml b/manifests/kustomize/base/mysql/kustomization.yaml deleted file mode 100644 index 4765ac8a1..000000000 --- a/manifests/kustomize/base/mysql/kustomization.yaml +++ /dev/null @@ -1,5 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization - -resources: - - mysql-configmap.yaml diff --git a/manifests/kustomize/base/mysql/mysql-configmap.yaml b/manifests/kustomize/base/mysql/mysql-configmap.yaml deleted file mode 100644 index 537a44522..000000000 --- a/manifests/kustomize/base/mysql/mysql-configmap.yaml +++ /dev/null @@ -1,10 +0,0 @@ -apiVersion: v1 -kind: ConfigMap -metadata: - name: mysql-configmap -data: - host: $(DBSERVICE_HOST) - port: "$(DBSERVICE_PORT)" - mlmd_db: $(DBNAME_MLMD) - cache_db: $(DBNAME_CACHE) - pipeline_db: $(DBNAME_PIPELINE) diff --git a/manifests/kustomize/base/params.env b/manifests/kustomize/base/params.env index b0068518e..a274506fc 100644 --- a/manifests/kustomize/base/params.env +++ b/manifests/kustomize/base/params.env @@ -1,5 +1,5 @@ appName=pipeline -appVersion=0.5.1 +appVersion=1.0.0 dbHost=mysql dbPort=3306 mlmdDb=metadb diff --git a/manifests/kustomize/base/params.yaml b/manifests/kustomize/base/params.yaml index ecd1794ad..1f99ef2c5 100644 --- a/manifests/kustomize/base/params.yaml +++ b/manifests/kustomize/base/params.yaml @@ -2,23 +2,7 @@ varReference: - path: data/config kind: ConfigMap -- path: data/bucket_name - kind: ConfigMap -- path: data/project_id - kind: ConfigMap -- path: data/host - kind: ConfigMap -- path: data/port - kind: ConfigMap -- path: data/mlmd_db - kind: ConfigMap -- path: data/cache_db - kind: ConfigMap -- path: data/pipeline_db - kind: ConfigMap - path: metadata/name kind: Application - path: spec/descriptor/version kind: Application -- path: spec/template/spec/containers/image - kind: Deployment diff --git a/manifests/kustomize/base/pipeline-application.yaml b/manifests/kustomize/base/pipeline-application.yaml index f53f3778f..6d8cdf323 100644 --- a/manifests/kustomize/base/pipeline-application.yaml +++ b/manifests/kustomize/base/pipeline-application.yaml @@ -1,7 +1,7 @@ apiVersion: app.k8s.io/v1beta1 kind: Application metadata: - name: $(APP_NAME) + name: $(kfp-app-name) annotations: kubernetes-engine.cloud.google.com/icon: >- 
data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADMAAAAyCAYAAADx/eOPAAALuUlEQVRogd2afWxd9XnHP99bK4pS3yhiGUIRiiJUVVVqbq8ppdR20ibqpuIMtDRkUYERp29J57gMZVuxsrZiK7oZXVv5re1AiOuoG+N1DMkuytprsGPEVMouxqQZJVHEWIdQlGVxZlmZdb/747zcc869jpMO+seOdPz7nd/L83tev89zzjX8Bq795Rq9o17zXp+Tey+Ijkyboela29DRWkhffyT733pH/Z3este9F2cC6N0kNjxtjD+FdRD8UF9X7u97y7UbQFPAivC0BdllS381slun3s3z3xVhhqeds90tqR/oMB7u68z19ZZra0E/l1if3WOziPx3skrDPTr+bvDxfxImEIJbgX6gGBJ7EfHJX/ySReDHwO9KYAenyWCMFKw21GSeslwa2Z17+TcuzPBRr7B8m6Df5oOJqdPAR/u6cm/2lmv3At+IT3GiXZqbcaxSLsfRoTsvn7XL2jE87ZXGnwf+VGiDY86ETM1wU1+XjvSW/RlgTJADQ2QaCZKWcX1/aDIcjE8i3SdzZLjn0lm8pJXD02417BM+gLmq2Rqjr/d16Vu95dp6wc8Ra5O8NrPIcoZCvIR1H+KZkd2qLcfnRYUZOuorJO+3uQt0RerolGYZR7r5+C9ZATwPviGyQprd6Liszy3bnwVKwGMjPbnFyxJmeNpX2T4gaR/QmmSpyYZTho/2depMb9k/kNh3KawuJ1bWauHzUcyXRpZAv5Zmg7aHBLcmNN9ECAFeAO3s69KZ3nLtDuF9dnBs0IT9JO24rbPb0JfP2syCZpFfE5q1mRWcvlgMNcwMTRq9z/+OWXdx4AGjvX1deqC37DbwPwOrMgsufol5mWMWs1ivEbjTrOCtLNNb+udygqsNbUBtopR/NkuuwTJ6Hxsw67KSuvH5MPDA/nJttfGTdUFCMUlp/ALwOtIs9muBxpnFnBzuSQf21oP/BbXclVvumWuTaDN8WNBm2GizJkxPM0CDMA2WGZ72bbb/Njue2TRj9Il/PcG87SeBz4ZTNaSTsmctHcO8SqDp14d7dCFLZ2v/3OpQ023Ah4n65kohvETUCdcsfmuilD+bpNdgGZvOODuHqYGIVGCec9g7+7o031v2jaBTiD0ysxbHRnZrPktzyz1zK7f0z10nh5pWwLRhvZro1KqznVJhNB8UyDeSsU4zAOiIXV1OuEqQ2AR79nflXgcY6dGLwIvR8q39cy1b+uc2Emo6dI824BpMSxz8iVhy4m/2WiYHdV5UmOHp2mpwm52ESCdwRn+9v0tPAWzpn9sAFAQbMdc60PaHsFZEWd9uxk4z8G3seykECfObTEd2KmuZG4CWyLXkYLMwtiYt+hMsTUdAEZQzjs9apv66SHJRk73ZjBQ+iRu29s+1VEr5OImmXs4MHUahVoLWgK23wbv6OrU4OulcuHYehWsVHhpXwpE2FNRayTszX2cwDpQEzTB+QvrJHCXUaigk+c++aXZiE98YmUVgV19X7u3ypH/fgfUA5h2usY2jNjmWoGVn50nvC9T2NviA5OPBGPW91OlG+0Xa1WJhhqadk3WjpKCilQIQFP19XZocnfIHgIeFWyNh6goXyX6gdNWfU8aJ5tNjEheAHZVS/ruGj0s8k6VPhh6ms6kwgoLl1aGuCEuSpwXfHZ2qrTJ+HHkNCpOjmbdFcEcGUIhUSj/H65rPO6j+766U8i/QXV0z8cqJc4btwF8AtWgtMb1wj+j41Df/s1EYQwdEDiqM3hDes9quGY3IKoYOvCrU7HlCoZtEWapPkzEpsU8uq8b36a6uBqaBv5l45URLpZT/pmGH8LnkvlAdAOt1oeXqRsuYTjlEMJiXvWN/Z+5szfqioKcOKo7qr/nAEesKiOyv2A/q88rOx8+8bPhK5dUTAA8jbUT6MuKnbKteNVHKP23xCeD1LC0F2TWOmzoAKEiWxmC+sr8rN1OerF2HGaqXFcZhDWaYj11S4ZxcXxVqyKqPZOeNTwM7Jkr5BeDPQJ8NFQaoC/gZ26rXT5TyxxAfRx6P94d0gU0pYYama+tsbwix/AHM4fKUrwAeB68kRJ5AZsWWieGTjLipsVCgrKCwKHF7pZQ/RXf104j76i4ZMmquxkzRXb2zUsqfxdxsfCiA70hRjZbpCDHmJcRdeZPDHkVck0Ul5PeHZ81DgHxKtglXaHCxVN9fr5TyR9hW3QA8Amqp5526SyKtBEbZVv1eZeZkbqKU7xfsFJwPqRW29s+11oUxnUhnkHf2dWoB+R5Jv5dNaGHh1wog8d/ZAI+0GgVpFPTp4AfJT2Hup7u6EvMk0tpkboutEz0HMPzHyD+mu3pFpZR/Aug0Pgm0RLkvFzLWYfjDvs7cqfKUt2LuXTLhue5mdWhVDJdEzxDDcRKawceN9lRePVkDfgBcR/LKVqNpz/s08DO6q4VKKT8j8zHgJ1HyzA1P11YZjfV1arw85auBR4RalDB5lEjDKi0CgPPphKZ0QiNRwUQeg88B2ydKreew9yH1NCxe/r4GaZpt1Vsrh/JnDDcBLwPkbLVgf6s86RXYj4KvtJKJM8KsGLkSlsmUL6mSg1RJY1xD7KmU8sfprnYgBqJsGVsiEfupsca7FfMo26p/OfHKiVqllB8HyPV16VxfV66G/G1QBwY5xvCgTT7X3/MTaBbFVr0fJvqw2ASZ+yul/FN0V68CHsesiDl3UopM3CwhDZDD/Dnwj3S/sjoYAMqTtc1YX02jVqYOiuuqsAKIkqZCfFIz/IrfFY8gDrKt2gI8irSuwQezyTeNaOl+6qYb+fpYGKEXJE9GSTObK5ItrheaLHE5/XRKcHul+kYN8x2kzWlLNNuVtUqibzKW5CBjxUoszO7NWrS1E/xWvMeJjck2WQHEKJeMD+qH4gWCSvg00m3AVxv5TMRKsp9Cs0Q/Ka/1BOZQNBSXMz2b9Q5oO9JCKgkqg2aKofl8uvTPeE1w3t5KKf8y26pFxINhLRa5R9JV6huT/aZuFu7Ds+A9jBdj+VIvZz2b9BL2Xi5yJQEgUFqinI9SZBDx358o5Q/HiRGtquOEmxJu6DcbC/afQWxnvHg+Odrwm2bP5txh5OEYjOM3vaiu8qqHJw1mPmK/Xs7HJf0LRncDMF5cAL6NWUxDrX/duwbczljxjSzvTX+gtXU3MBlrRCltrsxBTgorACKrRGf5bczOiVLrhUL74B2F9oHVjBd/iLwTWEhr+CIWaLYumDjIWLHha+aSwvRs1iJmJ9Kb9ZJRETS3ACsMC8i1ZNwgXZDYWTmU/1WhfeAW8Cjo+UL7wDrGik8jfid0kYz/Z2ODepv+GPIY+FAznpcUJhAo9w5mh81CFtEsWieCTzwXkogmfKBSyh8ttA98EDPqoPouYqYLxYEPMVY8itmEeTM+KEaqZhVAkiPPIL6QDPhLFiYQSC9J7M3mGlF/24zWSvwIM1xoH2gF/sFiTcSPxQakqUJxsIPx4jGCr0AzCUYTbROJ7DPAdsbSAX9ZwgDs3qTDi
MGUOxF/1DgfekLVsPf0sw8DPARsDNwy8iYBXov4p0L7wC2MF99CfBJ4rqmbJbO/qYE+x1jx5HK8Xtp/aFgHDM/FX+RM9FFjHjjj4NV3HvlPsP4g+SqQgm6zCuvJQnHgi4wVz2JuAj8RnLGEVaCf8Y8cuRQ2L0mYEBB2Gb8ZHKD4NQBx+0Qpf7LQPrAVVGqiiWTpCcEn4QcLxcF7C7+aXMDahT1YX5IS5DHE/ZfC4yULEwr0DtIOWwuuvwZ8rVLKP1soDqzHPGJoyRao9b4SXiQQ30A8eO1/PJ8D7gK+BtQSJcQM8AXGlg747LUkmi91lad8J3CuZ5OeBii0D64ET2FdH1N0omWJvgLPkvwM8LmZf7lrnm3VO4CHsM4DH2P8I8vGSfK67P9q8v9wWPAcQLH4PbBHbK6Pq+3M9+Ml+6FL2dyC+WmhOLiWseKPMDeDd12uIPBrWCZ5Xds++AHsAwGlBKnoB5747c2J+aSJEuvRL8CDv/2Zz+cqh/LL/gPD//vrfwFjcI5oX6jDBwAAAABJRU5ErkJggg== @@ -12,7 +12,7 @@ spec: matchLabels: application-crd-id: kubeflow-pipelines descriptor: - version: $(APP_VERSION) + version: $(kfp-app-version) type: Kubeflow Pipelines description: |- Reusable end-to-end ML workflow diff --git a/manifests/kustomize/base/pipeline/kustomization.yaml b/manifests/kustomize/base/pipeline/kustomization.yaml index b0bb3cf70..75585a442 100644 --- a/manifests/kustomize/base/pipeline/kustomization.yaml +++ b/manifests/kustomize/base/pipeline/kustomization.yaml @@ -1,6 +1,7 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - +bases: +- metadata-writer resources: - ml-pipeline-apiserver-deployment.yaml - ml-pipeline-apiserver-role.yaml @@ -16,6 +17,7 @@ resources: - ml-pipeline-scheduledworkflow-rolebinding.yaml - ml-pipeline-scheduledworkflow-sa.yaml - ml-pipeline-ui-deployment.yaml +- ml-pipeline-ui-configmap.yaml - ml-pipeline-ui-role.yaml - ml-pipeline-ui-rolebinding.yaml - ml-pipeline-ui-sa.yaml @@ -31,3 +33,17 @@ resources: - pipeline-runner-rolebinding.yaml - pipeline-runner-sa.yaml - container-builder-sa.yaml +- viewer-sa.yaml +images: +- name: gcr.io/ml-pipeline/api-server + newTag: 1.0.0 +- name: gcr.io/ml-pipeline/persistenceagent + newTag: 1.0.0 +- name: gcr.io/ml-pipeline/scheduledworkflow + newTag: 1.0.0 +- name: gcr.io/ml-pipeline/frontend + newTag: 1.0.0 +- name: gcr.io/ml-pipeline/viewer-crd-controller + newTag: 1.0.0 +- name: gcr.io/ml-pipeline/visualization-server + newTag: 1.0.0 diff --git a/manifests/kustomize/base/pipeline/metadata-writer/kustomization.yaml b/manifests/kustomize/base/pipeline/metadata-writer/kustomization.yaml new file mode 100644 index 000000000..6a4a3b21c --- /dev/null +++ b/manifests/kustomize/base/pipeline/metadata-writer/kustomization.yaml @@ -0,0 +1,10 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization +resources: +- metadata-writer-deployment.yaml +- metadata-writer-role.yaml +- metadata-writer-rolebinding.yaml +- metadata-writer-sa.yaml +images: +- name: gcr.io/ml-pipeline/metadata-writer + newTag: 1.0.0 diff --git a/manifests/kustomize/base/metadata/metadata-writer-deployment.yaml b/manifests/kustomize/base/pipeline/metadata-writer/metadata-writer-deployment.yaml similarity index 100% rename from manifests/kustomize/base/metadata/metadata-writer-deployment.yaml rename to manifests/kustomize/base/pipeline/metadata-writer/metadata-writer-deployment.yaml diff --git a/manifests/kustomize/base/metadata/metadata-writer-role.yaml b/manifests/kustomize/base/pipeline/metadata-writer/metadata-writer-role.yaml similarity index 100% rename from manifests/kustomize/base/metadata/metadata-writer-role.yaml rename to manifests/kustomize/base/pipeline/metadata-writer/metadata-writer-role.yaml diff --git a/manifests/kustomize/base/metadata/metadata-writer-rolebinding.yaml b/manifests/kustomize/base/pipeline/metadata-writer/metadata-writer-rolebinding.yaml similarity index 100% rename from manifests/kustomize/base/metadata/metadata-writer-rolebinding.yaml rename to 
manifests/kustomize/base/pipeline/metadata-writer/metadata-writer-rolebinding.yaml diff --git a/manifests/kustomize/base/metadata/metadata-writer-sa.yaml b/manifests/kustomize/base/pipeline/metadata-writer/metadata-writer-sa.yaml similarity index 100% rename from manifests/kustomize/base/metadata/metadata-writer-sa.yaml rename to manifests/kustomize/base/pipeline/metadata-writer/metadata-writer-sa.yaml diff --git a/manifests/kustomize/base/pipeline/ml-pipeline-apiserver-deployment.yaml b/manifests/kustomize/base/pipeline/ml-pipeline-apiserver-deployment.yaml index 13115aa43..e00a35133 100644 --- a/manifests/kustomize/base/pipeline/ml-pipeline-apiserver-deployment.yaml +++ b/manifests/kustomize/base/pipeline/ml-pipeline-apiserver-deployment.yaml @@ -22,7 +22,10 @@ spec: - name: OBJECTSTORECONFIG_SECURE value: "false" - name: OBJECTSTORECONFIG_BUCKETNAME - value: $(BUCKET_NAME) + valueFrom: + configMapKeyRef: + name: pipeline-install-config + key: bucketName - name: DBCONFIG_USER valueFrom: secretKeyRef: @@ -36,18 +39,28 @@ spec: - name: DBCONFIG_DBNAME valueFrom: configMapKeyRef: - name: mysql-configmap - key: pipeline_db + name: pipeline-install-config + key: pipelineDb - name: DBCONFIG_HOST valueFrom: configMapKeyRef: - name: mysql-configmap - key: host + name: pipeline-install-config + key: dbHost - name: DBCONFIG_PORT valueFrom: configMapKeyRef: - name: mysql-configmap - key: port + name: pipeline-install-config + key: dbPort + - name: OBJECTSTORECONFIG_ACCESSKEY + valueFrom: + secretKeyRef: + name: mlpipeline-minio-artifact + key: accesskey + - name: OBJECTSTORECONFIG_SECRETACCESSKEY + valueFrom: + secretKeyRef: + name: mlpipeline-minio-artifact + key: secretkey - name: PIPELINE_RUNTIME value: tekton image: gcr.io/ml-pipeline/api-server:dummy diff --git a/manifests/kustomize/base/pipeline/ml-pipeline-scheduledworkflow-role.yaml b/manifests/kustomize/base/pipeline/ml-pipeline-scheduledworkflow-role.yaml index 4b1be056c..187272a97 100644 --- a/manifests/kustomize/base/pipeline/ml-pipeline-scheduledworkflow-role.yaml +++ b/manifests/kustomize/base/pipeline/ml-pipeline-scheduledworkflow-role.yaml @@ -28,4 +28,11 @@ rules: - watch - update - patch - - delete \ No newline at end of file + - delete +- apiGroups: + - '' + resources: + - events + verbs: + - create + - patch diff --git a/manifests/kustomize/base/pipeline/ml-pipeline-ui-configmap.yaml b/manifests/kustomize/base/pipeline/ml-pipeline-ui-configmap.yaml new file mode 100644 index 000000000..85b642297 --- /dev/null +++ b/manifests/kustomize/base/pipeline/ml-pipeline-ui-configmap.yaml @@ -0,0 +1,11 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: ml-pipeline-ui-configmap +data: + viewer-pod-template.json: |- + { + "spec": { + "serviceAccountName": "kubeflow-pipelines-viewer" + } + } diff --git a/manifests/kustomize/base/pipeline/ml-pipeline-ui-deployment.yaml b/manifests/kustomize/base/pipeline/ml-pipeline-ui-deployment.yaml index 212369dbf..cbb03c523 100644 --- a/manifests/kustomize/base/pipeline/ml-pipeline-ui-deployment.yaml +++ b/manifests/kustomize/base/pipeline/ml-pipeline-ui-deployment.yaml @@ -13,17 +13,37 @@ spec: labels: app: ml-pipeline-ui spec: + volumes: + - name: config-volume + configMap: + name: ml-pipeline-ui-configmap containers: - image: gcr.io/ml-pipeline/frontend:dummy imagePullPolicy: IfNotPresent name: ml-pipeline-ui ports: - containerPort: 3000 + volumeMounts: + - name: config-volume + mountPath: /etc/config + readOnly: true env: + - name: VIEWER_TENSORBOARD_POD_TEMPLATE_SPEC_PATH + value: 
/etc/config/viewer-pod-template.json - name: MINIO_NAMESPACE valueFrom: fieldRef: fieldPath: metadata.namespace + - name: MINIO_ACCESS_KEY + valueFrom: + secretKeyRef: + name: mlpipeline-minio-artifact + key: accesskey + - name: MINIO_SECRET_KEY + valueFrom: + secretKeyRef: + name: mlpipeline-minio-artifact + key: secretkey - name: ALLOW_CUSTOM_VISUALIZATIONS value: "true" readinessProbe: diff --git a/manifests/kustomize/base/pipeline/ml-pipeline-ui-role.yaml b/manifests/kustomize/base/pipeline/ml-pipeline-ui-role.yaml index 1b4049e87..d80adb4a5 100644 --- a/manifests/kustomize/base/pipeline/ml-pipeline-ui-role.yaml +++ b/manifests/kustomize/base/pipeline/ml-pipeline-ui-role.yaml @@ -11,9 +11,7 @@ rules: - pods - pods/log verbs: - - create - get - - list - apiGroups: - "" resources: diff --git a/manifests/kustomize/base/pipeline/ml-pipeline-ui-service.yaml b/manifests/kustomize/base/pipeline/ml-pipeline-ui-service.yaml index 53d3f391d..093ad8ca2 100644 --- a/manifests/kustomize/base/pipeline/ml-pipeline-ui-service.yaml +++ b/manifests/kustomize/base/pipeline/ml-pipeline-ui-service.yaml @@ -6,7 +6,9 @@ metadata: name: ml-pipeline-ui spec: ports: - - port: 80 + - name: http + protocol: TCP + port: 80 targetPort: 3000 selector: - app: ml-pipeline-ui \ No newline at end of file + app: ml-pipeline-ui diff --git a/manifests/kustomize/base/cache-deployer/cache-deployer-sa.yaml b/manifests/kustomize/base/pipeline/viewer-sa.yaml similarity index 51% rename from manifests/kustomize/base/cache-deployer/cache-deployer-sa.yaml rename to manifests/kustomize/base/pipeline/viewer-sa.yaml index 9cd266d73..932133c82 100644 --- a/manifests/kustomize/base/cache-deployer/cache-deployer-sa.yaml +++ b/manifests/kustomize/base/pipeline/viewer-sa.yaml @@ -1,4 +1,4 @@ apiVersion: v1 kind: ServiceAccount metadata: - name: kubeflow-pipelines-cache-deployer-sa \ No newline at end of file + name: kubeflow-pipelines-viewer diff --git a/manifests/kustomize/cluster-scoped-resources/kustomization.yaml b/manifests/kustomize/cluster-scoped-resources/kustomization.yaml index bbde83719..911c595cf 100644 --- a/manifests/kustomize/cluster-scoped-resources/kustomization.yaml +++ b/manifests/kustomize/cluster-scoped-resources/kustomization.yaml @@ -1,28 +1,27 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization +namespace: kubeflow + +resources: +- namespace.yaml bases: - ../base/application/cluster-scoped - ../base/argo/cluster-scoped - ../base/pipeline/cluster-scoped - ../base/cache-deployer/cluster-scoped - -resources: - - namespace.yaml - -# Used by Kustomize -configMapGenerator: - - name: pipeline-cluster-scoped-install-config - env: params.env - vars: - - name: NAMESPACE - objref: - kind: ConfigMap - name: pipeline-cluster-scoped-install-config - apiVersion: v1 - fieldref: - fieldpath: data.namespace - +# NOTE: var name must be unique globally to allow composition of multiple kustomize +# packages. Therefore, we added prefix `kfp-cluster-scoped-` to distinguish it from +# others. +- name: kfp-cluster-scoped-namespace + objref: + # cache deployer sa's metadata.namespace will be first transformed by namespace field in kustomization.yaml + # so that we only need to change kustomization.yaml's namespace field for namespace customization. 
+ kind: ServiceAccount + name: kubeflow-pipelines-cache-deployer-sa + apiVersion: v1 + fieldref: + fieldpath: metadata.namespace configurations: - - params.yaml +- params.yaml diff --git a/manifests/kustomize/cluster-scoped-resources/namespace.yaml b/manifests/kustomize/cluster-scoped-resources/namespace.yaml index ae346817e..3c65856e7 100644 --- a/manifests/kustomize/cluster-scoped-resources/namespace.yaml +++ b/manifests/kustomize/cluster-scoped-resources/namespace.yaml @@ -1,4 +1,4 @@ apiVersion: v1 kind: Namespace metadata: - name: $(NAMESPACE) + name: '$(kfp-cluster-scoped-namespace)' diff --git a/manifests/kustomize/cluster-scoped-resources/params.env b/manifests/kustomize/cluster-scoped-resources/params.env deleted file mode 100644 index 78166431d..000000000 --- a/manifests/kustomize/cluster-scoped-resources/params.env +++ /dev/null @@ -1 +0,0 @@ -namespace=kubeflow diff --git a/manifests/kustomize/cluster-scoped-resources/params.yaml b/manifests/kustomize/cluster-scoped-resources/params.yaml index 3bfd0e5be..cc253fe26 100644 --- a/manifests/kustomize/cluster-scoped-resources/params.yaml +++ b/manifests/kustomize/cluster-scoped-resources/params.yaml @@ -2,5 +2,3 @@ varReference: - path: metadata/name kind: Namespace -- path: subjects/namespace - kind: ClusterRoleBinding diff --git a/manifests/kustomize/env/gcp/cloudsql-proxy/cloudsql-proxy-deployment.yaml b/manifests/kustomize/env/gcp/cloudsql-proxy/cloudsql-proxy-deployment.yaml index eb7157c0a..10e1f6aaf 100644 --- a/manifests/kustomize/env/gcp/cloudsql-proxy/cloudsql-proxy-deployment.yaml +++ b/manifests/kustomize/env/gcp/cloudsql-proxy/cloudsql-proxy-deployment.yaml @@ -14,9 +14,16 @@ spec: labels: app: cloudsqlproxy spec: + serviceAccountName: kubeflow-pipelines-cloudsql-proxy containers: - image: gcr.io/cloudsql-docker/gce-proxy:1.14 name: cloudsqlproxy + env: + - name: GCP_CLOUDSQL_INSTANCE_NAME + valueFrom: + configMapKeyRef: + name: pipeline-install-config + key: gcsCloudSqlInstanceName command: ["/cloud_sql_proxy", "-dir=/cloudsql", "-instances=$(GCP_CLOUDSQL_INSTANCE_NAME)=tcp:0.0.0.0:3306", @@ -37,4 +44,4 @@ spec: name: cloudsql volumes: - name: cloudsql - emptyDir: \ No newline at end of file + emptyDir: diff --git a/manifests/kustomize/env/gcp/cloudsql-proxy/cloudsql-proxy-sa.yaml b/manifests/kustomize/env/gcp/cloudsql-proxy/cloudsql-proxy-sa.yaml new file mode 100644 index 000000000..a4cc9c43d --- /dev/null +++ b/manifests/kustomize/env/gcp/cloudsql-proxy/cloudsql-proxy-sa.yaml @@ -0,0 +1,4 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + name: kubeflow-pipelines-cloudsql-proxy diff --git a/manifests/kustomize/env/gcp/cloudsql-proxy/kustomization.yaml b/manifests/kustomize/env/gcp/cloudsql-proxy/kustomization.yaml index 704e59c33..a336cb50f 100644 --- a/manifests/kustomize/env/gcp/cloudsql-proxy/kustomization.yaml +++ b/manifests/kustomize/env/gcp/cloudsql-proxy/kustomization.yaml @@ -3,4 +3,5 @@ kind: Kustomization resources: - cloudsql-proxy-deployment.yaml -- mysql-service.yaml \ No newline at end of file +- cloudsql-proxy-sa.yaml +- mysql-service.yaml diff --git a/manifests/kustomize/env/gcp/gcp-configurations-patch.yaml b/manifests/kustomize/env/gcp/gcp-configurations-patch.yaml index 683a3627d..5e725b536 100644 --- a/manifests/kustomize/env/gcp/gcp-configurations-patch.yaml +++ b/manifests/kustomize/env/gcp/gcp-configurations-patch.yaml @@ -9,17 +9,14 @@ spec: - name: ml-pipeline-api-server env: - name: HAS_DEFAULT_BUCKET - valueFrom: - configMapKeyRef: - name: gcp-default-config - key: "has_default_bucket" 
+ value: 'true' - name: BUCKET_NAME valueFrom: configMapKeyRef: - name: gcp-default-config - key: "bucket_name" + name: pipeline-install-config + key: bucketName - name: PROJECT_ID valueFrom: configMapKeyRef: - name: gcp-default-config - key: "project_id" + name: pipeline-install-config + key: gcsProjectId diff --git a/manifests/kustomize/env/gcp/gcp-default-config/gcp-default-configmap.yaml b/manifests/kustomize/env/gcp/gcp-default-config/gcp-default-configmap.yaml deleted file mode 100644 index f705fefb8..000000000 --- a/manifests/kustomize/env/gcp/gcp-default-config/gcp-default-configmap.yaml +++ /dev/null @@ -1,8 +0,0 @@ -apiVersion: v1 -kind: ConfigMap -metadata: - name: gcp-default-config -data: - bucket_name: "$(BUCKET_NAME)" - has_default_bucket: "true" - project_id: "$(GCP_PROJECT_ID)" diff --git a/manifests/kustomize/env/gcp/gcp-default-config/kustomization.yaml b/manifests/kustomize/env/gcp/gcp-default-config/kustomization.yaml deleted file mode 100644 index 55e85a87c..000000000 --- a/manifests/kustomize/env/gcp/gcp-default-config/kustomization.yaml +++ /dev/null @@ -1,5 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization - -resources: -- gcp-default-configmap.yaml \ No newline at end of file diff --git a/manifests/kustomize/env/gcp/inverse-proxy/kustomization.yaml b/manifests/kustomize/env/gcp/inverse-proxy/kustomization.yaml index 63578ee26..dd5b29598 100644 --- a/manifests/kustomize/env/gcp/inverse-proxy/kustomization.yaml +++ b/manifests/kustomize/env/gcp/inverse-proxy/kustomization.yaml @@ -1,10 +1,8 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - images: - name: gcr.io/ml-pipeline/inverse-proxy-agent - newTag: 0.5.1 - + newTag: 1.0.0 resources: - proxy-configmap.yaml - proxy-deployment.yaml diff --git a/manifests/kustomize/env/gcp/kustomization.yaml b/manifests/kustomize/env/gcp/kustomization.yaml index 9fd4e86c5..9a8bcdd92 100644 --- a/manifests/kustomize/env/gcp/kustomization.yaml +++ b/manifests/kustomize/env/gcp/kustomization.yaml @@ -6,7 +6,6 @@ bases: - inverse-proxy - minio-gcs-gateway - cloudsql-proxy - - gcp-default-config # Identifier for application manager to apply ownerReference. 
# The ownerReference ensures the resources get garbage collected @@ -26,19 +25,3 @@ configMapGenerator: - name: pipeline-install-config env: params.env behavior: merge - -vars: - - name: GCP_PROJECT_ID - objref: - kind: ConfigMap - name: pipeline-install-config - apiVersion: v1 - fieldref: - fieldpath: data.gcsProjectId - - name: GCP_CLOUDSQL_INSTANCE_NAME - objref: - kind: ConfigMap - name: pipeline-install-config - apiVersion: v1 - fieldref: - fieldpath: data.gcsCloudSqlInstanceName diff --git a/manifests/kustomize/env/gcp/minio-gcs-gateway/kustomization.yaml b/manifests/kustomize/env/gcp/minio-gcs-gateway/kustomization.yaml index db573c477..877ad8174 100644 --- a/manifests/kustomize/env/gcp/minio-gcs-gateway/kustomization.yaml +++ b/manifests/kustomize/env/gcp/minio-gcs-gateway/kustomization.yaml @@ -3,4 +3,12 @@ kind: Kustomization resources: - minio-gcs-gateway-deployment.yaml -- minio-gcs-gateway-service.yaml \ No newline at end of file +- minio-gcs-gateway-sa.yaml +- minio-gcs-gateway-service.yaml + +secretGenerator: +- name: mlpipeline-minio-artifact + env: minio-artifact-secret.env +generatorOptions: + # mlpipeline-minio-artifact needs to be referred by exact name + disableNameSuffixHash: true diff --git a/manifests/kustomize/env/gcp/minio-gcs-gateway/minio-artifact-secret.env b/manifests/kustomize/env/gcp/minio-gcs-gateway/minio-artifact-secret.env new file mode 100644 index 000000000..bc8613ce2 --- /dev/null +++ b/manifests/kustomize/env/gcp/minio-gcs-gateway/minio-artifact-secret.env @@ -0,0 +1,2 @@ +accesskey=minio +secretkey=minio123 diff --git a/manifests/kustomize/env/gcp/minio-gcs-gateway/minio-gcs-gateway-deployment.yaml b/manifests/kustomize/env/gcp/minio-gcs-gateway/minio-gcs-gateway-deployment.yaml index 6828a74ec..f26d27cc6 100644 --- a/manifests/kustomize/env/gcp/minio-gcs-gateway/minio-gcs-gateway-deployment.yaml +++ b/manifests/kustomize/env/gcp/minio-gcs-gateway/minio-gcs-gateway-deployment.yaml @@ -15,6 +15,7 @@ spec: labels: app: minio spec: + serviceAccountName: kubeflow-pipelines-minio-gcs-gateway containers: - name: minio image: gcr.io/ml-pipeline/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance @@ -23,9 +24,20 @@ spec: - gcs - $(GCP_PROJECT_ID) env: + - name: GCP_PROJECT_ID + valueFrom: + configMapKeyRef: + name: pipeline-install-config + key: gcsProjectId - name: MINIO_ACCESS_KEY - value: "minio" + valueFrom: + secretKeyRef: + name: mlpipeline-minio-artifact + key: accesskey - name: MINIO_SECRET_KEY - value: "minio123" + valueFrom: + secretKeyRef: + name: mlpipeline-minio-artifact + key: secretkey ports: - - containerPort: 9000 \ No newline at end of file + - containerPort: 9000 diff --git a/manifests/kustomize/env/gcp/minio-gcs-gateway/minio-gcs-gateway-sa.yaml b/manifests/kustomize/env/gcp/minio-gcs-gateway/minio-gcs-gateway-sa.yaml new file mode 100644 index 000000000..2aa4f9376 --- /dev/null +++ b/manifests/kustomize/env/gcp/minio-gcs-gateway/minio-gcs-gateway-sa.yaml @@ -0,0 +1,4 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + name: kubeflow-pipelines-minio-gcs-gateway diff --git a/manifests/kustomize/env/platform-agnostic/minio/kustomization.yaml b/manifests/kustomize/env/platform-agnostic/minio/kustomization.yaml index 8ed66b303..11b2d99de 100644 --- a/manifests/kustomize/env/platform-agnostic/minio/kustomization.yaml +++ b/manifests/kustomize/env/platform-agnostic/minio/kustomization.yaml @@ -5,3 +5,10 @@ resources: - minio-deployment.yaml - minio-pvc.yaml - minio-service.yaml + +secretGenerator: +- name: mlpipeline-minio-artifact + env: 
minio-artifact-secret.env +generatorOptions: + # mlpipeline-minio-artifact needs to be referred by exact name + disableNameSuffixHash: true diff --git a/manifests/kustomize/env/platform-agnostic/minio/minio-artifact-secret.env b/manifests/kustomize/env/platform-agnostic/minio/minio-artifact-secret.env new file mode 100644 index 000000000..bc8613ce2 --- /dev/null +++ b/manifests/kustomize/env/platform-agnostic/minio/minio-artifact-secret.env @@ -0,0 +1,2 @@ +accesskey=minio +secretkey=minio123 diff --git a/manifests/kustomize/env/platform-agnostic/minio/minio-deployment.yaml b/manifests/kustomize/env/platform-agnostic/minio/minio-deployment.yaml index 6d944a73f..9613615b2 100644 --- a/manifests/kustomize/env/platform-agnostic/minio/minio-deployment.yaml +++ b/manifests/kustomize/env/platform-agnostic/minio/minio-deployment.yaml @@ -21,9 +21,15 @@ spec: - /data env: - name: MINIO_ACCESS_KEY - value: minio + valueFrom: + secretKeyRef: + name: mlpipeline-minio-artifact + key: accesskey - name: MINIO_SECRET_KEY - value: minio123 + valueFrom: + secretKeyRef: + name: mlpipeline-minio-artifact + key: secretkey image: gcr.io/ml-pipeline/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance name: minio ports: @@ -35,4 +41,4 @@ spec: volumes: - name: data persistentVolumeClaim: - claimName: minio-pvc \ No newline at end of file + claimName: minio-pvc diff --git a/manifests/kustomize/env/platform-agnostic/minio/minio-service.yaml b/manifests/kustomize/env/platform-agnostic/minio/minio-service.yaml index bdecf182a..3ab420430 100644 --- a/manifests/kustomize/env/platform-agnostic/minio/minio-service.yaml +++ b/manifests/kustomize/env/platform-agnostic/minio/minio-service.yaml @@ -4,8 +4,9 @@ metadata: name: minio-service spec: ports: - - port: 9000 + - name: http + port: 9000 protocol: TCP targetPort: 9000 selector: - app: minio \ No newline at end of file + app: minio diff --git a/manifests/kustomize/env/platform-agnostic/mysql/mysql-service.yaml b/manifests/kustomize/env/platform-agnostic/mysql/mysql-service.yaml index 78e201bf7..d52482770 100644 --- a/manifests/kustomize/env/platform-agnostic/mysql/mysql-service.yaml +++ b/manifests/kustomize/env/platform-agnostic/mysql/mysql-service.yaml @@ -4,6 +4,9 @@ metadata: name: mysql spec: ports: - - port: 3306 + - # We cannot have name: mysql here, because some requests through istio fail with it. 
+ port: 3306 + protocol: TCP + targetPort: 3306 selector: - app: mysql \ No newline at end of file + app: mysql diff --git a/manifests/kustomize/gcp-workload-identity-setup.sh b/manifests/kustomize/gcp-workload-identity-setup.sh index e749ccbc0..6fac04c57 100755 --- a/manifests/kustomize/gcp-workload-identity-setup.sh +++ b/manifests/kustomize/gcp-workload-identity-setup.sh @@ -16,44 +16,59 @@ set -e +# Kubernetes Namespace +NAMESPACE=${NAMESPACE:-kubeflow} + # Google service Account (GSA) -SYSTEM_GSA=${SYSTEM_GSA:-$CLUSTER_NAME-kfp-system} -USER_GSA=${USER_GSA:-$CLUSTER_NAME-kfp-user} +SYSTEM_GSA=${SYSTEM_GSA:-$RESOURCE_PREFIX-kfp-system} +USER_GSA=${USER_GSA:-$RESOURCE_PREFIX-kfp-user} # Kubernetes Service Account (KSA) +# Note, if deploying manifests/kustomize/env/gcp, you can add the following KSAs +# to the array of SYSTEM_KSA: +# * kubeflow-pipelines-minio-gcs-gateway needs gcs permissions +# * kubeflow-pipelines-cloudsql-proxy needs cloudsql permissions SYSTEM_KSA=(ml-pipeline-ui ml-pipeline-visualizationserver) -USER_KSA=(pipeline-runner kubeflow-pipelines-container-builder) +USER_KSA=(pipeline-runner kubeflow-pipelines-container-builder kubeflow-pipelines-viewer) + +if [ -n $USE_GCP_MANAGED_STORAGE ]; then + SYSTEM_KSA+=(kubeflow-pipelines-minio-gcs-gateway) + SYSTEM_KSA+=(kubeflow-pipelines-cloudsql-proxy) +fi cat < CLUSTER_NAME= NAMESPACE= ./gcp-workload-identity-setup.sh +PROJECT_ID= RESOURCE_PREFIX= NAMESPACE= ./gcp-workload-identity-setup.sh ``` PROJECT_ID: GCP project ID your cluster belongs to. -CLUSTER_NAME: your GKE cluster's name. -NAMESPACE: Kubernetes namespace your Kubeflow Pipelines standalone deployment belongs to (default is kubeflow). +RESOURCE_PREFIX: Your preferred resource prefix for GCP resources this script creates. +NAMESPACE: Optional. Kubernetes namespace your Kubeflow Pipelines standalone deployment belongs to. (Defaults to kubeflow) +USE_GCP_MANAGED_STORAGE: Optional. Defaults to "false", specify "true" if you intend to use GCP managed storage (Google Cloud Storage and Cloud SQL) following instructions in: +https://github.com/kubeflow/pipelines/tree/master/manifests/kustomize/sample EOF } if [ -z "$PROJECT_ID" ]; then @@ -62,18 +77,39 @@ if [ -z "$PROJECT_ID" ]; then echo "Error: PROJECT_ID env variable is empty!" exit 1 fi -if [ -z "$CLUSTER_NAME" ]; then +if [ -z "$RESOURCE_PREFIX" ]; then usage echo - echo "Error: CLUSTER_NAME env variable is empty!" + echo "Error: RESOURCE_PREFIX env variable is empty!" exit 1 fi echo "Env variables set:" echo "* PROJECT_ID=$PROJECT_ID" -echo "* CLUSTER_NAME=$CLUSTER_NAME" +echo "* RESOURCE_PREFIX=$RESOURCE_PREFIX" echo "* NAMESPACE=$NAMESPACE" +echo "* USE_GCP_MANAGED_STORAGE=${USE_GCP_MANAGED_STORAGE:-false}" echo +SYSTEM_GSA_FULL="$SYSTEM_GSA@$PROJECT_ID.iam.gserviceaccount.com" +USER_GSA_FULL="$USER_GSA@$PROJECT_ID.iam.gserviceaccount.com" + +cat </dev/null; then + echo "KSA $name already exists" + else + kubectl create serviceaccount $name -n $NAMESPACE --save-config + echo "KSA $name created" + fi +} # Bind KSA to GSA through workload identity. 
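# The binding is two-way: the GSA gets an IAM policy binding that lets the member
# serviceAccount:$PROJECT_ID.svc.id.goog[$NAMESPACE/<ksa>] act as it through
# roles/iam.workloadIdentityUser, and the KSA is annotated so GKE knows which GSA
# to exchange tokens for, e.g. (illustrative value, names depend on your prefix):
#   iam.gke.io/gcp-service-account: <prefix>-kfp-system@<project-id>.iam.gserviceaccount.com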
# Documentation: https://cloud.google.com/kubernetes-engine/docs/how-to/workload-identity @@ -112,6 +149,8 @@ function bind_gsa_and_ksa { --member="serviceAccount:$PROJECT_ID.svc.id.goog[$NAMESPACE/$ksa]" \ --role="roles/iam.workloadIdentityUser" \ > /dev/null # hide verbose output + + create_ksa_if_not_present $ksa kubectl annotate serviceaccount \ --namespace $NAMESPACE \ --overwrite \ @@ -129,3 +168,35 @@ echo "Binding each kfp user KSA to $USER_GSA" for ksa in ${USER_KSA[@]}; do bind_gsa_and_ksa $USER_GSA $ksa done + +echo +echo "All the workload identity bindings have succeeded!" +cat < /dev/null && pwd)" + +function format_yaml { + local path=$1 + local tmp=$(mktemp) + yq r "$path" > "$tmp" + cp "$tmp" "$path" +} +echo "This formatting script uses yq, it can be downloaded at https://github.com/mikefarah/yq/releases/tag/3.3.0" +kustomization_yamls_with_images=( + "base/cache-deployer/kustomization.yaml" + "base/cache/kustomization.yaml" + "base/metadata/kustomization.yaml" + "base/pipeline/metadata-writer/kustomization.yaml" + "base/pipeline/kustomization.yaml" + "env/gcp/inverse-proxy/kustomization.yaml" +) +for path in "${kustomization_yamls_with_images[@]}" +do + format_yaml "$DIR/../$path" +done diff --git a/manifests/kustomize/hack/release.sh b/manifests/kustomize/hack/release.sh new file mode 100755 index 000000000..4094183f9 --- /dev/null +++ b/manifests/kustomize/hack/release.sh @@ -0,0 +1,42 @@ +#!/bin/bash +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -ex + +TAG_NAME=$1 +DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null && pwd)" + +if [[ -z "$TAG_NAME" ]]; then + echo "Usage: release.sh " >&2 + exit 1 +fi + +echo "This release script uses yq, it can be downloaded at https://github.com/mikefarah/yq/releases/tag/3.3.0" +kustomization_yamls_with_images=( + "base/cache-deployer/kustomization.yaml" + "base/cache/kustomization.yaml" + "base/metadata/kustomization.yaml" + "base/pipeline/metadata-writer/kustomization.yaml" + "base/pipeline/kustomization.yaml" + "env/gcp/inverse-proxy/kustomization.yaml" +) +for path in "${kustomization_yamls_with_images[@]}" +do + yq w -i "$DIR/../$path" images[*].newTag "$TAG_NAME" +done + +# Note, this only works in linux. TODO: make it MacOS sed compatible. +sed -i.bak -e "s|appVersion=.\+|appVersion=$TAG_NAME|g" "$DIR/../base/params.env" diff --git a/manifests/kustomize/sample/README.md b/manifests/kustomize/sample/README.md index cb5c7e753..c4bfe95ba 100644 --- a/manifests/kustomize/sample/README.md +++ b/manifests/kustomize/sample/README.md @@ -8,8 +8,8 @@ You may consider create **zero-sized GPU node-pool with autoscaling**. Please reference [GPU Tutorial](/samples/tutorials/gpu/). - **Security** You may consider use **Workload Identity** in GCP cluster. -Here for simplicity we create a small cluster with **--scopes=cloud-platform** -to save credentail configure efforts. 
+Here for simplicity, we create a small cluster with **--scopes=cloud-platform** +which grants all the GCP permissions to the cluster. ``` gcloud container clusters create mycluster \ @@ -53,7 +53,14 @@ gsutil mb -p myProjectId gs://myBucketName/ - Edit **params.env**, **params-db-secret.env** and **cluster-scoped-resources/params.env** - Edit kustomization.yaml to set your namespace, e.x. "kubeflow" -5. Install +5. (Optional.) If the cluster is on Workload Identity, please run **[gcp-workload-identity-setup.sh](../gcp-workload-identity-setup.sh)** + The script prints usage documentation when calling without argument. Note, you should + call it with `USE_GCP_MANAGED_STORAGE=true` env var. + + - make sure the Google Service Account (GSA) can access the CloudSQL instance and GCS bucket + - if your workload calls other GCP APIs, make sure the GSA can access them + +6. Install ``` kubectl apply -k sample/cluster-scoped-resources/ @@ -68,11 +75,3 @@ kubectl wait applications/mypipeline -n kubeflow --for condition=Ready --timeout ``` Now you can find the installation in [Console](http://console.cloud.google.com/ai-platform/pipelines) - -6. Post-installation configures - -It depends on how you create the cluster, -- if the cluster is created with **--scopes=cloud-platform**, no actions required -- if the cluster is on Workload Identity, please run **gcp-workload-identity-setup.sh** - - make sure the Google Service Account (GSA) can access the CloudSQL instance and GCS bucket - - if your workload calls other GCP APIs, make sure the GSA can access them diff --git a/manifests/kustomize/sample/cluster-scoped-resources/kustomization.yaml b/manifests/kustomize/sample/cluster-scoped-resources/kustomization.yaml index 9a2276e10..83c54aa99 100644 --- a/manifests/kustomize/sample/cluster-scoped-resources/kustomization.yaml +++ b/manifests/kustomize/sample/cluster-scoped-resources/kustomization.yaml @@ -1,12 +1,10 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization +# !!! If you want to customize the namespace, +# please also update sample/kustomization.yaml's namespace field to the same value +namespace: kubeflow + bases: # Or github.com/kubeflow/pipelines/manifests/kustomize/cluster-scoped-resources?ref=1.0.0 - ../../cluster-scoped-resources - -# Change the value in params.env to yours. -configMapGenerator: - - name: pipeline-cluster-scoped-install-config - env: params.env - behavior: merge diff --git a/manifests/kustomize/sample/cluster-scoped-resources/params.env b/manifests/kustomize/sample/cluster-scoped-resources/params.env deleted file mode 100644 index 78166431d..000000000 --- a/manifests/kustomize/sample/cluster-scoped-resources/params.env +++ /dev/null @@ -1 +0,0 @@ -namespace=kubeflow diff --git a/manifests/kustomize/sample/kustomization.yaml b/manifests/kustomize/sample/kustomization.yaml index db2b428bc..74a1e49e1 100644 --- a/manifests/kustomize/sample/kustomization.yaml +++ b/manifests/kustomize/sample/kustomization.yaml @@ -23,7 +23,7 @@ secretGenerator: behavior: merge # !!! 
If you want to customize the namespace, -# please also update sample/params.env and sample/cluster-scoped-resources/params.env +# please also update sample/cluster-scoped-resources/kustomization.yaml's namespace field to the same value namespace: kubeflow #### Customization ### diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 000000000..4cc259e99 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,2283 @@ +{ + "name": "kubeflow-pipelines-hack", + "version": "1.0.0-rc.2", + "lockfileVersion": 1, + "requires": true, + "dependencies": { + "@babel/code-frame": { + "version": "7.10.1", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.10.1.tgz", + "integrity": "sha512-IGhtTmpjGbYzcEDOw7DcQtbQSXcG9ftmAXtWTu9V936vDye4xjjekktFAtgZsWpzTj/X01jocB46mTywm/4SZw==", + "dev": true, + "requires": { + "@babel/highlight": "^7.10.1" + } + }, + "@babel/helper-validator-identifier": { + "version": "7.10.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.10.1.tgz", + "integrity": "sha512-5vW/JXLALhczRCWP0PnFDMCJAchlBvM7f4uk/jXritBnIa6E1KmqmtrS3yn1LAnxFBypQ3eneLuXjsnfQsgILw==", + "dev": true + }, + "@babel/highlight": { + "version": "7.10.1", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.10.1.tgz", + "integrity": "sha512-8rMof+gVP8mxYZApLF/JgNDAkdKa+aJt3ZYxF8z6+j/hpeXL7iMsKCPHa2jNMHu/qqBwzQF4OHNoYi8dMA/rYg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.10.1", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + } + }, + "@types/color-name": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@types/color-name/-/color-name-1.1.1.tgz", + "integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==", + "dev": true + }, + "@types/minimist": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.0.tgz", + "integrity": "sha1-aaI6OtKcrwCX8G7aWbNh7i8GOfY=", + "dev": true + }, + "@types/normalize-package-data": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.0.tgz", + "integrity": "sha512-f5j5b/Gf71L+dbqxIpQ4Z2WlmI/mPJ0fOkGGmFgtb6sAu97EPczzbS3/tJKxmcYDj55OX6ssqwDAWOHIYDRDGA==", + "dev": true + }, + "JSONStream": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.5.tgz", + "integrity": "sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ==", + "dev": true, + "requires": { + "jsonparse": "^1.2.0", + "through": ">=2.2.7 <3" + } + }, + "add-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/add-stream/-/add-stream-1.0.0.tgz", + "integrity": "sha1-anmQQ3ynNtXhKI25K9MmbV9csqo=", + "dev": true + }, + "ansi-regex": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", + "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "dev": true + }, + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "requires": { + "color-convert": "^1.9.0" + } + }, + "array-find-index": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-find-index/-/array-find-index-1.0.2.tgz", + 
"integrity": "sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E=", + "dev": true + }, + "array-ify": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/array-ify/-/array-ify-1.0.0.tgz", + "integrity": "sha1-nlKHYrSpBmrRY6aWKjZEGOlibs4=", + "dev": true + }, + "arrify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", + "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==", + "dev": true + }, + "balanced-match": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", + "dev": true + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "buffer-from": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz", + "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==", + "dev": true + }, + "camelcase": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.0.0.tgz", + "integrity": "sha512-8KMDF1Vz2gzOq54ONPJS65IvTUaB1cHJ2DMM7MbPmLZljDH1qpzzLsWdiN9pHh6qvkRVDTi/07+eNGch/oLU4w==", + "dev": true + }, + "camelcase-keys": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-6.2.2.tgz", + "integrity": "sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg==", + "dev": true, + "requires": { + "camelcase": "^5.3.1", + "map-obj": "^4.0.0", + "quick-lru": "^4.0.1" + }, + "dependencies": { + "camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true + } + } + }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "cliui": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", + "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==", + "dev": true, + "requires": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^6.2.0" + } + }, + "color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true + }, + "commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": 
"sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "dev": true, + "optional": true + }, + "compare-func": { + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/compare-func/-/compare-func-1.3.4.tgz", + "integrity": "sha512-sq2sWtrqKPkEXAC8tEJA1+BqAH9GbFkGBtUOqrUX57VSfwp8xyktctk+uLoRy5eccTdxzDcVIztlYDpKs3Jv1Q==", + "dev": true, + "requires": { + "array-ify": "^1.0.0", + "dot-prop": "^3.0.0" + } + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "concat-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-2.0.0.tgz", + "integrity": "sha512-MWufYdFw53ccGjCA+Ol7XJYpAlW6/prSMzuPOTRnJGcGzuhLn4Scrz7qf6o8bROZ514ltazcIFJZevcfbo0x7A==", + "dev": true, + "requires": { + "buffer-from": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^3.0.2", + "typedarray": "^0.0.6" + } + }, + "conventional-changelog": { + "version": "3.1.18", + "resolved": "https://registry.npmjs.org/conventional-changelog/-/conventional-changelog-3.1.18.tgz", + "integrity": "sha512-aN6a3rjgV8qwAJj3sC/Lme2kvswWO7fFSGQc32gREcwIOsaiqBaO6f2p0NomFaPDnTqZ+mMZFLL3hlzvEnZ0mQ==", + "dev": true, + "requires": { + "conventional-changelog-angular": "^5.0.6", + "conventional-changelog-atom": "^2.0.3", + "conventional-changelog-codemirror": "^2.0.3", + "conventional-changelog-conventionalcommits": "^4.2.3", + "conventional-changelog-core": "^4.1.4", + "conventional-changelog-ember": "^2.0.4", + "conventional-changelog-eslint": "^3.0.4", + "conventional-changelog-express": "^2.0.1", + "conventional-changelog-jquery": "^3.0.6", + "conventional-changelog-jshint": "^2.0.3", + "conventional-changelog-preset-loader": "^2.3.0" + } + }, + "conventional-changelog-angular": { + "version": "5.0.10", + "resolved": "https://registry.npmjs.org/conventional-changelog-angular/-/conventional-changelog-angular-5.0.10.tgz", + "integrity": "sha512-k7RPPRs0vp8+BtPsM9uDxRl6KcgqtCJmzRD1wRtgqmhQ96g8ifBGo9O/TZBG23jqlXS/rg8BKRDELxfnQQGiaA==", + "dev": true, + "requires": { + "compare-func": "^1.3.1", + "q": "^1.5.1" + } + }, + "conventional-changelog-atom": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/conventional-changelog-atom/-/conventional-changelog-atom-2.0.7.tgz", + "integrity": "sha512-7dOREZwzB+tCEMjRTDfen0OHwd7vPUdmU0llTy1eloZgtOP4iSLVzYIQqfmdRZEty+3w5Jz+AbhfTJKoKw1JeQ==", + "dev": true, + "requires": { + "q": "^1.5.1" + } + }, + "conventional-changelog-codemirror": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/conventional-changelog-codemirror/-/conventional-changelog-codemirror-2.0.7.tgz", + "integrity": "sha512-Oralk1kiagn3Gb5cR5BffenWjVu59t/viE6UMD/mQa1hISMPkMYhJIqX+CMeA1zXgVBO+YHQhhokEj99GP5xcg==", + "dev": true, + "requires": { + "q": "^1.5.1" + } + }, + "conventional-changelog-config-spec": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/conventional-changelog-config-spec/-/conventional-changelog-config-spec-2.1.0.tgz", + "integrity": "sha512-IpVePh16EbbB02V+UA+HQnnPIohgXvJRxHcS5+Uwk4AT5LjzCZJm5sp/yqs5C6KZJ1jMsV4paEV13BN1pvDuxQ==", + "dev": true + }, + "conventional-changelog-conventionalcommits": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/conventional-changelog-conventionalcommits/-/conventional-changelog-conventionalcommits-4.2.3.tgz", + "integrity": 
"sha512-atGa+R4vvEhb8N/8v3IoW59gCBJeeFiX6uIbPu876ENAmkMwsenyn0R21kdDHJFLQdy6zW4J6b4xN8KI3b9oww==", + "dev": true, + "requires": { + "compare-func": "^1.3.1", + "lodash": "^4.17.15", + "q": "^1.5.1" + } + }, + "conventional-changelog-core": { + "version": "4.1.7", + "resolved": "https://registry.npmjs.org/conventional-changelog-core/-/conventional-changelog-core-4.1.7.tgz", + "integrity": "sha512-UBvSrQR2RdKbSQKh7RhueiiY4ZAIOW3+CSWdtKOwRv+KxIMNFKm1rOcGBFx0eA8AKhGkkmmacoTWJTqyz7Q0VA==", + "dev": true, + "requires": { + "add-stream": "^1.0.0", + "conventional-changelog-writer": "^4.0.16", + "conventional-commits-parser": "^3.1.0", + "dateformat": "^3.0.0", + "get-pkg-repo": "^1.0.0", + "git-raw-commits": "2.0.0", + "git-remote-origin-url": "^2.0.0", + "git-semver-tags": "^4.0.0", + "lodash": "^4.17.15", + "normalize-package-data": "^2.3.5", + "q": "^1.5.1", + "read-pkg": "^3.0.0", + "read-pkg-up": "^3.0.0", + "shelljs": "^0.8.3", + "through2": "^3.0.0" + }, + "dependencies": { + "git-semver-tags": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/git-semver-tags/-/git-semver-tags-4.0.0.tgz", + "integrity": "sha512-LajaAWLYVBff+1NVircURJFL8TQ3EMIcLAfHisWYX/nPoMwnTYfWAznQDmMujlLqoD12VtLmoSrF1sQ5MhimEQ==", + "dev": true, + "requires": { + "meow": "^7.0.0", + "semver": "^6.0.0" + } + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + } + } + }, + "conventional-changelog-ember": { + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/conventional-changelog-ember/-/conventional-changelog-ember-2.0.8.tgz", + "integrity": "sha512-JEMEcUAMg4Q9yxD341OgWlESQ4gLqMWMXIWWUqoQU8yvTJlKnrvcui3wk9JvnZQyONwM2g1MKRZuAjKxr8hAXA==", + "dev": true, + "requires": { + "q": "^1.5.1" + } + }, + "conventional-changelog-eslint": { + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/conventional-changelog-eslint/-/conventional-changelog-eslint-3.0.8.tgz", + "integrity": "sha512-5rTRltgWG7TpU1PqgKHMA/2ivjhrB+E+S7OCTvj0zM/QGg4vmnVH67Vq/EzvSNYtejhWC+OwzvDrLk3tqPry8A==", + "dev": true, + "requires": { + "q": "^1.5.1" + } + }, + "conventional-changelog-express": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/conventional-changelog-express/-/conventional-changelog-express-2.0.5.tgz", + "integrity": "sha512-pW2hsjKG+xNx/Qjof8wYlAX/P61hT5gQ/2rZ2NsTpG+PgV7Rc8RCfITvC/zN9K8fj0QmV6dWmUefCteD9baEAw==", + "dev": true, + "requires": { + "q": "^1.5.1" + } + }, + "conventional-changelog-jquery": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/conventional-changelog-jquery/-/conventional-changelog-jquery-3.0.6.tgz", + "integrity": "sha512-gHAABCXUNA/HjnZEm+vxAfFPJkgtrZvCDIlCKfdPVXtCIo/Q0lN5VKpx8aR5p8KdVRQFF3OuTlvv5kv6iPuRqA==", + "dev": true, + "requires": { + "q": "^1.5.1" + } + }, + "conventional-changelog-jshint": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/conventional-changelog-jshint/-/conventional-changelog-jshint-2.0.7.tgz", + "integrity": "sha512-qHA8rmwUnLiIxANJbz650+NVzqDIwNtc0TcpIa0+uekbmKHttidvQ1dGximU3vEDdoJVKFgR3TXFqYuZmYy9ZQ==", + "dev": true, + "requires": { + "compare-func": "^1.3.1", + "q": "^1.5.1" + } + }, + "conventional-changelog-preset-loader": { + "version": "2.3.4", + "resolved": "https://registry.npmjs.org/conventional-changelog-preset-loader/-/conventional-changelog-preset-loader-2.3.4.tgz", + "integrity": 
"sha512-GEKRWkrSAZeTq5+YjUZOYxdHq+ci4dNwHvpaBC3+ENalzFWuCWa9EZXSuZBpkr72sMdKB+1fyDV4takK1Lf58g==", + "dev": true + }, + "conventional-changelog-writer": { + "version": "4.0.16", + "resolved": "https://registry.npmjs.org/conventional-changelog-writer/-/conventional-changelog-writer-4.0.16.tgz", + "integrity": "sha512-jmU1sDJDZpm/dkuFxBeRXvyNcJQeKhGtVcFFkwTphUAzyYWcwz2j36Wcv+Mv2hU3tpvLMkysOPXJTLO55AUrYQ==", + "dev": true, + "requires": { + "compare-func": "^1.3.1", + "conventional-commits-filter": "^2.0.6", + "dateformat": "^3.0.0", + "handlebars": "^4.7.6", + "json-stringify-safe": "^5.0.1", + "lodash": "^4.17.15", + "meow": "^7.0.0", + "semver": "^6.0.0", + "split": "^1.0.0", + "through2": "^3.0.0" + }, + "dependencies": { + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + } + } + }, + "conventional-commits-filter": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/conventional-commits-filter/-/conventional-commits-filter-2.0.6.tgz", + "integrity": "sha512-4g+sw8+KA50/Qwzfr0hL5k5NWxqtrOVw4DDk3/h6L85a9Gz0/Eqp3oP+CWCNfesBvZZZEFHF7OTEbRe+yYSyKw==", + "dev": true, + "requires": { + "lodash.ismatch": "^4.4.0", + "modify-values": "^1.0.0" + } + }, + "conventional-commits-parser": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/conventional-commits-parser/-/conventional-commits-parser-3.1.0.tgz", + "integrity": "sha512-RSo5S0WIwXZiRxUGTPuYFbqvrR4vpJ1BDdTlthFgvHt5kEdnd1+pdvwWphWn57/oIl4V72NMmOocFqqJ8mFFhA==", + "dev": true, + "requires": { + "JSONStream": "^1.0.4", + "is-text-path": "^1.0.1", + "lodash": "^4.17.15", + "meow": "^7.0.0", + "split2": "^2.0.0", + "through2": "^3.0.0", + "trim-off-newlines": "^1.0.0" + } + }, + "conventional-recommended-bump": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/conventional-recommended-bump/-/conventional-recommended-bump-6.0.5.tgz", + "integrity": "sha512-srkferrB4kACPEbKYltZwX1CQZAEqbQkabKN444mavLRVMetzwJFJf23/+pwvtMsWbd+cc4HaleV1nHke0f8Rw==", + "dev": true, + "requires": { + "concat-stream": "^2.0.0", + "conventional-changelog-preset-loader": "^2.3.0", + "conventional-commits-filter": "^2.0.2", + "conventional-commits-parser": "^3.0.8", + "git-raw-commits": "2.0.0", + "git-semver-tags": "^3.0.1", + "meow": "^5.0.0", + "q": "^1.5.1" + }, + "dependencies": { + "arrify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", + "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=", + "dev": true + }, + "camelcase": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-4.1.0.tgz", + "integrity": "sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0=", + "dev": true + }, + "camelcase-keys": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-4.2.0.tgz", + "integrity": "sha1-oqpfsa9oh1glnDLBQUJteJI7m3c=", + "dev": true, + "requires": { + "camelcase": "^4.1.0", + "map-obj": "^2.0.0", + "quick-lru": "^1.0.0" + } + }, + "indent-string": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-3.2.0.tgz", + "integrity": "sha1-Sl/W0nzDMvN+VBmlBNu4NxBckok=", + "dev": true + }, + "map-obj": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-2.0.0.tgz", + "integrity": "sha1-plzSkIepJZi4eRJXpSPgISIqwfk=", + "dev": true + }, + "meow": { + "version": "5.0.0", + "resolved": 
"https://registry.npmjs.org/meow/-/meow-5.0.0.tgz", + "integrity": "sha512-CbTqYU17ABaLefO8vCU153ZZlprKYWDljcndKKDCFcYQITzWCXZAVk4QMFZPgvzrnUQ3uItnIE/LoUOwrT15Ig==", + "dev": true, + "requires": { + "camelcase-keys": "^4.0.0", + "decamelize-keys": "^1.0.0", + "loud-rejection": "^1.0.0", + "minimist-options": "^3.0.1", + "normalize-package-data": "^2.3.4", + "read-pkg-up": "^3.0.0", + "redent": "^2.0.0", + "trim-newlines": "^2.0.0", + "yargs-parser": "^10.0.0" + } + }, + "minimist-options": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/minimist-options/-/minimist-options-3.0.2.tgz", + "integrity": "sha512-FyBrT/d0d4+uiZRbqznPXqw3IpZZG3gl3wKWiX784FycUKVwBt0uLBFkQrtE4tZOrgo78nZp2jnKz3L65T5LdQ==", + "dev": true, + "requires": { + "arrify": "^1.0.1", + "is-plain-obj": "^1.1.0" + } + }, + "quick-lru": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-1.1.0.tgz", + "integrity": "sha1-Q2CxfGETatOAeDl/8RQW4Ybc+7g=", + "dev": true + }, + "redent": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/redent/-/redent-2.0.0.tgz", + "integrity": "sha1-wbIAe0LVfrE4kHmzyDM2OdXhzKo=", + "dev": true, + "requires": { + "indent-string": "^3.0.0", + "strip-indent": "^2.0.0" + } + }, + "strip-indent": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-2.0.0.tgz", + "integrity": "sha1-XvjbKV0B5u1sv3qrlpmNeCJSe2g=", + "dev": true + }, + "trim-newlines": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-2.0.0.tgz", + "integrity": "sha1-tAPQuRvlDDMd/EuC7s6yLD3hbSA=", + "dev": true + }, + "yargs-parser": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-10.1.0.tgz", + "integrity": "sha512-VCIyR1wJoEBZUqk5PA+oOBF6ypbwh5aNB3I50guxAL/quggdfs4TtNHQrSazFA3fYZ+tEqfs0zIGlv0c/rgjbQ==", + "dev": true, + "requires": { + "camelcase": "^4.1.0" + } + } + } + }, + "core-util-is": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=", + "dev": true + }, + "currently-unhandled": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/currently-unhandled/-/currently-unhandled-0.4.1.tgz", + "integrity": "sha1-mI3zP+qxke95mmE2nddsF635V+o=", + "dev": true, + "requires": { + "array-find-index": "^1.0.1" + } + }, + "dargs": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/dargs/-/dargs-4.1.0.tgz", + "integrity": "sha1-A6nbtLXC8Tm/FK5T8LiipqhvThc=", + "dev": true, + "requires": { + "number-is-nan": "^1.0.0" + } + }, + "dateformat": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-3.0.3.tgz", + "integrity": "sha512-jyCETtSl3VMZMWeRo7iY1FL19ges1t55hMo5yaam4Jrsm5EPL89UQkoQRyiI+Yf4k8r2ZpdngkV8hr1lIdjb3Q==", + "dev": true + }, + "decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", + "dev": true + }, + "decamelize-keys": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/decamelize-keys/-/decamelize-keys-1.1.0.tgz", + "integrity": "sha1-0XGoeTMlKAfrPLYdwcFEXQeN8tk=", + "dev": true, + "requires": { + "decamelize": "^1.1.0", + "map-obj": "^1.0.0" + }, + "dependencies": { + "map-obj": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz", + "integrity": "sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0=", + "dev": true + } + } 
+ }, + "detect-indent": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/detect-indent/-/detect-indent-6.0.0.tgz", + "integrity": "sha512-oSyFlqaTHCItVRGK5RmrmjB+CmaMOW7IaNA/kdxqhoa6d17j/5ce9O9eWXmV/KEdRwqpQA+Vqe8a8Bsybu4YnA==", + "dev": true + }, + "detect-newline": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", + "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==", + "dev": true + }, + "dot-prop": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-3.0.0.tgz", + "integrity": "sha1-G3CK8JSknJoOfbyteQq6U52sEXc=", + "dev": true, + "requires": { + "is-obj": "^1.0.0" + } + }, + "dotgitignore": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/dotgitignore/-/dotgitignore-2.1.0.tgz", + "integrity": "sha512-sCm11ak2oY6DglEPpCB8TixLjWAxd3kJTs6UIcSasNYxXdFPV+YKlye92c8H4kKFqV5qYMIh7d+cYecEg0dIkA==", + "dev": true, + "requires": { + "find-up": "^3.0.0", + "minimatch": "^3.0.4" + }, + "dependencies": { + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "requires": { + "locate-path": "^3.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "requires": { + "p-limit": "^2.0.0" + } + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", + "dev": true + } + } + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "requires": { + "is-arrayish": "^0.2.1" + } + }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true + }, + "figures": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-3.1.0.tgz", + "integrity": "sha512-ravh8VRXqHuMvZt/d8GblBeqDMkdJMBdv/2KntFH+ra5MXkO7nxNKpzQ3n6QD/2da1kH0aWmNISdvhM7gl2gVg==", + "dev": true, + "requires": { + "escape-string-regexp": "^1.0.5" + } + }, + "find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "requires": { + "locate-path": "^5.0.0", + 
"path-exists": "^4.0.0" + } + }, + "fs-access": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/fs-access/-/fs-access-1.0.1.tgz", + "integrity": "sha1-1qh/JiJxzv6+wwxVNAf7mV2od3o=", + "dev": true, + "requires": { + "null-check": "^1.0.0" + } + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true + }, + "get-pkg-repo": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/get-pkg-repo/-/get-pkg-repo-1.4.0.tgz", + "integrity": "sha1-xztInAbYDMVTbCyFP54FIyBWly0=", + "dev": true, + "requires": { + "hosted-git-info": "^2.1.4", + "meow": "^3.3.0", + "normalize-package-data": "^2.3.0", + "parse-github-repo-url": "^1.3.0", + "through2": "^2.0.0" + }, + "dependencies": { + "camelcase": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-2.1.1.tgz", + "integrity": "sha1-fB0W1nmhu+WcoCys7PsBHiAfWh8=", + "dev": true + }, + "camelcase-keys": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-2.1.0.tgz", + "integrity": "sha1-MIvur/3ygRkFHvodkyITyRuPkuc=", + "dev": true, + "requires": { + "camelcase": "^2.0.0", + "map-obj": "^1.0.0" + } + }, + "find-up": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-1.1.2.tgz", + "integrity": "sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8=", + "dev": true, + "requires": { + "path-exists": "^2.0.0", + "pinkie-promise": "^2.0.0" + } + }, + "indent-string": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-2.1.0.tgz", + "integrity": "sha1-ji1INIdCEhtKghi3oTfppSBJ3IA=", + "dev": true, + "requires": { + "repeating": "^2.0.0" + } + }, + "map-obj": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz", + "integrity": "sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0=", + "dev": true + }, + "meow": { + "version": "3.7.0", + "resolved": "https://registry.npmjs.org/meow/-/meow-3.7.0.tgz", + "integrity": "sha1-cstmi0JSKCkKu/qFaJJYcwioAfs=", + "dev": true, + "requires": { + "camelcase-keys": "^2.0.0", + "decamelize": "^1.1.2", + "loud-rejection": "^1.0.0", + "map-obj": "^1.0.1", + "minimist": "^1.1.3", + "normalize-package-data": "^2.3.4", + "object-assign": "^4.0.1", + "read-pkg-up": "^1.0.1", + "redent": "^1.0.0", + "trim-newlines": "^1.0.0" + } + }, + "path-exists": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-2.1.0.tgz", + "integrity": "sha1-D+tsZPD8UY2adU3V77YscCJ2H0s=", + "dev": true, + "requires": { + "pinkie-promise": "^2.0.0" + } + }, + "read-pkg": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-1.1.0.tgz", + "integrity": "sha1-9f+qXs0pyzHAR0vKfXVra7KePyg=", + "dev": true, + "requires": { + "load-json-file": "^1.0.0", + "normalize-package-data": "^2.3.2", + "path-type": "^1.0.0" + } + }, + "read-pkg-up": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-1.0.1.tgz", + "integrity": "sha1-nWPBMnbAZZGNV/ACpX9AobZD+wI=", + "dev": true, + "requires": { + "find-up": "^1.0.0", + "read-pkg": "^1.0.0" + } + }, + "readable-stream": { + "version": "2.3.7", + 
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "redent": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/redent/-/redent-1.0.0.tgz", + "integrity": "sha1-z5Fqsf1fHxbfsggi3W7H9zDCr94=", + "dev": true, + "requires": { + "indent-string": "^2.1.0", + "strip-indent": "^1.0.1" + } + }, + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "requires": { + "safe-buffer": "~5.1.0" + } + }, + "strip-indent": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-1.0.1.tgz", + "integrity": "sha1-DHlipq3vp7vUrDZkYKY4VSrhoKI=", + "dev": true, + "requires": { + "get-stdin": "^4.0.1" + } + }, + "through2": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", + "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", + "dev": true, + "requires": { + "readable-stream": "~2.3.6", + "xtend": "~4.0.1" + } + }, + "trim-newlines": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-1.0.0.tgz", + "integrity": "sha1-WIeWa7WCpFA6QetST301ARgVphM=", + "dev": true + } + } + }, + "get-stdin": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-4.0.1.tgz", + "integrity": "sha1-uWjGsKBDhDJJAui/Gl3zJXmkUP4=", + "dev": true + }, + "git-raw-commits": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/git-raw-commits/-/git-raw-commits-2.0.0.tgz", + "integrity": "sha512-w4jFEJFgKXMQJ0H0ikBk2S+4KP2VEjhCvLCNqbNRQC8BgGWgLKNCO7a9K9LI+TVT7Gfoloje502sEnctibffgg==", + "dev": true, + "requires": { + "dargs": "^4.0.1", + "lodash.template": "^4.0.2", + "meow": "^4.0.0", + "split2": "^2.0.0", + "through2": "^2.0.0" + }, + "dependencies": { + "arrify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", + "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=", + "dev": true + }, + "camelcase": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-4.1.0.tgz", + "integrity": "sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0=", + "dev": true + }, + "camelcase-keys": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-4.2.0.tgz", + "integrity": "sha1-oqpfsa9oh1glnDLBQUJteJI7m3c=", + "dev": true, + "requires": { + "camelcase": "^4.1.0", + "map-obj": "^2.0.0", + "quick-lru": "^1.0.0" + } + }, + "indent-string": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-3.2.0.tgz", + "integrity": "sha1-Sl/W0nzDMvN+VBmlBNu4NxBckok=", + "dev": true + }, + "map-obj": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/map-obj/-/map-obj-2.0.0.tgz", + "integrity": "sha1-plzSkIepJZi4eRJXpSPgISIqwfk=", + "dev": true + }, + "meow": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/meow/-/meow-4.0.1.tgz", + "integrity": "sha512-xcSBHD5Z86zaOc+781KrupuHAzeGXSLtiAOmBsiLDiPSaYSB6hdew2ng9EBAnZ62jagG9MHAOdxpDi/lWBFJ/A==", + "dev": true, + "requires": { + "camelcase-keys": "^4.0.0", + "decamelize-keys": "^1.0.0", + "loud-rejection": "^1.0.0", + "minimist": "^1.1.3", + "minimist-options": "^3.0.1", + "normalize-package-data": "^2.3.4", + "read-pkg-up": "^3.0.0", + "redent": "^2.0.0", + "trim-newlines": "^2.0.0" + } + }, + "minimist-options": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/minimist-options/-/minimist-options-3.0.2.tgz", + "integrity": "sha512-FyBrT/d0d4+uiZRbqznPXqw3IpZZG3gl3wKWiX784FycUKVwBt0uLBFkQrtE4tZOrgo78nZp2jnKz3L65T5LdQ==", + "dev": true, + "requires": { + "arrify": "^1.0.1", + "is-plain-obj": "^1.1.0" + } + }, + "quick-lru": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-1.1.0.tgz", + "integrity": "sha1-Q2CxfGETatOAeDl/8RQW4Ybc+7g=", + "dev": true + }, + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "redent": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/redent/-/redent-2.0.0.tgz", + "integrity": "sha1-wbIAe0LVfrE4kHmzyDM2OdXhzKo=", + "dev": true, + "requires": { + "indent-string": "^3.0.0", + "strip-indent": "^2.0.0" + } + }, + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "requires": { + "safe-buffer": "~5.1.0" + } + }, + "strip-indent": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-2.0.0.tgz", + "integrity": "sha1-XvjbKV0B5u1sv3qrlpmNeCJSe2g=", + "dev": true + }, + "through2": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", + "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", + "dev": true, + "requires": { + "readable-stream": "~2.3.6", + "xtend": "~4.0.1" + } + }, + "trim-newlines": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-2.0.0.tgz", + "integrity": "sha1-tAPQuRvlDDMd/EuC7s6yLD3hbSA=", + "dev": true + } + } + }, + "git-remote-origin-url": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/git-remote-origin-url/-/git-remote-origin-url-2.0.0.tgz", + "integrity": "sha1-UoJlna4hBxRaERJhEq0yFuxfpl8=", + "dev": true, + "requires": { + "gitconfiglocal": "^1.0.0", + "pify": "^2.3.0" + } + }, + "git-semver-tags": { + "version": "3.0.1", + "resolved": 
"https://registry.npmjs.org/git-semver-tags/-/git-semver-tags-3.0.1.tgz", + "integrity": "sha512-Hzd1MOHXouITfCasrpVJbRDg9uvW7LfABk3GQmXYZByerBDrfrEMP9HXpNT7RxAbieiocP6u+xq20DkvjwxnCA==", + "dev": true, + "requires": { + "meow": "^5.0.0", + "semver": "^6.0.0" + }, + "dependencies": { + "arrify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", + "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=", + "dev": true + }, + "camelcase": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-4.1.0.tgz", + "integrity": "sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0=", + "dev": true + }, + "camelcase-keys": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-4.2.0.tgz", + "integrity": "sha1-oqpfsa9oh1glnDLBQUJteJI7m3c=", + "dev": true, + "requires": { + "camelcase": "^4.1.0", + "map-obj": "^2.0.0", + "quick-lru": "^1.0.0" + } + }, + "indent-string": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-3.2.0.tgz", + "integrity": "sha1-Sl/W0nzDMvN+VBmlBNu4NxBckok=", + "dev": true + }, + "map-obj": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-2.0.0.tgz", + "integrity": "sha1-plzSkIepJZi4eRJXpSPgISIqwfk=", + "dev": true + }, + "meow": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/meow/-/meow-5.0.0.tgz", + "integrity": "sha512-CbTqYU17ABaLefO8vCU153ZZlprKYWDljcndKKDCFcYQITzWCXZAVk4QMFZPgvzrnUQ3uItnIE/LoUOwrT15Ig==", + "dev": true, + "requires": { + "camelcase-keys": "^4.0.0", + "decamelize-keys": "^1.0.0", + "loud-rejection": "^1.0.0", + "minimist-options": "^3.0.1", + "normalize-package-data": "^2.3.4", + "read-pkg-up": "^3.0.0", + "redent": "^2.0.0", + "trim-newlines": "^2.0.0", + "yargs-parser": "^10.0.0" + } + }, + "minimist-options": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/minimist-options/-/minimist-options-3.0.2.tgz", + "integrity": "sha512-FyBrT/d0d4+uiZRbqznPXqw3IpZZG3gl3wKWiX784FycUKVwBt0uLBFkQrtE4tZOrgo78nZp2jnKz3L65T5LdQ==", + "dev": true, + "requires": { + "arrify": "^1.0.1", + "is-plain-obj": "^1.1.0" + } + }, + "quick-lru": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-1.1.0.tgz", + "integrity": "sha1-Q2CxfGETatOAeDl/8RQW4Ybc+7g=", + "dev": true + }, + "redent": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/redent/-/redent-2.0.0.tgz", + "integrity": "sha1-wbIAe0LVfrE4kHmzyDM2OdXhzKo=", + "dev": true, + "requires": { + "indent-string": "^3.0.0", + "strip-indent": "^2.0.0" + } + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + }, + "strip-indent": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-2.0.0.tgz", + "integrity": "sha1-XvjbKV0B5u1sv3qrlpmNeCJSe2g=", + "dev": true + }, + "trim-newlines": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-2.0.0.tgz", + "integrity": "sha1-tAPQuRvlDDMd/EuC7s6yLD3hbSA=", + "dev": true + }, + "yargs-parser": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-10.1.0.tgz", + "integrity": "sha512-VCIyR1wJoEBZUqk5PA+oOBF6ypbwh5aNB3I50guxAL/quggdfs4TtNHQrSazFA3fYZ+tEqfs0zIGlv0c/rgjbQ==", + "dev": true, + "requires": { + "camelcase": "^4.1.0" + } + } 
+ } + }, + "gitconfiglocal": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/gitconfiglocal/-/gitconfiglocal-1.0.0.tgz", + "integrity": "sha1-QdBF84UaXqiPA/JMocYXgRRGS5s=", + "dev": true, + "requires": { + "ini": "^1.3.2" + } + }, + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "graceful-fs": { + "version": "4.2.4", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.4.tgz", + "integrity": "sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw==", + "dev": true + }, + "handlebars": { + "version": "4.7.6", + "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.6.tgz", + "integrity": "sha512-1f2BACcBfiwAfStCKZNrUCgqNZkGsAT7UM3kkYtXuLo0KnaVfjKOyf7PRzB6++aK9STyT1Pd2ZCPe3EGOXleXA==", + "dev": true, + "requires": { + "minimist": "^1.2.5", + "neo-async": "^2.6.0", + "source-map": "^0.6.1", + "uglify-js": "^3.1.4", + "wordwrap": "^1.0.0" + } + }, + "hard-rejection": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/hard-rejection/-/hard-rejection-2.1.0.tgz", + "integrity": "sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA==", + "dev": true + }, + "has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true + }, + "hosted-git-info": { + "version": "2.8.8", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.8.tgz", + "integrity": "sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==", + "dev": true + }, + "indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "ini": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz", + "integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==", + "dev": true + }, + "interpret": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz", + "integrity": "sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==", + "dev": true + }, + "is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", + "dev": true + }, + "is-finite": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-finite/-/is-finite-1.1.0.tgz", + "integrity": 
"sha512-cdyMtqX/BOqqNBBiKlIVkytNHm49MtMlYyn1zxzvJKWmFMlGzm+ry5BBfYyeY9YmNKbRSo/o7OX9w9ale0wg3w==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "is-obj": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-1.0.1.tgz", + "integrity": "sha1-PkcprB9f3gJc19g6iW2rn09n2w8=", + "dev": true + }, + "is-plain-obj": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", + "integrity": "sha1-caUMhCnfync8kqOQpKA7OfzVHT4=", + "dev": true + }, + "is-text-path": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-text-path/-/is-text-path-1.0.1.tgz", + "integrity": "sha1-Thqg+1G/vLPpJogAE5cgLBd1tm4=", + "dev": true, + "requires": { + "text-extensions": "^1.0.0" + } + }, + "is-utf8": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-utf8/-/is-utf8-0.2.1.tgz", + "integrity": "sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI=", + "dev": true + }, + "isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=", + "dev": true + }, + "js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "json-parse-better-errors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", + "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", + "dev": true + }, + "json-stringify-safe": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=", + "dev": true + }, + "jsonparse": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.3.1.tgz", + "integrity": "sha1-P02uSpH6wxX3EGL4UhzCOfE2YoA=", + "dev": true + }, + "kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true + }, + "lines-and-columns": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.1.6.tgz", + "integrity": "sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA=", + "dev": true + }, + "load-json-file": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-1.1.0.tgz", + "integrity": "sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA=", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "parse-json": "^2.2.0", + "pify": "^2.0.0", + "pinkie-promise": "^2.0.0", + "strip-bom": "^2.0.0" + }, + "dependencies": { + "parse-json": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", + "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", + "dev": true, + "requires": { + "error-ex": "^1.2.0" + } + } + } + }, + "locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": 
"sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "requires": { + "p-locate": "^4.1.0" + } + }, + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "dev": true + }, + "lodash._reinterpolate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/lodash._reinterpolate/-/lodash._reinterpolate-3.0.0.tgz", + "integrity": "sha1-DM8tiRZq8Ds2Y8eWU4t1rG4RTZ0=", + "dev": true + }, + "lodash.ismatch": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/lodash.ismatch/-/lodash.ismatch-4.4.0.tgz", + "integrity": "sha1-dWy1FQyjum8RCFp4hJZF8Yj4Xzc=", + "dev": true + }, + "lodash.template": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.template/-/lodash.template-4.5.0.tgz", + "integrity": "sha512-84vYFxIkmidUiFxidA/KjjH9pAycqW+h980j7Fuz5qxRtO9pgB7MDFTdys1N7A5mcucRiDyEq4fusljItR1T/A==", + "dev": true, + "requires": { + "lodash._reinterpolate": "^3.0.0", + "lodash.templatesettings": "^4.0.0" + } + }, + "lodash.templatesettings": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/lodash.templatesettings/-/lodash.templatesettings-4.2.0.tgz", + "integrity": "sha512-stgLz+i3Aa9mZgnjr/O+v9ruKZsPsndy7qPZOchbqk2cnTU1ZaldKK+v7m54WoKIyxiuMZTKT2H81F8BeAc3ZQ==", + "dev": true, + "requires": { + "lodash._reinterpolate": "^3.0.0" + } + }, + "loud-rejection": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/loud-rejection/-/loud-rejection-1.6.0.tgz", + "integrity": "sha1-W0b4AUft7leIcPCG0Eghz5mOVR8=", + "dev": true, + "requires": { + "currently-unhandled": "^0.4.1", + "signal-exit": "^3.0.0" + } + }, + "map-obj": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.1.0.tgz", + "integrity": "sha512-glc9y00wgtwcDmp7GaE/0b0OnxpNJsVf3ael/An6Fe2Q51LLwN1er6sdomLRzz5h0+yMpiYLhWYF5R7HeqVd4g==", + "dev": true + }, + "meow": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/meow/-/meow-7.0.1.tgz", + "integrity": "sha512-tBKIQqVrAHqwit0vfuFPY3LlzJYkEOFyKa3bPgxzNl6q/RtN8KQ+ALYEASYuFayzSAsjlhXj/JZ10rH85Q6TUw==", + "dev": true, + "requires": { + "@types/minimist": "^1.2.0", + "arrify": "^2.0.1", + "camelcase": "^6.0.0", + "camelcase-keys": "^6.2.2", + "decamelize-keys": "^1.1.0", + "hard-rejection": "^2.1.0", + "minimist-options": "^4.0.2", + "normalize-package-data": "^2.5.0", + "read-pkg-up": "^7.0.1", + "redent": "^3.0.0", + "trim-newlines": "^3.0.0", + "type-fest": "^0.13.1", + "yargs-parser": "^18.1.3" + }, + "dependencies": { + "read-pkg": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz", + "integrity": "sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==", + "dev": true, + "requires": { + "@types/normalize-package-data": "^2.4.0", + "normalize-package-data": "^2.5.0", + "parse-json": "^5.0.0", + "type-fest": "^0.6.0" + }, + "dependencies": { + "type-fest": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz", + "integrity": "sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==", + "dev": true + } + } + }, + "read-pkg-up": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-7.0.1.tgz", + "integrity": 
"sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==", + "dev": true, + "requires": { + "find-up": "^4.1.0", + "read-pkg": "^5.2.0", + "type-fest": "^0.8.1" + }, + "dependencies": { + "type-fest": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", + "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", + "dev": true + } + } + } + } + }, + "min-indent": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", + "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", + "dev": true + }, + "minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "minimist": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", + "dev": true + }, + "minimist-options": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/minimist-options/-/minimist-options-4.1.0.tgz", + "integrity": "sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A==", + "dev": true, + "requires": { + "arrify": "^1.0.1", + "is-plain-obj": "^1.1.0", + "kind-of": "^6.0.3" + }, + "dependencies": { + "arrify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", + "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=", + "dev": true + } + } + }, + "modify-values": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/modify-values/-/modify-values-1.0.1.tgz", + "integrity": "sha512-xV2bxeN6F7oYjZWTe/YPAy6MN2M+sL4u/Rlm2AHCIVGfo2p1yGmBHQ6vHehl4bRTZBdHu3TSkWdYgkwpYzAGSw==", + "dev": true + }, + "neo-async": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.1.tgz", + "integrity": "sha512-iyam8fBuCUpWeKPGpaNMetEocMt364qkCsfL9JuhjXX6dRnguRVOfk2GZaDpPjcOKiiXCPINZC1GczQ7iTq3Zw==", + "dev": true + }, + "normalize-package-data": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "dev": true, + "requires": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", + "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + } + } + }, + "null-check": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/null-check/-/null-check-1.0.0.tgz", + "integrity": "sha1-l33/1xdgErnsMNKjnbXPcqBDnt0=", + "dev": true + }, + "number-is-nan": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", + "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=", + "dev": true + }, + "object-assign": { + "version": "4.1.1", + "resolved": 
"https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", + "dev": true + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "requires": { + "wrappy": "1" + } + }, + "p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "requires": { + "p-limit": "^2.2.0" + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true + }, + "parse-github-repo-url": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/parse-github-repo-url/-/parse-github-repo-url-1.4.1.tgz", + "integrity": "sha1-nn2LslKmy2ukJZUGC3v23z28H1A=", + "dev": true + }, + "parse-json": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.0.0.tgz", + "integrity": "sha512-OOY5b7PAEFV0E2Fir1KOkxchnZNCdowAJgQ5NuxjpBKTRP3pQhwkrkxqQjeoKJ+fO7bCpmIZaogI4eZGDMEGOw==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-better-errors": "^1.0.1", + "lines-and-columns": "^1.1.6" + } + }, + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true + }, + "path-parse": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", + "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==", + "dev": true + }, + "path-type": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-1.1.0.tgz", + "integrity": "sha1-WcRPfuSR2nBNpBXaWkBwuk+P5EE=", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "pify": "^2.0.0", + "pinkie-promise": "^2.0.0" + } + }, + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "dev": true + }, + "pinkie": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", + "integrity": "sha1-clVrgM+g1IqXToDnckjoDtT3+HA=", + "dev": true + }, + "pinkie-promise": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "integrity": "sha1-ITXW36ejWMBprJsXh3YogihFD/o=", + "dev": true, + "requires": { + "pinkie": "^2.0.0" + } + }, + "process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": 
"sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "dev": true + }, + "q": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/q/-/q-1.5.1.tgz", + "integrity": "sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc=", + "dev": true + }, + "quick-lru": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz", + "integrity": "sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==", + "dev": true + }, + "read-pkg": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", + "integrity": "sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=", + "dev": true, + "requires": { + "load-json-file": "^4.0.0", + "normalize-package-data": "^2.3.2", + "path-type": "^3.0.0" + }, + "dependencies": { + "load-json-file": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", + "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "parse-json": "^4.0.0", + "pify": "^3.0.0", + "strip-bom": "^3.0.0" + } + }, + "parse-json": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", + "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", + "dev": true, + "requires": { + "error-ex": "^1.3.1", + "json-parse-better-errors": "^1.0.1" + } + }, + "path-type": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", + "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", + "dev": true, + "requires": { + "pify": "^3.0.0" + } + }, + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", + "dev": true + }, + "strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=", + "dev": true + } + } + }, + "read-pkg-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-3.0.0.tgz", + "integrity": "sha1-PtSWaF26D4/hGNBpHcUfSh/5bwc=", + "dev": true, + "requires": { + "find-up": "^2.0.0", + "read-pkg": "^3.0.0" + }, + "dependencies": { + "find-up": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", + "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", + "dev": true, + "requires": { + "locate-path": "^2.0.0" + } + }, + "locate-path": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", + "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", + "dev": true, + "requires": { + "p-locate": "^2.0.0", + "path-exists": "^3.0.0" + } + }, + "p-limit": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", + "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", + "dev": true, + "requires": { + "p-try": "^1.0.0" + } + }, + "p-locate": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", + "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "dev": true, + "requires": { + "p-limit": "^1.1.0" + } + }, + "p-try": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", + "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", + "dev": true + }, + "path-exists": { + 
"version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", + "dev": true + } + } + }, + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + }, + "rechoir": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz", + "integrity": "sha1-hSBLVNuoLVdC4oyWdW70OvUOM4Q=", + "dev": true, + "requires": { + "resolve": "^1.1.6" + } + }, + "redent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz", + "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==", + "dev": true, + "requires": { + "indent-string": "^4.0.0", + "strip-indent": "^3.0.0" + } + }, + "repeating": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/repeating/-/repeating-2.0.1.tgz", + "integrity": "sha1-UhTFOpJtNVJwdSf7q0FdvAjQbdo=", + "dev": true, + "requires": { + "is-finite": "^1.0.0" + } + }, + "require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "dev": true + }, + "require-main-filename": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", + "dev": true + }, + "resolve": { + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.17.0.tgz", + "integrity": "sha512-ic+7JYiV8Vi2yzQGFWOkiZD5Z9z7O2Zhm9XMaTxdJExKasieFCr+yXZ/WmXsckHiKl12ar0y6XiXDx3m4RHn1w==", + "dev": true, + "requires": { + "path-parse": "^1.0.6" + } + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true + }, + "semver": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.1.1.tgz", + "integrity": "sha512-WfuG+fl6eh3eZ2qAf6goB7nhiCd7NPXhmyFxigB/TOkQyeLP8w8GsVehvtGNtnNmyboz4TgeK40B1Kbql/8c5A==", + "dev": true + }, + "set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", + "dev": true + }, + "shelljs": { + "version": "0.8.4", + "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.8.4.tgz", + "integrity": "sha512-7gk3UZ9kOfPLIAbslLzyWeGiEqx9e3rxwZM0KE6EL8GlGwjym9Mrlx5/p33bWTu9YG6vcS4MBxYZDHYr5lr8BQ==", + "dev": true, + "requires": { + "glob": "^7.0.0", + "interpret": "^1.0.0", + "rechoir": "^0.6.2" + } + }, + "signal-exit": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", + "integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==", + "dev": true + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": 
"sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + }, + "spdx-correct": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.1.tgz", + "integrity": "sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==", + "dev": true, + "requires": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "spdx-exceptions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz", + "integrity": "sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==", + "dev": true + }, + "spdx-expression-parse": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", + "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", + "dev": true, + "requires": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "spdx-license-ids": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz", + "integrity": "sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q==", + "dev": true + }, + "split": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz", + "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==", + "dev": true, + "requires": { + "through": "2" + } + }, + "split2": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-2.2.0.tgz", + "integrity": "sha512-RAb22TG39LhI31MbreBgIuKiIKhVsawfTgEGqKHTK87aG+ul/PB8Sqoi3I7kVdRWiCfrKxK3uo4/YUkpNvhPbw==", + "dev": true, + "requires": { + "through2": "^2.0.2" + }, + "dependencies": { + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "requires": { + "safe-buffer": "~5.1.0" + } + }, + "through2": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", + "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", + "dev": true, + "requires": { + "readable-stream": "~2.3.6", + "xtend": "~4.0.1" + } + } + } + }, + "standard-version": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/standard-version/-/standard-version-8.0.0.tgz", + "integrity": 
"sha512-cS/U9yhYPHfyokFce6e/H3U8MaKwZKSGzH25J776sChrae/doDQjsl3vCQ0hW1MSzdrUTb7pir4ApjnbDt/TAg==", + "dev": true, + "requires": { + "chalk": "2.4.2", + "conventional-changelog": "3.1.18", + "conventional-changelog-config-spec": "2.1.0", + "conventional-changelog-conventionalcommits": "4.2.3", + "conventional-recommended-bump": "6.0.5", + "detect-indent": "6.0.0", + "detect-newline": "3.1.0", + "dotgitignore": "2.1.0", + "figures": "3.1.0", + "find-up": "4.1.0", + "fs-access": "1.0.1", + "git-semver-tags": "3.0.1", + "semver": "7.1.1", + "stringify-package": "1.0.1", + "yargs": "15.3.1" + } + }, + "string-width": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", + "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.0" + } + }, + "string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dev": true, + "requires": { + "safe-buffer": "~5.2.0" + } + }, + "stringify-package": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/stringify-package/-/stringify-package-1.0.1.tgz", + "integrity": "sha512-sa4DUQsYciMP1xhKWGuFM04fB0LG/9DlluZoSVywUMRNvzid6XucHK0/90xGxRoHrAaROrcHK1aPKaijCtSrhg==", + "dev": true + }, + "strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.0" + } + }, + "strip-bom": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-2.0.0.tgz", + "integrity": "sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4=", + "dev": true, + "requires": { + "is-utf8": "^0.2.0" + } + }, + "strip-indent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", + "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", + "dev": true, + "requires": { + "min-indent": "^1.0.0" + } + }, + "supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } + }, + "text-extensions": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/text-extensions/-/text-extensions-1.9.0.tgz", + "integrity": "sha512-wiBrwC1EhBelW12Zy26JeOUkQ5mRu+5o8rpsJk5+2t+Y5vE7e842qtZDQ2g1NpX/29HdyFeJ4nSIhI47ENSxlQ==", + "dev": true + }, + "through": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=", + "dev": true + }, + "through2": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.1.tgz", + "integrity": "sha512-M96dvTalPT3YbYLaKaCuwu+j06D/8Jfib0o/PxbVt6Amhv3dUAtW6rTV1jPgJSBG83I/e04Y6xkVdVhSRhi0ww==", + "dev": true, + "requires": { + "readable-stream": "2 || 3" + } + }, + "trim-newlines": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/trim-newlines/-/trim-newlines-3.0.0.tgz", + "integrity": "sha512-C4+gOpvmxaSMKuEf9Qc134F1ZuOHVXKRbtEflf4NTtuuJDEIJ9p5PXsalL8SkeRw+qit1Mo+yuvMPAKwWg/1hA==", + "dev": true + }, + "trim-off-newlines": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/trim-off-newlines/-/trim-off-newlines-1.0.1.tgz", + "integrity": "sha1-n5up2e+odkw4dpi8v+sshI8RrbM=", + "dev": true + }, + "type-fest": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.13.1.tgz", + "integrity": "sha512-34R7HTnG0XIJcBSn5XhDd7nNFPRcXYRZrBB2O2jdKqYODldSzBAqzsWoZYYvduky73toYS/ESqxPvkDf/F0XMg==", + "dev": true + }, + "typedarray": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", + "integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=", + "dev": true + }, + "uglify-js": { + "version": "3.9.4", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.9.4.tgz", + "integrity": "sha512-8RZBJq5smLOa7KslsNsVcSH+KOXf1uDU8yqLeNuVKwmT0T3FA0ZoXlinQfRad7SDcbZZRZE4ov+2v71EnxNyCA==", + "dev": true, + "optional": true, + "requires": { + "commander": "~2.20.3" + } + }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=", + "dev": true + }, + "validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, + "requires": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "which-module": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", + "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", + "dev": true + }, + "wordwrap": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", + "integrity": "sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus=", + "dev": true + }, + "wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", + "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "dev": true, + "requires": { + "@types/color-name": "^1.1.1", + "color-convert": "^2.0.1" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + } + } + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": 
"sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + }, + "xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "dev": true + }, + "y18n": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz", + "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==", + "dev": true + }, + "yargs": { + "version": "15.3.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.3.1.tgz", + "integrity": "sha512-92O1HWEjw27sBfgmXiixJWT5hRBp2eobqXicLtPBIDBhYB+1HpwZlXmbW2luivBJHBzki+7VyCLRtAkScbTBQA==", + "dev": true, + "requires": { + "cliui": "^6.0.0", + "decamelize": "^1.2.0", + "find-up": "^4.1.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^4.2.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^18.1.1" + } + }, + "yargs-parser": { + "version": "18.1.3", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz", + "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==", + "dev": true, + "requires": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + }, + "dependencies": { + "camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true + } + } + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 000000000..df6e7ab2a --- /dev/null +++ b/package.json @@ -0,0 +1,72 @@ +{ + "name": "kubeflow-pipelines", + "private": true, + "description": "kubeflow pipelines package that only helps with release tooling", + "scripts": { + "changelog": "standard-version -t ''" + }, + "standard-version": { + "header": "# Changelog\n", + "types": [ + { + "type": "feat", + "section": "Features" + }, + { + "type": "fix", + "section": "Bug Fixes" + }, + { + "type": "perf", + "section": "Performance" + }, + { + "type": "", + "section": "Other Pull Requests" + }, + { + "type": "chore", + "hidden": true + }, + { + "type": "docs", + "hidden": true + }, + { + "type": "style", + "hidden": true + }, + { + "type": "refactor", + "hidden": true + }, + { + "type": "test", + "hidden": true + } + ], + "issuePrefixes": [ + "never-match-an-issue" + ], + "skip": { + "bump": true, + "tag": true, + "commit": true + }, + "packageFiles": [ + { + "filename": "./VERSION", + "type": "plain-text" + } + ] + }, + "author": "google", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "https://github.com/kubeflow/pipelines.git" + }, + "devDependencies": { + "standard-version": "^8.0.0" + } +} diff --git a/proxy/attempt-register-vm-on-proxy.sh b/proxy/attempt-register-vm-on-proxy.sh index 55ce0d185..bb9b518ab 100755 --- a/proxy/attempt-register-vm-on-proxy.sh +++ b/proxy/attempt-register-vm-on-proxy.sh @@ -35,10 +35,19 @@ function run-proxy-agent { --health-check-unhealthy-threshold=${HEALTH_CHECK_UNHEALTHY_THRESHOLD} } -# Don't reuse existing hostname. It means if proxy-agent got restarted, -# it will get a new hostname. -# https://github.com/kubeflow/pipelines/issues/3143 -# Another option is that We may try to fix it in InverseProxy server side. +# Check if already has Hostname value. 
+# It's possible the pod got restarted; in that case we continue to use the existing +# hostname. On the proxy server side, the VM name is not checked even if the pod got moved to +# a new VM. +HOSTNAME=$(kubectl get configmap inverse-proxy-config -o json | jq -r ".data.Hostname // empty") +if [[ -n "${HOSTNAME}" ]]; then + echo "Reuse existing hostname" + PROXY_URL=$(kubectl get configmap inverse-proxy-config -o json | jq -r ".data.ProxyUrl") + BACKEND_ID=$(kubectl get configmap inverse-proxy-config -o json | jq -r ".data.BackendId") + # If the ConfigMap already exists, reuse the existing endpoint (a.k.a. BACKEND_ID) and the same ProxyUrl. + run-proxy-agent + exit 0 +fi # Activate service account for gcloud SDK first if [[ ! -z "${GOOGLE_APPLICATION_CREDENTIALS}" ]]; then diff --git a/release/RELEASE.md b/release/RELEASE.md deleted file mode 100644 index 8311551fc..000000000 --- a/release/RELEASE.md +++ /dev/null @@ -1,12 +0,0 @@ -# Current version under development -* Added a release note. -## Major features and improvements -### Components: - -* Added [Google Cloud Storage components](https://github.com/kubeflow/pipelines/tree/290fa55fb9c908be38cbe6bc4c3f477da6b0e97f/components/google-cloud/storage): [Download](https://github.com/kubeflow/pipelines/tree/290fa55fb9c908be38cbe6bc4c3f477da6b0e97f/components/google-cloud/storage/download), [Upload to explicit URI](https://github.com/kubeflow/pipelines/tree/290fa55fb9c908be38cbe6bc4c3f477da6b0e97f/components/google-cloud/storage/upload_to_explicit_uri), [Upload to unique URI](https://github.com/kubeflow/pipelines/tree/290fa55fb9c908be38cbe6bc4c3f477da6b0e97f/components/google-cloud/storage/upload_to_unique_uri) and [List](https://github.com/kubeflow/pipelines/tree/290fa55fb9c908be38cbe6bc4c3f477da6b0e97f/components/google-cloud/storage/list). - -## Bug fixes and other changes - -### Deprecations - -## Breaking changes \ No newline at end of file diff --git a/sdk/python/README.md b/sdk/python/README.md index 51f818208..938b30112 100644 --- a/sdk/python/README.md +++ b/sdk/python/README.md @@ -103,7 +103,7 @@ the Tekton YAML instead of Argo YAML. Since the KFP SDK was not designed and imp _monkey-patching_ was used to replace non-class methods and functions at runtime. In order for the _monkey patch_ to work properly, the `kfp-tekton` compiler source code has to be aligned with a -specific version of the `kfp` SDK compiler. As of now that version is [`0.5.1`](https://github.com/kubeflow/pipelines/releases/tag/0.5.1). +specific version of the `kfp` SDK compiler. As of now that version is [`1.0.0`](https://github.com/kubeflow/pipelines/releases/tag/1.0.0). ## Adding New Code diff --git a/sdk/python/kfp_tekton/__init__.py b/sdk/python/kfp_tekton/__init__.py index b69fcd4ed..ab9376fe5 100644 --- a/sdk/python/kfp_tekton/__init__.py +++ b/sdk/python/kfp_tekton/__init__.py @@ -12,6 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License.
-__version__ = '0.2.0' +__version__ = '0.3.0' from ._client import TektonClient # noqa F401 diff --git a/sdk/python/kfp_tekton/compiler/compiler.py b/sdk/python/kfp_tekton/compiler/compiler.py index fb49e358b..941c45716 100644 --- a/sdk/python/kfp_tekton/compiler/compiler.py +++ b/sdk/python/kfp_tekton/compiler/compiler.py @@ -26,7 +26,7 @@ from typing import Callable, List, Text, Dict, Any # Kubeflow Pipeline imports from kfp import dsl -from kfp.compiler._default_transformers import add_pod_env, add_pod_labels, get_default_telemetry_labels +from kfp.compiler._default_transformers import add_pod_env # , add_pod_labels, get_default_telemetry_labels from kfp.compiler.compiler import Compiler # from kfp.components._yaml_utils import dump_yaml from kfp.components.structures import InputSpec @@ -476,7 +476,7 @@ class TektonCompiler(Compiler): 'kind': 'PipelineRun', 'metadata': { 'name': sanitize_k8s_name(pipeline.name or 'Pipeline', suffix_space=4), - 'labels': get_default_telemetry_labels(), + # 'labels': get_default_telemetry_labels(), 'annotations': { 'tekton.dev/output_artifacts': json.dumps(self.output_artifacts, sort_keys=True), 'tekton.dev/input_artifacts': json.dumps(self.input_artifacts, sort_keys=True), @@ -581,7 +581,6 @@ class TektonCompiler(Compiler): pipeline_description: Text = None, params_list: List[dsl.PipelineParam] = None, pipeline_conf: dsl.PipelineConf = None, - allow_telemetry: bool = True, ) -> Dict[Text, Any]: """ Internal implementation of create_workflow.""" params_list = params_list or [] @@ -646,12 +645,12 @@ class TektonCompiler(Compiler): op_transformers = [add_pod_env] - # By default adds telemetry instruments. Users can opt out toggling - # allow_telemetry. - # Also, TFX pipelines will be bypassed for pipeline compiled by tfx>0.21.4. - if allow_telemetry: - pod_labels = get_default_telemetry_labels() - op_transformers.append(add_pod_labels(pod_labels)) + # # By default adds telemetry instruments. Users can opt out toggling + # # allow_telemetry. + # # Also, TFX pipelines will be bypassed for pipeline compiled by tfx>0.21.4. + # if allow_telemetry: + # pod_labels = get_default_telemetry_labels() + # op_transformers.append(add_pod_labels(pod_labels)) op_transformers.extend(pipeline_conf.op_transformers) @@ -685,7 +684,6 @@ class TektonCompiler(Compiler): package_path, type_check=True, pipeline_conf: dsl.PipelineConf = None, - allow_telemetry: bool = True, enable_artifacts=True): """Compile the given pipeline function into workflow yaml. Args: @@ -698,8 +696,7 @@ class TektonCompiler(Compiler): enable_artifacts: enable artifacts, requires Kubeflow Pipelines with Minio. 
""" self.enable_artifacts = enable_artifacts - super().compile(pipeline_func, package_path, type_check, pipeline_conf=pipeline_conf, - allow_telemetry=allow_telemetry) + super().compile(pipeline_func, package_path, type_check, pipeline_conf=pipeline_conf) @staticmethod def _write_workflow(workflow: Dict[Text, Any], @@ -774,7 +771,6 @@ class TektonCompiler(Compiler): params_list: List[dsl.PipelineParam] = None, pipeline_conf: dsl.PipelineConf = None, package_path: Text = None, - allow_telemetry: bool = True ) -> None: """Compile the given pipeline function and dump it to specified file format.""" workflow = self._create_workflow( @@ -782,8 +778,7 @@ class TektonCompiler(Compiler): pipeline_name, pipeline_description, params_list, - pipeline_conf, - allow_telemetry) + pipeline_conf) TektonCompiler._write_workflow(workflow=workflow, package_path=package_path) # Tekton change _validate_workflow(workflow) diff --git a/sdk/python/kfp_tekton/compiler/main.py b/sdk/python/kfp_tekton/compiler/main.py index c12ecb817..7fc1822bd 100644 --- a/sdk/python/kfp_tekton/compiler/main.py +++ b/sdk/python/kfp_tekton/compiler/main.py @@ -44,9 +44,6 @@ def parse_arguments(): parser.add_argument('--disable-type-check', action='store_true', help='disable the type check, default is enabled.') - parser.add_argument('--disable-telemetry', - action='store_true', - help='disable adding telemetry labels, default is enabled.') parser.add_argument('--disable_artifacts', action='store_true', help='disable artifact inputs and outputs') @@ -56,7 +53,7 @@ def parse_arguments(): def _compile_pipeline_function(pipeline_funcs, function_name, output_path, type_check, - allow_telemetry, enable_artifacts=False): + enable_artifacts=False): if len(pipeline_funcs) == 0: raise ValueError('A function with @dsl.pipeline decorator is required in the py file.') @@ -73,11 +70,10 @@ def _compile_pipeline_function(pipeline_funcs, function_name, output_path, type_ pipeline_func = pipeline_funcs[0] TektonCompiler().compile(pipeline_func, output_path, type_check, - allow_telemetry=allow_telemetry, enable_artifacts=enable_artifacts) -def compile_pyfile(pyfile, function_name, output_path, type_check, allow_telemetry, +def compile_pyfile(pyfile, function_name, output_path, type_check, enable_artifacts=False): sys.path.insert(0, os.path.dirname(pyfile)) try: @@ -85,7 +81,6 @@ def compile_pyfile(pyfile, function_name, output_path, type_check, allow_telemet with kfp_compiler_main.PipelineCollectorContext() as pipeline_funcs: __import__(os.path.splitext(filename)[0]) _compile_pipeline_function(pipeline_funcs, function_name, output_path, type_check, - allow_telemetry=allow_telemetry, enable_artifacts=enable_artifacts) finally: del sys.path[0] @@ -98,7 +93,7 @@ def main(): (args.py is not None and args.package is not None)): raise ValueError('Either --py or --package is needed but not both.') if args.py: - compile_pyfile(args.py, args.function, args.output, not args.disable_type_check, not args.disable_telemetry, + compile_pyfile(args.py, args.function, args.output, not args.disable_type_check, not args.disable_artifacts) else: if args.namespace is None: diff --git a/sdk/python/requirements.in b/sdk/python/requirements.in index f06c62849..48ccca00e 100644 --- a/sdk/python/requirements.in +++ b/sdk/python/requirements.in @@ -1,3 +1,3 @@ # kfp -kfp==0.5.1 +kfp==1.0.0 diff --git a/sdk/python/requirements.txt b/sdk/python/requirements.txt index d4cd5ece5..31f25ca68 100644 --- a/sdk/python/requirements.txt +++ b/sdk/python/requirements.txt @@ -21,7 +21,7 @@ 
idna==2.9 # via requests importlib-metadata==1.6.1 # via jsonschema jsonschema==3.2.0 # via kfp kfp-server-api==0.5.0 # via kfp -kfp==0.5.1 # via -r requirements.in +kfp==1.0.0 # via -r requirements.in kubernetes==11.0.0 # via kfp oauthlib==3.1.0 # via requests-oauthlib protobuf==3.12.2 # via google-api-core, googleapis-common-protos diff --git a/sdk/python/setup.py b/sdk/python/setup.py index a2882df28..c13ef9df8 100644 --- a/sdk/python/setup.py +++ b/sdk/python/setup.py @@ -20,14 +20,14 @@ # # To create a distribution for PyPi run: # -# $ export KFP_TEKTON_VERSION=0.2.0-rc1 +# $ export KFP_TEKTON_VERSION=0.3.0-rc1 # $ python3 setup.py sdist # $ twine check dist/kfp-tekton-${KFP_TEKTON_VERSION}.tar.gz # $ twine upload --repository pypi dist/kfp-tekton-${KFP_TEKTON_VERSION}.tar.gz # # ... or: # -# $ make distribution KFP_TEKTON_VERSION=0.2.0-rc1 +# $ make distribution KFP_TEKTON_VERSION=0.3.0-rc1 # # ============================================================================= @@ -52,7 +52,7 @@ development stage. Contributions are welcome: {} """.format(HOMEPAGE) REQUIRES = [ - 'kfp==0.5.1', + 'kfp==1.0.0', ] logging.basicConfig() diff --git a/sdk/python/tests/README.md b/sdk/python/tests/README.md index 28c8f1bad..816885276 100644 --- a/sdk/python/tests/README.md +++ b/sdk/python/tests/README.md @@ -24,7 +24,7 @@ or run this command from the project root directory: You should see an output similar to the one below, outlining which test scripts have passed and which are failing: ```YAML -KFP version: 0.5.1 +KFP version: 1.0.0 SUCCESS: add_pod_env.py SUCCESS: basic.py @@ -131,5 +131,5 @@ Occurences of other Errors: ## Disclaimer -**Note:** The reports above were created for the pipeline scripts found in KFP version `0.5.1` since the -`kfp_tekton` compiler code is currently based on the `kfp` SDK compiler version `0.5.1`. +**Note:** The reports above were created for the pipeline scripts found in KFP version `1.0.0` since the +`kfp_tekton` compiler code is currently based on the `kfp` SDK compiler version `1.0.0`. 
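The compiler changes above drop the `allow_telemetry` keyword from `TektonCompiler.compile()` and the `--disable-telemetry` CLI flag, so callers are left with `type_check`, `pipeline_conf`, and `enable_artifacts`. A minimal sketch of invoking the compiler after this change, assuming `kfp-tekton` 0.3.0 with `kfp==1.0.0` installed; the `echo_pipeline` example below is illustrative only and is not part of this patch:

```python
# Illustrative only: compiles a toy pipeline with the post-patch signature,
# which no longer accepts allow_telemetry.
from kfp import dsl
from kfp_tekton.compiler import TektonCompiler


def echo_op():
    # A single-step container op; any image/command would do here.
    return dsl.ContainerOp(
        name='echo',
        image='busybox',
        command=['sh', '-c'],
        arguments=['echo "hello world"'])


@dsl.pipeline(name='echo', description='Toy pipeline for the compile call')
def echo_pipeline():
    echo_op()


# Remaining keyword arguments are type_check, pipeline_conf, enable_artifacts.
TektonCompiler().compile(echo_pipeline, 'echo_pipeline.yaml')
```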
diff --git a/sdk/python/tests/compiler/testdata/affinity.yaml b/sdk/python/tests/compiler/testdata/affinity.yaml index 421ce1f0f..4f6c01797 100644 --- a/sdk/python/tests/compiler/testdata/affinity.yaml +++ b/sdk/python/tests/compiler/testdata/affinity.yaml @@ -21,8 +21,6 @@ metadata: sidecar.istio.io/inject: 'false' tekton.dev/input_artifacts: '{}' tekton.dev/output_artifacts: '{}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: affinity spec: pipelineSpec: diff --git a/sdk/python/tests/compiler/testdata/basic_no_decorator.yaml b/sdk/python/tests/compiler/testdata/basic_no_decorator.yaml index f4b1c4d38..8cff4661d 100644 --- a/sdk/python/tests/compiler/testdata/basic_no_decorator.yaml +++ b/sdk/python/tests/compiler/testdata/basic_no_decorator.yaml @@ -23,8 +23,6 @@ metadata: sidecar.istio.io/inject: 'false' tekton.dev/input_artifacts: '{}' tekton.dev/output_artifacts: '{}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: save-most-frequent-word spec: params: diff --git a/sdk/python/tests/compiler/testdata/big_data_passing.yaml b/sdk/python/tests/compiler/testdata/big_data_passing.yaml index 4007b177f..3e6b6658a 100644 --- a/sdk/python/tests/compiler/testdata/big_data_passing.yaml +++ b/sdk/python/tests/compiler/testdata/big_data_passing.yaml @@ -18,23 +18,21 @@ metadata: annotations: pipelines.kubeflow.org/pipeline_spec: '{"name": "File passing pipelines"}' sidecar.istio.io/inject: 'false' - tekton.dev/input_artifacts: '{"print-params": [{"name": "gen-params-output", "parent_task": + tekton.dev/input_artifacts: '{"print-params": [{"name": "gen-params-Output", "parent_task": "gen-params"}], "print-text": [{"name": "repeat-line-output_text", "parent_task": "repeat-line"}], "print-text-2": [{"name": "split-text-lines-odd_lines", "parent_task": "split-text-lines"}], "print-text-3": [{"name": "split-text-lines-even_lines", "parent_task": "split-text-lines"}], "print-text-4": [{"name": "write-numbers-numbers", - "parent_task": "write-numbers"}], "print-text-5": [{"name": "sum-numbers-output", + "parent_task": "write-numbers"}], "print-text-5": [{"name": "sum-numbers-Output", "parent_task": "sum-numbers"}], "sum-numbers": [{"name": "write-numbers-numbers", "parent_task": "write-numbers"}]}' - tekton.dev/output_artifacts: '{"gen-params": [{"name": "gen-params-output", "path": + tekton.dev/output_artifacts: '{"gen-params": [{"name": "gen-params-Output", "path": "/tmp/outputs/Output/data"}], "repeat-line": [{"name": "repeat-line-output_text", "path": "/tmp/outputs/output_text/data"}], "split-text-lines": [{"name": "split-text-lines-even_lines", "path": "/tmp/outputs/even_lines/data"}, {"name": "split-text-lines-odd_lines", - "path": "/tmp/outputs/odd_lines/data"}], "sum-numbers": [{"name": "sum-numbers-output", + "path": "/tmp/outputs/odd_lines/data"}], "sum-numbers": [{"name": "sum-numbers-Output", "path": "/tmp/outputs/Output/data"}], "write-numbers": [{"name": "write-numbers-numbers", "path": "/tmp/outputs/numbers/data"}]}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: file-passing-pipelines spec: pipelineSpec: @@ -55,7 +53,7 @@ spec: - -c - "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n\ \ os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return\ - \ file_path\n\ndef repeat_line(line , output_text_path , count = 10):\n\ + \ file_path\n\ndef repeat_line(line, output_text_path, count = 10):\n\ \ '''Repeat the line specified number of times'''\n with open(output_text_path,\ \ 'w') as writer:\n for i in range(count):\n 
writer.write(line\ \ + '\\n')\n\nimport argparse\n_parser = argparse.ArgumentParser(prog='Repeat\ @@ -65,12 +63,8 @@ spec: \ default=argparse.SUPPRESS)\n_parser.add_argument(\"--output-text\",\ \ dest=\"output_text_path\", type=_make_parent_dirs_and_return_path, required=True,\ \ default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ - _output_files = _parsed_args.pop(\"_output_paths\", [])\n\n_outputs =\ - \ repeat_line(**_parsed_args)\n\n_output_serializers = [\n\n]\n\nimport\ - \ os\nfor idx, output_file in enumerate(_output_files):\n try:\n \ - \ os.makedirs(os.path.dirname(output_file))\n except OSError:\n\ - \ pass\n with open(output_file, 'w') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n" - image: tensorflow/tensorflow:1.13.2-py3 + \n_outputs = repeat_line(**_parsed_args)\n" + image: python:3.7 name: main - env: - name: PIPELINERUN @@ -124,19 +118,14 @@ spec: - python3 - -u - -c - - "def print_text(\n text_path \n): # The \"text\" input is untyped\ + - "def print_text(\n text_path\n): # The \"text\" input is untyped\ \ so that any data can be printed\n '''Print text'''\n with open(text_path,\ \ 'r') as reader:\n for line in reader:\n print(line,\ \ end='')\n\nimport argparse\n_parser = argparse.ArgumentParser(prog='Print\ \ text', description='Print text')\n_parser.add_argument(\"--text\", dest=\"\ text_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args\ - \ = vars(_parser.parse_args())\n_output_files = _parsed_args.pop(\"_output_paths\"\ - , [])\n\n_outputs = print_text(**_parsed_args)\n\n_output_serializers\ - \ = [\n\n]\n\nimport os\nfor idx, output_file in enumerate(_output_files):\n\ - \ try:\n os.makedirs(os.path.dirname(output_file))\n except\ - \ OSError:\n pass\n with open(output_file, 'w') as f:\n \ - \ f.write(_output_serializers[idx](_outputs[idx]))\n" - image: tensorflow/tensorflow:1.13.2-py3 + \ = vars(_parser.parse_args())\n\n_outputs = print_text(**_parsed_args)\n" + image: python:3.7 name: main workspaces: - name: print-text @@ -190,28 +179,24 @@ spec: - -c - "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n\ \ os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return\ - \ file_path\n\ndef split_text_lines(source_path ,\n \ - \ odd_lines_path ,\n even_lines_path ):\n with\ - \ open(source_path, 'r') as reader:\n with open(odd_lines_path,\ - \ 'w') as odd_writer:\n with open(even_lines_path, 'w') as\ - \ even_writer:\n while True:\n line\ + \ file_path\n\ndef split_text_lines(source_path,\n \ + \ odd_lines_path,\n even_lines_path):\n with open(source_path,\ + \ 'r') as reader:\n with open(odd_lines_path, 'w') as odd_writer:\n\ + \ with open(even_lines_path, 'w') as even_writer:\n \ + \ while True:\n line = reader.readline()\n\ + \ if line == \"\":\n break\n\ + \ odd_writer.write(line)\n line\ \ = reader.readline()\n if line == \"\":\n \ - \ break\n odd_writer.write(line)\n \ - \ line = reader.readline()\n if line\ - \ == \"\":\n break\n even_writer.write(line)\n\ - \nimport argparse\n_parser = argparse.ArgumentParser(prog='Split text\ - \ lines', description='')\n_parser.add_argument(\"--source\", dest=\"\ - source_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"\ + \ break\n even_writer.write(line)\n\n\ + import argparse\n_parser = argparse.ArgumentParser(prog='Split text lines',\ + \ description='')\n_parser.add_argument(\"--source\", dest=\"source_path\"\ + , type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"\ 
--odd-lines\", dest=\"odd_lines_path\", type=_make_parent_dirs_and_return_path,\ \ required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--even-lines\"\ , dest=\"even_lines_path\", type=_make_parent_dirs_and_return_path, required=True,\ \ default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ - _output_files = _parsed_args.pop(\"_output_paths\", [])\n\n_outputs =\ - \ split_text_lines(**_parsed_args)\n\n_output_serializers = [\n\n]\n\n\ - import os\nfor idx, output_file in enumerate(_output_files):\n try:\n\ - \ os.makedirs(os.path.dirname(output_file))\n except OSError:\n\ - \ pass\n with open(output_file, 'w') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n" - image: tensorflow/tensorflow:1.13.2-py3 + \n_outputs = split_text_lines(**_parsed_args)\n" + image: python:3.7 name: main - env: - name: PIPELINERUN @@ -272,19 +257,14 @@ spec: - python3 - -u - -c - - "def print_text(\n text_path \n): # The \"text\" input is untyped\ + - "def print_text(\n text_path\n): # The \"text\" input is untyped\ \ so that any data can be printed\n '''Print text'''\n with open(text_path,\ \ 'r') as reader:\n for line in reader:\n print(line,\ \ end='')\n\nimport argparse\n_parser = argparse.ArgumentParser(prog='Print\ \ text', description='Print text')\n_parser.add_argument(\"--text\", dest=\"\ text_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args\ - \ = vars(_parser.parse_args())\n_output_files = _parsed_args.pop(\"_output_paths\"\ - , [])\n\n_outputs = print_text(**_parsed_args)\n\n_output_serializers\ - \ = [\n\n]\n\nimport os\nfor idx, output_file in enumerate(_output_files):\n\ - \ try:\n os.makedirs(os.path.dirname(output_file))\n except\ - \ OSError:\n pass\n with open(output_file, 'w') as f:\n \ - \ f.write(_output_serializers[idx](_outputs[idx]))\n" - image: tensorflow/tensorflow:1.13.2-py3 + \ = vars(_parser.parse_args())\n\n_outputs = print_text(**_parsed_args)\n" + image: python:3.7 name: main workspaces: - name: print-text-2 @@ -303,19 +283,14 @@ spec: - python3 - -u - -c - - "def print_text(\n text_path \n): # The \"text\" input is untyped\ + - "def print_text(\n text_path\n): # The \"text\" input is untyped\ \ so that any data can be printed\n '''Print text'''\n with open(text_path,\ \ 'r') as reader:\n for line in reader:\n print(line,\ \ end='')\n\nimport argparse\n_parser = argparse.ArgumentParser(prog='Print\ \ text', description='Print text')\n_parser.add_argument(\"--text\", dest=\"\ text_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args\ - \ = vars(_parser.parse_args())\n_output_files = _parsed_args.pop(\"_output_paths\"\ - , [])\n\n_outputs = print_text(**_parsed_args)\n\n_output_serializers\ - \ = [\n\n]\n\nimport os\nfor idx, output_file in enumerate(_output_files):\n\ - \ try:\n os.makedirs(os.path.dirname(output_file))\n except\ - \ OSError:\n pass\n with open(output_file, 'w') as f:\n \ - \ f.write(_output_serializers[idx](_outputs[idx]))\n" - image: tensorflow/tensorflow:1.13.2-py3 + \ = vars(_parser.parse_args())\n\n_outputs = print_text(**_parsed_args)\n" + image: python:3.7 name: main workspaces: - name: print-text-3 @@ -336,21 +311,17 @@ spec: - -c - "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n\ \ os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return\ - \ file_path\n\ndef write_numbers(\n numbers_path , start = 0,\ - \ count = 10):\n with open(numbers_path, 'w') as writer:\n \ - \ for i in range(start, count):\n writer.write(str(i) + '\\\ - n')\n\nimport 
argparse\n_parser = argparse.ArgumentParser(prog='Write\ - \ numbers', description='')\n_parser.add_argument(\"--start\", dest=\"\ - start\", type=int, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"\ - --count\", dest=\"count\", type=int, required=False, default=argparse.SUPPRESS)\n\ - _parser.add_argument(\"--numbers\", dest=\"numbers_path\", type=_make_parent_dirs_and_return_path,\ + \ file_path\n\ndef write_numbers(\n numbers_path, start = 0, count\ + \ = 10):\n with open(numbers_path, 'w') as writer:\n for i in\ + \ range(start, count):\n writer.write(str(i) + '\\n')\n\nimport\ + \ argparse\n_parser = argparse.ArgumentParser(prog='Write numbers', description='')\n\ + _parser.add_argument(\"--start\", dest=\"start\", type=int, required=False,\ + \ default=argparse.SUPPRESS)\n_parser.add_argument(\"--count\", dest=\"\ + count\", type=int, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"\ + --numbers\", dest=\"numbers_path\", type=_make_parent_dirs_and_return_path,\ \ required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ - _output_files = _parsed_args.pop(\"_output_paths\", [])\n\n_outputs =\ - \ write_numbers(**_parsed_args)\n\n_output_serializers = [\n\n]\n\nimport\ - \ os\nfor idx, output_file in enumerate(_output_files):\n try:\n \ - \ os.makedirs(os.path.dirname(output_file))\n except OSError:\n\ - \ pass\n with open(output_file, 'w') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n" - image: tensorflow/tensorflow:1.13.2-py3 + \n_outputs = write_numbers(**_parsed_args)\n" + image: python:3.7 name: main - env: - name: PIPELINERUN @@ -404,19 +375,14 @@ spec: - python3 - -u - -c - - "def print_text(\n text_path \n): # The \"text\" input is untyped\ + - "def print_text(\n text_path\n): # The \"text\" input is untyped\ \ so that any data can be printed\n '''Print text'''\n with open(text_path,\ \ 'r') as reader:\n for line in reader:\n print(line,\ \ end='')\n\nimport argparse\n_parser = argparse.ArgumentParser(prog='Print\ \ text', description='Print text')\n_parser.add_argument(\"--text\", dest=\"\ text_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args\ - \ = vars(_parser.parse_args())\n_output_files = _parsed_args.pop(\"_output_paths\"\ - , [])\n\n_outputs = print_text(**_parsed_args)\n\n_output_serializers\ - \ = [\n\n]\n\nimport os\nfor idx, output_file in enumerate(_output_files):\n\ - \ try:\n os.makedirs(os.path.dirname(output_file))\n except\ - \ OSError:\n pass\n with open(output_file, 'w') as f:\n \ - \ f.write(_output_serializers[idx](_outputs[idx]))\n" - image: tensorflow/tensorflow:1.13.2-py3 + \ = vars(_parser.parse_args())\n\n_outputs = print_text(**_parsed_args)\n" + image: python:3.7 name: main workspaces: - name: print-text-4 @@ -432,12 +398,12 @@ spec: - --numbers - $(workspaces.sum-numbers.path)/write-numbers-numbers - '----output-paths' - - $(workspaces.sum-numbers.path)/sum-numbers-output + - $(workspaces.sum-numbers.path)/sum-numbers-Output command: - python3 - -u - -c - - "def sum_numbers(numbers_path ) :\n sum = 0\n with open(numbers_path,\ + - "def sum_numbers(numbers_path):\n sum = 0\n with open(numbers_path,\ \ 'r') as reader:\n for line in reader:\n sum = sum\ \ + int(line)\n return sum\n\ndef _serialize_int(int_value: int) ->\ \ str:\n if isinstance(int_value, str):\n return int_value\n\ @@ -454,7 +420,7 @@ spec: \ try:\n os.makedirs(os.path.dirname(output_file))\n except\ \ OSError:\n pass\n with open(output_file, 'w') as f:\n \ \ 
f.write(_output_serializers[idx](_outputs[idx]))\n" - image: tensorflow/tensorflow:1.13.2-py3 + image: python:3.7 name: main - env: - name: PIPELINERUN @@ -486,9 +452,9 @@ spec: mc config host add storage http://minio-service.$NAMESPACE:9000 $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY - tar -cvzf output.tgz $(workspaces.sum-numbers.path)/sum-numbers-output + tar -cvzf Output.tgz $(workspaces.sum-numbers.path)/sum-numbers-Output - mc cp output.tgz storage/mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/output.tgz + mc cp Output.tgz storage/mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/Output.tgz ' workspaces: @@ -503,24 +469,19 @@ spec: steps: - args: - --text - - $(workspaces.print-text-5.path)/sum-numbers-output + - $(workspaces.print-text-5.path)/sum-numbers-Output command: - python3 - -u - -c - - "def print_text(\n text_path \n): # The \"text\" input is untyped\ + - "def print_text(\n text_path\n): # The \"text\" input is untyped\ \ so that any data can be printed\n '''Print text'''\n with open(text_path,\ \ 'r') as reader:\n for line in reader:\n print(line,\ \ end='')\n\nimport argparse\n_parser = argparse.ArgumentParser(prog='Print\ \ text', description='Print text')\n_parser.add_argument(\"--text\", dest=\"\ text_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args\ - \ = vars(_parser.parse_args())\n_output_files = _parsed_args.pop(\"_output_paths\"\ - , [])\n\n_outputs = print_text(**_parsed_args)\n\n_output_serializers\ - \ = [\n\n]\n\nimport os\nfor idx, output_file in enumerate(_output_files):\n\ - \ try:\n os.makedirs(os.path.dirname(output_file))\n except\ - \ OSError:\n pass\n with open(output_file, 'w') as f:\n \ - \ f.write(_output_serializers[idx](_outputs[idx]))\n" - image: tensorflow/tensorflow:1.13.2-py3 + \ = vars(_parser.parse_args())\n\n_outputs = print_text(**_parsed_args)\n" + image: python:3.7 name: main workspaces: - name: print-text-5 @@ -540,7 +501,7 @@ spec: - python3 - -u - -c - - "def gen_params() :\n import random\n num = random.randint(0, 9)\n\ + - "def gen_params():\n import random\n num = random.randint(0, 9)\n\ \ return num\n\ndef _serialize_int(int_value: int) -> str:\n if\ \ isinstance(int_value, str):\n return int_value\n if not isinstance(int_value,\ \ int):\n raise TypeError('Value \"{}\" has type \"{}\" instead\ @@ -554,7 +515,7 @@ spec: \ enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n\ \ except OSError:\n pass\n with open(output_file, 'w') as\ \ f:\n f.write(_output_serializers[idx](_outputs[idx]))\n" - image: tensorflow/tensorflow:1.13.2-py3 + image: python:3.7 name: main - env: - name: PIPELINERUN @@ -586,37 +547,32 @@ spec: mc config host add storage http://minio-service.$NAMESPACE:9000 $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY - tar -cvzf output.tgz $(results.output.path) + tar -cvzf Output.tgz $(results.output.path) - mc cp output.tgz storage/mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/output.tgz + mc cp Output.tgz storage/mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/Output.tgz ' - name: print-params params: - - name: gen-params-output + - name: gen-params-Output value: $(tasks.gen-params.results.output) taskSpec: params: - - name: gen-params-output + - name: gen-params-Output steps: - args: - --numbers-parm - - $(inputs.params.gen-params-output) + - $(inputs.params.gen-params-Output) command: - python3 - -u - -c - - "def print_params(numbers_parm ):\n print(\"The result number is: %d\"\ + - "def print_params(numbers_parm):\n print(\"The result number is: %d\"\ \ % 
numbers_parm)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog='Print\ \ params', description='')\n_parser.add_argument(\"--numbers-parm\", dest=\"\ numbers_parm\", type=int, required=True, default=argparse.SUPPRESS)\n\ - _parsed_args = vars(_parser.parse_args())\n_output_files = _parsed_args.pop(\"\ - _output_paths\", [])\n\n_outputs = print_params(**_parsed_args)\n\n_output_serializers\ - \ = [\n\n]\n\nimport os\nfor idx, output_file in enumerate(_output_files):\n\ - \ try:\n os.makedirs(os.path.dirname(output_file))\n except\ - \ OSError:\n pass\n with open(output_file, 'w') as f:\n \ - \ f.write(_output_serializers[idx](_outputs[idx]))\n" - image: tensorflow/tensorflow:1.13.2-py3 + _parsed_args = vars(_parser.parse_args())\n\n_outputs = print_params(**_parsed_args)\n" + image: python:3.7 name: main workspaces: - name: file-passing-pipelines diff --git a/sdk/python/tests/compiler/testdata/compose.yaml b/sdk/python/tests/compiler/testdata/compose.yaml index d30ddcc9f..bcf023726 100644 --- a/sdk/python/tests/compiler/testdata/compose.yaml +++ b/sdk/python/tests/compiler/testdata/compose.yaml @@ -28,8 +28,6 @@ metadata: tekton.dev/output_artifacts: '{"download": [{"name": "download-downloaded", "path": "/tmp/results.txt"}], "get-frequent": [{"name": "get-frequent-word", "path": "/tmp/message.txt"}]}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: download-and-save-most-frequent spec: params: diff --git a/sdk/python/tests/compiler/testdata/condition.yaml b/sdk/python/tests/compiler/testdata/condition.yaml index debd2e9ef..916168b4f 100644 --- a/sdk/python/tests/compiler/testdata/condition.yaml +++ b/sdk/python/tests/compiler/testdata/condition.yaml @@ -25,8 +25,6 @@ metadata: "flip-again"}], "print2": [{"name": "flip-output", "parent_task": "flip"}]}' tekton.dev/output_artifacts: '{"flip": [{"name": "flip-output", "path": "/tmp/output"}], "flip-again": [{"name": "flip-again-output", "path": "/tmp/output"}]}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: flip-coin-example-pipeline spec: params: diff --git a/sdk/python/tests/compiler/testdata/exit_handler.yaml b/sdk/python/tests/compiler/testdata/exit_handler.yaml index c24fca25d..fffc8a885 100644 --- a/sdk/python/tests/compiler/testdata/exit_handler.yaml +++ b/sdk/python/tests/compiler/testdata/exit_handler.yaml @@ -25,8 +25,6 @@ metadata: "gcs-download"}]}' tekton.dev/output_artifacts: '{"gcs-download": [{"name": "gcs-download-data", "path": "/tmp/results.txt"}]}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: exit-handler spec: params: diff --git a/sdk/python/tests/compiler/testdata/hidden_output_file.yaml b/sdk/python/tests/compiler/testdata/hidden_output_file.yaml index e1a21c43a..a374eee65 100644 --- a/sdk/python/tests/compiler/testdata/hidden_output_file.yaml +++ b/sdk/python/tests/compiler/testdata/hidden_output_file.yaml @@ -23,8 +23,6 @@ metadata: "download-file"}]}' tekton.dev/output_artifacts: '{"download-file": [{"name": "download-file-data", "path": "/tmp/results.txt"}]}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: hidden-output-file-pipeline spec: pipelineSpec: diff --git a/sdk/python/tests/compiler/testdata/imagepullsecrets.yaml b/sdk/python/tests/compiler/testdata/imagepullsecrets.yaml index 1120481e4..234b4eb08 100644 --- a/sdk/python/tests/compiler/testdata/imagepullsecrets.yaml +++ b/sdk/python/tests/compiler/testdata/imagepullsecrets.yaml @@ -24,8 +24,6 @@ metadata: tekton.dev/input_artifacts: '{}' tekton.dev/output_artifacts: 
'{"get-frequent": [{"name": "get-frequent-word", "path": "/tmp/message.txt"}]}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: save-most-frequent spec: params: diff --git a/sdk/python/tests/compiler/testdata/init_container.yaml b/sdk/python/tests/compiler/testdata/init_container.yaml index 2d6abec96..6833d3877 100644 --- a/sdk/python/tests/compiler/testdata/init_container.yaml +++ b/sdk/python/tests/compiler/testdata/init_container.yaml @@ -21,8 +21,6 @@ metadata: sidecar.istio.io/inject: 'false' tekton.dev/input_artifacts: '{}' tekton.dev/output_artifacts: '{}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: initcontainer spec: pipelineSpec: diff --git a/sdk/python/tests/compiler/testdata/input_artifact_raw_value.yaml b/sdk/python/tests/compiler/testdata/input_artifact_raw_value.yaml index 43a8d78f6..f09725107 100644 --- a/sdk/python/tests/compiler/testdata/input_artifact_raw_value.yaml +++ b/sdk/python/tests/compiler/testdata/input_artifact_raw_value.yaml @@ -22,8 +22,6 @@ metadata: sidecar.istio.io/inject: 'false' tekton.dev/input_artifacts: '{}' tekton.dev/output_artifacts: '{}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: pipeline-with-artifact-input-raw-argument-value spec: pipelineSpec: diff --git a/sdk/python/tests/compiler/testdata/katib.yaml b/sdk/python/tests/compiler/testdata/katib.yaml index 11d27359b..944c83793 100644 --- a/sdk/python/tests/compiler/testdata/katib.yaml +++ b/sdk/python/tests/compiler/testdata/katib.yaml @@ -30,8 +30,6 @@ metadata: "parent_task": "mnist-hpo"}]}' tekton.dev/output_artifacts: '{"mnist-hpo": [{"name": "mnist-hpo-bestHyperParameter", "path": "/output.txt"}]}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: launch-katib-experiment spec: params: diff --git a/sdk/python/tests/compiler/testdata/load_from_yaml.yaml b/sdk/python/tests/compiler/testdata/load_from_yaml.yaml index f6055502f..5adda4697 100644 --- a/sdk/python/tests/compiler/testdata/load_from_yaml.yaml +++ b/sdk/python/tests/compiler/testdata/load_from_yaml.yaml @@ -22,8 +22,6 @@ metadata: tekton.dev/input_artifacts: '{}' tekton.dev/output_artifacts: '{"busybox": [{"name": "busybox-dummy_output_path", "path": "/tmp/outputs/dummy_output_path/data"}]}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: component-yaml-pipeline spec: pipelineSpec: diff --git a/sdk/python/tests/compiler/testdata/loop_static.yaml b/sdk/python/tests/compiler/testdata/loop_static.yaml index 4343f9dec..b9de8235f 100644 --- a/sdk/python/tests/compiler/testdata/loop_static.yaml +++ b/sdk/python/tests/compiler/testdata/loop_static.yaml @@ -23,8 +23,6 @@ metadata: "parent_task": null}], "my-in-coop2": [{"name": "loop-item-param-subvar-B_b", "parent_task": null}]}' tekton.dev/output_artifacts: '{}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: my-loop-pipeline spec: params: diff --git a/sdk/python/tests/compiler/testdata/node_selector.yaml b/sdk/python/tests/compiler/testdata/node_selector.yaml index 3d98a7c1e..c32d68b56 100644 --- a/sdk/python/tests/compiler/testdata/node_selector.yaml +++ b/sdk/python/tests/compiler/testdata/node_selector.yaml @@ -21,8 +21,6 @@ metadata: sidecar.istio.io/inject: 'false' tekton.dev/input_artifacts: '{}' tekton.dev/output_artifacts: '{}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: node-selector spec: pipelineSpec: diff --git a/sdk/python/tests/compiler/testdata/parallel_join.yaml b/sdk/python/tests/compiler/testdata/parallel_join.yaml index cc52e4c1c..1151b0a08 100644 
--- a/sdk/python/tests/compiler/testdata/parallel_join.yaml +++ b/sdk/python/tests/compiler/testdata/parallel_join.yaml @@ -23,8 +23,6 @@ metadata: sidecar.istio.io/inject: 'false' tekton.dev/input_artifacts: '{}' tekton.dev/output_artifacts: '{}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: parallel-pipeline spec: params: diff --git a/sdk/python/tests/compiler/testdata/parallel_join_with_argo_vars.yaml b/sdk/python/tests/compiler/testdata/parallel_join_with_argo_vars.yaml index 1b853fca2..71d2411ef 100644 --- a/sdk/python/tests/compiler/testdata/parallel_join_with_argo_vars.yaml +++ b/sdk/python/tests/compiler/testdata/parallel_join_with_argo_vars.yaml @@ -27,8 +27,6 @@ metadata: tekton.dev/output_artifacts: '{"gcs-download": [{"name": "gcs-download-data", "path": "/tmp/results.txt"}], "gcs-download-2": [{"name": "gcs-download-2-data", "path": "/tmp/results.txt"}]}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: parallel-pipeline-with-argo-vars spec: params: diff --git a/sdk/python/tests/compiler/testdata/parallel_join_with_artifacts.yaml b/sdk/python/tests/compiler/testdata/parallel_join_with_artifacts.yaml index 6dc253865..a996b0c7f 100644 --- a/sdk/python/tests/compiler/testdata/parallel_join_with_artifacts.yaml +++ b/sdk/python/tests/compiler/testdata/parallel_join_with_artifacts.yaml @@ -26,8 +26,6 @@ metadata: tekton.dev/output_artifacts: '{"gcs-download": [{"name": "gcs-download-data", "path": "/tmp/results.txt"}], "gcs-download-2": [{"name": "gcs-download-2-data", "path": "/tmp/results.txt"}]}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: parallel-pipeline spec: params: diff --git a/sdk/python/tests/compiler/testdata/pipeline_transformers.yaml b/sdk/python/tests/compiler/testdata/pipeline_transformers.yaml index 63e6ecb4f..cd0f8d0e3 100644 --- a/sdk/python/tests/compiler/testdata/pipeline_transformers.yaml +++ b/sdk/python/tests/compiler/testdata/pipeline_transformers.yaml @@ -22,8 +22,6 @@ metadata: sidecar.istio.io/inject: 'false' tekton.dev/input_artifacts: '{}' tekton.dev/output_artifacts: '{}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: pipeline-transformer spec: pipelineSpec: diff --git a/sdk/python/tests/compiler/testdata/pipelineparams.yaml b/sdk/python/tests/compiler/testdata/pipelineparams.yaml index 85f4aaaf9..59423ed5d 100644 --- a/sdk/python/tests/compiler/testdata/pipelineparams.yaml +++ b/sdk/python/tests/compiler/testdata/pipelineparams.yaml @@ -25,8 +25,6 @@ metadata: "parent_task": "download"}]}' tekton.dev/output_artifacts: '{"download": [{"name": "download-downloaded_resultOutput", "path": "/tmp/results.txt"}]}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: pipelineparams spec: params: diff --git a/sdk/python/tests/compiler/testdata/resourceop_basic.yaml b/sdk/python/tests/compiler/testdata/resourceop_basic.yaml index cbcc66ad1..ccb8e55c9 100644 --- a/sdk/python/tests/compiler/testdata/resourceop_basic.yaml +++ b/sdk/python/tests/compiler/testdata/resourceop_basic.yaml @@ -21,8 +21,6 @@ metadata: sidecar.istio.io/inject: 'false' tekton.dev/input_artifacts: '{}' tekton.dev/output_artifacts: '{}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: resourceop-basic spec: pipelineSpec: diff --git a/sdk/python/tests/compiler/testdata/retry.yaml b/sdk/python/tests/compiler/testdata/retry.yaml index fdfaaf95b..c1982134d 100644 --- a/sdk/python/tests/compiler/testdata/retry.yaml +++ b/sdk/python/tests/compiler/testdata/retry.yaml @@ -22,8 +22,6 @@ metadata: 
sidecar.istio.io/inject: 'false' tekton.dev/input_artifacts: '{}' tekton.dev/output_artifacts: '{}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: retry-random-failures spec: pipelineSpec: diff --git a/sdk/python/tests/compiler/testdata/sequential.yaml b/sdk/python/tests/compiler/testdata/sequential.yaml index 2f344c7d1..d3b834642 100644 --- a/sdk/python/tests/compiler/testdata/sequential.yaml +++ b/sdk/python/tests/compiler/testdata/sequential.yaml @@ -23,8 +23,6 @@ metadata: sidecar.istio.io/inject: 'false' tekton.dev/input_artifacts: '{}' tekton.dev/output_artifacts: '{}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: sequential-pipeline spec: params: diff --git a/sdk/python/tests/compiler/testdata/sidecar.yaml b/sdk/python/tests/compiler/testdata/sidecar.yaml index 2f0a917f1..940957d64 100644 --- a/sdk/python/tests/compiler/testdata/sidecar.yaml +++ b/sdk/python/tests/compiler/testdata/sidecar.yaml @@ -23,8 +23,6 @@ metadata: "download"}]}' tekton.dev/output_artifacts: '{"download": [{"name": "download-downloaded", "path": "/tmp/results.txt"}]}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: sidecar spec: pipelineSpec: diff --git a/sdk/python/tests/compiler/testdata/timeout.yaml b/sdk/python/tests/compiler/testdata/timeout.yaml index 3f5b3697c..3ce16c5c5 100644 --- a/sdk/python/tests/compiler/testdata/timeout.yaml +++ b/sdk/python/tests/compiler/testdata/timeout.yaml @@ -21,8 +21,6 @@ metadata: sidecar.istio.io/inject: 'false' tekton.dev/input_artifacts: '{}' tekton.dev/output_artifacts: '{}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: pipeline-includes-two-steps-which-fail-randomly spec: pipelineSpec: diff --git a/sdk/python/tests/compiler/testdata/tolerations.yaml b/sdk/python/tests/compiler/testdata/tolerations.yaml index a9b48a544..e7ec4ff9d 100644 --- a/sdk/python/tests/compiler/testdata/tolerations.yaml +++ b/sdk/python/tests/compiler/testdata/tolerations.yaml @@ -22,8 +22,6 @@ metadata: tekton.dev/input_artifacts: '{}' tekton.dev/output_artifacts: '{"download": [{"name": "download-downloaded", "path": "/tmp/results.txt"}]}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: tolerations spec: pipelineSpec: diff --git a/sdk/python/tests/compiler/testdata/volume.yaml b/sdk/python/tests/compiler/testdata/volume.yaml index 60fb2f266..b2dc0f200 100644 --- a/sdk/python/tests/compiler/testdata/volume.yaml +++ b/sdk/python/tests/compiler/testdata/volume.yaml @@ -23,8 +23,6 @@ metadata: "download"}]}' tekton.dev/output_artifacts: '{"download": [{"name": "download-downloaded", "path": "/tmp/results.txt"}]}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: volume spec: pipelineSpec: diff --git a/sdk/python/tests/compiler/testdata/volume_op.yaml b/sdk/python/tests/compiler/testdata/volume_op.yaml index 1edac4c28..6e0ed34df 100644 --- a/sdk/python/tests/compiler/testdata/volume_op.yaml +++ b/sdk/python/tests/compiler/testdata/volume_op.yaml @@ -23,8 +23,6 @@ metadata: tekton.dev/input_artifacts: '{"cop": [{"name": "create-pvc-name", "parent_task": "create-pvc"}]}' tekton.dev/output_artifacts: '{}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: volumeop-basic spec: params: diff --git a/sdk/python/tests/compiler/testdata/volume_snapshot_op.yaml b/sdk/python/tests/compiler/testdata/volume_snapshot_op.yaml index ec7a5bd96..b1c3e135a 100644 --- a/sdk/python/tests/compiler/testdata/volume_snapshot_op.yaml +++ b/sdk/python/tests/compiler/testdata/volume_snapshot_op.yaml @@ -30,8 +30,6 
@@ metadata: "parent_task": "create-volume"}], "step4-output": [{"name": "create-volume-name", "parent_task": "create-volume"}]}' tekton.dev/output_artifacts: '{}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: volumesnapshotop-sequential spec: params: diff --git a/sdk/python/tests/compiler/testdata/withitem_nested.yaml b/sdk/python/tests/compiler/testdata/withitem_nested.yaml index 3e566b730..568b5e64a 100644 --- a/sdk/python/tests/compiler/testdata/withitem_nested.yaml +++ b/sdk/python/tests/compiler/testdata/withitem_nested.yaml @@ -24,8 +24,6 @@ metadata: "parent_task": null}], "my-inner-inner-coop": [{"name": "loop-item-param-00000001-subvar-a", "parent_task": null}, {"name": "loop-item-param-00000002", "parent_task": null}]}' tekton.dev/output_artifacts: '{}' - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp name: my-pipeline spec: params: diff --git a/sdk/python/tests/test_kfp_samples.sh b/sdk/python/tests/test_kfp_samples.sh index 0e464744e..67f6f0d94 100755 --- a/sdk/python/tests/test_kfp_samples.sh +++ b/sdk/python/tests/test_kfp_samples.sh @@ -52,7 +52,7 @@ function help { # process command line parameters while (( $# > 0 )); do case "$1" in - -v|--kfp-version) KFP_VERSION="$2"; shift 2 ;; # KFP SDK version, default: 0.5.1 + -v|--kfp-version) KFP_VERSION="$2"; shift 2 ;; # KFP SDK version, default: 1.0.0 -a|--include-all-samples) ALL_SAMPLES="TRUE"; shift 1 ;; # Compile all DSL scripts in KFP repo -s|--dont-list-files) SKIP_FILES="TRUE"; shift 1 ;; # Suppress compile status for each DSL file -e|--print-error-details) PRINT_ERRORS="TRUE"; shift 1 ;; # Print summary of compilation errors @@ -63,7 +63,7 @@ while (( $# > 0 )); do done # define global variables -KFP_VERSION=${KFP_VERSION:-0.5.1} +KFP_VERSION=${KFP_VERSION:-1.0.0} KFP_REPO_URL="https://github.com/kubeflow/pipelines.git" SCRIPT_DIR="$(cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd)" PROJECT_DIR="${TRAVIS_BUILD_DIR:-$(cd "${SCRIPT_DIR%/sdk/python/tests}"; pwd)}" diff --git a/test/cloudbuild/batch_build.yaml b/test/cloudbuild/batch_build.yaml index 905e70f2d..a9ce34512 100644 --- a/test/cloudbuild/batch_build.yaml +++ b/test/cloudbuild/batch_build.yaml @@ -41,6 +41,10 @@ steps: name: 'gcr.io/cloud-builders/docker' args: ['build', '-t', '$_GCR_BASE/cache-deployer', '-f', 'backend/src/cache/deployer/Dockerfile', '.'] waitFor: ["-"] + - id: 'buildMetadataEnvoy' + name: 'gcr.io/cloud-builders/docker' + args: ['build', '-t', '$_GCR_BASE/metadata-envoy', '-f', 'third_party/metadata_envoy/Dockerfile', '.'] + waitFor: ["-"] options: machineType: N1_HIGHCPU_8 # use a fast machine to build because there a lot of work images: @@ -53,4 +57,5 @@ images: - "$_GCR_BASE/metadata-writer" - "$_GCR_BASE/cache-server" - "$_GCR_BASE/cache-deployer" + - "$_GCR_BASE/metadata-envoy" timeout: 1800s # 30min diff --git a/test/deploy-cluster.sh b/test/deploy-cluster.sh index cf7969909..b697c28ae 100755 --- a/test/deploy-cluster.sh +++ b/test/deploy-cluster.sh @@ -68,7 +68,7 @@ else SHOULD_CLEANUP_CLUSTER=true # Machine type and cluster size is the same as kubeflow deployment to # easily compare performance. We can reduce usage later. 
- NODE_POOL_CONFIG_ARG="--num-nodes=2 --machine-type=n1-standard-8 \ + NODE_POOL_CONFIG_ARG="--num-nodes=2 --machine-type=e2-standard-8 \ --enable-autoscaling --max-nodes=8 --min-nodes=2" KUBERNETES_VERSION_ARG="--cluster-version=1.14" if [ "$ENABLE_WORKLOAD_IDENTITY" = true ]; then diff --git a/test/deploy-pipeline-lite.sh b/test/deploy-pipeline-lite.sh index 62e3d5f06..61dbfc24e 100755 --- a/test/deploy-pipeline-lite.sh +++ b/test/deploy-pipeline-lite.sh @@ -61,6 +61,7 @@ if [ -z "$KFP_DEPLOY_RELEASE" ]; then kustomize edit set image gcr.io/ml-pipeline/metadata-writer=${GCR_IMAGE_BASE_DIR}/metadata-writer:${GCR_IMAGE_TAG} kustomize edit set image gcr.io/ml-pipeline/cache-server=${GCR_IMAGE_BASE_DIR}/cache-server:${GCR_IMAGE_TAG} kustomize edit set image gcr.io/ml-pipeline/cache-deployer=${GCR_IMAGE_BASE_DIR}/cache-deployer:${GCR_IMAGE_TAG} + kustomize edit set image gcr.io/ml-pipeline/metadata-envoy=${GCR_IMAGE_BASE_DIR}/metadata-envoy:${GCR_IMAGE_TAG} cat kustomization.yaml kubectl apply -k . @@ -114,7 +115,7 @@ if [ "$ENABLE_WORKLOAD_IDENTITY" = true ]; then # between tests. Unless for testing scenario like this, it won't # meet the concurrent change issue. sleep $((RANDOM%30)) - yes | PROJECT_ID=$PROJECT CLUSTER_NAME=$TEST_CLUSTER NAMESPACE=$NAMESPACE \ + yes | PROJECT_ID=$PROJECT RESOURCE_PREFIX=$TEST_CLUSTER NAMESPACE=$NAMESPACE \ ${DIR}/../manifests/kustomize/gcp-workload-identity-setup.sh } retry setup_workload_identity diff --git a/test/postsubmit-tests-with-pipeline-deployment.sh b/test/postsubmit-tests-with-pipeline-deployment.sh index a3c4ed474..311cf9c8c 100755 --- a/test/postsubmit-tests-with-pipeline-deployment.sh +++ b/test/postsubmit-tests-with-pipeline-deployment.sh @@ -86,10 +86,11 @@ source "${DIR}/test-prep.sh" CLOUDBUILD_TIMEOUT_SECONDS=3600 PULL_CLOUDBUILD_STATUS_MAX_ATTEMPT=$(expr ${CLOUDBUILD_TIMEOUT_SECONDS} / 20 ) CLOUDBUILD_STARTED=TIMEOUT +CLOUDBUILD_FILTER="substitutions.COMMIT_SHA:${PULL_BASE_SHA} AND tags:build-each-commit" for i in $(seq 1 ${PULL_CLOUDBUILD_STATUS_MAX_ATTEMPT}) do - output=`gcloud builds list --project="$CLOUDBUILD_PROJECT" --filter="sourceProvenance.resolvedRepoSource.commitSha:${PULL_BASE_SHA}"` + output=`gcloud builds list --project="$CLOUDBUILD_PROJECT" --filter="$CLOUDBUILD_FILTER"` if [[ ${output} != "" ]]; then CLOUDBUILD_STARTED=True break @@ -106,7 +107,7 @@ fi CLOUDBUILD_FINISHED=TIMEOUT for i in $(seq 1 ${PULL_CLOUDBUILD_STATUS_MAX_ATTEMPT}) do - output=`gcloud builds list --project="$CLOUDBUILD_PROJECT" --filter="sourceProvenance.resolvedRepoSource.commitSha:${PULL_BASE_SHA}"` + output=`gcloud builds list --project="$CLOUDBUILD_PROJECT" --filter="$CLOUDBUILD_FILTER"` if [[ ${output} == *"SUCCESS"* ]]; then CLOUDBUILD_FINISHED=SUCCESS break diff --git a/test/presubmit-backend-test.sh b/test/presubmit-backend-test.sh new file mode 100755 index 000000000..478065f91 --- /dev/null +++ b/test/presubmit-backend-test.sh @@ -0,0 +1,27 @@ +#!/bin/bash +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# usage: `./hack/run_unit_tests_backend.sh` to run backend unit tests once +# `WATCH=true ./hack/run_unit_tests_backend.sh` to watch code changes and auto rerun tests +# Note: ibazel can be downloaded from https://github.com/bazelbuild/bazel-watcher + +COMMAND="bazel" +if [ -n "$WATCH" ]; then + COMMAND="ibazel" +fi +$COMMAND --host_jvm_args=-Xmx500m --host_jvm_args=-Xms500m test \ + --noshow_progress --noshow_loading_progress --define=grpc_no_ares=true \ + --test_output=all //backend/... diff --git a/test/presubmit-tests.gke.sh b/test/presubmit-tests.gke.sh deleted file mode 100755 index 79ccb176f..000000000 --- a/test/presubmit-tests.gke.sh +++ /dev/null @@ -1,65 +0,0 @@ -#!/bin/bash -ex -# -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -#This test endpoint is not used yet. See https://github.com/kubeflow/pipelines/issues/1499 -#Due to the way Prow testing scripts are called, any big change needs to be done in multiple steps/check-ins so that nothing breaks. -#Here is the sequence of check-ins: -#New entry-point script (this script - presubmit-tests.gke.sh) -#Change test entry-point in Prow config -#Remove unused code from the original script (presubmit-tests.sh). - -#Need to parse the command line to get the workflow name to generate the cluster name. -for i in $(seq 1 $#); do - if [ "${!i}" == "--workflow_file" ] || [ "${!i}" == "--test-cluster-name-prefix" ]: then - next_idx=$(($i+1)) - WORKFLOW_FILE=${!next_idx} - fi -done - -#$PULL_PULL_SHA and $WORKSPACE are env variables set by Prow - -echo "Creating new GKE cluster cluster to run tests..." -PROJECT_ID=ml-pipeline-test -ZONE=us-west1-a -TEST_CLUSTER_PREFIX=${WORKFLOW_FILE%.*} -TEST_CLUSTER=${TEST_CLUSTER_PREFIX//_}-${PULL_PULL_SHA:0:10}-${RANDOM} -machine_type=n1-standard-2 -num_nodes=3 - -# activating the service account -gcloud auth activate-service-account --key-file="${GOOGLE_APPLICATION_CREDENTIALS}" - -function delete_cluster { - echo "Delete cluster..." - gcloud container clusters delete ${TEST_CLUSTER} --async -} -#Setting the exit handler to delete the cluster. 
The cluster will be deleted when the script exists (either completes or fails) -trap delete_cluster EXIT - -gcloud config set project $PROJECT_ID -gcloud config set compute/zone $ZONE -gcloud container clusters create $TEST_CLUSTER \ - --scopes cloud-platform \ - --enable-cloud-logging \ - --enable-cloud-monitoring \ - --machine-type $machine_type \ - --num-nodes $num_nodes \ - --network test \ - --subnetwork test-1 - -gcloud container clusters get-credentials $TEST_CLUSTER - -$(dirname "$0")/presubmit-tests.sh "$@" --cluster-type gke diff --git a/test/sample-test/requirements.in b/test/sample-test/requirements.in index d31705a96..20a544f0d 100644 --- a/test/sample-test/requirements.in +++ b/test/sample-test/requirements.in @@ -3,7 +3,7 @@ minio papermill fire yamale -tfx==0.21.2 +tfx==0.22.0 # Avoiding conflicts: # There are incompatible versions in the resolved dependencies: diff --git a/test/sample-test/requirements.txt b/test/sample-test/requirements.txt index 7a83bf8c4..31d7cb4ef 100644 --- a/test/sample-test/requirements.txt +++ b/test/sample-test/requirements.txt @@ -6,157 +6,163 @@ # absl-py==0.8.1 # via ml-metadata, tensorboard, tensorflow, tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx, tfx-bsl ansiwrap==0.8.4 # via papermill -apache-beam[gcp]==2.17.0 # via tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx, tfx-bsl -appdirs==1.4.3 # via black -argo-models==2.2.1a # via -r - -astor==0.8.1 # via tensorflow +apache-beam[gcp]==2.22.0 # via tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx, tfx-bsl +appdirs==1.4.4 # via black +astunparse==1.6.3 # via tensorflow async-generator==1.10 # via nbclient attrs==19.3.0 # via black, jsonschema avro-python3==1.9.2.1 # via apache-beam -backcall==0.1.0 # via ipython +backcall==0.2.0 # via ipython black==19.10b0 # via papermill -bleach==3.1.4 # via nbconvert +bleach==3.1.5 # via nbconvert cachetools==3.1.1 # via apache-beam, google-auth -certifi==2020.4.5.1 # via kfp-server-api, kubernetes, minio, requests +certifi==2020.6.20 # via kubernetes, minio, requests chardet==3.0.4 # via requests -click==7.1.1 # via -r -, black, papermill, tfx -cloudpickle==1.3.0 # via -r - +click==7.1.2 # via black, papermill, tfx +colorama==0.4.3 # via keras-tuner configparser==5.0.0 # via minio crcmod==1.7 # via apache-beam decorator==4.4.2 # via ipython, traitlets defusedxml==0.6.0 # via nbconvert -deprecated==1.2.9 # via -r - -dill==0.3.0 # via apache-beam -docker==4.2.0 # via tfx +dill==0.3.1.1 # via apache-beam +docker==4.2.2 # via tfx docopt==0.6.2 # via hdfs entrypoints==0.3 # via nbconvert, papermill -fastavro==0.21.24 # via apache-beam +fastavro==0.23.5 # via apache-beam fasteners==0.15 # via google-apitools fire==0.3.1 # via -r - -future==0.18.2 # via apache-beam -gast==0.2.2 # via tensorflow -google-api-core[grpc]==1.17.0 # via google-api-python-client, google-cloud-bigtable, google-cloud-core, google-cloud-datastore, google-cloud-pubsub -google-api-python-client==1.8.0 # via tfx, tfx-bsl -google-apitools==0.5.28 # via apache-beam +future==0.18.2 # via apache-beam, keras-tuner +gast==0.3.3 # via tensorflow +google-api-core[grpc,grpcgcp]==1.21.0 # via google-api-python-client, google-cloud-bigtable, google-cloud-core, google-cloud-datastore, google-cloud-dlp, google-cloud-language, google-cloud-pubsub, google-cloud-spanner, google-cloud-videointelligence, google-cloud-vision +google-api-python-client==1.9.3 # via tfx, tfx-bsl +google-apitools==0.5.31 # via 
apache-beam google-auth-httplib2==0.0.3 # via google-api-python-client google-auth-oauthlib==0.4.1 # via tensorboard -google-auth==1.14.0 # via -r -, google-api-core, google-api-python-client, google-auth-httplib2, google-auth-oauthlib, google-cloud-storage, kubernetes, tensorboard +google-auth==1.18.0 # via google-api-core, google-api-python-client, google-auth-httplib2, google-auth-oauthlib, google-cloud-storage, kubernetes, tensorboard google-cloud-bigquery==1.17.0 # via -r -, apache-beam google-cloud-bigtable==1.0.0 # via apache-beam -google-cloud-core==1.3.0 # via apache-beam, google-cloud-bigquery, google-cloud-bigtable, google-cloud-datastore, google-cloud-storage +google-cloud-core==1.3.0 # via apache-beam, google-cloud-bigquery, google-cloud-bigtable, google-cloud-datastore, google-cloud-spanner, google-cloud-storage google-cloud-datastore==1.7.4 # via apache-beam +google-cloud-dlp==0.13.0 # via apache-beam +google-cloud-language==1.3.0 # via apache-beam google-cloud-pubsub==1.0.2 # via apache-beam +google-cloud-spanner==1.13.0 # via apache-beam google-cloud-storage==1.17.0 # via -r - +google-cloud-videointelligence==1.13.0 # via apache-beam +google-cloud-vision==0.42.0 # via apache-beam google-pasta==0.2.0 # via tensorflow -google-resumable-media==0.5.0 # via google-cloud-bigquery, google-cloud-storage -googleapis-common-protos[grpc]==1.51.0 # via google-api-core, grpc-google-iam-v1, tensorflow-metadata -grpc-google-iam-v1==0.12.3 # via google-cloud-bigtable, google-cloud-pubsub -grpcio==1.28.1 # via apache-beam, google-api-core, googleapis-common-protos, grpc-google-iam-v1, tensorboard, tensorflow, tensorflow-serving-api, tfx -h5py==2.10.0 # via keras-applications +google-resumable-media==0.5.1 # via google-cloud-bigquery, google-cloud-storage +googleapis-common-protos[grpc]==1.52.0 # via google-api-core, grpc-google-iam-v1, tensorflow-metadata +grpc-google-iam-v1==0.12.3 # via google-cloud-bigtable, google-cloud-pubsub, google-cloud-spanner +grpcio-gcp==0.2.2 # via apache-beam, google-api-core +grpcio==1.30.0 # via apache-beam, google-api-core, googleapis-common-protos, grpc-google-iam-v1, grpcio-gcp, tensorboard, tensorflow, tensorflow-serving-api, tfx +h5py==2.10.0 # via tensorflow hdfs==2.5.8 # via apache-beam -httplib2==0.12.0 # via apache-beam, google-api-python-client, google-apitools, google-auth-httplib2, oauth2client -idna==2.9 # via requests -importlib-metadata==1.6.0 # via jsonschema -ipykernel==5.2.1 # via ipywidgets, jupyter, jupyter-console, notebook, qtconsole +httplib2==0.17.4 # via apache-beam, google-api-python-client, google-apitools, google-auth-httplib2, oauth2client +idna==2.10 # via requests +importlib-metadata==1.7.0 # via jsonschema, markdown +ipykernel==5.3.0 # via ipywidgets, jupyter, jupyter-console, notebook, qtconsole ipython-genutils==0.2.0 # via nbformat, notebook, qtconsole, traitlets -ipython==7.13.0 # via ipykernel, ipywidgets, jupyter-console, tensorflow-data-validation +ipython==7.16.1 # via ipykernel, ipywidgets, jupyter-console ipywidgets==7.5.1 # via jupyter, tensorflow-model-analysis -jedi==0.17.0 # via ipython +jedi==0.17.1 # via ipython jinja2==2.11.2 # via nbconvert, notebook, tfx joblib==0.14.1 # via scikit-learn, tensorflow-data-validation -jsonschema==3.2.0 # via -r -, nbformat +jsonschema==3.2.0 # via nbformat junit-xml==1.9 # via -r - -jupyter-client==6.1.3 # via ipykernel, jupyter-console, nbclient, notebook, papermill, qtconsole +jupyter-client==6.1.5 # via ipykernel, jupyter-console, nbclient, notebook, papermill, qtconsole 
jupyter-console==6.1.0 # via jupyter jupyter-core==4.6.3 # via jupyter-client, nbconvert, nbformat, notebook, qtconsole jupyter==1.0.0 # via tensorflow-model-analysis -keras-applications==1.0.8 # via tensorflow -keras-preprocessing==1.1.0 # via tensorflow -kfp-server-api==0.3.0 # via -r - -kubernetes==11.0.0 # via -r -, argo-models -markdown==3.2.1 # via tensorboard +keras-preprocessing==1.1.2 # via tensorflow +keras-tuner==1.0.1 # via tfx +kubernetes==11.0.0 # via tfx +markdown==3.2.2 # via tensorboard markupsafe==1.1.1 # via jinja2 minio==5.0.10 # via -r - mistune==0.8.4 # via nbconvert -ml-metadata==0.21.2 # via tfx +ml-metadata==0.22.1 # via tfx mock==2.0.0 # via apache-beam monotonic==1.5 # via fasteners -nbclient==0.2.0 # via papermill +nbclient==0.4.0 # via papermill nbconvert==5.6.1 # via jupyter, notebook -nbformat==5.0.5 # via ipywidgets, nbclient, nbconvert, notebook, papermill -nest-asyncio==1.3.2 # via nbclient +nbformat==5.0.7 # via ipywidgets, nbclient, nbconvert, notebook, papermill +nest-asyncio==1.3.3 # via nbclient notebook==6.0.3 # via jupyter, widgetsnbextension -numpy==1.18.2 # via h5py, keras-applications, keras-preprocessing, opt-einsum, pandas, pyarrow, scikit-learn, scipy, tensorboard, tensorflow, tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx-bsl +numpy==1.19.0 # via apache-beam, h5py, keras-preprocessing, keras-tuner, opt-einsum, pandas, pyarrow, scikit-learn, scipy, tensorboard, tensorflow, tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx-bsl oauth2client==3.0.0 # via apache-beam, google-apitools oauthlib==3.1.0 # via requests-oauthlib opt-einsum==3.2.1 # via tensorflow -pandas==0.25.3 # via tensorflow-data-validation, tensorflow-model-analysis +packaging==20.4 # via bleach +pandas==1.0.5 # via tensorflow-data-validation, tensorflow-model-analysis, tfx-bsl pandocfilters==1.4.2 # via nbconvert -papermill==2.1.0 # via -r - +papermill==2.1.2 # via -r - parso==0.7.0 # via jedi pathspec==0.8.0 # via black pbr==5.4.5 # via mock pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -prometheus-client==0.7.1 # via notebook +prometheus-client==0.8.0 # via notebook prompt-toolkit==3.0.5 # via ipython, jupyter-console -protobuf==3.11.3 # via apache-beam, google-api-core, google-cloud-bigquery, googleapis-common-protos, ml-metadata, tensorboard, tensorflow, tensorflow-data-validation, tensorflow-metadata, tensorflow-model-analysis, tensorflow-serving-api, tensorflow-transform, tfx, tfx-bsl +protobuf==3.12.2 # via apache-beam, google-api-core, google-cloud-bigquery, googleapis-common-protos, ml-metadata, tensorboard, tensorflow, tensorflow-data-validation, tensorflow-metadata, tensorflow-model-analysis, tensorflow-serving-api, tensorflow-transform, tfx, tfx-bsl ptyprocess==0.6.0 # via pexpect, terminado -pyarrow==0.15.1 # via apache-beam, tensorflow-data-validation, tensorflow-model-analysis, tfx, tfx-bsl +pyarrow==0.16.0 # via apache-beam, tensorflow-data-validation, tensorflow-model-analysis, tfx, tfx-bsl pyasn1-modules==0.2.8 # via google-auth, oauth2client pyasn1==0.4.8 # via oauth2client, pyasn1-modules, rsa pydot==1.4.1 # via apache-beam, tensorflow-transform pygments==2.6.1 # via ipython, jupyter-console, nbconvert, qtconsole pymongo==3.10.1 # via apache-beam -pyparsing==2.4.7 # via pydot +pyparsing==2.4.7 # via packaging, pydot pyrsistent==0.16.0 # via jsonschema -python-dateutil==2.8.1 # via apache-beam, jupyter-client, kfp-server-api, kubernetes, minio, pandas -pytz==2019.3 # via 
apache-beam, google-api-core, minio, pandas -pyyaml==5.3.1 # via -r -, kubernetes, papermill, tfx, yamale -pyzmq==19.0.0 # via jupyter-client, notebook, qtconsole -qtconsole==4.7.3 # via jupyter +python-dateutil==2.8.1 # via apache-beam, jupyter-client, kubernetes, minio, pandas +pytz==2020.1 # via apache-beam, fastavro, google-api-core, minio, pandas +pyyaml==5.3.1 # via kubernetes, papermill, tfx, yamale +pyzmq==19.0.1 # via jupyter-client, notebook, qtconsole +qtconsole==4.7.5 # via jupyter qtpy==1.9.0 # via qtconsole -regex==2020.4.4 # via black +regex==2020.6.8 # via black requests-oauthlib==1.3.0 # via google-auth-oauthlib, kubernetes -requests-toolbelt==0.9.1 # via -r - -requests==2.23.0 # via docker, google-api-core, hdfs, kubernetes, papermill, requests-oauthlib, requests-toolbelt, tensorboard -rsa==4.0 # via google-auth, oauth2client -scikit-learn==0.21.3 # via tensorflow-data-validation -scipy==1.4.1 # via scikit-learn, tensorflow, tensorflow-model-analysis +requests==2.24.0 # via docker, google-api-core, hdfs, keras-tuner, kubernetes, papermill, requests-oauthlib, tensorboard +rsa==4.6 # via google-auth, oauth2client +scikit-learn==0.23.1 # via keras-tuner +scipy==1.4.1 # via keras-tuner, scikit-learn, tensorflow, tensorflow-model-analysis send2trash==1.5.0 # via notebook -six==1.14.0 # via absl-py, bleach, docker, fasteners, fire, google-api-core, google-api-python-client, google-apitools, google-auth, google-pasta, google-resumable-media, grpcio, h5py, hdfs, jsonschema, junit-xml, keras-preprocessing, kfp-server-api, kubernetes, ml-metadata, mock, oauth2client, protobuf, pyarrow, pyrsistent, python-dateutil, tenacity, tensorboard, tensorflow, tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx, tfx-bsl, traitlets, websocket-client -strip-hints==0.1.8 # via -r - -tabulate==0.8.7 # via -r - -tenacity==6.1.0 # via papermill -tensorboard==2.1.1 # via tensorflow -tensorflow-data-validation==0.21.5 # via tfx -tensorflow-estimator==2.1.0 # via tensorflow -tensorflow-metadata==0.21.2 # via tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx-bsl -tensorflow-model-analysis==0.21.6 # via tfx -tensorflow-serving-api==2.1.0 # via tfx, tfx-bsl -tensorflow-transform==0.21.2 # via tensorflow-data-validation, tfx -tensorflow==2.1.0 # via ml-metadata, tensorflow-data-validation, tensorflow-model-analysis, tensorflow-serving-api, tensorflow-transform, tfx, tfx-bsl +six==1.15.0 # via absl-py, astunparse, bleach, docker, fasteners, fire, google-api-core, google-api-python-client, google-apitools, google-auth, google-pasta, google-resumable-media, grpcio, h5py, hdfs, jsonschema, junit-xml, keras-preprocessing, kubernetes, ml-metadata, mock, oauth2client, packaging, protobuf, pyarrow, pyrsistent, python-dateutil, tenacity, tensorboard, tensorflow, tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx, tfx-bsl, traitlets, websocket-client +tabulate==0.8.7 # via keras-tuner +tenacity==6.2.0 # via papermill +tensorboard-plugin-wit==1.7.0 # via tensorboard +tensorboard==2.2.2 # via tensorflow +tensorflow-data-validation==0.22.2 # via tfx +tensorflow-estimator==2.2.0 # via tensorflow +tensorflow-metadata==0.22.2 # via tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx-bsl +tensorflow-model-analysis==0.22.2 # via tfx +tensorflow-serving-api==2.2.0 # via tfx, tfx-bsl +tensorflow-transform==0.22.0 # via tensorflow-data-validation, tfx +tensorflow==2.2.0 # via ml-metadata, 
tensorflow-data-validation, tensorflow-model-analysis, tensorflow-serving-api, tensorflow-transform, tfx, tfx-bsl termcolor==1.1.0 # via fire, tensorflow terminado==0.8.3 # via notebook +terminaltables==3.1.0 # via keras-tuner testpath==0.4.4 # via nbconvert textwrap3==0.9.2 # via ansiwrap -tfx-bsl==0.21.4 # via tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx -tfx==0.21.2 # via -r - -toml==0.10.0 # via black +tfx-bsl==0.22.1 # via tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx +tfx==0.22.0 # via -r - +threadpoolctl==2.1.0 # via scikit-learn +toml==0.10.1 # via black tornado==6.0.4 # via ipykernel, jupyter-client, notebook, terminado -tqdm==4.45.0 # via papermill +tqdm==4.47.0 # via keras-tuner, papermill traitlets==4.3.3 # via ipykernel, ipython, ipywidgets, jupyter-client, jupyter-core, nbclient, nbconvert, nbformat, notebook, qtconsole typed-ast==1.4.1 # via black +typing-extensions==3.7.4.2 # via apache-beam uritemplate==3.0.1 # via google-api-python-client -urllib3==1.25.9 # via kfp-server-api, kubernetes, minio, requests -wcwidth==0.1.9 # via prompt-toolkit +urllib3==1.25.9 # via kubernetes, minio, requests +wcwidth==0.2.5 # via prompt-toolkit webencodings==0.5.1 # via bleach websocket-client==0.57.0 # via docker, kubernetes werkzeug==1.0.1 # via tensorboard -wheel==0.34.2 # via strip-hints, tensorboard, tensorflow +wheel==0.34.2 # via astunparse, tensorboard, tensorflow widgetsnbextension==3.5.1 # via ipywidgets -wrapt==1.12.1 # via deprecated, tensorflow -yamale==2.0.1 # via -r - +wrapt==1.12.1 # via tensorflow +yamale==2.2.0 # via -r - zipp==3.1.0 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: diff --git a/test/sample-test/sample_test_launcher.py b/test/sample-test/sample_test_launcher.py index 149133f79..9cc855609 100644 --- a/test/sample-test/sample_test_launcher.py +++ b/test/sample-test/sample_test_launcher.py @@ -199,13 +199,13 @@ class ComponentTest(SampleTest): def _injection(self): """Sample-specific image injection into yaml file.""" - subs = { - 'gcr\.io/ml-pipeline/ml-pipeline/ml-pipeline-local-confusion-matrix:\w+':self._local_confusionmatrix_image, - 'gcr\.io/ml-pipeline/ml-pipeline/ml-pipeline-local-roc:\w+':self._local_roc_image + subs = { # Tag can look like 1.0.0-rc.3, so we need both "-" and "." in the regex. 
+ 'gcr\.io/ml-pipeline/ml-pipeline/ml-pipeline-local-confusion-matrix:(\w+|[.-])+':self._local_confusionmatrix_image, + 'gcr\.io/ml-pipeline/ml-pipeline/ml-pipeline-local-roc:(\w+|[.-])+':self._local_roc_image } if self._test_name == 'xgboost_training_cm': subs.update({ - 'gcr\.io/ml-pipeline/ml-pipeline-gcp:\w+':self._dataproc_gcp_image + 'gcr\.io/ml-pipeline/ml-pipeline-gcp:(\w|[.-])+':self._dataproc_gcp_image }) utils.file_injection('%s.py.yaml' % self._test_name, @@ -233,4 +233,4 @@ def main(): }) if __name__ == '__main__': - main() \ No newline at end of file + main() diff --git a/tools/bazel_builder/BUILD b/tools/bazel_builder/BUILD new file mode 100644 index 000000000..dc02582e6 --- /dev/null +++ b/tools/bazel_builder/BUILD @@ -0,0 +1,17 @@ +CONTAINER_VERSION = "5de9890554d173f12a2d152bf4345b1d9e85227f65b97b3e2ca9ca95ebcb70ae" + +platform( + name = "rbe_ubuntu1604", + constraint_values = [ + "@bazel_tools//platforms:x86_64", + "@bazel_tools//platforms:linux", + "@bazel_tools//tools/cpp:clang", + "@bazel_toolchains//constraints:xenial", + ], + remote_execution_properties = """ + properties: { + name: "container-image" + value:"docker://gcr.io/ml-pipeline-test/bazel-builder@sha256:%s" + } + """ % (CONTAINER_VERSION), +) diff --git a/tools/benchmarks/pipeline_service_api.ipynb b/tools/benchmarks/pipeline_service_api.ipynb index 2e5231666..464474473 100644 --- a/tools/benchmarks/pipeline_service_api.ipynb +++ b/tools/benchmarks/pipeline_service_api.ipynb @@ -98,17 +98,17 @@ " # Plots\n", " fig, axs = plt.subplots(nrows=4, figsize=(10,20))\n", " \n", - " label_create_latencies = pd.Series(create_latencies, name='Create Pipeline Latency (Second)')\n", - " sns.distplot(a=label_create_latencies, ax=axs[0])\n", + " axs[0].set(title='Create Pipeline Latency', xlabel='Time (Second)', ylabel='Create Pipeline')\n", + " sns.distplot(a=create_latencies, ax=axs[0], hist=True, kde=False, rug=True)\n", " \n", - " label_create_version_latencies = pd.Series(create_version_latencies, name='Create Pipeline Version Latency (Second)')\n", - " sns.distplot(a=label_create_version_latencies, ax=axs[1])\n", + " axs[1].set(title='Create Pipeline Version Latency', xlabel='Time (Second)', ylabel='Create Pipeline Version')\n", + " sns.distplot(a=create_version_latencies, ax=axs[1], hist=True, kde=False, rug=True) \n", " \n", - " label_get_latencies = pd.Series(get_latencies, name='Get Pipeline Latency (Second)')\n", - " sns.distplot(a=label_get_latencies, ax=axs[2])\n", + " axs[2].set(title='Get Pipeline Latency', xlabel='Time (Second)', ylabel='Get Pipeline')\n", + " sns.distplot(a=get_latencies, ax=axs[2], hist=True, kde=False, rug=True) \n", " \n", - " label_delete_latencies = pd.Series(delete_latencies, name='Delete Pipeline Latency (Second)')\n", - " sns.distplot(a=label_delete_latencies, ax=axs[3])\n", + " axs[3].set(title='Delete Pipeline Latency', xlabel='Time (Second)', ylabel='Delete Pipeline')\n", + " sns.distplot(a=delete_latencies, ax=axs[3], hist=True, kde=False, rug=True)\n", " \n", " # TODO(jingzhang36): maybe dump the durations data to db or gcs, and let searborn read from there" ] diff --git a/tools/benchmarks/run_service_api.ipynb b/tools/benchmarks/run_service_api.ipynb index eae8978a4..56e6861db 100644 --- a/tools/benchmarks/run_service_api.ipynb +++ b/tools/benchmarks/run_service_api.ipynb @@ -135,18 +135,18 @@ " # Plots\n", " fig, axs = plt.subplots(nrows=4, figsize=(10,20))\n", " \n", - " label_create_run_latencies = pd.Series(create_run_latencies, name='Create Run Latency (Second)')\n", - 
" sns.distplot(a=label_create_run_latencies, ax=axs[0])\n", + " axs[0].set(title='Create Run Latency', xlabel='Time (Second)', ylabel='Create')\n", + " sns.distplot(a=create_run_latencies, ax=axs[0], hist=True, kde=False, rug=True)\n", " \n", - " label_run_durations = pd.Series(succeeded_run_durations, name='Run Durations (Second)')\n", - " sns.distplot(a=label_run_durations, ax=axs[1]) \n", + " axs[1].set(title='Run Durations', xlabel='Time (Second)', ylabel='Run')\n", + " sns.distplot(a=succeeded_run_durations, ax=axs[1], hist=True, kde=False, rug=True) \n", + " \n", + " axs[2].set(title='Get Run Latency', xlabel='Time (Second)', ylabel='Get')\n", + " sns.distplot(a=get_run_latencies, ax=axs[2], hist=True, kde=False, rug=True) \n", + " \n", + " axs[3].set(title='Delete Run Latency', xlabel='Time (Second)', ylabel='Delete')\n", + " sns.distplot(a=delete_run_latencies, ax=axs[3], hist=True, kde=False, rug=True)\n", "\n", - " label_get_run_latencies = pd.Series(get_run_latencies, name='Get Run Latency (Second)')\n", - " sns.distplot(a=label_get_run_latencies, ax=axs[2]) \n", - " \n", - " label_delete_run_latencies = pd.Series(delete_run_latencies, name='Delete Run Latency (Second)')\n", - " sns.distplot(a=label_delete_run_latencies, ax=axs[3])\n", - " \n", " loaded_run_results = pd.DataFrame(np.array(run_results), columns=['result'])\n", " sns.catplot(x='result', kind=\"count\", data=loaded_run_results)\n", " "