pipelines/components/gcp/dataflow/launch_template/component.yaml

# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: Launch Dataflow Template
description: |
  Launches a Dataflow job from a template.
metadata:
  labels:
    add-pod-env: 'true'
inputs:
  - name: project_id
    description: 'Required. The ID of the Cloud Platform project that the job belongs to.'
    type: GCPProjectID
  - name: gcs_path
    description: >-
      Required. A Cloud Storage path to the template from which to create the job.
      Must be a valid Cloud Storage URL, beginning with `gs://`.
    type: GCSPath
  - name: launch_parameters
    description: >-
      Parameters to provide to the template being launched. Schema defined in
      https://cloud.google.com/dataflow/docs/reference/rest/v1b3/LaunchTemplateParameters.
      `jobName` will be replaced by a generated name (an example value is sketched
      in the comment after the inputs list).
    type: Dict
    default: '{}'
  - name: location
    description: 'The regional endpoint to which to direct the request.'
    default: ''
    type: GCPRegion
  - name: validate_only
    description: >-
      If true, the request is validated but not actually executed. Defaults to false.
    default: 'False'
    type: Bool
  - name: staging_dir
    description: >-
      Optional. The GCS directory for keeping staging files.
      A random subdirectory will be created under the directory to keep job info
      for resuming the job in case of failure.
    default: ''
    type: GCSPath
  - name: wait_interval
    description: >-
      Optional wait interval between calls to get job status. Defaults to 30.
    default: '30'
    type: Integer
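
# `launch_parameters` follows the LaunchTemplateParameters schema linked above. A
# minimal sketch of a value a caller might pass (the parameter names and gs:// paths
# are placeholders for a hypothetical template, not values this component requires):
#
#   {
#     "parameters": {
#       "inputFile": "gs://my-bucket/input/file.txt",
#       "output": "gs://my-bucket/output/results"
#     },
#     "environment": {
#       "tempLocation": "gs://my-bucket/temp"
#     }
#   }
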
outputs:
  - name: job_id
    description: 'The ID of the created Dataflow job.'
    type: String
  - name: MLPipeline UI metadata
    type: UI metadata
implementation:
  container:
    image: gcr.io/ml-pipeline/ml-pipeline-gcp:1.4.0-rc.1
    args: [
      --ui_metadata_path, {outputPath: MLPipeline UI metadata},
      kfp_component.google.dataflow, launch_template,
      --project_id, {inputValue: project_id},
      --gcs_path, {inputValue: gcs_path},
      --launch_parameters, {inputValue: launch_parameters},
      --location, {inputValue: location},
      --validate_only, {inputValue: validate_only},
      --staging_dir, {inputValue: staging_dir},
      --wait_interval, {inputValue: wait_interval},
      --job_id_output_path, {outputPath: job_id},
    ]
    env:
      KFP_POD_NAME: "{{pod.name}}"
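
# A minimal usage sketch of this component, kept as a comment so the file stays valid
# YAML. Assumptions not taken from this spec: the KFP v1 SDK is installed, the file is
# saved locally as `component.yaml`, and the template/bucket URIs are placeholders
# (gs://dataflow-templates/latest/Word_Count is one of the Google-provided templates):
#
#   import json
#   import kfp
#   from kfp import components
#
#   dataflow_template_op = components.load_component_from_file('component.yaml')
#
#   @kfp.dsl.pipeline(name='launch-dataflow-template-example')
#   def pipeline(project_id: str = 'my-project'):
#       dataflow_template_op(
#           project_id=project_id,
#           gcs_path='gs://dataflow-templates/latest/Word_Count',
#           launch_parameters=json.dumps({
#               'parameters': {
#                   'inputFile': 'gs://my-bucket/input/shakespeare.txt',
#                   'output': 'gs://my-bucket/wordcount/out',
#               },
#           }),
#           location='us-central1',
#           staging_dir='gs://my-bucket/staging')
#
#   # The created job's ID is available to downstream steps through the `job_id`
#   # output, e.g. dataflow_template_op(...).outputs['job_id'].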