Use component YAML for the FfDL pipeline (#1148)

Animesh Singh 2019-04-14 20:00:01 -07:00 committed by Kubernetes Prow Robot
parent 55f7e328dc
commit 54cd931eff
1 changed file with 23 additions and 25 deletions


@@ -1,4 +1,6 @@
-import kfp.dsl as dsl
+import kfp
+from kfp import components
+from kfp import dsl
 import ai_pipeline_params as params
 
 # generate default secret name
@@ -27,34 +29,30 @@ def ffdlPipeline(
         value='gender_classification.py')
 ):
     """A pipeline for end to end machine learning workflow."""
 
-    config_op = dsl.ContainerOp(
-        name="config",
-        image="aipipeline/wml-config",
-        command=['python3'],
-        arguments=['/app/config.py',
-                   '--token', GITHUB_TOKEN,
-                   '--url', CONFIG_FILE_URL,
-                   '--name', secret_name],
-        file_outputs={'secret-name': '/tmp/' + secret_name}
-    )
-
-    train = dsl.ContainerOp(
-        name='train',
-        image='aipipeline/ffdl-train:0.6',
-        command=['sh', '-c'],
-        arguments=['echo %s > /tmp/logs.txt; python -u train.py --model_def_file_path %s --manifest_file_path %s;'
-                   % (config_op.output, model_def_file_path, manifest_file_path)],
-        file_outputs={'output': '/tmp/training_id.txt'}).apply(params.use_ai_pipeline_params(secret_name))
-
-    serve = dsl.ContainerOp(
-        name='serve',
-        image='aipipeline/ffdl-serve:0.11',
-        command=['sh', '-c'],
-        arguments=['python -u serve.py --model_id %s --deployment_name %s --model_class_name %s --model_class_file %s;'
-                   % (train.output, model_deployment_name, model_class_name, model_class_file)],
-        file_outputs={'output': '/tmp/deployment_result.txt'}).apply(params.use_ai_pipeline_params(secret_name))
+    configuration_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/ibm-components/commons/config/component.yaml')
+    train_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/ibm-components/ffdl/train/component.yaml')
+    serve_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/ibm-components/ffdl/serve/component.yaml')
+
+    get_configuration = configuration_op(
+        token = GITHUB_TOKEN,
+        url = CONFIG_FILE_URL,
+        name = secret_name
+    )
+
+    train = train_op(
+        model_def_file_path,
+        manifest_file_path
+    ).apply(params.use_ai_pipeline_params(secret_name))
+
+    serve = serve_op(
+        train.output,
+        model_deployment_name,
+        model_class_name,
+        model_class_file
+    ).apply(params.use_ai_pipeline_params(secret_name))
 
 if __name__ == '__main__':
     import kfp.compiler as compiler
-    compiler.Compiler().compile(ffdlPipeline, __file__ + '.zip')
+    compiler.Compiler().compile(ffdlPipeline, __file__ + '.tar.gz')
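
For context, a minimal sketch of how the reworked sample can be compiled and submitted with the standard KFP SDK client. This is illustrative only and not part of the commit; the module name ffdl_pipeline, the API host, and the experiment name are assumed placeholders.

import kfp
import kfp.compiler as compiler

# Hypothetical module name for the sample file touched by this commit.
from ffdl_pipeline import ffdlPipeline

# Compile the @dsl.pipeline function into a pipeline package
# (matches the '.tar.gz' suffix now used in the sample's __main__ block).
compiler.Compiler().compile(ffdlPipeline, 'ffdl_pipeline.tar.gz')

# Submit the compiled package for a run; host and names are placeholders.
client = kfp.Client(host='http://<pipelines-api-endpoint>')
experiment = client.create_experiment(name='ffdl-demo')
run = client.run_pipeline(
    experiment.id,
    job_name='ffdl-pipeline-run',
    pipeline_package_path='ffdl_pipeline.tar.gz',
)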