modernize-wml-pipeline (#1227)

* modernized-wml-pipeline

* simplifying-params
This commit is contained in:
Animesh Singh 2019-04-25 13:05:01 -07:00 committed by Pascal Vicaire
parent bb0a5e36f6
commit f6283d4583
1 changed file with 41 additions and 38 deletions

View File

@ -16,60 +16,63 @@ CONFIG_FILE_URL = 'https://raw.githubusercontent.com/user-name/kfp-secrets/maste
# generate default secret name # generate default secret name
import os import os
secret_name = 'ai-pipeline-' + os.path.splitext(os.path.basename(CONFIG_FILE_URL))[0] import kfp
from kfp import components
from kfp import dsl
import ai_pipeline_params as params
secret_name = 'kfp-creds'
configuration_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/ibm-components/commons/config/component.yaml')
train_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/ibm-components/watson/train/component.yaml')
store_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/ibm-components/watson/store/component.yaml')
deploy_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/ibm-components/watson/deploy/component.yaml')
# create pipelines # create pipelines
import kfp.dsl as dsl
import ai_pipeline_params as params
@dsl.pipeline( @dsl.pipeline(
name='KFP on WML training', name='KFP on WML training',
description='Kubeflow pipelines running on WML performing tensorflow image recognition.' description='Kubeflow pipelines running on WML performing tensorflow image recognition.'
) )
def kfp_wml_pipeline( def kfp_wml_pipeline(
GITHUB_TOKEN='',
CONFIG_FILE_URL='https://raw.githubusercontent.com/user/repository/branch/creds.ini',
train_code='tf-model.zip',
execution_command='\'python3 convolutional_network.py --trainImagesFile ${DATA_DIR}/train-images-idx3-ubyte.gz --trainLabelsFile ${DATA_DIR}/train-labels-idx1-ubyte.gz --testImagesFile ${DATA_DIR}/t10k-images-idx3-ubyte.gz --testLabelsFile ${DATA_DIR}/t10k-labels-idx1-ubyte.gz --learningRate 0.001 --trainingIters 20000\'',
framework= 'tensorflow',
framework_version = '1.5',
runtime = 'python',
runtime_version = '3.5',
run_definition = 'wml-tensorflow-definition',
run_name = 'wml-tensorflow-run',
model_name='wml-tensorflow-mnist',
scoring_payload='tf-mnist-test-payload.json'
): ):
# op1 - this operation will create the credentials as secrets to be used by other operations # op1 - this operation will create the credentials as secrets to be used by other operations
config_op = dsl.ContainerOp( get_configuration = configuration_op(
name="config", token = GITHUB_TOKEN,
image="aipipeline/wml-config", url = CONFIG_FILE_URL,
command=['python3'], name = secret_name
arguments=['/app/config.py',
'--token', GITHUB_TOKEN,
'--url', CONFIG_FILE_URL],
file_outputs={'secret-name' : '/tmp/'+secret_name}
) )
# op2 - this operation trains the model with the model codes and data saved in the cloud object store # op2 - this operation trains the model with the model codes and data saved in the cloud object store
train_op = dsl.ContainerOp( wml_train = train_op(
name="train", get_configuration.output,
image="aipipeline/wml-train", train_code,
command=['python3'], execution_command
arguments=['/app/wml-train.py', ).apply(params.use_ai_pipeline_params(secret_name))
'--config', config_op.output,
'--train-code', 'tf-model.zip',
'--execution-command', '\'python3 convolutional_network.py --trainImagesFile ${DATA_DIR}/train-images-idx3-ubyte.gz --trainLabelsFile ${DATA_DIR}/train-labels-idx1-ubyte.gz --testImagesFile ${DATA_DIR}/t10k-images-idx3-ubyte.gz --testLabelsFile ${DATA_DIR}/t10k-labels-idx1-ubyte.gz --learningRate 0.001 --trainingIters 20000\''],
file_outputs={'run-uid' : '/tmp/run_uid'}).apply(params.use_ai_pipeline_params(secret_name))
# op3 - this operation stores the model trained above # op3 - this operation stores the model trained above
store_op = dsl.ContainerOp( wml_store = store_op(
name="store", wml_train.output,
image="aipipeline/wml-store", model_name
command=['python3'], ).apply(params.use_ai_pipeline_params(secret_name))
arguments=['/app/wml-store.py',
'--run-uid', train_op.output,
'--model-name', 'python-tensorflow-mnist'],
file_outputs={'model-uid' : '/tmp/model_uid'}).apply(params.use_ai_pipeline_params(secret_name))
# op4 - this operation deploys the model to a web service and run scoring with the payload in the cloud object store # op4 - this operation deploys the model to a web service and run scoring with the payload in the cloud object store
deploy_op = dsl.ContainerOp( wml_deploy = deploy_op(
name="deploy", wml_store.output,
image="aipipeline/wml-deploy", model_name,
command=['python3'], scoring_payload
arguments=['/app/wml-deploy.py', ).apply(params.use_ai_pipeline_params(secret_name))
'--model-uid', store_op.output,
'--model-name', 'python-tensorflow-mnist',
'--scoring-payload', 'tf-mnist-test-payload.json'],
file_outputs={'output' : '/tmp/output'}).apply(params.use_ai_pipeline_params(secret_name))
if __name__ == '__main__': if __name__ == '__main__':
# compile the pipeline # compile the pipeline