diff --git a/samples/ibm-samples/watson/watson_train_serve_pipeline.py b/samples/ibm-samples/watson/watson_train_serve_pipeline.py
index f0ecfcb84d..058142b397 100644
--- a/samples/ibm-samples/watson/watson_train_serve_pipeline.py
+++ b/samples/ibm-samples/watson/watson_train_serve_pipeline.py
@@ -16,60 +16,63 @@ CONFIG_FILE_URL = 'https://raw.githubusercontent.com/user-name/kfp-secrets/maste
 # generate default secret name
 import os
-secret_name = 'ai-pipeline-' + os.path.splitext(os.path.basename(CONFIG_FILE_URL))[0]
-
-# create pipelines
-import kfp.dsl as dsl
+import kfp
+from kfp import components
+from kfp import dsl
 import ai_pipeline_params as params
 
+secret_name = 'kfp-creds'
+configuration_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/ibm-components/commons/config/component.yaml')
+train_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/ibm-components/watson/train/component.yaml')
+store_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/ibm-components/watson/store/component.yaml')
+deploy_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/ibm-components/watson/deploy/component.yaml')
+
+# create pipelines
+
 @dsl.pipeline(
     name='KFP on WML training',
     description='Kubeflow pipelines running on WML performing tensorflow image recognition.'
 )
 def kfp_wml_pipeline(
+    GITHUB_TOKEN='',
+    CONFIG_FILE_URL='https://raw.githubusercontent.com/user/repository/branch/creds.ini',
+    train_code='tf-model.zip',
+    execution_command='\'python3 convolutional_network.py --trainImagesFile ${DATA_DIR}/train-images-idx3-ubyte.gz --trainLabelsFile ${DATA_DIR}/train-labels-idx1-ubyte.gz --testImagesFile ${DATA_DIR}/t10k-images-idx3-ubyte.gz --testLabelsFile ${DATA_DIR}/t10k-labels-idx1-ubyte.gz --learningRate 0.001 --trainingIters 20000\'',
+    framework='tensorflow',
+    framework_version='1.5',
+    runtime='python',
+    runtime_version='3.5',
+    run_definition='wml-tensorflow-definition',
+    run_name='wml-tensorflow-run',
+    model_name='wml-tensorflow-mnist',
+    scoring_payload='tf-mnist-test-payload.json'
 ):
     # op1 - this operation will create the credentials as secrets to be used by other operations
-    config_op = dsl.ContainerOp(
-        name="config",
-        image="aipipeline/wml-config",
-        command=['python3'],
-        arguments=['/app/config.py',
-                   '--token', GITHUB_TOKEN,
-                   '--url', CONFIG_FILE_URL],
-        file_outputs={'secret-name' : '/tmp/'+secret_name}
+    get_configuration = configuration_op(
+        token=GITHUB_TOKEN,
+        url=CONFIG_FILE_URL,
+        name=secret_name
     )
 
     # op2 - this operation trains the model with the model code and data saved in the cloud object store
-    train_op = dsl.ContainerOp(
-        name="train",
-        image="aipipeline/wml-train",
-        command=['python3'],
-        arguments=['/app/wml-train.py',
-                   '--config', config_op.output,
-                   '--train-code', 'tf-model.zip',
-                   '--execution-command', '\'python3 convolutional_network.py --trainImagesFile ${DATA_DIR}/train-images-idx3-ubyte.gz --trainLabelsFile ${DATA_DIR}/train-labels-idx1-ubyte.gz --testImagesFile ${DATA_DIR}/t10k-images-idx3-ubyte.gz --testLabelsFile ${DATA_DIR}/t10k-labels-idx1-ubyte.gz --learningRate 0.001 --trainingIters 20000\''],
-        file_outputs={'run-uid' : '/tmp/run_uid'}).apply(params.use_ai_pipeline_params(secret_name))
+    wml_train = train_op(
+        get_configuration.output,
+        train_code,
+        execution_command
+    ).apply(params.use_ai_pipeline_params(secret_name))
 
     # op3 - this operation stores the model trained above
-    store_op = dsl.ContainerOp(
-        name="store",
-        image="aipipeline/wml-store",
-        command=['python3'],
-        arguments=['/app/wml-store.py',
-                   '--run-uid', train_op.output,
-                   '--model-name', 'python-tensorflow-mnist'],
-        file_outputs={'model-uid' : '/tmp/model_uid'}).apply(params.use_ai_pipeline_params(secret_name))
+    wml_store = store_op(
+        wml_train.output,
+        model_name
+    ).apply(params.use_ai_pipeline_params(secret_name))
 
     # op4 - this operation deploys the model to a web service and runs scoring with the payload in the cloud object store
-    deploy_op = dsl.ContainerOp(
-        name="deploy",
-        image="aipipeline/wml-deploy",
-        command=['python3'],
-        arguments=['/app/wml-deploy.py',
-                   '--model-uid', store_op.output,
-                   '--model-name', 'python-tensorflow-mnist',
-                   '--scoring-payload', 'tf-mnist-test-payload.json'],
-        file_outputs={'output' : '/tmp/output'}).apply(params.use_ai_pipeline_params(secret_name))
+    wml_deploy = deploy_op(
+        wml_store.output,
+        model_name,
+        scoring_payload
+    ).apply(params.use_ai_pipeline_params(secret_name))
 
 if __name__ == '__main__':
     # compile the pipeline
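
Note: the hunk above is truncated inside the "if __name__ == '__main__':" block. For reference, below is a minimal sketch of the compile step that a KFP v1 sample like this typically finishes with; it assumes the v1 kfp SDK imported in the diff, and the output archive filename is an illustrative assumption, not taken from the patch.

    # Minimal sketch (not part of the diff): compile kfp_wml_pipeline, the
    # pipeline function defined above, with the v1 kfp SDK compiler.
    import kfp.compiler as compiler

    if __name__ == '__main__':
        # Compile the pipeline into an archive that can be uploaded to a
        # Kubeflow Pipelines cluster through the UI or the kfp client.
        pipeline_filename = kfp_wml_pipeline.__name__ + '.tar.gz'
        compiler.Compiler().compile(kfp_wml_pipeline, pipeline_filename)

Once compiled, the archive can be uploaded through the Pipelines UI, or, if a cluster endpoint is reachable, the function can be submitted directly with kfp.Client().create_run_from_pipeline_func(), passing any of the new pipeline parameters (for example model_name or scoring_payload) as arguments.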