From cda6efed27ddaf528c4742bb451c07fa6ef4d62f Mon Sep 17 00:00:00 2001 From: Chun-Hsiang Wang Date: Wed, 17 Jul 2019 19:50:11 -0700 Subject: [PATCH 1/8] Include newly trained model in the newly built docker image (#601) (#602) --- xgboost_synthetic/build-train-deploy.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/xgboost_synthetic/build-train-deploy.ipynb b/xgboost_synthetic/build-train-deploy.ipynb index 69db9982..efb18a29 100644 --- a/xgboost_synthetic/build-train-deploy.ipynb +++ b/xgboost_synthetic/build-train-deploy.ipynb @@ -453,7 +453,7 @@ "\n", "if not preprocessor.input_files:\n", " preprocessor.input_files = set()\n", - "input_files=[\"xgboost_util.py\"]\n", + "input_files=[\"xgboost_util.py\", \"mockup-model.dat\"]\n", "preprocessor.input_files = set([os.path.normpath(f) for f in input_files])\n", "preprocessor.preprocess()\n", "builder = append.append.AppendBuilder(registry=DOCKER_REGISTRY,\n", From fb6cd69def7d4ad1cb3a9259f33592e8c64040bc Mon Sep 17 00:00:00 2001 From: Chun-Hsiang Wang Date: Thu, 18 Jul 2019 22:35:12 -0700 Subject: [PATCH 2/8] Install pip dependencies and build base image with kaniko (#603) * Install required pip packages not included in the base package. * Use Kaniko builder to build the base image first. * Directly install packages from requirements.txt to be more flexible. --- xgboost_synthetic/build-train-deploy.ipynb | 361 ++++++++++++++------- xgboost_synthetic/util.py | 1 + 2 files changed, 241 insertions(+), 121 deletions(-) diff --git a/xgboost_synthetic/build-train-deploy.ipynb b/xgboost_synthetic/build-train-deploy.ipynb index efb18a29..1ee0422e 100644 --- a/xgboost_synthetic/build-train-deploy.ipynb +++ b/xgboost_synthetic/build-train-deploy.ipynb @@ -32,7 +32,9 @@ { "cell_type": "code", "execution_count": 1, - "metadata": {}, + "metadata": { + "scrolled": false + }, "outputs": [], "source": [ "import util\n", @@ -43,7 +45,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ @@ -68,7 +70,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 3, "metadata": {}, "outputs": [], "source": [ @@ -87,7 +89,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ @@ -110,7 +112,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 5, "metadata": {}, "outputs": [], "source": [ @@ -161,7 +163,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 6, "metadata": {}, "outputs": [], "source": [ @@ -217,7 +219,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 7, "metadata": {}, "outputs": [ { @@ -225,72 +227,73 @@ "output_type": "stream", "text": [ "model_file=mockup-model.dat\n", - "[0]\tvalidation_0-rmse:97.625\n", + "[14:45:28] WARNING: /workspace/src/objective/regression_obj.cu:152: reg:linear is now deprecated in favor of reg:squarederror.\n", + "[0]\tvalidation_0-rmse:95.4029\n", "Will train until validation_0-rmse hasn't improved in 40 rounds.\n", - "[1]\tvalidation_0-rmse:92.9346\n", - "[2]\tvalidation_0-rmse:88.4163\n", - "[3]\tvalidation_0-rmse:84.9513\n", - "[4]\tvalidation_0-rmse:81.4807\n", - "[5]\tvalidation_0-rmse:78.0301\n", - "[6]\tvalidation_0-rmse:74.3916\n", - "[7]\tvalidation_0-rmse:72.6324\n", - "[8]\tvalidation_0-rmse:70.0073\n", - "[9]\tvalidation_0-rmse:67.4423\n", - "[10]\tvalidation_0-rmse:66.0759\n", - "[11]\tvalidation_0-rmse:63.7281\n", - "[12]\tvalidation_0-rmse:61.7721\n", - 
"[13]\tvalidation_0-rmse:59.8362\n", - "[14]\tvalidation_0-rmse:58.0936\n", - "[15]\tvalidation_0-rmse:56.2871\n", - "[16]\tvalidation_0-rmse:54.6282\n", - "[17]\tvalidation_0-rmse:53.242\n", - "[18]\tvalidation_0-rmse:51.9367\n", - "[19]\tvalidation_0-rmse:50.4069\n", - "[20]\tvalidation_0-rmse:49.4686\n", - "[21]\tvalidation_0-rmse:48.2332\n", - "[22]\tvalidation_0-rmse:47.4084\n", - "[23]\tvalidation_0-rmse:46.8214\n", - "[24]\tvalidation_0-rmse:46.1743\n", - "[25]\tvalidation_0-rmse:45.2428\n", - "[26]\tvalidation_0-rmse:44.6314\n", - "[27]\tvalidation_0-rmse:43.7469\n", - "[28]\tvalidation_0-rmse:42.8601\n", - "[29]\tvalidation_0-rmse:41.9884\n", - "[30]\tvalidation_0-rmse:41.384\n", - "[31]\tvalidation_0-rmse:40.8639\n", - "[32]\tvalidation_0-rmse:40.1512\n", - "[33]\tvalidation_0-rmse:39.5409\n", - "[34]\tvalidation_0-rmse:39.0861\n", - "[35]\tvalidation_0-rmse:38.3517\n", - "[36]\tvalidation_0-rmse:37.8571\n", - "[37]\tvalidation_0-rmse:37.5808\n", - "[38]\tvalidation_0-rmse:36.9849\n", - "[39]\tvalidation_0-rmse:36.5718\n", - "[40]\tvalidation_0-rmse:36.1384\n", - "[41]\tvalidation_0-rmse:35.7462\n", - "[42]\tvalidation_0-rmse:35.2703\n", - "[43]\tvalidation_0-rmse:34.8709\n", - "[44]\tvalidation_0-rmse:34.4978\n", - "[45]\tvalidation_0-rmse:34.1141\n", - "[46]\tvalidation_0-rmse:33.7975\n", - "[47]\tvalidation_0-rmse:33.4405\n", - "[48]\tvalidation_0-rmse:33.0749\n", - "[49]\tvalidation_0-rmse:32.7983\n" + "[1]\tvalidation_0-rmse:88.2563\n", + "[2]\tvalidation_0-rmse:82.341\n", + "[3]\tvalidation_0-rmse:76.7723\n", + "[4]\tvalidation_0-rmse:71.9907\n", + "[5]\tvalidation_0-rmse:67.7698\n", + "[6]\tvalidation_0-rmse:63.2959\n", + "[7]\tvalidation_0-rmse:59.8439\n", + "[8]\tvalidation_0-rmse:56.9911\n", + "[9]\tvalidation_0-rmse:53.8091\n", + "[10]\tvalidation_0-rmse:51.4086\n", + "[11]\tvalidation_0-rmse:49.1506\n", + "[12]\tvalidation_0-rmse:47.4958\n", + "[13]\tvalidation_0-rmse:46.0659\n", + "[14]\tvalidation_0-rmse:44.4425\n", + "[15]\tvalidation_0-rmse:42.5323\n", + "[16]\tvalidation_0-rmse:41.3847\n", + "[17]\tvalidation_0-rmse:40.4265\n", + "[18]\tvalidation_0-rmse:39.2736\n", + "[19]\tvalidation_0-rmse:38.5218\n", + "[20]\tvalidation_0-rmse:37.3415\n", + "[21]\tvalidation_0-rmse:36.8546\n", + "[22]\tvalidation_0-rmse:36.0049\n", + "[23]\tvalidation_0-rmse:35.5978\n", + "[24]\tvalidation_0-rmse:35.0653\n", + "[25]\tvalidation_0-rmse:34.1586\n", + "[26]\tvalidation_0-rmse:33.6017\n", + "[27]\tvalidation_0-rmse:33.2441\n", + "[28]\tvalidation_0-rmse:32.477\n", + "[29]\tvalidation_0-rmse:31.7638\n", + "[30]\tvalidation_0-rmse:31.2781\n", + "[31]\tvalidation_0-rmse:30.9532\n", + "[32]\tvalidation_0-rmse:30.3881\n", + "[33]\tvalidation_0-rmse:29.9289\n", + "[34]\tvalidation_0-rmse:29.6362\n", + "[35]\tvalidation_0-rmse:29.3138\n", + "[36]\tvalidation_0-rmse:29.0621\n", + "[37]\tvalidation_0-rmse:28.5649\n", + "[38]\tvalidation_0-rmse:28.15\n", + "[39]\tvalidation_0-rmse:27.8467\n", + "[40]\tvalidation_0-rmse:27.5816\n", + "[41]\tvalidation_0-rmse:27.4534\n", + "[42]\tvalidation_0-rmse:27.2668\n", + "[43]\tvalidation_0-rmse:27.0583\n", + "[44]\tvalidation_0-rmse:26.7226\n", + "[45]\tvalidation_0-rmse:26.6145\n", + "[46]\tvalidation_0-rmse:26.3878\n", + "[47]\tvalidation_0-rmse:26.029\n", + "[48]\tvalidation_0-rmse:25.8776\n", + "[49]\tvalidation_0-rmse:25.8484\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "INFO:root:mean_absolute_error=25.64\n", - "INFO:root:Model export success: mockup-model.dat\n" + "mean_absolute_error=19.92\n", + "Model export success: 
mockup-model.dat\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "Best RMSE on eval: %.2f with %d rounds 32.798336 50\n" + "Best RMSE on eval: %.2f with %d rounds 25.848402 50\n" ] } ], @@ -309,7 +312,7 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 8, "metadata": {}, "outputs": [ { @@ -317,16 +320,17 @@ "output_type": "stream", "text": [ "model_file not supplied; using the default\n", - "model_file=mockup-model.dat\n" + "model_file=mockup-model.dat\n", + "[14:45:28] WARNING: /workspace/src/objective/regression_obj.cu:152: reg:linear is now deprecated in favor of reg:squarederror.\n" ] }, { "data": { "text/plain": [ - "[[-37.04857635498047, -37.04857635498047]]" + "[[68.33491516113281, 68.33491516113281]]" ] }, - "execution_count": 16, + "execution_count": 8, "metadata": {}, "output_type": "execute_result" } @@ -355,15 +359,15 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 9, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "zahrakubeflowcodelab\n", - "gcr.io/zahrakubeflowcodelab/fairing-job\n" + "issue-label-bot-dev\n", + "gcr.io/issue-label-bot-dev/fairing-job\n" ] } ], @@ -391,7 +395,126 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[PosixPath('build-train-deploy.py'), 'xgboost_util.py', 'mockup-model.dat']" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from fairing.builders import cluster\n", + "preprocessor = ConvertNotebookPreprocessorWithFire(\"HousingServe\")\n", + "\n", + "if not preprocessor.input_files:\n", + " preprocessor.input_files = set()\n", + "input_files=[\"xgboost_util.py\", \"mockup-model.dat\"]\n", + "preprocessor.input_files = set([os.path.normpath(f) for f in input_files])\n", + "preprocessor.preprocess()" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Building image using cluster builder.\n", + "Creating docker context: /tmp/fairing_context_5d629kor\n", + "Waiting for fairing-builder-lz9zx to start...\n", + "Pod started running True\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[36mINFO\u001b[0m[0000] Downloading base image gcr.io/kubeflow-images-public/xgboost-fairing-example-base:v-20190612\n", + "\u001b[36mINFO\u001b[0m[0000] Downloading base image gcr.io/kubeflow-images-public/xgboost-fairing-example-base:v-20190612\n", + "\u001b[33mWARN\u001b[0m[0000] Error while retrieving image from cache: getting image from path: open /cache/sha256:f90e54e312c4cfba28bec6993add2a85b4e127b52149ec0aaf41e5f8889a4086: no such file or directory\n", + "\u001b[36mINFO\u001b[0m[0000] Checking for cached layer gcr.io/issue-label-bot-dev/fairing-job/fairing-job/cache:e46cfa04f5f0d0445ce3ce8b91886d94e96f2875510a69aa9afaeb0ba9e62fc4...\n", + "\u001b[36mINFO\u001b[0m[0000] Using caching version of cmd: RUN if [ -e requirements.txt ];then pip install --no-cache -r requirements.txt; fi\n", + "\u001b[36mINFO\u001b[0m[0000] Using files from context: [/kaniko/buildcontext/app]\n", + "\u001b[36mINFO\u001b[0m[0000] Taking snapshot of full filesystem...\n", + "\u001b[36mINFO\u001b[0m[0000] Skipping paths under /dev, as it is a whitelisted directory\n", + "\u001b[36mINFO\u001b[0m[0000] Skipping paths under /etc/secrets, as it is a whitelisted directory\n", + 
"\u001b[36mINFO\u001b[0m[0000] Skipping paths under /kaniko, as it is a whitelisted directory\n", + "\u001b[36mINFO\u001b[0m[0000] Skipping paths under /proc, as it is a whitelisted directory\n", + "\u001b[36mINFO\u001b[0m[0000] Skipping paths under /sys, as it is a whitelisted directory\n", + "\u001b[36mINFO\u001b[0m[0000] Skipping paths under /var/run, as it is a whitelisted directory\n", + "\u001b[36mINFO\u001b[0m[0000] WORKDIR /app/\n", + "\u001b[36mINFO\u001b[0m[0000] cmd: workdir\n", + "\u001b[36mINFO\u001b[0m[0000] Changed working directory to /app/\n", + "\u001b[36mINFO\u001b[0m[0000] Creating directory /app/\n", + "\u001b[36mINFO\u001b[0m[0000] Taking snapshot of files...\n", + "\u001b[36mINFO\u001b[0m[0000] ENV FAIRING_RUNTIME 1\n", + "\u001b[36mINFO\u001b[0m[0000] No files changed in this command, skipping snapshotting.\n", + "\u001b[36mINFO\u001b[0m[0000] RUN if [ -e requirements.txt ];then pip install --no-cache -r requirements.txt; fi\n", + "\u001b[36mINFO\u001b[0m[0000] Found cached layer, extracting to filesystem\n", + "\u001b[36mINFO\u001b[0m[0001] No files changed in this command, skipping snapshotting.\n", + "\u001b[36mINFO\u001b[0m[0001] Using files from context: [/kaniko/buildcontext/app]\n", + "\u001b[36mINFO\u001b[0m[0001] COPY /app/ /app/\n", + "\u001b[36mINFO\u001b[0m[0001] Taking snapshot of files...\n", + "2019/07/18 21:45:45 existing blob: sha256:d13453f7d2b8d0adfd86c3989a5b695cef5afc3efaafe559643071f258c9f06d\n", + "2019/07/18 21:45:45 existing blob: sha256:0ba512db704a2eb85f7f372d1c809d58589531e3bae794f0aaba86cee912f923\n", + "2019/07/18 21:45:45 existing blob: sha256:9ee379bde91a3cecfb08d4189af0a2bcecc2da1c5102e49443088ccd7bd9abfa\n", + "2019/07/18 21:45:45 existing blob: sha256:507170ae8cfaca6cf2999295221d1324f1051fa15ba59e04dd7dafdc8de565bc\n", + "2019/07/18 21:45:45 existing blob: sha256:2f1ee468081da0ca09360c50281ed261d8b3fb01f664262c3f278d8619eb4e9a\n", + "2019/07/18 21:45:45 existing blob: sha256:d099b15c53311dc296426716edabe61dcc19e88009c19098b17ba965357c4391\n", + "2019/07/18 21:45:45 existing blob: sha256:bad6918fba4b1c68f82d1a4b6063b3ce64975a73b33b38b35454b1d484a6b57b\n", + "2019/07/18 21:45:45 existing blob: sha256:0fd02182c40eb28e13c4d7efd5dd4c81d985d9b07c9c809cc26e7bdb2dced07e\n", + "2019/07/18 21:45:45 existing blob: sha256:079dd3e30fa3eed702bb20a2f725da9907e2732bdc4dfb2fb5084a3423c3f743\n", + "2019/07/18 21:45:45 existing blob: sha256:e7fea64fabbc6f5961864ce5c6bcc143ab616d325b0c5a26848d8e427806104f\n", + "2019/07/18 21:45:45 existing blob: sha256:a5ba9de0ac70b35658f5898c27b52063a597d790308fb853021e881e04a6efb7\n", + "2019/07/18 21:45:45 existing blob: sha256:124c757242f88002a858c23fc79f8262f9587fa30fd92507e586ad074afb42b6\n", + "2019/07/18 21:45:45 existing blob: sha256:bbf0f5f91e8108d9b0be1ceeb749e63788ce7394a184bc8a70d24017eca7b7ba\n", + "2019/07/18 21:45:45 existing blob: sha256:9d866f8bde2a0d607a6d17edc0fbd5e00b58306efc2b0a57e0ba72f269e7c6be\n", + "2019/07/18 21:45:45 existing blob: sha256:afde35469481d2bc446d649a7a3d099147bbf7696b66333e76a411686b617ea1\n", + "2019/07/18 21:45:45 existing blob: sha256:398d32b153e84fe343f0c5b07d65e89b05551aae6cb8b3a03bb2b662976eb3b8\n", + "2019/07/18 21:45:45 existing blob: sha256:55dbf73eb7c7c005c3ccff29b62ff180e2f29245d14794dd6d5d8ad855d0ea88\n", + "2019/07/18 21:45:45 existing blob: sha256:4bfa6a63a3897359eff3ca3ee27c2e05ba76b790a07e6583714c1d324c2d4f21\n", + "2019/07/18 21:45:45 existing blob: sha256:5d8a6f34a39a1e098f09b39ee4e9d4a178fef6ec71c2046fe0b040c4667c8143\n", + "2019/07/18 21:45:45 existing 
blob: sha256:b893ca5fa31bb87be0d3fa3a403dac7ca12c955d6fd522fd35e3260dbd0e99da\n", + "2019/07/18 21:45:45 existing blob: sha256:ecc17173ccb5b7692a6d31b0077b8e4f543fb45f8c2b5c252dcad9ad0c9be0f7\n", + "2019/07/18 21:45:45 existing blob: sha256:eed14867f5ee443ad7efc89d0d4392683799a413244feec120f43074bc2d43ef\n", + "2019/07/18 21:45:45 existing blob: sha256:07e06c833ecb3b115e378d7f2ba5817ba77cfd02f5794a9817ede0622fbbf8a5\n", + "2019/07/18 21:45:45 existing blob: sha256:541a15d3a9d79f7d3e5e0f552f396406b3e3093247f71e0ae71dd8b7242ec428\n", + "2019/07/18 21:45:45 existing blob: sha256:fa3f2f277e67c5cbbf1dac21dc27111a60d3cd2ef494d94aa1515d3319f2a245\n", + "2019/07/18 21:45:45 existing blob: sha256:8143617e89d7ba1957e3dc6d7093a48bd0cd4a2a709bc0c9d0ffc6dde11467e8\n", + "2019/07/18 21:45:45 existing blob: sha256:2327f2e2474891211dbf7fb2d54e16e7b2889fea157b726645cc05e75ad917e8\n", + "2019/07/18 21:45:45 existing blob: sha256:8c58e650bb886ab24426958165c15abe1a1c10e8710f50233701fd503e23e7ac\n", + "2019/07/18 21:45:45 existing blob: sha256:90a7e2cb4d7460e55f83c6e47f9f8d089895ee6e1cc51ae5c23eab3bdcb70363\n", + "2019/07/18 21:45:45 existing blob: sha256:1cf84c00b8903926c231b4b5974c0419556a4a578bf9416f585fcbf1b7aa70ab\n", + "2019/07/18 21:45:46 pushed blob sha256:8ab941f264e893bf2d02a0f6d2972fa5f725995cba85b0a897cee1531525bba1\n", + "2019/07/18 21:45:46 pushed blob sha256:acb611ba3316584866914521fe68dd9892e3fea865900f7c15f2f7268587cd93\n", + "2019/07/18 21:45:46 pushed blob sha256:80794aeb9ef80da69469ae895f20899b52d9115e4161543c83774863e97fc507\n", + "2019/07/18 21:45:47 gcr.io/issue-label-bot-dev/fairing-job/fairing-job:E480ACAF: digest: sha256:1c10c3629d920b78e54f16fe268eb77f976d1ff5a48b31a9f54df478ff012a2a size: 5468\n" + ] + } + ], + "source": [ + "cluster_builder = cluster.cluster.ClusterBuilder(registry=DOCKER_REGISTRY,\n", + " base_image=base_image,\n", + " namespace='kubeflow',\n", + " preprocessor=preprocessor,\n", + " pod_spec_mutators=[fairing.cloud.gcp.add_gcp_credentials_if_exists],\n", + " context_source=cluster.gcs_context.GCSContextSource())\n", + "cluster_builder.build()" + ] + }, + { + "cell_type": "code", + "execution_count": 12, "metadata": { "scrolled": true }, @@ -400,64 +523,60 @@ "name": "stderr", "output_type": "stream", "text": [ - "WARNING:fairing.builders.append.append:Building image using Append builder...\n", - "INFO:root:Creating docker context: /tmp/fairing_context_de6bgft2\n", - "INFO:root:Loading Docker credentials for repository 'gcr.io/kubeflow-images-public/xgboost-fairing-example-base:v-20190612'\n", - "INFO:root:Invoking 'docker-credential-gcloud' to obtain Docker credentials.\n", - "INFO:root:Successfully obtained Docker credentials.\n", - "WARNING:fairing.builders.append.append:Image successfully built in 1.157013630028814s.\n", - "WARNING:fairing.builders.append.append:Pushing image gcr.io/zahrakubeflowcodelab/fairing-job/fairing-job:6F63F28C...\n", - "INFO:root:Loading Docker credentials for repository 'gcr.io/zahrakubeflowcodelab/fairing-job/fairing-job:6F63F28C'\n", - "INFO:root:Invoking 'docker-credential-gcloud' to obtain Docker credentials.\n", - "INFO:root:Successfully obtained Docker credentials.\n", - "WARNING:fairing.builders.append.append:Uploading gcr.io/zahrakubeflowcodelab/fairing-job/fairing-job:6F63F28C\n", - "INFO:root:Layer sha256:2f1ee468081da0ca09360c50281ed261d8b3fb01f664262c3f278d8619eb4e9a exists, skipping\n", - "INFO:root:Layer sha256:90a7e2cb4d7460e55f83c6e47f9f8d089895ee6e1cc51ae5c23eab3bdcb70363 exists, skipping\n", - "INFO:root:Layer 
sha256:b893ca5fa31bb87be0d3fa3a403dac7ca12c955d6fd522fd35e3260dbd0e99da exists, skipping\n", - "INFO:root:Layer sha256:eed14867f5ee443ad7efc89d0d4392683799a413244feec120f43074bc2d43ef exists, skipping\n", - "INFO:root:Layer sha256:afde35469481d2bc446d649a7a3d099147bbf7696b66333e76a411686b617ea1 exists, skipping\n", - "INFO:root:Layer sha256:124c757242f88002a858c23fc79f8262f9587fa30fd92507e586ad074afb42b6 exists, skipping\n", - "INFO:root:Layer sha256:fa3f2f277e67c5cbbf1dac21dc27111a60d3cd2ef494d94aa1515d3319f2a245 exists, skipping\n", - "INFO:root:Layer sha256:07e06c833ecb3b115e378d7f2ba5817ba77cfd02f5794a9817ede0622fbbf8a5 exists, skipping\n", - "INFO:root:Layer sha256:507170ae8cfaca6cf2999295221d1324f1051fa15ba59e04dd7dafdc8de565bc exists, skipping\n", - "INFO:root:Layer sha256:0ba512db704a2eb85f7f372d1c809d58589531e3bae794f0aaba86cee912f923 exists, skipping\n", - "INFO:root:Layer sha256:079dd3e30fa3eed702bb20a2f725da9907e2732bdc4dfb2fb5084a3423c3f743 exists, skipping\n", - "INFO:root:Layer sha256:a5ba9de0ac70b35658f5898c27b52063a597d790308fb853021e881e04a6efb7 exists, skipping\n", - "INFO:root:Layer sha256:541a15d3a9d79f7d3e5e0f552f396406b3e3093247f71e0ae71dd8b7242ec428 exists, skipping\n", - "INFO:root:Layer sha256:9d866f8bde2a0d607a6d17edc0fbd5e00b58306efc2b0a57e0ba72f269e7c6be exists, skipping\n", - "INFO:root:Layer sha256:8143617e89d7ba1957e3dc6d7093a48bd0cd4a2a709bc0c9d0ffc6dde11467e8 exists, skipping\n", - "INFO:root:Layer sha256:398d32b153e84fe343f0c5b07d65e89b05551aae6cb8b3a03bb2b662976eb3b8 exists, skipping\n", - "INFO:root:Layer sha256:d13453f7d2b8d0adfd86c3989a5b695cef5afc3efaafe559643071f258c9f06d exists, skipping\n", - "INFO:root:Layer sha256:9ee379bde91a3cecfb08d4189af0a2bcecc2da1c5102e49443088ccd7bd9abfa exists, skipping\n", - "INFO:root:Layer sha256:2327f2e2474891211dbf7fb2d54e16e7b2889fea157b726645cc05e75ad917e8 exists, skipping\n", - "INFO:root:Layer sha256:0fd02182c40eb28e13c4d7efd5dd4c81d985d9b07c9c809cc26e7bdb2dced07e exists, skipping\n", - "INFO:root:Layer sha256:d099b15c53311dc296426716edabe61dcc19e88009c19098b17ba965357c4391 exists, skipping\n", - "INFO:root:Layer sha256:ecc17173ccb5b7692a6d31b0077b8e4f543fb45f8c2b5c252dcad9ad0c9be0f7 exists, skipping\n", - "INFO:root:Layer sha256:4bfa6a63a3897359eff3ca3ee27c2e05ba76b790a07e6583714c1d324c2d4f21 exists, skipping\n", - "INFO:root:Layer sha256:bbf0f5f91e8108d9b0be1ceeb749e63788ce7394a184bc8a70d24017eca7b7ba exists, skipping\n", - "INFO:root:Layer sha256:1cf84c00b8903926c231b4b5974c0419556a4a578bf9416f585fcbf1b7aa70ab exists, skipping\n", - "INFO:root:Layer sha256:e7fea64fabbc6f5961864ce5c6bcc143ab616d325b0c5a26848d8e427806104f exists, skipping\n", - "INFO:root:Layer sha256:55dbf73eb7c7c005c3ccff29b62ff180e2f29245d14794dd6d5d8ad855d0ea88 exists, skipping\n", - "INFO:root:Layer sha256:8c58e650bb886ab24426958165c15abe1a1c10e8710f50233701fd503e23e7ac exists, skipping\n", - "INFO:root:Layer sha256:bad6918fba4b1c68f82d1a4b6063b3ce64975a73b33b38b35454b1d484a6b57b exists, skipping\n", - "INFO:root:Layer sha256:5d8a6f34a39a1e098f09b39ee4e9d4a178fef6ec71c2046fe0b040c4667c8143 exists, skipping\n", - "INFO:root:Layer sha256:da6ff2e3a7d15c18b14d5ea8dbb333b0c912de8d39cee59de1a60bc3926ea0c2 pushed.\n", - "INFO:root:Layer sha256:4014b6769afd9b7e404e70bb8a089f355eb7cb6ffecd6f3167010f479a60f21b pushed.\n", - "INFO:root:Finished upload of: gcr.io/zahrakubeflowcodelab/fairing-job/fairing-job:6F63F28C\n", - "WARNING:fairing.builders.append.append:Pushed image gcr.io/zahrakubeflowcodelab/fairing-job/fairing-job:6F63F28C in 
3.056034044129774s.\n" + "Building image using Append builder...\n", + "Creating docker context: /tmp/fairing_context_xpzlon_h\n", + "build-train-deploy.py already exists in Fairing context, skipping...\n", + "Loading Docker credentials for repository 'gcr.io/issue-label-bot-dev/fairing-job/fairing-job:E480ACAF'\n", + "Invoking 'docker-credential-gcloud' to obtain Docker credentials.\n", + "Successfully obtained Docker credentials.\n", + "Image successfully built in 1.2515304939588532s.\n", + "Pushing image gcr.io/issue-label-bot-dev/fairing-job/fairing-job:DA1D5CB0...\n", + "Loading Docker credentials for repository 'gcr.io/issue-label-bot-dev/fairing-job/fairing-job:DA1D5CB0'\n", + "Invoking 'docker-credential-gcloud' to obtain Docker credentials.\n", + "Successfully obtained Docker credentials.\n", + "Uploading gcr.io/issue-label-bot-dev/fairing-job/fairing-job:DA1D5CB0\n", + "Layer sha256:9d866f8bde2a0d607a6d17edc0fbd5e00b58306efc2b0a57e0ba72f269e7c6be exists, skipping\n", + "Layer sha256:124c757242f88002a858c23fc79f8262f9587fa30fd92507e586ad074afb42b6 exists, skipping\n", + "Layer sha256:bbf0f5f91e8108d9b0be1ceeb749e63788ce7394a184bc8a70d24017eca7b7ba exists, skipping\n", + "Layer sha256:e7fea64fabbc6f5961864ce5c6bcc143ab616d325b0c5a26848d8e427806104f exists, skipping\n", + "Layer sha256:d099b15c53311dc296426716edabe61dcc19e88009c19098b17ba965357c4391 exists, skipping\n", + "Layer sha256:079dd3e30fa3eed702bb20a2f725da9907e2732bdc4dfb2fb5084a3423c3f743 exists, skipping\n", + "Layer sha256:80794aeb9ef80da69469ae895f20899b52d9115e4161543c83774863e97fc507 exists, skipping\n", + "Layer sha256:eed14867f5ee443ad7efc89d0d4392683799a413244feec120f43074bc2d43ef exists, skipping\n", + "Layer sha256:55dbf73eb7c7c005c3ccff29b62ff180e2f29245d14794dd6d5d8ad855d0ea88 exists, skipping\n", + "Layer sha256:8ab941f264e893bf2d02a0f6d2972fa5f725995cba85b0a897cee1531525bba1 exists, skipping\n", + "Layer sha256:2327f2e2474891211dbf7fb2d54e16e7b2889fea157b726645cc05e75ad917e8 exists, skipping\n", + "Layer sha256:fa3f2f277e67c5cbbf1dac21dc27111a60d3cd2ef494d94aa1515d3319f2a245 exists, skipping\n", + "Layer sha256:afde35469481d2bc446d649a7a3d099147bbf7696b66333e76a411686b617ea1 exists, skipping\n", + "Layer sha256:d13453f7d2b8d0adfd86c3989a5b695cef5afc3efaafe559643071f258c9f06d exists, skipping\n", + "Layer sha256:2f1ee468081da0ca09360c50281ed261d8b3fb01f664262c3f278d8619eb4e9a exists, skipping\n", + "Layer sha256:8c58e650bb886ab24426958165c15abe1a1c10e8710f50233701fd503e23e7ac exists, skipping\n", + "Layer sha256:507170ae8cfaca6cf2999295221d1324f1051fa15ba59e04dd7dafdc8de565bc exists, skipping\n", + "Layer sha256:b893ca5fa31bb87be0d3fa3a403dac7ca12c955d6fd522fd35e3260dbd0e99da exists, skipping\n", + "Layer sha256:8143617e89d7ba1957e3dc6d7093a48bd0cd4a2a709bc0c9d0ffc6dde11467e8 exists, skipping\n", + "Layer sha256:1cf84c00b8903926c231b4b5974c0419556a4a578bf9416f585fcbf1b7aa70ab exists, skipping\n", + "Layer sha256:4bfa6a63a3897359eff3ca3ee27c2e05ba76b790a07e6583714c1d324c2d4f21 exists, skipping\n", + "Layer sha256:5d8a6f34a39a1e098f09b39ee4e9d4a178fef6ec71c2046fe0b040c4667c8143 exists, skipping\n", + "Layer sha256:0ba512db704a2eb85f7f372d1c809d58589531e3bae794f0aaba86cee912f923 exists, skipping\n", + "Layer sha256:a5ba9de0ac70b35658f5898c27b52063a597d790308fb853021e881e04a6efb7 exists, skipping\n", + "Layer sha256:bad6918fba4b1c68f82d1a4b6063b3ce64975a73b33b38b35454b1d484a6b57b exists, skipping\n", + "Layer sha256:0fd02182c40eb28e13c4d7efd5dd4c81d985d9b07c9c809cc26e7bdb2dced07e exists, skipping\n", + "Layer 
sha256:541a15d3a9d79f7d3e5e0f552f396406b3e3093247f71e0ae71dd8b7242ec428 exists, skipping\n", + "Layer sha256:ecc17173ccb5b7692a6d31b0077b8e4f543fb45f8c2b5c252dcad9ad0c9be0f7 exists, skipping\n", + "Layer sha256:07e06c833ecb3b115e378d7f2ba5817ba77cfd02f5794a9817ede0622fbbf8a5 exists, skipping\n", + "Layer sha256:9ee379bde91a3cecfb08d4189af0a2bcecc2da1c5102e49443088ccd7bd9abfa exists, skipping\n", + "Layer sha256:90a7e2cb4d7460e55f83c6e47f9f8d089895ee6e1cc51ae5c23eab3bdcb70363 exists, skipping\n", + "Layer sha256:398d32b153e84fe343f0c5b07d65e89b05551aae6cb8b3a03bb2b662976eb3b8 exists, skipping\n", + "Layer sha256:3885f9a80c70bf1aa3d3b925004fcca76334d45aa96d5e95412b40cae1dbdbba pushed.\n", + "Layer sha256:e94d45d512ce4033820c7df7dae67aa2d300528fed0ea5a53d6dcd099b2e4ca1 pushed.\n", + "Finished upload of: gcr.io/issue-label-bot-dev/fairing-job/fairing-job:DA1D5CB0\n", + "Pushed image gcr.io/issue-label-bot-dev/fairing-job/fairing-job:DA1D5CB0 in 3.6773080190178007s.\n" ] } ], "source": [ - "preprocessor = ConvertNotebookPreprocessorWithFire(\"HousingServe\")\n", - "\n", - "if not preprocessor.input_files:\n", - " preprocessor.input_files = set()\n", - "input_files=[\"xgboost_util.py\", \"mockup-model.dat\"]\n", - "preprocessor.input_files = set([os.path.normpath(f) for f in input_files])\n", - "preprocessor.preprocess()\n", "builder = append.append.AppendBuilder(registry=DOCKER_REGISTRY,\n", - " base_image=base_image, preprocessor=preprocessor)\n", + " base_image=cluster_builder.image_tag, preprocessor=preprocessor)\n", "builder.build()\n" ] }, diff --git a/xgboost_synthetic/util.py b/xgboost_synthetic/util.py index 03058c89..41cb1b55 100644 --- a/xgboost_synthetic/util.py +++ b/xgboost_synthetic/util.py @@ -11,6 +11,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.20/kfp.tar def notebook_setup(): # Install the SDK + subprocess.check_call(["pip3", "install", "-r", "requirements.txt"]) subprocess.check_call(["pip3", "install", KFP_PACKAGE, "--upgrade"]) logging.basicConfig(format='%(message)s') From 6e5ba488e2b0ec96248eb821d06ddc5d7a903f4f Mon Sep 17 00:00:00 2001 From: Chun-Hsiang Wang Date: Mon, 22 Jul 2019 18:20:54 -0700 Subject: [PATCH 3/8] Update readme for xgboost-synthetic and remove outdated yaml file (#605) * Update readme for xgboost-synthetic and remove outdated yaml file. * Update the class name to be more general. * Update readme. * Set google_application_credentials in the notebook. * Install fairing from master branch. * Do not set credentials again. * Update readme. --- xgboost_synthetic/README.md | 14 +- xgboost_synthetic/build-train-deploy.ipynb | 124 +++++++++++++++++- .../notebook.xgboost-synthetic.yaml | 30 ----- 3 files changed, 125 insertions(+), 43 deletions(-) delete mode 100644 xgboost_synthetic/notebook.xgboost-synthetic.yaml diff --git a/xgboost_synthetic/README.md b/xgboost_synthetic/README.md index 273fd296..3f58231d 100644 --- a/xgboost_synthetic/README.md +++ b/xgboost_synthetic/README.md @@ -1,9 +1,11 @@ # xgboost-synthetic -Kubeflow fairing, pipelines demo using synthetic data +Kubeflow fairing, pipelines demo using synthetic data. This notebook `build-train-deploy.ipynb` can be executed using one of the stock notebook images launched through Kubeflow UI. -1. Launch a notebook +1. Follow the [Set up your notebook](https://www.kubeflow.org/docs/notebooks/setup/) guide to get started with Jupyter notebooks on Kubeflow - ``` - kubectl apply -f notebook.xgboost-synthetic.yaml - ``` -1. Attach an extra data volume named +1. 
Open the notebook terminal and run + ``` + $ git clone https://github.com/kubeflow/examples.git + ``` + +1. In the directory `xgboost_synthetic`, open the notebook `build-train-deploy.ipynb` diff --git a/xgboost_synthetic/build-train-deploy.ipynb b/xgboost_synthetic/build-train-deploy.ipynb index 1ee0422e..51a3150a 100644 --- a/xgboost_synthetic/build-train-deploy.ipynb +++ b/xgboost_synthetic/build-train-deploy.ipynb @@ -29,6 +29,116 @@ "Import the libraries required to train this model." ] }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: retrying in ./env/lib/python3.6/site-packages\n", + "Requirement already satisfied: six>=1.7.0 in ./env/lib/python3.6/site-packages (from retrying)\n", + "Collecting https://github.com/kubeflow/fairing/archive/master.zip\n", + " Downloading https://github.com/kubeflow/fairing/archive/master.zip (878kB)\n", + "\u001b[K 100% |████████████████████████████████| 880kB 1.6MB/s eta 0:00:01\n", + "\u001b[?25h Requirement already satisfied (use --upgrade to upgrade): fairing==0.5.3 from https://github.com/kubeflow/fairing/archive/master.zip in ./env/lib/python3.6/site-packages/fairing-0.5.3-py3.6.egg\n", + "Requirement already satisfied: docker>=3.4.1 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", + "Requirement already satisfied: notebook>=5.6.0 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", + "Requirement already satisfied: kubernetes>=9.0.0 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", + "Requirement already satisfied: future>=0.17.1 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", + "Requirement already satisfied: six>=1.11.0 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", + "Requirement already satisfied: google-cloud-storage>=1.13.2 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", + "Requirement already satisfied: requests>=2.21.0 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", + "Requirement already satisfied: setuptools>=34.0.0 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", + "Requirement already satisfied: google-auth>=1.6.2 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", + "Requirement already satisfied: httplib2>=0.12.0 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", + "Requirement already satisfied: oauth2client>=4.0.0 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", + "Requirement already satisfied: tornado<6.0.0,>=5.1.1 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", + "Requirement already satisfied: google-api-python-client>=1.7.8 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", + "Requirement already satisfied: cloudpickle>=0.8 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", + "Requirement already satisfied: numpy>=1.14 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", + "Collecting urllib3==1.24.2 (from fairing==0.5.3)\n", + " Using cached https://files.pythonhosted.org/packages/df/1c/59cca3abf96f991f2ec3131a4ffe72ae3d9ea1f5894abe8a9c5e3c77cfee/urllib3-1.24.2-py2.py3-none-any.whl\n", + "Requirement already satisfied: boto3>=1.9.0 in ./env/lib/python3.6/site-packages/boto3-1.9.187-py3.6.egg (from fairing==0.5.3)\n", + "Requirement already satisfied: websocket-client>=0.32.0 in ./env/lib/python3.6/site-packages (from docker>=3.4.1->fairing==0.5.3)\n", + "Requirement already satisfied: nbconvert 
in ./env/lib/python3.6/site-packages (from notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: prometheus-client in ./env/lib/python3.6/site-packages (from notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: traitlets>=4.2.1 in ./env/lib/python3.6/site-packages (from notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: pyzmq>=17 in ./env/lib/python3.6/site-packages (from notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: Send2Trash in ./env/lib/python3.6/site-packages (from notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: jupyter-client>=5.2.0 in ./env/lib/python3.6/site-packages (from notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: ipython-genutils in ./env/lib/python3.6/site-packages (from notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: nbformat in ./env/lib/python3.6/site-packages (from notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: ipykernel in ./env/lib/python3.6/site-packages (from notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: terminado>=0.8.1 in ./env/lib/python3.6/site-packages (from notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: jinja2 in ./env/lib/python3.6/site-packages (from notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: jupyter-core>=4.4.0 in ./env/lib/python3.6/site-packages (from notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: certifi>=14.05.14 in ./env/lib/python3.6/site-packages (from kubernetes>=9.0.0->fairing==0.5.3)\n", + "Requirement already satisfied: python-dateutil>=2.5.3 in ./env/lib/python3.6/site-packages (from kubernetes>=9.0.0->fairing==0.5.3)\n", + "Requirement already satisfied: requests-oauthlib in ./env/lib/python3.6/site-packages (from kubernetes>=9.0.0->fairing==0.5.3)\n", + "Requirement already satisfied: pyyaml>=3.12 in ./env/lib/python3.6/site-packages (from kubernetes>=9.0.0->fairing==0.5.3)\n", + "Requirement already satisfied: google-resumable-media>=0.3.1 in ./env/lib/python3.6/site-packages (from google-cloud-storage>=1.13.2->fairing==0.5.3)\n", + "Requirement already satisfied: google-cloud-core<2.0dev,>=1.0.0 in ./env/lib/python3.6/site-packages (from google-cloud-storage>=1.13.2->fairing==0.5.3)\n", + "Requirement already satisfied: idna<2.9,>=2.5 in ./env/lib/python3.6/site-packages (from requests>=2.21.0->fairing==0.5.3)\n", + "Requirement already satisfied: chardet<3.1.0,>=3.0.2 in ./env/lib/python3.6/site-packages (from requests>=2.21.0->fairing==0.5.3)\n", + "Requirement already satisfied: rsa>=3.1.4 in ./env/lib/python3.6/site-packages (from google-auth>=1.6.2->fairing==0.5.3)\n", + "Requirement already satisfied: cachetools>=2.0.0 in ./env/lib/python3.6/site-packages (from google-auth>=1.6.2->fairing==0.5.3)\n", + "Requirement already satisfied: pyasn1-modules>=0.2.1 in ./env/lib/python3.6/site-packages (from google-auth>=1.6.2->fairing==0.5.3)\n", + "Requirement already satisfied: pyasn1>=0.1.7 in ./env/lib/python3.6/site-packages (from oauth2client>=4.0.0->fairing==0.5.3)\n", + "Requirement already satisfied: uritemplate<4dev,>=3.0.0 in ./env/lib/python3.6/site-packages (from google-api-python-client>=1.7.8->fairing==0.5.3)\n", + "Requirement already satisfied: google-auth-httplib2>=0.0.3 in ./env/lib/python3.6/site-packages (from google-api-python-client>=1.7.8->fairing==0.5.3)\n", + "Requirement already satisfied: botocore<1.13.0,>=1.12.187 in 
./env/lib/python3.6/site-packages/botocore-1.12.187-py3.6.egg (from boto3>=1.9.0->fairing==0.5.3)\n", + "Requirement already satisfied: jmespath<1.0.0,>=0.7.1 in ./env/lib/python3.6/site-packages/jmespath-0.9.4-py3.6.egg (from boto3>=1.9.0->fairing==0.5.3)\n", + "Requirement already satisfied: s3transfer<0.3.0,>=0.2.0 in ./env/lib/python3.6/site-packages/s3transfer-0.2.1-py3.6.egg (from boto3>=1.9.0->fairing==0.5.3)\n", + "Requirement already satisfied: pandocfilters>=1.4.1 in ./env/lib/python3.6/site-packages (from nbconvert->notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: bleach in ./env/lib/python3.6/site-packages (from nbconvert->notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: testpath in ./env/lib/python3.6/site-packages (from nbconvert->notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: defusedxml in ./env/lib/python3.6/site-packages (from nbconvert->notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: entrypoints>=0.2.2 in ./env/lib/python3.6/site-packages (from nbconvert->notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: pygments in ./env/lib/python3.6/site-packages (from nbconvert->notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: mistune>=0.8.1 in ./env/lib/python3.6/site-packages (from nbconvert->notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: decorator in ./env/lib/python3.6/site-packages (from traitlets>=4.2.1->notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: jsonschema!=2.5.0,>=2.4 in ./env/lib/python3.6/site-packages (from nbformat->notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: ipython>=5.0.0 in ./env/lib/python3.6/site-packages (from ipykernel->notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: ptyprocess; os_name != \"nt\" in ./env/lib/python3.6/site-packages (from terminado>=0.8.1->notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: MarkupSafe>=0.23 in ./env/lib/python3.6/site-packages (from jinja2->notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: oauthlib>=3.0.0 in ./env/lib/python3.6/site-packages (from requests-oauthlib->kubernetes>=9.0.0->fairing==0.5.3)\n", + "Requirement already satisfied: google-api-core<2.0.0dev,>=1.11.0 in ./env/lib/python3.6/site-packages (from google-cloud-core<2.0dev,>=1.0.0->google-cloud-storage>=1.13.2->fairing==0.5.3)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: docutils>=0.10 in ./env/lib/python3.6/site-packages/docutils-0.14-py3.6.egg (from botocore<1.13.0,>=1.12.187->boto3>=1.9.0->fairing==0.5.3)\n", + "Requirement already satisfied: webencodings in ./env/lib/python3.6/site-packages (from bleach->nbconvert->notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: pyrsistent>=0.14.0 in ./env/lib/python3.6/site-packages (from jsonschema!=2.5.0,>=2.4->nbformat->notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: attrs>=17.4.0 in ./env/lib/python3.6/site-packages (from jsonschema!=2.5.0,>=2.4->nbformat->notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: pexpect; sys_platform != \"win32\" in ./env/lib/python3.6/site-packages (from ipython>=5.0.0->ipykernel->notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: jedi>=0.10 in ./env/lib/python3.6/site-packages (from ipython>=5.0.0->ipykernel->notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: 
backcall in ./env/lib/python3.6/site-packages (from ipython>=5.0.0->ipykernel->notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: prompt-toolkit<2.1.0,>=2.0.0 in ./env/lib/python3.6/site-packages (from ipython>=5.0.0->ipykernel->notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: pickleshare in ./env/lib/python3.6/site-packages (from ipython>=5.0.0->ipykernel->notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: googleapis-common-protos<2.0dev,>=1.6.0 in ./env/lib/python3.6/site-packages (from google-api-core<2.0.0dev,>=1.11.0->google-cloud-core<2.0dev,>=1.0.0->google-cloud-storage>=1.13.2->fairing==0.5.3)\n", + "Requirement already satisfied: pytz in ./env/lib/python3.6/site-packages (from google-api-core<2.0.0dev,>=1.11.0->google-cloud-core<2.0dev,>=1.0.0->google-cloud-storage>=1.13.2->fairing==0.5.3)\n", + "Requirement already satisfied: protobuf>=3.4.0 in ./env/lib/python3.6/site-packages (from google-api-core<2.0.0dev,>=1.11.0->google-cloud-core<2.0dev,>=1.0.0->google-cloud-storage>=1.13.2->fairing==0.5.3)\n", + "Requirement already satisfied: parso>=0.3.0 in ./env/lib/python3.6/site-packages (from jedi>=0.10->ipython>=5.0.0->ipykernel->notebook>=5.6.0->fairing==0.5.3)\n", + "Requirement already satisfied: wcwidth in ./env/lib/python3.6/site-packages (from prompt-toolkit<2.1.0,>=2.0.0->ipython>=5.0.0->ipykernel->notebook>=5.6.0->fairing==0.5.3)\n", + "Installing collected packages: urllib3\n", + " Found existing installation: urllib3 1.24.3\n", + " Uninstalling urllib3-1.24.3:\n", + " Successfully uninstalled urllib3-1.24.3\n", + "Successfully installed urllib3-1.24.2\n" + ] + } + ], + "source": [ + "!pip3 install retrying\n", + "!pip3 install https://github.com/kubeflow/fairing/archive/master.zip" + ] + }, { "cell_type": "code", "execution_count": 1, @@ -168,7 +278,7 @@ "outputs": [], "source": [ "# fairing:include-cell\n", - "class HousingServe(object):\n", + "class ModelServe(object):\n", " \n", " def __init__(self, model_file=None):\n", " self.n_estimators = 50\n", @@ -298,7 +408,7 @@ } ], "source": [ - "HousingServe(model_file=\"mockup-model.dat\").train()" + "ModelServe(model_file=\"mockup-model.dat\").train()" ] }, { @@ -338,7 +448,7 @@ "source": [ "(train_X, train_y), (test_X, test_y) =read_synthetic_input()\n", "\n", - "HousingServe().predict(test_X, None)" + "ModelServe().predict(test_X, None)" ] }, { @@ -411,7 +521,7 @@ ], "source": [ "from fairing.builders import cluster\n", - "preprocessor = ConvertNotebookPreprocessorWithFire(\"HousingServe\")\n", + "preprocessor = ConvertNotebookPreprocessorWithFire(\"ModelServe\")\n", "\n", "if not preprocessor.input_files:\n", " preprocessor.input_files = set()\n", @@ -800,7 +910,7 @@ "pod_spec = builder.generate_pod_spec()\n", "\n", "module_name = os.path.splitext(preprocessor.executable.name)[0]\n", - "deployer = serving.serving.Serving(module_name + \".HousingServe\",\n", + "deployer = serving.serving.Serving(module_name + \".ModelServe\",\n", " service_type=\"ClusterIP\",\n", " labels={\"app\": \"mockup\"})\n", " \n", @@ -859,7 +969,7 @@ " containers:\r\n", " - command:\r\n", " - seldon-core-microservice\r\n", - " - mockup-data-xgboost-build-train-deploy.HousingServe\r\n", + " - mockup-data-xgboost-build-train-deploy.ModelServe\r\n", " - REST\r\n", " - --service-type=MODEL\r\n", " - --persistence=0\r\n", @@ -1113,7 +1223,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.5" + "version": "3.6.6" } }, "nbformat": 4, diff 
--git a/xgboost_synthetic/notebook.xgboost-synthetic.yaml b/xgboost_synthetic/notebook.xgboost-synthetic.yaml deleted file mode 100644 index 73b7a228..00000000 --- a/xgboost_synthetic/notebook.xgboost-synthetic.yaml +++ /dev/null @@ -1,30 +0,0 @@ -apiVersion: kubeflow.org/v1alpha1 -kind: Notebook -metadata: - labels: - app: notebook - name: xgboost-synthetic - namespace: kubeflow -spec: - template: - spec: - containers: - - env: [] - image: gcr.io/kubeflow-images-public/tensorflow-1.12.0-notebook-cpu:v0.5.0 - name: tf-cpu - resources: - limits: - cpu: 8 - memory: 16Gi - requests: - cpu: 1 - memory: 1Gi - volumeMounts: - - mountPath: /home/jovyan - name: xgboost-synthetic - serviceAccountName: jupyter-notebook - ttlSecondsAfterFinished: 300 - volumes: - - name: xgboost-synthetic - persistentVolumeClaim: - claimName: xgboost-synthetic From a9c6e69f0e9fc92859144f36f370743b28368dc5 Mon Sep 17 00:00:00 2001 From: David Sabater Dinter Date: Thu, 25 Jul 2019 03:23:52 +0100 Subject: [PATCH 4/8] Lint fixes mnist (#581) * Remove modules from .pylintrc * Add lint inline exceptions * Add lint inline exceptions as all as the specific exception is not available for Pylint 1.8 * Fix string formatting logging message and remove unnecessary Pylint exception * Update app.yaml with correct environment details --- .pylintrc | 2 +- pytorch_mnist/ks_app/app.yaml | 13 ++++--- .../serving/seldon-wrapper/mnistddpserving.py | 2 +- pytorch_mnist/training/ddp/mnist/mnist_DDP.py | 39 ++++++++++--------- 4 files changed, 30 insertions(+), 26 deletions(-) diff --git a/.pylintrc b/.pylintrc index 3df7b18d..bd424053 100644 --- a/.pylintrc +++ b/.pylintrc @@ -13,7 +13,7 @@ ignore=third_party # Add files or directories matching the regex patterns to the blacklist. The # regex matches against base names, not paths. -ignore-patterns=object_detection_grpc_client.py,prediction_pb2.py,prediction_pb2_grpc.py,mnist_DDP.py,mnistddpserving.py +ignore-patterns=object_detection_grpc_client.py,prediction_pb2.py,prediction_pb2_grpc.py # Pickle collected data for later comparisons. persistent=no diff --git a/pytorch_mnist/ks_app/app.yaml b/pytorch_mnist/ks_app/app.yaml index 2baa0bd2..6b0280e5 100644 --- a/pytorch_mnist/ks_app/app.yaml +++ b/pytorch_mnist/ks_app/app.yaml @@ -1,8 +1,11 @@ apiVersion: 0.3.0 +environments: + default: + destination: + namespace: default + server: https://104.154.168.244 + k8sVersion: v1.8.0 + path: default kind: ksonnet.io/app -name: ks-app -registries: - incubator: - protocol: github - uri: github.com/ksonnet/parts/tree/master/incubator +name: ks_app version: 0.0.1 diff --git a/pytorch_mnist/serving/seldon-wrapper/mnistddpserving.py b/pytorch_mnist/serving/seldon-wrapper/mnistddpserving.py index 1d821788..4b418b1a 100644 --- a/pytorch_mnist/serving/seldon-wrapper/mnistddpserving.py +++ b/pytorch_mnist/serving/seldon-wrapper/mnistddpserving.py @@ -34,7 +34,7 @@ class Net(torch.nn.Module): self.fc1 = torch.nn.Linear(320, 50) self.fc2 = torch.nn.Linear(50, 10) - def forward(self, x): + def forward(self, x): # pylint: disable = arguments-differ x = f.relu(f.max_pool2d(self.conv1(x), 2)) x = f.relu(f.max_pool2d(self.conv2_drop(self.conv2(x)), 2)) x = x.view(-1, 320) diff --git a/pytorch_mnist/training/ddp/mnist/mnist_DDP.py b/pytorch_mnist/training/ddp/mnist/mnist_DDP.py index 253d21e0..39e9b63c 100755 --- a/pytorch_mnist/training/ddp/mnist/mnist_DDP.py +++ b/pytorch_mnist/training/ddp/mnist/mnist_DDP.py @@ -17,15 +17,14 @@ limitations under the License. 
import datetime import logging import os -import sys from math import ceil from random import Random import torch import torch.distributed as dist -import torch.nn as nn +import torch.nn as nn # pylint: disable = all import torch.nn.functional as F -import torch.optim as optim +import torch.optim as optim # pylint: disable = all import torch.utils.data import torch.utils.data.distributed from torch._utils import _flatten_dense_tensors, _unflatten_dense_tensors @@ -44,7 +43,7 @@ class DistributedDataParallel(Module): def allreduce_params(): if self.needs_reduction: - self.needs_reduction = False + self.needs_reduction = False # pylint: disable = attribute-defined-outside-init buckets = {} for param in self.module.parameters(): if param.requires_grad and param.grad is not None: @@ -62,8 +61,8 @@ class DistributedDataParallel(Module): buf.copy_(synced) for param in list(self.module.parameters()): - def allreduce_hook(*unused): - Variable._execution_engine.queue_callback(allreduce_params) + def allreduce_hook(*unused): # pylint: disable = unused-argument + Variable._execution_engine.queue_callback(allreduce_params) # pylint: disable = protected-access if param.requires_grad: param.register_hook(allreduce_hook) @@ -72,17 +71,17 @@ class DistributedDataParallel(Module): for param in self.module.parameters(): dist.broadcast(param.data, 0) - def forward(self, *inputs, **kwargs): + def forward(self, *inputs, **kwargs): # pylint: disable = arguments-differ if self.first_call: logging.info("first broadcast start") self.weight_broadcast() self.first_call = False logging.info("first broadcast done") - self.needs_reduction = True + self.needs_reduction = True # pylint: disable = attribute-defined-outside-init return self.module(*inputs, **kwargs) -class Partition(object): +class Partition(object): # pylint: disable = all """ Dataset-like object, but only access a subset of it. """ def __init__(self, data, index): @@ -97,10 +96,10 @@ class Partition(object): return self.data[data_idx] -class DataPartitioner(object): +class DataPartitioner(object): # pylint: disable = all """ Partitions a dataset into different chuncks. 
""" - def __init__(self, data, sizes=[0.7, 0.2, 0.1], seed=1234): + def __init__(self, data, sizes=[0.7, 0.2, 0.1], seed=1234): # pylint: disable = dangerous-default-value self.data = data self.partitions = [] rng = Random() @@ -129,7 +128,7 @@ class Net(nn.Module): self.fc1 = nn.Linear(320, 50) self.fc2 = nn.Linear(50, 10) - def forward(self, x): + def forward(self, x): # pylint: disable = arguments-differ x = F.relu(F.max_pool2d(self.conv1(x), 2)) x = F.relu(F.max_pool2d(self.conv2_drop(self.conv2(x)), 2)) x = x.view(-1, 320) @@ -208,12 +207,14 @@ def run(modelpath, gpu): model_path = model_dir + "/model_gpu.dat" else: model_path = model_dir + "/model_cpu.dat" - logging.info("Saving model in {}".format(model_path)) + logging.info("Saving model in {}".format(model_path)) # pylint: disable = logging-format-interpolation torch.save(model.module.state_dict(), model_path) if gpu: - logging.info("GPU training time= {}".format(str(datetime.datetime.now() - time_start))) + logging.info("GPU training time= {}".format( # pylint: disable = logging-format-interpolation + str(datetime.datetime.now() - time_start))) # pylint: disable = logging-format-interpolation else: - logging.info("CPU training time= {}".format(str(datetime.datetime.now() - time_start))) + logging.info("CPU training time= {}".format( # pylint: disable = logging-format-interpolation + str(datetime.datetime.now() - time_start))) # pylint: disable = logging-format-interpolation if __name__ == "__main__": @@ -234,10 +235,10 @@ if __name__ == "__main__": args = parser.parse_args() if args.gpu: logging.info("\n======= CUDA INFO =======") - logging.info("CUDA Availibility:", torch.cuda.is_available()) - if (torch.cuda.is_available()): - logging.info("CUDA Device Name:", torch.cuda.get_device_name(0)) - logging.info("CUDA Version:", torch.version.cuda) + logging.info("CUDA Availibility: %s", torch.cuda.is_available()) + if torch.cuda.is_available(): + logging.info("CUDA Device Name: %s", torch.cuda.get_device_name(0)) + logging.info("CUDA Version: %s", torch.version.cuda) logging.info("=========================\n") dist.init_process_group(backend='gloo') run(modelpath=args.modelpath, gpu=args.gpu) From 607533311e8ef4a6fb9faa3a407c7f884bb6a045 Mon Sep 17 00:00:00 2001 From: Xiao Kou Date: Tue, 30 Jul 2019 11:12:51 +0800 Subject: [PATCH 5/8] Fix mnist readme service name and deployments name typo (#611) --- mnist/README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mnist/README.md b/mnist/README.md index 0ff8f7f0..9ff64ed0 100644 --- a/mnist/README.md +++ b/mnist/README.md @@ -703,12 +703,12 @@ kustomize build . |kubectl apply -f - You can check the deployment by running ``` -kubectl describe deployments mnist-deploy-local +kubectl describe deployments mnist-service-local ``` -The service should make the `mnist-deploy-local` deployment accessible over port 9000. +The service should make the `mnist-service-local` deployment accessible over port 9000. 
``` -kubectl describe service mnist-service +kubectl describe service mnist-service-local ``` ## Web Front End From 22de8cf7c1d0f1293406e21fdbec06cdb4cba4c6 Mon Sep 17 00:00:00 2001 From: Zhenghui Wang Date: Mon, 5 Aug 2019 20:45:54 -0700 Subject: [PATCH 6/8] Add metadata logging to xgboost-synthetic example (#610) * meta logging * lint * pip install fairing * update prredict() functuion --- xgboost_synthetic/build-train-deploy.ipynb | 293 +++++++++------------ 1 file changed, 124 insertions(+), 169 deletions(-) diff --git a/xgboost_synthetic/build-train-deploy.ipynb b/xgboost_synthetic/build-train-deploy.ipynb index 51a3150a..2db84a32 100644 --- a/xgboost_synthetic/build-train-deploy.ipynb +++ b/xgboost_synthetic/build-train-deploy.ipynb @@ -31,117 +31,18 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Requirement already satisfied: retrying in ./env/lib/python3.6/site-packages\n", - "Requirement already satisfied: six>=1.7.0 in ./env/lib/python3.6/site-packages (from retrying)\n", - "Collecting https://github.com/kubeflow/fairing/archive/master.zip\n", - " Downloading https://github.com/kubeflow/fairing/archive/master.zip (878kB)\n", - "\u001b[K 100% |████████████████████████████████| 880kB 1.6MB/s eta 0:00:01\n", - "\u001b[?25h Requirement already satisfied (use --upgrade to upgrade): fairing==0.5.3 from https://github.com/kubeflow/fairing/archive/master.zip in ./env/lib/python3.6/site-packages/fairing-0.5.3-py3.6.egg\n", - "Requirement already satisfied: docker>=3.4.1 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", - "Requirement already satisfied: notebook>=5.6.0 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", - "Requirement already satisfied: kubernetes>=9.0.0 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", - "Requirement already satisfied: future>=0.17.1 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", - "Requirement already satisfied: six>=1.11.0 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", - "Requirement already satisfied: google-cloud-storage>=1.13.2 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", - "Requirement already satisfied: requests>=2.21.0 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", - "Requirement already satisfied: setuptools>=34.0.0 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", - "Requirement already satisfied: google-auth>=1.6.2 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", - "Requirement already satisfied: httplib2>=0.12.0 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", - "Requirement already satisfied: oauth2client>=4.0.0 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", - "Requirement already satisfied: tornado<6.0.0,>=5.1.1 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", - "Requirement already satisfied: google-api-python-client>=1.7.8 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", - "Requirement already satisfied: cloudpickle>=0.8 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", - "Requirement already satisfied: numpy>=1.14 in ./env/lib/python3.6/site-packages (from fairing==0.5.3)\n", - "Collecting urllib3==1.24.2 (from fairing==0.5.3)\n", - " Using cached https://files.pythonhosted.org/packages/df/1c/59cca3abf96f991f2ec3131a4ffe72ae3d9ea1f5894abe8a9c5e3c77cfee/urllib3-1.24.2-py2.py3-none-any.whl\n", - "Requirement already 
satisfied: boto3>=1.9.0 in ./env/lib/python3.6/site-packages/boto3-1.9.187-py3.6.egg (from fairing==0.5.3)\n", - "Requirement already satisfied: websocket-client>=0.32.0 in ./env/lib/python3.6/site-packages (from docker>=3.4.1->fairing==0.5.3)\n", - "Requirement already satisfied: nbconvert in ./env/lib/python3.6/site-packages (from notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: prometheus-client in ./env/lib/python3.6/site-packages (from notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: traitlets>=4.2.1 in ./env/lib/python3.6/site-packages (from notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: pyzmq>=17 in ./env/lib/python3.6/site-packages (from notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: Send2Trash in ./env/lib/python3.6/site-packages (from notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: jupyter-client>=5.2.0 in ./env/lib/python3.6/site-packages (from notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: ipython-genutils in ./env/lib/python3.6/site-packages (from notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: nbformat in ./env/lib/python3.6/site-packages (from notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: ipykernel in ./env/lib/python3.6/site-packages (from notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: terminado>=0.8.1 in ./env/lib/python3.6/site-packages (from notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: jinja2 in ./env/lib/python3.6/site-packages (from notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: jupyter-core>=4.4.0 in ./env/lib/python3.6/site-packages (from notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: certifi>=14.05.14 in ./env/lib/python3.6/site-packages (from kubernetes>=9.0.0->fairing==0.5.3)\n", - "Requirement already satisfied: python-dateutil>=2.5.3 in ./env/lib/python3.6/site-packages (from kubernetes>=9.0.0->fairing==0.5.3)\n", - "Requirement already satisfied: requests-oauthlib in ./env/lib/python3.6/site-packages (from kubernetes>=9.0.0->fairing==0.5.3)\n", - "Requirement already satisfied: pyyaml>=3.12 in ./env/lib/python3.6/site-packages (from kubernetes>=9.0.0->fairing==0.5.3)\n", - "Requirement already satisfied: google-resumable-media>=0.3.1 in ./env/lib/python3.6/site-packages (from google-cloud-storage>=1.13.2->fairing==0.5.3)\n", - "Requirement already satisfied: google-cloud-core<2.0dev,>=1.0.0 in ./env/lib/python3.6/site-packages (from google-cloud-storage>=1.13.2->fairing==0.5.3)\n", - "Requirement already satisfied: idna<2.9,>=2.5 in ./env/lib/python3.6/site-packages (from requests>=2.21.0->fairing==0.5.3)\n", - "Requirement already satisfied: chardet<3.1.0,>=3.0.2 in ./env/lib/python3.6/site-packages (from requests>=2.21.0->fairing==0.5.3)\n", - "Requirement already satisfied: rsa>=3.1.4 in ./env/lib/python3.6/site-packages (from google-auth>=1.6.2->fairing==0.5.3)\n", - "Requirement already satisfied: cachetools>=2.0.0 in ./env/lib/python3.6/site-packages (from google-auth>=1.6.2->fairing==0.5.3)\n", - "Requirement already satisfied: pyasn1-modules>=0.2.1 in ./env/lib/python3.6/site-packages (from google-auth>=1.6.2->fairing==0.5.3)\n", - "Requirement already satisfied: pyasn1>=0.1.7 in ./env/lib/python3.6/site-packages (from oauth2client>=4.0.0->fairing==0.5.3)\n", - "Requirement already satisfied: uritemplate<4dev,>=3.0.0 in ./env/lib/python3.6/site-packages (from 
google-api-python-client>=1.7.8->fairing==0.5.3)\n", - "Requirement already satisfied: google-auth-httplib2>=0.0.3 in ./env/lib/python3.6/site-packages (from google-api-python-client>=1.7.8->fairing==0.5.3)\n", - "Requirement already satisfied: botocore<1.13.0,>=1.12.187 in ./env/lib/python3.6/site-packages/botocore-1.12.187-py3.6.egg (from boto3>=1.9.0->fairing==0.5.3)\n", - "Requirement already satisfied: jmespath<1.0.0,>=0.7.1 in ./env/lib/python3.6/site-packages/jmespath-0.9.4-py3.6.egg (from boto3>=1.9.0->fairing==0.5.3)\n", - "Requirement already satisfied: s3transfer<0.3.0,>=0.2.0 in ./env/lib/python3.6/site-packages/s3transfer-0.2.1-py3.6.egg (from boto3>=1.9.0->fairing==0.5.3)\n", - "Requirement already satisfied: pandocfilters>=1.4.1 in ./env/lib/python3.6/site-packages (from nbconvert->notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: bleach in ./env/lib/python3.6/site-packages (from nbconvert->notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: testpath in ./env/lib/python3.6/site-packages (from nbconvert->notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: defusedxml in ./env/lib/python3.6/site-packages (from nbconvert->notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: entrypoints>=0.2.2 in ./env/lib/python3.6/site-packages (from nbconvert->notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: pygments in ./env/lib/python3.6/site-packages (from nbconvert->notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: mistune>=0.8.1 in ./env/lib/python3.6/site-packages (from nbconvert->notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: decorator in ./env/lib/python3.6/site-packages (from traitlets>=4.2.1->notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: jsonschema!=2.5.0,>=2.4 in ./env/lib/python3.6/site-packages (from nbformat->notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: ipython>=5.0.0 in ./env/lib/python3.6/site-packages (from ipykernel->notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: ptyprocess; os_name != \"nt\" in ./env/lib/python3.6/site-packages (from terminado>=0.8.1->notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: MarkupSafe>=0.23 in ./env/lib/python3.6/site-packages (from jinja2->notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: oauthlib>=3.0.0 in ./env/lib/python3.6/site-packages (from requests-oauthlib->kubernetes>=9.0.0->fairing==0.5.3)\n", - "Requirement already satisfied: google-api-core<2.0.0dev,>=1.11.0 in ./env/lib/python3.6/site-packages (from google-cloud-core<2.0dev,>=1.0.0->google-cloud-storage>=1.13.2->fairing==0.5.3)\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Requirement already satisfied: docutils>=0.10 in ./env/lib/python3.6/site-packages/docutils-0.14-py3.6.egg (from botocore<1.13.0,>=1.12.187->boto3>=1.9.0->fairing==0.5.3)\n", - "Requirement already satisfied: webencodings in ./env/lib/python3.6/site-packages (from bleach->nbconvert->notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: pyrsistent>=0.14.0 in ./env/lib/python3.6/site-packages (from jsonschema!=2.5.0,>=2.4->nbformat->notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: attrs>=17.4.0 in ./env/lib/python3.6/site-packages (from jsonschema!=2.5.0,>=2.4->nbformat->notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: pexpect; sys_platform != \"win32\" in 
./env/lib/python3.6/site-packages (from ipython>=5.0.0->ipykernel->notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: jedi>=0.10 in ./env/lib/python3.6/site-packages (from ipython>=5.0.0->ipykernel->notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: backcall in ./env/lib/python3.6/site-packages (from ipython>=5.0.0->ipykernel->notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: prompt-toolkit<2.1.0,>=2.0.0 in ./env/lib/python3.6/site-packages (from ipython>=5.0.0->ipykernel->notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: pickleshare in ./env/lib/python3.6/site-packages (from ipython>=5.0.0->ipykernel->notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: googleapis-common-protos<2.0dev,>=1.6.0 in ./env/lib/python3.6/site-packages (from google-api-core<2.0.0dev,>=1.11.0->google-cloud-core<2.0dev,>=1.0.0->google-cloud-storage>=1.13.2->fairing==0.5.3)\n", - "Requirement already satisfied: pytz in ./env/lib/python3.6/site-packages (from google-api-core<2.0.0dev,>=1.11.0->google-cloud-core<2.0dev,>=1.0.0->google-cloud-storage>=1.13.2->fairing==0.5.3)\n", - "Requirement already satisfied: protobuf>=3.4.0 in ./env/lib/python3.6/site-packages (from google-api-core<2.0.0dev,>=1.11.0->google-cloud-core<2.0dev,>=1.0.0->google-cloud-storage>=1.13.2->fairing==0.5.3)\n", - "Requirement already satisfied: parso>=0.3.0 in ./env/lib/python3.6/site-packages (from jedi>=0.10->ipython>=5.0.0->ipykernel->notebook>=5.6.0->fairing==0.5.3)\n", - "Requirement already satisfied: wcwidth in ./env/lib/python3.6/site-packages (from prompt-toolkit<2.1.0,>=2.0.0->ipython>=5.0.0->ipykernel->notebook>=5.6.0->fairing==0.5.3)\n", - "Installing collected packages: urllib3\n", - " Found existing installation: urllib3 1.24.3\n", - " Uninstalling urllib3-1.24.3:\n", - " Successfully uninstalled urllib3-1.24.3\n", - "Successfully installed urllib3-1.24.2\n" - ] - } - ], + "outputs": [], "source": [ "!pip3 install retrying\n", - "!pip3 install https://github.com/kubeflow/fairing/archive/master.zip" + "!pip3 install fairing\n", + "!pip3 install kfmd" ] }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 32, "metadata": { "scrolled": false }, @@ -150,12 +51,13 @@ "import util\n", "from pathlib import Path\n", "import os\n", + "\n", "util.notebook_setup()\n" ] }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 33, "metadata": {}, "outputs": [], "source": [ @@ -163,6 +65,7 @@ "import fire\n", "import joblib\n", "import logging\n", + "import kfmd\n", "import nbconvert\n", "import os\n", "import pathlib\n", @@ -175,12 +78,14 @@ "from sklearn.impute import SimpleImputer\n", "from xgboost import XGBRegressor\n", "from importlib import reload\n", - "from sklearn.datasets import make_regression\n" + "from sklearn.datasets import make_regression\n", + "from kfmd import metadata\n", + "from datetime import datetime\n" ] }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 34, "metadata": {}, "outputs": [], "source": [ @@ -199,7 +104,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 35, "metadata": {}, "outputs": [], "source": [ @@ -222,7 +127,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 36, "metadata": {}, "outputs": [], "source": [ @@ -249,7 +154,9 @@ "def eval_model(model, test_X, test_y):\n", " \"\"\"Evaluate the model performance.\"\"\"\n", " predictions = model.predict(test_X)\n", - " logging.info(\"mean_absolute_error=%.2f\", 
mean_absolute_error(predictions, test_y))\n", + " mae=mean_absolute_error(predictions, test_y)\n", + " logging.info(\"mean_absolute_error=%.2f\", mae)\n", + " return mae\n", "\n", "def save_model(model, model_file):\n", " \"\"\"Save XGBoost model for serving.\"\"\"\n", @@ -273,7 +180,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 37, "metadata": {}, "outputs": [], "source": [ @@ -295,9 +202,17 @@ " print(\"model_file={0}\".format(self.model_file))\n", " \n", " self.model = None\n", + " self.exec = self.create_execution()\n", "\n", " def train(self):\n", " (train_X, train_y), (test_X, test_y) = read_synthetic_input()\n", + " self.exec.log_input(metadata.DataSet(\n", + " description=\"xgboost synthetic data\",\n", + " name=\"synthetic-data\",\n", + " owner=\"someone@kubeflow.org\",\n", + " uri=\"file://path/to/dataset\",\n", + " version=\"v1.0.0\"))\n", + " \n", " model = train_model(train_X,\n", " train_y,\n", " test_X,\n", @@ -305,9 +220,32 @@ " self.n_estimators,\n", " self.learning_rate)\n", "\n", - " eval_model(model, test_X, test_y)\n", + " mae = eval_model(model, test_X, test_y)\n", + " self.exec.log_output(metadata.Metrics(\n", + " name=\"xgboost-synthetic-traing-eval\",\n", + " owner=\"someone@kubeflow.org\",\n", + " description=\"training evaluation for xgboost synthetic\",\n", + " uri=\"gcs://path/to/metrics\",\n", + " metrics_type=metadata.Metrics.VALIDATION,\n", + " values={\"mean_absolute_error\": mae}))\n", + " \n", " save_model(model, self.model_file)\n", - "\n", + " self.exec.log_output(metadata.Model(\n", + " name=\"housing-price-model\",\n", + " description=\"housing price prediction model using synthetic data\",\n", + " owner=\"someone@kubeflow.org\",\n", + " uri=self.model_file,\n", + " model_type=\"linear_regression\",\n", + " training_framework={\n", + " \"name\": \"xgboost\",\n", + " \"version\": \"0.9.0\"\n", + " },\n", + " hyperparameters={\n", + " \"learning_rate\": self.learning_rate,\n", + " \"n_estimators\": self.n_estimators\n", + " },\n", + " version=datetime.utcnow().isoformat(\"T\")))\n", + " \n", " def predict(self, X, feature_names):\n", " \"\"\"Predict using the model for given ndarray.\"\"\"\n", " if not self.model:\n", @@ -315,7 +253,25 @@ " # Do any preprocessing\n", " prediction = self.model.predict(data=X)\n", " # Do any postprocessing\n", - " return [[prediction.item(0), prediction.item(0)]]" + " return [[prediction.item(0), prediction.item(1)]]\n", + " \n", + " def create_execution(self):\n", + " workspace = metadata.Workspace(\n", + " # Connect to metadata-service in namesapce kubeflow in k8s cluster.\n", + " backend_url_prefix=\"metadata-service.kubeflow:8080\",\n", + " name=\"xgboost-synthetic\",\n", + " description=\"workspace for xgboost-synthetic artifacts and executions\")\n", + " \n", + " r = metadata.Run(\n", + " workspace=workspace,\n", + " name=\"xgboost-synthetic-faring-run\" + datetime.utcnow().isoformat(\"T\"),\n", + " description=\"a notebook run\")\n", + "\n", + " return metadata.Execution(\n", + " name = \"execution\" + datetime.utcnow().isoformat(\"T\"),\n", + " workspace=workspace,\n", + " run=r,\n", + " description=\"execution for training xgboost-synthetic\")" ] }, { @@ -329,7 +285,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 39, "metadata": {}, "outputs": [ { @@ -337,65 +293,64 @@ "output_type": "stream", "text": [ "model_file=mockup-model.dat\n", - "[14:45:28] WARNING: /workspace/src/objective/regression_obj.cu:152: reg:linear is now deprecated in favor of 
reg:squarederror.\n", - "[0]\tvalidation_0-rmse:95.4029\n", + "[0]\tvalidation_0-rmse:145.743\n", "Will train until validation_0-rmse hasn't improved in 40 rounds.\n", - "[1]\tvalidation_0-rmse:88.2563\n", - "[2]\tvalidation_0-rmse:82.341\n", - "[3]\tvalidation_0-rmse:76.7723\n", - "[4]\tvalidation_0-rmse:71.9907\n", - "[5]\tvalidation_0-rmse:67.7698\n", - "[6]\tvalidation_0-rmse:63.2959\n", - "[7]\tvalidation_0-rmse:59.8439\n", - "[8]\tvalidation_0-rmse:56.9911\n", - "[9]\tvalidation_0-rmse:53.8091\n", - "[10]\tvalidation_0-rmse:51.4086\n", - "[11]\tvalidation_0-rmse:49.1506\n", - "[12]\tvalidation_0-rmse:47.4958\n", - "[13]\tvalidation_0-rmse:46.0659\n", - "[14]\tvalidation_0-rmse:44.4425\n", - "[15]\tvalidation_0-rmse:42.5323\n", - "[16]\tvalidation_0-rmse:41.3847\n", - "[17]\tvalidation_0-rmse:40.4265\n", - "[18]\tvalidation_0-rmse:39.2736\n", - "[19]\tvalidation_0-rmse:38.5218\n", - "[20]\tvalidation_0-rmse:37.3415\n", - "[21]\tvalidation_0-rmse:36.8546\n", - "[22]\tvalidation_0-rmse:36.0049\n", - "[23]\tvalidation_0-rmse:35.5978\n", - "[24]\tvalidation_0-rmse:35.0653\n", - "[25]\tvalidation_0-rmse:34.1586\n", - "[26]\tvalidation_0-rmse:33.6017\n", - "[27]\tvalidation_0-rmse:33.2441\n", - "[28]\tvalidation_0-rmse:32.477\n", - "[29]\tvalidation_0-rmse:31.7638\n", - "[30]\tvalidation_0-rmse:31.2781\n", - "[31]\tvalidation_0-rmse:30.9532\n", - "[32]\tvalidation_0-rmse:30.3881\n", - "[33]\tvalidation_0-rmse:29.9289\n", - "[34]\tvalidation_0-rmse:29.6362\n", - "[35]\tvalidation_0-rmse:29.3138\n", - "[36]\tvalidation_0-rmse:29.0621\n", - "[37]\tvalidation_0-rmse:28.5649\n", - "[38]\tvalidation_0-rmse:28.15\n", - "[39]\tvalidation_0-rmse:27.8467\n", - "[40]\tvalidation_0-rmse:27.5816\n", - "[41]\tvalidation_0-rmse:27.4534\n", - "[42]\tvalidation_0-rmse:27.2668\n", - "[43]\tvalidation_0-rmse:27.0583\n", - "[44]\tvalidation_0-rmse:26.7226\n", - "[45]\tvalidation_0-rmse:26.6145\n", - "[46]\tvalidation_0-rmse:26.3878\n", - "[47]\tvalidation_0-rmse:26.029\n", - "[48]\tvalidation_0-rmse:25.8776\n", - "[49]\tvalidation_0-rmse:25.8484\n" + "[1]\tvalidation_0-rmse:137.786\n", + "[2]\tvalidation_0-rmse:129.221\n", + "[3]\tvalidation_0-rmse:122.795\n", + "[4]\tvalidation_0-rmse:117.913\n", + "[5]\tvalidation_0-rmse:113.441\n", + "[6]\tvalidation_0-rmse:108.843\n", + "[7]\tvalidation_0-rmse:104.968\n", + "[8]\tvalidation_0-rmse:101.756\n", + "[9]\tvalidation_0-rmse:98.9659\n", + "[10]\tvalidation_0-rmse:96.2215\n", + "[11]\tvalidation_0-rmse:93.6806\n", + "[12]\tvalidation_0-rmse:90.5423\n", + "[13]\tvalidation_0-rmse:88.1216\n", + "[14]\tvalidation_0-rmse:85.4835\n", + "[15]\tvalidation_0-rmse:83.1785\n", + "[16]\tvalidation_0-rmse:80.9087\n", + "[17]\tvalidation_0-rmse:78.916\n", + "[18]\tvalidation_0-rmse:77.5187\n", + "[19]\tvalidation_0-rmse:75.0274\n", + "[20]\tvalidation_0-rmse:74.0297\n", + "[21]\tvalidation_0-rmse:72.1579\n", + "[22]\tvalidation_0-rmse:70.6119\n", + "[23]\tvalidation_0-rmse:69.7389\n", + "[24]\tvalidation_0-rmse:67.9469\n", + "[25]\tvalidation_0-rmse:66.8921\n", + "[26]\tvalidation_0-rmse:66.1554\n", + "[27]\tvalidation_0-rmse:64.6994\n", + "[28]\tvalidation_0-rmse:63.5188\n", + "[29]\tvalidation_0-rmse:62.7831\n", + "[30]\tvalidation_0-rmse:62.3533\n", + "[31]\tvalidation_0-rmse:61.9013\n", + "[32]\tvalidation_0-rmse:60.8512\n", + "[33]\tvalidation_0-rmse:60.1541\n", + "[34]\tvalidation_0-rmse:59.5948\n", + "[35]\tvalidation_0-rmse:59.0876\n", + "[36]\tvalidation_0-rmse:58.6049\n", + "[37]\tvalidation_0-rmse:58.2507\n", + "[38]\tvalidation_0-rmse:57.4195\n", + 
"[39]\tvalidation_0-rmse:57.0364\n", + "[40]\tvalidation_0-rmse:56.634\n", + "[41]\tvalidation_0-rmse:56.279\n", + "[42]\tvalidation_0-rmse:56.1874\n", + "[43]\tvalidation_0-rmse:55.5723\n", + "[44]\tvalidation_0-rmse:55.4855\n", + "[45]\tvalidation_0-rmse:54.8205\n", + "[46]\tvalidation_0-rmse:54.663\n", + "[47]\tvalidation_0-rmse:54.1199\n", + "[48]\tvalidation_0-rmse:53.8837\n", + "[49]\tvalidation_0-rmse:53.6094\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "mean_absolute_error=19.92\n", + "mean_absolute_error=41.16\n", "Model export success: mockup-model.dat\n" ] }, @@ -403,7 +358,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "Best RMSE on eval: %.2f with %d rounds 25.848402 50\n" + "Best RMSE on eval: %.2f with %d rounds 53.609386 50\n" ] } ], @@ -1223,7 +1178,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.6" + "version": "3.6.7" } }, "nbformat": 4, From ef9484595f23ac49881d981390d756941fd21f58 Mon Sep 17 00:00:00 2001 From: Simon Rey <51708585+eqqe@users.noreply.github.com> Date: Tue, 13 Aug 2019 01:03:38 +0200 Subject: [PATCH 7/8] Add tensorboard support for local mninst example (#616) * Add files via upload * Update kustomization.yaml * Update README.md * Update README.md * Update README.md --- mnist/README.md | 18 +++++++++++- mnist/monitoring/local/deployment_patch.yaml | 12 ++++++++ mnist/monitoring/local/kustomization.yaml | 30 ++++++++++++++++++++ mnist/monitoring/local/params.yaml | 5 ++++ 4 files changed, 64 insertions(+), 1 deletion(-) create mode 100644 mnist/monitoring/local/deployment_patch.yaml create mode 100644 mnist/monitoring/local/kustomization.yaml create mode 100644 mnist/monitoring/local/params.yaml diff --git a/mnist/README.md b/mnist/README.md index 9ff64ed0..e8a096cb 100644 --- a/mnist/README.md +++ b/mnist/README.md @@ -16,13 +16,14 @@ - [Using S3](#using-s3) - [Monitoring](#monitoring) - [Tensorboard](#tensorboard) + - [Local storage](#local-storage-1) - [Using GCS](#using-gcs-1) - [Using S3](#using-s3-1) - [Deploying TensorBoard](#deploying-tensorboard) - [Serving the model](#serving-the-model) - [GCS](#gcs) - [S3](#s3) - - [Local storage](#local-storage-1) + - [Local storage](#local-storage-2) - [Web Front End](#web-front-end) - [Connecting via port forwarding](#connecting-via-port-forwarding) - [Using IAP on GCP](#using-iap-on-gcp) @@ -469,6 +470,21 @@ There are various ways to monitor workflow/training job. In addition to using `k ### Tensorboard +#### Local storage + +Enter the `monitoring/local` from the `mnist` application directory. +``` +cd monitoring/local +``` + +Configure PVC name, mount point, and set log directory. +``` +kustomize edit add configmap mnist-map-monitoring --from-literal=pvcName=${PVC_NAME} +kustomize edit add configmap mnist-map-monitoring --from-literal=pvcMountPath=/mnt +kustomize edit add configmap mnist-map-monitoring --from-literal=logDir=/mnt +``` + + #### Using GCS Enter the `monitoring/GCS` from the `mnist` application directory. 
diff --git a/mnist/monitoring/local/deployment_patch.yaml b/mnist/monitoring/local/deployment_patch.yaml new file mode 100644 index 00000000..83ee30fe --- /dev/null +++ b/mnist/monitoring/local/deployment_patch.yaml @@ -0,0 +1,12 @@ +- op: add + path: /spec/template/spec/containers/0/volumeMounts + value: + - mountPath: $(pvcMountPath) + name: local-storage + +- op: add + path: /spec/template/spec/volumes + value: + - name: local-storage + persistentVolumeClaim: + claimName: $(pvcName) diff --git a/mnist/monitoring/local/kustomization.yaml b/mnist/monitoring/local/kustomization.yaml new file mode 100644 index 00000000..aef16d83 --- /dev/null +++ b/mnist/monitoring/local/kustomization.yaml @@ -0,0 +1,30 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization +bases: +- ../base +configurations: +- params.yaml + +vars: +- fieldref: + fieldPath: data.pvcName + name: pvcName + objref: + apiVersion: v1 + kind: ConfigMap + name: mnist-map-monitoring +- fieldref: + fieldPath: data.pvcMountPath + name: pvcMountPath + objref: + apiVersion: v1 + kind: ConfigMap + name: mnist-map-monitoring + +patchesJson6902: +- path: deployment_patch.yaml + target: + group: apps + kind: Deployment + name: tensorboard-tb + version: v1beta1 diff --git a/mnist/monitoring/local/params.yaml b/mnist/monitoring/local/params.yaml new file mode 100644 index 00000000..62647c8a --- /dev/null +++ b/mnist/monitoring/local/params.yaml @@ -0,0 +1,5 @@ +varReference: +- path: spec/template/spec/volumes/persistentVolumeClaim/claimName + kind: Deployment +- path: spec/template/spec/containers/volumeMounts/mountPath + kind: Deployment From 2acf34f9162a56b06b9c955c585c1735676e0447 Mon Sep 17 00:00:00 2001 From: MrXinWang <38582160+MrXinWang@users.noreply.github.com> Date: Thu, 15 Aug 2019 09:12:34 +0800 Subject: [PATCH 8/8] object_detection: fix typo error in tf-serving.libsonnet (#618) modified tf-serving.libsonnet in object_detection example to fix the error of "FileSystemStoragePathSource encountered a file-system access error: Could not find base path /models/model for servable model" Change-Id: I946a0a7fbb6c80992d66fe003ca90b1c21c67cfc Signed-off-by: Henry Wang --- .../ks-app/vendor/kubeflow/tf-serving/tf-serving.libsonnet | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/object_detection/ks-app/vendor/kubeflow/tf-serving/tf-serving.libsonnet b/object_detection/ks-app/vendor/kubeflow/tf-serving/tf-serving.libsonnet index 61273cf7..8e39ae8a 100644 --- a/object_detection/ks-app/vendor/kubeflow/tf-serving/tf-serving.libsonnet +++ b/object_detection/ks-app/vendor/kubeflow/tf-serving/tf-serving.libsonnet @@ -119,8 +119,10 @@ name: $.params.name, image: $.params.modelServerImage, imagePullPolicy: "IfNotPresent", - args: [ + command: [ "/usr/bin/tensorflow_model_server", + ], + args: [ "--port=9000", "--model_name=" + $.params.modelName, "--model_base_path=" + $.params.modelPath,