fix ML example (#5983)

* fix ML example

* fix dockerize

* type variable df

* fix code
Authored by mattn on 2024-12-29 11:17:11 +09:00, committed by GitHub
parent 99d9bb9c27
commit b8ff86b34a
3 changed files with 121 additions and 69 deletions


@@ -39,15 +39,18 @@ as API endpoint with Knative Serving.
 API, which is the entry point for accessing this machine learning service.
 ```python
-from bentoml import env, artifacts, api, BentoService
-from bentoml.handlers import DataframeHandler
-from bentoml.artifact import SklearnModelArtifact
-
-@env(auto_pip_dependencies=True)
-@artifacts([SklearnModelArtifact('model')])
-class IrisClassifier(BentoService):
-
-    @api(DataframeHandler)
+import bentoml
+import joblib
+
+
+@bentoml.service
+class IrisClassifier:
+    iris_model = bentoml.models.get("iris_classifier:latest")
+
+    def __init__(self):
+        self.model = joblib.load(self.iris_model.path_of("model.pkl"))
+
+    @bentoml.api
     def predict(self, df):
-        return self.artifacts.model.predict(df)
+        return self.model.predict(df)
 ```
@@ -58,10 +61,11 @@ as API endpoint with Knative Serving.
 given data and then save the model with BentoML to local disk.

 ```python
+import joblib
 from sklearn import svm
 from sklearn import datasets
-from iris_classifier import IrisClassifier
+import bentoml

 if __name__ == "__main__":
     # Load training data
@@ -72,14 +76,9 @@ as API endpoint with Knative Serving.
     clf = svm.SVC(gamma='scale')
     clf.fit(X, y)

-    # Create a iris classifier service instance
-    iris_classifier_service = IrisClassifier()
-    # Pack the newly trained model artifact
-    iris_classifier_service.pack('model', clf)
-    # Save the prediction service to disk for model serving
-    saved_path = iris_classifier_service.save()
+    with bentoml.models.create("iris_classifier") as bento_model:
+        joblib.dump(clf, bento_model.path_of("model.pkl"))
+        print(f"Model saved: {bento_model}")
 ```

 1. Run the `main.py` file to train and save the model:
@@ -91,48 +90,97 @@ as API endpoint with Knative Serving.
 1. Use BentoML CLI to check saved model's information.

 ```bash
-bentoml get IrisClassifier:latest
+bentoml get iris_classifier:latest
 ```

 Example:

 ```bash
-> bentoml get IrisClassifier:latest
-{
-  "name": "IrisClassifier",
-  "version": "20200305171229_0A1411",
-  "uri": {
-    "type": "LOCAL",
-    "uri": "/Users/bozhaoyu/bentoml/repository/IrisClassifier/20200305171229_0A1411"
-  },
-  "bentoServiceMetadata": {
-    "name": "IrisClassifier",
-    "version": "20200305171229_0A1411",
-    "createdAt": "2020-03-06T01:12:49.431011Z",
-    "env": {
-      "condaEnv": "name: bentoml-IrisClassifier\nchannels:\n- defaults\ndependencies:\n- python=3.7.3\n- pip\n",
-      "pipDependencies": "bentoml==0.6.2\nscikit-learn",
-      "pythonVersion": "3.7.3"
-    },
-    "artifacts": [
-      {
-        "name": "model",
-        "artifactType": "SklearnModelArtifact"
-      }
-    ],
-    "apis": [
-      {
-        "name": "predict",
-        "handlerType": "DataframeHandler",
-        "docs": "BentoService API",
-        "handlerConfig": {
-          "orient": "records",
-          "typ": "frame",
-          "input_dtypes": null,
-          "output_orient": "records"
-        }
-      }
-    ]
-  }
-}
+> bentoml get iris_classifier:latest -o json
+{
+  "service": "iris_classifier:IrisClassifier",
+  "name": "iris_classifier",
+  "version": "ar67rxqxqcrqi7ol",
+  "bentoml_version": "1.2.16",
+  "creation_time": "2024-05-21T14:40:20.737900+00:00",
+  "labels": {
+    "owner": "bentoml-team",
+    "project": "gallery"
+  },
+  "models": [],
+  "runners": [],
+  "entry_service": "IrisClassifier",
+  "services": [
+    {
+      "name": "IrisClassifier",
+      "service": "",
+      "models": [
+        {
+          "tag": "iris_sklearn:ml5evdaxpwrqi7ol",
+          "module": "",
+          "creation_time": "2024-05-21T14:21:17.070059+00:00"
+        }
+      ],
+      "dependencies": [],
+      "config": {}
+    }
+  ],
+  "envs": [],
+  "schema": {
+    "name": "IrisClassifier",
+    "type": "service",
+    "routes": [
+      {
+        "name": "predict",
+        "route": "/predict",
+        "batchable": false,
+        "input": {
+          "properties": {
+            "df": {
+              "title": "Df"
+            }
+          },
+          "required": [
+            "df"
+          ],
+          "title": "Input",
+          "type": "object"
+        },
+        "output": {
+          "title": "AnyIODescriptor"
+        }
+      }
+    ]
+  },
+  "apis": [],
+  "docker": {
+    "distro": "debian",
+    "python_version": "3.11",
+    "cuda_version": null,
+    "env": null,
+    "system_packages": null,
+    "setup_script": null,
+    "base_image": null,
+    "dockerfile_template": null
+  },
+  "python": {
+    "requirements_txt": "./requirements.txt",
+    "packages": null,
+    "lock_packages": true,
+    "pack_git_packages": true,
+    "index_url": null,
+    "no_index": null,
+    "trusted_host": null,
+    "find_links": null,
+    "extra_index_url": null,
+    "pip_args": null,
+    "wheels": null
+  },
+  "conda": {
+    "environment_yml": null,
+    "channels": null,
+    "dependencies": null,
+    "pip": null
+  }
+}
 ```
@@ -141,7 +189,7 @@ as API endpoint with Knative Serving.
 BentoML CLI command to start an API server locally and test it with the `curl` command.

 ```bash
-bentoml serve IrisClassifier:latest
+bentoml serve iris_classifier:latest
 ```

 In another terminal window, make `curl` request with sample data to the API server
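
Such a request can be sketched as follows, assuming BentoML's default port 3000 and the `/predict` route with a JSON body keyed by `df`, as listed in the schema output above:

```bash
# Sketch of a test request against the locally served IrisClassifier.
# Assumes the default BentoML HTTP port 3000 and the /predict route with
# a JSON body keyed by "df", matching the schema shown earlier.
curl -X POST http://localhost:3000/predict \
  -H 'Content-Type: application/json' \
  -d '{"df": [[5.2, 2.3, 5.0, 0.7]]}'
```
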
@@ -164,12 +212,8 @@ a Dockerfile is automatically generated when saving the model.
 username and run the following commands.

 ```bash
-# jq might not be installed on your local system, please follow jq install
-# instruction at https://stedolan.github.io/jq/download/
-saved_path=$(bentoml get IrisClassifier:latest -q | jq -r ".uri.uri")
-
 # Build and push the container on your local machine.
-docker buildx build --platform linux/arm64,linux/amd64 -t "{username}/iris-classifier" --push $saved_path
+bentoml containerize iris_classifier:latest -t "{username}/iris-classifier" --push
 ```

 1. In `service.yaml`, replace `{username}` with your Docker hub username:
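
Before deploying, a minimal local smoke test of the pushed image could look like the following, assuming the containerized service listens on BentoML's default port 3000:

```bash
# Sketch of a local smoke test for the containerized service.
# Assumes the image built by `bentoml containerize` serves on port 3000.
docker run --rm -p 3000:3000 "{username}/iris-classifier"
```
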


@@ -1,11 +1,23 @@
-from bentoml import env, artifacts, api, BentoService
-from bentoml.handlers import DataframeHandler
-from bentoml.artifact import SklearnModelArtifact
-
-@env(auto_pip_dependencies=True)
-@artifacts([SklearnModelArtifact('model')])
-class IrisClassifier(BentoService):
-
-    @api(DataframeHandler)
-    def predict(self, df):
-        return self.artifacts.model.predict(df)
+import numpy as np
+import bentoml
+from pydantic import Field
+from bentoml.validators import Shape
+from typing_extensions import Annotated
+import joblib
+
+
+@bentoml.service
+class IrisClassifier:
+    iris_model = bentoml.models.get("iris_sklearn:latest")
+
+    def __init__(self):
+        self.model = joblib.load(self.iris_model.path_of("model.pkl"))
+
+    @bentoml.api
+    def predict(
+        self,
+        df: Annotated[np.ndarray, Shape((-1, 4))] = Field(
+            default=[[5.2, 2.3, 5.0, 0.7]]
+        ),
+    ) -> np.ndarray:
+        return self.model.predict(df)
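
A minimal client-side sketch for calling this service, assuming it is being served locally on the default port 3000 and that BentoML's `SyncHTTPClient` (BentoML 1.2+) is available:

```python
# Sketch: call the IrisClassifier service over HTTP.
# Assumes `bentoml serve` is running locally on the default port 3000.
import numpy as np
import bentoml

client = bentoml.SyncHTTPClient("http://localhost:3000")
result = client.predict(df=np.array([[5.2, 2.3, 5.0, 0.7]]))
print(result)
client.close()
```
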


@@ -1,7 +1,8 @@
+import joblib
 from sklearn import svm
 from sklearn import datasets
-from iris_classifier import IrisClassifier
+import bentoml

 if __name__ == "__main__":
     # Load training data
@@ -12,11 +13,6 @@ if __name__ == "__main__":
     clf = svm.SVC(gamma='scale')
     clf.fit(X, y)

-    # Create a iris classifier service instance
-    iris_classifier_service = IrisClassifier()
-    # Pack the newly trained model artifact
-    iris_classifier_service.pack('model', clf)
-    # Save the prediction service to disk for model serving
-    saved_path = iris_classifier_service.save()
+    with bentoml.models.create("iris_classifier") as bento_model:
+        joblib.dump(clf, bento_model.path_of("model.pkl"))
+        print(f"Model saved: {bento_model}")
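
A short sketch for verifying the saved model outside the service, assuming `main.py` has been run and the model was stored under the `iris_classifier` tag it creates:

```python
# Sketch: load the model saved by main.py back from the local BentoML store.
# Uses the same tag ("iris_classifier") that main.py creates.
import joblib
import bentoml

bento_model = bentoml.models.get("iris_classifier:latest")
clf = joblib.load(bento_model.path_of("model.pkl"))
print(bento_model.tag)
print(clf.predict([[5.2, 2.3, 5.0, 0.7]]))
```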