Mirror of https://github.com/clearml/clearml-serving (synced 2025-01-31 02:46:54 +00:00)

Commit 2d3ac1fe63: fix model name
Parent: eaa2b8a9e8
@@ -17,12 +17,12 @@ Prerequisites, Keras/Tensorflow models require Triton engine support, please use
 1. Create serving Service: `clearml-serving create --name "serving example"` (write down the service ID)
 2. Create model endpoint:
 
-`clearml-serving --id <service_id> model add --engine triton --endpoint "test_model_keras" --preprocess "examples/keras/preprocess.py" --name "train keras model" --project "serving examples" --input-size 1 784 --input-name "dense_input" --input-type float32 --output-size -1 10 --output-name "activation_2" --output-type float32
+`clearml-serving --id <service_id> model add --engine triton --endpoint "test_model_keras" --preprocess "examples/keras/preprocess.py" --name "train keras model - serving_model" --project "serving examples" --input-size 1 784 --input-name "dense_input" --input-type float32 --output-size -1 10 --output-name "activation_2" --output-type float32
 `
 
 Or auto update
 
-`clearml-serving --id <service_id> model auto-update --engine triton --endpoint "test_model_auto" --preprocess "examples/keras/preprocess.py" --name "train keras model" --project "serving examples" --max-versions 2
+`clearml-serving --id <service_id> model auto-update --engine triton --endpoint "test_model_auto" --preprocess "examples/keras/preprocess.py" --name "train keras model - serving_model" --project "serving examples" --max-versions 2
 --input-size 1 784 --input-name "dense_input" --input-type float32
 --output-size -1 10 --output-name "activation_2" --output-type float32`
 
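Once the Triton endpoint above is registered and the serving containers are running, it can be exercised over HTTP. Below is a minimal sketch, assuming the default docker-compose deployment exposes the inference API on 127.0.0.1:8080 under /serve/<endpoint>; the request payload is only illustrative, since the fields the endpoint accepts are defined by examples/keras/preprocess.py.

```python
# Illustrative request against the registered Keras/Triton endpoint.
# Assumes the clearml-serving inference container is reachable on
# 127.0.0.1:8080; the "image" field and the 784 dummy pixel values are
# placeholders - adjust to whatever examples/keras/preprocess.py parses.
import requests

payload = {"image": [0.0] * 784}  # hypothetical field name, dummy 28x28 image
response = requests.post(
    "http://127.0.0.1:8080/serve/test_model_keras",
    json=payload,
    timeout=10,
)
response.raise_for_status()
print(response.json())  # expected: 10 class scores from "activation_2"
```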
@@ -16,11 +16,11 @@ The output will be a model created on the project "serving examples", by the nam
 
 2. Create model endpoint:
 
-`clearml-serving --id <service_id> model add --engine lightgbm --endpoint "test_model_lgbm" --preprocess "examples/lightgbm/preprocess.py" --name "train lightgbm model" --project "serving examples"`
+`clearml-serving --id <service_id> model add --engine lightgbm --endpoint "test_model_lgbm" --preprocess "examples/lightgbm/preprocess.py" --name "train lightgbm model - lgbm_model" --project "serving examples"`
 
 Or auto-update
 
-`clearml-serving --id <service_id> model auto-update --engine lightgbm --endpoint "test_model_auto" --preprocess "examples/lightgbm/preprocess.py" --name "train lightgbm model" --project "serving examples" --max-versions 2`
+`clearml-serving --id <service_id> model auto-update --engine lightgbm --endpoint "test_model_auto" --preprocess "examples/lightgbm/preprocess.py" --name "train lightgbm model - lgbm_model" --project "serving examples" --max-versions 2`
 
 Or add Canary endpoint
 
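The `--name` value in these commands is how the serving service looks up the registered model, which is exactly what this commit corrects. A quick way to check that the corrected name resolves to a model is to query it with the clearml SDK; a small sketch follows, assuming the clearml package is installed and configured against the same server.

```python
# Sanity check: confirm the model name passed to `model add` / `auto-update`
# exists in the "serving examples" project. Assumes `pip install clearml`
# and a clearml.conf pointing at the same ClearML server.
from clearml import Model

matches = Model.query_models(
    project_name="serving examples",
    model_name="train lightgbm model - lgbm_model",
)
for model in matches:
    print(model.id, model.name)
if not matches:
    print("No model found - re-check the --name argument")
```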
@@ -14,11 +14,11 @@ The output will be a model created on the project "serving examples", by the nam
 
 1. Create serving Service: `clearml-serving create --name "serving example"` (write down the service ID)
 2. Create model endpoint:
-`clearml-serving --id <service_id> model add --engine sklearn --endpoint "test_model_sklearn" --preprocess "examples/sklearn/preprocess.py" --name "train sklearn model" --project "serving examples"`
+`clearml-serving --id <service_id> model add --engine sklearn --endpoint "test_model_sklearn" --preprocess "examples/sklearn/preprocess.py" --name "train sklearn model - sklearn-model" --project "serving examples"`
 
 Or auto update
 
-`clearml-serving --id <service_id> model auto-update --engine sklearn --endpoint "test_model_sklearn_auto" --preprocess "examples/sklearn/preprocess.py" --name "train sklearn model" --project "serving examples" --max-versions 2`
+`clearml-serving --id <service_id> model auto-update --engine sklearn --endpoint "test_model_sklearn_auto" --preprocess "examples/sklearn/preprocess.py" --name "train sklearn model - sklearn-model" --project "serving examples" --max-versions 2`
 
 Or add Canary endpoint
 
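After the sklearn endpoint is registered, a single-row inference call is enough to verify the wiring end to end. The sketch below assumes the default deployment address; the feature field names (x0, x1) are assumptions and should match whatever examples/sklearn/preprocess.py actually reads from the request body.

```python
# Illustrative call to the registered sklearn endpoint. The address assumes
# the default docker-compose setup; the feature names are placeholders and
# must match what examples/sklearn/preprocess.py expects.
import requests

response = requests.post(
    "http://127.0.0.1:8080/serve/test_model_sklearn",
    json={"x0": 1, "x1": 2},
    timeout=10,
)
response.raise_for_status()
print(response.json())  # shape of the reply depends on the example's postprocessing
```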
@@ -15,11 +15,11 @@ The output will be a model created on the project "serving examples", by the nam
 1. Create serving Service: `clearml-serving create --name "serving example"` (write down the service ID)
 2. Create model endpoint:
 
-3. `clearml-serving --id <service_id> model add --engine xgboost --endpoint "test_model_xgb" --preprocess "examples/xgboost/preprocess.py" --name "train xgboost model" --project "serving examples"`
+3. `clearml-serving --id <service_id> model add --engine xgboost --endpoint "test_model_xgb" --preprocess "examples/xgboost/preprocess.py" --name "train xgboost model - xgb_model" --project "serving examples"`
 
 Or auto update
 
-`clearml-serving --id <service_id> model auto-update --engine xgboost --endpoint "test_model_xgb_auto" --preprocess "examples/xgboost/preprocess.py" --name "train xgboost model" --project "serving examples" --max-versions 2`
+`clearml-serving --id <service_id> model auto-update --engine xgboost --endpoint "test_model_xgb_auto" --preprocess "examples/xgboost/preprocess.py" --name "train xgboost model - xgb_model" --project "serving examples" --max-versions 2`
 
 Or add Canary endpoint
 
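All four endpoints in this commit reference a preprocess.py next to the example; that script is where the JSON request body is turned into model input and the raw prediction is turned back into a response. The rough shape of such a script is sketched below; the class name, method signatures, and field names follow the general clearml-serving preprocessing convention but are written as assumptions here, so treat the actual examples/*/preprocess.py files in the repo as the reference.

```python
# Rough sketch of a clearml-serving preprocessing script. Method names and
# signatures are assumptions based on the general Preprocess-class convention;
# the real examples/*/preprocess.py files are the authoritative versions.
from typing import Any


class Preprocess(object):
    def __init__(self):
        # Called once when the endpoint is loaded; keep it lightweight.
        pass

    def preprocess(self, body: dict, *args, **kwargs) -> Any:
        # Map the incoming JSON body to the array the model expects.
        # Field names here ("x0", "x1") are purely illustrative.
        return [[body.get("x0"), body.get("x1")]]

    def postprocess(self, data: Any, *args, **kwargs) -> dict:
        # Wrap the raw model output in the JSON returned to the caller.
        return {"y": data.tolist() if hasattr(data, "tolist") else data}
```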