diff --git a/examples/keras/preprocess.py b/examples/keras/preprocess.py index 1738ea8..8d69acf 100644 --- a/examples/keras/preprocess.py +++ b/examples/keras/preprocess.py @@ -21,7 +21,8 @@ class Preprocess(object): try: image = Image.open(io.BytesIO(body)).convert("RGB") except Exception: - raise ValueError("Image could not be decoded") + # a ValueError would be returned as a 404; we want a 500, so raise a different exception type + raise RuntimeError("Image could not be decoded") if isinstance(body, dict) and "url" in body.keys(): # image is given as url, and is fetched diff --git a/examples/keras/readme.md b/examples/keras/readme.md index 4cc03ee..429207f 100644 --- a/examples/keras/readme.md +++ b/examples/keras/readme.md @@ -32,7 +32,11 @@ Or add Canary endpoint 3. Make sure you have the `clearml-serving` `docker-compose-triton.yml` (or `docker-compose-triton-gpu.yml`) running, it might take it a minute or two to sync with the new endpoint. -4. Test new endpoint (do notice the first call will trigger the model pulling, so it might take longer, from here on, it's all in memory): `curl -X POST "http://127.0.0.1:8080/serve/test_model_keras" -H "accept: application/json" -H "Content-Type: application/json" -d '{"url": "https://camo.githubusercontent.com/8385ca52c9cba1f6e629eb938ab725ec8c9449f12db81f9a34e18208cd328ce9/687474703a2f2f706574722d6d6172656b2e636f6d2f77702d636f6e74656e742f75706c6f6164732f323031372f30372f6465636f6d707265737365642e6a7067"}'` +4. 
Test new endpoint (do notice the first call will trigger the model pulling, so it might take longer, from here on, it's all in memory): \ + `curl -X POST "http://127.0.0.1:8080/serve/test_model_keras" -H "accept: application/json" -H "Content-Type: application/json" -d '{"url": "https://camo.githubusercontent.com/8385ca52c9cba1f6e629eb938ab725ec8c9449f12db81f9a34e18208cd328ce9/687474703a2f2f706574722d6d6172656b2e636f6d2f77702d636f6e74656e742f75706c6f6164732f323031372f30372f6465636f6d707265737365642e6a7067"}'` + \ + or send a local file to be classified with \ + `curl -X POST "http://127.0.0.1:8080/serve/test_model_keras" -H "Content-Type: image/jpeg" --data-binary "@5.jpg"` > **_Notice:_** You can also change the serving service while it is already running! This includes adding/removing endpoints, adding canary model routing etc. diff --git a/examples/pytorch/preprocess.py b/examples/pytorch/preprocess.py index 1738ea8..8d69acf 100644 --- a/examples/pytorch/preprocess.py +++ b/examples/pytorch/preprocess.py @@ -21,7 +21,8 @@ class Preprocess(object): try: image = Image.open(io.BytesIO(body)).convert("RGB") except Exception: - raise ValueError("Image could not be decoded") + # a ValueError would be returned as a 404; we want a 500, so raise a different exception type + raise RuntimeError("Image could not be decoded") if isinstance(body, dict) and "url" in body.keys(): # image is given as url, and is fetched diff --git a/examples/pytorch/readme.md b/examples/pytorch/readme.md index 3ddfde3..d341b50 100644 --- a/examples/pytorch/readme.md +++ b/examples/pytorch/readme.md @@ -36,7 +36,10 @@ Or add Canary endpoint 3. Make sure you have the `clearml-serving` `docker-compose-triton.yml` (or `docker-compose-triton-gpu.yml`) running, it might take it a minute or two to sync with the new endpoint. -4. 
Test new endpoint (do notice the first call will trigger the model pulling, so it might take longer, from here on, it's all in memory): `curl -X POST "http://127.0.0.1:8080/serve/test_model_pytorch" -H "accept: application/json" -H "Content-Type: application/json" -d '{"url": "https://camo.githubusercontent.com/8385ca52c9cba1f6e629eb938ab725ec8c9449f12db81f9a34e18208cd328ce9/687474703a2f2f706574722d6d6172656b2e636f6d2f77702d636f6e74656e742f75706c6f6164732f323031372f30372f6465636f6d707265737365642e6a7067"}'` +4. Test new endpoint (do notice the first call will trigger the model pulling, so it might take longer, from here on, it's all in memory): \ + `curl -X POST "http://127.0.0.1:8080/serve/test_model_pytorch" -H "accept: application/json" -H "Content-Type: application/json" -d '{"url": "https://camo.githubusercontent.com/8385ca52c9cba1f6e629eb938ab725ec8c9449f12db81f9a34e18208cd328ce9/687474703a2f2f706574722d6d6172656b2e636f6d2f77702d636f6e74656e742f75706c6f6164732f323031372f30372f6465636f6d707265737365642e6a7067"}'` \ + or send a local file to be classified with \ + `curl -X POST "http://127.0.0.1:8080/serve/test_model_pytorch" -H "Content-Type: image/jpeg" --data-binary "@5.jpg"` > **_Notice:_** You can also change the serving service while it is already running! This includes adding/removing endpoints, adding canary model routing etc.