From 25e294059650c01e8c0aa56829407b6f812397fb Mon Sep 17 00:00:00 2001
From: IlyaMescheryakov1402
Date: Tue, 11 Mar 2025 22:44:32 +0300
Subject: [PATCH] fix jsonresponse

---
 clearml_serving/serving/preprocess_service.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/clearml_serving/serving/preprocess_service.py b/clearml_serving/serving/preprocess_service.py
index ed9f270..b236833 100644
--- a/clearml_serving/serving/preprocess_service.py
+++ b/clearml_serving/serving/preprocess_service.py
@@ -818,7 +818,7 @@ class VllmEngine(Singleton):
     ) -> Any:
         request, raw_request = data["request"], data["raw_request"]
         models_ = await self.openai_serving_models.show_available_models()
-        return JSONResponse(content=models_.model_dump())
+        return self._fastapi["json_response"](content=models_.model_dump())


 @BasePreprocessRequest.register_engine("vllm", modules=["vllm", "fastapi"])
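
The patched line stops referencing `JSONResponse` as a module-level name and instead looks it up in the engine's `self._fastapi` dict, which holds handles to the lazily loaded FastAPI objects declared via `register_engine(..., modules=["vllm", "fastapi"])`. Below is a minimal sketch of that pattern; the class and method names (`VllmEngineSketch`, `_load_fastapi`, `list_models`) are hypothetical and only illustrate the idea, while the real wiring lives in `preprocess_service.py`.

```python
# Minimal sketch (assumption, not the project's code) of the lazy-import pattern
# that self._fastapi["json_response"] relies on: FastAPI is imported only when the
# engine's registered modules are loaded, so no module-level JSONResponse is needed.

class VllmEngineSketch:
    _fastapi = {"json_response": None}  # filled in once the "fastapi" module is loaded

    @classmethod
    def _load_fastapi(cls):
        # hypothetical loader; clearml-serving wires this through register_engine(...)
        from fastapi.responses import JSONResponse
        cls._fastapi["json_response"] = JSONResponse

    async def list_models(self, models_):
        # mirrors the patched line: call the stored handle instead of a global name
        return self._fastapi["json_response"](content=models_.model_dump())
```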