From be06b61e831f77ae4b3767bddf308f2e8dee332d Mon Sep 17 00:00:00 2001
From: "Timothy J. Baek" <timothyjrbeck@gmail.com>
Date: Tue, 21 May 2024 18:49:34 -0700
Subject: [PATCH] fix

---
 main.py  | 4 ++--
 utils.py | 5 ++---
 2 files changed, 4 insertions(+), 5 deletions(-)

diff --git a/main.py b/main.py
index dbc34cf..b239dec 100644
--- a/main.py
+++ b/main.py
@@ -135,13 +135,13 @@ async def generate_openai_chat_completion(form_data: OpenAIChatCompletionForm):
             print(res)

             if isinstance(res, str):
-                message = stream_message_template(res)
+                message = stream_message_template(form_data.model, res)
                 yield f"data: {json.dumps(message)}\n\n"

             elif isinstance(res, Generator):
                 for message in res:
                     print(message)
-                    message = stream_message_template(message)
+                    message = stream_message_template(form_data.model, message)
                     yield f"data: {json.dumps(message)}\n\n"

             finish_message = {
diff --git a/utils.py b/utils.py
index 67af85c..8ab0c4d 100644
--- a/utils.py
+++ b/utils.py
@@ -3,15 +3,14 @@ import time
 from typing import List

 from schemas import OpenAIChatMessage
-from config import MODEL_ID


-def stream_message_template(message: str):
+def stream_message_template(model: str, message: str):
     return {
         "id": f"rag-{str(uuid.uuid4())}",
         "object": "chat.completion.chunk",
         "created": int(time.time()),
-        "model": MODEL_ID,
+        "model": model,
         "choices": [
             {
                 "index": 0,
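
For reference, a minimal sketch of the call pattern after this patch: stream_message_template now takes the model name from the request (form_data.model) instead of the global MODEL_ID. The "delta" shape inside "choices" is an assumption based on the standard OpenAI chat.completion.chunk format, since the hunk is truncated after "index": 0, and the emit_chunks helper is hypothetical, mirroring only what the main.py hunk shows.

import json
import time
import uuid
from collections.abc import Generator


def stream_message_template(model: str, message: str) -> dict:
    # Chunk envelope as in the patched utils.py; the "delta" content below is
    # assumed from the usual OpenAI streaming format (not shown in the hunk).
    return {
        "id": f"rag-{str(uuid.uuid4())}",
        "object": "chat.completion.chunk",
        "created": int(time.time()),
        "model": model,  # per-request model, no longer the global MODEL_ID
        "choices": [
            {"index": 0, "delta": {"content": message}, "finish_reason": None}
        ],
    }


def emit_chunks(model: str, res) -> Generator[str, None, None]:
    # Hypothetical helper mirroring the main.py hunk: every streamed chunk now
    # carries the model the client requested.
    if isinstance(res, str):
        yield f"data: {json.dumps(stream_message_template(model, res))}\n\n"
    elif isinstance(res, Generator):
        for message in res:
            yield f"data: {json.dumps(stream_message_template(model, message))}\n\n"

A caller such as the patched generate_openai_chat_completion would iterate emit_chunks(form_data.model, res), so each SSE chunk reports the model actually requested rather than a single configured MODEL_ID.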