diff --git a/main.py b/main.py
index dbc34cf..b239dec 100644
--- a/main.py
+++ b/main.py
@@ -135,13 +135,13 @@ async def generate_openai_chat_completion(form_data: OpenAIChatCompletionForm):
         print(res)
 
         if isinstance(res, str):
-            message = stream_message_template(res)
+            message = stream_message_template(form_data.model, res)
             yield f"data: {json.dumps(message)}\n\n"
 
         elif isinstance(res, Generator):
             for message in res:
                 print(message)
-                message = stream_message_template(message)
+                message = stream_message_template(form_data.model, message)
                 yield f"data: {json.dumps(message)}\n\n"
 
         finish_message = {
diff --git a/utils.py b/utils.py
index 67af85c..8ab0c4d 100644
--- a/utils.py
+++ b/utils.py
@@ -3,15 +3,14 @@ import time
 from typing import List
 from schemas import OpenAIChatMessage
 
-from config import MODEL_ID
 
 
-def stream_message_template(message: str):
+def stream_message_template(model: str, message: str):
     return {
         "id": f"rag-{str(uuid.uuid4())}",
        "object": "chat.completion.chunk",
         "created": int(time.time()),
-        "model": MODEL_ID,
+        "model": model,
         "choices": [
             {
                 "index": 0,
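For context, the diff replaces the global `MODEL_ID` import with a `model` parameter that the endpoint passes through from `form_data.model`. Below is a minimal sketch of what the reworked helper and its call site look like after the change; the `delta`/`finish_reason` fields and the commented usage line are assumptions for illustration, since the hunk above ends at `"index": 0`.

```python
import time
import uuid


def stream_message_template(model: str, message: str):
    # Build one OpenAI-style streaming chunk. The model id now comes from the
    # caller (form_data.model) rather than the removed MODEL_ID config constant.
    return {
        "id": f"rag-{str(uuid.uuid4())}",
        "object": "chat.completion.chunk",
        "created": int(time.time()),
        "model": model,
        "choices": [
            {
                "index": 0,
                # Assumed payload shape: the diff stops at "index", so the exact
                # delta/finish_reason layout is not shown in the hunk above.
                "delta": {"content": message},
                "finish_reason": None,
            }
        ],
    }


# Usage sketch mirroring the main.py side of the diff: the handler threads the
# requested model through to the template instead of relying on a global.
# chunk = stream_message_template(form_data.model, "partial text")
```

Passing the model through explicitly keeps the helper stateless, so one server instance can stream chunks for whichever model each request names instead of being pinned to a single configured `MODEL_ID`.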