This commit is contained in:
Timothy Jaeryang Baek
2024-12-12 23:31:08 -08:00
parent 9a081c8593
commit f9a05dd1e1
6 changed files with 190 additions and 144 deletions

View File

@@ -21,8 +21,63 @@ async def convert_streaming_response_ollama_to_openai(ollama_streaming_response)
# --- Diff excerpt from inside `convert_streaming_response_ollama_to_openai` ---
# NOTE(review): this is a commit-diff view, not runnable source — indentation
# has been stripped and the enclosing `async def` starts above this excerpt.
# Pull each streamed Ollama chunk's text and completion flag.
message_content = data.get("message", {}).get("content", "")
done = data.get("done", False)
# `usage` stays None for intermediate chunks; only the final ("done") chunk
# carries Ollama's timing/token statistics.
usage = None
if done:
usage = {
# Tokens generated per second. Ollama reports *_duration in
# nanoseconds, so count / (ns / 1e9) is already tokens/sec.
# NOTE(review): the extra `* 100` below looks like a 100x inflation
# bug, not a unit conversion — TODO confirm against upstream fix.
"response_token/s": (
round(
(
(
data.get("eval_count", 0)
/ ((data.get("eval_duration", 0) / 1_000_000_000))
)
* 100
),
2,
)
# Guard against division by zero; "N/A" when no duration reported.
if data.get("eval_duration", 0) > 0
else "N/A"
),
# Same computation for the prompt-evaluation phase.
# NOTE(review): same questionable `* 100` factor as above.
"prompt_token/s": (
round(
(
(
data.get("prompt_eval_count", 0)
/ (
(
data.get("prompt_eval_duration", 0)
/ 1_000_000_000
)
)
)
* 100
),
2,
)
if data.get("prompt_eval_duration", 0) > 0
else "N/A"
),
# Durations: / 1_000_000 converts nanoseconds to milliseconds.
# NOTE(review): the trailing `* 100` on each duration again looks
# like an accidental 100x scale-up — verify intended units.
"total_duration": round(
((data.get("total_duration", 0) / 1_000_000) * 100), 2
),
"load_duration": round(
((data.get("load_duration", 0) / 1_000_000) * 100), 2
),
# Raw token counts are passed through unscaled.
"prompt_eval_count": data.get("prompt_eval_count", 0),
"prompt_eval_duration": round(
((data.get("prompt_eval_duration", 0) / 1_000_000) * 100), 2
),
"eval_count": data.get("eval_count", 0),
"eval_duration": round(
((data.get("eval_duration", 0) / 1_000_000) * 100), 2
),
# Human-readable total: ns -> whole seconds, formatted as XhYmZs.
# (`or 0` also coerces a None total_duration to 0.)
"approximate_total": (
lambda s: f"{s // 3600}h{(s % 3600) // 60}m{s % 60}s"
)((data.get("total_duration", 0) or 0) // 1_000_000_000),
}
# Build the OpenAI-format chunk; content is None on the final chunk.
# NOTE(review): the next two lines are diff residue — the first is the
# pre-change call, the second the post-change call that threads `usage`
# through to the template. Only the second should survive in source.
data = openai_chat_chunk_message_template(
model, message_content if not done else None
model, message_content if not done else None, usage
)
# Emit as a server-sent-events data line.
line = f"data: {json.dumps(data)}\n\n"