Mirror of https://github.com/open-webui/open-webui (synced 2025-04-16 13:39:39 +00:00)
commit 95985e7bbb
parent 3a0a1aca11

    refac
@@ -1056,7 +1056,18 @@ async def generate_chat_completions(form_data: dict, user=Depends(get_verified_u
     if model.get("pipe"):
         return await generate_function_chat_completion(form_data, user=user)
     if model["owned_by"] == "ollama":
-        return await generate_ollama_openai_chat_completion(form_data, user=user)
+        # Using /ollama/api/chat endpoint
+        form_data = convert_payload_openai_to_ollama(form_data)
+        form_data = GenerateChatCompletionForm(**form_data)
+        response = await generate_ollama_chat_completion(form_data=form_data, user=user)
+        if form_data.stream:
+            response.headers["content-type"] = "text/event-stream"
+            return StreamingResponse(
+                convert_streaming_response_ollama_to_openai(response),
+                headers=dict(response.headers),
+            )
+        else:
+            return convert_response_ollama_to_openai(response)
     else:
         return await generate_openai_chat_completion(form_data, user=user)
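The new branch routes OpenAI-style requests through Ollama's /ollama/api/chat endpoint via convert_payload_openai_to_ollama. As a rough illustration of what that conversion involves, here is a minimal sketch, assuming Ollama's /api/chat schema groups sampling parameters under an "options" dict; the function body and exact field mapping are assumptions for illustration, not the project's actual implementation:

# Hypothetical sketch of an OpenAI -> Ollama payload conversion; the field
# mapping below is an assumption for illustration, not the project's code.
def convert_payload_openai_to_ollama_sketch(openai_payload: dict) -> dict:
    ollama_payload = {
        "model": openai_payload["model"],
        "messages": openai_payload.get("messages", []),
        "stream": openai_payload.get("stream", False),
    }

    # OpenAI puts sampling parameters at the top level; Ollama's /api/chat
    # groups them under "options" (num_predict plays the role of max_tokens).
    options = {}
    if "temperature" in openai_payload:
        options["temperature"] = openai_payload["temperature"]
    if "top_p" in openai_payload:
        options["top_p"] = openai_payload["top_p"]
    if "max_tokens" in openai_payload:
        options["num_predict"] = openai_payload["max_tokens"]
    if options:
        ollama_payload["options"] = options

    return ollama_payload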
@@ -1468,20 +1479,6 @@ Prompt: {{prompt:middletruncate:8000}}"""
     if "chat_id" in payload:
         del payload["chat_id"]
 
-    # Check if task model is ollama model
-    if model["owned_by"] == "ollama":
-        payload = convert_payload_openai_to_ollama(payload)
-        form_data = GenerateChatCompletionForm(**payload)
-        response = await generate_ollama_chat_completion(form_data=form_data, user=user)
-        if form_data.stream:
-            response.headers["content-type"] = "text/event-stream"
-            return StreamingResponse(
-                convert_streaming_response_ollama_to_openai(response),
-                headers=dict(response.headers),
-            )
-        else:
-            return convert_response_ollama_to_openai(response)
-    else:
-        return await generate_chat_completions(form_data=payload, user=user)
+    return await generate_chat_completions(form_data=payload, user=user)
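This is the first of four nearly identical removals: each task endpoint previously duplicated the Ollama branch and now simply delegates to generate_chat_completions. That centralized path depends on convert_streaming_response_ollama_to_openai re-wrapping Ollama's newline-delimited JSON stream as OpenAI-style SSE events. A hedged sketch of that shape, assuming the Ollama response is a Starlette StreamingResponse whose body_iterator yields JSON lines carrying message.content and a done flag (a simplified stand-in, not the actual helper):

import json
import time

# Hypothetical sketch: re-wrap Ollama's newline-delimited JSON stream as
# OpenAI-style SSE chunks. Chunk field names (message.content, done) follow
# Ollama's documented /api/chat stream format; the real helper may differ.
async def stream_ollama_as_openai(response):
    async for line in response.body_iterator:
        if not line.strip():
            continue
        data = json.loads(line)
        openai_chunk = {
            "id": "chatcmpl-sketch",  # placeholder id
            "object": "chat.completion.chunk",
            "created": int(time.time()),
            "model": data.get("model", ""),
            "choices": [
                {
                    "index": 0,
                    "delta": {
                        "content": data.get("message", {}).get("content", "")
                    },
                    "finish_reason": "stop" if data.get("done") else None,
                }
            ],
        }
        yield f"data: {json.dumps(openai_chunk)}\n\n"
    yield "data: [DONE]\n\n"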
@@ -1559,20 +1556,6 @@ Search Query:"""
     if "chat_id" in payload:
         del payload["chat_id"]
 
-    # Check if task model is ollama model
-    if model["owned_by"] == "ollama":
-        payload = convert_payload_openai_to_ollama(payload)
-        form_data = GenerateChatCompletionForm(**payload)
-        response = await generate_ollama_chat_completion(form_data=form_data, user=user)
-        if form_data.stream:
-            response.headers["content-type"] = "text/event-stream"
-            return StreamingResponse(
-                convert_streaming_response_ollama_to_openai(response),
-                headers=dict(response.headers),
-            )
-        else:
-            return convert_response_ollama_to_openai(response)
-    else:
-        return await generate_chat_completions(form_data=payload, user=user)
+    return await generate_chat_completions(form_data=payload, user=user)
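The non-streaming half of the same mapping is convert_response_ollama_to_openai. A sketch under the same assumptions about Ollama's /api/chat response shape; the placeholder id and defaults are illustrative:

import time

# Hypothetical sketch of the non-streaming conversion; Ollama field names
# (message.content) are assumed from its /api/chat response format.
def ollama_response_as_openai(ollama_response: dict) -> dict:
    return {
        "id": "chatcmpl-sketch",  # placeholder id
        "object": "chat.completion",
        "created": int(time.time()),
        "model": ollama_response.get("model", ""),
        "choices": [
            {
                "index": 0,
                "message": {
                    "role": "assistant",
                    "content": ollama_response.get("message", {}).get("content", ""),
                },
                "finish_reason": "stop",
            }
        ],
    }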
@@ -1641,20 +1624,6 @@ Message: """{{prompt}}"""
     if "chat_id" in payload:
         del payload["chat_id"]
 
-    # Check if task model is ollama model
-    if model["owned_by"] == "ollama":
-        payload = convert_payload_openai_to_ollama(payload)
-        form_data = GenerateChatCompletionForm(**payload)
-        response = await generate_ollama_chat_completion(form_data=form_data, user=user)
-        if form_data.stream:
-            response.headers["content-type"] = "text/event-stream"
-            return StreamingResponse(
-                convert_streaming_response_ollama_to_openai(response),
-                headers=dict(response.headers),
-            )
-        else:
-            return convert_response_ollama_to_openai(response)
-    else:
-        return await generate_chat_completions(form_data=payload, user=user)
+    return await generate_chat_completions(form_data=payload, user=user)
@@ -1713,20 +1682,6 @@ Responses from models: {{responses}}"""
     if "chat_id" in payload:
         del payload["chat_id"]
 
-    # Check if task model is ollama model
-    if model["owned_by"] == "ollama":
-        payload = convert_payload_openai_to_ollama(payload)
-        form_data = GenerateChatCompletionForm(**payload)
-        response = await generate_ollama_chat_completion(form_data=form_data, user=user)
-        if form_data.stream:
-            response.headers["content-type"] = "text/event-stream"
-            return StreamingResponse(
-                convert_streaming_response_ollama_to_openai(response),
-                headers=dict(response.headers),
-            )
-        else:
-            return convert_response_ollama_to_openai(response)
-    else:
-        return await generate_chat_completions(form_data=payload, user=user)
+    return await generate_chat_completions(form_data=payload, user=user)
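With all four removals in place, every task endpoint reduces to the same delegation, roughly as follows; the function name is illustrative, and generate_chat_completions is the real dispatcher shown in the first hunk:

# Illustrative shape of a task endpoint after this refactor: build the
# payload, drop chat_id, and delegate. generate_chat_completions now owns
# the Ollama-vs-OpenAI split, so no per-endpoint branching remains.
async def generate_task_completion_sketch(payload: dict, user):
    if "chat_id" in payload:
        del payload["chat_id"]
    return await generate_chat_completions(form_data=payload, user=user)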