mirror of
https://github.com/open-webui/open-webui
synced 2025-06-26 18:26:48 +00:00
chore: format
This commit is contained in:
@@ -341,9 +341,7 @@ def convert_embedding_payload_openai_to_ollama(openai_payload: dict) -> dict:
|
||||
Returns:
|
||||
dict: A payload compatible with the Ollama API embeddings endpoint.
|
||||
"""
|
||||
ollama_payload = {
|
||||
"model": openai_payload.get("model")
|
||||
}
|
||||
ollama_payload = {"model": openai_payload.get("model")}
|
||||
input_value = openai_payload.get("input")
|
||||
|
||||
# Ollama expects 'input' as a list, and 'prompt' as a single string.
|
||||
|
||||
@@ -126,12 +126,13 @@ async def convert_streaming_response_ollama_to_openai(ollama_streaming_response)
|
||||
|
||||
yield "data: [DONE]\n\n"
|
||||
|
||||
|
||||
def convert_embedding_response_ollama_to_openai(response) -> dict:
    """
    Convert the response from Ollama embeddings endpoint to the OpenAI-compatible format.

    Args:
        response (dict): The response from the Ollama API,
            e.g. {"embedding": [...], "model": "..."}
            or {"embeddings": [{"embedding": [...], "index": 0}, ...], "model": "..."}

    Returns:
        dict: An OpenAI-compatible embeddings response
            {"object": "list", "data": [...], "model": "..."},
            or the input unchanged when it is already compatible / unrecognized.
    """
    # Batch form: Ollama's /api/embed returns a list under "embeddings".
    if isinstance(response, dict) and "embeddings" in response:
        openai_data = []
        for i, emb in enumerate(response["embeddings"]):
            openai_data.append(
                {
                    "object": "embedding",
                    "embedding": emb.get("embedding"),
                    # Fall back to the enumeration position when Ollama
                    # omits an explicit index for the item.
                    "index": emb.get("index", i),
                }
            )
        return {
            "object": "list",
            "data": openai_data,
            # NOTE(review): this line sits in a hunk gap of the diff this was
            # recovered from; inferred from the parallel branch below — confirm.
            "model": response.get("model"),
        }
    # Single-embedding form: legacy /api/embeddings returns "embedding".
    elif isinstance(response, dict) and "embedding" in response:
        return {
            "object": "list",
            "data": [
                {
                    "object": "embedding",
                    "embedding": response["embedding"],
                    "index": 0,
                }
            ],
            "model": response.get("model"),
        }
    # Already OpenAI-compatible?
    elif (
        isinstance(response, dict)
        and "data" in response
        and isinstance(response["data"], list)
    ):
        return response

    # Fallback: return as is if unrecognized
    return response
|
||||
|
||||
Reference in New Issue
Block a user