Mirror of https://github.com/open-webui/open-webui (synced 2025-06-23 02:16:52 +00:00)

feat: better handle openai errors, add error message to message

commit 3113825895 (parent 9a95767062)
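
At a glance, the change does two things: the OpenAI streaming helper now forwards an error field on its update objects when the parsed SSE payload contains one, and the chat pages wrap the completion call in try/catch and route every failure through a new handleOpenAIError helper that toasts the error and stores it in the response message. A minimal TypeScript sketch of the extended update shape, as implied by the first hunk below (the done field and field order are assumptions, not a copy of the file):

// Sketch of the shape yielded by createOpenAITextStream after this commit.
type TextStreamUpdate = {
	done: boolean; // assumed from the yields in the hunks below
	value: string;
	// eslint-disable-next-line @typescript-eslint/no-explicit-any
	citations?: any;
	// New: error payload forwarded from the parsed SSE chunk, if any.
	// eslint-disable-next-line @typescript-eslint/no-explicit-any
	error?: any;
};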
@@ -6,6 +6,8 @@ type TextStreamUpdate = {
 	value: string;
 	// eslint-disable-next-line @typescript-eslint/no-explicit-any
 	citations?: any;
+	// eslint-disable-next-line @typescript-eslint/no-explicit-any
+	error?: any;
 };
 
 // createOpenAITextStream takes a responseBody with a SSE response,
@@ -47,6 +49,11 @@ async function* openAIStreamToIterator(
 			const parsedData = JSON.parse(data);
 			console.log(parsedData);
 
+			if (parsedData.error) {
+				yield { done: true, value: '', error: parsedData.error };
+				break;
+			}
+
 			if (parsedData.citations) {
 				yield { done: false, value: '', citations: parsedData.citations };
 				continue;
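
To illustrate the new branch in openAIStreamToIterator: when a parsed data: frame carries an error field, the generator yields it once as a terminal update and stops reading, instead of treating the frame as a content delta. A small hedged example with a made-up payload (the backend's exact error shape is not part of this diff):

// Hypothetical frame:  data: {"error": {"message": "Rate limit exceeded"}}
const data = '{"error": {"message": "Rate limit exceeded"}}';
const parsedData = JSON.parse(data);

if (parsedData.error) {
	// Terminal update; the real generator also breaks out of its read loop here.
	const update = { done: true, value: '', error: parsedData.error };
	console.log(update.error.message); // "Rate limit exceeded"
}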
@@ -557,6 +557,7 @@
 
 		scrollToBottom();
 
+		try {
 			const [res, controller] = await generateOpenAIChatCompletion(
 				localStorage.token,
 				{
@@ -597,13 +598,15 @@
 					  }
 					: {
 							content:
-								arr.length - 1 !== idx ? message.content : message?.raContent ?? message.content
+								arr.length - 1 !== idx
+									? message.content
+									: message?.raContent ?? message.content
 					  })
 				})),
 			seed: $settings?.options?.seed ?? undefined,
 			stop:
 				$settings?.options?.stop ?? undefined
-					? $settings?.options?.stop.map((str) =>
+					? $settings.options.stop.map((str) =>
 							decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
 					  )
 					: undefined,
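
One unchanged line above is worth unpacking, since it sits next to the edited branch: each stored stop string is wrapped in quotes, JSON-parsed to resolve backslash escapes, then URI-decoded before being sent. A worked example with an illustrative input:

// A stop sequence stored in settings as the two characters '\' and 'n':
const str = '\\n';
const decoded = decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'));
console.log(JSON.stringify(decoded)); // "\n"  (a real newline character)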
@@ -629,7 +632,11 @@
 			const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
 
 			for await (const update of textStream) {
-				const { value, done, citations } = update;
+				const { value, done, citations, error } = update;
+				if (error) {
+					await handleOpenAIError(error, null, model, responseMessage);
+					break;
+				}
 				if (done || stopResponseFlag || _chatId !== $chatId) {
 					responseMessage.done = true;
 					messages = messages;
@@ -684,39 +691,12 @@
 					}
 				}
 			} else {
-				if (res !== null) {
-					const error = await res.json();
-					console.log(error);
-					if ('detail' in error) {
-						toast.error(error.detail);
-						responseMessage.content = error.detail;
-					} else {
-						if ('message' in error.error) {
-							toast.error(error.error.message);
-							responseMessage.content = error.error.message;
-						} else {
-							toast.error(error.error);
-							responseMessage.content = error.error;
+				await handleOpenAIError(null, res, model, responseMessage);
 			}
+		} catch (error) {
+			await handleOpenAIError(error, null, model, responseMessage);
 		}
-				} else {
-					toast.error(
-						$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
-							provider: model.name ?? model.id
-						})
-					);
-					responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
-						provider: model.name ?? model.id
-					});
-				}
-
-				responseMessage.error = true;
-				responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
-					provider: model.name ?? model.id
-				});
-				responseMessage.done = true;
 			messages = messages;
-		}
 
 		stopResponseFlag = false;
 		await tick();
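
Putting the chat-page hunks together, error handling now converges on handleOpenAIError from three directions. A condensed outline, paraphrased from the hunks above; it elides the request body and the surrounding component state, so it is a sketch rather than a standalone program:

try {
	const [res, controller] = await generateOpenAIChatCompletion(localStorage.token, { /* request body */ });

	if (res && res.ok && res.body) { // success guard paraphrased; the exact condition is outside this diff
		const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
		for await (const update of textStream) {
			const { value, done, citations, error } = update;
			if (error) {
				await handleOpenAIError(error, null, model, responseMessage); // 1) error frame mid-stream
				break;
			}
			// ...append value and citations; stop when done...
		}
	} else {
		await handleOpenAIError(null, res, model, responseMessage); // 2) non-OK or null response
	}
} catch (error) {
	await handleOpenAIError(error, null, model, responseMessage); // 3) thrown while sending or reading
}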
@@ -733,6 +713,44 @@
 		}
 	};
 
+	const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
+		let errorMessage = '';
+		let innerError;
+
+		if (error) {
+			innerError = error;
+		} else if (res !== null) {
+			innerError = await res.json();
+		}
+		console.error(innerError);
+		if ('detail' in innerError) {
+			toast.error(innerError.detail);
+			errorMessage = innerError.detail;
+		} else if ('error' in innerError) {
+			if ('message' in innerError.error) {
+				toast.error(innerError.error.message);
+				errorMessage = innerError.error.message;
+			} else {
+				toast.error(innerError.error);
+				errorMessage = innerError.error;
+			}
+		} else if ('message' in innerError) {
+			toast.error(innerError.message);
+			errorMessage = innerError.message;
+		}
+
+		responseMessage.error = true;
+		responseMessage.content =
+			$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
+				provider: model.name ?? model.id
+			}) +
+			'\n' +
+			errorMessage;
+		responseMessage.done = true;
+
+		messages = messages;
+	};
+
 	const stopResponse = () => {
 		stopResponseFlag = true;
 		console.log('stopResponse');
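
The new helper accepts either an error object (thrown or streamed) or a failed Response, pulls a human-readable message out of the shapes it recognizes, and appends it to the localized connection-error line stored on the response message. Roughly, for illustrative payloads:

// Illustrative inputs and the errorMessage the helper would extract:
const examples = [
	{ detail: 'Invalid API key' }, // -> 'Invalid API key'
	{ error: { message: 'Rate limit exceeded' } }, // -> 'Rate limit exceeded'
	{ error: 'Model not found' }, // -> 'Model not found'
	{ message: 'Failed to fetch' } // -> 'Failed to fetch'
];
// In every case the helper also sets responseMessage.error = true, responseMessage.done = true,
// and stores t('Uh-oh! There was an issue connecting to {{provider}}.') + '\n' + errorMessage.

The remaining hunks below appear to apply the same changes to a second, nearly identical chat page, which is why they repeat the hunks above with slightly shifted line numbers.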
@@ -561,6 +561,7 @@
 
 		scrollToBottom();
 
+		try {
 			const [res, controller] = await generateOpenAIChatCompletion(
 				localStorage.token,
 				{
@@ -601,7 +602,9 @@
 					  }
 					: {
 							content:
-								arr.length - 1 !== idx ? message.content : message?.raContent ?? message.content
+								arr.length - 1 !== idx
+									? message.content
+									: message?.raContent ?? message.content
 					  })
 				})),
 			seed: $settings?.options?.seed ?? undefined,
@@ -633,7 +636,11 @@
 			const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
 
 			for await (const update of textStream) {
-				const { value, done, citations } = update;
+				const { value, done, citations, error } = update;
+				if (error) {
+					await handleOpenAIError(error, null, model, responseMessage);
+					break;
+				}
 				if (done || stopResponseFlag || _chatId !== $chatId) {
 					responseMessage.done = true;
 					messages = messages;
@@ -688,38 +695,10 @@
 					}
 				}
 			} else {
-				if (res !== null) {
-					const error = await res.json();
-					console.log(error);
-					if ('detail' in error) {
-						toast.error(error.detail);
-						responseMessage.content = error.detail;
-					} else {
-						if ('message' in error.error) {
-							toast.error(error.error.message);
-							responseMessage.content = error.error.message;
-						} else {
-							toast.error(error.error);
-							responseMessage.content = error.error;
+				await handleOpenAIError(null, res, model, responseMessage);
 			}
-					}
-				} else {
-					toast.error(
-						$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
-							provider: model.name ?? model.id
-						})
-					);
-					responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
-						provider: model.name ?? model.id
-					});
-				}
-
-				responseMessage.error = true;
-				responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
-					provider: model.name ?? model.id
-				});
-				responseMessage.done = true;
-				messages = messages;
+		} catch (error) {
+			await handleOpenAIError(error, null, model, responseMessage);
 		}
 
 		stopResponseFlag = false;
@@ -737,6 +716,44 @@
 		}
 	};
 
+	const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
+		let errorMessage = '';
+		let innerError;
+
+		if (error) {
+			innerError = error;
+		} else if (res !== null) {
+			innerError = await res.json();
+		}
+		console.error(innerError);
+		if ('detail' in innerError) {
+			toast.error(innerError.detail);
+			errorMessage = innerError.detail;
+		} else if ('error' in innerError) {
+			if ('message' in innerError.error) {
+				toast.error(innerError.error.message);
+				errorMessage = innerError.error.message;
+			} else {
+				toast.error(innerError.error);
+				errorMessage = innerError.error;
+			}
+		} else if ('message' in innerError) {
+			toast.error(innerError.message);
+			errorMessage = innerError.message;
+		}
+
+		responseMessage.error = true;
+		responseMessage.content =
+			$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
+				provider: model.name ?? model.id
+			}) +
+			'\n' +
+			errorMessage;
+		responseMessage.done = true;
+
+		messages = messages;
+	};
+
 	const stopResponse = () => {
 		stopResponseFlag = true;
 		console.log('stopResponse');