Mirror of https://github.com/open-webui/open-webui (synced 2025-01-30 14:29:55 +00:00)
feat: add OpenAI generation stats

parent b6b71c08f3
commit 99b1661638
@@ -574,6 +574,7 @@ ENABLE_COMMUNITY_SHARING = PersistentConfig(
     os.environ.get("ENABLE_COMMUNITY_SHARING", "True").lower() == "true",
 )
 
+
 class BannerModel(BaseModel):
     id: str
     type: str
@@ -8,6 +8,16 @@ type TextStreamUpdate = {
     citations?: any;
     // eslint-disable-next-line @typescript-eslint/no-explicit-any
     error?: any;
+    usage?: ResponseUsage;
+};
+
+type ResponseUsage = {
+    /** Including images and tools if any */
+    prompt_tokens: number;
+    /** The tokens generated */
+    completion_tokens: number;
+    /** Sum of the above two fields */
+    total_tokens: number;
 };
 
 // createOpenAITextStream takes a responseBody with a SSE response,
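The new `ResponseUsage` type mirrors the `usage` object that OpenAI-compatible endpoints attach to the final chunk of a stream when usage reporting is requested. As a rough illustration (the values below are made up, not from this commit), the last parsed SSE chunk looks something like this:

// Illustrative final chunk when `stream_options: { include_usage: true }` is sent;
// ordinary delta chunks carry a null/absent `usage`, so only this chunk fills the new field.
const finalChunk = {
    object: 'chat.completion.chunk',
    choices: [], // the usage-only chunk has no delta content
    usage: {
        prompt_tokens: 42,
        completion_tokens: 128,
        total_tokens: 170
    }
};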
@@ -59,7 +69,11 @@ async function* openAIStreamToIterator(
                continue;
            }
 
-           yield { done: false, value: parsedData.choices?.[0]?.delta?.content ?? '' };
+           yield {
+               done: false,
+               value: parsedData.choices?.[0]?.delta?.content ?? '',
+               usage: parsedData.usage
+           };
        } catch (e) {
            console.error('Error extracting delta from SSE event:', e);
        }
@@ -685,6 +685,12 @@
                {
                    model: model.id,
                    stream: true,
+                   stream_options:
+                       model.info?.meta?.capabilities?.usage ?? false
+                           ? {
+                                   include_usage: true
+                               }
+                           : undefined,
                    messages: [
                        $settings.system || (responseMessage?.userContext ?? null)
                            ? {
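With this change the request body only asks for usage when the model's `usage` capability is set. Roughly, the payload sent for a usage-capable model would look like the sketch below (only the fields visible in this diff are shown; the surrounding body is assumed, and `model.id` stands for the selected model's id):

// Assumed shape of the streaming request body with the capability enabled:
const body = {
    model: model.id,
    stream: true,
    stream_options: { include_usage: true }, // becomes undefined (and is dropped by JSON.stringify) when the capability is off
    messages: [
        /* optional system message, then the chat history */
    ]
};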
@@ -753,9 +759,10 @@
 
        if (res && res.ok && res.body) {
            const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
+           let lastUsage = null;
 
            for await (const update of textStream) {
-               const { value, done, citations, error } = update;
+               const { value, done, citations, error, usage } = update;
                if (error) {
                    await handleOpenAIError(error, null, model, responseMessage);
                    break;
@@ -771,6 +778,10 @@
                    break;
                }
 
+               if (usage) {
+                   lastUsage = usage;
+               }
+
                if (citations) {
                    responseMessage.citations = citations;
                    continue;
@@ -804,6 +815,10 @@
                }
            }
 
+           if (lastUsage) {
+               responseMessage.info = { ...lastUsage, openai: true };
+           }
+
            if ($chatId == _chatId) {
                if ($settings.saveChatHistory ?? true) {
                    chat = await updateChatById(localStorage.token, _chatId, {
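The three hunks above thread the stream's usage data through the chat loop: every update that carries `usage` overwrites `lastUsage`, and once the stream ends the final value is stored on `responseMessage.info` with an `openai: true` marker. A minimal standalone sketch of that pattern, assuming the `TextStreamUpdate` shape added earlier and a simplified `responseMessage`:

// Sketch only: captures the last usage-bearing update from an OpenAI SSE stream.
type Usage = { prompt_tokens: number; completion_tokens: number; total_tokens: number };
type Update = { done: boolean; value: string; usage?: Usage };

async function consumeStream(
    textStream: AsyncIterable<Update>,
    responseMessage: { content: string; info?: object }
) {
    let lastUsage: Usage | null = null;
    for await (const update of textStream) {
        const { value, done, usage } = update;
        if (usage) {
            lastUsage = usage; // only the final, usage-bearing chunk matters
        }
        if (done) {
            break;
        }
        responseMessage.content += value;
    }
    if (lastUsage) {
        responseMessage.info = { ...lastUsage, openai: true }; // flag later read by the message tooltip
    }
}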
@@ -106,8 +106,13 @@
        renderLatex();
 
        if (message.info) {
-           tooltipInstance = tippy(`#info-${message.id}`, {
-               content: `<span class="text-xs" id="tooltip-${message.id}">response_token/s: ${
+           let tooltipContent = '';
+           if (message.info.openai) {
+               tooltipContent = `prompt_tokens: ${message.info.prompt_tokens ?? 'N/A'}<br/>
+                   completion_tokens: ${message.info.completion_tokens ?? 'N/A'}<br/>
+                   total_tokens: ${message.info.total_tokens ?? 'N/A'}`;
+           } else {
+               tooltipContent = `response_token/s: ${
                    `${
                        Math.round(
                            ((message.info.eval_count ?? 0) / (message.info.eval_duration / 1000000000)) * 100
@@ -137,9 +142,10 @@
                    eval_duration: ${
                        Math.round(((message.info.eval_duration ?? 0) / 1000000) * 100) / 100 ?? 'N/A'
                    }ms<br/>
-                   approximate_total: ${approximateToHumanReadable(
-                       message.info.total_duration
-                   )}</span>`,
+                   approximate_total: ${approximateToHumanReadable(message.info.total_duration)}`;
+           }
+           tooltipInstance = tippy(`#info-${message.id}`, {
+               content: `<span class="text-xs" id="tooltip-${message.id}">${tooltipContent}</span>`,
                allowHTML: true
            });
        }
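The tooltip now branches on `message.info.openai`: the OpenAI branch prints the three token counts, while the existing branch keeps the Ollama-style stats. For reference, the throughput figure in that branch is tokens generated divided by generation time in seconds (Ollama reports `eval_duration` in nanoseconds), as in this extracted sketch (`message` is the component's message prop):

// The response_token/s math from the non-OpenAI branch, extracted for clarity:
const evalCount = message.info.eval_count ?? 0; // tokens generated
const evalSeconds = message.info.eval_duration / 1_000_000_000; // nanoseconds -> seconds
const tokensPerSecond = Math.round((evalCount / evalSeconds) * 100) / 100; // rounded to two decimals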
@@ -56,6 +56,20 @@
        id = name.replace(/\s+/g, '-').toLowerCase();
    }
 
+   let baseModel = null;
+   $: {
+       baseModel = $models.find((m) => m.id === info.base_model_id);
+       console.log(baseModel);
+       if (baseModel) {
+           if (baseModel.owned_by === 'openai') {
+               capabilities.usage = baseModel.info?.meta?.capabilities?.usage ?? false;
+           } else {
+               delete capabilities.usage;
+           }
+           capabilities = capabilities;
+       }
+   }
+
    const submitHandler = async () => {
        loading = true;
 
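The reactive block above copies the `usage` capability from an OpenAI base model when one is selected. The bare self-assignment at its end is the standard Svelte idiom for forcing an update after mutating an object in place, roughly:

// Svelte reactivity sketch: mutating a property alone is not detected by the compiler,
// so the object is reassigned to itself to mark it as changed.
capabilities.usage = true;   // in-place mutation
capabilities = capabilities; // triggers reactive statements and re-render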
@@ -107,6 +107,10 @@
            params = { ...params, ...model?.info?.params };
            params.stop = params?.stop ? (params?.stop ?? []).join(',') : null;
 
+           if (model?.owned_by === 'openai') {
+               capabilities.usage = false;
+           }
+
            if (model?.info?.meta?.capabilities) {
                capabilities = { ...capabilities, ...model?.info?.meta?.capabilities };
            }