diff --git a/src/lib/apis/streaming/index.ts b/src/lib/apis/streaming/index.ts
index 722cca9a1..a8249abe0 100644
--- a/src/lib/apis/streaming/index.ts
+++ b/src/lib/apis/streaming/index.ts
@@ -7,6 +7,7 @@ type TextStreamUpdate = {
 	// eslint-disable-next-line @typescript-eslint/no-explicit-any
 	citations?: any;
 	// eslint-disable-next-line @typescript-eslint/no-explicit-any
+	selectedModelId?: any;
 	error?: any;
 	usage?: ResponseUsage;
 };
@@ -71,6 +72,11 @@ async function* openAIStreamToIterator(
 				continue;
 			}
 
+			if (parsedData.selected_model_id) {
+				yield { done: false, value: '', selectedModelId: parsedData.selected_model_id };
+				continue;
+			}
+
 			yield {
 				done: false,
 				value: parsedData.choices?.[0]?.delta?.content ?? '',
diff --git a/src/lib/components/chat/Chat.svelte b/src/lib/components/chat/Chat.svelte
index e94f5f004..d8c3b2a5d 100644
--- a/src/lib/components/chat/Chat.svelte
+++ b/src/lib/components/chat/Chat.svelte
@@ -1591,7 +1591,7 @@
 			const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
 
 			for await (const update of textStream) {
-				const { value, done, citations, error, usage } = update;
+				const { value, done, citations, selectedModelId, error, usage } = update;
 				if (error) {
 					await handleOpenAIError(error, null, model, responseMessage);
 					break;
@@ -1611,6 +1611,12 @@
 					responseMessage.info = { ...usage, openai: true, usage };
 				}
 
+				if (selectedModelId) {
+					responseMessage.selectedModelId = selectedModelId;
+					responseMessage.arena = true;
+					continue;
+				}
+
 				if (citations) {
 					responseMessage.citations = citations;
 					// Only remove status if it was initially set
diff --git a/src/lib/components/chat/Messages/RateComment.svelte b/src/lib/components/chat/Messages/RateComment.svelte
index 68fdf737d..7c6955ed9 100644
--- a/src/lib/components/chat/Messages/RateComment.svelte
+++ b/src/lib/components/chat/Messages/RateComment.svelte
@@ -2,7 +2,7 @@
 	import { toast } from 'svelte-sonner';
 
 	import { createEventDispatcher, onMount, getContext } from 'svelte';
 
-	import { config } from '$lib/stores';
+	import { config, models } from '$lib/stores';
 
 	const i18n = getContext('i18n');
@@ -39,6 +39,8 @@
 	let selectedReason = null;
 	let comment = '';
 
+	let selectedModel = null;
+
 	$: if (message?.annotation?.rating === 1) {
 		reasons = LIKE_REASONS;
 	} else if (message?.annotation?.rating === -1) {
@@ -48,6 +50,16 @@
 	onMount(() => {
 		selectedReason = message?.annotation?.reason ?? '';
 		comment = message?.annotation?.comment ?? '';
+
+		if (message?.arena) {
+			selectedModel = $models.find((m) => m.id === message.selectedModelId);
+			toast.success(
+				$i18n.t('This response was generated by "{{model}}"', {
+					model: selectedModel ? selectedModel.name : message.selectedModelId
+				})
+			);
+		}
+
 		loadReasons();
 	});
 
@@ -69,6 +81,12 @@
 	};
+{#if selectedModel}
+
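
Note (not part of the patch): a minimal, self-contained TypeScript sketch of how the new selected_model_id branch is expected to behave, for tracing the arena flow. The payload key mirrors parsedData.selected_model_id in the first hunk; the concrete model id and the toUpdate helper are illustrative assumptions, not code from this repository.

import { strict as assert } from 'node:assert';

type TextStreamUpdate = {
	done: boolean;
	value: string;
	selectedModelId?: string;
};

// Convert one parsed SSE "data:" payload into a stream update, mirroring the
// precedence in the patched iterator: a selected_model_id chunk yields an
// empty-value update tagged with the model id; other chunks yield delta text.
const toUpdate = (data: string): TextStreamUpdate => {
	const parsedData = JSON.parse(data);

	if (parsedData.selected_model_id) {
		return { done: false, value: '', selectedModelId: parsedData.selected_model_id };
	}

	return { done: false, value: parsedData.choices?.[0]?.delta?.content ?? '' };
};

// The arena backend announces the routed model before any content is streamed;
// the consumer (Chat.svelte) then stores it on the response message.
assert.deepEqual(toUpdate('{"selected_model_id": "some-arena-model"}'), {
	done: false,
	value: '',
	selectedModelId: 'some-arena-model'
});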