feat: add UI support for updating model info

This commit is contained in:
Jun Siang Cheah 2024-05-09 23:49:54 +08:00
parent 0dbddebcb0
commit 02a4412dfc
16 changed files with 336 additions and 41 deletions

View File

@ -283,7 +283,7 @@ async def get_models(user=Depends(get_current_user)):
def add_custom_info_to_model(model: dict):
model["custom_info"] = next(
(item for item in app.state.MODEL_CONFIG if item["name"] == model["id"]), {}
(item for item in app.state.MODEL_CONFIG if item["id"] == model["id"]), {}
)

View File

@ -176,7 +176,7 @@ async def get_all_models():
def add_custom_info_to_model(model: dict):
model["custom_info"] = next(
(item for item in app.state.MODEL_CONFIG if item["name"] == model["model"]), {}
(item for item in app.state.MODEL_CONFIG if item["id"] == model["model"]), {}
)

View File

@ -230,7 +230,7 @@ async def get_all_models():
def add_custom_info_to_model(model: dict):
model["custom_info"] = next(
(item for item in app.state.MODEL_CONFIG if item["name"] == model["id"]), {}
(item for item in app.state.MODEL_CONFIG if item["id"] == model["id"]), {}
)

View File

@ -58,6 +58,7 @@ from config import (
SRC_LOG_LEVELS,
WEBHOOK_URL,
ENABLE_ADMIN_EXPORT,
MODEL_CONFIG,
)
from constants import ERROR_MESSAGES
@ -97,6 +98,8 @@ app = FastAPI(docs_url="/docs" if ENV == "dev" else None, redoc_url=None)
app.state.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
app.state.MODEL_FILTER_LIST = MODEL_FILTER_LIST
app.state.MODEL_CONFIG = MODEL_CONFIG
app.state.WEBHOOK_URL = WEBHOOK_URL
origins = ["*"]
@ -311,12 +314,19 @@ async def update_model_config(
litellm_app.state.MODEL_CONFIG = data.get("litellm", [])
return {
app.state.MODEL_CONFIG = {
"ollama": ollama_app.state.MODEL_CONFIG,
"openai": openai_app.state.MODEL_CONFIG,
"litellm": litellm_app.state.MODEL_CONFIG,
}
return {"models": app.state.MODEL_CONFIG}
@app.get("/api/config/models")
async def get_model_config(user=Depends(get_admin_user)):
return {"models": app.state.MODEL_CONFIG}
@app.get("/api/webhook")
async def get_webhook_url(user=Depends(get_admin_user)):

View File

@ -196,3 +196,71 @@ export const updateWebhookUrl = async (token: string, url: string) => {
return res.url;
};
/**
 * Fetch the global model configuration (per-source custom model info)
 * from the backend.
 *
 * @param token - Bearer token placed in the Authorization header.
 * @returns The `models` payload of the response.
 * @throws The parsed error body (or the network error) when the request fails.
 */
export const getModelConfig = async (token: string): Promise<GlobalModelConfig> => {
	try {
		const response = await fetch(`${WEBUI_BASE_URL}/api/config/models`, {
			method: 'GET',
			headers: {
				'Content-Type': 'application/json',
				Authorization: `Bearer ${token}`
			}
		});
		// Non-2xx responses carry a JSON error body; surface it to the caller.
		if (!response.ok) throw await response.json();
		const data = await response.json();
		return data.models;
	} catch (err) {
		console.log(err);
		throw err;
	}
};
/**
 * Custom, admin-editable metadata for a single model; overrides what the
 * backend reports for display purposes.
 */
export interface ModelConfig {
	/** Identifier of the model this entry applies to. */
	id: string;
	/** Human-friendly display name shown in the UI. */
	name?: string;
	/** Free-form description of the model. */
	description?: string;
	/** Whether the model can accept image input. */
	vision_capable?: boolean;
}

/**
 * Model configuration grouped by backend source.
 */
export interface GlobalModelConfig {
	ollama: ModelConfig[];
	litellm: ModelConfig[];
	openai: ModelConfig[];
}
/**
 * Persist the global model configuration to the backend.
 *
 * @param token - Bearer token placed in the Authorization header.
 * @param config - Full per-source model configuration to store.
 * @returns The parsed JSON response body.
 * @throws The parsed error body (or the network error) when the request fails.
 */
export const updateModelConfig = async (token: string, config: GlobalModelConfig) => {
	try {
		const response = await fetch(`${WEBUI_BASE_URL}/api/config/models`, {
			method: 'POST',
			headers: {
				'Content-Type': 'application/json',
				Authorization: `Bearer ${token}`
			},
			body: JSON.stringify(config)
		});
		// Non-2xx responses carry a JSON error body; surface it to the caller.
		if (!response.ok) throw await response.json();
		return await response.json();
	} catch (err) {
		console.log(err);
		throw err;
	}
};

View File

@ -125,7 +125,7 @@
<option value="" disabled selected>{$i18n.t('Select a model')}</option>
{#each $models.filter((model) => model.id) as model}
<option value={model.id} class="bg-gray-100 dark:bg-gray-700"
>{model.custom_info?.displayName ?? model.name}</option
>{model.custom_info?.name ?? model.name}</option
>
{/each}
</select>

View File

@ -522,7 +522,7 @@
/>
<div>
Talking to <span class=" font-medium"
>{selectedModel.custom_info?.displayName ?? selectedModel.name}
>{selectedModel.custom_info?.name ?? selectedModel.name}
</span>
</div>
</div>

View File

@ -22,10 +22,10 @@
$: filteredModels = $models
.filter((p) =>
(p.custom_info?.displayName ?? p.name).includes(prompt.split(' ')?.at(0)?.substring(1) ?? '')
(p.custom_info?.name ?? p.name).includes(prompt.split(' ')?.at(0)?.substring(1) ?? '')
)
.sort((a, b) =>
(a.custom_info?.displayName ?? a.name).localeCompare(b.custom_info?.displayName ?? b.name)
(a.custom_info?.name ?? a.name).localeCompare(b.custom_info?.name ?? b.name)
);
$: if (prompt) {
@ -160,7 +160,7 @@
on:focus={() => {}}
>
<div class=" font-medium text-black line-clamp-1">
{model.custom_info?.displayName ?? model.name}
{model.custom_info?.name ?? model.name}
</div>
<!-- <div class=" text-xs text-gray-600 line-clamp-1">

View File

@ -49,7 +49,7 @@
.filter((model) => model.name !== 'hr')
.map((model) => ({
value: model.id,
label: model.custom_info?.displayName ?? model.name,
label: model.custom_info?.name ?? model.name,
info: model
}))}
bind:value={selectedModel}

View File

@ -244,7 +244,7 @@
{#each $models as model}
{#if model.size != null}
<option value={model.name} class="bg-gray-100 dark:bg-gray-700">
{(model.custom_info?.displayName ?? model.name) +
{(model.custom_info?.name ?? model.name) +
' (' +
(model.size / 1024 ** 3).toFixed(1) +
' GB)'}
@ -265,7 +265,7 @@
{#each $models as model}
{#if model.name !== 'hr'}
<option value={model.name} class="bg-gray-100 dark:bg-gray-700">
{model.custom_info?.displayName ?? model.name}
{model.custom_info?.name ?? model.name}
</option>
{/if}
{/each}

View File

@ -13,10 +13,11 @@
uploadModel
} from '$lib/apis/ollama';
import { WEBUI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
import { WEBUI_NAME, models, MODEL_DOWNLOAD_POOL, user } from '$lib/stores';
import { WEBUI_NAME, models, MODEL_DOWNLOAD_POOL, user, config } from '$lib/stores';
import { splitStream } from '$lib/utils';
import { onMount, getContext } from 'svelte';
import { addLiteLLMModel, deleteLiteLLMModel, getLiteLLMModelInfo } from '$lib/apis/litellm';
import { getModelConfig, type GlobalModelConfig, updateModelConfig } from '$lib/apis';
import Tooltip from '$lib/components/common/Tooltip.svelte';
const i18n = getContext('i18n');
@ -67,6 +68,23 @@
let deleteModelTag = '';
// Model configuration
let modelConfig: GlobalModelConfig;
let showModelInfo = false;
let selectedModelId = '';
let modelName = '';
let modelDescription = '';
let modelIsVisionCapable = false;
const onModelInfoIdChange = () => {
const model = $models.find((m) => m.id === selectedModelId);
if (model) {
modelName = model.custom_info?.name ?? model.name;
modelDescription = model.custom_info?.description ?? '';
modelIsVisionCapable = model.custom_info?.vision_capable ?? false;
}
};
const updateModelsHandler = async () => {
for (const model of $models.filter(
(m) =>
@ -492,6 +510,53 @@
models.set(await getModels());
};
// Save (or overwrite) custom info for the selected model, then refresh
// the model list so the UI reflects the new values.
const addModelInfoHandler = async () => {
	if (!selectedModelId) {
		return;
	}

	const model = $models.find((m) => m.id === selectedModelId);
	if (!model) {
		return;
	}

	// Ollama models carry a `details` field; LiteLLM models are tagged via
	// `source`; everything else is treated as OpenAI.
	const modelSource =
		'details' in model ? 'ollama' : model.source === 'LiteLLM' ? 'litellm' : 'openai';

	// Replace any existing entry for this model in one immutable reassignment:
	// Svelte reactivity is triggered by assignment, not by Array.push, so
	// building the new array and assigning it keeps any bound UI in sync.
	modelConfig[modelSource] = [
		...modelConfig[modelSource].filter((m) => m.id !== selectedModelId),
		{
			id: selectedModelId,
			name: modelName,
			description: modelDescription,
			vision_capable: modelIsVisionCapable
		}
	];

	await updateModelConfig(localStorage.token, modelConfig);

	toast.success(
		$i18n.t('Model info for {{modelName}} added successfully', { modelName: selectedModelId })
	);

	models.set(await getModels());
};
const deleteModelInfoHandler = async () => {
if (!selectedModelId) {
return;
}
let model = $models.find((m) => m.id === selectedModelId);
if (!model) {
return;
}
const modelSource =
'details' in model ? 'ollama' : model.source === 'LiteLLM' ? 'litellm' : 'openai';
modelConfig[modelSource] = modelConfig[modelSource].filter((m) => m.id !== selectedModelId);
await updateModelConfig(localStorage.token, modelConfig);
toast.success(
$i18n.t('Model info for {{modelName}} deleted successfully', { modelName: selectedModelId })
);
};
// Flip the "vision capable" flag shown in the model-info form.
const toggleIsVisionCapable = () => {
	modelIsVisionCapable = !modelIsVisionCapable;
};
onMount(async () => {
OLLAMA_URLS = await getOllamaUrls(localStorage.token).catch((error) => {
toast.error(error);
@ -502,8 +567,9 @@
selectedOllamaUrlIdx = 0;
}
ollamaVersion = await getOllamaVersion(localStorage.token).catch((error) => false);
liteLLMModelInfo = await getLiteLLMModelInfo(localStorage.token);
modelConfig = await getModelConfig(localStorage.token);
ollamaVersion = await getOllamaVersion(localStorage.token).catch((error) => false);
});
</script>
@ -587,24 +653,28 @@
viewBox="0 0 24 24"
fill="currentColor"
xmlns="http://www.w3.org/2000/svg"
><style>
>
<style>
.spinner_ajPY {
transform-origin: center;
animation: spinner_AtaB 0.75s infinite linear;
}
@keyframes spinner_AtaB {
100% {
transform: rotate(360deg);
}
}
</style><path
</style>
<path
d="M12,1A11,11,0,1,0,23,12,11,11,0,0,0,12,1Zm0,19a8,8,0,1,1,8-8A8,8,0,0,1,12,20Z"
opacity=".25"
/><path
/>
<path
d="M10.14,1.16a11,11,0,0,0-9,8.92A1.59,1.59,0,0,0,2.46,12,1.52,1.52,0,0,0,4.11,10.7a8,8,0,0,1,6.66-6.61A1.42,1.42,0,0,0,12,2.69h0A1.57,1.57,0,0,0,10.14,1.16Z"
class="spinner_ajPY"
/></svg
>
/>
</svg>
</div>
{:else}
<svg
@ -705,7 +775,7 @@
{/if}
{#each $models.filter((m) => m.size != null && (selectedOllamaUrlIdx === null ? true : (m?.urls ?? []).includes(selectedOllamaUrlIdx))) as model}
<option value={model.name} class="bg-gray-100 dark:bg-gray-700"
>{(model.custom_info?.displayName ?? model.name) +
>{(model.custom_info?.name ?? model.name) +
' (' +
(model.size / 1024 ** 3).toFixed(1) +
' GB)'}</option
@ -836,24 +906,28 @@
viewBox="0 0 24 24"
fill="currentColor"
xmlns="http://www.w3.org/2000/svg"
><style>
>
<style>
.spinner_ajPY {
transform-origin: center;
animation: spinner_AtaB 0.75s infinite linear;
}
@keyframes spinner_AtaB {
100% {
transform: rotate(360deg);
}
}
</style><path
</style>
<path
d="M12,1A11,11,0,1,0,23,12,11,11,0,0,0,12,1Zm0,19a8,8,0,1,1,8-8A8,8,0,0,1,12,20Z"
opacity=".25"
/><path
/>
<path
d="M10.14,1.16a11,11,0,0,0-9,8.92A1.59,1.59,0,0,0,2.46,12,1.52,1.52,0,0,0,4.11,10.7a8,8,0,0,1,6.66-6.61A1.42,1.42,0,0,0,12,2.69h0A1.57,1.57,0,0,0,10.14,1.16Z"
class="spinner_ajPY"
/></svg
>
/>
</svg>
</div>
{:else}
<svg
@ -935,6 +1009,7 @@
<hr class=" dark:border-gray-700 my-2" />
{/if}
<!--TODO: Hide LiteLLM options when ENABLE_LITELLM=false-->
<div class=" space-y-3">
<div class="mt-2 space-y-3 pr-1.5">
<div>
@ -1129,6 +1204,148 @@
{/if}
</div>
</div>
<hr class=" dark:border-gray-700 my-2" />
</div>
<div class=" space-y-3">
<div class="mt-2 space-y-3 pr-1.5">
<div>
<div class="mb-2">
<div class="flex justify-between items-center text-xs">
<div class=" text-sm font-medium">{$i18n.t('Manage Model Information')}</div>
<button
class=" text-xs font-medium text-gray-500"
type="button"
on:click={() => {
showModelInfo = !showModelInfo;
}}>{showModelInfo ? $i18n.t('Hide') : $i18n.t('Show')}</button
>
</div>
</div>
{#if showModelInfo}
<div>
<div class="flex justify-between items-center text-xs">
<div class=" text-sm font-medium">{$i18n.t('Current Models')}</div>
</div>
<div class="flex gap-2">
<div class="flex-1 pb-1">
<select
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
bind:value={selectedModelId}
on:change={onModelInfoIdChange}
>
{#if !selectedModelId}
<option value="" disabled selected>{$i18n.t('Select a model')}</option>
{/if}
{#each $models as model}
<option value={model.id} class="bg-gray-100 dark:bg-gray-700"
>{'details' in model
? 'Ollama'
: model.source === 'LiteLLM'
? 'LiteLLM'
: 'OpenAI'}: {model.name}{`${
model.custom_info?.name
? ' - ' + model.custom_info?.name
: ''
}`}</option
>
{/each}
</select>
</div>
<button
class="px-2.5 bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:text-gray-100 rounded-lg transition"
on:click={() => {
deleteModelInfoHandler();
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M5 3.25V4H2.75a.75.75 0 0 0 0 1.5h.3l.815 8.15A1.5 1.5 0 0 0 5.357 15h5.285a1.5 1.5 0 0 0 1.493-1.35l.815-8.15h.3a.75.75 0 0 0 0-1.5H11v-.75A2.25 2.25 0 0 0 8.75 1h-1.5A2.25 2.25 0 0 0 5 3.25Zm2.25-.75a.75.75 0 0 0-.75.75V4h3v-.75a.75.75 0 0 0-.75-.75h-1.5ZM6.05 6a.75.75 0 0 1 .787.713l.275 5.5a.75.75 0 0 1-1.498.075l-.275-5.5A.75.75 0 0 1 6.05 6Zm3.9 0a.75.75 0 0 1 .712.787l-.275 5.5a.75.75 0 0 1-1.498-.075l.275-5.5a.75.75 0 0 1 .786-.711Z"
clip-rule="evenodd"
/>
</svg>
</button>
</div>
{#if selectedModelId}
<div>
<div class=" mb-1.5 text-sm font-medium">{$i18n.t('Model Display Name')}</div>
<div class="flex w-full mb-1.5">
<div class="flex-1 mr-2">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder={$i18n.t('Enter Model Display Name')}
bind:value={modelName}
autocomplete="off"
/>
</div>
<button
class="px-2.5 bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:text-gray-100 rounded-lg transition"
on:click={() => {
addModelInfoHandler();
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M8.75 3.75a.75.75 0 0 0-1.5 0v3.5h-3.5a.75.75 0 0 0 0 1.5h3.5v3.5a.75.75 0 0 0 1.5 0v-3.5h3.5a.75.75 0 0 0 0-1.5h-3.5v-3.5Z"
/>
</svg>
</button>
</div>
</div>
<div>
<div class=" mb-1.5 text-sm font-medium">{$i18n.t('Model Description')}</div>
<div class="flex w-full">
<div class="flex-1">
<textarea
class="px-3 py-1.5 text-sm w-full bg-transparent border dark:border-gray-600 outline-none rounded-lg -mb-1"
rows="2"
bind:value={modelDescription}
/>
</div>
</div>
</div>
<div class="py-0.5 flex w-full justify-between">
<div class=" self-center text-sm font-medium">
{$i18n.t('Is Model Vision Capable')}
</div>
<button
class="p-1 px-3sm flex rounded transition"
on:click={() => {
toggleIsVisionCapable();
}}
type="button"
>
{#if modelIsVisionCapable === true}
<span class="ml-2 self-center">{$i18n.t('Yes')}</span>
{:else}
<span class="ml-2 self-center">{$i18n.t('No')}</span>
{/if}
</button>
</div>
{/if}
</div>
{/if}
</div>
</div>
</div>
</div>
</div>

View File

@ -321,7 +321,7 @@
{/if}
{#each $models.filter((m) => m.id && !m.external) as model}
<option value={model.name} class="bg-gray-100 dark:bg-gray-700"
>{(model.custom_info?.displayName ?? model.name) +
>{(model.custom_info?.name ?? model.name) +
' (' +
(model.size / 1024 ** 3).toFixed(1) +
' GB)'}</option

View File

@ -42,8 +42,8 @@ export const showChangelog = writable(false);
export type Model = OpenAIModel | OllamaModel;
type ModelCustomInfo = {
id?: string;
name?: string;
displayName?: string;
description?: string;
vision_capable?: boolean;
};
@ -54,12 +54,12 @@ type BaseModel = {
custom_info?: ModelCustomInfo;
};
interface OpenAIModel extends BaseModel {
export interface OpenAIModel extends BaseModel {
external: boolean;
source?: string;
}
interface OllamaModel extends BaseModel {
export interface OllamaModel extends BaseModel {
details: OllamaModelDetails;
size: number;
description: string;

View File

@ -268,7 +268,7 @@
if (hasImages && !(model.custom_info?.vision_capable ?? true)) {
toast.error(
$i18n.t('Model {{modelName}} is not vision capable', {
modelName: model.custom_info?.displayName ?? model.name ?? model.id
modelName: model.custom_info?.name ?? model.name ?? model.id
})
);
}
@ -282,7 +282,7 @@
role: 'assistant',
content: '',
model: model.id,
modelName: model.custom_info?.displayName ?? model.name ?? model.id,
modelName: model.custom_info?.name ?? model.name ?? model.id,
timestamp: Math.floor(Date.now() / 1000) // Unix epoch
};
@ -314,7 +314,7 @@
};
const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
const modelName = model.custom_info?.displayName ?? model.name ?? model.id;
const modelName = model.custom_info?.name ?? model.name ?? model.id;
model = model.id;
const responseMessage = history.messages[responseMessageId];
@ -719,17 +719,17 @@
} else {
toast.error(
$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
provider: model.custom_info?.displayName ?? model.name ?? model.id
provider: model.custom_info?.name ?? model.name ?? model.id
})
);
responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
provider: model.custom_info?.displayName ?? model.name ?? model.id
provider: model.custom_info?.name ?? model.name ?? model.id
});
}
responseMessage.error = true;
responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
provider: model.custom_info?.displayName ?? model.name ?? model.id
provider: model.custom_info?.name ?? model.name ?? model.id
});
responseMessage.done = true;
messages = messages;

View File

@ -272,7 +272,7 @@
if (hasImages && !(model.custom_info?.vision_capable ?? true)) {
toast.error(
$i18n.t('Model {{modelName}} is not vision capable', {
modelName: model.custom_info?.displayName ?? model.name ?? model.id
modelName: model.custom_info?.name ?? model.name ?? model.id
})
);
}
@ -286,7 +286,7 @@
role: 'assistant',
content: '',
model: model.id,
modelName: model.custom_info?.displayName ?? model.name ?? model.id,
modelName: model.custom_info?.name ?? model.name ?? model.id,
timestamp: Math.floor(Date.now() / 1000) // Unix epoch
};
@ -722,17 +722,17 @@
} else {
toast.error(
$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
provider: model.custom_info?.displayName ?? model.name ?? model.id
provider: model.custom_info?.name ?? model.name ?? model.id
})
);
responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
provider: model.custom_info?.displayName ?? model.name ?? model.id
provider: model.custom_info?.name ?? model.name ?? model.id
});
}
responseMessage.error = true;
responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
provider: model.custom_info?.displayName ?? model.name ?? model.id
provider: model.custom_info?.name ?? model.name ?? model.id
});
responseMessage.done = true;
messages = messages;

View File

@ -326,7 +326,7 @@
.filter((model) => model.name !== 'hr')
.map((model) => ({
value: model.id,
label: model.custom_info?.displayName ?? model.name,
label: model.custom_info?.name ?? model.name,
info: model
}))}
bind:value={selectedModelId}