diff --git a/README.md b/README.md
index 549bce654..0207999f3 100644
--- a/README.md
+++ b/README.md
@@ -13,6 +13,8 @@ ChatGPT-Style Web Interface for Ollama 🦙
+**Disclaimer:** *ollama-webui is a community-driven project and is not affiliated with the Ollama team in any way. This initiative is independent, and any inquiries or feedback should be directed to [our community on Discord](https://discord.gg/5rJgQTnV4s). We kindly request users to refrain from contacting or harassing the Ollama team regarding this project.*
+
 ![Ollama Web UI Demo](./demo.gif)
 Also check our sibling project, [OllamaHub](https://ollamahub.com/), where you can discover, download, and explore customized Modelfiles for Ollama! 🦙🔍
@@ -130,6 +132,10 @@ docker run -d -p 3000:8080 -e OLLAMA_API_BASE_URL=https://example.com/api --name
 While we strongly recommend using our convenient Docker container installation for optimal support, we understand that some situations may require a non-Docker setup, especially for development purposes. Please note that non-Docker installations are not officially supported, and you might need to troubleshoot on your own.
+**Warning: Backend Dependency for Proper Functionality**
+
+To function properly, the application requires both the backend and the frontend to be running at the same time. Serving only the frontend is not supported and can leave the application inoperable, and issues opened for frontend-only setups will not be addressed, as that usage is outside the project's scope. Please follow the steps in this documentation: use the frontend only to build the static files, then run the complete application with the provided backend commands. Setups that deviate from these instructions are unsupported, and we may not be able to help with them.
+
 ### TL;DR 🚀
 Run the following commands to install:
diff --git a/TROUBLESHOOTING.md b/TROUBLESHOOTING.md
index d0d8ce2d0..2fabe497e 100644
--- a/TROUBLESHOOTING.md
+++ b/TROUBLESHOOTING.md
@@ -45,6 +45,15 @@ Becomes
 docker run --platform linux/amd64 -d -p 3000:8080 -e OLLAMA_API_BASE_URL=http://example.com:11434/api --name ollama-webui --restart always ghcr.io/ollama-webui/ollama-webui:main
 ```
+## Running ollama-webui as a container on WSL Ubuntu
+If you're running ollama-webui via Docker on WSL Ubuntu and have chosen to install the WebUI and Ollama separately, you might encounter connection issues. This is often because the Docker container cannot reach the Ollama server at 127.0.0.1:11434. To resolve this, use the `--network=host` flag in the docker command. Note that the UI is then served on port 8080 instead of 3000, so the link becomes: http://localhost:8080.
+
+Here's an example of the command you should run:
+
+```bash
+docker run -d --network=host -e OLLAMA_API_BASE_URL=http://127.0.0.1:11434/api --name ollama-webui --restart always ghcr.io/ollama-webui/ollama-webui:main
+```
+
 ## References
 [Change Docker Desktop Settings on Mac](https://docs.docker.com/desktop/settings/mac/) Search for "x86" in that page.
diff --git a/backend/config.py b/backend/config.py
index 1dabe48ae..c5a79f57a 100644
--- a/backend/config.py
+++ b/backend/config.py
@@ -30,7 +30,7 @@ if ENV == "prod":
 # WEBUI_VERSION
 ####################################
-WEBUI_VERSION = os.environ.get("WEBUI_VERSION", "v1.0.0-alpha.33")
+WEBUI_VERSION = os.environ.get("WEBUI_VERSION", "v1.0.0-alpha.35")
 ####################################
 # WEBUI_AUTH
diff --git a/src/lib/components/chat/MessageInput.svelte b/src/lib/components/chat/MessageInput.svelte
index 172485ca2..bb941c90e 100644
--- a/src/lib/components/chat/MessageInput.svelte
+++ b/src/lib/components/chat/MessageInput.svelte
@@ -2,6 +2,7 @@
 	import { settings } from '$lib/stores';
 	import toast from 'svelte-french-toast';
 	import Suggestions from './MessageInput/Suggestions.svelte';
+	import { onMount } from 'svelte';
 	export let submitPrompt: Function;
 	export let stopResponse: Function;
@@ -11,6 +12,7 @@
 	let filesInputElement;
 	let inputFiles;
+	let dragged = false;
 	export let files = [];
@@ -82,12 +84,78 @@
 			}
 		}
 	};
+
+	onMount(() => {
+		const dropZone = document.querySelector('body');
+
+		dropZone?.addEventListener('dragover', (e) => {
+			e.preventDefault();
+			dragged = true;
+		});
+
+		dropZone.addEventListener('drop', (e) => {
+			e.preventDefault();
+			console.log(e);
+
+			if (e.dataTransfer?.files) {
+				let reader = new FileReader();
+
+				reader.onload = (event) => {
+					files = [
+						...files,
+						{
+							type: 'image',
+							url: `${event.target.result}`
+						}
+					];
+				};
+
+				if (
+					e.dataTransfer?.files &&
+					e.dataTransfer?.files.length > 0 &&
+					['image/gif', 'image/jpeg', 'image/png'].includes(e.dataTransfer?.files[0]['type'])
+				) {
+					reader.readAsDataURL(e.dataTransfer?.files[0]);
+				} else {
+					toast.error(`Unsupported File Type '${e.dataTransfer?.files[0]['type']}'.`);
+				}
+			}
+
+			dragged = false;
+		});
+
+		dropZone?.addEventListener('dragleave', () => {
+			dragged = false;
+		});
+	});
+{#if dragged}
+	🏞️
+	Add Images
+	Drop any images here to add to the conversation
+{/if}
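The drop handler added in this hunk follows the standard browser FileReader pattern: accept only GIF/JPEG/PNG files, read the dropped file as a data URL, and append it to the `files` array the chat input already uses for uploads. A minimal standalone sketch of that pattern is below; `FileEntry` and `addImageFile` are illustrative names, not identifiers from this PR.

```typescript
// Sketch of the dropped-image handling used above (illustrative names, not PR code).
type FileEntry = { type: 'image'; url: string };

const ACCEPTED_TYPES = ['image/gif', 'image/jpeg', 'image/png'];

function addImageFile(file: File, files: FileEntry[], onDone: (next: FileEntry[]) => void): void {
	if (!ACCEPTED_TYPES.includes(file.type)) {
		console.error(`Unsupported File Type '${file.type}'.`);
		return;
	}

	const reader = new FileReader();
	reader.onload = (event) => {
		// event.target.result holds the base64 data URL once readAsDataURL completes
		onDone([...files, { type: 'image', url: `${event.target?.result}` }]);
	};
	reader.readAsDataURL(file);
}

// Usage inside a 'drop' listener (illustrative):
// document.body.addEventListener('drop', (e) => {
//   e.preventDefault();
//   const file = e.dataTransfer?.files?.[0];
//   if (file) addImageFile(file, currentFiles, (next) => (currentFiles = next));
// });
```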
 {#if messages.length == 0 && suggestionPrompts.length !== 0}
-
+
 {/if}
diff --git a/src/lib/components/chat/MessageInput/Suggestions.svelte b/src/lib/components/chat/MessageInput/Suggestions.svelte
index 6bd1876b5..58c75fd19 100644
--- a/src/lib/components/chat/MessageInput/Suggestions.svelte
+++ b/src/lib/components/chat/MessageInput/Suggestions.svelte
@@ -3,7 +3,7 @@
 export let suggestionPrompts = [];
-
+
 {#each suggestionPrompts as prompt, promptIdx}
+ + +
+ {:else if selectedTab === 'addons'}
{ saveSettings({ gravatarEmail: gravatarEmail !== '' ? gravatarEmail : undefined, - gravatarUrl: gravatarEmail !== '' ? getGravatarURL(gravatarEmail) : undefined, - OPENAI_API_KEY: OPENAI_API_KEY !== '' ? OPENAI_API_KEY : undefined + gravatarUrl: gravatarEmail !== '' ? getGravatarURL(gravatarEmail) : undefined }); show = false; }} @@ -962,26 +1053,6 @@ >
-
-	OpenAI API Key (optional)
-
-	Adds optional support for 'gpt-*' models available.
-
diff --git a/src/routes/(app)/+layout.svelte b/src/routes/(app)/+layout.svelte
index 4f4774232..af8c75224 100644
--- a/src/routes/(app)/+layout.svelte
+++ b/src/routes/(app)/+layout.svelte
@@ -55,7 +55,9 @@
 		// If OpenAI API Key exists
 		if ($settings.OPENAI_API_KEY) {
 			// Validate OPENAI_API_KEY
-			const openaiModelRes = await fetch(`https://api.openai.com/v1/models`, {
+
+			const API_BASE_URL = $settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1';
+			const openaiModelRes = await fetch(`${API_BASE_URL}/models`, {
 				method: 'GET',
 				headers: {
 					'Content-Type': 'application/json',
@@ -72,15 +74,19 @@
 					return null;
 				});
-			const openAIModels = openaiModelRes?.data ?? null;
+			const openAIModels = Array.isArray(openaiModelRes)
+				? openaiModelRes
+				: openaiModelRes?.data ?? null;
 			models.push(
 				...(openAIModels
 					? [
 							{ name: 'hr' },
 							...openAIModels
-								.map((model) => ({ name: model.id, label: 'OpenAI' }))
-								.filter((model) => model.name.includes('gpt'))
+								.map((model) => ({ name: model.id, external: true }))
+								.filter((model) =>
+									API_BASE_URL.includes('openai') ? model.name.includes('gpt') : true
+								)
 					  ]
 					: [])
 			);
@@ -236,36 +242,39 @@
-			Ollama Update Required
+			Connection Issue or Update Needed
-			Oops! It seems like your Ollama needs a little attention.
-			We encountered a connection issue or noticed that you're running an outdated version. Please
-			update to {requiredOllamaVersion} or above.
+			Oops! It seems like your Ollama needs a little attention. We've detected either a connection hiccup or
+			observed that you're using an older version. Ensure you're on the latest Ollama version
+			(version {requiredOllamaVersion} or higher) or check your connection.
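Taken together, the `+layout.svelte` changes above are what let the UI talk to OpenAI-compatible endpoints other than api.openai.com: the model list is fetched from a configurable base URL, the response shape is normalized (the official API wraps models in a `data` array, while a compatible server may return a bare array), every result is tagged `external: true`, and the `gpt` name filter is only applied when the base URL points at OpenAI. A rough sketch of that logic, using illustrative names (`listExternalModels`, `ExternalModel`) rather than the PR's actual code:

```typescript
// Sketch of the model-list normalization shown above (illustrative names, not PR code).
type ExternalModel = { name: string; external: true };

async function listExternalModels(apiKey: string, baseUrl?: string): Promise<ExternalModel[]> {
	const API_BASE_URL = baseUrl ?? 'https://api.openai.com/v1';

	const res = await fetch(`${API_BASE_URL}/models`, {
		method: 'GET',
		headers: {
			'Content-Type': 'application/json',
			Authorization: `Bearer ${apiKey}`
		}
	})
		.then((r) => (r.ok ? r.json() : null))
		.catch(() => null);

	// OpenAI returns { data: [...] }; some OpenAI-compatible servers may return a bare array.
	const models = Array.isArray(res) ? res : res?.data ?? null;
	if (!models) return [];

	return models
		.map((model: { id: string }) => ({ name: model.id, external: true as const }))
		// Only restrict to 'gpt' model names when talking to the official OpenAI endpoint.
		.filter((model) => (API_BASE_URL.includes('openai') ? model.name.includes('gpt') : true));
}
```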
diff --git a/src/routes/(app)/+page.svelte b/src/routes/(app)/+page.svelte index d0b83b80d..c0885d192 100644 --- a/src/routes/(app)/+page.svelte +++ b/src/routes/(app)/+page.svelte @@ -7,7 +7,7 @@ import { splitStream } from '$lib/utils'; import { goto } from '$app/navigation'; - import { config, modelfiles, user, settings, db, chats, chatId } from '$lib/stores'; + import { config, models, modelfiles, user, settings, db, chats, chatId } from '$lib/stores'; import MessageInput from '$lib/components/chat/MessageInput.svelte'; import Messages from '$lib/components/chat/Messages.svelte'; @@ -130,7 +130,8 @@ const sendPrompt = async (userPrompt, parentId, _chatId) => { await Promise.all( selectedModels.map(async (model) => { - if (model.includes('gpt-')) { + console.log(model); + if ($models.filter((m) => m.name === model)[0].external) { await sendPromptOpenAI(model, userPrompt, parentId, _chatId); } else { await sendPromptOllama(model, userPrompt, parentId, _chatId); @@ -364,133 +365,163 @@ ]; } - await tick(); - window.scrollTo({ top: document.body.scrollHeight }); - const res = await fetch(`https://api.openai.com/v1/chat/completions`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - Authorization: `Bearer ${$settings.OPENAI_API_KEY}` - }, - body: JSON.stringify({ - model: model, - stream: true, - messages: [ - $settings.system - ? { - role: 'system', - content: $settings.system - } - : undefined, - ...messages - ] - .filter((message) => message) - .map((message) => ({ - role: message.role, - ...(message.files + const res = await fetch( + `${$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'}/chat/completions`, + { + method: 'POST', + headers: { + Authorization: `Bearer ${$settings.OPENAI_API_KEY}`, + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + model: model, + stream: true, + messages: [ + $settings.system ? { - content: [ - { - type: 'text', - text: message.content - }, - ...message.files - .filter((file) => file.type === 'image') - .map((file) => ({ - type: 'image_url', - image_url: { - url: file.url - } - })) - ] + role: 'system', + content: $settings.system } - : { content: message.content }) - })), - temperature: $settings.temperature ?? undefined, - top_p: $settings.top_p ?? undefined, - num_ctx: $settings.num_ctx ?? undefined, - frequency_penalty: $settings.repeat_penalty ?? undefined - }) + : undefined, + ...messages + ] + .filter((message) => message) + .map((message) => ({ + role: message.role, + ...(message.files + ? { + content: [ + { + type: 'text', + text: message.content + }, + ...message.files + .filter((file) => file.type === 'image') + .map((file) => ({ + type: 'image_url', + image_url: { + url: file.url + } + })) + ] + } + : { content: message.content }) + })), + temperature: $settings.temperature ?? undefined, + top_p: $settings.top_p ?? undefined, + num_ctx: $settings.num_ctx ?? undefined, + frequency_penalty: $settings.repeat_penalty ?? 
undefined + }) + } + ).catch((err) => { + console.log(err); + return null; }); - const reader = res.body - .pipeThrough(new TextDecoderStream()) - .pipeThrough(splitStream('\n')) - .getReader(); + if (res && res.ok) { + const reader = res.body + .pipeThrough(new TextDecoderStream()) + .pipeThrough(splitStream('\n')) + .getReader(); - while (true) { - const { value, done } = await reader.read(); - if (done || stopResponseFlag || _chatId !== $chatId) { - responseMessage.done = true; - messages = messages; - break; - } + while (true) { + const { value, done } = await reader.read(); + if (done || stopResponseFlag || _chatId !== $chatId) { + responseMessage.done = true; + messages = messages; + break; + } - try { - let lines = value.split('\n'); + try { + let lines = value.split('\n'); - for (const line of lines) { - if (line !== '') { - console.log(line); - if (line === 'data: [DONE]') { - responseMessage.done = true; - messages = messages; - } else { - let data = JSON.parse(line.replace(/^data: /, '')); - console.log(data); - - if (responseMessage.content == '' && data.choices[0].delta.content == '\n') { - continue; - } else { - responseMessage.content += data.choices[0].delta.content ?? ''; + for (const line of lines) { + if (line !== '') { + console.log(line); + if (line === 'data: [DONE]') { + responseMessage.done = true; messages = messages; + } else { + let data = JSON.parse(line.replace(/^data: /, '')); + console.log(data); + + if (responseMessage.content == '' && data.choices[0].delta.content == '\n') { + continue; + } else { + responseMessage.content += data.choices[0].delta.content ?? ''; + messages = messages; + } } } } + } catch (error) { + console.log(error); } - } catch (error) { + + if ($settings.notificationEnabled && !document.hasFocus()) { + const notification = new Notification(`OpenAI ${model}`, { + body: responseMessage.content, + icon: '/favicon.png' + }); + } + + if ($settings.responseAutoCopy) { + copyToClipboard(responseMessage.content); + } + + if (autoScroll) { + window.scrollTo({ top: document.body.scrollHeight }); + } + + await $db.updateChatById(_chatId, { + title: title === '' ? 'New Chat' : title, + models: selectedModels, + system: $settings.system ?? undefined, + options: { + seed: $settings.seed ?? undefined, + temperature: $settings.temperature ?? undefined, + repeat_penalty: $settings.repeat_penalty ?? undefined, + top_k: $settings.top_k ?? undefined, + top_p: $settings.top_p ?? undefined, + num_ctx: $settings.num_ctx ?? undefined, + ...($settings.options ?? {}) + }, + messages: messages, + history: history + }); + } + } else { + if (res !== null) { + const error = await res.json(); console.log(error); + if ('detail' in error) { + toast.error(error.detail); + responseMessage.content = error.detail; + } else { + if ('message' in error.error) { + toast.error(error.error.message); + responseMessage.content = error.error.message; + } else { + toast.error(error.error); + responseMessage.content = error.error; + } + } + } else { + toast.error(`Uh-oh! There was an issue connecting to ${model}.`); + responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`; } - if (autoScroll) { - window.scrollTo({ top: document.body.scrollHeight }); - } - - await $db.updateChatById(_chatId, { - title: title === '' ? 'New Chat' : title, - models: selectedModels, - system: $settings.system ?? undefined, - options: { - seed: $settings.seed ?? undefined, - temperature: $settings.temperature ?? undefined, - repeat_penalty: $settings.repeat_penalty ?? 
undefined, - top_k: $settings.top_k ?? undefined, - top_p: $settings.top_p ?? undefined, - num_ctx: $settings.num_ctx ?? undefined, - ...($settings.options ?? {}) - }, - messages: messages, - history: history - }); + responseMessage.error = true; + responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`; + responseMessage.done = true; + messages = messages; } stopResponseFlag = false; - await tick(); - if ($settings.notificationEnabled && !document.hasFocus()) { - const notification = new Notification(`OpenAI ${model}`, { - body: responseMessage.content, - icon: '/favicon.png' - }); - } - - if ($settings.responseAutoCopy) { - copyToClipboard(responseMessage.content); - } - if (autoScroll) { window.scrollTo({ top: document.body.scrollHeight }); } diff --git a/src/routes/(app)/c/[id]/+page.svelte b/src/routes/(app)/c/[id]/+page.svelte index bf7207fb3..6ff95cd55 100644 --- a/src/routes/(app)/c/[id]/+page.svelte +++ b/src/routes/(app)/c/[id]/+page.svelte @@ -6,7 +6,7 @@ import { onMount, tick } from 'svelte'; import { convertMessagesToHistory, splitStream } from '$lib/utils'; import { goto } from '$app/navigation'; - import { config, modelfiles, user, settings, db, chats, chatId } from '$lib/stores'; + import { config, models, modelfiles, user, settings, db, chats, chatId } from '$lib/stores'; import MessageInput from '$lib/components/chat/MessageInput.svelte'; import Messages from '$lib/components/chat/Messages.svelte'; @@ -144,7 +144,8 @@ const sendPrompt = async (userPrompt, parentId, _chatId) => { await Promise.all( selectedModels.map(async (model) => { - if (model.includes('gpt-')) { + console.log(model); + if ($models.filter((m) => m.name === model)[0].external) { await sendPromptOpenAI(model, userPrompt, parentId, _chatId); } else { await sendPromptOllama(model, userPrompt, parentId, _chatId); @@ -378,133 +379,163 @@ ]; } - await tick(); - window.scrollTo({ top: document.body.scrollHeight }); - const res = await fetch(`https://api.openai.com/v1/chat/completions`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - Authorization: `Bearer ${$settings.OPENAI_API_KEY}` - }, - body: JSON.stringify({ - model: model, - stream: true, - messages: [ - $settings.system - ? { - role: 'system', - content: $settings.system - } - : undefined, - ...messages - ] - .filter((message) => message) - .map((message) => ({ - role: message.role, - ...(message.files + const res = await fetch( + `${$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'}/chat/completions`, + { + method: 'POST', + headers: { + Authorization: `Bearer ${$settings.OPENAI_API_KEY}`, + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + model: model, + stream: true, + messages: [ + $settings.system ? { - content: [ - { - type: 'text', - text: message.content - }, - ...message.files - .filter((file) => file.type === 'image') - .map((file) => ({ - type: 'image_url', - image_url: { - url: file.url - } - })) - ] + role: 'system', + content: $settings.system } - : { content: message.content }) - })), - temperature: $settings.temperature ?? undefined, - top_p: $settings.top_p ?? undefined, - num_ctx: $settings.num_ctx ?? undefined, - frequency_penalty: $settings.repeat_penalty ?? undefined - }) + : undefined, + ...messages + ] + .filter((message) => message) + .map((message) => ({ + role: message.role, + ...(message.files + ? 
{ + content: [ + { + type: 'text', + text: message.content + }, + ...message.files + .filter((file) => file.type === 'image') + .map((file) => ({ + type: 'image_url', + image_url: { + url: file.url + } + })) + ] + } + : { content: message.content }) + })), + temperature: $settings.temperature ?? undefined, + top_p: $settings.top_p ?? undefined, + num_ctx: $settings.num_ctx ?? undefined, + frequency_penalty: $settings.repeat_penalty ?? undefined + }) + } + ).catch((err) => { + console.log(err); + return null; }); - const reader = res.body - .pipeThrough(new TextDecoderStream()) - .pipeThrough(splitStream('\n')) - .getReader(); + if (res && res.ok) { + const reader = res.body + .pipeThrough(new TextDecoderStream()) + .pipeThrough(splitStream('\n')) + .getReader(); - while (true) { - const { value, done } = await reader.read(); - if (done || stopResponseFlag || _chatId !== $chatId) { - responseMessage.done = true; - messages = messages; - break; - } + while (true) { + const { value, done } = await reader.read(); + if (done || stopResponseFlag || _chatId !== $chatId) { + responseMessage.done = true; + messages = messages; + break; + } - try { - let lines = value.split('\n'); + try { + let lines = value.split('\n'); - for (const line of lines) { - if (line !== '') { - console.log(line); - if (line === 'data: [DONE]') { - responseMessage.done = true; - messages = messages; - } else { - let data = JSON.parse(line.replace(/^data: /, '')); - console.log(data); - - if (responseMessage.content == '' && data.choices[0].delta.content == '\n') { - continue; - } else { - responseMessage.content += data.choices[0].delta.content ?? ''; + for (const line of lines) { + if (line !== '') { + console.log(line); + if (line === 'data: [DONE]') { + responseMessage.done = true; messages = messages; + } else { + let data = JSON.parse(line.replace(/^data: /, '')); + console.log(data); + + if (responseMessage.content == '' && data.choices[0].delta.content == '\n') { + continue; + } else { + responseMessage.content += data.choices[0].delta.content ?? ''; + messages = messages; + } } } } + } catch (error) { + console.log(error); } - } catch (error) { + + if ($settings.notificationEnabled && !document.hasFocus()) { + const notification = new Notification(`OpenAI ${model}`, { + body: responseMessage.content, + icon: '/favicon.png' + }); + } + + if ($settings.responseAutoCopy) { + copyToClipboard(responseMessage.content); + } + + if (autoScroll) { + window.scrollTo({ top: document.body.scrollHeight }); + } + + await $db.updateChatById(_chatId, { + title: title === '' ? 'New Chat' : title, + models: selectedModels, + system: $settings.system ?? undefined, + options: { + seed: $settings.seed ?? undefined, + temperature: $settings.temperature ?? undefined, + repeat_penalty: $settings.repeat_penalty ?? undefined, + top_k: $settings.top_k ?? undefined, + top_p: $settings.top_p ?? undefined, + num_ctx: $settings.num_ctx ?? undefined, + ...($settings.options ?? {}) + }, + messages: messages, + history: history + }); + } + } else { + if (res !== null) { + const error = await res.json(); console.log(error); + if ('detail' in error) { + toast.error(error.detail); + responseMessage.content = error.detail; + } else { + if ('message' in error.error) { + toast.error(error.error.message); + responseMessage.content = error.error.message; + } else { + toast.error(error.error); + responseMessage.content = error.error; + } + } + } else { + toast.error(`Uh-oh! There was an issue connecting to ${model}.`); + responseMessage.content = `Uh-oh! 
There was an issue connecting to ${model}.`; } - if (autoScroll) { - window.scrollTo({ top: document.body.scrollHeight }); - } - - await $db.updateChatById(_chatId, { - title: title === '' ? 'New Chat' : title, - models: selectedModels, - system: $settings.system ?? undefined, - options: { - seed: $settings.seed ?? undefined, - temperature: $settings.temperature ?? undefined, - repeat_penalty: $settings.repeat_penalty ?? undefined, - top_k: $settings.top_k ?? undefined, - top_p: $settings.top_p ?? undefined, - num_ctx: $settings.num_ctx ?? undefined, - ...($settings.options ?? {}) - }, - messages: messages, - history: history - }); + responseMessage.error = true; + responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`; + responseMessage.done = true; + messages = messages; } stopResponseFlag = false; - await tick(); - if ($settings.notificationEnabled && !document.hasFocus()) { - const notification = new Notification(`OpenAI ${model}`, { - body: responseMessage.content, - icon: '/favicon.png' - }); - } - - if ($settings.responseAutoCopy) { - copyToClipboard(responseMessage.content); - } - if (autoScroll) { window.scrollTo({ top: document.body.scrollHeight }); } diff --git a/src/routes/(app)/modelfiles/+page.svelte b/src/routes/(app)/modelfiles/+page.svelte index ff7519dae..297766f91 100644 --- a/src/routes/(app)/modelfiles/+page.svelte +++ b/src/routes/(app)/modelfiles/+page.svelte @@ -98,7 +98,7 @@
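Both chat pages (`+page.svelte` and `c/[id]/+page.svelte`) now share the same streaming pattern for external models: POST to `{OPENAI_API_BASE_URL}/chat/completions` with `stream: true`, pipe the response body through a text decoder, and append `choices[0].delta.content` from each `data:` line until `data: [DONE]` arrives. A condensed sketch of that read loop follows; `readChatStream` is an illustrative helper name, and unlike the PR code it assumes each chunk contains whole lines (the PR re-chunks on newlines with a `splitStream('\n')` transform) and omits the stop-flag, notification, and persistence handling.

```typescript
// Sketch of the SSE-style read loop used by sendPromptOpenAI above (illustrative, not PR code).
async function readChatStream(res: Response, onDelta: (text: string) => void): Promise<void> {
	if (!res.ok || !res.body) throw new Error('stream request failed');

	const reader = res.body.pipeThrough(new TextDecoderStream()).getReader();

	while (true) {
		const { value, done } = await reader.read();
		if (done || value === undefined) break;

		// Assumes whole lines per chunk; the real code buffers partial lines first.
		for (const line of value.split('\n')) {
			if (line === '' || !line.startsWith('data: ')) continue;
			if (line === 'data: [DONE]') return;

			const data = JSON.parse(line.replace(/^data: /, ''));
			// Each chunk carries an incremental delta; role-only chunks have no content.
			onDelta(data.choices[0]?.delta?.content ?? '');
		}
	}
}
```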