diff --git a/README.md b/README.md
index 549bce654..0207999f3 100644
--- a/README.md
+++ b/README.md
@@ -13,6 +13,8 @@
ChatGPT-Style Web Interface for Ollama 🦙
+**Disclaimer:** *ollama-webui is an independent, community-driven project with no affiliation to the Ollama team. Please direct all inquiries and feedback to [our community on Discord](https://discord.gg/5rJgQTnV4s), and refrain from contacting or harassing the Ollama team about this project.*
+
![Ollama Web UI Demo](./demo.gif)
Also check our sibling project, [OllamaHub](https://ollamahub.com/), where you can discover, download, and explore customized Modelfiles for Ollama! 🦙🔍
@@ -130,6 +132,10 @@ docker run -d -p 3000:8080 -e OLLAMA_API_BASE_URL=https://example.com/api --name
While we strongly recommend using our convenient Docker container installation for optimal support, we understand that some situations may require a non-Docker setup, especially for development purposes. Please note that non-Docker installations are not officially supported, and you might need to troubleshoot on your own.
+**Warning: Backend Dependency for Proper Functionality**
+
+The application requires both the backend and the frontend to run together. Serving the frontend on its own is not supported and can leave the app inoperable, and issues opened for frontend-only setups will not be addressed, as they fall outside the intended usage. Use the frontend only to build the static files, then run the complete application with the provided backend commands. Configurations that deviate from these steps are unsupported, and we may be unable to help with them.
+
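+For example, assuming the repo's default layout with a `backend/start.sh` script, the flow looks roughly like this (the TL;DR below lists the exact commands):
+
+```sh
+# Build the frontend into static files
+npm install
+npm run build
+
+# Then serve the complete app from the backend
+cd backend
+pip install -r requirements.txt
+sh start.sh
+```
+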
### TL;DR 🚀
Run the following commands to install:
diff --git a/TROUBLESHOOTING.md b/TROUBLESHOOTING.md
index d0d8ce2d0..2fabe497e 100644
--- a/TROUBLESHOOTING.md
+++ b/TROUBLESHOOTING.md
@@ -45,6 +45,15 @@ Becomes
docker run --platform linux/amd64 -d -p 3000:8080 -e OLLAMA_API_BASE_URL=http://example.com:11434/api --name ollama-webui --restart always ghcr.io/ollama-webui/ollama-webui:main
```
+## Running ollama-webui as a container on WSL Ubuntu
+If you're running ollama-webui via Docker on WSL Ubuntu and have installed the web UI and Ollama separately, you might encounter connection issues. This usually happens because the Docker container can't reach the Ollama server at 127.0.0.1:11434. To resolve it, add the `--network=host` flag to the docker command; the container then shares the host's network, so the port changes from 3000 to 8080 and the UI becomes available at http://localhost:8080.
+
+Here's an example of the command you should run:
+
+```bash
+docker run -d --network=host -e OLLAMA_API_BASE_URL=http://127.0.0.1:11434/api --name ollama-webui --restart always ghcr.io/ollama-webui/ollama-webui:main
+```
+
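+If the UI still can't reach Ollama, you can first confirm the server is reachable on the host; Ollama's `/api/tags` endpoint simply lists your local models, so it makes a quick connectivity check:
+
+```bash
+# Should return a JSON list of your local models
+curl http://127.0.0.1:11434/api/tags
+```
+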
## References
[Change Docker Desktop Settings on Mac](https://docs.docker.com/desktop/settings/mac/) Search for "x86" in that page.
diff --git a/backend/config.py b/backend/config.py
index 1dabe48ae..c5a79f57a 100644
--- a/backend/config.py
+++ b/backend/config.py
@@ -30,7 +30,7 @@ if ENV == "prod":
# WEBUI_VERSION
####################################
-WEBUI_VERSION = os.environ.get("WEBUI_VERSION", "v1.0.0-alpha.33")
+WEBUI_VERSION = os.environ.get("WEBUI_VERSION", "v1.0.0-alpha.35")
####################################
# WEBUI_AUTH
diff --git a/src/lib/components/chat/MessageInput.svelte b/src/lib/components/chat/MessageInput.svelte
index 172485ca2..bb941c90e 100644
--- a/src/lib/components/chat/MessageInput.svelte
+++ b/src/lib/components/chat/MessageInput.svelte
@@ -2,6 +2,7 @@
import { settings } from '$lib/stores';
import toast from 'svelte-french-toast';
import Suggestions from './MessageInput/Suggestions.svelte';
+ import { onMount } from 'svelte';
export let submitPrompt: Function;
export let stopResponse: Function;
@@ -11,6 +12,7 @@
let filesInputElement;
let inputFiles;
+ let dragged = false;
export let files = [];
@@ -82,12 +84,78 @@
}
}
};
+
+ onMount(() => {
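+ // Use the whole page body as the drop target so images can be dropped anywhere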
+ const dropZone = document.querySelector('body');
+
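+ // Show the drop overlay while a file is dragged over the page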
+ dropZone?.addEventListener('dragover', (e) => {
+ e.preventDefault();
+ dragged = true;
+ });
+
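+ // Handle a drop: only the first file is read, and only GIF/JPEG/PNG images are accepted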
+ dropZone?.addEventListener('drop', (e) => {
+ e.preventDefault();
+ console.log(e);
+
+ if (e.dataTransfer?.files) {
+ let reader = new FileReader();
+
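+ // When the file has been read, append it to the message as a data-URL image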
+ reader.onload = (event) => {
+ files = [
+ ...files,
+ {
+ type: 'image',
+ url: `${event.target.result}`
+ }
+ ];
+ };
+
+ if (e.dataTransfer.files.length > 0) {
+ if (['image/gif', 'image/jpeg', 'image/png'].includes(e.dataTransfer.files[0]['type'])) {
+ reader.readAsDataURL(e.dataTransfer.files[0]);
+ } else {
+ toast.error(`Unsupported File Type '${e.dataTransfer.files[0]['type']}'.`);
+ }
+ }
+ }
+
+ dragged = false;
+ });
+
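+ // Hide the overlay when the drag leaves the page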
+ dropZone?.addEventListener('dragleave', () => {
+ dragged = false;
+ });
+ });
</script>

+{#if dragged}
+	<div class="fixed w-full h-full flex z-50 pointer-events-none">
+		<div class="m-auto text-center">
+			<div class="text-6xl mb-3">🏞️</div>
+			<div class="text-2xl font-semibold">Add Images</div>
+			<div class="mt-2 text-sm">Drop any images here to add to the conversation</div>
+		</div>
+	</div>
+{/if}
- Oops! It seems like your Ollama needs a little attention.
- We encountered a connection issue or noticed that you're running an outdated version. Please
- update to
- {requiredOllamaVersion} or above.
-
+
+ Oops! It seems like your Ollama needs a little attention. We either encountered a connection issue or noticed that you're running an older version. Please make sure you're on Ollama {requiredOllamaVersion} or above, or check your connection.
+