diff --git a/example.env b/.env.example
similarity index 83%
rename from example.env
rename to .env.example
index 4a4fdaa6c..de763f31c 100644
--- a/example.env
+++ b/.env.example
@@ -5,6 +5,8 @@ OLLAMA_API_BASE_URL='http://localhost:11434/api'
OPENAI_API_BASE_URL=''
OPENAI_API_KEY=''
+# AUTOMATIC1111_BASE_URL="http://localhost:7860"
+
# DO NOT TRACK
SCARF_NO_ANALYTICS=true
DO_NOT_TRACK=true
\ No newline at end of file
diff --git a/README.md b/README.md
index ef18a0acc..7645418ad 100644
--- a/README.md
+++ b/README.md
@@ -283,7 +283,7 @@ git clone https://github.com/open-webui/open-webui.git
cd open-webui/
# Copying required .env file
-cp -RPp example.env .env
+cp -RPp .env.example .env
# Building Frontend Using Node
npm i
diff --git a/backend/apps/images/main.py b/backend/apps/images/main.py
new file mode 100644
index 000000000..998af3ddb
--- /dev/null
+++ b/backend/apps/images/main.py
@@ -0,0 +1,165 @@
+import os
+import requests
+from fastapi import (
+ FastAPI,
+ Request,
+ Depends,
+ HTTPException,
+ status,
+ UploadFile,
+ File,
+ Form,
+)
+from fastapi.middleware.cors import CORSMiddleware
+from faster_whisper import WhisperModel
+
+from constants import ERROR_MESSAGES
+from utils.utils import (
+ get_current_user,
+ get_admin_user,
+)
+from utils.misc import calculate_sha256
+from typing import Optional
+from pydantic import BaseModel
+from config import AUTOMATIC1111_BASE_URL
+
+app = FastAPI()
+app.add_middleware(
+ CORSMiddleware,
+ allow_origins=["*"],
+ allow_credentials=True,
+ allow_methods=["*"],
+ allow_headers=["*"],
+)
+
+app.state.AUTOMATIC1111_BASE_URL = AUTOMATIC1111_BASE_URL
+app.state.ENABLED = app.state.AUTOMATIC1111_BASE_URL != ""
+
+
+@app.get("/enabled", response_model=bool)
+async def get_enable_status(request: Request, user=Depends(get_admin_user)):
+ return app.state.ENABLED
+
+
+@app.get("/enabled/toggle", response_model=bool)
+async def toggle_enabled(request: Request, user=Depends(get_admin_user)):
+    try:
+        requests.head(app.state.AUTOMATIC1111_BASE_URL)
+        app.state.ENABLED = not app.state.ENABLED
+        return app.state.ENABLED
+    except Exception as e:
+        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=ERROR_MESSAGES.DEFAULT(e))
+
+
+class UrlUpdateForm(BaseModel):
+ url: str
+
+
+@app.get("/url")
+async def get_openai_url(user=Depends(get_admin_user)):
+ return {"AUTOMATIC1111_BASE_URL": app.state.AUTOMATIC1111_BASE_URL}
+
+
+@app.post("/url/update")
+async def update_openai_url(form_data: UrlUpdateForm, user=Depends(get_admin_user)):
+
+ if form_data.url == "":
+ app.state.AUTOMATIC1111_BASE_URL = AUTOMATIC1111_BASE_URL
+ else:
+ app.state.AUTOMATIC1111_BASE_URL = form_data.url.strip("/")
+
+ return {
+ "AUTOMATIC1111_BASE_URL": app.state.AUTOMATIC1111_BASE_URL,
+ "status": True,
+ }
+
+
+@app.get("/models")
+def get_models(user=Depends(get_current_user)):
+    try:
+        r = requests.get(url=f"{app.state.AUTOMATIC1111_BASE_URL}/sdapi/v1/sd-models")
+        r.raise_for_status()  # surface non-2xx responses instead of parsing error pages
+        return r.json()
+    except Exception as e:
+        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=ERROR_MESSAGES.DEFAULT(e))
+
+
+@app.get("/models/default")
+async def get_default_model(user=Depends(get_admin_user)):
+    try:
+        r = requests.get(url=f"{app.state.AUTOMATIC1111_BASE_URL}/sdapi/v1/options")
+        r.raise_for_status()
+        options = r.json()
+        return {"model": options["sd_model_checkpoint"]}
+    except Exception as e:
+        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=ERROR_MESSAGES.DEFAULT(e))
+
+
+class UpdateModelForm(BaseModel):
+ model: str
+
+
+def set_model_handler(model: str):
+ r = requests.get(url=f"{app.state.AUTOMATIC1111_BASE_URL}/sdapi/v1/options")
+ options = r.json()
+
+ if model != options["sd_model_checkpoint"]:
+ options["sd_model_checkpoint"] = model
+ r = requests.post(
+ url=f"{app.state.AUTOMATIC1111_BASE_URL}/sdapi/v1/options", json=options
+ )
+
+ return options
+
+
+@app.post("/models/default/update")
+def update_default_model(
+ form_data: UpdateModelForm,
+ user=Depends(get_current_user),
+):
+ return set_model_handler(form_data.model)
+
+
+class GenerateImageForm(BaseModel):
+ model: Optional[str] = None
+ prompt: str
+ n: int = 1
+ size: str = "512x512"
+ negative_prompt: Optional[str] = None
+
+
+@app.post("/generations")
+def generate_image(
+    form_data: GenerateImageForm,
+    user=Depends(get_current_user),
+):
+    """Proxy a txt2img request to the configured AUTOMATIC1111 server."""
+    try:
+        # Optionally switch the active checkpoint before generating.
+        if form_data.model:
+            set_model_handler(form_data.model)
+
+        # "512x512" -> (512, 512); malformed sizes raise and map to 400 below.
+        width, height = map(int, form_data.size.split("x"))
+
+        data = {
+            "prompt": form_data.prompt,
+            "batch_size": form_data.n,
+            "width": width,
+            "height": height,
+        }
+
+        if form_data.negative_prompt is not None:
+            data["negative_prompt"] = form_data.negative_prompt
+
+        r = requests.post(
+            url=f"{app.state.AUTOMATIC1111_BASE_URL}/sdapi/v1/txt2img",
+            json=data,
+        )
+        r.raise_for_status()
+        return r.json()
+    except Exception as e:
+        # `r` may be unbound when the request itself fails, so use a fixed status.
+        raise HTTPException(
+            status_code=status.HTTP_400_BAD_REQUEST, detail=ERROR_MESSAGES.DEFAULT(e)
+        )
diff --git a/backend/apps/web/main.py b/backend/apps/web/main.py
index 761a11cc9..bd14f4bda 100644
--- a/backend/apps/web/main.py
+++ b/backend/apps/web/main.py
@@ -57,7 +57,6 @@ app.include_router(utils.router, prefix="/utils", tags=["utils"])
async def get_status():
return {
"status": True,
- "version": WEBUI_VERSION,
"auth": WEBUI_AUTH,
"default_models": app.state.DEFAULT_MODELS,
"default_prompt_suggestions": app.state.DEFAULT_PROMPT_SUGGESTIONS,
diff --git a/backend/config.py b/backend/config.py
index 8167d4f13..caf2cc457 100644
--- a/backend/config.py
+++ b/backend/config.py
@@ -185,3 +185,10 @@ Query: [query]"""
WHISPER_MODEL = os.getenv("WHISPER_MODEL", "base")
WHISPER_MODEL_DIR = os.getenv("WHISPER_MODEL_DIR", f"{CACHE_DIR}/whisper/models")
+
+
+####################################
+# Images
+####################################
+
+AUTOMATIC1111_BASE_URL = os.getenv("AUTOMATIC1111_BASE_URL", "")
diff --git a/backend/main.py b/backend/main.py
index 3a28670ef..d1fb0c205 100644
--- a/backend/main.py
+++ b/backend/main.py
@@ -11,10 +11,10 @@ from starlette.exceptions import HTTPException as StarletteHTTPException
from apps.ollama.main import app as ollama_app
from apps.openai.main import app as openai_app
from apps.audio.main import app as audio_app
-
+from apps.images.main import app as images_app
+from apps.rag.main import app as rag_app
from apps.web.main import app as webui_app
-from apps.rag.main import app as rag_app
from config import ENV, FRONTEND_BUILD_DIR
@@ -58,10 +58,21 @@ app.mount("/api/v1", webui_app)
app.mount("/ollama/api", ollama_app)
app.mount("/openai/api", openai_app)
+app.mount("/images/api/v1", images_app)
app.mount("/audio/api/v1", audio_app)
app.mount("/rag/api/v1", rag_app)
+@app.get("/api/config")
+async def get_app_config():
+ return {
+ "status": True,
+ "images": images_app.state.ENABLED,
+ "default_models": webui_app.state.DEFAULT_MODELS,
+ "default_prompt_suggestions": webui_app.state.DEFAULT_PROMPT_SUGGESTIONS,
+ }
+
+
app.mount(
"/",
SPAStaticFiles(directory=FRONTEND_BUILD_DIR, html=True),
diff --git a/package.json b/package.json
index 20159f9c1..edc4762fc 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "open-webui",
- "version": "0.0.1",
+  "version": "1.0.0-alpha.101",
"private": true,
"scripts": {
"dev": "vite dev --host",
diff --git a/src/lib/apis/images/index.ts b/src/lib/apis/images/index.ts
new file mode 100644
index 000000000..b25499d64
--- /dev/null
+++ b/src/lib/apis/images/index.ts
@@ -0,0 +1,266 @@
+import { IMAGES_API_BASE_URL } from '$lib/constants';
+
+export const getImageGenerationEnabledStatus = async (token: string = '') => {
+ let error = null;
+
+ const res = await fetch(`${IMAGES_API_BASE_URL}/enabled`, {
+ method: 'GET',
+ headers: {
+ Accept: 'application/json',
+ 'Content-Type': 'application/json',
+ ...(token && { authorization: `Bearer ${token}` })
+ }
+ })
+ .then(async (res) => {
+ if (!res.ok) throw await res.json();
+ return res.json();
+ })
+ .catch((err) => {
+ console.log(err);
+ if ('detail' in err) {
+ error = err.detail;
+ } else {
+ error = 'Server connection failed';
+ }
+ return null;
+ });
+
+ if (error) {
+ throw error;
+ }
+
+ return res;
+};
+
+export const toggleImageGenerationEnabledStatus = async (token: string = '') => {
+ let error = null;
+
+ const res = await fetch(`${IMAGES_API_BASE_URL}/enabled/toggle`, {
+ method: 'GET',
+ headers: {
+ Accept: 'application/json',
+ 'Content-Type': 'application/json',
+ ...(token && { authorization: `Bearer ${token}` })
+ }
+ })
+ .then(async (res) => {
+ if (!res.ok) throw await res.json();
+ return res.json();
+ })
+ .catch((err) => {
+ console.log(err);
+ if ('detail' in err) {
+ error = err.detail;
+ } else {
+ error = 'Server connection failed';
+ }
+ return null;
+ });
+
+ if (error) {
+ throw error;
+ }
+
+ return res;
+};
+
+export const getAUTOMATIC1111Url = async (token: string = '') => {
+ let error = null;
+
+ const res = await fetch(`${IMAGES_API_BASE_URL}/url`, {
+ method: 'GET',
+ headers: {
+ Accept: 'application/json',
+ 'Content-Type': 'application/json',
+ ...(token && { authorization: `Bearer ${token}` })
+ }
+ })
+ .then(async (res) => {
+ if (!res.ok) throw await res.json();
+ return res.json();
+ })
+ .catch((err) => {
+ console.log(err);
+ if ('detail' in err) {
+ error = err.detail;
+ } else {
+ error = 'Server connection failed';
+ }
+ return null;
+ });
+
+ if (error) {
+ throw error;
+ }
+
+ return res.AUTOMATIC1111_BASE_URL;
+};
+
+export const updateAUTOMATIC1111Url = async (token: string = '', url: string) => {
+ let error = null;
+
+ const res = await fetch(`${IMAGES_API_BASE_URL}/url/update`, {
+ method: 'POST',
+ headers: {
+ Accept: 'application/json',
+ 'Content-Type': 'application/json',
+ ...(token && { authorization: `Bearer ${token}` })
+ },
+ body: JSON.stringify({
+ url: url
+ })
+ })
+ .then(async (res) => {
+ if (!res.ok) throw await res.json();
+ return res.json();
+ })
+ .catch((err) => {
+ console.log(err);
+ if ('detail' in err) {
+ error = err.detail;
+ } else {
+ error = 'Server connection failed';
+ }
+ return null;
+ });
+
+ if (error) {
+ throw error;
+ }
+
+ return res.AUTOMATIC1111_BASE_URL;
+};
+
+export const getDiffusionModels = async (token: string = '') => {
+ let error = null;
+
+ const res = await fetch(`${IMAGES_API_BASE_URL}/models`, {
+ method: 'GET',
+ headers: {
+ Accept: 'application/json',
+ 'Content-Type': 'application/json',
+ ...(token && { authorization: `Bearer ${token}` })
+ }
+ })
+ .then(async (res) => {
+ if (!res.ok) throw await res.json();
+ return res.json();
+ })
+ .catch((err) => {
+ console.log(err);
+ if ('detail' in err) {
+ error = err.detail;
+ } else {
+ error = 'Server connection failed';
+ }
+ return null;
+ });
+
+ if (error) {
+ throw error;
+ }
+
+ return res;
+};
+
+export const getDefaultDiffusionModel = async (token: string = '') => {
+ let error = null;
+
+ const res = await fetch(`${IMAGES_API_BASE_URL}/models/default`, {
+ method: 'GET',
+ headers: {
+ Accept: 'application/json',
+ 'Content-Type': 'application/json',
+ ...(token && { authorization: `Bearer ${token}` })
+ }
+ })
+ .then(async (res) => {
+ if (!res.ok) throw await res.json();
+ return res.json();
+ })
+ .catch((err) => {
+ console.log(err);
+ if ('detail' in err) {
+ error = err.detail;
+ } else {
+ error = 'Server connection failed';
+ }
+ return null;
+ });
+
+ if (error) {
+ throw error;
+ }
+
+ return res.model;
+};
+
+export const updateDefaultDiffusionModel = async (token: string = '', model: string) => {
+ let error = null;
+
+ const res = await fetch(`${IMAGES_API_BASE_URL}/models/default/update`, {
+ method: 'POST',
+ headers: {
+ Accept: 'application/json',
+ 'Content-Type': 'application/json',
+ ...(token && { authorization: `Bearer ${token}` })
+ },
+ body: JSON.stringify({
+ model: model
+ })
+ })
+ .then(async (res) => {
+ if (!res.ok) throw await res.json();
+ return res.json();
+ })
+ .catch((err) => {
+ console.log(err);
+ if ('detail' in err) {
+ error = err.detail;
+ } else {
+ error = 'Server connection failed';
+ }
+ return null;
+ });
+
+ if (error) {
+ throw error;
+ }
+
+ return res.model;
+};
+
+export const imageGenerations = async (token: string = '', prompt: string) => {
+ let error = null;
+
+ const res = await fetch(`${IMAGES_API_BASE_URL}/generations`, {
+ method: 'POST',
+ headers: {
+ Accept: 'application/json',
+ 'Content-Type': 'application/json',
+ ...(token && { authorization: `Bearer ${token}` })
+ },
+ body: JSON.stringify({
+ prompt: prompt
+ })
+ })
+ .then(async (res) => {
+ if (!res.ok) throw await res.json();
+ return res.json();
+ })
+ .catch((err) => {
+ console.log(err);
+ if ('detail' in err) {
+ error = err.detail;
+ } else {
+ error = 'Server connection failed';
+ }
+ return null;
+ });
+
+ if (error) {
+ throw error;
+ }
+
+ return res;
+};
diff --git a/src/lib/apis/index.ts b/src/lib/apis/index.ts
index 915121661..c20107ce2 100644
--- a/src/lib/apis/index.ts
+++ b/src/lib/apis/index.ts
@@ -1,9 +1,9 @@
-import { WEBUI_API_BASE_URL } from '$lib/constants';
+import { WEBUI_BASE_URL } from '$lib/constants';
export const getBackendConfig = async () => {
let error = null;
- const res = await fetch(`${WEBUI_API_BASE_URL}/`, {
+ const res = await fetch(`${WEBUI_BASE_URL}/api/config`, {
method: 'GET',
headers: {
'Content-Type': 'application/json'
diff --git a/src/lib/components/chat/Messages.svelte b/src/lib/components/chat/Messages.svelte
index b02ba1166..64574530f 100644
--- a/src/lib/components/chat/Messages.svelte
+++ b/src/lib/components/chat/Messages.svelte
@@ -11,6 +11,7 @@
import ResponseMessage from './Messages/ResponseMessage.svelte';
import Placeholder from './Messages/Placeholder.svelte';
import Spinner from '../common/Spinner.svelte';
+ import { imageGenerations } from '$lib/apis/images';
export let chatId = '';
export let sendPrompt: Function;
@@ -308,6 +309,16 @@
{copyToClipboard}
{continueGeneration}
{regenerateResponse}
+ on:save={async (e) => {
+ console.log('save', e);
+
+ const message = e.detail;
+ history.messages[message.id] = message;
+ await updateChatById(localStorage.token, chatId, {
+ messages: messages,
+ history: history
+ });
+ }}
/>
{/if}
diff --git a/src/lib/components/chat/Messages/ResponseMessage.svelte b/src/lib/components/chat/Messages/ResponseMessage.svelte
index ca64575d5..e9dedc12f 100644
--- a/src/lib/components/chat/Messages/ResponseMessage.svelte
+++ b/src/lib/components/chat/Messages/ResponseMessage.svelte
@@ -2,21 +2,25 @@
import toast from 'svelte-french-toast';
import dayjs from 'dayjs';
import { marked } from 'marked';
- import { settings } from '$lib/stores';
import tippy from 'tippy.js';
import auto_render from 'katex/dist/contrib/auto-render.mjs';
import 'katex/dist/katex.min.css';
+ import { createEventDispatcher } from 'svelte';
import { onMount, tick } from 'svelte';
+ const dispatch = createEventDispatcher();
+
+ import { config, settings } from '$lib/stores';
+ import { synthesizeOpenAISpeech } from '$lib/apis/openai';
+ import { imageGenerations } from '$lib/apis/images';
+ import { extractSentences } from '$lib/utils';
+
import Name from './Name.svelte';
import ProfileImage from './ProfileImage.svelte';
import Skeleton from './Skeleton.svelte';
import CodeBlock from './CodeBlock.svelte';
- import { synthesizeOpenAISpeech } from '$lib/apis/openai';
- import { extractSentences } from '$lib/utils';
-
export let modelfiles = [];
export let message;
export let siblings;
@@ -43,6 +47,8 @@
let loadingSpeech = false;
+ let generatingImage = false;
+
$: tokens = marked.lexer(message.content);
const renderer = new marked.Renderer();
@@ -267,6 +273,23 @@
renderStyling();
};
+ const generateImage = async (message) => {
+ generatingImage = true;
+ const res = await imageGenerations(localStorage.token, message.content);
+ console.log(res);
+
+ if (res) {
+ message.files = res.images.map((image) => ({
+ type: 'image',
+ url: `data:image/png;base64,${image}`
+ }));
+
+ dispatch('save', message);
+ }
+
+ generatingImage = false;
+ };
+
onMount(async () => {
await tick();
renderStyling();
@@ -295,6 +318,18 @@
{#if message.content === ''}