import os
import sys
import logging
import chromadb
from chromadb import Settings
from base64 import b64encode
from bs4 import BeautifulSoup
from pathlib import Path
import json
import yaml
import markdown
import requests
import shutil
from secrets import token_bytes

from constants import ERROR_MESSAGES

try:
    from dotenv import load_dotenv, find_dotenv

    load_dotenv(find_dotenv("../.env"))
except ImportError:
    # Logging is only configured further down, so fall back to print at import time.
    print("dotenv not installed, skipping...")


WEBUI_NAME = "Open WebUI"
WEBUI_FAVICON_URL = "https://openwebui.com/favicon.png"

shutil.copyfile("../build/favicon.png", "./static/favicon.png")

####################################
# ENV (dev,test,prod)
####################################

ENV = os.environ.get("ENV", "dev")

try:
    with open("../package.json", "r") as f:
        PACKAGE_DATA = json.load(f)
except Exception:
    PACKAGE_DATA = {"version": "0.0.0"}

VERSION = PACKAGE_DATA["version"]
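
# CHANGELOG.md is expected to follow a "Keep a Changelog" style layout that the
# parser below relies on, roughly:
#
#   ## [0.1.0] - 2024-01-01
#
#   ### Added
#
#   - **Feature**: short description.
#
# Each "## [version] - date" heading becomes a key in CHANGELOG, and each h3
# section ("Added", "Fixed", ...) is parsed into {"title", "content", "raw"} items.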


# Function to parse each changelog section ("Added", "Fixed", ...) into a list of items
def parse_section(section):
    items = []
    for li in section.find_all("li"):
        # Extract raw HTML string
        raw_html = str(li)
        # Extract text without HTML tags
        text = li.get_text(separator=" ", strip=True)
        # Split into title and content
        parts = text.split(": ", 1)
        title = parts[0].strip() if len(parts) > 1 else ""
        content = parts[1].strip() if len(parts) > 1 else text
        items.append({"title": title, "content": content, "raw": raw_html})
    return items


try:
    with open("../CHANGELOG.md", "r") as file:
        changelog_content = file.read()
except Exception:
    changelog_content = ""

# Convert markdown content to HTML
html_content = markdown.markdown(changelog_content)

# Parse the HTML content
soup = BeautifulSoup(html_content, "html.parser")

# Initialize JSON structure
changelog_json = {}

# Iterate over each version (h2 headings such as "## [0.1.0] - 2024-01-01")
for version in soup.find_all("h2"):
    version_number = version.get_text().strip().split(" - ")[0][1:-1]  # Remove brackets
    date = version.get_text().strip().split(" - ")[1]

    version_data = {"date": date}

    # Find the next sibling that is a h3 tag (section title)
    current = version.find_next_sibling()

    while current and current.name != "h2":
        if current.name == "h3":
            section_title = current.get_text().lower()  # e.g., "added", "fixed"
            section_items = parse_section(current.find_next_sibling("ul"))
            version_data[section_title] = section_items

        # Move to the next element
        current = current.find_next_sibling()

    changelog_json[version_number] = version_data

CHANGELOG = changelog_json


####################################
# LOGGING
####################################

log_levels = ["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG"]

GLOBAL_LOG_LEVEL = os.environ.get("GLOBAL_LOG_LEVEL", "").upper()
if GLOBAL_LOG_LEVEL in log_levels:
    logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL, force=True)
else:
    GLOBAL_LOG_LEVEL = "INFO"

log = logging.getLogger(__name__)
log.info(f"GLOBAL_LOG_LEVEL: {GLOBAL_LOG_LEVEL}")

log_sources = [
    "AUDIO",
    "CONFIG",
    "DB",
    "IMAGES",
    "LITELLM",
    "MAIN",
    "MODELS",
    "OLLAMA",
    "OPENAI",
    "RAG",
]

SRC_LOG_LEVELS = {}

for source in log_sources:
    log_env_var = source + "_LOG_LEVEL"
    SRC_LOG_LEVELS[source] = os.environ.get(log_env_var, "").upper()
    if SRC_LOG_LEVELS[source] not in log_levels:
        SRC_LOG_LEVELS[source] = GLOBAL_LOG_LEVEL
    log.info(f"{log_env_var}: {SRC_LOG_LEVELS[source]}")

log.setLevel(SRC_LOG_LEVELS["CONFIG"])
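
# Each source can be tuned independently via its own env var, e.g. RAG_LOG_LEVEL=DEBUG;
# unset or invalid values fall back to GLOBAL_LOG_LEVEL.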


####################################
# CUSTOM_NAME
####################################

CUSTOM_NAME = os.environ.get("CUSTOM_NAME", "")

if CUSTOM_NAME:
    try:
        # Fetch custom branding (name and logo) for this deployment
        r = requests.get(f"https://api.openwebui.com/api/v1/custom/{CUSTOM_NAME}")
        data = r.json()
        if r.ok:
            if "logo" in data:
                WEBUI_FAVICON_URL = url = (
                    f"https://api.openwebui.com{data['logo']}"
                    if data["logo"][0] == "/"
                    else data["logo"]
                )

                r = requests.get(url, stream=True)
                if r.status_code == 200:
                    with open("./static/favicon.png", "wb") as f:
                        r.raw.decode_content = True
                        shutil.copyfileobj(r.raw, f)

            WEBUI_NAME = data["name"]
    except Exception as e:
        log.exception(e)


####################################
# DATA/FRONTEND BUILD DIR
####################################

DATA_DIR = str(Path(os.getenv("DATA_DIR", "./data")).resolve())
FRONTEND_BUILD_DIR = str(Path(os.getenv("FRONTEND_BUILD_DIR", "../build")))

try:
    with open(f"{DATA_DIR}/config.json", "r") as f:
        CONFIG_DATA = json.load(f)
except Exception:
    CONFIG_DATA = {}
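
# config.json is optional; when present it can provide UI defaults, roughly:
#   {"ui": {"prompt_suggestions": [{"title": ["...", "..."], "content": "..."}]}}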


####################################
# File Upload DIR
####################################

UPLOAD_DIR = f"{DATA_DIR}/uploads"
Path(UPLOAD_DIR).mkdir(parents=True, exist_ok=True)


####################################
# Cache DIR
####################################

CACHE_DIR = f"{DATA_DIR}/cache"
Path(CACHE_DIR).mkdir(parents=True, exist_ok=True)


####################################
# Docs DIR
####################################

DOCS_DIR = f"{DATA_DIR}/docs"
Path(DOCS_DIR).mkdir(parents=True, exist_ok=True)


####################################
# LITELLM_CONFIG
####################################


def create_config_file(file_path):
    directory = os.path.dirname(file_path)

    # Check if directory exists, if not, create it
    if not os.path.exists(directory):
        os.makedirs(directory)

    # Data to write into the YAML file
    config_data = {
        "general_settings": {},
        "litellm_settings": {},
        "model_list": [],
        "router_settings": {},
    }

    # Write data to YAML file
    with open(file_path, "w") as file:
        yaml.dump(config_data, file)


LITELLM_CONFIG_PATH = f"{DATA_DIR}/litellm/config.yaml"

if not os.path.exists(LITELLM_CONFIG_PATH):
    log.info("Config file doesn't exist. Creating...")
    create_config_file(LITELLM_CONFIG_PATH)
    log.info("Config file created successfully.")


####################################
# OLLAMA_BASE_URL
####################################

OLLAMA_API_BASE_URL = os.environ.get(
    "OLLAMA_API_BASE_URL", "http://localhost:11434/api"
)

OLLAMA_BASE_URL = os.environ.get("OLLAMA_BASE_URL", "")
KUBERNETES_SERVICE_HOST = os.environ.get("KUBERNETES_SERVICE_HOST", "")

# Fall back to the legacy OLLAMA_API_BASE_URL, stripping a trailing "/api"
if OLLAMA_BASE_URL == "" and OLLAMA_API_BASE_URL != "":
    OLLAMA_BASE_URL = (
        OLLAMA_API_BASE_URL[:-4]
        if OLLAMA_API_BASE_URL.endswith("/api")
        else OLLAMA_API_BASE_URL
    )

if ENV == "prod":
    if OLLAMA_BASE_URL == "/ollama" and KUBERNETES_SERVICE_HOST == "":
        OLLAMA_BASE_URL = "http://host.docker.internal:11434"
    else:
        OLLAMA_BASE_URL = "http://ollama-service.open-webui.svc.cluster.local:11434"

OLLAMA_BASE_URLS = os.environ.get("OLLAMA_BASE_URLS", "")
OLLAMA_BASE_URLS = OLLAMA_BASE_URLS if OLLAMA_BASE_URLS != "" else OLLAMA_BASE_URL

OLLAMA_BASE_URLS = [url.strip() for url in OLLAMA_BASE_URLS.split(";")]
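
# Multiple Ollama endpoints can be supplied as a semicolon-separated list, e.g.:
#   OLLAMA_BASE_URLS="http://ollama-1:11434;http://ollama-2:11434"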


####################################
# OPENAI_API
####################################

OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", "")
OPENAI_API_BASE_URL = os.environ.get("OPENAI_API_BASE_URL", "")

if OPENAI_API_BASE_URL == "":
    OPENAI_API_BASE_URL = "https://api.openai.com/v1"

OPENAI_API_KEYS = os.environ.get("OPENAI_API_KEYS", "")
OPENAI_API_KEYS = OPENAI_API_KEYS if OPENAI_API_KEYS != "" else OPENAI_API_KEY

OPENAI_API_KEYS = [key.strip() for key in OPENAI_API_KEYS.split(";")]

OPENAI_API_BASE_URLS = os.environ.get("OPENAI_API_BASE_URLS", "")
OPENAI_API_BASE_URLS = (
    OPENAI_API_BASE_URLS if OPENAI_API_BASE_URLS != "" else OPENAI_API_BASE_URL
)

OPENAI_API_BASE_URLS = [
    url.strip() if url != "" else "https://api.openai.com/v1"
    for url in OPENAI_API_BASE_URLS.split(";")
]
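
# Multiple OpenAI-compatible endpoints and keys can likewise be supplied as
# semicolon-separated lists, e.g. (placeholder values):
#   OPENAI_API_BASE_URLS="https://api.openai.com/v1;http://localhost:8080/v1"
#   OPENAI_API_KEYS="sk-xxxx;sk-yyyy"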


####################################
# WEBUI
####################################

ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", "True").lower() == "true"
DEFAULT_MODELS = os.environ.get("DEFAULT_MODELS", None)

DEFAULT_PROMPT_SUGGESTIONS = (
    CONFIG_DATA["ui"]["prompt_suggestions"]
    if "ui" in CONFIG_DATA
    and "prompt_suggestions" in CONFIG_DATA["ui"]
    and isinstance(CONFIG_DATA["ui"]["prompt_suggestions"], list)
    else [
        {
            "title": ["Help me study", "vocabulary for a college entrance exam"],
            "content": "Help me study vocabulary: write a sentence for me to fill in the blank, and I'll try to pick the correct option.",
        },
        {
            "title": ["Give me ideas", "for what to do with my kids' art"],
            "content": "What are 5 creative things I could do with my kids' art? I don't want to throw them away, but it's also so much clutter.",
        },
        {
            "title": ["Tell me a fun fact", "about the Roman Empire"],
            "content": "Tell me a random fun fact about the Roman Empire",
        },
        {
            "title": ["Show me a code snippet", "of a website's sticky header"],
            "content": "Show me a code snippet of a website's sticky header in CSS and JavaScript.",
        },
    ]
)

DEFAULT_USER_ROLE = os.getenv("DEFAULT_USER_ROLE", "pending")

USER_PERMISSIONS_CHAT_DELETION = (
    os.environ.get("USER_PERMISSIONS_CHAT_DELETION", "True").lower() == "true"
)

USER_PERMISSIONS = {"chat": {"deletion": USER_PERMISSIONS_CHAT_DELETION}}


MODEL_FILTER_ENABLED = os.environ.get("MODEL_FILTER_ENABLED", "False").lower() == "true"
MODEL_FILTER_LIST = os.environ.get("MODEL_FILTER_LIST", "")
MODEL_FILTER_LIST = [model.strip() for model in MODEL_FILTER_LIST.split(";")]
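
# When MODEL_FILTER_ENABLED is true, this presumably restricts the models exposed in
# the UI to the semicolon-separated list, e.g. MODEL_FILTER_LIST="llama2:13b;mistral:latest".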

WEBHOOK_URL = os.environ.get("WEBHOOK_URL", "")


####################################
# WEBUI_VERSION
####################################

WEBUI_VERSION = os.environ.get("WEBUI_VERSION", "v1.0.0-alpha.100")


####################################
# WEBUI_AUTH (Required for security)
####################################

WEBUI_AUTH = True

# Name of a request header (e.g. set by an authenticating reverse proxy) whose value
# is trusted as the signed-in user's email address.
WEBUI_AUTH_TRUSTED_EMAIL_HEADER = os.environ.get(
    "WEBUI_AUTH_TRUSTED_EMAIL_HEADER", None
)


####################################
# WEBUI_SECRET_KEY
####################################

WEBUI_SECRET_KEY = os.environ.get(
    "WEBUI_SECRET_KEY",
    os.environ.get(
        "WEBUI_JWT_SECRET_KEY", "t0p-s3cr3t"
    ),  # DEPRECATED: remove at next major version
)

if WEBUI_AUTH and WEBUI_SECRET_KEY == "":
    raise ValueError(ERROR_MESSAGES.ENV_VAR_NOT_FOUND)
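
# Note: the default "t0p-s3cr3t" value is only a development fallback; in production,
# set WEBUI_SECRET_KEY to a random value, e.g. WEBUI_SECRET_KEY="$(openssl rand -hex 32)".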


####################################
# RAG
####################################

CHROMA_DATA_PATH = f"{DATA_DIR}/vector_db"
# This uses the model defined in the Dockerfile ENV variable. If you don't use Docker
# or a Docker-based deployment such as Kubernetes, the default embedding model
# (all-MiniLM-L6-v2) will be used.
RAG_EMBEDDING_MODEL = os.environ.get("RAG_EMBEDDING_MODEL", "all-MiniLM-L6-v2")
# Device type for embedding models: "cpu" (default), "cuda" (NVIDIA GPU required) or
# "mps" (Apple Silicon) - choosing the right device can lead to better performance.
RAG_EMBEDDING_MODEL_DEVICE_TYPE = os.environ.get(
    "RAG_EMBEDDING_MODEL_DEVICE_TYPE", "cpu"
)

CHROMA_CLIENT = chromadb.PersistentClient(
    path=CHROMA_DATA_PATH,
    settings=Settings(allow_reset=True, anonymized_telemetry=False),
)

CHUNK_SIZE = 1500
CHUNK_OVERLAP = 100
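
# Chunking parameters, presumably used when splitting uploaded documents into
# overlapping chunks before they are embedded into the vector database.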


RAG_TEMPLATE = """Use the following context as your learned knowledge, inside <context></context> XML tags.
<context>
    [context]
</context>

When answering the user:
- If you don't know, just say that you don't know.
- If you are not sure, ask for clarification.
Avoid mentioning that you obtained the information from the context.
Answer according to the language of the user's question.

Given the context information, answer the query.
Query: [query]"""
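
# [context] and [query] are template placeholders, presumably substituted with the
# retrieved document chunks and the user's question at query time.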


####################################
# Transcribe
####################################

WHISPER_MODEL = os.getenv("WHISPER_MODEL", "base")
WHISPER_MODEL_DIR = os.getenv("WHISPER_MODEL_DIR", f"{CACHE_DIR}/whisper/models")


####################################
# Images
####################################

AUTOMATIC1111_BASE_URL = os.getenv("AUTOMATIC1111_BASE_URL", "")
COMFYUI_BASE_URL = os.getenv("COMFYUI_BASE_URL", "")
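
# Typical local values would be e.g. AUTOMATIC1111_BASE_URL="http://localhost:7860"
# and COMFYUI_BASE_URL="http://localhost:8188"; when left empty, the corresponding
# image generation backend is presumably treated as unconfigured.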