import importlib.metadata
import json
import logging
import os
import pkgutil
import shutil
import sys
from pathlib import Path

import markdown
from bs4 import BeautifulSoup

from open_webui.constants import ERROR_MESSAGES

####################################
# Load .env file
####################################
OPEN_WEBUI_DIR = Path(__file__).parent  # the path containing this file
print(OPEN_WEBUI_DIR)

BACKEND_DIR = OPEN_WEBUI_DIR.parent  # the path containing the open_webui package
BASE_DIR = BACKEND_DIR.parent  # the path containing the backend/ directory

print(BACKEND_DIR)
print(BASE_DIR)

try:
    from dotenv import find_dotenv, load_dotenv

    load_dotenv(find_dotenv(str(BASE_DIR / ".env")))
except ImportError:
    print("dotenv not installed, skipping...")

DOCKER = os.environ.get("DOCKER", "False").lower() == "true"

# Device type for embedding models: "cpu" (default), "cuda" (NVIDIA GPU required),
# or "mps" (Apple Silicon). Choosing the right one can improve performance.
USE_CUDA = os.environ.get("USE_CUDA_DOCKER", "false")

if USE_CUDA.lower() == "true":
    try:
        import torch

        assert torch.cuda.is_available(), "CUDA not available"
        DEVICE_TYPE = "cuda"
    except Exception as e:
        cuda_error = (
            "Error when testing CUDA but USE_CUDA_DOCKER is true. "
            f"Resetting USE_CUDA_DOCKER to false: {e}"
        )
        os.environ["USE_CUDA_DOCKER"] = "false"
        USE_CUDA = "false"
        DEVICE_TYPE = "cpu"
else:
    DEVICE_TYPE = "cpu"
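# Illustrative (assumption, not executed here): downstream code is expected to pass
# this value along when loading models, e.g.
#   SentenceTransformer(model_name, device=DEVICE_TYPE)
# where SentenceTransformer is just one example consumer of DEVICE_TYPE.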
####################################
# LOGGING
####################################
log_levels = ["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG"]

GLOBAL_LOG_LEVEL = os.environ.get("GLOBAL_LOG_LEVEL", "").upper()
if GLOBAL_LOG_LEVEL in log_levels:
    logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL, force=True)
else:
    GLOBAL_LOG_LEVEL = "INFO"

log = logging.getLogger(__name__)
log.info(f"GLOBAL_LOG_LEVEL: {GLOBAL_LOG_LEVEL}")
if "cuda_error" in locals():
    log.exception(cuda_error)
log_sources = [
    "AUDIO",
    "COMFYUI",
    "CONFIG",
    "DB",
    "IMAGES",
    "MAIN",
    "MODELS",
    "OLLAMA",
    "OPENAI",
    "RAG",
    "SOCKET",
    "WEBHOOK",
]
SRC_LOG_LEVELS = {}
for source in log_sources:
    log_env_var = source + "_LOG_LEVEL"
    SRC_LOG_LEVELS[source] = os.environ.get(log_env_var, "").upper()
    if SRC_LOG_LEVELS[source] not in log_levels:
        SRC_LOG_LEVELS[source] = GLOBAL_LOG_LEVEL
    log.info(f"{log_env_var}: {SRC_LOG_LEVELS[source]}")

log.setLevel(SRC_LOG_LEVELS["CONFIG"])
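# Illustrative usage: per-source levels override the global default, e.g. setting
#   GLOBAL_LOG_LEVEL=INFO RAG_LOG_LEVEL=DEBUG
# in the environment keeps most loggers at INFO while the RAG source logs at DEBUG.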
WEBUI_NAME = os.environ.get("WEBUI_NAME", "Open WebUI")
if WEBUI_NAME != "Open WebUI":
    WEBUI_NAME += " (Open WebUI)"

WEBUI_URL = os.environ.get("WEBUI_URL", "http://localhost:3000")

WEBUI_FAVICON_URL = "https://openwebui.com/favicon.png"
####################################
# ENV (dev,test,prod)
####################################
ENV = os.environ.get("ENV", "dev")
FROM_INIT_PY = os.environ.get("FROM_INIT_PY", "False").lower() == "true"

if FROM_INIT_PY:
    PACKAGE_DATA = {"version": importlib.metadata.version("open-webui")}
else:
    try:
        PACKAGE_DATA = json.loads((BASE_DIR / "package.json").read_text())
    except Exception:
        PACKAGE_DATA = {"version": "0.0.0"}

VERSION = PACKAGE_DATA["version"]

# Function to parse each section
def parse_section(section):
    items = []
    for li in section.find_all("li"):
        # Extract raw HTML string
        raw_html = str(li)

        # Extract text without HTML tags
        text = li.get_text(separator=" ", strip=True)

        # Split into title and content
        parts = text.split(": ", 1)
        title = parts[0].strip() if len(parts) > 1 else ""
        content = parts[1].strip() if len(parts) > 1 else text

        items.append({"title": title, "content": content, "raw": raw_html})
    return items
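# Example (illustrative): a changelog entry rendered as
#   <li><strong>New Feature</strong>: Added dark mode.</li>
# parses to
#   {"title": "New Feature", "content": "Added dark mode.", "raw": "<li>...</li>"}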
try:
    changelog_path = BASE_DIR / "CHANGELOG.md"
    with open(str(changelog_path.absolute()), "r", encoding="utf8") as file:
        changelog_content = file.read()
except Exception:
    changelog_content = (pkgutil.get_data("open_webui", "CHANGELOG.md") or b"").decode()
# Convert markdown content to HTML
html_content = markdown.markdown(changelog_content)

# Parse the HTML content
soup = BeautifulSoup(html_content, "html.parser")

# Initialize JSON structure
changelog_json = {}

# Iterate over each version
for version in soup.find_all("h2"):
    version_number = version.get_text().strip().split(" - ")[0][1:-1]  # Remove brackets
    date = version.get_text().strip().split(" - ")[1]

    version_data = {"date": date}

    # Find the next sibling that is a h3 tag (section title)
    current = version.find_next_sibling()

    while current and current.name != "h2":
        if current.name == "h3":
            section_title = current.get_text().lower()  # e.g., "added", "fixed"
            section_items = parse_section(current.find_next_sibling("ul"))
            version_data[section_title] = section_items

        # Move to the next element
        current = current.find_next_sibling()

    changelog_json[version_number] = version_data

CHANGELOG = changelog_json
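# Resulting structure (illustrative values), keyed by version string:
#   CHANGELOG = {
#       "0.1.0": {
#           "date": "2024-01-01",
#           "added": [{"title": ..., "content": ..., "raw": ...}],
#           "fixed": [...],
#       },
#       ...
#   }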
####################################
# SAFE_MODE
####################################
SAFE_MODE = os.environ.get("SAFE_MODE", "false").lower() == "true"
####################################
# ENABLE_FORWARD_USER_INFO_HEADERS
####################################

ENABLE_FORWARD_USER_INFO_HEADERS = (
    os.environ.get("ENABLE_FORWARD_USER_INFO_HEADERS", "False").lower() == "true"
)

####################################
# WEBUI_BUILD_HASH
####################################
WEBUI_BUILD_HASH = os.environ.get("WEBUI_BUILD_HASH", "dev-build")
####################################
# DATA/FRONTEND BUILD DIR
####################################
DATA_DIR = Path(os.getenv("DATA_DIR", BACKEND_DIR / "data")).resolve()

if FROM_INIT_PY:
    NEW_DATA_DIR = Path(os.getenv("DATA_DIR", OPEN_WEBUI_DIR / "data")).resolve()
    NEW_DATA_DIR.mkdir(parents=True, exist_ok=True)

    # Check whether an old data directory exists outside the package directory
    if DATA_DIR.exists() and DATA_DIR != NEW_DATA_DIR:
        log.info(f"Moving {DATA_DIR} to {NEW_DATA_DIR}")
        for item in DATA_DIR.iterdir():
            dest = NEW_DATA_DIR / item.name
            if item.is_dir():
                shutil.copytree(item, dest, dirs_exist_ok=True)
            else:
                shutil.copy2(item, dest)

        # Zip the old data directory as a backup
        shutil.make_archive(DATA_DIR.parent / "open_webui_data", "zip", DATA_DIR)

        # Remove the old data directory
        shutil.rmtree(DATA_DIR)

    DATA_DIR = Path(os.getenv("DATA_DIR", OPEN_WEBUI_DIR / "data"))
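# Note: when running from the installed package (FROM_INIT_PY), data found at the
# legacy backend/data location is copied into the package's data directory, archived
# next to the old location as open_webui_data.zip, and the old directory is removed.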
STATIC_DIR = Path(os.getenv("STATIC_DIR", OPEN_WEBUI_DIR / "static"))
FONTS_DIR = Path(os.getenv("FONTS_DIR", OPEN_WEBUI_DIR / "static" / "fonts"))

FRONTEND_BUILD_DIR = Path(os.getenv("FRONTEND_BUILD_DIR", BASE_DIR / "build")).resolve()
if FROM_INIT_PY:
    FRONTEND_BUILD_DIR = Path(
        os.getenv("FRONTEND_BUILD_DIR", OPEN_WEBUI_DIR / "frontend")
    ).resolve()

####################################
# Database
####################################
# Check if the legacy Ollama-WebUI database file exists and migrate it
if os.path.exists(f"{DATA_DIR}/ollama.db"):
    # Rename the file
    os.rename(f"{DATA_DIR}/ollama.db", f"{DATA_DIR}/webui.db")
    log.info("Database migrated from Ollama-WebUI successfully.")

DATABASE_URL = os.environ.get("DATABASE_URL", f"sqlite:///{DATA_DIR}/webui.db")

# Replace the postgres:// scheme with postgresql:// (required by SQLAlchemy)
if "postgres://" in DATABASE_URL:
    DATABASE_URL = DATABASE_URL.replace("postgres://", "postgresql://")
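# Example (illustrative): DATABASE_URL=postgres://user:pass@db:5432/openwebui
# becomes postgresql://user:pass@db:5432/openwebui so SQLAlchemy accepts the
# dialect name.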
DATABASE_POOL_SIZE = os.environ.get("DATABASE_POOL_SIZE", 0)

if DATABASE_POOL_SIZE == "":
    DATABASE_POOL_SIZE = 0
else:
    try:
        DATABASE_POOL_SIZE = int(DATABASE_POOL_SIZE)
    except Exception:
        DATABASE_POOL_SIZE = 0

DATABASE_POOL_MAX_OVERFLOW = os.environ.get("DATABASE_POOL_MAX_OVERFLOW", 0)

if DATABASE_POOL_MAX_OVERFLOW == "":
    DATABASE_POOL_MAX_OVERFLOW = 0
else:
    try:
        DATABASE_POOL_MAX_OVERFLOW = int(DATABASE_POOL_MAX_OVERFLOW)
    except Exception:
        DATABASE_POOL_MAX_OVERFLOW = 0

DATABASE_POOL_TIMEOUT = os.environ.get("DATABASE_POOL_TIMEOUT", 30)

if DATABASE_POOL_TIMEOUT == "":
    DATABASE_POOL_TIMEOUT = 30
else:
    try:
        DATABASE_POOL_TIMEOUT = int(DATABASE_POOL_TIMEOUT)
    except Exception:
        DATABASE_POOL_TIMEOUT = 30

DATABASE_POOL_RECYCLE = os.environ.get("DATABASE_POOL_RECYCLE", 3600)

if DATABASE_POOL_RECYCLE == "":
    DATABASE_POOL_RECYCLE = 3600
else:
    try:
        DATABASE_POOL_RECYCLE = int(DATABASE_POOL_RECYCLE)
    except Exception:
        DATABASE_POOL_RECYCLE = 3600
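# Sketch (assumption, not from this module): these values are typically forwarded
# to SQLAlchemy's engine, along the lines of
#   create_engine(DATABASE_URL, pool_size=DATABASE_POOL_SIZE,
#                 max_overflow=DATABASE_POOL_MAX_OVERFLOW,
#                 pool_timeout=DATABASE_POOL_TIMEOUT,
#                 pool_recycle=DATABASE_POOL_RECYCLE)
# The actual engine construction happens outside this file.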
RESET_CONFIG_ON_START = (
    os.environ.get("RESET_CONFIG_ON_START", "False").lower() == "true"
)

####################################
# REDIS
####################################
REDIS_URL = os.environ.get("REDIS_URL", "redis://localhost:6379/0")
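# Note: REDIS_URL also serves as the default for WEBSOCKET_REDIS_URL below, so a
# single Redis instance can back both uses unless overridden separately.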
####################################
# WEBUI_AUTH (Required for security)
####################################
WEBUI_AUTH = os.environ.get("WEBUI_AUTH", "True").lower() == "true"
WEBUI_AUTH_TRUSTED_EMAIL_HEADER = os.environ.get(
    "WEBUI_AUTH_TRUSTED_EMAIL_HEADER", None
)
WEBUI_AUTH_TRUSTED_NAME_HEADER = os.environ.get("WEBUI_AUTH_TRUSTED_NAME_HEADER", None)
####################################
# WEBUI_SECRET_KEY
####################################
WEBUI_SECRET_KEY = os.environ.get(
    "WEBUI_SECRET_KEY",
    os.environ.get(
        "WEBUI_JWT_SECRET_KEY", "t0p-s3cr3t"
    ),  # DEPRECATED: remove at next major version
)
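# Suggestion (not enforced here): replace the "t0p-s3cr3t" default in production
# with a random value, e.g. one generated by
#   python -c "import secrets; print(secrets.token_hex(32))"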
WEBUI_SESSION_COOKIE_SAME_SITE = os.environ.get("WEBUI_SESSION_COOKIE_SAME_SITE", "lax")

WEBUI_SESSION_COOKIE_SECURE = (
    os.environ.get("WEBUI_SESSION_COOKIE_SECURE", "false").lower() == "true"
)
if WEBUI_AUTH and WEBUI_SECRET_KEY == "":
    raise ValueError(ERROR_MESSAGES.ENV_VAR_NOT_FOUND)
ENABLE_WEBSOCKET_SUPPORT = (
    os.environ.get("ENABLE_WEBSOCKET_SUPPORT", "True").lower() == "true"
)

WEBSOCKET_MANAGER = os.environ.get("WEBSOCKET_MANAGER", "")

WEBSOCKET_REDIS_URL = os.environ.get("WEBSOCKET_REDIS_URL", REDIS_URL)

AIOHTTP_CLIENT_TIMEOUT = os.environ.get("AIOHTTP_CLIENT_TIMEOUT", "")

if AIOHTTP_CLIENT_TIMEOUT == "":
    AIOHTTP_CLIENT_TIMEOUT = None
else:
    try:
        AIOHTTP_CLIENT_TIMEOUT = int(AIOHTTP_CLIENT_TIMEOUT)
    except Exception:
        AIOHTTP_CLIENT_TIMEOUT = 300
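# Note (assumption): a value of None is presumably passed through to aiohttp's
# ClientTimeout(total=None), i.e. no overall request timeout, while an invalid
# value falls back to 300 seconds.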
AIOHTTP_CLIENT_TIMEOUT_OPENAI_MODEL_LIST = os.environ.get(
    "AIOHTTP_CLIENT_TIMEOUT_OPENAI_MODEL_LIST", "3"
)

if AIOHTTP_CLIENT_TIMEOUT_OPENAI_MODEL_LIST == "":
    AIOHTTP_CLIENT_TIMEOUT_OPENAI_MODEL_LIST = None
else:
    try:
        AIOHTTP_CLIENT_TIMEOUT_OPENAI_MODEL_LIST = int(
            AIOHTTP_CLIENT_TIMEOUT_OPENAI_MODEL_LIST
        )
    except Exception:
        AIOHTTP_CLIENT_TIMEOUT_OPENAI_MODEL_LIST = 3

####################################
# OFFLINE_MODE
####################################
OFFLINE_MODE = os.environ.get("OFFLINE_MODE", "false").lower() == "true"
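# Note (assumption based on the name): OFFLINE_MODE is expected to be consulted by
# other modules to skip outbound network access (e.g. model downloads); this file
# only exposes the flag.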