Timothy J. Baek 2024-09-10 01:37:36 +01:00
parent eb0e683b47
commit 28087ccf40
4 changed files with 12 additions and 47 deletions
backend/open_webui

@@ -21,14 +21,14 @@ from open_webui.config import (
     AUDIO_TTS_VOICE,
     CACHE_DIR,
     CORS_ALLOW_ORIGIN,
-    DEVICE_TYPE,
     WHISPER_MODEL,
     WHISPER_MODEL_AUTO_UPDATE,
     WHISPER_MODEL_DIR,
     AppConfig,
 )
 from open_webui.constants import ERROR_MESSAGES
-from open_webui.env import SRC_LOG_LEVELS
+from open_webui.env import SRC_LOG_LEVELS, DEVICE_TYPE
 from fastapi import Depends, FastAPI, File, HTTPException, Request, UploadFile, status
 from fastapi.middleware.cors import CORSMiddleware
 from fastapi.responses import FileResponse

@@ -41,7 +41,6 @@ from open_webui.config import (
     CHUNK_SIZE,
     CONTENT_EXTRACTION_ENGINE,
     CORS_ALLOW_ORIGIN,
-    DEVICE_TYPE,
     DOCS_DIR,
     ENABLE_RAG_HYBRID_SEARCH,
     ENABLE_RAG_LOCAL_WEB_FETCH,
@@ -84,7 +83,7 @@ from open_webui.config import (
     AppConfig,
 )
 from open_webui.constants import ERROR_MESSAGES
-from open_webui.env import SRC_LOG_LEVELS
+from open_webui.env import SRC_LOG_LEVELS, DEVICE_TYPE
 from fastapi import Depends, FastAPI, File, Form, HTTPException, UploadFile, status
 from fastapi.middleware.cors import CORSMiddleware
 from langchain.text_splitter import RecursiveCharacterTextSplitter

@@ -540,40 +540,6 @@ Path(TOOLS_DIR).mkdir(parents=True, exist_ok=True)
 FUNCTIONS_DIR = os.getenv("FUNCTIONS_DIR", f"{DATA_DIR}/functions")
 Path(FUNCTIONS_DIR).mkdir(parents=True, exist_ok=True)
-####################################
-# LITELLM_CONFIG
-####################################
-def create_config_file(file_path):
-    directory = os.path.dirname(file_path)
-    # Check if directory exists, if not, create it
-    if not os.path.exists(directory):
-        os.makedirs(directory)
-    # Data to write into the YAML file
-    config_data = {
-        "general_settings": {},
-        "litellm_settings": {},
-        "model_list": [],
-        "router_settings": {},
-    }
-    # Write data to YAML file
-    with open(file_path, "w") as file:
-        yaml.dump(config_data, file)
-LITELLM_CONFIG_PATH = f"{DATA_DIR}/litellm/config.yaml"
-# if not os.path.exists(LITELLM_CONFIG_PATH):
-#     log.info("Config file doesn't exist. Creating...")
-#     create_config_file(LITELLM_CONFIG_PATH)
-#     log.info("Config file created successfully.")
 ####################################
 # OLLAMA_BASE_URL
 ####################################
@@ -1070,15 +1036,6 @@ RAG_RERANKING_MODEL_TRUST_REMOTE_CODE = (
     os.environ.get("RAG_RERANKING_MODEL_TRUST_REMOTE_CODE", "").lower() == "true"
 )
-# device type embedding models - "cpu" (default), "cuda" (nvidia gpu required) or "mps" (apple silicon) - choosing this right can lead to better performance
-USE_CUDA = os.environ.get("USE_CUDA_DOCKER", "false")
-if USE_CUDA.lower() == "true":
-    DEVICE_TYPE = "cuda"
-else:
-    DEVICE_TYPE = "cpu"
 CHUNK_SIZE = PersistentConfig(
     "CHUNK_SIZE", "rag.chunk_size", int(os.environ.get("CHUNK_SIZE", "1500"))
 )

@@ -32,6 +32,15 @@ except ImportError:
     print("dotenv not installed, skipping...")
+# device type embedding models - "cpu" (default), "cuda" (nvidia gpu required) or "mps" (apple silicon) - choosing this right can lead to better performance
+USE_CUDA = os.environ.get("USE_CUDA_DOCKER", "false")
+if USE_CUDA.lower() == "true":
+    DEVICE_TYPE = "cuda"
+else:
+    DEVICE_TYPE = "cpu"
 ####################################
 # LOGGING
 ####################################
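
For quick reference, below is a minimal standalone sketch (assuming only the Python standard library; all names are taken from the diff above) of the device-selection logic this commit moves from open_webui/config.py into open_webui/env.py, along with the import form the routers now use. Keeping the probe in env.py means modules that only need DEVICE_TYPE no longer have to import the larger config module.

import os

# Mirror of the block added to open_webui/env.py above: the embedding device
# defaults to "cpu" and switches to "cuda" only when the container is started
# with USE_CUDA_DOCKER=true; "mps" is mentioned in the source comment but is
# not auto-detected by this block.
USE_CUDA = os.environ.get("USE_CUDA_DOCKER", "false")

if USE_CUDA.lower() == "true":
    DEVICE_TYPE = "cuda"
else:
    DEVICE_TYPE = "cpu"

# After this commit, callers import the value from open_webui.env instead of
# open_webui.config, e.g.:
#   from open_webui.env import SRC_LOG_LEVELS, DEVICE_TYPE
print(DEVICE_TYPE)  # prints "cpu" unless USE_CUDA_DOCKER=true is set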