import os
import chromadb
from chromadb import Settings
from secrets import token_bytes
from base64 import b64encode
from constants import ERROR_MESSAGES
from pathlib import Path

try:
    from dotenv import load_dotenv, find_dotenv

    load_dotenv(find_dotenv("../.env"))
except ImportError:
    print("dotenv not installed, skipping...")

####################################
# ENV (dev,test,prod)
####################################

ENV = os.environ.get("ENV", "dev")

####################################
# DATA/FRONTEND BUILD DIR
####################################

DATA_DIR = str(Path(os.getenv("DATA_DIR", "./data")).resolve())
FRONTEND_BUILD_DIR = str(Path(os.getenv("FRONTEND_BUILD_DIR", "../build")))

####################################
# File Upload DIR
####################################
UPLOAD_DIR = f"{DATA_DIR}/uploads"
Path(UPLOAD_DIR).mkdir(parents=True, exist_ok=True)

####################################
# Cache DIR
####################################
CACHE_DIR = f"{DATA_DIR}/cache"
Path(CACHE_DIR).mkdir(parents=True, exist_ok=True)

####################################
# Docs DIR
####################################
DOCS_DIR = f"{DATA_DIR}/docs"
Path(DOCS_DIR).mkdir(parents=True, exist_ok=True)

####################################
# OLLAMA_API_BASE_URL
####################################

OLLAMA_API_BASE_URL = os.environ.get(
    "OLLAMA_API_BASE_URL", "http://localhost:11434/api"
)

if ENV == "prod":
    if OLLAMA_API_BASE_URL == "/ollama/api":
        OLLAMA_API_BASE_URL = "http://host.docker.internal:11434/api"
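
# Illustrative sketch only (assumption, not the app's actual request code): one way the
# base URL above could be exercised, e.g. listing local models via Ollama's /api/tags
# endpoint. Assumes the `requests` package is available; nothing here runs at import time.
def _example_list_ollama_models():
    import requests

    response = requests.get(f"{OLLAMA_API_BASE_URL}/tags", timeout=10)
    response.raise_for_status()
    return response.json().get("models", [])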

####################################
# OPENAI_API
####################################
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", "")
OPENAI_API_BASE_URL = os.environ.get("OPENAI_API_BASE_URL", "")
if OPENAI_API_BASE_URL == "":
    OPENAI_API_BASE_URL = "https://api.openai.com/v1"

####################################
# WEBUI
####################################

ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", True)
DEFAULT_MODELS = os.environ.get("DEFAULT_MODELS", None)
DEFAULT_PROMPT_SUGGESTIONS = os.environ.get(
    "DEFAULT_PROMPT_SUGGESTIONS",
    [
        {
            "title": ["Help me study", "vocabulary for a college entrance exam"],
            "content": "Help me study vocabulary: write a sentence for me to fill in the blank, and I'll try to pick the correct option.",
        },
        {
            "title": ["Give me ideas", "for what to do with my kids' art"],
            "content": "What are 5 creative things I could do with my kids' art? I don't want to throw them away, but it's also so much clutter.",
        },
        {
            "title": ["Tell me a fun fact", "about the Roman Empire"],
            "content": "Tell me a random fun fact about the Roman Empire",
        },
        {
            "title": ["Show me a code snippet", "of a website's sticky header"],
            "content": "Show me a code snippet of a website's sticky header in CSS and JavaScript.",
        },
    ],
)
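
# Illustrative sketch only (assumption): if DEFAULT_PROMPT_SUGGESTIONS is supplied through
# the environment it arrives as a raw string rather than a list, so a caller might parse it
# as JSON. This helper is an example, not the app's actual handling.
def _example_parse_prompt_suggestions(value):
    import json

    # Values read from os.environ are strings; the in-code default above is already a list.
    if isinstance(value, str):
        return json.loads(value)
    return value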

DEFAULT_USER_ROLE = "pending"
USER_PERMISSIONS = {"chat": {"deletion": True}}

####################################
# WEBUI_VERSION
####################################

WEBUI_VERSION = os.environ.get("WEBUI_VERSION", "v1.0.0-alpha.100")

####################################
# WEBUI_AUTH (Required for security)
####################################
WEBUI_AUTH = True
####################################
# WEBUI_SECRET_KEY
####################################

WEBUI_SECRET_KEY = os.environ.get(
    "WEBUI_SECRET_KEY",
    os.environ.get(
        "WEBUI_JWT_SECRET_KEY", "t0p-s3cr3t"
    ),  # DEPRECATED: remove at next major version
)

if WEBUI_AUTH and WEBUI_SECRET_KEY == "":
    raise ValueError(ERROR_MESSAGES.ENV_VAR_NOT_FOUND)
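
# Illustrative sketch only (assumption): WEBUI_SECRET_KEY is typically used to sign session
# tokens. PyJWT is used here as an example and may differ from the app's actual auth code;
# the payload field is a placeholder.
def _example_sign_token(user_id: str) -> str:
    import jwt  # PyJWT

    return jwt.encode({"id": user_id}, WEBUI_SECRET_KEY, algorithm="HS256")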
####################################
# RAG
####################################

CHROMA_DATA_PATH = f"{DATA_DIR}/vector_db"
# This uses the model defined in the Dockerfile ENV variable. If you don't use Docker or a
# Docker-based deployment such as k8s, the default embedding model (all-MiniLM-L6-v2) will be used.
RAG_EMBEDDING_MODEL = os.environ.get("RAG_EMBEDDING_MODEL", "all-MiniLM-L6-v2")
# Device type for embedding models: "cpu" (default), "cuda" (NVIDIA GPU required), or
# "mps" (Apple Silicon). Choosing the right device can lead to better performance.
RAG_EMBEDDING_MODEL_DEVICE_TYPE = os.environ.get(
    "RAG_EMBEDDING_MODEL_DEVICE_TYPE", "cpu"
)
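
# Illustrative sketch only (assumption): the embedding model name and device type above are
# the kind of values a sentence-transformers loader consumes. This is an example, not the
# app's actual embedding setup.
def _example_load_embedding_model():
    from sentence_transformers import SentenceTransformer

    return SentenceTransformer(
        RAG_EMBEDDING_MODEL, device=RAG_EMBEDDING_MODEL_DEVICE_TYPE
    )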

CHROMA_CLIENT = chromadb.PersistentClient(
    path=CHROMA_DATA_PATH,
    settings=Settings(allow_reset=True, anonymized_telemetry=False),
)
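
# Illustrative sketch only (assumption): the persistent client above is typically used to
# create and query collections; the collection name and documents here are placeholders.
def _example_chroma_usage():
    collection = CHROMA_CLIENT.get_or_create_collection(name="example-collection")
    collection.add(ids=["doc-1"], documents=["hello world"])
    return collection.query(query_texts=["hello"], n_results=1)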
CHUNK_SIZE = 1500
CHUNK_OVERLAP = 100
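
# Illustrative sketch only (assumption): CHUNK_SIZE and CHUNK_OVERLAP are the kind of values
# a character-based text splitter consumes. LangChain is used purely as an example and may
# not match the app's actual splitter.
def _example_split_text(text: str):
    from langchain.text_splitter import RecursiveCharacterTextSplitter

    splitter = RecursiveCharacterTextSplitter(
        chunk_size=CHUNK_SIZE, chunk_overlap=CHUNK_OVERLAP
    )
    return splitter.split_text(text)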
RAG_TEMPLATE = """ Use the following context as your learned knowledge, inside <context></context> XML tags.
< context >
[ context ]
< / context >
When answer to user :
- If you don ' t know, just say that you don ' t know .
- If you don ' t know when you are not sure, ask for clarification.
Avoid mentioning that you obtained the information from the context .
And answer according to the language of the user ' s question.
Given the context information , answer the query .
Query : [ query ] """
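
# Illustrative sketch only (assumption): the [context] and [query] placeholders in
# RAG_TEMPLATE are filled with retrieved documents and the user's question before the prompt
# is sent to the model. Plain string replacement is shown as an example.
def _example_build_rag_prompt(context: str, query: str) -> str:
    return RAG_TEMPLATE.replace("[context]", context).replace("[query]", query)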

####################################
# Transcribe
####################################

WHISPER_MODEL = os.getenv("WHISPER_MODEL", "base")
WHISPER_MODEL_DIR = os.getenv("WHISPER_MODEL_DIR", f"{CACHE_DIR}/whisper/models")
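
# Illustrative sketch only (assumption): WHISPER_MODEL and WHISPER_MODEL_DIR are the kind of
# values a faster-whisper loader consumes; this may not match the app's actual transcription
# code, and "audio.wav" is a placeholder path.
def _example_transcribe(audio_path: str = "audio.wav"):
    from faster_whisper import WhisperModel

    model = WhisperModel(
        WHISPER_MODEL,
        device="cpu",
        compute_type="int8",
        download_root=WHISPER_MODEL_DIR,
    )
    segments, info = model.transcribe(audio_path)
    return [segment.text for segment in segments]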