mirror of https://github.com/open-webui/open-webui
synced 2025-06-04 03:37:35 +00:00
refac: rename WrappedConfig to PersistedConfig
This commit is contained in:
parent a0dceb06a5
commit 5d64822c84
@@ -201,7 +201,7 @@ def get_config_value(config_path: str):
 T = TypeVar("T")
 
 
-class WrappedConfig(Generic[T]):
+class PersistedConfig(Generic[T]):
     def __init__(self, env_name: str, config_path: str, env_value: T):
         self.env_name = env_name
         self.config_path = config_path
@@ -219,13 +219,13 @@ class WrappedConfig(Generic[T]):
     @property
     def __dict__(self):
         raise TypeError(
-            "WrappedConfig object cannot be converted to dict, use config_get or .value instead."
+            "PersistedConfig object cannot be converted to dict, use config_get or .value instead."
         )
 
     def __getattribute__(self, item):
         if item == "__dict__":
             raise TypeError(
-                "WrappedConfig object cannot be converted to dict, use config_get or .value instead."
+                "PersistedConfig object cannot be converted to dict, use config_get or .value instead."
            )
         return super().__getattribute__(item)
 
@@ -247,13 +247,13 @@ class WrappedConfig(Generic[T]):
 
 
 class AppConfig:
-    _state: dict[str, WrappedConfig]
+    _state: dict[str, PersistedConfig]
 
     def __init__(self):
         super().__setattr__("_state", {})
 
     def __setattr__(self, key, value):
-        if isinstance(value, WrappedConfig):
+        if isinstance(value, PersistedConfig):
             self._state[key] = value
         else:
             self._state[key].value = value
@@ -271,7 +271,7 @@ WEBUI_AUTH = os.environ.get("WEBUI_AUTH", "True").lower() == "true"
 WEBUI_AUTH_TRUSTED_EMAIL_HEADER = os.environ.get(
     "WEBUI_AUTH_TRUSTED_EMAIL_HEADER", None
 )
-JWT_EXPIRES_IN = WrappedConfig(
+JWT_EXPIRES_IN = PersistedConfig(
     "JWT_EXPIRES_IN", "auth.jwt_expiry", os.environ.get("JWT_EXPIRES_IN", "-1")
 )
 
@@ -409,7 +409,7 @@ OLLAMA_BASE_URLS = os.environ.get("OLLAMA_BASE_URLS", "")
 OLLAMA_BASE_URLS = OLLAMA_BASE_URLS if OLLAMA_BASE_URLS != "" else OLLAMA_BASE_URL
 
 OLLAMA_BASE_URLS = [url.strip() for url in OLLAMA_BASE_URLS.split(";")]
-OLLAMA_BASE_URLS = WrappedConfig(
+OLLAMA_BASE_URLS = PersistedConfig(
     "OLLAMA_BASE_URLS", "ollama.base_urls", OLLAMA_BASE_URLS
 )
 
@@ -428,7 +428,7 @@ OPENAI_API_KEYS = os.environ.get("OPENAI_API_KEYS", "")
 OPENAI_API_KEYS = OPENAI_API_KEYS if OPENAI_API_KEYS != "" else OPENAI_API_KEY
 
 OPENAI_API_KEYS = [url.strip() for url in OPENAI_API_KEYS.split(";")]
-OPENAI_API_KEYS = WrappedConfig("OPENAI_API_KEYS", "openai.api_keys", OPENAI_API_KEYS)
+OPENAI_API_KEYS = PersistedConfig("OPENAI_API_KEYS", "openai.api_keys", OPENAI_API_KEYS)
 
 OPENAI_API_BASE_URLS = os.environ.get("OPENAI_API_BASE_URLS", "")
 OPENAI_API_BASE_URLS = (
@@ -439,7 +439,7 @@ OPENAI_API_BASE_URLS = [
     url.strip() if url != "" else "https://api.openai.com/v1"
     for url in OPENAI_API_BASE_URLS.split(";")
 ]
-OPENAI_API_BASE_URLS = WrappedConfig(
+OPENAI_API_BASE_URLS = PersistedConfig(
     "OPENAI_API_BASE_URLS", "openai.api_base_urls", OPENAI_API_BASE_URLS
 )
 
@@ -458,7 +458,7 @@ OPENAI_API_BASE_URL = "https://api.openai.com/v1"
 # WEBUI
 ####################################
 
-ENABLE_SIGNUP = WrappedConfig(
+ENABLE_SIGNUP = PersistedConfig(
     "ENABLE_SIGNUP",
     "ui.enable_signup",
     (
@@ -467,11 +467,11 @@ ENABLE_SIGNUP = WrappedConfig(
         else os.environ.get("ENABLE_SIGNUP", "True").lower() == "true"
     ),
 )
-DEFAULT_MODELS = WrappedConfig(
+DEFAULT_MODELS = PersistedConfig(
     "DEFAULT_MODELS", "ui.default_models", os.environ.get("DEFAULT_MODELS", None)
 )
 
-DEFAULT_PROMPT_SUGGESTIONS = WrappedConfig(
+DEFAULT_PROMPT_SUGGESTIONS = PersistedConfig(
     "DEFAULT_PROMPT_SUGGESTIONS",
     "ui.prompt_suggestions",
     [
@@ -505,7 +505,7 @@ DEFAULT_PROMPT_SUGGESTIONS = WrappedConfig(
     ],
 )
 
-DEFAULT_USER_ROLE = WrappedConfig(
+DEFAULT_USER_ROLE = PersistedConfig(
     "DEFAULT_USER_ROLE",
     "ui.default_user_role",
     os.getenv("DEFAULT_USER_ROLE", "pending"),
@@ -515,25 +515,25 @@ USER_PERMISSIONS_CHAT_DELETION = (
     os.environ.get("USER_PERMISSIONS_CHAT_DELETION", "True").lower() == "true"
 )
 
-USER_PERMISSIONS = WrappedConfig(
+USER_PERMISSIONS = PersistedConfig(
     "USER_PERMISSIONS",
     "ui.user_permissions",
     {"chat": {"deletion": USER_PERMISSIONS_CHAT_DELETION}},
 )
 
-ENABLE_MODEL_FILTER = WrappedConfig(
+ENABLE_MODEL_FILTER = PersistedConfig(
     "ENABLE_MODEL_FILTER",
     "model_filter.enable",
     os.environ.get("ENABLE_MODEL_FILTER", "False").lower() == "true",
 )
 MODEL_FILTER_LIST = os.environ.get("MODEL_FILTER_LIST", "")
-MODEL_FILTER_LIST = WrappedConfig(
+MODEL_FILTER_LIST = PersistedConfig(
     "MODEL_FILTER_LIST",
     "model_filter.list",
     [model.strip() for model in MODEL_FILTER_LIST.split(";")],
 )
 
-WEBHOOK_URL = WrappedConfig(
+WEBHOOK_URL = PersistedConfig(
     "WEBHOOK_URL", "webhook_url", os.environ.get("WEBHOOK_URL", "")
 )
 
@@ -573,40 +573,40 @@ else:
 CHROMA_HTTP_SSL = os.environ.get("CHROMA_HTTP_SSL", "false").lower() == "true"
 # this uses the model defined in the Dockerfile ENV variable. If you dont use docker or docker based deployments such as k8s, the default embedding model will be used (sentence-transformers/all-MiniLM-L6-v2)
 
-RAG_TOP_K = WrappedConfig(
+RAG_TOP_K = PersistedConfig(
     "RAG_TOP_K", "rag.top_k", int(os.environ.get("RAG_TOP_K", "5"))
 )
-RAG_RELEVANCE_THRESHOLD = WrappedConfig(
+RAG_RELEVANCE_THRESHOLD = PersistedConfig(
     "RAG_RELEVANCE_THRESHOLD",
     "rag.relevance_threshold",
     float(os.environ.get("RAG_RELEVANCE_THRESHOLD", "0.0")),
 )
 
-ENABLE_RAG_HYBRID_SEARCH = WrappedConfig(
+ENABLE_RAG_HYBRID_SEARCH = PersistedConfig(
     "ENABLE_RAG_HYBRID_SEARCH",
     "rag.enable_hybrid_search",
     os.environ.get("ENABLE_RAG_HYBRID_SEARCH", "").lower() == "true",
 )
 
-ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION = WrappedConfig(
+ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION = PersistedConfig(
     "ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION",
     "rag.enable_web_loader_ssl_verification",
     os.environ.get("ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION", "True").lower() == "true",
 )
 
-RAG_EMBEDDING_ENGINE = WrappedConfig(
+RAG_EMBEDDING_ENGINE = PersistedConfig(
     "RAG_EMBEDDING_ENGINE",
     "rag.embedding_engine",
     os.environ.get("RAG_EMBEDDING_ENGINE", ""),
 )
 
-PDF_EXTRACT_IMAGES = WrappedConfig(
+PDF_EXTRACT_IMAGES = PersistedConfig(
     "PDF_EXTRACT_IMAGES",
     "rag.pdf_extract_images",
     os.environ.get("PDF_EXTRACT_IMAGES", "False").lower() == "true",
 )
 
-RAG_EMBEDDING_MODEL = WrappedConfig(
+RAG_EMBEDDING_MODEL = PersistedConfig(
     "RAG_EMBEDDING_MODEL",
     "rag.embedding_model",
     os.environ.get("RAG_EMBEDDING_MODEL", "sentence-transformers/all-MiniLM-L6-v2"),
@@ -621,7 +621,7 @@ RAG_EMBEDDING_MODEL_TRUST_REMOTE_CODE = (
     os.environ.get("RAG_EMBEDDING_MODEL_TRUST_REMOTE_CODE", "").lower() == "true"
 )
 
-RAG_RERANKING_MODEL = WrappedConfig(
+RAG_RERANKING_MODEL = PersistedConfig(
     "RAG_RERANKING_MODEL",
     "rag.reranking_model",
     os.environ.get("RAG_RERANKING_MODEL", ""),
@@ -665,10 +665,10 @@ if USE_CUDA.lower() == "true":
 else:
     DEVICE_TYPE = "cpu"
 
-CHUNK_SIZE = WrappedConfig(
+CHUNK_SIZE = PersistedConfig(
     "CHUNK_SIZE", "rag.chunk_size", int(os.environ.get("CHUNK_SIZE", "1500"))
 )
-CHUNK_OVERLAP = WrappedConfig(
+CHUNK_OVERLAP = PersistedConfig(
     "CHUNK_OVERLAP",
     "rag.chunk_overlap",
     int(os.environ.get("CHUNK_OVERLAP", "100")),
@@ -688,18 +688,18 @@ And answer according to the language of the user's question.
 Given the context information, answer the query.
 Query: [query]"""
 
-RAG_TEMPLATE = WrappedConfig(
+RAG_TEMPLATE = PersistedConfig(
     "RAG_TEMPLATE",
     "rag.template",
     os.environ.get("RAG_TEMPLATE", DEFAULT_RAG_TEMPLATE),
 )
 
-RAG_OPENAI_API_BASE_URL = WrappedConfig(
+RAG_OPENAI_API_BASE_URL = PersistedConfig(
     "RAG_OPENAI_API_BASE_URL",
     "rag.openai_api_base_url",
     os.getenv("RAG_OPENAI_API_BASE_URL", OPENAI_API_BASE_URL),
 )
-RAG_OPENAI_API_KEY = WrappedConfig(
+RAG_OPENAI_API_KEY = PersistedConfig(
     "RAG_OPENAI_API_KEY",
     "rag.openai_api_key",
     os.getenv("RAG_OPENAI_API_KEY", OPENAI_API_KEY),
@@ -709,7 +709,7 @@ ENABLE_RAG_LOCAL_WEB_FETCH = (
     os.getenv("ENABLE_RAG_LOCAL_WEB_FETCH", "False").lower() == "true"
 )
 
-YOUTUBE_LOADER_LANGUAGE = WrappedConfig(
+YOUTUBE_LOADER_LANGUAGE = PersistedConfig(
     "YOUTUBE_LOADER_LANGUAGE",
     "rag.youtube_loader_language",
     os.getenv("YOUTUBE_LOADER_LANGUAGE", "en").split(","),
@@ -730,49 +730,49 @@ WHISPER_MODEL_AUTO_UPDATE = (
 # Images
 ####################################
 
-IMAGE_GENERATION_ENGINE = WrappedConfig(
+IMAGE_GENERATION_ENGINE = PersistedConfig(
     "IMAGE_GENERATION_ENGINE",
     "image_generation.engine",
     os.getenv("IMAGE_GENERATION_ENGINE", ""),
 )
 
-ENABLE_IMAGE_GENERATION = WrappedConfig(
+ENABLE_IMAGE_GENERATION = PersistedConfig(
     "ENABLE_IMAGE_GENERATION",
     "image_generation.enable",
     os.environ.get("ENABLE_IMAGE_GENERATION", "").lower() == "true",
 )
-AUTOMATIC1111_BASE_URL = WrappedConfig(
+AUTOMATIC1111_BASE_URL = PersistedConfig(
     "AUTOMATIC1111_BASE_URL",
     "image_generation.automatic1111.base_url",
     os.getenv("AUTOMATIC1111_BASE_URL", ""),
 )
 
-COMFYUI_BASE_URL = WrappedConfig(
+COMFYUI_BASE_URL = PersistedConfig(
     "COMFYUI_BASE_URL",
     "image_generation.comfyui.base_url",
     os.getenv("COMFYUI_BASE_URL", ""),
 )
 
-IMAGES_OPENAI_API_BASE_URL = WrappedConfig(
+IMAGES_OPENAI_API_BASE_URL = PersistedConfig(
     "IMAGES_OPENAI_API_BASE_URL",
     "image_generation.openai.api_base_url",
     os.getenv("IMAGES_OPENAI_API_BASE_URL", OPENAI_API_BASE_URL),
 )
-IMAGES_OPENAI_API_KEY = WrappedConfig(
+IMAGES_OPENAI_API_KEY = PersistedConfig(
     "IMAGES_OPENAI_API_KEY",
     "image_generation.openai.api_key",
     os.getenv("IMAGES_OPENAI_API_KEY", OPENAI_API_KEY),
 )
 
-IMAGE_SIZE = WrappedConfig(
+IMAGE_SIZE = PersistedConfig(
     "IMAGE_SIZE", "image_generation.size", os.getenv("IMAGE_SIZE", "512x512")
 )
 
-IMAGE_STEPS = WrappedConfig(
+IMAGE_STEPS = PersistedConfig(
     "IMAGE_STEPS", "image_generation.steps", int(os.getenv("IMAGE_STEPS", 50))
 )
 
-IMAGE_GENERATION_MODEL = WrappedConfig(
+IMAGE_GENERATION_MODEL = PersistedConfig(
     "IMAGE_GENERATION_MODEL",
     "image_generation.model",
     os.getenv("IMAGE_GENERATION_MODEL", ""),
@@ -782,22 +782,22 @@ IMAGE_GENERATION_MODEL = WrappedConfig(
 # Audio
 ####################################
 
-AUDIO_OPENAI_API_BASE_URL = WrappedConfig(
+AUDIO_OPENAI_API_BASE_URL = PersistedConfig(
     "AUDIO_OPENAI_API_BASE_URL",
     "audio.openai.api_base_url",
     os.getenv("AUDIO_OPENAI_API_BASE_URL", OPENAI_API_BASE_URL),
 )
-AUDIO_OPENAI_API_KEY = WrappedConfig(
+AUDIO_OPENAI_API_KEY = PersistedConfig(
     "AUDIO_OPENAI_API_KEY",
     "audio.openai.api_key",
     os.getenv("AUDIO_OPENAI_API_KEY", OPENAI_API_KEY),
 )
-AUDIO_OPENAI_API_MODEL = WrappedConfig(
+AUDIO_OPENAI_API_MODEL = PersistedConfig(
     "AUDIO_OPENAI_API_MODEL",
     "audio.openai.api_model",
     os.getenv("AUDIO_OPENAI_API_MODEL", "tts-1"),
 )
-AUDIO_OPENAI_API_VOICE = WrappedConfig(
+AUDIO_OPENAI_API_VOICE = PersistedConfig(
     "AUDIO_OPENAI_API_VOICE",
     "audio.openai.api_voice",
     os.getenv("AUDIO_OPENAI_API_VOICE", "alloy"),
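
For context, the rename is purely mechanical: the class behaves the same, only WrappedConfig becomes PersistedConfig. The sketch below shows how the pieces in these hunks fit together. It is a minimal reconstruction based only on the hunks above: the save() persistence hook and the __getattr__ accessor on AppConfig are assumed stand-ins for machinery not shown in this diff, and the __dict__ guard from the class is omitted for brevity.

import os
from typing import Generic, TypeVar

T = TypeVar("T")


class PersistedConfig(Generic[T]):
    """Wraps one setting: its env var name, dotted config path, and current value."""

    def __init__(self, env_name: str, config_path: str, env_value: T):
        self.env_name = env_name
        self.config_path = config_path
        self.value = env_value  # the real class also merges in any previously saved value

    def save(self):
        # Stand-in: the real implementation writes self.value to persistent
        # storage under self.config_path (e.g. "auth.jwt_expiry").
        pass


class AppConfig:
    _state: dict[str, PersistedConfig]

    def __init__(self):
        super().__setattr__("_state", {})  # bypass the custom __setattr__ below

    def __setattr__(self, key, value):
        if isinstance(value, PersistedConfig):
            # Assigning a PersistedConfig registers the wrapper itself.
            self._state[key] = value
        else:
            # Assigning a plain value updates the wrapped value (and persists it).
            self._state[key].value = value
            self._state[key].save()

    def __getattr__(self, key):
        # Assumed accessor (not shown in this diff): reads return the wrapped value.
        return self._state[key].value


# A module-level setting, exactly as declared in the hunks above:
JWT_EXPIRES_IN = PersistedConfig(
    "JWT_EXPIRES_IN", "auth.jwt_expiry", os.environ.get("JWT_EXPIRES_IN", "-1")
)

config = AppConfig()
config.JWT_EXPIRES_IN = JWT_EXPIRES_IN  # registers the wrapper
print(config.JWT_EXPIRES_IN)            # "-1" unless the env var overrides it
config.JWT_EXPIRES_IN = "3600"          # updates (and would persist) the value

Routing assignments through AppConfig.__setattr__ is what lets later plain assignments update the persisted value in place instead of clobbering the wrapper object.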