mirror of https://github.com/open-webui/open-webui
fix: if cuda is not available fallback to cpu

parent c1d3481c41
commit a7ea07036e
@@ -39,6 +39,18 @@ def serve(
                 "/usr/local/lib/python3.11/site-packages/nvidia/cudnn/lib",
             ]
         )
+        try:
+            import torch
+            assert torch.cuda.is_available(), "CUDA not available"
+            typer.echo("CUDA seems to be working")
+        except Exception as e:
+            typer.echo(
+                "Error when testing CUDA but USE_CUDA_DOCKER is true. "
+                "Resetting USE_CUDA_DOCKER to false and removing "
+                f"LD_LIBRARY_PATH modifications: {e}"
+            )
+            os.environ["USE_CUDA_DOCKER"] = "false"
+            os.environ["LD_LIBRARY_PATH"] = ":".join(LD_LIBRARY_PATH)
     import open_webui.main  # we need set environment variables before importing main
 
     uvicorn.run(open_webui.main.app, host=host, port=port, forwarded_allow_ips="*")
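For context, a minimal standalone sketch of the same probe-and-fallback idea outside open-webui's CLI. The environment variable name matches the diff; pick_device and the print message are illustrative, not part of the commit.

import os


def pick_device() -> str:
    """Return "cuda" only when USE_CUDA_DOCKER is set and torch can see a GPU."""
    if os.environ.get("USE_CUDA_DOCKER", "false").lower() != "true":
        return "cpu"
    try:
        import torch  # imported lazily so a CPU-only environment still works

        assert torch.cuda.is_available(), "CUDA not available"
        return "cuda"
    except Exception as e:
        # Mirror the commit: downgrade the flag so anything imported later
        # sees the corrected value instead of failing again.
        print(f"CUDA requested but unusable, falling back to CPU: {e}")
        os.environ["USE_CUDA_DOCKER"] = "false"
        return "cpu"


if __name__ == "__main__":
    print(pick_device())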
@@ -36,7 +36,18 @@ except ImportError:
 USE_CUDA = os.environ.get("USE_CUDA_DOCKER", "false")
 
 if USE_CUDA.lower() == "true":
-    DEVICE_TYPE = "cuda"
+    try:
+        import torch
+        assert torch.cuda.is_available(), "CUDA not available"
+        DEVICE_TYPE = "cuda"
+    except Exception as e:
+        cuda_error = (
+            "Error when testing CUDA but USE_CUDA_DOCKER is true. "
+            f"Resetting USE_CUDA_DOCKER to false: {e}"
+        )
+        os.environ["USE_CUDA_DOCKER"] = "false"
+        USE_CUDA = "false"
+        DEVICE_TYPE = "cpu"
 else:
     DEVICE_TYPE = "cpu"
 
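For illustration only (not from the commit): once DEVICE_TYPE has been resolved to "cpu" or "cuda", downstream code can pass the string straight to torch without probing CUDA again. A minimal sketch, assuming torch is installed; the tiny Linear model is a stand-in for whatever model is actually loaded.

import torch

# Stand-in for the resolved value; recomputed here so the snippet is self-contained.
DEVICE_TYPE = "cuda" if torch.cuda.is_available() else "cpu"

# Models and tensors accept the resolved device string directly.
model = torch.nn.Linear(8, 2).to(DEVICE_TYPE)
x = torch.randn(1, 8, device=DEVICE_TYPE)
print(model(x).device)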
@@ -56,6 +67,9 @@ else:
 log = logging.getLogger(__name__)
 log.info(f"GLOBAL_LOG_LEVEL: {GLOBAL_LOG_LEVEL}")
 
+if "cuda_error" in locals():
+    log.exception(cuda_error)
+
 log_sources = [
     "AUDIO",
     "COMFYUI",
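The locals() check above exists because the CUDA probe at the top of the module runs before the logger is configured, so the error message is stashed in cuda_error and only reported once logging is set up. A minimal sketch of that deferral pattern using only the standard library; the RuntimeError stands in for the real probe, and the explicit sentinel is an alternative to checking locals().

import logging

startup_error = None  # explicit sentinel instead of `"name" in locals()`

try:
    raise RuntimeError("CUDA not available")  # stand-in for the real CUDA probe
except Exception as e:
    # Logging is not configured yet at this point, so only record the message.
    startup_error = f"Falling back to CPU: {e}"

# ...later, once logging has been configured...
logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)
if startup_error is not None:
    log.error(startup_error)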