Mirror of https://github.com/open-webui/open-webui (synced 2024-11-06 08:56:39 +00:00)
Merge pull request #6380 from open-webui/dev
fix: arena model exclude filter
Commit 9b12b75df6
@@ -761,6 +761,7 @@ async def generate_chat_completion(
     form_data: GenerateChatCompletionForm,
     url_idx: Optional[int] = None,
     user=Depends(get_verified_user),
+    bypass_filter: Optional[bool] = False,
 ):
     payload = {**form_data.model_dump(exclude_none=True)}
     log.debug(f"generate_chat_completion() - 1.payload = {payload}")
@@ -769,7 +770,7 @@ async def generate_chat_completion(
 
     model_id = form_data.model
 
-    if app.state.config.ENABLE_MODEL_FILTER:
+    if not bypass_filter and app.state.config.ENABLE_MODEL_FILTER:
         if user.role == "user" and model_id not in app.state.config.MODEL_FILTER_LIST:
             raise HTTPException(
                 status_code=403,
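Taken together, the two hunks above give the Ollama generate_chat_completion route an optional bypass_filter argument (defaulting to False) and skip the per-user model filter whenever it is set. Below is a minimal sketch of the resulting guard logic, with the FastAPI dependencies and Ollama payload handling stripped out and the config values passed in as plain arguments; the helper name check_model_access and the 403 detail text are illustrative, not taken from the diff.

from typing import List, Optional

from fastapi import HTTPException


def check_model_access(
    model_id: str,
    user_role: str,
    enable_model_filter: bool,
    model_filter_list: List[str],
    bypass_filter: Optional[bool] = False,
) -> None:
    # Internal callers that have already authorized the request can pass
    # bypass_filter=True to skip the per-user model filter entirely.
    if not bypass_filter and enable_model_filter:
        if user_role == "user" and model_id not in model_filter_list:
            # The diff truncates after status_code=403; the detail text
            # here is a placeholder.
            raise HTTPException(status_code=403, detail="Model not found")

External requests keep the old behaviour, since bypass_filter stays False unless a caller sets it explicitly.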
@@ -1154,7 +1154,9 @@ async def generate_chat_completions(
         # Using /ollama/api/chat endpoint
         form_data = convert_payload_openai_to_ollama(form_data)
         form_data = GenerateChatCompletionForm(**form_data)
-        response = await generate_ollama_chat_completion(form_data=form_data, user=user)
+        response = await generate_ollama_chat_completion(
+            form_data=form_data, user=user, bypass_filter=True
+        )
         if form_data.stream:
             response.headers["content-type"] = "text/event-stream"
             return StreamingResponse(
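The hunk above is the call site for the new parameter: after converting the OpenAI-style payload to Ollama's format, generate_chat_completions hands off to the Ollama route with bypass_filter=True, so a model id chosen on the user's behalf (presumably by an arena model, per the commit title) is not rejected by the per-user model filter a second time. A toy sketch of that hand-off pattern follows, with stand-in async functions in place of the real open-webui routes; the stand-ins and their return values are illustrative only.

import asyncio


async def generate_ollama_chat_completion(form_data, user, bypass_filter=False):
    # Stand-in for the Ollama route patched above: the per-user model
    # filter is skipped when the call comes from inside the app.
    return {"model": form_data["model"], "filter_applied": not bypass_filter}


async def generate_chat_completions(form_data, user):
    # Access was already checked upstream against the model the user
    # actually requested, so the internal hand-off must not re-check
    # the underlying model id.
    return await generate_ollama_chat_completion(
        form_data=form_data, user=user, bypass_filter=True
    )


if __name__ == "__main__":
    print(asyncio.run(generate_chat_completions({"model": "llama3.1"}, user=None)))
    # -> {'model': 'llama3.1', 'filter_applied': False}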