Feature: Adjusted process_chat_payload to handle individual rag config (send individual rag template)

This commit is contained in:
Maytown 2025-05-02 09:59:42 +02:00
parent e3a93b24a0
commit 4f93577d3d

View File

@@ -912,20 +912,24 @@ async def process_chat_payload(request, form_data, user, metadata, model):
f"With a 0 relevancy threshold for RAG, the context cannot be empty"
)
# Adjusted RAG template step to use knowledge-base-specific configuration
rag_template_config = request.app.state.config.RAG_TEMPLATE
if model_knowledge and not model_knowledge.data.get("DEFAULT_RAG_SETTINGS", True):
rag_template_config = model_knowledge.data.get("rag_config", {}).get(
"template", request.app.state.config.RAG_TEMPLATE
)
  # Workaround for Ollama 2.0+ system prompt issue
  # TODO: replace with add_or_update_system_message
  if model.get("owned_by") == "ollama":
      form_data["messages"] = prepend_to_first_user_message_content(
-         rag_template(
-             request.app.state.config.RAG_TEMPLATE, context_string, prompt
-         ),
+         rag_template(rag_template_config, context_string, prompt),
          form_data["messages"],
      )
  else:
      form_data["messages"] = add_or_update_system_message(
-         rag_template(
-             request.app.state.config.RAG_TEMPLATE, context_string, prompt
-         ),
+         rag_template(rag_template_config, context_string, prompt),
          form_data["messages"],
      )