Mirror of https://github.com/open-webui/open-webui (synced 2025-06-26 18:26:48 +00:00)
feat: follow ups backend integration
@@ -1419,12 +1419,13 @@ FOLLOW_UP_GENERATION_PROMPT_TEMPLATE = PersistentConfig(
 )
 
 DEFAULT_FOLLOW_UP_GENERATION_PROMPT_TEMPLATE = """### Task:
-Suggest 3-5 relevant follow-up questions or discussion prompts based on the chat history to help continue or deepen the conversation.
+Suggest 3-5 relevant follow-up questions or prompts that the **user** might naturally ask next in this conversation, based on the chat history, to help continue or deepen the discussion.
 ### Guidelines:
+- Phrase all follow-up questions from the user’s perspective, addressed to the assistant or expert.
 - Make questions concise, clear, and directly related to the discussed topic(s).
-- Only generate follow-ups that make sense given the chat content and do not repeat what was already covered.
-- If the conversation is very short or not specific, suggest more general follow-ups.
-- Use the chat's primary language; default to English if multilingual.
+- Only suggest follow-ups that make sense given the chat content and do not repeat what was already covered.
+- If the conversation is very short or not specific, suggest more general (but relevant) follow-ups the user might ask.
+- Use the conversation's primary language; default to English if multilingual.
 - Response must be a JSON array of strings, no extra text or formatting.
 ### Output:
 JSON format: { "follow_ups": ["Question 1?", "Question 2?", "Question 3?"] }
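Note: per the template's "### Output" section, the model is expected to return a single JSON object with a "follow_ups" array of strings. A minimal sketch of checking such a response with the standard library (the example question strings are illustrative, not from the commit):

import json

# Illustrative model output matching the template's "### Output" contract.
raw = '{ "follow_ups": ["How does this compare to the previous approach?", "Can you show a concrete example?", "What are common pitfalls?"] }'

follow_ups = json.loads(raw).get("follow_ups", [])

# The template asks for a JSON array of strings and nothing else.
assert isinstance(follow_ups, list)
assert all(isinstance(q, str) for q in follow_ups)
print(follow_ups)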
@@ -32,6 +32,7 @@ from open_webui.socket.main import (
 from open_webui.routers.tasks import (
     generate_queries,
     generate_title,
+    generate_follow_ups,
     generate_image_prompt,
     generate_chat_tags,
 )
@@ -1104,6 +1105,59 @@ async def process_chat_response(
                                 }
                             )
 
+                if (
+                    TASKS.FOLLOW_UP_GENERATION in tasks
+                    and tasks[TASKS.FOLLOW_UP_GENERATION]
+                ):
+                    res = await generate_follow_ups(
+                        request,
+                        {
+                            "model": message["model"],
+                            "messages": messages,
+                            "message_id": metadata["message_id"],
+                            "chat_id": metadata["chat_id"],
+                        },
+                        user,
+                    )
+
+                    if res and isinstance(res, dict):
+                        if len(res.get("choices", [])) == 1:
+                            follow_ups_string = (
+                                res.get("choices", [])[0]
+                                .get("message", {})
+                                .get("content", "")
+                            )
+                        else:
+                            follow_ups_string = ""
+
+                        follow_ups_string = follow_ups_string[
+                            follow_ups_string.find("{") : follow_ups_string.rfind("}")
+                            + 1
+                        ]
+
+                        try:
+                            follow_ups = json.loads(follow_ups_string).get(
+                                "follow_ups", []
+                            )
+                            Chats.upsert_message_to_chat_by_id_and_message_id(
+                                metadata["chat_id"],
+                                metadata["message_id"],
+                                {
+                                    "followUps": follow_ups,
+                                },
+                            )
+
+                            await event_emitter(
+                                {
+                                    "type": "chat:message:follow_ups",
+                                    "data": {
+                                        "follow_ups": follow_ups,
+                                    },
+                                }
+                            )
+                        except Exception as e:
+                            pass
+
                 if TASKS.TAGS_GENERATION in tasks and tasks[TASKS.TAGS_GENERATION]:
                     res = await generate_chat_tags(
                         request,
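Note: the brace slicing in the hunk above (find("{") through rfind("}") + 1) is what lets the parser tolerate models that wrap the JSON object in prose or a code fence. A standalone sketch of that extraction step, mirroring the diff's logic; the helper name is mine, not part of the commit:

import json


def extract_follow_ups(content: str) -> list:
    """Parse a follow_ups list out of a possibly noisy model reply.

    Mirrors the slicing added in the diff above: keep only the span from the
    first "{" to the last "}", then json.loads it and read "follow_ups".
    """
    candidate = content[content.find("{") : content.rfind("}") + 1]
    try:
        return json.loads(candidate).get("follow_ups", [])
    except Exception:
        # The diff swallows parse errors the same way and simply emits nothing.
        return []


# A reply wrapped in a Markdown code fence still parses:
reply = '```json\n{"follow_ups": ["What about edge cases?", "How is this tested?"]}\n```'
print(extract_follow_ups(reply))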