Show citations from Perplexity models in UI

Phil Szalay 2025-03-21 16:50:32 +01:00
parent a5edf76a2c
commit 9b65e0f0af
3 changed files with 72 additions and 10 deletions
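For context, Perplexity's OpenAI-compatible responses include a top-level "citations" array of source URLs next to the usual completion fields; this is the key the backend change below watches for while parsing streamed data lines. A minimal sketch of such a chunk (the model name, ids, and URLs are illustrative, not taken from this commit):

# Illustrative Perplexity-style streaming chunk: the "citations" list of URLs
# sits at the top level of the parsed JSON, alongside the OpenAI-style fields.
example_chunk = {
    "id": "chatcmpl-123",
    "object": "chat.completion.chunk",
    "model": "sonar-pro",
    "choices": [{"delta": {"content": "According to recent coverage..."}}],
    "citations": [
        "https://example.com/report",
        "https://example.com/background",
    ],
}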

View File

@@ -573,9 +573,10 @@ async def generate_chat_completion(
# Initialize the credit cost variable
model_message_credit_cost = 0
model_id = model_info.base_model_id if model_info.base_model_id else model_info.id

if has_chat_id:
    # Check for base_model_id first in case of user defined custom model
    model_id = model_info.base_model_id if model_info.base_model_id else model_info.id
    model_message_credit_cost = ModelMessageCreditCosts.get_cost_by_model(model_id)
    # Get current credit balance
@@ -616,9 +617,6 @@ async def generate_chat_completion(
# Check model info and override the payload
if model_info:
    # Initialize model_id here to ensure it's always defined
    model_id = model_info.id
    if model_info.base_model_id:
        payload["model"] = model_info.base_model_id
        model_id = model_info.base_model_id

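The change above hoists the base_model_id fallback out of the has_chat_id branch so model_id is resolved once and reused both for the per-message credit-cost lookup and for the payload override in the second hunk. A standalone sketch of that fallback, with a hypothetical cost table standing in for ModelMessageCreditCosts:

# Hypothetical stand-in for the ModelMessageCreditCosts table referenced above.
MESSAGE_CREDIT_COSTS = {"sonar-pro": 3, "gpt-4o": 2}


def resolve_model_id(model_info) -> str:
    # User-defined custom models wrap an upstream model in base_model_id;
    # pricing and the outgoing payload should both target that upstream model.
    return model_info.base_model_id if model_info.base_model_id else model_info.id


def message_credit_cost(model_info) -> int:
    return MESSAGE_CREDIT_COSTS.get(resolve_model_id(model_info), 0)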
View File

@@ -1364,6 +1364,7 @@ async def process_chat_response(
        "content": content,
    }
]

sources = None  # Store sources from the LLMs ("citations") at this scope

# We might want to disable this by default
DETECT_REASONING = True
@@ -1423,6 +1424,18 @@
try:
    data = json.loads(data)

    if "citations" in data:
        nonlocal sources
        sources = list(map(
            lambda citationUrl: {
                "source": {"name": citationUrl},
                "document": [citationUrl],
                "metadata": [{"source": citationUrl}],
                "distances": [0],
            },
            data["citations"]
        ))

    if "selected_model_id" in data:
        model_id = data["selected_model_id"]
        Chats.upsert_message_to_chat_by_id_and_message_id(
@@ -1807,12 +1820,23 @@ async def process_chat_response(
if not ENABLE_REALTIME_CHAT_SAVE:
    # Save message in the database
    message = {
        "content": serialize_content_blocks(content_blocks),
    }

    if sources:  # Use the stored sources
        message["sources"] = sources
        await event_emitter({
            "type": "chat:completion",
            "data": {
                "sources": sources,
            },
        })

    Chats.upsert_message_to_chat_by_id_and_message_id(
        metadata["chat_id"],
        metadata["message_id"],
        {
            "content": serialize_content_blocks(content_blocks),
        },
        message
    )

# Send a webhook notification if the user is not active
@@ -1844,12 +1868,23 @@
if not ENABLE_REALTIME_CHAT_SAVE:
    # Save message in the database
    message = {
        "content": serialize_content_blocks(content_blocks),
    }

    if sources:  # Use the stored sources
        message["sources"] = sources
        await event_emitter({
            "type": "chat:completion",
            "data": {
                "sources": sources,
            },
        })

    Chats.upsert_message_to_chat_by_id_and_message_id(
        metadata["chat_id"],
        metadata["message_id"],
        {
            "content": serialize_content_blocks(content_blocks),
        },
        message
    )

if response.background is not None:

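The core of the backend change is the mapping from bare citation URLs into the source objects the existing citation UI consumes. A self-contained sketch of that transformation, using the same field layout as the hunk above (the sample URL is illustrative):

def citations_to_sources(citations: list[str]) -> list[dict]:
    # Wrap each cited URL in the source/document/metadata/distances layout the
    # citation UI expects; document[0] is the URL itself because Perplexity
    # returns only URLs, not snippet text, for its citations.
    return [
        {
            "source": {"name": url},
            "document": [url],
            "metadata": [{"source": url}],
            "distances": [0],
        }
        for url in citations
    ]


if __name__ == "__main__":
    print(citations_to_sources(["https://example.com/report"]))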
View File

@@ -103,6 +103,15 @@
id={`source-${citation.source.name}`}
class="no-toggle outline-none flex dark:text-gray-300 p-1 bg-white dark:bg-gray-900 rounded-xl max-w-96"
on:click={() => {
    // In this case the citation comes from the LLM response
    if (
        citation.document &&
        citation.document[0] &&
        citation.source &&
        citation.document[0] === citation.source.name
    ) {
        return;
    }

    showCitationModal = true;
    selectedCitation = citation;
}}
@@ -133,6 +142,16 @@
<button
    class="no-toggle outline-none flex dark:text-gray-300 p-1 bg-gray-50 hover:bg-gray-100 dark:bg-gray-900 dark:hover:bg-gray-850 transition rounded-xl max-w-96"
    on:click={() => {
        // In this case the citation comes from the LLM response
        if (
            citation.document &&
            citation.document[0] &&
            citation.source &&
            citation.document[0] === citation.source.name
        ) {
            return;
        }

        showCitationModal = true;
        selectedCitation = citation;
    }}
@@ -172,6 +191,16 @@
<button
    class="no-toggle outline-none flex dark:text-gray-300 p-1 bg-gray-50 hover:bg-gray-100 dark:bg-gray-900 dark:hover:bg-gray-850 transition rounded-xl max-w-96"
    on:click={() => {
        // In this case the citation comes from the LLM response
        if (
            citation.document &&
            citation.document[0] &&
            citation.source &&
            citation.document[0] === citation.source.name
        ) {
            return;
        }

        showCitationModal = true;
        selectedCitation = citation;
    }}
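In the Svelte component, each click handler now returns early when document[0] equals source.name, i.e. the citation is one of these URL-only entries synthesized from the LLM response, so there is no retrieved snippet to show in the citation modal. The same predicate restated in Python against the source objects built by the backend (the helper name is hypothetical):

def is_llm_url_citation(citation: dict) -> bool:
    # True when the citation was synthesized from a bare URL returned by the
    # model: document[0] holds the URL itself, so the modal has nothing to show.
    document = citation.get("document") or []
    source = citation.get("source") or {}
    return bool(document) and document[0] == source.get("name")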