Mirror of https://github.com/stackblitz-labs/bolt.diy, synced 2025-06-26 18:26:38 +00:00.
commit 09a7b326be
parent 1ce6ad6b59

    Pass req.abortSignal to llm
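This commit threads the incoming request's AbortSignal through createSummary, selectContext, and streamText into the underlying ai-SDK calls, so a dropped client connection cancels the in-flight model request instead of letting it run to completion. Below is a minimal sketch of the pattern, assuming the Vercel ai SDK's generateText; the @ai-sdk/openai provider and the helper name are purely illustrative (the repo resolves its model through its own provider layer):

import { generateText } from 'ai';
import { openai } from '@ai-sdk/openai'; // illustrative provider choice

// Hypothetical helper shaped like createSummary: it accepts an optional
// AbortSignal and forwards it to the model call via the SDK's abortSignal option.
async function summarize(prompt: string, abortSignal?: AbortSignal) {
  const resp = await generateText({
    model: openai('gpt-4o-mini'), // stand-in for provider.getModelInstance(...)
    prompt,
    abortSignal, // when this fires, the await rejects with an 'AbortError'
  });

  return resp.text;
}

// In a route handler, the incoming Request supplies the signal:
//   await summarize(prompt, request.signal);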
@@ -15,8 +15,9 @@ export async function createSummary(props: {
   promptId?: string;
   contextOptimization?: boolean;
   onFinish?: (resp: GenerateTextResult<Record<string, CoreTool<any, any>>, never>) => void;
+  abortSignal?: AbortSignal;
 }) {
-  const { messages, env: serverEnv, apiKeys, providerSettings, onFinish } = props;
+  const { messages, env: serverEnv, apiKeys, providerSettings, onFinish, abortSignal } = props;
   let currentModel = DEFAULT_MODEL;
   let currentProvider = DEFAULT_PROVIDER.name;
   const processedMessages = messages.map((message) => {
@@ -75,8 +76,8 @@ export async function createSummary(props: {

   if (summary && summary.type === 'chatSummary') {
     chatId = summary.chatId;
-    summaryText = `Below is the Chat Summary till now, this is chat summary before the conversation provided by the user
-you should also use this as historical message while providing the response to the user.
+    summaryText = `Below is the Chat Summary till now, this is chat summary before the conversation provided by the user
+you should also use this as historical message while providing the response to the user.
 ${summary.summary}`;

   if (chatId) {
@@ -152,7 +153,7 @@ Note:


 ---
-
+
 RULES:
 * Only provide the whole summary of the chat till now.
 * Do not provide any new information.
@@ -163,7 +164,7 @@ Note:

 Here is the previous summary of the chat:
 <old_summary>
-${summaryText}
+${summaryText}
 </old_summary>

 Below is the chat after that:
@@ -185,6 +186,7 @@ Please provide a summary of the chat till now including the hitorical summary of
       apiKeys,
       providerSettings,
     }),
+    abortSignal,
   });

   const response = resp.text;
@@ -22,8 +22,9 @@ export async function selectContext(props: {
   contextOptimization?: boolean;
   summary: string;
   onFinish?: (resp: GenerateTextResult<Record<string, CoreTool<any, any>>, never>) => void;
+  abortSignal?: AbortSignal;
 }) {
-  const { messages, env: serverEnv, apiKeys, files, providerSettings, summary, onFinish } = props;
+  const { messages, env: serverEnv, apiKeys, files, providerSettings, summary, onFinish, abortSignal } = props;
   let currentModel = DEFAULT_MODEL;
   let currentProvider = DEFAULT_PROVIDER.name;
   const processedMessages = messages.map((message) => {
@@ -174,6 +175,7 @@ export async function selectContext(props: {
       apiKeys,
       providerSettings,
     }),
+    abortSignal,
   });

   const response = resp.text;
@@ -28,6 +28,7 @@ export async function streamText(props: {
   contextFiles?: FileMap;
   summary?: string;
   messageSliceId?: number;
+  abortSignal?: AbortSignal;
 }) {
   const {
     messages,
@@ -40,6 +41,7 @@ export async function streamText(props: {
     contextOptimization,
     contextFiles,
     summary,
+    abortSignal,
   } = props;
   let currentModel = DEFAULT_MODEL;
   let currentProvider = DEFAULT_PROVIDER.name;
@@ -148,6 +150,7 @@ ${props.summary}
       apiKeys,
       providerSettings,
     }),
+    abortSignal,
     system: systemPrompt,
     maxTokens: dynamicMaxTokens,
     messages: convertToCoreMessages(processedMessages as any),
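The streaming path forwards the same option to the SDK's streaming call, so an abort tears down the token stream mid-flight. A hedged sketch of that behaviour, again with an illustrative model choice, assuming the v4-style ai SDK where streamText returns its result synchronously:

import { streamText } from 'ai';
import { openai } from '@ai-sdk/openai'; // illustrative provider choice

async function demo() {
  const controller = new AbortController();

  const result = streamText({
    model: openai('gpt-4o-mini'),
    prompt: 'Write a very long story.',
    abortSignal: controller.signal,
  });

  // Cancel after two seconds; the iteration below ends once the signal fires.
  setTimeout(() => controller.abort(), 2_000);

  try {
    for await (const chunk of result.textStream) {
      process.stdout.write(chunk);
    }
  } catch (e: any) {
    if (e.name !== 'AbortError') {
      throw e; // only a genuine failure should propagate
    }
  }
}

demo();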
@@ -97,6 +97,7 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
           providerSettings,
           promptId,
           contextOptimization,
+          abortSignal: request.signal,
           onFinish(resp) {
             if (resp.usage) {
               logger.debug('createSummary token usage', JSON.stringify(resp.usage));
@@ -141,6 +142,7 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
           promptId,
           contextOptimization,
           summary,
+          abortSignal: request.signal,
           onFinish(resp) {
             if (resp.usage) {
               logger.debug('selectContext token usage', JSON.stringify(resp.usage));
@@ -242,18 +244,28 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
         contextFiles: filteredFiles,
         summary,
         messageSliceId,
+        abortSignal: request.signal,
       });

       result.mergeIntoDataStream(dataStream);

       (async () => {
-        for await (const part of result.fullStream) {
-          if (part.type === 'error') {
-            const error: any = part.error;
-            logger.error(`${error}`);
+        try {
+          for await (const part of result.fullStream) {
+            if (part.type === 'error') {
+              const error: any = part.error;
+              logger.error(`${error}`);

               return;
             }
           }
+        } catch (e: any) {
+          if (e.name === 'AbortError') {
+            logger.info('Request aborted.');
+            return;
+          }
+
+          throw e;
+        }
       })();

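The try/catch added around the fullStream consumer exists because aborting the signal makes the pending iteration reject; the handler treats an error named 'AbortError' as a normal cancellation and rethrows anything else. The same discrimination pattern in isolation (the stream parameter and console calls stand in for result.fullStream and the repo's logger):

// `stream` stands in for result.fullStream; names are illustrative.
async function drain(stream: AsyncIterable<{ type: string; error?: unknown }>) {
  try {
    for await (const part of stream) {
      if (part.type === 'error') {
        console.error(`${part.error}`);
        return;
      }
    }
  } catch (e: any) {
    // Aborted fetches reject with a DOMException whose name is 'AbortError'.
    if (e.name === 'AbortError') {
      console.info('Request aborted.');
      return;
    }

    throw e; // anything else is a genuine failure and should propagate
  }
}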
@@ -281,16 +293,26 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
         contextFiles: filteredFiles,
         summary,
         messageSliceId,
+        abortSignal: request.signal,
       });

       (async () => {
-        for await (const part of result.fullStream) {
-          if (part.type === 'error') {
-            const error: any = part.error;
-            logger.error(`${error}`);
+        try {
+          for await (const part of result.fullStream) {
+            if (part.type === 'error') {
+              const error: any = part.error;
+              logger.error(`${error}`);

               return;
             }
           }
+        } catch (e: any) {
+          if (e.name === 'AbortError') {
+            logger.info('Request aborted.');
+            return;
+          }
+
+          throw e;
+        }
       })();
       result.mergeIntoDataStream(dataStream);
@@ -127,6 +127,7 @@ async function llmCallAction({ context, request }: ActionFunctionArgs) {
       }),
       maxTokens: dynamicMaxTokens,
       toolChoice: 'none',
+      abortSignal: request.signal,
     });
     logger.info(`Generated response`);

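On the client side the abort originates from an AbortController attached to the fetch; cancelling that fetch is what makes request.signal fire in these actions. A usage sketch, assuming the chat action is exposed at /api/chat and with an illustrative payload (neither the route path nor the body shape is shown in this diff):

const controller = new AbortController();

// Start the chat request with an abortable signal.
const pending = fetch('/api/chat', {
  method: 'POST',
  body: JSON.stringify({ messages: [] }), // illustrative payload
  signal: controller.signal,
});

// Later, e.g. when the user presses "stop": the server sees
// request.signal fire and aborts its in-flight LLM calls.
controller.abort();

pending.catch((e) => {
  if (e.name === 'AbortError') {
    console.info('chat request cancelled');
  }
});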