import { convertToCoreMessages, streamText as _streamText, type Message } from 'ai';
import { MAX_TOKENS, type FileMap } from './constants';
import { getSystemPrompt } from '~/lib/common/prompts/prompts';
import { DEFAULT_MODEL, DEFAULT_PROVIDER, MODIFICATIONS_TAG_NAME, PROVIDER_LIST, WORK_DIR } from '~/utils/constants';
import type { IProviderSetting } from '~/types/model';
import { PromptLibrary } from '~/lib/common/prompt-library';
import { allowedHTMLElements } from '~/utils/markdown';
import { LLMManager } from '~/lib/modules/llm/manager';
import { createScopedLogger } from '~/utils/logger';
import { createFilesContext, extractPropertiesFromMessage, simplifyBoltActions } from './utils';
import { getFilePaths } from './select-context';

export type Messages = Message[];

export type StreamingOptions = Omit<Parameters<typeof _streamText>[0], 'model'>;

const logger = createScopedLogger('stream-text');

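/**
 * Streams an LLM completion: extracts the requested provider/model from the
 * incoming messages, resolves a concrete model (with fallbacks), assembles the
 * system prompt (optionally with project context and a chat summary), and
 * delegates to the `ai` SDK's `streamText`.
 */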
export async function streamText(props: {
  messages: Omit<Message, 'id'>[];
  env?: Env;
  options?: StreamingOptions;
  apiKeys?: Record<string, string>;
  files?: FileMap;
  providerSettings?: Record<string, IProviderSetting>;
  promptId?: string;
  contextOptimization?: boolean;
  contextFiles?: FileMap;
  summary?: string;
}) {
  const {
    messages,
    env: serverEnv,
    options,
    apiKeys,
    files,
    providerSettings,
    promptId,
    contextOptimization,
    contextFiles,
    summary,
  } = props;

  let currentModel = DEFAULT_MODEL;
  let currentProvider = DEFAULT_PROVIDER.name;

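  // Walk the conversation once: user messages carry the selected model and
  // provider as inline metadata, and assistant messages can be compacted when
  // context optimization is enabled.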
  let processedMessages = messages.map((message) => {
    if (message.role === 'user') {
      const { model, provider, content } = extractPropertiesFromMessage(message);
      currentModel = model;
      currentProvider = provider;

      return { ...message, content };
    } else if (message.role === 'assistant') {
      let content = message.content;

      if (contextOptimization) {
        content = simplifyBoltActions(content);
      }

      return { ...message, content };
    }

    return message;
  });

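  // Resolve the provider by name, then try the static model list first; it is
  // available without any network fetch.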
  const provider = PROVIDER_LIST.find((p) => p.name === currentProvider) || DEFAULT_PROVIDER;
  const staticModels = LLMManager.getInstance().getStaticModelListFromProvider(provider);
  let modelDetails = staticModels.find((m) => m.name === currentModel);

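  // The model wasn't in the static list: fetch the provider's full model list
  // (static + dynamic) before giving up.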
  if (!modelDetails) {
    const modelsList = [
      ...(provider.staticModels || []),
      ...(await LLMManager.getInstance().getModelListFromProvider(provider, {
        apiKeys,
        providerSettings,
        serverEnv: serverEnv as any,
      })),
    ];

    if (!modelsList.length) {
      throw new Error(`No models found for provider ${provider.name}`);
    }

    modelDetails = modelsList.find((m) => m.name === currentModel);

    if (!modelDetails) {
      // Fall back to the first model the provider reports
      logger.warn(
        `MODEL [${currentModel}] not found in provider [${provider.name}]. Falling back to first model: ${modelsList[0].name}`,
      );
      modelDetails = modelsList[0];
    }
  }

  const dynamicMaxTokens = modelDetails && modelDetails.maxTokenAllowed ? modelDetails.maxTokenAllowed : MAX_TOKENS;

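  // Select the system prompt from the prompt library, falling back to the
  // default prompt. (Note: `getPropmtFromLibrary` appears to be the library's
  // actual, misspelled method name, so it is called as-is.)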
  let systemPrompt =
    PromptLibrary.getPropmtFromLibrary(promptId || 'default', {
      cwd: WORK_DIR,
      allowedHtmlElements: allowedHTMLElements,
      modificationTagName: MODIFICATIONS_TAG_NAME,
    }) ?? getSystemPrompt();

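  // With context optimization enabled, append the project's file listing and
  // the pre-selected context files to the system prompt.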
  if (files && contextFiles && contextOptimization) {
    const codeContext = createFilesContext(contextFiles, true);
    const filePaths = getFilePaths(files);

    systemPrompt = `${systemPrompt}
Below are all the files present in the project:
---
${filePaths.join('\n')}
---

Below is the context loaded into the context buffer for you to have knowledge of; these files may need changes to fulfill the current user request.
CONTEXT BUFFER:
---
${codeContext}
---
`;

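    // When a chat summary is available, inline it and drop everything but the
    // latest message; the summary stands in for the rest of the history.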
    if (summary) {
      systemPrompt = `${systemPrompt}
Below is a summary of the chat history so far.
CHAT SUMMARY:
---
${summary}
---
`;

      const lastMessage = processedMessages.pop();

      if (lastMessage) {
        processedMessages = [lastMessage];
      }
    }
  }

  logger.info(`Sending LLM call to ${provider.name} with model ${modelDetails.name}`);

  // console.log(systemPrompt, processedMessages);

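  // Hand off to the `ai` SDK: resolve a concrete model instance from the
  // provider and stream the completion with the assembled system prompt.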
  return await _streamText({
    model: provider.getModelInstance({
      model: modelDetails.name,
      serverEnv,
      apiKeys,
      providerSettings,
    }),
    system: systemPrompt,
    maxTokens: dynamicMaxTokens,
    messages: convertToCoreMessages(processedMessages as any),
    ...options,
  });
}
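
/*
 * Minimal usage sketch (hypothetical caller; the `env` binding, the API key
 * variable, and the message shape below are assumptions, not part of this
 * module):
 *
 *   const result = await streamText({
 *     messages: [{ role: 'user', content: 'Build a todo app in React' }],
 *     env,
 *     apiKeys: { OpenAI: openAiKey },
 *     contextOptimization: true,
 *   });
 *
 *   for await (const chunk of result.textStream) {
 *     process.stdout.write(chunk);
 *   }
 */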