import { convertToCoreMessages, streamText as _streamText } from 'ai';

import { getModel } from '~/lib/.server/llm/model';
import { MAX_TOKENS } from './constants';
import { getSystemPrompt } from './prompts';
import { DEFAULT_MODEL, DEFAULT_PROVIDER, getModelList, MODEL_REGEX, PROVIDER_REGEX } from '~/utils/constants';
// A completed tool call carried on a message via `toolInvocations`.
interface ToolResult<Name extends string, Args, Result> {
  toolCallId: string;
  toolName: Name;
  args: Args;
  result: Result;
}

interface Message {
  role: 'user' | 'assistant';
  content: string;
  toolInvocations?: ToolResult<string, unknown, unknown>[];
  model?: string;
}

export type Messages = Message[];

// Every option accepted by the AI SDK's streamText except `model`, which is resolved per request below.
export type StreamingOptions = Omit<Parameters<typeof _streamText>[0], 'model'>;
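
/**
 * Extracts the model and provider annotations embedded in a message's text content
 * (matched via MODEL_REGEX / PROVIDER_REGEX), falling back to DEFAULT_MODEL and
 * DEFAULT_PROVIDER when no match is found, and returns the content with those
 * annotations stripped. For multimodal (array) content only the text parts are
 * cleaned; image_url and other parts are passed through unchanged, so the cleaned
 * content may be an array despite the declared `string` return type (callers cast
 * downstream).
 */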
function extractPropertiesFromMessage(message: Message): { model: string; provider: string; content: string } {
  const textContent = Array.isArray(message.content)
    ? message.content.find((item) => item.type === 'text')?.text || ''
    : message.content;

  const modelMatch = textContent.match(MODEL_REGEX);
  const providerMatch = textContent.match(PROVIDER_REGEX);

  /*
   * Extract model
   * const modelMatch = message.content.match(MODEL_REGEX);
   */
  const model = modelMatch ? modelMatch[1] : DEFAULT_MODEL;

  /*
   * Extract provider
   * const providerMatch = message.content.match(PROVIDER_REGEX);
   */
  const provider = providerMatch ? providerMatch[1] : DEFAULT_PROVIDER.name;

  const cleanedContent = Array.isArray(message.content)
    ? message.content.map((item) => {
        if (item.type === 'text') {
          return {
            type: 'text',
            text: item.text?.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, ''),
          };
        }

        return item; // Preserve image_url and other types as is
      })
    : textContent.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '');

  return { model, provider, content: cleanedContent };
}
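
/**
 * Streams a completion for the given chat history. Walks the user messages to pick
 * up the most recent model/provider selection (switching models only when the model
 * exists in the dynamically resolved MODEL_LIST), strips the annotations from the
 * forwarded content, derives the max token budget from the model's maxTokenAllowed
 * (falling back to MAX_TOKENS), and delegates to the AI SDK's streamText with the
 * shared system prompt.
 */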
export async function streamText(
  messages: Messages,
  env: Env,
  options?: StreamingOptions,
  apiKeys?: Record<string, string>,
) {
  let currentModel = DEFAULT_MODEL;
  let currentProvider = DEFAULT_PROVIDER.name;
  const MODEL_LIST = await getModelList(apiKeys || {});

  // Strip the model/provider annotations from user messages, remembering the latest
  // selection so it can be used for the actual request below.
  const processedMessages = messages.map((message) => {
    if (message.role === 'user') {
      const { model, provider, content } = extractPropertiesFromMessage(message);

      // Only switch to models that exist in the resolved model list.
      if (MODEL_LIST.find((m) => m.name === model)) {
        currentModel = model;
      }

      currentProvider = provider;

      return { ...message, content };
    }

    return message;
  });

  const modelDetails = MODEL_LIST.find((m) => m.name === currentModel);

  // Prefer the model's own token limit when it is known; otherwise fall back to MAX_TOKENS.
  const dynamicMaxTokens = modelDetails && modelDetails.maxTokenAllowed ? modelDetails.maxTokenAllowed : MAX_TOKENS;

  return _streamText({
    model: getModel(currentProvider, currentModel, env, apiKeys) as any,
    system: getSystemPrompt(),
    maxTokens: dynamicMaxTokens,
    messages: convertToCoreMessages(processedMessages as any),
    ...options,
  });
}
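
/*
 * Usage sketch, not part of the module: it assumes a caller shaped like the chat API
 * route, with a Cloudflare-style `Env` binding exposed as `context.cloudflare.env`,
 * per-provider API keys in `apiKeys`, and an `onFinish` callback from the AI SDK's
 * streamText options:
 *
 *   const result = await streamText(messages, context.cloudflare.env, { onFinish }, apiKeys);
 *
 * The return value is the raw AI SDK streamText result, so the caller decides how to
 * turn it into a streaming HTTP response.
 */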