// @ts-nocheck
// Preventing TS checks with files presented in the video for a better presentation.
import { streamText as _streamText, convertToCoreMessages } from 'ai';
import { getModel } from '~/lib/.server/llm/model';
import { MAX_TOKENS } from './constants';
import { getSystemPrompt } from './prompts';
import { MODEL_LIST, DEFAULT_MODEL, DEFAULT_PROVIDER, MODEL_REGEX, PROVIDER_REGEX } from '~/utils/constants';
interface ToolResult<Name extends string, Args, Result> {
|
|
|
|
toolCallId: string;
|
|
|
|
toolName: Name;
|
|
|
|
args: Args;
|
|
|
|
result: Result;
|
|
|
|
}
interface Message {
|
|
|
|
role: 'user' | 'assistant';
|
|
|
|
content: string;
|
|
|
|
toolInvocations?: ToolResult<string, unknown, unknown>[];
|
2024-10-13 18:53:43 +00:00
|
|
|
model?: string;
|
2024-07-17 18:54:46 +00:00
|
|
|
}
/** Conversation history (user/assistant turns) passed into `streamText`. */
export type Messages = Message[];
/** Options forwarded to the `ai` SDK's `streamText`, minus `model` (chosen per request here). */
export type StreamingOptions = Omit<Parameters<typeof _streamText>[0], 'model'>;
function extractPropertiesFromMessage(message: Message): { model: string; provider: string; content: string } {
|
2024-11-19 00:55:28 +00:00
|
|
|
const textContent = Array.isArray(message.content)
|
|
|
|
? message.content.find(item => item.type === 'text')?.text || ''
|
|
|
|
: message.content;
|
|
|
|
|
|
|
|
const modelMatch = textContent.match(MODEL_REGEX);
|
|
|
|
const providerMatch = textContent.match(PROVIDER_REGEX);
|
|
|
|
|
2024-11-06 10:10:08 +00:00
|
|
|
// Extract model
|
2024-11-19 00:55:28 +00:00
|
|
|
// const modelMatch = message.content.match(MODEL_REGEX);
|
2024-11-06 10:10:08 +00:00
|
|
|
const model = modelMatch ? modelMatch[1] : DEFAULT_MODEL;
|
2024-10-13 18:53:43 +00:00
|
|
|
|
2024-11-06 10:10:08 +00:00
|
|
|
// Extract provider
|
2024-11-19 00:55:28 +00:00
|
|
|
// const providerMatch = message.content.match(PROVIDER_REGEX);
|
2024-11-06 10:10:08 +00:00
|
|
|
const provider = providerMatch ? providerMatch[1] : DEFAULT_PROVIDER;
|
|
|
|
|
2024-11-19 00:55:28 +00:00
|
|
|
const cleanedContent = Array.isArray(message.content)
|
|
|
|
? message.content.map(item => {
|
|
|
|
if (item.type === 'text') {
|
|
|
|
return {
|
|
|
|
type: 'text',
|
2024-11-20 01:37:23 +00:00
|
|
|
text: item.text?.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '')
|
2024-11-19 00:55:28 +00:00
|
|
|
};
|
|
|
|
}
|
|
|
|
return item; // Preserve image_url and other types as is
|
|
|
|
})
|
2024-11-20 01:37:23 +00:00
|
|
|
: textContent.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '');
|
2024-11-06 10:10:08 +00:00
|
|
|
|
|
|
|
return { model, provider, content: cleanedContent };
|
2024-10-13 18:53:43 +00:00
|
|
|
}
export function streamText(
|
2024-11-19 00:55:28 +00:00
|
|
|
messages: Messages,
|
|
|
|
env: Env,
|
2024-10-29 03:19:30 +00:00
|
|
|
options?: StreamingOptions,
|
|
|
|
apiKeys?: Record<string, string>
|
|
|
|
) {
|
2024-10-13 18:53:43 +00:00
|
|
|
let currentModel = DEFAULT_MODEL;
|
2024-11-06 10:10:08 +00:00
|
|
|
let currentProvider = DEFAULT_PROVIDER;
|
|
|
|
|
2024-10-13 18:53:43 +00:00
|
|
|
const processedMessages = messages.map((message) => {
|
|
|
|
if (message.role === 'user') {
|
2024-11-06 10:10:08 +00:00
|
|
|
const { model, provider, content } = extractPropertiesFromMessage(message);
|
|
|
|
|
|
|
|
if (MODEL_LIST.find((m) => m.name === model)) {
|
|
|
|
currentModel = model;
|
2024-10-13 18:53:43 +00:00
|
|
|
}
|
2024-11-06 10:10:08 +00:00
|
|
|
|
|
|
|
currentProvider = provider;
|
|
|
|
|
2024-10-13 18:53:43 +00:00
|
|
|
return { ...message, content };
|
|
|
|
}
|
2024-11-22 04:31:41 +00:00
|
|
|
return message;
|
|
|
|
});
|
|
|
|
|
|
|
|
const modelDetails = MODEL_LIST.find((m) => m.name === currentModel);
|
|
|
|
|
|
|
|
const dynamicMaxTokens =
|
|
|
|
modelDetails && modelDetails.maxTokenAllowed
|
|
|
|
? modelDetails.maxTokenAllowed
|
|
|
|
: MAX_TOKENS;
|
|
|
|
|
|
|
|
return _streamText({
|
|
|
|
...options,
|
|
|
|
model: getModel(currentProvider, currentModel, env, apiKeys),
|
|
|
|
system: getSystemPrompt(),
|
|
|
|
maxTokens: dynamicMaxTokens,
|
|
|
|
messages: convertToCoreMessages(processedMessages),
|
|
|
|
});
|
|
|
|
}
|