Merge with upstream

This commit is contained in:
Andrew Trokhymenko 2024-11-21 23:31:41 -05:00
parent 70d14df953
commit 074161024d
4 changed files with 17 additions and 24 deletions

View File

@ -22,7 +22,6 @@ export function getAnthropicModel(apiKey: string, model: string) {
}
export function getOpenAILikeModel(baseURL: string, apiKey: string, model: string) {
// console.log('OpenAILike config:', { baseURL, hasApiKey: !!apiKey, model });
const openai = createOpenAI({
baseURL,
apiKey,
@ -132,8 +131,6 @@ export function getModel(provider: string, model: string, env: Env, apiKeys?: Re
apiKey = getAPIKey(env, provider, apiKeys); // Then assign
baseURL = getBaseURL(env, provider);
// console.log('getModel inputs:', { provider, model, baseURL, hasApiKey: !!apiKey });
switch (provider) {
case 'Anthropic':
return getAnthropicModel(apiKey, model);

View File

@ -52,12 +52,9 @@ function extractPropertiesFromMessage(message: Message): { model: string; provid
})
: textContent.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '');
// console.log('Model from message:', model);
// console.log('Found in MODEL_LIST:', MODEL_LIST.find((m) => m.name === model));
// console.log('Current MODEL_LIST:', MODEL_LIST);
return { model, provider, content: cleanedContent };
}
export function streamText(
messages: Messages,
env: Env,
@ -79,20 +76,21 @@ export function streamText(
return { ...message, content };
}
return message;
});
const modelDetails = MODEL_LIST.find((m) => m.name === currentModel);
const modelDetails = MODEL_LIST.find((m) => m.name === currentModel);
const dynamicMaxTokens =
modelDetails && modelDetails.maxTokenAllowed
? modelDetails.maxTokenAllowed
: MAX_TOKENS;
const dynamicMaxTokens =
modelDetails && modelDetails.maxTokenAllowed
? modelDetails.maxTokenAllowed
: MAX_TOKENS;
return _streamText({
model: getModel(currentProvider, currentModel, env, apiKeys),
system: getSystemPrompt(),
maxTokens: dynamicMaxTokens,
messages: convertToCoreMessages(processedMessages),
...options,
});
}
)}
return _streamText({
...options,
model: getModel(currentProvider, currentModel, env, apiKeys),
system: getSystemPrompt(),
maxTokens: dynamicMaxTokens,
messages: convertToCoreMessages(processedMessages),
});
}

View File

@ -37,8 +37,6 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
model: string
}>();
// console.log('ChatAction:', JSON.stringify(messages));
const cookieHeader = request.headers.get("Cookie");
// Parse the cookie's value (returns an object or null if no cookie exists)

View File

@ -32,7 +32,7 @@ const PROVIDER_LIST: ProviderInfo[] = [
name: 'OpenAILike',
staticModels: [
{ name: 'o1-mini', label: 'o1-mini', provider: 'OpenAILike' },
{ name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'OpenAI' },
{ name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'OpenAILike' },
],
getDynamicModels: getOpenAILikeModels
},