From 1d6304a169fe04a58825013f77f4cb6035e8ce40 Mon Sep 17 00:00:00 2001 From: bjc Date: Tue, 8 Oct 2024 17:32:01 -0700 Subject: [PATCH] refactor(llm): remove unused prompts.ts file and update OpenAI and Anthropic LLM implementations --- app/lib/.server/llm/anthropic-llm.ts | 8 ++---- app/lib/.server/llm/openai-llm.ts | 16 ++++------- app/lib/.server/llm/prompts.ts | 42 ---------------------------- 3 files changed, 8 insertions(+), 58 deletions(-) delete mode 100644 app/lib/.server/llm/prompts.ts diff --git a/app/lib/.server/llm/anthropic-llm.ts b/app/lib/.server/llm/anthropic-llm.ts index 62557a8..bad07eb 100644 --- a/app/lib/.server/llm/anthropic-llm.ts +++ b/app/lib/.server/llm/anthropic-llm.ts @@ -1,14 +1,10 @@ import {streamText as _streamText, convertToCoreMessages } from 'ai'; import { createAnthropic } from '@ai-sdk/anthropic'; import { MAX_TOKENS } from './constants'; -import { getPrompts } from './prompts'; import type { LLM } from './llm-interface'; import type { Prompts } from './prompts-interface'; import type { Messages, StreamingOptions }from './llm-interface'; - -// export type Messages = Message[]; - -// export type StreamingOptions = Omit[0], 'model'>; +import { AnthropicPrompts } from './anthropic-prompts'; export class AnthropicLLM implements LLM { private apiKey: string = ''; @@ -42,6 +38,6 @@ export class AnthropicLLM implements LLM { } getPrompts(): Prompts { - return getPrompts(); + return new AnthropicPrompts(); } } \ No newline at end of file diff --git a/app/lib/.server/llm/openai-llm.ts b/app/lib/.server/llm/openai-llm.ts index f1ccd25..d8479d7 100644 --- a/app/lib/.server/llm/openai-llm.ts +++ b/app/lib/.server/llm/openai-llm.ts @@ -1,14 +1,10 @@ import { streamText as _streamText, convertToCoreMessages } from 'ai'; import { createOpenAI } from '@ai-sdk/openai'; import { MAX_TOKENS } from './constants'; -import { getPrompts } from './prompts'; import type { LLM } from './llm-interface'; import type { Prompts } from './prompts-interface'; import type { 
Message, Messages, StreamingOptions }from './llm-interface'; - -// export type Messages = Message[]; - -// export type StreamingOptions = Omit[0], 'model'>; +import { OpenAIPrompts } from './openai-prompts'; export class OpenAILLM implements LLM { private apiKey: string = ''; @@ -28,8 +24,8 @@ export class OpenAILLM implements LLM { } const openai = createOpenAI({ apiKey: this.apiKey, compatibility: 'strict' }); - type model_name_t = 'gpt-4o' | 'o1-mini' | 'o1-preview'; - const model_name: model_name_t = process.env.OPEN_AI_MODEL as model_name_t; + type model_name_t = 'gpt-4o' | 'gpt-4o-mini' | 'o1-mini' | 'o1-preview'; + const model_name = process.env.OPENAI_MODEL as model_name_t; const model = openai(model_name); if (model_name === 'o1-mini' || model_name === 'o1-preview') { @@ -39,13 +35,13 @@ export class OpenAILLM implements LLM { }; return _streamText({ - model: model as any, // Use type assertion to bypass strict type checking + model, messages: [o1sysmessage, ...convertToCoreMessages(messages)], ...options, }); } else { return _streamText({ - model: model as any, // Use type assertion to bypass strict type checking + model, system: this.getPrompts().getSystemPrompt(), messages: convertToCoreMessages(messages), maxTokens: MAX_TOKENS, @@ -55,6 +51,6 @@ export class OpenAILLM implements LLM { } getPrompts(): Prompts { - return getPrompts(); + return new OpenAIPrompts(); } } \ No newline at end of file diff --git a/app/lib/.server/llm/prompts.ts b/app/lib/.server/llm/prompts.ts deleted file mode 100644 index 87b0d12..0000000 --- a/app/lib/.server/llm/prompts.ts +++ /dev/null @@ -1,42 +0,0 @@ -import { MODIFICATIONS_TAG_NAME, WORK_DIR } from '~/utils/constants'; -import { allowedHTMLElements } from '~/utils/markdown'; -import { stripIndents } from '~/utils/stripIndent'; -import type { Prompts } from './prompts-interface'; -import { getCurrentLLMType } from './llm-selector'; -import { AnthropicPrompts } from './anthropic-prompts'; -import { OpenAIPrompts } from 
'./openai-prompts'; - -class GenericPrompts implements Prompts { - getSystemPrompt(cwd: string = WORK_DIR): string { - return ` - You are an AI assistant. Please help the user with their task. - The current working directory is ${cwd}. - `; - } - - getContinuePrompt(): string { - return stripIndents` - Continue your prior response. Please begin from where you left off without any interruptions. - `; - } -} - -export function getPrompts(): Prompts { - const llmType = getCurrentLLMType(); - switch (llmType) { - case 'anthropic': - return new AnthropicPrompts(); - case 'openai': - return new OpenAIPrompts(); - default: - return new GenericPrompts(); - } -} - -export function getSystemPrompt(cwd: string = WORK_DIR): string { - return getPrompts().getSystemPrompt(cwd); -} - -export function getContinuePrompt(): string { - return getPrompts().getContinuePrompt(); -}