mirror of
https://github.com/stackblitz/bolt.new
synced 2025-06-26 18:17:50 +00:00
refactor(llm): update OpenAI LLM implementation
Refactor the OpenAI LLM implementation in the `openai-llm.ts` file. - Update the model selection logic to support both 'gpt-4o' and 'o1-mini' models. - Add conditional logic to handle different models and their respective prompts and options.
This commit is contained in:
parent
aa50183dd8
commit
6f8158a8d1
@@ -28,23 +28,30 @@ export class OpenAILLM implements LLM {
|
||||
}
|
||||
|
||||
const openai = createOpenAI({ apiKey: this.apiKey, compatibility: 'strict' });
|
||||
const model = openai('o1-mini');
|
||||
type model_name_t = 'gpt-4o' | 'o1-mini';
|
||||
const model_name: model_name_t = process.env.OPEN_AI_MODEL as model_name_t;
|
||||
const model = openai(model_name);
|
||||
|
||||
const o1sysmessage: Message = {
|
||||
role: 'user',
|
||||
content: this.getPrompts().getSystemPrompt()
|
||||
};
|
||||
if (model_name === 'o1-mini') {
|
||||
const o1sysmessage: Message = {
|
||||
role: 'user',
|
||||
content: this.getPrompts().getSystemPrompt()
|
||||
};
|
||||
|
||||
// this is just some jank to get o1 working, proof of concept.
|
||||
// for 4o, update the model above, remove the o1sysmessage, and set the system prompt and maxTokens
|
||||
|
||||
return _streamText({
|
||||
model: model as any, // Use type assertion to bypass strict type checking
|
||||
// system: this.getPrompts().getSystemPrompt(),
|
||||
messages: [o1sysmessage, ...convertToCoreMessages(messages)],
|
||||
// maxTokens: MAX_TOKENS,
|
||||
...options,
|
||||
});
|
||||
return _streamText({
|
||||
model: model as any, // Use type assertion to bypass strict type checking
|
||||
messages: [o1sysmessage, ...convertToCoreMessages(messages)],
|
||||
...options,
|
||||
});
|
||||
} else {
|
||||
return _streamText({
|
||||
model: model as any, // Use type assertion to bypass strict type checking
|
||||
system: this.getPrompts().getSystemPrompt(),
|
||||
messages: convertToCoreMessages(messages),
|
||||
maxTokens: MAX_TOKENS,
|
||||
...options,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
getPrompts(): Prompts {
|
||||
|
||||
@@ -36,7 +36,7 @@ export class OpenAIPrompts implements Prompts {
|
||||
</system_constraints>
|
||||
|
||||
<code_formatting_info>
|
||||
Use 2 spaces for code indentation
|
||||
Use 2 spaces for code indentation.
|
||||
</code_formatting_info>
|
||||
|
||||
<message_formatting_info>
|
||||
@@ -46,8 +46,8 @@ export class OpenAIPrompts implements Prompts {
|
||||
<diff_spec>
|
||||
For user-modified files, a \`<${MODIFICATIONS_TAG_NAME}>\` section will appear at the start of the user message, containing either \`<diff>\` or \`<file>\` elements for each modified file:
|
||||
|
||||
- \`<diff path="/some/file/path.ext">\`: Contains GNU unified diff format changes
|
||||
- \`<file path="/some/file/path.ext">\`: Contains the full new content of the file
|
||||
- \`<diff path="/some/file/path.ext">\`: Contains GNU unified diff format changes.
|
||||
- \`<file path="/some/file/path.ext">\`: Contains the full new content of the file.
|
||||
|
||||
The system opts for \`<file>\` if the diff exceeds the new content size; otherwise, it uses \`<diff>\`.
|
||||
|
||||
@@ -55,13 +55,13 @@ export class OpenAIPrompts implements Prompts {
|
||||
|
||||
- The header with original and modified file names is omitted!
|
||||
- Changed sections start with @@ -X,Y +A,B @@ where:
|
||||
- X: Original file starting line
|
||||
- Y: Original file line count
|
||||
- A: Modified file starting line
|
||||
- B: Modified file line count
|
||||
- (-) lines: Removed from the original
|
||||
- (+) lines: Added in the modified version
|
||||
- Unmarked lines: Unchanged context
|
||||
- X: Original file starting line.
|
||||
- Y: Original file line count.
|
||||
- A: Modified file starting line.
|
||||
- B: Modified file line count.
|
||||
- (-) lines: Removed from the original.
|
||||
- (+) lines: Added in the modified version.
|
||||
- Unmarked lines: Unchanged context.
|
||||
|
||||
Example:
|
||||
|
||||
@@ -84,15 +84,15 @@ export class OpenAIPrompts implements Prompts {
|
||||
<file path="/home/project/package.json">
|
||||
// full file content here
|
||||
</file>
|
||||
</</${MODIFICATIONS_TAG_NAME}>
|
||||
</${MODIFICATIONS_TAG_NAME}>
|
||||
</diff_spec>
|
||||
|
||||
<artifact_info>
|
||||
Bolt generates a SINGLE, comprehensive artifact for each project. This artifact includes all necessary steps and components, such as:
|
||||
|
||||
- Shell commands to execute, including dependencies to install via a package manager (NPM)
|
||||
- Files to create along with their contents
|
||||
- Folders to create if required
|
||||
- Shell commands to execute, including dependencies to install via a package manager (NPM).
|
||||
- Files to create along with their contents.
|
||||
- Folders to create if required.
|
||||
|
||||
<artifact_instructions>
|
||||
1. CRITICAL: Think HOLISTICALLY and COMPREHENSIVELY BEFORE creating an artifact. This means:
|
||||
|
||||
Loading…
Reference in New Issue
Block a user