feat: add support for message continuation (#1)

Author: Connor Fogarty, 2024-07-19 04:12:55 -05:00 (committed by GitHub)
parent 7edf287768
commit cae55a7026
4 changed files with 105 additions and 3 deletions

lib/.server/llm/constants.ts

@@ -1,2 +1,5 @@
 // see https://docs.anthropic.com/en/docs/about-claude/models
 export const MAX_TOKENS = 8192;
+
+// limits the number of model responses that can be returned in a single request
+export const MAX_RESPONSE_SEGMENTS = 2;

lib/.server/llm/prompts.ts

@@ -1,4 +1,5 @@
 import { WORK_DIR } from '../../../utils/constants';
+import { stripIndents } from '../../../utils/stripIndent';
 
 export const getSystemPrompt = (cwd: string = WORK_DIR) => `
 You are Bolt, an expert AI assistant and exceptional senior software developer with vast knowledge across multiple programming languages, frameworks, and best practices.
@@ -198,3 +199,8 @@ Here are some examples of correct usage of artifacts:
 </example>
 </examples>
 `;
+
+export const CONTINUE_PROMPT = stripIndents`
+  Continue your prior response. IMPORTANT: Immediately begin from where you left off without any interruptions.
+  Do not repeat any content, including artifact and action tags.
+`;

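A note on the helper used above: stripIndents removes the per-line indentation from the template, so CONTINUE_PROMPT resolves to two flush-left lines. The tag below is only an illustration of that assumed behavior, not the project's actual utils/stripIndent implementation, and the names stripIndentsDemo and CONTINUE_PROMPT_DEMO are made up for the example.

// Illustration only: a minimal stripIndents-style template tag.
function stripIndentsDemo(strings: TemplateStringsArray, ...values: unknown[]): string {
  const raw = strings.reduce(
    (acc, chunk, i) => acc + chunk + (i < values.length ? String(values[i]) : ''),
    '',
  );

  return raw
    .split('\n')
    .map((line) => line.trim()) // drop per-line indentation
    .join('\n')
    .trim(); // drop the template's leading/trailing newlines
}

const CONTINUE_PROMPT_DEMO = stripIndentsDemo`
  Continue your prior response. IMPORTANT: Immediately begin from where you left off without any interruptions.
  Do not repeat any content, including artifact and action tags.
`;

// CONTINUE_PROMPT_DEMO === 'Continue your prior response. ...\nDo not repeat any content, ...'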
lib/.server/llm/switchable-stream.ts

@@ -0,0 +1,65 @@
+export default class SwitchableStream extends TransformStream {
+  private _controller: TransformStreamDefaultController | null = null;
+  private _currentReader: ReadableStreamDefaultReader | null = null;
+  private _switches = 0;
+
+  constructor() {
+    let controllerRef: TransformStreamDefaultController | undefined;
+
+    super({
+      start(controller) {
+        controllerRef = controller;
+      },
+    });
+
+    if (controllerRef === undefined) {
+      throw new Error('Controller not properly initialized');
+    }
+
+    this._controller = controllerRef;
+  }
+
+  async switchSource(newStream: ReadableStream) {
+    if (this._currentReader) {
+      await this._currentReader.cancel();
+    }
+
+    this._currentReader = newStream.getReader();
+
+    this._pumpStream();
+
+    this._switches++;
+  }
+
+  private async _pumpStream() {
+    if (!this._currentReader || !this._controller) {
+      throw new Error('Stream is not properly initialized');
+    }
+
+    try {
+      while (true) {
+        const { done, value } = await this._currentReader.read();
+
+        if (done) {
+          break;
+        }
+
+        this._controller.enqueue(value);
+      }
+    } catch (error) {
+      this._controller.error(error);
+    }
+  }
+
+  close() {
+    if (this._currentReader) {
+      this._currentReader.cancel();
+    }
+
+    this._controller?.terminate();
+  }
+
+  get switches() {
+    return this._switches;
+  }
+}

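For orientation, here is a small usage sketch of the class above. It is illustrative only and not part of the commit: the relative import path and the fixed two-chunk read loop are assumptions of the example. It attaches one source, reads the combined output from stream.readable, and closes the stream.

// Illustrative sketch only (not part of this commit). Assumes a Web Streams
// runtime (workerd, modern Node, or a browser) and a local './switchable-stream' path.
import SwitchableStream from './switchable-stream';

async function demo() {
  const encoder = new TextEncoder();
  const decoder = new TextDecoder();

  // A stand-in source; in the route change below this is `result.toAIStream()`.
  const source = new ReadableStream<Uint8Array>({
    start(controller) {
      controller.enqueue(encoder.encode('hello '));
      controller.enqueue(encoder.encode('world'));
      controller.close();
    },
  });

  const stream = new SwitchableStream();
  await stream.switchSource(source); // calling this again later appends another source

  const reader = stream.readable.getReader();
  let output = '';

  // Read the two chunks this demo source produces; the stream stays open for further sources.
  for (let i = 0; i < 2; i++) {
    const { value } = await reader.read();
    if (value) {
      output += decoder.decode(value);
    }
  }

  console.log(output); // "hello world"

  stream.close();
}

demo().catch(console.error);

Note that in the route change below, switchSource is only invoked from the previous stream's onFinish callback, so a new source is attached only after the earlier one has finished producing output.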
chat API route handler

@@ -1,12 +1,40 @@
 import { type ActionFunctionArgs } from '@remix-run/cloudflare';
-import { streamText, type Messages } from '../lib/.server/llm/stream-text';
+import { MAX_RESPONSE_SEGMENTS } from '../lib/.server/llm/constants';
+import { CONTINUE_PROMPT } from '../lib/.server/llm/prompts';
+import { streamText, type Messages, type StreamingOptions } from '../lib/.server/llm/stream-text';
+import SwitchableStream from '../lib/.server/llm/switchable-stream';
+import { StreamingTextResponse } from 'ai';
 
 export async function action({ context, request }: ActionFunctionArgs) {
   const { messages } = await request.json<{ messages: Messages }>();
 
+  const stream = new SwitchableStream();
+
   try {
-    const result = await streamText(messages, context.cloudflare.env, { toolChoice: 'none' });
-    return result.toAIStreamResponse();
+    const options: StreamingOptions = {
+      toolChoice: 'none',
+      onFinish: async ({ text: content, finishReason }) => {
+        if (finishReason !== 'length') {
+          return stream.close();
+        }
+
+        if (stream.switches >= MAX_RESPONSE_SEGMENTS) {
+          throw Error('Cannot continue message: maximum segments reached');
+        }
+
+        messages.push({ role: 'assistant', content });
+        messages.push({ role: 'user', content: CONTINUE_PROMPT });
+
+        const result = await streamText(messages, context.cloudflare.env, options);
+
+        return stream.switchSource(result.toAIStream());
+      },
+    };
+
+    const result = await streamText(messages, context.cloudflare.env, options);
+
+    stream.switchSource(result.toAIStream());
+
+    return new StreamingTextResponse(stream.readable);
   } catch (error) {
     console.log(error);