// ChatGPT-Next-Web/app/requests.ts
import type { ChatRequest, ChatReponse } from "./api/chat/typing";
import { Message } from "./store";
2023-03-15 17:24:03 +00:00
const TIME_OUT_MS = 30000
2023-03-11 17:14:07 +00:00
const makeRequestParam = (
messages: Message[],
options?: {
filterBot?: boolean;
stream?: boolean;
}
): ChatRequest => {
let sendMessages = messages.map((v) => ({
role: v.role,
content: v.content,
}));
if (options?.filterBot) {
sendMessages = sendMessages.filter((m) => m.role !== "assistant");
}
2023-03-11 12:54:24 +00:00
return {
2023-03-10 18:25:33 +00:00
model: "gpt-3.5-turbo",
2023-03-11 17:14:07 +00:00
messages: sendMessages,
stream: options?.stream,
2023-03-10 18:25:33 +00:00
};
2023-03-11 12:54:24 +00:00
};
export async function requestChat(messages: Message[]) {
2023-03-11 17:14:07 +00:00
const req: ChatRequest = makeRequestParam(messages, { filterBot: true });
2023-03-10 18:25:33 +00:00
const res = await fetch("/api/chat", {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify(req),
});
return (await res.json()) as ChatReponse;
}
2023-03-11 12:54:24 +00:00
export async function requestChatStream(
messages: Message[],
options?: {
2023-03-11 17:14:07 +00:00
filterBot?: boolean;
2023-03-11 12:54:24 +00:00
onMessage: (message: string, done: boolean) => void;
2023-03-11 17:14:07 +00:00
onError: (error: Error) => void;
2023-03-11 12:54:24 +00:00
}
) {
2023-03-11 17:14:07 +00:00
const req = makeRequestParam(messages, {
stream: true,
filterBot: options?.filterBot,
});
2023-03-11 12:54:24 +00:00
2023-03-13 16:25:07 +00:00
const controller = new AbortController();
2023-03-15 17:24:03 +00:00
const reqTimeoutId = setTimeout(() => controller.abort(), TIME_OUT_MS);
2023-03-13 16:25:07 +00:00
try {
const res = await fetch("/api/chat-stream", {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify(req),
signal: controller.signal,
});
2023-03-13 16:40:04 +00:00
clearTimeout(reqTimeoutId);
2023-03-13 16:25:07 +00:00
let responseText = "";
2023-03-13 16:40:04 +00:00
const finish = () => {
options?.onMessage(responseText, true);
controller.abort();
};
2023-03-13 16:25:07 +00:00
if (res.ok) {
const reader = res.body?.getReader();
const decoder = new TextDecoder();
while (true) {
// handle time out, will stop if no response in 10 secs
2023-03-15 17:24:03 +00:00
const resTimeoutId = setTimeout(() => finish(), TIME_OUT_MS);
2023-03-13 16:25:07 +00:00
const content = await reader?.read();
2023-03-13 16:40:04 +00:00
clearTimeout(resTimeoutId);
2023-03-13 16:25:07 +00:00
const text = decoder.decode(content?.value);
responseText += text;
const done = !content || content.done;
options?.onMessage(responseText, false);
if (done) {
break;
}
2023-03-11 12:54:24 +00:00
}
2023-03-13 16:25:07 +00:00
finish();
} else {
2023-03-13 16:34:52 +00:00
console.error("Stream Error");
2023-03-13 16:25:07 +00:00
options?.onError(new Error("Stream Error"));
}
} catch (err) {
2023-03-13 16:34:52 +00:00
console.error("NetWork Error");
2023-03-11 17:14:07 +00:00
options?.onError(new Error("NetWork Error"));
2023-03-11 12:54:24 +00:00
}
}
2023-03-10 18:25:33 +00:00
export async function requestWithPrompt(messages: Message[], prompt: string) {
messages = messages.concat([
{
2023-03-13 16:25:07 +00:00
role: "user",
2023-03-10 18:25:33 +00:00
content: prompt,
date: new Date().toLocaleString(),
},
]);
const res = await requestChat(messages);
return res.choices.at(0)?.message?.content ?? "";
}