ChatGPT-Next-Web/app/requests.ts

247 lines
6.2 KiB
TypeScript
Raw Normal View History

2023-04-11 04:06:12 +00:00
import type { ChatRequest, ChatResponse } from "./api/openai/typing";
import { Message, ModelConfig, useAccessStore, useChatStore } from "./store";
import { showToast } from "./components/ui-lib";
2023-03-10 18:25:33 +00:00
2023-03-21 16:20:32 +00:00
// Timeout (ms) used both for the initial request and for each streamed chunk.
const TIME_OUT_MS = 30000;
2023-03-15 17:24:03 +00:00
2023-03-11 17:14:07 +00:00
const makeRequestParam = (
messages: Message[],
options?: {
filterBot?: boolean;
stream?: boolean;
2023-04-06 23:18:53 +00:00
},
2023-03-11 17:14:07 +00:00
): ChatRequest => {
let sendMessages = messages.map((v) => ({
role: v.role,
content: v.content,
}));
if (options?.filterBot) {
sendMessages = sendMessages.filter((m) => m.role !== "assistant");
}
2023-04-07 19:03:07 +00:00
const modelConfig = { ...useChatStore.getState().config.modelConfig };
// @yidadaa: wont send max_tokens, because it is nonsense for Muggles
// @ts-expect-error
delete modelConfig.max_tokens;
2023-03-11 12:54:24 +00:00
return {
2023-03-11 17:14:07 +00:00
messages: sendMessages,
stream: options?.stream,
...modelConfig,
2023-03-10 18:25:33 +00:00
};
2023-03-11 12:54:24 +00:00
};
function getHeaders() {
const accessStore = useAccessStore.getState();
let headers: Record<string, string> = {};
if (accessStore.enabledAccessControl()) {
headers["access-code"] = accessStore.accessCode;
}
2023-03-26 11:58:25 +00:00
if (accessStore.token && accessStore.token.length > 0) {
headers["token"] = accessStore.token;
}
return headers;
}
export function requestOpenaiClient(path: string) {
return (body: any, method = "POST") =>
2023-04-06 09:14:19 +00:00
fetch("/api/openai?_vercel_no_cache=1", {
method,
headers: {
"Content-Type": "application/json",
path,
...getHeaders(),
},
body: body && JSON.stringify(body),
});
}
2023-03-11 12:54:24 +00:00
export async function requestChat(messages: Message[]) {
2023-03-11 17:14:07 +00:00
const req: ChatRequest = makeRequestParam(messages, { filterBot: true });
2023-03-10 18:25:33 +00:00
const res = await requestOpenaiClient("v1/chat/completions")(req);
2023-03-10 18:25:33 +00:00
try {
2023-04-11 04:06:12 +00:00
const response = (await res.json()) as ChatResponse;
return response;
} catch (error) {
console.error("[Request Chat] ", error, res.body);
}
}
export async function requestUsage() {
const formatDate = (d: Date) =>
`${d.getFullYear()}-${(d.getMonth() + 1).toString().padStart(2, "0")}-${d
.getDate()
.toString()
.padStart(2, "0")}`;
2023-04-06 09:34:17 +00:00
const ONE_DAY = 2 * 24 * 60 * 60 * 1000;
const now = new Date(Date.now() + ONE_DAY);
const startOfMonth = new Date(now.getFullYear(), now.getMonth(), 1);
const startDate = formatDate(startOfMonth);
const endDate = formatDate(now);
2023-04-05 19:56:54 +00:00
const [used, subs] = await Promise.all([
requestOpenaiClient(
2023-04-06 23:18:53 +00:00
`dashboard/billing/usage?start_date=${startDate}&end_date=${endDate}`,
2023-04-05 19:56:54 +00:00
)(null, "GET"),
requestOpenaiClient("dashboard/billing/subscription")(null, "GET"),
]);
2023-04-05 19:56:54 +00:00
const response = (await used.json()) as {
total_usage?: number;
error?: {
type: string;
message: string;
};
2023-04-05 19:56:54 +00:00
};
2023-04-02 18:55:08 +00:00
2023-04-05 19:56:54 +00:00
const total = (await subs.json()) as {
hard_limit_usd?: number;
};
2023-04-05 19:56:54 +00:00
if (response.error && response.error.type) {
showToast(response.error.message);
return;
}
2023-04-05 19:56:54 +00:00
if (response.total_usage) {
response.total_usage = Math.round(response.total_usage) / 100;
}
return {
used: response.total_usage,
subscription: total.hard_limit_usd,
};
2023-03-10 18:25:33 +00:00
}
2023-03-11 12:54:24 +00:00
/**
 * Stream a chat completion through the /api/chat-stream proxy.
 *
 * Progress is delivered via options.onMessage(accumulatedText, done);
 * failures via options.onError(error, statusCode?). The AbortController is
 * handed to options.onController so the caller can cancel mid-stream.
 */
export async function requestChatStream(
  messages: Message[],
  options?: {
    filterBot?: boolean;
    // NOTE(review): accepted but never read in this function — the model
    // config is pulled from the store inside makeRequestParam. Confirm
    // whether callers still rely on passing it.
    modelConfig?: ModelConfig;
    onMessage: (message: string, done: boolean) => void;
    onError: (error: Error, statusCode?: number) => void;
    onController?: (controller: AbortController) => void;
  },
) {
  const req = makeRequestParam(messages, {
    stream: true,
    filterBot: options?.filterBot,
  });

  console.log("[Request] ", req);

  const controller = new AbortController();
  // Abort the whole request if the server never starts responding.
  const reqTimeoutId = setTimeout(() => controller.abort(), TIME_OUT_MS);

  try {
    const res = await fetch("/api/chat-stream", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        path: "v1/chat/completions",
        ...getHeaders(),
      },
      body: JSON.stringify(req),
      signal: controller.signal,
    });
    clearTimeout(reqTimeoutId);

    let responseText = "";

    // Deliver the final accumulated text and cancel any pending read.
    const finish = () => {
      options?.onMessage(responseText, true);
      controller.abort();
    };

    if (res.ok) {
      const reader = res.body?.getReader();
      const decoder = new TextDecoder();

      options?.onController?.(controller);

      while (true) {
        // Per-chunk timeout: finish with what we have if no data arrives
        // within TIME_OUT_MS.
        const resTimeoutId = setTimeout(() => finish(), TIME_OUT_MS);
        const content = await reader?.read();
        clearTimeout(resTimeoutId);
        if (!content || !content.value) {
          break;
        }

        // stream:true keeps multi-byte characters split across chunks intact.
        const text = decoder.decode(content.value, { stream: true });
        responseText += text;

        const done = content.done;
        options?.onMessage(responseText, false);

        if (done) {
          break;
        }
      }

      finish();
    } else if (res.status === 401) {
      console.error("Unauthorized");
      options?.onError(new Error("Unauthorized"), res.status);
    } else {
      console.error("Stream Error", res.body);
      options?.onError(new Error("Stream Error"), res.status);
    }
  } catch (err) {
    // Network failure or an abort triggered by the timeouts above.
    console.error("NetWork Error", err);
    options?.onError(err as Error);
  }
}
2023-03-10 18:25:33 +00:00
export async function requestWithPrompt(messages: Message[], prompt: string) {
messages = messages.concat([
{
2023-03-13 16:25:07 +00:00
role: "user",
2023-03-10 18:25:33 +00:00
content: prompt,
date: new Date().toLocaleString(),
},
]);
const res = await requestChat(messages);
return res?.choices?.at(0)?.message?.content ?? "";
2023-03-10 18:25:33 +00:00
}
2023-03-26 10:59:09 +00:00
// To store message streaming controller
export const ControllerPool = {
controllers: {} as Record<string, AbortController>,
addController(
sessionIndex: number,
2023-04-05 19:19:33 +00:00
messageId: number,
2023-04-06 23:18:53 +00:00
controller: AbortController,
2023-03-26 10:59:09 +00:00
) {
2023-04-05 19:19:33 +00:00
const key = this.key(sessionIndex, messageId);
2023-03-26 10:59:09 +00:00
this.controllers[key] = controller;
return key;
},
2023-04-05 19:19:33 +00:00
stop(sessionIndex: number, messageId: number) {
const key = this.key(sessionIndex, messageId);
2023-03-26 10:59:09 +00:00
const controller = this.controllers[key];
controller?.abort();
},
2023-04-05 19:19:33 +00:00
remove(sessionIndex: number, messageId: number) {
const key = this.key(sessionIndex, messageId);
2023-03-26 10:59:09 +00:00
delete this.controllers[key];
},
key(sessionIndex: number, messageIndex: number) {
return `${sessionIndex},${messageIndex}`;
},
};