fix: support custom api endpoint (#4016)

fred-bf 2024-02-07 13:40:30 +08:00 committed by GitHub
parent 0869455612
commit b8f0822214
1 changed file with 17 additions and 16 deletions


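In short, the change makes the Gemini client read the user-configured endpoint (accessStore.googleUrl) first and fall back to the built-in proxy or the relative API route only when no custom endpoint is set. The helper below is a minimal sketch of that resolution order, not code from the commit: the field names mirror identifiers in the diff (googleUrl, googleApiKey, isApp, DEFAULT_API_HOST, Google.ChatPath), while resolveGoogleBaseUrl itself and its config object are hypothetical.

// Hypothetical sketch of the endpoint resolution this commit introduces; not part of the diff.
interface GoogleEndpointConfig {
  googleUrl: string; // user-configured endpoint (accessStore.googleUrl), may be empty
  googleApiKey: string; // accessStore.googleApiKey
  isApp: boolean; // !!getClientConfig()?.isApp
  defaultApiHost: string; // DEFAULT_API_HOST
  chatPath: string; // Google.ChatPath
  relativePath: string; // this.path(Google.ChatPath) inside the client class
}

function resolveGoogleBaseUrl(cfg: GoogleEndpointConfig): string {
  // A custom endpoint, when present, always wins.
  if (cfg.googleUrl) return cfg.googleUrl;

  // Otherwise fall back exactly as the ternary in the diff does:
  // the app build goes through the bundled proxy with the key in the query string,
  // the web build uses the relative API route.
  return cfg.isApp
    ? cfg.defaultApiHost +
        "/api/proxy/google/" +
        cfg.chatPath +
        `?key=${cfg.googleApiKey}`
    : cfg.relativePath;
}
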
@@ -72,23 +72,26 @@ export class GeminiProApi implements LLMApi {
],
};
const accessStore = useAccessStore.getState();
let baseUrl = accessStore.googleUrl;
const isApp = !!getClientConfig()?.isApp;
const shouldStream = !!options.config.stream;
let shouldStream = !!options.config.stream;
const controller = new AbortController();
options.onController?.(controller);
const accessStore = useAccessStore.getState();
try {
let chatPath = this.path(Google.ChatPath);
// let baseUrl = accessStore.googleUrl;
chatPath = isApp
? DEFAULT_API_HOST +
"/api/proxy/google/" +
Google.ChatPath +
`?key=${accessStore.googleApiKey}`
: chatPath;
if (!baseUrl) {
baseUrl = isApp
? DEFAULT_API_HOST +
"/api/proxy/google/" +
Google.ChatPath +
`?key=${accessStore.googleApiKey}`
: chatPath;
}
const chatPayload = {
method: "POST",
@@ -96,7 +99,7 @@ export class GeminiProApi implements LLMApi {
signal: controller.signal,
headers: getHeaders(),
};
console.log("[Request] google chatPath: ", chatPath, isApp);
// make a fetch request
const requestTimeoutId = setTimeout(
() => controller.abort(),
@@ -105,10 +108,6 @@ export class GeminiProApi implements LLMApi {
if (shouldStream) {
let responseText = "";
let remainText = "";
let streamChatPath = chatPath.replace(
"generateContent",
"streamGenerateContent",
);
let finished = false;
let existingTexts: string[] = [];
@@ -139,8 +138,10 @@ export class GeminiProApi implements LLMApi {
// start animaion
animateResponseText();
console.log("[Proxy Endpoint] ", streamChatPath);
fetch(streamChatPath, chatPayload)
fetch(
baseUrl.replace("generateContent", "streamGenerateContent"),
chatPayload,
)
.then((response) => {
const reader = response?.body?.getReader();
const decoder = new TextDecoder();
@@ -191,7 +192,7 @@ export class GeminiProApi implements LLMApi {
console.error("Error:", error);
});
} else {
const res = await fetch(chatPath, chatPayload);
const res = await fetch(baseUrl, chatPayload);
clearTimeout(requestTimeoutId);
const resJson = await res.json();
if (resJson?.promptFeedback?.blockReason) {
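A closing note on how the resolved URL is consumed: with streamChatPath gone, the streaming branch derives its endpoint by swapping the generateContent action for streamGenerateContent on the same baseUrl, so a custom endpoint is honored in both modes. The sketch below restates that branch choice; fetchGeminiChat is a hypothetical wrapper, while baseUrl, chatPayload, and shouldStream mirror the variables in the diff.

// Hypothetical wrapper showing how the two branches above pick their URL; not part of the commit.
async function fetchGeminiChat(
  baseUrl: string,
  chatPayload: RequestInit,
  shouldStream: boolean,
): Promise<Response> {
  const url = shouldStream
    ? baseUrl.replace("generateContent", "streamGenerateContent") // streaming endpoint
    : baseUrl; // plain generateContent call
  return fetch(url, chatPayload);
}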