open-webui/src/lib/apis/openai/index.ts

import { OPENAI_API_BASE_URL } from '$lib/constants';
import { promptTemplate } from '$lib/utils';
import { type Model, models, settings } from '$lib/stores';

export const getOpenAIConfig = async (token: string = '') => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/config`, {
		method: 'GET',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		}
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			} else {
				error = 'Server connection failed';
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res;
};

export const updateOpenAIConfig = async (token: string = '', enable_openai_api: boolean) => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/config/update`, {
		method: 'POST',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		},
		body: JSON.stringify({
			enable_openai_api: enable_openai_api
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			} else {
				error = 'Server connection failed';
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res;
};
export const getOpenAIUrls = async (token: string = '') => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/urls`, {
		method: 'GET',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		}
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			} else {
				error = 'Server connection failed';
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res.OPENAI_API_BASE_URLS;
};

export const updateOpenAIUrls = async (token: string = '', urls: string[]) => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/urls/update`, {
		method: 'POST',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		},
		body: JSON.stringify({
			urls: urls
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			} else {
				error = 'Server connection failed';
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res.OPENAI_API_BASE_URLS;
};
export const getOpenAIKeys = async (token: string = '') => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/keys`, {
		method: 'GET',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		}
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			} else {
				error = 'Server connection failed';
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res.OPENAI_API_KEYS;
};

export const updateOpenAIKeys = async (token: string = '', keys: string[]) => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/keys/update`, {
		method: 'POST',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		},
		body: JSON.stringify({
			keys: keys
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			} else {
				error = 'Server connection failed';
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res.OPENAI_API_KEYS;
};
export const getOpenAIModels = async (token: string, urlIdx?: number) => {
	let error = null;

	const res = await fetch(
		`${OPENAI_API_BASE_URL}/models${typeof urlIdx === 'number' ? `/${urlIdx}` : ''}`,
		{
			method: 'GET',
			headers: {
				Accept: 'application/json',
				'Content-Type': 'application/json',
				...(token && { authorization: `Bearer ${token}` })
			}
		}
	)
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			error = `OpenAI: ${err?.error?.message ?? 'Network Problem'}`;
			return [];
		});

	if (error) {
		throw error;
	}

	return res;
};

export const getOpenAIModelsDirect = async (
	base_url: string = 'https://api.openai.com/v1',
	api_key: string = ''
) => {
	let error = null;

	const res = await fetch(`${base_url}/models`, {
		method: 'GET',
		headers: {
			'Content-Type': 'application/json',
			Authorization: `Bearer ${api_key}`
		}
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			error = `OpenAI: ${err?.error?.message ?? 'Network Problem'}`;
			return null;
		});

	if (error) {
		throw error;
	}

	const models = Array.isArray(res) ? res : res?.data ?? null;

	return models
		.map((model) => ({ id: model.id, name: model.name ?? model.id, external: true }))
		.filter((model) => (base_url.includes('openai') ? model.name.includes('gpt') : true))
		.sort((a, b) => {
			return a.name.localeCompare(b.name);
		});
};
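// Illustrative usage sketch for getOpenAIModelsDirect (assumptions: a hypothetical
// `apiKey` variable and the public OpenAI endpoint; this comment is not part of the
// module's API):
//
// const directModels = await getOpenAIModelsDirect('https://api.openai.com/v1', apiKey);
// // => e.g. [{ id: 'gpt-3.5-turbo', name: 'gpt-3.5-turbo', external: true }, ...]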
export const generateOpenAIChatCompletion = async (
	token: string = '',
	body: object,
	url: string = OPENAI_API_BASE_URL
): Promise<[Response | null, AbortController]> => {
	const controller = new AbortController();
	let error = null;

	const res = await fetch(`${url}/chat/completions`, {
		signal: controller.signal,
		method: 'POST',
		headers: {
			Authorization: `Bearer ${token}`,
			'Content-Type': 'application/json'
		},
		body: JSON.stringify(body)
	}).catch((err) => {
		console.log(err);
		error = err;
		return null;
	});

	if (error) {
		throw error;
	}

	return [res, controller];
};
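// Illustrative usage sketch for generateOpenAIChatCompletion (assumptions: a valid
// `token` string and an OpenAI-style request body; this comment is not part of the
// module's API):
//
// const [res, controller] = await generateOpenAIChatCompletion(token, {
// 	model: 'gpt-3.5-turbo',
// 	stream: true,
// 	messages: [{ role: 'user', content: 'Hello!' }]
// });
// if (res !== null && res.ok && res.body) {
// 	const reader = res.body.getReader();
// 	// ...consume streamed chunks here; controller.abort() cancels the request.
// }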
export const synthesizeOpenAISpeech = async (
	token: string = '',
	speaker: string = 'alloy',
	text: string = '',
	model: string = 'tts-1'
) => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/audio/speech`, {
		method: 'POST',
		headers: {
			Authorization: `Bearer ${token}`,
			'Content-Type': 'application/json'
		},
		body: JSON.stringify({
			model: model,
			input: text,
			voice: speaker
		})
	}).catch((err) => {
		console.log(err);
		error = err;
		return null;
	});

	if (error) {
		throw error;
	}

	return res;
};
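// Illustrative usage sketch for synthesizeOpenAISpeech (assumption: the caller turns the
// raw Response into a playable blob URL; this comment is not part of the module's API):
//
// const res = await synthesizeOpenAISpeech(token, 'alloy', 'Hello there!');
// if (res !== null && res.ok) {
// 	const audio = new Audio(URL.createObjectURL(await res.blob()));
// 	await audio.play();
// }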
export const generateTitle = async (
	token: string = '',
	template: string,
	model: string,
	prompt: string,
	url: string = OPENAI_API_BASE_URL
) => {
	let error = null;

	template = promptTemplate(template, prompt);
	console.log(template);

	const res = await fetch(`${url}/chat/completions`, {
		method: 'POST',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			Authorization: `Bearer ${token}`
		},
		body: JSON.stringify({
			model: model,
			messages: [
				{
					role: 'user',
					content: template
				}
			],
			stream: false,
			// Restricting the max tokens to 50 to avoid long titles
			max_tokens: 50
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res?.choices?.[0]?.message?.content?.replace(/["']/g, '') ?? 'New Chat';
};

export const generateSearchQuery = async (
	token: string = '',
	model: string,
	previousMessages: string[],
	prompt: string,
	url: string = OPENAI_API_BASE_URL
): Promise<string | undefined> => {
	let error = null;

	// TODO: Allow users to specify the prompt
	// Get the current date in the format "January 20, 2024"
	const currentDate = new Intl.DateTimeFormat('en-US', {
		year: 'numeric',
		month: 'long',
		day: '2-digit'
	}).format(new Date());

	const res = await fetch(`${url}/chat/completions`, {
		method: 'POST',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			Authorization: `Bearer ${token}`
		},
		body: JSON.stringify({
			model: model,
			// Few shot prompting
			messages: [
				{
					role: 'assistant',
					content: `You are tasked with generating web search queries. Give me an appropriate query to answer my question for google search. Answer with only the query. Today is ${currentDate}.`
				},
				{
					role: 'user',
					content: prompt
				}
				// {
				// 	role: 'user',
				// 	content:
				// 		(previousMessages.length > 0
				// 			? `Previous Questions:\n${previousMessages.join('\n')}\n\n`
				// 			: '') + `Current Question: ${prompt}`
				// }
			],
			stream: false,
			// Restricting the max tokens to 30 to avoid long search queries
			max_tokens: 30
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			}
			return undefined;
		});

	if (error) {
		throw error;
	}

	return res?.choices?.[0]?.message?.content?.replace(/["']/g, '') ?? undefined;
};