feat: 0.2.0 support

Timothy J. Baek 2024-05-27 12:04:53 -07:00
parent 24ed32f7f2
commit 635b90c685
3 changed files with 42 additions and 41 deletions

File diff suppressed because one or more lines are too long

View File

@@ -1,12 +1,12 @@
-export const getOpenAIModels = async (token = "", url = "") => {
+export const getModels = async (key, url) => {
     let error = null;

-    const res = await fetch(`${url}/models`, {
+    const res = await fetch(`${url}/api/models`, {
         method: "GET",
         headers: {
             Accept: "application/json",
             "Content-Type": "application/json",
-            ...(token && { authorization: `Bearer ${token}` }),
+            ...(key && { authorization: `Bearer ${key}` }),
         },
     })
         .then(async (res) => {
@@ -14,48 +14,36 @@ export const getOpenAIModels = async (token = "", url = "") => {
             return res.json();
         })
         .catch((err) => {
-            error = `OpenAI: ${err?.error?.message ?? "Network Problem"}`;
-            return [];
+            console.log(err);
+            error = err;
+            return null;
         });

     if (error) {
         throw error;
     }

-    const models = Array.isArray(res) ? res : res?.data ?? null;
-
-    return models
-        ? models
-              .map((model) => ({
-                  id: model.id,
-                  name: model.name ?? model.id,
-                  url: url,
-                  custom_info: model.custom_info,
-              }))
-              .sort((a, b) => {
-                  return a.name.localeCompare(b.name);
-              })
-        : models;
-};
-
-export const getModels = async (key, url) => {
-    let models = await Promise.all([
-        getOpenAIModels(key, `${url}/ollama/v1`).catch((error) => {
-            console.log(error);
-            return null;
-        }),
-        getOpenAIModels(key, `${url}/openai/api`).catch((error) => {
-            console.log(error);
-            return null;
-        }),
-    ]);
+    let models = res?.data ?? [];

     models = models
         .filter((models) => models)
-        .reduce((a, e, i, arr) => a.concat(e), []);
+        .sort((a, b) => {
+            // Compare case-insensitively
+            const lowerA = a.name.toLowerCase();
+            const lowerB = b.name.toLowerCase();
+
+            if (lowerA < lowerB) return -1;
+            if (lowerA > lowerB) return 1;
+
+            // If same case-insensitively, sort by original strings,
+            // lowercase will come before uppercase due to ASCII values
+            if (a < b) return -1;
+            if (a > b) return 1;
+            return 0; // They are equal
+        });

     console.log(models);
     return models;
 };
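
Note: Open WebUI 0.2.0 serves an aggregated model list from `${url}/api/models`, which is why the old two-request fan-out over `/ollama/v1` and `/openai/api` is dropped here. A minimal usage sketch of the new helper follows; the base URL and API key are illustrative placeholders, not values from the commit:

    // Hypothetical caller of the exported getModels(key, url) above.
    const url = "http://localhost:3000"; // placeholder Open WebUI base URL
    const key = "sk-your-api-key";       // placeholder API key

    (async () => {
        try {
            // Returns the entries from /api/models, sorted by name.
            const models = await getModels(key, url);
            // Each entry is expected to expose at least id, name, and owned_by,
            // which the UI later uses for selection and request routing.
            console.log(models.map((m) => `${m.id} (${m.owned_by})`));
        } catch (error) {
            // getModels rethrows fetch/parse errors after logging them.
            console.log(error);
        }
    })();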

View File

@@ -31,6 +31,7 @@
         url = "";
         key = "";
         model = "";
+        models = [];
         showConfig = true;
     };
@@ -80,10 +81,18 @@
         key = _storageCache.key ?? "";
         model = _storageCache.model ?? "";
         if (_storageCache.url && _storageCache.key && _storageCache.model) {
-            models = await getModels(_storageCache.key, _storageCache.url);
+            models = await getModels(_storageCache.key, _storageCache.url).catch(
+                (error) => {
+                    console.log(error);
+                    resetConfig();
+                }
+            );
+
+            if (models) {
                 showConfig = false;
+            }
         }
     }

     const down = async (e) => {
         // Reset the configuration when ⌘Shift+Escape is pressed
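
Note: the practical effect of this hunk is that a stale cached configuration (for example, one pointing at the pre-0.2.0 endpoints) no longer fails silently; the error is logged, resetConfig() wipes url/key/model/models and re-shows the setup panel. A rough standalone restatement of that logic, where loadStoredConfig and its parameters are our illustration rather than names from the commit (only getModels and resetConfig come from the code above):

    // Sketch: validate a cached {url, key, model} triple by fetching the model
    // list; on failure, fall back to the configuration screen.
    const loadStoredConfig = async (cache, { getModels, resetConfig }) => {
        if (!(cache.url && cache.key && cache.model)) {
            return { models: [], showConfig: true };
        }
        const models = await getModels(cache.key, cache.url).catch((error) => {
            console.log(error);
            resetConfig();
            return null;
        });
        return models ? { models, showConfig: false } : { models: [], showConfig: true };
    };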
@@ -161,7 +170,11 @@
                 ],
                 stream: true,
             },
-            models.find((m) => m.id === model)?.url
+            models.find((m) => m.id === model)?.owned_by === "openai" ??
+            false
+                ? `${url}/openai`
+                : `${url}/ollama/v1`
         );

         if (res && res.ok) {
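
Note: the completion request now picks its base path from the `owned_by` field that `/api/models` reports for each model, instead of the per-model url captured by the old fetch. The helper below is our own restatement of that decision (the function name and example model ids are illustrative, and the redundant `?? false` from the diff is omitted):

    // Pick the completions base path for the currently selected model:
    // models reported as owned_by "openai" go through the /openai proxy,
    // everything else is treated as an Ollama model under /ollama/v1.
    const completionsBase = (models, modelId, url) => {
        const selected = models.find((m) => m.id === modelId);
        return selected?.owned_by === "openai"
            ? `${url}/openai`
            : `${url}/ollama/v1`;
    };

    // e.g. completionsBase(models, "gpt-4o", url)        -> `${url}/openai`
    //      completionsBase(models, "llama3:latest", url) -> `${url}/ollama/v1`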