chore: layout refac

Timothy J. Baek 2023-12-26 11:32:22 -08:00
parent 753327522a
commit 0996f3c216
7 changed files with 101 additions and 73 deletions

View File

@@ -1,5 +1,32 @@
import { WEBUI_API_BASE_URL } from '$lib/constants';
+export const getSessionUser = async (token: string) => {
+let error = null;
+const res = await fetch(`${WEBUI_API_BASE_URL}/auths`, {
+method: 'GET',
+headers: {
+'Content-Type': 'application/json',
+Authorization: `Bearer ${token}`
+}
+})
+.then(async (res) => {
+if (!res.ok) throw await res.json();
+return res.json();
+})
+.catch((err) => {
+console.log(err);
+error = err.detail;
+return null;
+});
+if (error) {
+throw error;
+}
+return res;
+};
export const userSignIn = async (email: string, password: string) => {
let error = null;
@@ -17,10 +44,10 @@ export const userSignIn = async (email: string, password: string) => {
if (!res.ok) throw await res.json();
return res.json();
})
-.catch((error) => {
-console.log(error);
-error = error.detail;
+.catch((err) => {
+console.log(err);
+error = err.detail;
return null;
});
@@ -49,9 +76,9 @@ export const userSignUp = async (name: string, email: string, password: string)
if (!res.ok) throw await res.json();
return res.json();
})
-.catch((error) => {
-console.log(error);
-error = error.detail;
+.catch((err) => {
+console.log(err);
+error = err.detail;
return null;
});

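Note on the recurring rename in this commit: the catch callback parameter changes from error to err because the old name shadowed the outer let error = null, so error = error.detail wrote to the callback argument, the outer if (error) throw error check never fired, and a failed request quietly resolved to null. A minimal standalone sketch of the fixed pattern; the endpoint and the detail field are illustrative, not taken from this commit:

const getExample = async (token: string) => {
	let error = null;

	const res = await fetch('https://example.invalid/api', {
		method: 'GET',
		headers: { Authorization: `Bearer ${token}` }
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			// err no longer shadows the outer variable, so the assignment below sticks
			console.log(err);
			error = err.detail ?? 'Network Problem';
			return null;
		});

	if (error) {
		// with the old shadowed name this branch was unreachable
		throw error;
	}

	return res;
};
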
View File

@@ -1,35 +1,23 @@
-export const getOpenAIModels = async (
-base_url: string = 'https://api.openai.com/v1',
-api_key: string = ''
-) => {
+import { WEBUI_API_BASE_URL } from '$lib/constants';
+export const getBackendConfig = async () => {
let error = null;
-const res = await fetch(`${base_url}/models`, {
+const res = await fetch(`${WEBUI_API_BASE_URL}/`, {
method: 'GET',
headers: {
-'Content-Type': 'application/json',
-Authorization: `Bearer ${api_key}`
+'Content-Type': 'application/json'
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
-.catch((error) => {
-console.log(error);
-error = `OpenAI: ${error?.error?.message ?? 'Network Problem'}`;
+.catch((err) => {
+console.log(err);
+error = err;
return null;
});
if (error) {
throw error;
}
-let models = Array.isArray(res) ? res : res?.data ?? null;
-console.log(models);
-return models
-.map((model) => ({ name: model.id, external: true }))
-.filter((model) => (base_url.includes('openai') ? model.name.includes('gpt') : true));
+return res;
};

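The provider-specific getOpenAIModels call moves out of this module (see the new file further down), leaving a new getBackendConfig helper that throws on failure. A hedged sketch of consuming it, mirroring the root layout change at the end of this diff; the try/catch wrapper and the null fallback are illustrative assumptions, not part of the commit:

import { getBackendConfig } from '$lib/apis';

const loadBackendConfig = async () => {
	try {
		const backendConfig = await getBackendConfig();
		console.log(backendConfig);
		return backendConfig;
	} catch (err) {
		console.log(err);
		return null; // assumption: treat an unreachable backend as a null config
	}
};
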
View File

@@ -18,10 +18,10 @@ export const getOllamaVersion = async (
if (!res.ok) throw await res.json();
return res.json();
})
-.catch((error) => {
-console.log(error);
-if ('detail' in error) {
-error = error.detail;
+.catch((err) => {
+console.log(err);
+if ('detail' in err) {
+error = err.detail;
} else {
error = 'Server connection failed';
}
@@ -53,10 +53,10 @@ export const getOllamaModels = async (
if (!res.ok) throw await res.json();
return res.json();
})
-.catch((error) => {
-console.log(error);
-if ('detail' in error) {
-error = error.detail;
+.catch((err) => {
+console.log(err);
+if ('detail' in err) {
+error = err.detail;
} else {
error = 'Server connection failed';
}

View File

@@ -0,0 +1,35 @@
+export const getOpenAIModels = async (
+base_url: string = 'https://api.openai.com/v1',
+api_key: string = ''
+) => {
+let error = null;
+const res = await fetch(`${base_url}/models`, {
+method: 'GET',
+headers: {
+'Content-Type': 'application/json',
+Authorization: `Bearer ${api_key}`
+}
+})
+.then(async (res) => {
+if (!res.ok) throw await res.json();
+return res.json();
+})
+.catch((err) => {
+console.log(err);
+error = `OpenAI: ${err?.error?.message ?? 'Network Problem'}`;
+return null;
+});
+if (error) {
+throw error;
+}
+let models = Array.isArray(res) ? res : res?.data ?? null;
+console.log(models);
+return models
+.map((model) => ({ name: model.id, external: true }))
+.filter((model) => (base_url.includes('openai') ? model.name.includes('gpt') : true));
+};

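A short usage sketch for the relocated helper above; the base URL and API key are placeholders, and the gpt-name filter only applies when the base URL contains 'openai', as the code shows:

import { getOpenAIModels } from '$lib/apis/openai';

// Placeholder credentials; resolves to [{ name, external: true }, ...] or throws on failure
const models = await getOpenAIModels('https://api.openai.com/v1', 'sk-placeholder');
console.log((models ?? []).map((model) => model.name));
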
View File

@@ -18,9 +18,9 @@ export const updateUserRole = async (token: string, id: string, role: string) =>
if (!res.ok) throw await res.json();
return res.json();
})
-.catch((error) => {
-console.log(error);
-error = error.detail;
+.catch((err) => {
+console.log(err);
+error = err.detail;
return null;
});

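For completeness, a sketch of calling the helper shown in this hunk; the import path, user id, and role value are placeholder assumptions inferred from the signature above, not confirmed by this excerpt:

import { updateUserRole } from '$lib/apis/users';

// updateUserRole(token, id, role) per the hunk header; id and role here are placeholder values
const updatedUser = await updateUserRole(localStorage.token, 'some-user-id', 'admin').catch((err) => {
	console.log(err);
	return null;
});
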
View File

@@ -22,7 +22,7 @@
import toast from 'svelte-french-toast';
import { OLLAMA_API_BASE_URL, WEBUI_API_BASE_URL } from '$lib/constants';
import { getOllamaModels, getOllamaVersion } from '$lib/apis/ollama';
-import { getOpenAIModels } from '$lib/apis';
+import { getOpenAIModels } from '$lib/apis/openai';
import {
createNewChat,
deleteChatById,

View File

@@ -2,54 +2,32 @@
import { onMount, tick } from 'svelte';
import { config, user } from '$lib/stores';
import { goto } from '$app/navigation';
-import { WEBUI_API_BASE_URL } from '$lib/constants';
import toast, { Toaster } from 'svelte-french-toast';
+import { getBackendConfig } from '$lib/apis';
+import { getSessionUser } from '$lib/apis/auths';
import '../app.css';
import '../tailwind.css';
import 'tippy.js/dist/tippy.css';
let loaded = false;
onMount(async () => {
// Check Backend Status
-const res = await fetch(`${WEBUI_API_BASE_URL}/`, {
-method: 'GET',
-headers: {
-'Content-Type': 'application/json'
-}
-})
-.then(async (res) => {
-if (!res.ok) throw await res.json();
-return res.json();
-})
-.catch((error) => {
-console.log(error);
-return null;
-});
+const backendConfig = await getBackendConfig();
-if (res) {
-await config.set(res);
-console.log(res);
+if (backendConfig) {
+await config.set(backendConfig);
+console.log(backendConfig);
if ($config) {
if (localStorage.token) {
// Get Session User Info
-const sessionUser = await fetch(`${WEBUI_API_BASE_URL}/auths`, {
-method: 'GET',
-headers: {
-'Content-Type': 'application/json',
-Authorization: `Bearer ${localStorage.token}`
-}
-})
-.then(async (res) => {
-if (!res.ok) throw await res.json();
-return res.json();
-})
-.catch((error) => {
-console.log(error);
-toast.error(error.detail);
-return null;
-});
+const sessionUser = await getSessionUser(localStorage.token).catch((error) => {
+toast.error(error);
+return null;
+});
if (sessionUser) {
await user.set(sessionUser);