From f0da5b9ea421d19f3198d7c79503d1e8a2f3096d Mon Sep 17 00:00:00 2001
From: "Timothy J. Baek"
Date: Sat, 27 Apr 2024 19:02:37 -0400
Subject: [PATCH] refac

---
 src/routes/(app)/+layout.svelte | 27 ++++-----------------------
 1 file changed, 4 insertions(+), 23 deletions(-)

diff --git a/src/routes/(app)/+layout.svelte b/src/routes/(app)/+layout.svelte
index 391fdebb2..390afb792 100644
--- a/src/routes/(app)/+layout.svelte
+++ b/src/routes/(app)/+layout.svelte
@@ -7,11 +7,11 @@
 	import { goto } from '$app/navigation';
 
-	import { getOllamaModels, getOllamaVersion } from '$lib/apis/ollama';
+	import { getModels as _getModels } from '$lib/utils';
+	import { getOllamaVersion } from '$lib/apis/ollama';
 	import { getModelfiles } from '$lib/apis/modelfiles';
 	import { getPrompts } from '$lib/apis/prompts';
-	import { getOpenAIModels } from '$lib/apis/openai';
-	import { getLiteLLMModels } from '$lib/apis/litellm';
+
 	import { getDocs } from '$lib/apis/documents';
 	import { getAllChatTags } from '$lib/apis/chats';
 
@@ -47,26 +47,7 @@
 	let showShortcuts = false;
 
 	const getModels = async () => {
-		let models = await Promise.all([
-			await getOllamaModels(localStorage.token).catch((error) => {
-				console.log(error);
-				return null;
-			}),
-			await getOpenAIModels(localStorage.token).catch((error) => {
-				console.log(error);
-				return null;
-			}),
-			await getLiteLLMModels(localStorage.token).catch((error) => {
-				console.log(error);
-				return null;
-			})
-		]);
-
-		models = models
-			.filter((models) => models)
-			.reduce((a, e, i, arr) => a.concat(e, ...(i < arr.length - 1 ? [{ name: 'hr' }] : [])), []);
-
-		return models;
+		return _getModels(localStorage.token);
 	};
 
 	const setOllamaVersion = async (version: string = '') => {
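
Note on the refactor: the removed Promise.all/reduce logic is not dropped, it is delegated to the shared `getModels` helper imported from `$lib/utils` (aliased to `_getModels` so it does not shadow the local wrapper). The shared helper itself is not part of this patch; the sketch below is only an assumption that it mirrors the inline implementation deleted above, with the token passed explicitly instead of read from localStorage.

	// Hypothetical $lib/utils helper, reconstructed from the deleted inline logic above.
	import { getOllamaModels } from '$lib/apis/ollama';
	import { getOpenAIModels } from '$lib/apis/openai';
	import { getLiteLLMModels } from '$lib/apis/litellm';

	export const getModels = async (token: string = '') => {
		// Query each backend in parallel; a failing backend resolves to null
		// instead of rejecting the whole Promise.all.
		let models = await Promise.all([
			getOllamaModels(token).catch((error) => {
				console.log(error);
				return null;
			}),
			getOpenAIModels(token).catch((error) => {
				console.log(error);
				return null;
			}),
			getLiteLLMModels(token).catch((error) => {
				console.log(error);
				return null;
			})
		]);

		// Drop backends that failed, then flatten the remaining lists with an
		// { name: 'hr' } separator entry between backends.
		models = models
			.filter((m) => m)
			.reduce(
				(a, e, i, arr) => a.concat(e, ...(i < arr.length - 1 ? [{ name: 'hr' }] : [])),
				[]
			);

		return models;
	};

Centralizing the backend-merging behavior in `$lib/utils` lets this layout keep a one-line wrapper while other callers can reuse the same helper.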