diff --git a/src/routes/(app)/+page.svelte b/src/routes/(app)/+page.svelte
index 7b671755b..5adba70dc 100644
--- a/src/routes/(app)/+page.svelte
+++ b/src/routes/(app)/+page.svelte
@@ -534,6 +534,8 @@
 
 			console.log(docs);
 
+			console.log(model);
+
 			const res = await generateOpenAIChatCompletion(
 				localStorage.token,
 				{
@@ -586,7 +588,9 @@
 					max_tokens: $settings?.options?.num_predict ?? undefined,
 					docs: docs.length > 0 ? docs : undefined
 				},
-				model.source === 'litellm' ? `${LITELLM_API_BASE_URL}/v1` : `${OPENAI_API_BASE_URL}`
+				model.source.toLowerCase() === 'litellm'
+					? `${LITELLM_API_BASE_URL}/v1`
+					: `${OPENAI_API_BASE_URL}`
 			);
 
 			if (res && res.ok) {
diff --git a/src/routes/(app)/c/[id]/+page.svelte b/src/routes/(app)/c/[id]/+page.svelte
index 0eded240f..d66122e89 100644
--- a/src/routes/(app)/c/[id]/+page.svelte
+++ b/src/routes/(app)/c/[id]/+page.svelte
@@ -602,7 +602,9 @@
 					max_tokens: $settings?.options?.num_predict ?? undefined,
 					docs: docs.length > 0 ? docs : undefined
 				},
-				model.source === 'litellm' ? `${LITELLM_API_BASE_URL}/v1` : `${OPENAI_API_BASE_URL}`
+				model.source.toLowerCase() === 'litellm'
+					? `${LITELLM_API_BASE_URL}/v1`
+					: `${OPENAI_API_BASE_URL}`
 			);
 
 			if (res && res.ok) {
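
Both hunks apply the same change: model.source is lowercased before comparing against 'litellm', so a source stored with different casing (e.g. 'LiteLLM') still routes the request to the LiteLLM proxy rather than falling through to the OpenAI endpoint. A minimal standalone sketch of that selection logic, assuming a Model shape and placeholder constant values that are not taken from this diff:

// Minimal sketch (assumed names and values; not part of the diff):
// picks the chat-completion base URL from a model's source, case-insensitively.
const LITELLM_API_BASE_URL = '/litellm/api'; // assumed placeholder value
const OPENAI_API_BASE_URL = '/openai/api';   // assumed placeholder value

interface Model {
	source?: string;
}

const getCompletionBaseUrl = (model: Model): string =>
	(model.source ?? '').toLowerCase() === 'litellm'
		? `${LITELLM_API_BASE_URL}/v1`
		: OPENAI_API_BASE_URL;

// e.g. getCompletionBaseUrl({ source: 'LiteLLM' }) === '/litellm/api/v1'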