import React, { useEffect, useState, useCallback } from 'react';
import { Switch } from '~/components/ui/Switch';
import { useSettings } from '~/lib/hooks/useSettings';
import { LOCAL_PROVIDERS, URL_CONFIGURABLE_PROVIDERS } from '~/lib/stores/settings';
import type { IProviderConfig } from '~/types/model';
import { logStore } from '~/lib/stores/logs';
import { motion } from 'framer-motion';
import { classNames } from '~/utils/classNames';
import { BsRobot } from 'react-icons/bs';
import type { IconType } from 'react-icons';
import { TbBrandOpenai } from 'react-icons/tb';
import { providerBaseUrlEnvKeys } from '~/utils/constants';
import { useToast } from '~/components/ui/use-toast';

// Provider names handled by this tab, used to keep the icon/description lookups type-safe
type ProviderName = 'Ollama' | 'LMStudio' | 'OpenAILike';

const PROVIDER_ICONS: Record<ProviderName, IconType> = {
  Ollama: BsRobot,
  LMStudio: BsRobot,
  OpenAILike: TbBrandOpenai,
};

const PROVIDER_DESCRIPTIONS: Record<ProviderName, string> = {
  Ollama: 'Run open-source models locally on your machine',
  LMStudio: 'Local model inference with LM Studio',
  OpenAILike: 'Connect to OpenAI-compatible API endpoints',
};

// Base URL of the local Ollama API
const OLLAMA_API_URL = 'http://127.0.0.1:11434';

interface OllamaModel {
  name: string;
  digest: string;
  size: number;
  modified_at: string;
  details?: {
    family: string;
    parameter_size: string;
    quantization_level: string;
  };
  status?: 'idle' | 'updating' | 'updated' | 'error' | 'checking';
  error?: string;
  newDigest?: string;
  progress?: {
    current: number;
    total: number;
    status: string;
  };
}

interface OllamaServiceStatus {
  isRunning: boolean;
  lastChecked: Date;
  error?: string;
}

interface OllamaPullResponse {
  status: string;
  completed?: number;
  total?: number;
  digest?: string;
}

const isOllamaPullResponse = (data: unknown): data is OllamaPullResponse => {
  return (
    typeof data === 'object' &&
    data !== null &&
    'status' in data &&
    typeof (data as OllamaPullResponse).status === 'string'
  );
};

interface ManualInstallState {
  isOpen: boolean;
  modelString: string;
}

export function LocalProvidersTab() {
  const { success, error } = useToast();
  const { providers, updateProviderSettings } = useSettings();
  const [filteredProviders, setFilteredProviders] = useState<IProviderConfig[]>([]);
  const [categoryEnabled, setCategoryEnabled] = useState(false);
  const [editingProvider, setEditingProvider] = useState<string | null>(null);
  const [ollamaModels, setOllamaModels] = useState<OllamaModel[]>([]);
  const [isLoadingModels, setIsLoadingModels] = useState(false);
  const [serviceStatus, setServiceStatus] = useState<OllamaServiceStatus>({
    isRunning: false,
    lastChecked: new Date(),
  });
  const [isInstallingModel, setIsInstallingModel] = useState<string | null>(null);
  const [installProgress, setInstallProgress] = useState<{
    model: string;
    progress: number;
    status: string;
  } | null>(null);
  const [manualInstall, setManualInstall] = useState<ManualInstallState>({
    isOpen: false,
    modelString: '',
  });

  // Filter to local providers (plus OpenAILike), resolve base URLs from the environment, and sort
  useEffect(() => {
    const newFilteredProviders = Object.entries(providers || {})
      .filter(([key]) => [...LOCAL_PROVIDERS, 'OpenAILike'].includes(key))
      .map(([key, value]) => {
        const provider = value as IProviderConfig;
        const envKey = providerBaseUrlEnvKeys[key]?.baseUrlKey;

        // Read the environment-provided base URL, if any
        const envUrl = envKey ? (import.meta.env[envKey] as string | undefined) : undefined;
        console.log(`Checking env URL for ${key}:`, {
          envKey,
          envUrl,
          currentBaseUrl: provider.settings.baseUrl,
        });

        // If the environment provides a URL and no base URL is set yet, persist it
        if (envUrl && !provider.settings.baseUrl) {
          console.log(`Setting base URL for ${key} from env:`, envUrl);
          updateProviderSettings(key, {
            ...provider.settings,
            baseUrl: envUrl,
          });
        }

        return {
          name: key,
          settings: {
            ...provider.settings,
            baseUrl: provider.settings.baseUrl || envUrl,
          },
          staticModels: provider.staticModels || [],
          getDynamicModels: provider.getDynamicModels,
          getApiKeyLink: provider.getApiKeyLink,
          labelForGetApiKey: provider.labelForGetApiKey,
          icon: provider.icon,
        } as IProviderConfig;
      });

    // Custom sort: LMStudio first, OpenAILike last, everything else alphabetical
    const sorted = newFilteredProviders.sort((a, b) => {
      if (a.name === 'LMStudio') {
        return -1;
      }

      if (b.name === 'LMStudio') {
        return 1;
      }

      if (a.name === 'OpenAILike') {
        return 1;
      }

      if (b.name === 'OpenAILike') {
        return -1;
      }

      return a.name.localeCompare(b.name);
    });
    setFilteredProviders(sorted);
  }, [providers, updateProviderSettings]);

  // Helper to safely read a provider's environment-configured base URL
  const getEnvUrl = (provider: IProviderConfig): string | undefined => {
    const envKey = providerBaseUrlEnvKeys[provider.name]?.baseUrlKey;
    return envKey ? (import.meta.env[envKey] as string | undefined) : undefined;
  };

  // Keep the category toggle in sync with the individual provider toggles
  useEffect(() => {
    const newCategoryState = filteredProviders.every((p) => p.settings.enabled);
    setCategoryEnabled(newCategoryState);
  }, [filteredProviders]);

  // Fetch Ollama models whenever the Ollama provider is enabled
  useEffect(() => {
    const ollamaProvider = filteredProviders.find((p) => p.name === 'Ollama');

    if (ollamaProvider?.settings.enabled) {
      fetchOllamaModels();
    }
  }, [filteredProviders]);

  const fetchOllamaModels = async () => {
    try {
      setIsLoadingModels(true);

      const response = await fetch(`${OLLAMA_API_URL}/api/tags`);
      const data = (await response.json()) as { models: OllamaModel[] };

      setOllamaModels(
        data.models.map((model) => ({
          ...model,
          status: 'idle' as const,
        })),
      );
    } catch (error) {
      console.error('Error fetching Ollama models:', error);
    } finally {
      setIsLoadingModels(false);
    }
  };

  const updateOllamaModel = async (modelName: string): Promise<{ success: boolean; newDigest?: string }> => {
    try {
      const response = await fetch(`${OLLAMA_API_URL}/api/pull`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ name: modelName }),
      });

      if (!response.ok) {
        throw new Error(`Failed to update ${modelName}`);
      }

      const reader = response.body?.getReader();

      if (!reader) {
        throw new Error('No response reader available');
      }

      // The pull endpoint streams newline-delimited JSON progress events
      while (true) {
        const { done, value } = await reader.read();

        if (done) {
          break;
        }

        const text = new TextDecoder().decode(value);
        const lines = text.split('\n').filter(Boolean);

        for (const line of lines) {
          const rawData = JSON.parse(line);

          if (!isOllamaPullResponse(rawData)) {
            console.error('Invalid response format:', rawData);
            continue;
          }

          setOllamaModels((current) =>
            current.map((m) =>
              m.name === modelName
                ? {
                    ...m,
                    progress: {
                      current: rawData.completed || 0,
                      total: rawData.total || 0,
                      status: rawData.status,
                    },
                    newDigest: rawData.digest,
                  }
                : m,
            ),
          );
        }
      }

      // Re-fetch the model list to pick up the new digest once the pull completes
      const updatedResponse = await fetch(`${OLLAMA_API_URL}/api/tags`);
      const updatedData = (await updatedResponse.json()) as { models: OllamaModel[] };
      const updatedModel = updatedData.models.find((m) => m.name === modelName);

      return { success: true, newDigest: updatedModel?.digest };
    } catch (error) {
      console.error(`Error updating ${modelName}:`, error);
      return { success: false };
    }
  };

  const handleToggleCategory = useCallback(
    (enabled: boolean) => {
      setCategoryEnabled(enabled);
      filteredProviders.forEach((provider) => {
        updateProviderSettings(provider.name, { ...provider.settings, enabled });
      });
      success(enabled ? 'All local providers enabled' : 'All local providers disabled');
    },
    [filteredProviders, updateProviderSettings, success],
  );

  const handleToggleProvider = (provider: IProviderConfig, enabled: boolean) => {
    updateProviderSettings(provider.name, { ...provider.settings, enabled });

    if (enabled) {
      logStore.logProvider(`Provider ${provider.name} enabled`, { provider: provider.name });
      success(`${provider.name} enabled`);
    } else {
      logStore.logProvider(`Provider ${provider.name} disabled`, { provider: provider.name });
      success(`${provider.name} disabled`);
    }
  };

  const handleUpdateBaseUrl = (provider: IProviderConfig, baseUrl: string) => {
    // Treat a blank input as "unset"
    let newBaseUrl: string | undefined = baseUrl;

    if (newBaseUrl && newBaseUrl.trim().length === 0) {
      newBaseUrl = undefined;
    }

    updateProviderSettings(provider.name, { ...provider.settings, baseUrl: newBaseUrl });
    logStore.logProvider(`Base URL updated for ${provider.name}`, {
      provider: provider.name,
      baseUrl: newBaseUrl,
    });
    success(`${provider.name} base URL updated`);
    setEditingProvider(null);
  };

  const handleUpdateOllamaModel = async (modelName: string) => {
    setOllamaModels((current) => current.map((m) => (m.name === modelName ? { ...m, status: 'updating' } : m)));

    const { success: updateSuccess, newDigest } = await updateOllamaModel(modelName);

    setOllamaModels((current) =>
      current.map((m) =>
        m.name === modelName
          ? {
              ...m,
              status: updateSuccess ? 'updated' : 'error',
              error: updateSuccess ? undefined : 'Update failed',
              newDigest,
            }
          : m,
      ),
    );

    if (updateSuccess) {
      success(`Updated ${modelName}`);
    } else {
      error(`Failed to update ${modelName}`);
    }
  };

  const handleDeleteOllamaModel = async (modelName: string) => {
    try {
      const response = await fetch(`${OLLAMA_API_URL}/api/delete`, {
        method: 'DELETE',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify({ name: modelName }),
      });

      if (!response.ok) {
        throw new Error(`Failed to delete ${modelName}`);
      }

      setOllamaModels((current) => current.filter((m) => m.name !== modelName));
      success(`Deleted ${modelName}`);
    } catch (err) {
      const errorMessage = err instanceof Error ? err.message : 'Unknown error occurred';
      console.error(`Error deleting ${modelName}:`, errorMessage);
      error(`Failed to delete ${modelName}`);
    }
  };

  // Health check: hit the root endpoint (there is no /api/health); it responds with "Ollama is running"
  const checkOllamaHealth = async () => {
    try {
      const response = await fetch(OLLAMA_API_URL);
      const text = await response.text();
      const isRunning = text.includes('Ollama is running');

      setServiceStatus({
        isRunning,
        lastChecked: new Date(),
      });

      if (isRunning) {
        // If Ollama is running, refresh the model list
        fetchOllamaModels();
      }

      return isRunning;
    } catch (error) {
      console.error('Health check error:', error);
      setServiceStatus({
        isRunning: false,
        lastChecked: new Date(),
        error: error instanceof Error ? error.message : 'Failed to connect to Ollama service',
      });

      return false;
    }
  };

  // Install a model by name, streaming pull progress into state
  const handleManualInstall = async (modelString: string) => {
    try {
      setIsInstallingModel(modelString);
      setInstallProgress({ model: modelString, progress: 0, status: 'Starting download...' });
      setManualInstall((prev) => ({ ...prev, isOpen: false }));

      const response = await fetch(`${OLLAMA_API_URL}/api/pull`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify({ name: modelString }),
      });

      if (!response.ok) {
        throw new Error(`Failed to install ${modelString}`);
      }

      const reader = response.body?.getReader();

      if (!reader) {
        throw new Error('No response reader available');
      }

      while (true) {
        const { done, value } = await reader.read();

        if (done) {
          break;
        }

        const text = new TextDecoder().decode(value);
        const lines = text.split('\n').filter(Boolean);

        for (const line of lines) {
          const rawData = JSON.parse(line);

          if (!isOllamaPullResponse(rawData)) {
            console.error('Invalid response format:', rawData);
            continue;
          }

          setInstallProgress({
            model: modelString,
            progress: rawData.completed && rawData.total ? (rawData.completed / rawData.total) * 100 : 0,
            status: rawData.status,
          });
        }
      }

      success(`Successfully installed ${modelString}`);
      await fetchOllamaModels();
    } catch (err) {
      const errorMessage = err instanceof Error ? err.message : 'Unknown error occurred';
      console.error(`Error installing ${modelString}:`, errorMessage);
      error(`Failed to install ${modelString}`);
    } finally {
      setIsInstallingModel(null);
      setInstallProgress(null);
    }
  };

  // Periodic health check
  useEffect(() => {
    const checkHealth = async () => {
      const isHealthy = await checkOllamaHealth();

      if (!isHealthy) {
        error('Ollama service is not running. Please start the Ollama service.');
      }
    };

    checkHealth();

    const interval = setInterval(checkHealth, 30000); // Check every 30 seconds

    return () => clearInterval(interval);
  }, []);

  return (
    <div>
      {/* NOTE: the original wrapper elements and most layout classes were lost; the markup below is a
          minimal reconstruction that preserves the rendered content and handlers. */}

      {/* Service status indicator */}
      <div>
        <span>{serviceStatus.isRunning ? 'Ollama service is running' : 'Ollama service is not running'}</span>
        <span>Last checked: {serviceStatus.lastChecked.toLocaleTimeString()}</span>
      </div>

      {/* Header */}
      <div>
        <h2>Local Providers</h2>
        <p>Configure and update local AI models on your machine</p>
        <label>
          Enable All Local
          <Switch checked={categoryEnabled} onCheckedChange={handleToggleCategory} />
        </label>
      </div>

      {/* Provider cards */}
      {filteredProviders.map((provider, index) => (
        <motion.div
          key={provider.name}
          initial={{ opacity: 0, y: 20 }}
          animate={{ opacity: 1, y: 0 }}
          transition={{ delay: index * 0.1 }}
        >
          <div>
            <span>Local</span>
            {URL_CONFIGURABLE_PROVIDERS.includes(provider.name) && <span>Configurable</span>}
          </div>

          <div>
            {React.createElement(PROVIDER_ICONS[provider.name as ProviderName] || BsRobot, {
              className: 'w-full h-full',
              'aria-label': `${provider.name} logo`,
            })}
          </div>

          <div>
            <h3>{provider.name}</h3>
            <p>{PROVIDER_DESCRIPTIONS[provider.name as ProviderName]}</p>
          </div>

          <Switch
            checked={provider.settings.enabled}
            onCheckedChange={(checked) => handleToggleProvider(provider, checked)}
          />

          {provider.settings.enabled && URL_CONFIGURABLE_PROVIDERS.includes(provider.name) && (
            <div>
              {editingProvider === provider.name ? (
                <input
                  type="text"
                  defaultValue={provider.settings.baseUrl}
                  onKeyDown={(e) => {
                    if (e.key === 'Enter') {
                      handleUpdateBaseUrl(provider, e.currentTarget.value);
                    } else if (e.key === 'Escape') {
                      setEditingProvider(null);
                    }
                  }}
                  onBlur={(e) => handleUpdateBaseUrl(provider, e.target.value)}
                  autoFocus
                />
              ) : (
                <div onClick={() => setEditingProvider(provider.name)}>
                  {provider.settings.baseUrl || 'Click to set base URL'}
                </div>
              )}
              {providerBaseUrlEnvKeys[provider.name]?.baseUrlKey && (
                <p>
                  {getEnvUrl(provider)
                    ? 'Environment URL set in .env.local'
                    : 'Environment URL not set in .env.local'}
                </p>
              )}
            </div>
          )}

          {provider.name === 'Ollama' && provider.settings.enabled && (
            <div>
              <h4>Installed Models</h4>
              {isLoadingModels ? <span>Loading models...</span> : <span>{ollamaModels.length} models available</span>}

              {ollamaModels.map((model) => (
                <div key={model.name}>
                  <div>
                    <span>{model.name}</span>
                    {model.status === 'updating' && <span>Updating...</span>}
                    {model.status === 'updated' && <span>Updated</span>}
                    {model.status === 'error' && <span>Error</span>}
                  </div>

                  <div>
                    <span>Version: {model.digest.substring(0, 7)}</span>
                    {model.status === 'updated' && model.newDigest && <span> → {model.newDigest.substring(0, 7)}</span>}
                    {model.progress && (
                      <span>
                        {model.progress.status}{' '}
                        {model.progress.total > 0 && (
                          <>({Math.round((model.progress.current / model.progress.total) * 100)}%)</>
                        )}
                      </span>
                    )}
                    {model.details && (
                      <span>
                        ({model.details.parameter_size}, {model.details.quantization_level})
                      </span>
                    )}
                  </div>

                  <motion.button
                    onClick={() => handleUpdateOllamaModel(model.name)}
                    disabled={model.status === 'updating'}
                    className={classNames(
                      'rounded-md px-4 py-2 text-sm',
                      'bg-purple-500 text-white',
                      'hover:bg-purple-600',
                      'dark:bg-purple-500 dark:hover:bg-purple-600',
                      'transition-all duration-200',
                    )}
                    whileHover={{ scale: 1.02 }}
                    whileTap={{ scale: 0.98 }}
                  >
                    Update
                  </motion.button>
                  <motion.button
                    onClick={() => {
                      if (window.confirm(`Are you sure you want to delete ${model.name}?`)) {
                        handleDeleteOllamaModel(model.name);
                      }
                    }}
                    disabled={model.status === 'updating'}
                    className={classNames(
                      'rounded-md px-4 py-2 text-sm',
                      'bg-red-500 text-white',
                      'hover:bg-red-600',
                      'dark:bg-red-500 dark:hover:bg-red-600',
                      'transition-all duration-200',
                    )}
                    whileHover={{ scale: 1.02 }}
                    whileTap={{ scale: 0.98 }}
                  >
                    Delete
                  </motion.button>
                </div>
              ))}
            </div>
          )}
        </motion.div>
      ))}

      {/* Manual Installation Section */}
      {serviceStatus.isRunning && (
        <div>
          <h3>Install New Model</h3>
          <p>Enter the model name exactly as shown (e.g., deepseek-r1:1.5b)</p>

          {/* Model Information Section */}
          <div>
            <h4>Where to find models?</h4>
            <p>
              Browse available models at{' '}
              <a href="https://ollama.com/library" target="_blank" rel="noopener noreferrer">
                ollama.com/library
              </a>
            </p>
            <p>Popular models:</p>
            <ul>
              <li>deepseek-r1:1.5b - DeepSeek's reasoning model</li>
              <li>llama3:8b - Meta's Llama 3 (8B parameters)</li>
              <li>mistral:7b - Mistral's 7B model</li>
              <li>gemma:2b - Google's Gemma model</li>
              <li>qwen2:7b - Alibaba's Qwen2 model</li>
            </ul>
            <p>
              Note: Copy the exact model name including the tag (e.g., 'deepseek-r1:1.5b') from the library to ensure
              successful installation.
            </p>
          </div>

          <input
            type="text"
            value={manualInstall.modelString}
            placeholder="Enter model name"
            onChange={(e: React.ChangeEvent<HTMLInputElement>) =>
              setManualInstall((prev) => ({ ...prev, modelString: e.target.value }))
            }
          />
          <motion.button
            onClick={() => handleManualInstall(manualInstall.modelString)}
            disabled={!manualInstall.modelString || !!isInstallingModel}
            className={classNames(
              'rounded-md px-4 py-2 text-sm',
              'bg-purple-500 text-white',
              'hover:bg-purple-600',
              'dark:bg-purple-500 dark:hover:bg-purple-600',
              'transition-all duration-200',
            )}
            whileHover={{ scale: 1.02 }}
            whileTap={{ scale: 0.98 }}
          >
            {isInstallingModel ? <>Installing...</> : <>Install Model</>}
          </motion.button>
          {isInstallingModel && (
            <motion.button
              onClick={() => {
                setIsInstallingModel(null);
                setInstallProgress(null);
                error('Installation cancelled');
              }}
              className={classNames(
                'rounded-md px-4 py-2 text-sm',
                'bg-red-500 text-white',
                'hover:bg-red-600',
                'dark:bg-red-500 dark:hover:bg-red-600',
                'transition-all duration-200',
              )}
              whileHover={{ scale: 1.02 }}
              whileTap={{ scale: 0.98 }}
            >
              Cancel
            </motion.button>
          )}

          {installProgress && (
            <div>
              <span>{installProgress.status}</span> <span>{Math.round(installProgress.progress)}%</span>
            </div>
          )}
        </div>
      )}
    </div>
  );
}

export default LocalProvidersTab;