import React, { useEffect, useState } from 'react';
import { motion } from 'framer-motion';
import { toast } from 'react-toastify';
import { classNames } from '~/utils/classNames';

interface OllamaModel {
  name: string;
  digest: string;
  size: number;
  modified_at: string;
  details?: {
    family: string;
    parameter_size: string;
    quantization_level: string;
  };
  status?: 'idle' | 'updating' | 'updated' | 'error' | 'checking';
  error?: string;
  newDigest?: string;
  progress?: {
    current: number;
    total: number;
    status: string;
  };
}

interface OllamaTagResponse {
  models: Array<{
    name: string;
    digest: string;
    size: number;
    modified_at: string;
    details?: {
      family: string;
      parameter_size: string;
      quantization_level: string;
    };
  }>;
}

interface OllamaPullResponse {
  status: string;
  digest?: string;
  total?: number;
  completed?: number;
}

export default function OllamaModelUpdater() {
  const [models, setModels] = useState<OllamaModel[]>([]);
  const [isLoading, setIsLoading] = useState(true);
  const [isBulkUpdating, setIsBulkUpdating] = useState(false);

  useEffect(() => {
    fetchModels();
  }, []);

  // Load the list of locally installed models from the Ollama tags endpoint.
  const fetchModels = async () => {
    try {
      setIsLoading(true);

      const response = await fetch('http://localhost:11434/api/tags');
      const data = (await response.json()) as OllamaTagResponse;

      setModels(
        data.models.map((model) => ({
          name: model.name,
          digest: model.digest,
          size: model.size,
          modified_at: model.modified_at,
          details: model.details,
          status: 'idle' as const,
        })),
      );
    } catch (error) {
      toast.error('Failed to fetch Ollama models');
      console.error('Error fetching models:', error);
    } finally {
      setIsLoading(false);
    }
  };

  // Pull the latest version of a model, streaming progress updates into state.
  const updateModel = async (modelName: string): Promise<{ success: boolean; newDigest?: string }> => {
    try {
      const response = await fetch('http://localhost:11434/api/pull', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify({ name: modelName }),
      });

      if (!response.ok) {
        throw new Error(`Failed to update ${modelName}`);
      }

      const reader = response.body?.getReader();

      if (!reader) {
        throw new Error('No response reader available');
      }

      // The pull endpoint streams newline-delimited JSON progress objects.
      while (true) {
        const { done, value } = await reader.read();

        if (done) {
          break;
        }

        const text = new TextDecoder().decode(value);
        const lines = text.split('\n').filter(Boolean);

        for (const line of lines) {
          const data = JSON.parse(line) as OllamaPullResponse;

          setModels((current) =>
            current.map((m) =>
              m.name === modelName
                ? {
                    ...m,
                    progress: {
                      current: data.completed || 0,
                      total: data.total || 0,
                      status: data.status,
                    },
                    newDigest: data.digest,
                  }
                : m,
            ),
          );
        }
      }

      // Re-read the tags endpoint to pick up the digest of the freshly pulled model.
      setModels((current) => current.map((m) => (m.name === modelName ? { ...m, status: 'checking' } : m)));

      const updatedResponse = await fetch('http://localhost:11434/api/tags');
      const data = (await updatedResponse.json()) as OllamaTagResponse;
      const updatedModel = data.models.find((m) => m.name === modelName);

      return { success: true, newDigest: updatedModel?.digest };
    } catch (error) {
      console.error(`Error updating ${modelName}:`, error);
      return { success: false };
    }
  };

  // Update every installed model sequentially, reflecting per-model status in the UI.
  const handleBulkUpdate = async () => {
    setIsBulkUpdating(true);

    for (const model of models) {
      setModels((current) => current.map((m) => (m.name === model.name ? { ...m, status: 'updating' } : m)));

      const { success, newDigest } = await updateModel(model.name);

      setModels((current) =>
        current.map((m) =>
          m.name === model.name
            ? {
                ...m,
                status: success ? 'updated' : 'error',
                error: success ? undefined : 'Update failed',
                newDigest,
              }
            : m,
        ),
      );
    }

    setIsBulkUpdating(false);
    toast.success('Bulk update completed');
  };

  const handleSingleUpdate = async (modelName: string) => {
    setModels((current) => current.map((m) => (m.name === modelName ? { ...m, status: 'updating' } : m)));

    const { success, newDigest } = await updateModel(modelName);

    setModels((current) =>
      current.map((m) =>
        m.name === modelName
          ? {
              ...m,
              status: success ? 'updated' : 'error',
              error: success ? undefined : 'Update failed',
              newDigest,
            }
          : m,
      ),
    );

    if (success) {
      toast.success(`Updated ${modelName}`);
    } else {
      toast.error(`Failed to update ${modelName}`);
    }
  };

  if (isLoading) {
    return (
      <div className="flex items-center justify-center p-4">
        <span>Loading models...</span>
      </div>
    );
  }

  return (
    <div className="space-y-4">
      {/* NOTE: layout classes and status markers below are simple placeholders. */}
      <div>
        <h3 className="text-lg font-medium">Ollama Model Manager</h3>
        <p className="text-sm">Update your local Ollama models to their latest versions</p>
      </div>

      <div className="flex items-center justify-between">
        <span className="text-sm">{models.length} models available</span>
        <motion.button
          onClick={handleBulkUpdate}
          disabled={isBulkUpdating}
          className={classNames(
            'rounded-md px-4 py-2 text-sm',
            'bg-purple-500 text-white',
            'hover:bg-purple-600',
            'dark:bg-purple-500 dark:hover:bg-purple-600',
            'transition-all duration-200',
          )}
          whileHover={{ scale: 1.02 }}
          whileTap={{ scale: 0.98 }}
        >
          {isBulkUpdating ? <>Updating All...</> : <>Update All Models</>}
        </motion.button>
      </div>

      <div className="space-y-2">
        {models.map((model) => (
          <div key={model.name} className="flex items-center justify-between rounded-md p-3">
            <div>
              <div className="flex items-center gap-2">
                <span className="font-medium">{model.name}</span>
                {model.status === 'updating' && <span className="animate-spin">⟳</span>}
                {model.status === 'updated' && <span className="text-green-500">✓</span>}
                {model.status === 'error' && <span className="text-red-500">✗</span>}
              </div>
              <div className="text-sm">
                <span>Version: {model.digest.substring(0, 7)}</span>
                {model.status === 'updated' && model.newDigest && <> → {model.newDigest.substring(0, 7)}</>}
                {model.progress && (
                  <span>
                    {' '}
                    {model.progress.status}{' '}
                    {model.progress.total > 0 && (
                      <>({Math.round((model.progress.current / model.progress.total) * 100)}%)</>
                    )}
                  </span>
                )}
                {model.details && (
                  <span>
                    {' '}
                    ({model.details.parameter_size}, {model.details.quantization_level})
                  </span>
                )}
              </div>
            </div>
            <motion.button
              onClick={() => handleSingleUpdate(model.name)}
              disabled={model.status === 'updating'}
              className={classNames(
                'rounded-md px-4 py-2 text-sm',
                'bg-purple-500 text-white',
                'hover:bg-purple-600',
                'dark:bg-purple-500 dark:hover:bg-purple-600',
                'transition-all duration-200',
              )}
              whileHover={{ scale: 1.02 }}
              whileTap={{ scale: 0.98 }}
            >
              Update
            </motion.button>
          </div>
        ))}
      </div>
    </div>
  );
}
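
/*
 * Usage sketch: the component is self-contained and can be mounted from any
 * settings view. The parent component and import path shown here are
 * illustrative assumptions, not definitions from this file.
 *
 *   import OllamaModelUpdater from '~/components/OllamaModelUpdater';
 *
 *   function OllamaSettingsTab() {
 *     return <OllamaModelUpdater />;
 *   }
 *
 * The component talks directly to a local Ollama daemon at
 * http://localhost:11434, so it assumes Ollama is running on the user's machine
 * and that its CORS settings allow requests from the page's origin.
 */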