Let the Ollama models be auto-generated from the Ollama API

yunat 2024-10-18 14:34:08 +03:00
parent 4f7a06f56a
commit 0d777637ff
6 changed files with 94 additions and 35 deletions

.env.example

@@ -20,5 +20,9 @@ ANTHROPIC_API_KEY=
# You only need this environment variable set if you want to use OpenRouter models
OPEN_ROUTER_API_KEY=
# You only need this environment variable set if you want to use Ollama models
# EXAMPLE http://localhost:11434
OLLAMA_API_BASE_URL=
# Include this environment variable if you want more logging for debugging locally
VITE_LOG_LEVEL=debug

app/entry.server.tsx

@@ -5,6 +5,7 @@ import { renderToReadableStream } from 'react-dom/server';
import { renderHeadToString } from 'remix-island';
import { Head } from './root';
import { themeStore } from '~/lib/stores/theme';
import { initializeModelList } from '~/utils/constants';
export default async function handleRequest(
request: Request,
@@ -13,6 +14,8 @@ export default async function handleRequest(
remixContext: EntryContext,
_loadContext: AppLoadContext,
) {
await initializeModelList();
const readable = await renderToReadableStream(<RemixServer context={remixContext} url={request.url} />, {
signal: request.signal,
onError(error: unknown) {

app/routes/api.models.ts Normal file

@@ -0,0 +1,6 @@
import { json } from '@remix-run/cloudflare';
import { MODEL_LIST } from '~/utils/constants';
export async function loader() {
  return json(MODEL_LIST);
}
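
For context, a minimal sketch (not part of this commit) of how a client could consume this new route; the fetchModelList helper and its error handling are assumptions, not code from the repository:

import type { ModelInfo } from '~/utils/types';

// Hypothetical client-side helper: loads the model list exposed by /api/models.
export async function fetchModelList(): Promise<ModelInfo[]> {
  const response = await fetch('/api/models');

  if (!response.ok) {
    throw new Error(`Failed to load model list: ${response.status}`);
  }

  return (await response.json()) as ModelInfo[];
}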

app/utils/constants.ts

@@ -1,22 +1,15 @@
import type { ModelInfo } from './types';
export const WORK_DIR_NAME = 'project';
export const WORK_DIR = `/home/${WORK_DIR_NAME}`;
export const MODIFICATIONS_TAG_NAME = 'bolt_file_modifications';
export const MODEL_REGEX = /^\[Model: (.*?)\]\n\n/;
export const DEFAULT_MODEL = "claude-3-5-sonnet-20240620";
export const DEFAULT_PROVIDER = "Anthropic";
export const MODEL_LIST = [
export const DEFAULT_MODEL = 'claude-3-5-sonnet-20240620';
export const DEFAULT_PROVIDER = 'Anthropic';
const staticModels: ModelInfo[] = [
{ name: 'claude-3-5-sonnet-20240620', label: 'Claude 3.5 Sonnet', provider: 'Anthropic' },
{ name: 'gpt-4o', label: 'GPT-4o', provider: 'OpenAI' },
{ name: 'qwen2.5-coder:7b', label: 'Qwen 2.5 Coder 7b', provider: 'Ollama' },
{ name: 'qwen2.5-coder:1.5b', label: 'Qwen 2.5 Coder 1.5b', provider: 'Ollama' },
{ name: 'deepseek-coder-v2:236b', label: 'DeepSeek-Coder-V2 236b', provider: 'Ollama' },
{ name: 'deepseek-coder-v2:16b', label: 'DeepSeek-Coder-V2 16b', provider: 'Ollama' },
{ name: 'codebooga', label: 'Codebooga 34b', provider: 'Ollama' },
{ name: 'phind-codellama', label: 'Phind CodeLlama 34b', provider: 'Ollama' },
{ name: 'codellama:70b', label: 'Code Llama 70b', provider: 'Ollama' },
{ name: 'codellama:34b', label: 'Code Llama 34b', provider: 'Ollama' },
{ name: 'codellama:13b', label: 'Code Llama 13b', provider: 'Ollama' },
{ name: 'codellama:7b', label: 'Code Llama 7b', provider: 'Ollama' },
{ name: 'deepseek/deepseek-coder', label: 'Deepseek-Coder V2 236B (OpenRouter)', provider: 'OpenRouter' },
{ name: 'google/gemini-flash-1.5', label: 'Google Gemini Flash 1.5 (OpenRouter)', provider: 'OpenRouter' },
{ name: 'google/gemini-pro-1.5', label: 'Google Gemini Pro 1.5 (OpenRouter)', provider: 'OpenRouter' },
@@ -36,3 +29,27 @@ export const MODEL_LIST = [
{ name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI' },
{ name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI' },
];
export let MODEL_LIST: ModelInfo[] = [...staticModels];
async function getOllamaModels(): Promise<ModelInfo[]> {
  try {
    const response = await fetch(`http://localhost:11434/api/tags`);
    const data = await response.json();
    return data.models.map((model: any) => ({
      name: model.name,
      label: `${model.name} (${model.details.parameter_size})`,
      provider: 'Ollama',
    }));
  } catch (e) {
    return [];
  }
}
async function initializeModelList(): Promise<void> {
  const ollamaModels = await getOllamaModels();
  MODEL_LIST = [...ollamaModels, ...staticModels];
}
initializeModelList().then();
export { getOllamaModels, initializeModelList };
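
Note that the commit also adds OLLAMA_API_BASE_URL to the example env file and to the worker Env interface, while getOllamaModels above still hardcodes http://localhost:11434. A minimal sketch (not part of this commit) of a variant that takes the base URL as a parameter and types the /api/tags response; getOllamaModelsFrom is a hypothetical name:

import type { ModelInfo, OllamaApiResponse } from './types';

// Hypothetical variant: the caller passes OLLAMA_API_BASE_URL (or any other base URL).
async function getOllamaModelsFrom(baseUrl = 'http://localhost:11434'): Promise<ModelInfo[]> {
  try {
    const response = await fetch(`${baseUrl}/api/tags`);
    const data = (await response.json()) as OllamaApiResponse;

    return data.models.map((model) => ({
      name: model.name,
      label: `${model.name} (${model.details.parameter_size})`,
      provider: 'Ollama',
    }));
  } catch {
    // An unreachable Ollama server should not break the static model list.
    return [];
  }
}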

app/utils/types.ts Normal file

@@ -0,0 +1,28 @@
interface OllamaModelDetails {
  parent_model: string;
  format: string;
  family: string;
  families: string[];
  parameter_size: string;
  quantization_level: string;
}
interface OllamaModel {
  name: string;
  model: string;
  modified_at: string;
  size: number;
  digest: string;
  details: OllamaModelDetails;
}
export interface OllamaApiResponse {
  models: OllamaModel[];
}
export interface ModelInfo {
  name: string;
  label: string;
  provider: string;
}
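
For reference, an illustrative value (the field contents are made up, not real API output) showing the shape these interfaces describe, matching Ollama's /api/tags response:

// Illustrative only: values are placeholders, but the structure follows OllamaApiResponse.
const exampleTagsResponse: OllamaApiResponse = {
  models: [
    {
      name: 'qwen2.5-coder:7b',
      model: 'qwen2.5-coder:7b',
      modified_at: '2024-10-18T00:00:00Z',
      size: 4700000000,
      digest: 'sha256:placeholder',
      details: {
        parent_model: '',
        format: 'gguf',
        family: 'qwen2',
        families: ['qwen2'],
        parameter_size: '7.6B',
        quantization_level: 'Q4_K_M',
      },
    },
  ],
};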

worker-configuration.d.ts

@@ -3,4 +3,5 @@ interface Env {
OPENAI_API_KEY: string;
GROQ_API_KEY: string;
OPEN_ROUTER_API_KEY: string;
OLLAMA_API_BASE_URL: string;
}
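
A minimal sketch (not part of this commit) of how a Remix loader might read OLLAMA_API_BASE_URL from the Cloudflare load context; the context.cloudflare.env access path is an assumption about how this project wires up its load context, not something shown in the diff:

import { json, type LoaderFunctionArgs } from '@remix-run/cloudflare';

export async function loader({ context }: LoaderFunctionArgs) {
  // Assumption: Cloudflare bindings are exposed as context.cloudflare.env in this app.
  const env = (context as unknown as { cloudflare: { env: Env } }).cloudflare.env;
  const ollamaBaseUrl = env.OLLAMA_API_BASE_URL || 'http://localhost:11434';

  return json({ ollamaBaseUrl });
}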