diff --git a/.env.example b/.env.example
index 08774d2..613b95d 100644
--- a/.env.example
+++ b/.env.example
@@ -38,12 +38,18 @@ OLLAMA_API_BASE_URL=
 # You only need this environment variable set if you want to use OpenAI Like models
 OPENAI_LIKE_API_BASE_URL=
 
+# You only need this environment variable set if you want to use Together AI models
+TOGETHER_API_BASE_URL=
+
 # You only need this environment variable set if you want to use DeepSeek models through their API
 DEEPSEEK_API_KEY=
 
 # Get your OpenAI Like API Key
 OPENAI_LIKE_API_KEY=
 
+# Get your Together API Key
+TOGETHER_API_KEY=
+
 # Get your Mistral API Key by following these instructions -
 # https://console.mistral.ai/api-keys/
 # You only need this environment variable set if you want to use Mistral models
diff --git a/Dockerfile b/Dockerfile
index c581f7f..06541d3 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -25,6 +25,8 @@ ARG ANTHROPIC_API_KEY
 ARG OPEN_ROUTER_API_KEY
 ARG GOOGLE_GENERATIVE_AI_API_KEY
 ARG OLLAMA_API_BASE_URL
+ARG TOGETHER_API_KEY
+ARG TOGETHER_API_BASE_URL
 ARG VITE_LOG_LEVEL=debug
 ARG DEFAULT_NUM_CTX
 
@@ -36,6 +38,8 @@ ENV WRANGLER_SEND_METRICS=false \
     OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY} \
     GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY} \
     OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL} \
+    TOGETHER_API_KEY=${TOGETHER_API_KEY} \
+    TOGETHER_API_BASE_URL=${TOGETHER_API_BASE_URL} \
     VITE_LOG_LEVEL=${VITE_LOG_LEVEL} \
     DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX}
 
@@ -58,6 +62,8 @@ ARG ANTHROPIC_API_KEY
 ARG OPEN_ROUTER_API_KEY
 ARG GOOGLE_GENERATIVE_AI_API_KEY
 ARG OLLAMA_API_BASE_URL
+ARG TOGETHER_API_KEY
+ARG TOGETHER_API_BASE_URL
 ARG VITE_LOG_LEVEL=debug
 ARG DEFAULT_NUM_CTX
 
@@ -68,6 +74,8 @@ ENV GROQ_API_KEY=${GROQ_API_KEY} \
     OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY} \
     GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY} \
     OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL} \
+    TOGETHER_API_KEY=${TOGETHER_API_KEY} \
+    TOGETHER_API_BASE_URL=${TOGETHER_API_BASE_URL} \
     VITE_LOG_LEVEL=${VITE_LOG_LEVEL} \
     DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX}
diff --git a/app/lib/.server/llm/api-key.ts b/app/lib/.server/llm/api-key.ts
index 2956181..b06acb8 100644
--- a/app/lib/.server/llm/api-key.ts
+++ b/app/lib/.server/llm/api-key.ts
@@ -35,6 +35,8 @@ export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Re
       return env.MISTRAL_API_KEY || cloudflareEnv.MISTRAL_API_KEY;
     case 'OpenAILike':
       return env.OPENAI_LIKE_API_KEY || cloudflareEnv.OPENAI_LIKE_API_KEY;
+    case 'Together':
+      return env.TOGETHER_API_KEY || cloudflareEnv.TOGETHER_API_KEY;
     case 'xAI':
       return env.XAI_API_KEY || cloudflareEnv.XAI_API_KEY;
     case 'Cohere':
@@ -48,6 +50,8 @@
 
 export function getBaseURL(cloudflareEnv: Env, provider: string) {
   switch (provider) {
+    case 'Together':
+      return env.TOGETHER_API_BASE_URL || cloudflareEnv.TOGETHER_API_BASE_URL;
     case 'OpenAILike':
       return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
     case 'LMStudio':
diff --git a/app/lib/.server/llm/model.ts b/app/lib/.server/llm/model.ts
index 76a3711..8850a12 100644
--- a/app/lib/.server/llm/model.ts
+++ b/app/lib/.server/llm/model.ts
@@ -146,6 +146,8 @@ export function getModel(provider: string, model: string, env: Env, apiKeys?: Re
       return getGoogleModel(apiKey, model);
     case 'OpenAILike':
       return getOpenAILikeModel(baseURL, apiKey, model);
+    case 'Together':
+      return getOpenAILikeModel(baseURL, apiKey, model);
     case 'Deepseek':
       return getDeepseekModel(apiKey, model);
     case 'Mistral':
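Note: the new 'Together' case deliberately reuses getOpenAILikeModel, since Together serves an OpenAI-compatible API. A minimal sketch of what that reuse amounts to, assuming the helper wraps createOpenAI from @ai-sdk/openai the way the OpenAILike provider does; getTogetherModel below is purely illustrative and not part of this diff:

import { createOpenAI } from '@ai-sdk/openai';

// Illustrative only: an OpenAI-compatible client pointed at the Together endpoint.
// baseURL comes from TOGETHER_API_BASE_URL and apiKey from TOGETHER_API_KEY,
// resolved by the getBaseURL/getAPIKey cases added in api-key.ts above.
export function getTogetherModel(baseURL: string, apiKey: string, model: string) {
  const together = createOpenAI({ baseURL, apiKey });
  return together(model);
}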
diff --git a/app/utils/constants.ts b/app/utils/constants.ts
index de68a82..3383f27 100644
--- a/app/utils/constants.ts
+++ b/app/utils/constants.ts
@@ -259,6 +259,20 @@ const PROVIDER_LIST: ProviderInfo[] = [
     labelForGetApiKey: 'Get LMStudio',
     icon: 'i-ph:cloud-arrow-down',
   },
+  {
+    name: 'Together',
+    staticModels: [
+      { name: 'Qwen/Qwen2.5-Coder-32B-Instruct', label: 'Qwen/Qwen2.5-Coder-32B-Instruct', provider: 'Together', maxTokenAllowed: 8000 },
+      {
+        name: 'meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo',
+        label: 'meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo',
+        provider: 'Together',
+        maxTokenAllowed: 8000,
+      },
+      { name: 'mistralai/Mixtral-8x7B-Instruct-v0.1', label: 'Mixtral 8x7B Instruct', provider: 'Together', maxTokenAllowed: 8192 },
+    ],
+    getApiKeyLink: 'https://api.together.xyz/settings/api-keys',
+  },
 ];
 
 export const DEFAULT_PROVIDER = PROVIDER_LIST[0];
diff --git a/docker-compose.yaml b/docker-compose.yaml
index 4a3cc0a..f504d80 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -20,6 +20,8 @@ services:
       - OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY}
       - GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY}
       - OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL}
+      - TOGETHER_API_KEY=${TOGETHER_API_KEY}
+      - TOGETHER_API_BASE_URL=${TOGETHER_API_BASE_URL}
       - VITE_LOG_LEVEL=${VITE_LOG_LEVEL:-debug}
       - DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX:-32768}
       - RUNNING_IN_DOCKER=true
@@ -48,6 +50,8 @@ services:
       - OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY}
       - GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY}
       - OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL}
+      - TOGETHER_API_KEY=${TOGETHER_API_KEY}
+      - TOGETHER_API_BASE_URL=${TOGETHER_API_BASE_URL}
       - VITE_LOG_LEVEL=${VITE_LOG_LEVEL:-debug}
       - DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX:-32768}
       - RUNNING_IN_DOCKER=true
diff --git a/worker-configuration.d.ts b/worker-configuration.d.ts
index 9c074b8..4eaf210 100644
--- a/worker-configuration.d.ts
+++ b/worker-configuration.d.ts
@@ -7,6 +7,8 @@ interface Env {
   OLLAMA_API_BASE_URL: string;
   OPENAI_LIKE_API_KEY: string;
   OPENAI_LIKE_API_BASE_URL: string;
+  TOGETHER_API_KEY: string;
+  TOGETHER_API_BASE_URL: string;
   DEEPSEEK_API_KEY: string;
   LMSTUDIO_API_BASE_URL: string;
   GOOGLE_GENERATIVE_AI_API_KEY: string;
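Note: with TOGETHER_API_KEY and TOGETHER_API_BASE_URL set (Together's OpenAI-compatible base URL is typically https://api.together.xyz/v1), the provider resolves through the existing getModel path. A usage sketch under assumptions: the ~/lib/.server/llm import alias used elsewhere in the app, and the generateText helper from the ai package; demoTogether is hypothetical and not part of this diff:

import { generateText } from 'ai';
import { getModel } from '~/lib/.server/llm/model'; // path assumed from app/lib/.server/llm

// env is the worker Env interface extended above with the TOGETHER_* variables.
export async function demoTogether(env: Env) {
  // Resolves the Together API key and base URL via the cases added in api-key.ts,
  // then returns an OpenAI-compatible model handle for the requested model id.
  const model = getModel('Together', 'Qwen/Qwen2.5-Coder-32B-Instruct', env);

  const { text } = await generateText({
    model,
    prompt: 'Write a TypeScript function that reverses a string.',
  });

  return text;
}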