diff --git a/.env.example b/.env.example
index 9710a8e..08774d2 100644
--- a/.env.example
+++ b/.env.example
@@ -5,6 +5,12 @@
 # You only need this environment variable set if you want to use Groq models
 GROQ_API_KEY=
 
+# Get your HuggingFace API Key here -
+# https://huggingface.co/settings/tokens
+# You only need this environment variable set if you want to use HuggingFace models
+HuggingFace_API_KEY=
+
+
 # Get your Open AI API Key by following these instructions -
 # https://help.openai.com/en/articles/4936850-where-do-i-find-my-openai-api-key
 # You only need this environment variable set if you want to use GPT models
@@ -43,6 +49,10 @@ OPENAI_LIKE_API_KEY=
 # You only need this environment variable set if you want to use Mistral models
 MISTRAL_API_KEY=
 
+# Get the Cohere Api key by following these instructions -
+# https://dashboard.cohere.com/api-keys
+# You only need this environment variable set if you want to use Cohere models
+COHERE_API_KEY=
 
 # Get LMStudio Base URL from LM Studio Developer Console
 # Make sure to enable CORS
@@ -63,4 +73,4 @@ VITE_LOG_LEVEL=debug
 # DEFAULT_NUM_CTX=24576 # Consumes 32GB of VRAM
 # DEFAULT_NUM_CTX=12288 # Consumes 26GB of VRAM
 # DEFAULT_NUM_CTX=6144 # Consumes 24GB of VRAM
-DEFAULT_NUM_CTX=
\ No newline at end of file
+DEFAULT_NUM_CTX=
diff --git a/.gitignore b/.gitignore
index b43105b..3303fba 100644
--- a/.gitignore
+++ b/.gitignore
@@ -22,6 +22,7 @@ dist-ssr
 *.sln
 *.sw?
 
+/.history
 /.cache
 /build
 .env.local
diff --git a/.husky/commit-msg b/.husky/commit-msg
deleted file mode 100644
index d821bbc..0000000
--- a/.husky/commit-msg
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/usr/bin/env sh
-
-. "$(dirname "$0")/_/husky.sh"
-
-npx commitlint --edit $1
-
-exit 0
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 23f2b8d..0f7744a 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -75,6 +75,7 @@ pnpm install
 - Add your LLM API keys (only set the ones you plan to use):
 ```bash
 GROQ_API_KEY=XXX
+HuggingFace_API_KEY=XXX
 OPENAI_API_KEY=XXX
 ANTHROPIC_API_KEY=XXX
 ...
diff --git a/Dockerfile b/Dockerfile
index 1d68673..c581f7f 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -19,6 +19,7 @@ FROM base AS bolt-ai-production
 
 # Define environment variables with default values or let them be overridden
 ARG GROQ_API_KEY
+ARG HuggingFace_API_KEY
 ARG OPENAI_API_KEY
 ARG ANTHROPIC_API_KEY
 ARG OPEN_ROUTER_API_KEY
@@ -29,6 +30,7 @@ ARG DEFAULT_NUM_CTX
 
 ENV WRANGLER_SEND_METRICS=false \
     GROQ_API_KEY=${GROQ_API_KEY} \
+    HuggingFace_API_KEY=${HuggingFace_API_KEY} \
     OPENAI_API_KEY=${OPENAI_API_KEY} \
     ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY} \
     OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY} \
@@ -50,6 +52,7 @@ FROM base AS bolt-ai-development
 
 # Define the same environment variables for development
 ARG GROQ_API_KEY
+ARG HuggingFace_API_KEY
 ARG OPENAI_API_KEY
 ARG ANTHROPIC_API_KEY
 ARG OPEN_ROUTER_API_KEY
@@ -59,6 +62,7 @@ ARG VITE_LOG_LEVEL=debug
 ARG DEFAULT_NUM_CTX
 
 ENV GROQ_API_KEY=${GROQ_API_KEY} \
+    HuggingFace_API_KEY=${HuggingFace_API_KEY} \
     OPENAI_API_KEY=${OPENAI_API_KEY} \
     ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY} \
     OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY} \
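The .env.example and Dockerfile hunks above only declare the new HuggingFace, Cohere, and DEFAULT_NUM_CTX settings and forward them into the container environment; they do not show how the values are consumed. As a minimal sketch (not part of the patch; the fallback value and the Node-style process.env access are assumptions), server-side code could pick up the optional DEFAULT_NUM_CTX override like this:

```ts
// Illustrative only: read the optional DEFAULT_NUM_CTX override from the environment.
// The 32768 fallback is an assumed default, not a value specified by this patch.
const DEFAULT_NUM_CTX: number = process.env.DEFAULT_NUM_CTX
  ? parseInt(process.env.DEFAULT_NUM_CTX, 10)
  : 32768;
```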
diff --git a/README.md b/README.md
index 5288923..f952e0a 100644
--- a/README.md
+++ b/README.md
@@ -1,8 +1,12 @@
 [![Bolt.new: AI-Powered Full-Stack Web Development in the Browser](./public/social_preview_index.jpg)](https://bolt.new)
 
-# Bolt.new Fork by Cole Medin
+# Bolt.new Fork by Cole Medin - oTToDev
 
-This fork of Bolt.new allows you to choose the LLM that you use for each prompt! Currently, you can use OpenAI, Anthropic, Ollama, OpenRouter, Gemini, or Groq models - and it is easily extended to use any other model supported by the Vercel AI SDK! See the instructions below for running this locally and extending it to include more models.
+This fork of Bolt.new (oTToDev) allows you to choose the LLM that you use for each prompt! Currently, you can use OpenAI, Anthropic, Ollama, OpenRouter, Gemini, LMStudio, Mistral, xAI, HuggingFace, DeepSeek, or Groq models - and it is easily extended to use any other model supported by the Vercel AI SDK! See the instructions below for running this locally and extending it to include more models.
+
+Join the community for oTToDev!
+
+https://thinktank.ottomator.ai
 
 # Requested Additions to this Fork - Feel Free to Contribute!!
 
@@ -20,21 +24,24 @@ This fork of Bolt.new allows you to choose the LLM that you use for each prompt!
 - ✅ Publish projects directly to GitHub (@goncaloalves)
 - ✅ Ability to enter API keys in the UI (@ali00209)
 - ✅ xAI Grok Beta Integration (@milutinke)
+- ✅ LM Studio Integration (@karrot0)
+- ✅ HuggingFace Integration (@ahsan3219)
+- ✅ Bolt terminal to see the output of LLM run commands (@thecodacus)
+- ✅ Streaming of code output (@thecodacus)
+- ✅ Ability to revert code to earlier version (@wonderwhy-er)
 - ⬜ **HIGH PRIORITY** - Prevent Bolt from rewriting files as often (file locking and diffs)
 - ⬜ **HIGH PRIORITY** - Better prompting for smaller LLMs (code window sometimes doesn't start)
-- ⬜ **HIGH PRIORITY** Load local projects into the app
+- ⬜ **HIGH PRIORITY** - Load local projects into the app
 - ⬜ **HIGH PRIORITY** - Attach images to prompts
 - ⬜ **HIGH PRIORITY** - Run agents in the backend as opposed to a single model call
 - ⬜ Mobile friendly
-- ⬜ LM Studio Integration
 - ⬜ Together Integration
 - ⬜ Azure Open AI API Integration
-- ⬜ HuggingFace Integration
 - ⬜ Perplexity Integration
 - ⬜ Vertex AI Integration
-- ⬜ Cohere Integration
+- ✅ Cohere Integration (@hasanraiyan)
+- ✅ Dynamic model max token length (@hasanraiyan)
 - ⬜ Deploy directly to Vercel/Netlify/other similar platforms
-- ⬜ Ability to revert code to earlier version
 - ⬜ Prompt caching
 - ⬜ Better prompt enhancing
 - ⬜ Have LLM plan the project in a MD file for better results/transparency
diff --git a/app/components/chat/APIKeyManager.tsx b/app/components/chat/APIKeyManager.tsx
index 5b2c85e..c61e466 100644
--- a/app/components/chat/APIKeyManager.tsx
+++ b/app/components/chat/APIKeyManager.tsx
@@ -10,11 +10,7 @@ interface APIKeyManagerProps {
   labelForGetApiKey?: string;
 }
 
-export const APIKeyManager: React.FC<APIKeyManagerProps> = ({
-  provider,
-  apiKey,
-  setApiKey,
-  }) => {
+export const APIKeyManager: React.FC<APIKeyManagerProps> = ({ provider, apiKey, setApiKey }) => {
   const [isEditing, setIsEditing] = useState(false);
   const [tempKey, setTempKey] = useState(apiKey);
 
@@ -24,15 +20,29 @@ export const APIKeyManager: React.FC<APIKeyManagerProps> = ({
   };
 
   return (
-
- {provider?.name} API Key: +
+
+ {provider?.name} API Key: + {!isEditing && ( +
+ + {apiKey ? '••••••••' : 'Not set (will still work if set in .env file)'} + + setIsEditing(true)} title="Edit API Key"> +
+ +
+ )} +
+ {isEditing ? ( - <> +
setTempKey(e.target.value)} - className="flex-1 p-1 text-sm rounded border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus" + className="flex-1 px-2 py-1 text-xs lg:text-sm rounded border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus" />
@@ -40,20 +50,15 @@ export const APIKeyManager: React.FC = ({ setIsEditing(false)} title="Cancel">
- +
) : ( <> - - {apiKey ? '••••••••' : 'Not set (will still work if set in .env file)'} - - setIsEditing(true)} title="Edit API Key"> -
- - - {provider?.getApiKeyLink && window.open(provider?.getApiKeyLink)} title="Edit API Key"> - {provider?.labelForGetApiKey || 'Get API Key'} -
- } + {provider?.getApiKeyLink && ( + window.open(provider?.getApiKeyLink)} title="Edit API Key"> + {provider?.labelForGetApiKey || 'Get API Key'} +
+ + )} )}
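The APIKeyManager hunks above restructure the component's JSX (masked key display plus an inline edit mode) without changing its props, which remain provider, apiKey, and setApiKey. A minimal usage sketch follows, assuming the parent keeps the key in React state; the wrapper component and the ProviderLike type are illustrative stand-ins, not definitions from the patch, and a real caller would use the project's own provider type:

```tsx
// Illustrative parent component; only the APIKeyManager props shown in the diff are relied on.
import { useState } from 'react';
import { APIKeyManager } from '~/components/chat/APIKeyManager';

// Stand-in for the project's real provider type; the actual ProviderInfo shape may differ.
interface ProviderLike {
  name: string;
  getApiKeyLink?: string;
  labelForGetApiKey?: string;
}

function ProviderKeySettings({ provider }: { provider: ProviderLike }) {
  // The key is held in parent state so it can also be forwarded with chat requests.
  const [apiKey, setApiKey] = useState('');

  return <APIKeyManager provider={provider} apiKey={apiKey} setApiKey={setApiKey} />;
}
```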
diff --git a/app/components/chat/Artifact.tsx b/app/components/chat/Artifact.tsx index 62020fd..682a4c7 100644 --- a/app/components/chat/Artifact.tsx +++ b/app/components/chat/Artifact.tsx @@ -7,6 +7,7 @@ import type { ActionState } from '~/lib/runtime/action-runner'; import { workbenchStore } from '~/lib/stores/workbench'; import { classNames } from '~/utils/classNames'; import { cubicEasingFn } from '~/utils/easings'; +import { WORK_DIR } from '~/utils/constants'; const highlighterOptions = { langs: ['shell'], @@ -129,6 +130,14 @@ const actionVariants = { visible: { opacity: 1, y: 0 }, }; +function openArtifactInWorkbench(filePath: any) { + if (workbenchStore.currentView.get() !== 'code') { + workbenchStore.currentView.set('code'); + } + + workbenchStore.setSelectedFile(`${WORK_DIR}/${filePath}`); +} + const ActionList = memo(({ actions }: ActionListProps) => { return ( @@ -169,7 +178,10 @@ const ActionList = memo(({ actions }: ActionListProps) => { {type === 'file' ? (
Create{' '} - + openArtifactInWorkbench(action.filePath)} + > {action.filePath}
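The Artifact.tsx hunk above wires file actions to the workbench: clicking a created file now switches to the code view and selects that file. Restated outside the diff with explanatory comments (the store calls and WORK_DIR are taken from the hunk itself; typing filePath as string rather than any is the only liberty taken):

```ts
import { workbenchStore } from '~/lib/stores/workbench';
import { WORK_DIR } from '~/utils/constants';

function openArtifactInWorkbench(filePath: string) {
  // Make sure the code view is visible before selecting a file,
  // otherwise the selection would not be shown to the user.
  if (workbenchStore.currentView.get() !== 'code') {
    workbenchStore.currentView.set('code');
  }

  // Action file paths are relative to the project, so prefix the workbench work directory.
  workbenchStore.setSelectedFile(`${WORK_DIR}/${filePath}`);
}
```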
diff --git a/app/components/chat/BaseChat.tsx b/app/components/chat/BaseChat.tsx index 629c5cb..396fb01 100644 --- a/app/components/chat/BaseChat.tsx +++ b/app/components/chat/BaseChat.tsx @@ -27,9 +27,9 @@ const EXAMPLE_PROMPTS = [ const providerList = PROVIDER_LIST; -const ModelSelector = ({ model, setModel, provider, setProvider, modelList, providerList }) => { +const ModelSelector = ({ model, setModel, provider, setProvider, modelList, providerList, apiKeys }) => { return ( -
+