Merge branch 'main' into together-ai-dynamic-model-list

This commit is contained in:
Anirban Kar 2024-12-06 16:35:36 +05:30 committed by GitHub
commit 5ead47992d
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
32 changed files with 1096 additions and 25703 deletions

View File

@ -56,6 +56,16 @@ body:
- OS: [e.g. macOS, Windows, Linux]
- Browser: [e.g. Chrome, Safari, Firefox]
- Version: [e.g. 91.1]
- type: input
id: provider
attributes:
label: Provider Used
description: Tell us the provider you are using.
- type: input
id: model
attributes:
label: Model Used
description: Tell us the model you are using.
- type: textarea
id: additional
attributes:

View File

@ -16,10 +16,10 @@ jobs:
repo-token: ${{ secrets.GITHUB_TOKEN }}
stale-issue-message: "This issue has been marked as stale due to inactivity. If no further activity occurs, it will be closed in 7 days."
stale-pr-message: "This pull request has been marked as stale due to inactivity. If no further activity occurs, it will be closed in 7 days."
days-before-stale: 14 # Number of days before marking an issue or PR as stale
days-before-close: 7 # Number of days after being marked stale before closing
days-before-stale: 10 # Number of days before marking an issue or PR as stale
days-before-close: 4 # Number of days after being marked stale before closing
stale-issue-label: "stale" # Label to apply to stale issues
stale-pr-label: "stale" # Label to apply to stale pull requests
exempt-issue-labels: "pinned,important" # Issues with these labels won't be marked stale
exempt-pr-labels: "pinned,important" # PRs with these labels won't be marked stale
operations-per-run: 90 # Limits the number of actions per run to avoid API rate limits
operations-per-run: 75 # Limits the number of actions per run to avoid API rate limits

View File

@ -2,6 +2,9 @@
echo "🔍 Running pre-commit hook to check the code looks good... 🔍"
export NVM_DIR="$HOME/.nvm"
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" # Load nvm if you're using i
if ! pnpm typecheck; then
echo "❌ Type checking failed! Please review TypeScript types."
echo "Once you're done, don't forget to add your changes to the commit! 🚀"
@ -9,7 +12,7 @@ if ! pnpm typecheck; then
fi
if ! pnpm lint; then
echo "❌ Linting failed! 'pnpm lint:check' will help you fix the easy ones."
echo "❌ Linting failed! 'pnpm lint:fix' will help you fix the easy ones."
echo "Once you're done, don't forget to add your beautification to the commit! 🤩"
exit 1
fi

View File

@ -4,10 +4,13 @@
This fork of Bolt.new (oTToDev) allows you to choose the LLM that you use for each prompt! Currently, you can use OpenAI, Anthropic, Ollama, OpenRouter, Gemini, LMStudio, Mistral, xAI, HuggingFace, DeepSeek, or Groq models - and it is easily extended to use any other model supported by the Vercel AI SDK! See the instructions below for running this locally and extending it to include more models.
Check the [oTToDev Docs](https://coleam00.github.io/bolt.new-any-llm/) for more information.
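For a sense of what extending the model list involves, a statically listed model is usually just one more entry in `MODEL_LIST` in `app/utils/constants.ts`. The sketch below is illustrative only; the field names follow the `ModelInfo` shape used by the `ModelSelector` component in this commit, and the model identifier is a placeholder:

// Hypothetical entry: adjust name, label, and provider to the model you want to expose.
const exampleModel: ModelInfo = {
  name: 'meta-llama/Llama-3.3-70B-Instruct-Turbo', // identifier sent to the provider's API
  label: 'Llama 3.3 70B Instruct Turbo (Together)', // text shown in the model dropdown
  provider: 'Together', // must match a ProviderInfo.name from PROVIDER_LIST
};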
## Join the community for oTToDev!
https://thinktank.ottomator.ai
## Requested Additions - Feel Free to Contribute!
- ✅ OpenRouter Integration (@coleam00)
@ -31,23 +34,24 @@ https://thinktank.ottomator.ai
- ✅ Ability to revert code to earlier version (@wonderwhy-er)
- ✅ Cohere Integration (@hasanraiyan)
- ✅ Dynamic model max token length (@hasanraiyan)
- ✅ Better prompt enhancing (@SujalXplores)
- ✅ Prompt caching (@SujalXplores)
- ✅ Load local projects into the app (@wonderwhy-er)
- ✅ Together Integration (@mouimet-infinisoft)
- ✅ Mobile friendly (@qwikode)
- ✅ Better prompt enhancing (@SujalXplores)
- **HIGH PRIORITY** - ALMOST DONE - Attach images to prompts (@atrokhym)
- ✅ Attach images to prompts (@atrokhym)
- ⬜ **HIGH PRIORITY** - Prevent Bolt from rewriting files as often (file locking and diffs)
- ⬜ **HIGH PRIORITY** - Better prompting for smaller LLMs (code window sometimes doesn't start)
- ⬜ **HIGH PRIORITY** - Run agents in the backend as opposed to a single model call
- ⬜ Azure Open AI API Integration
- ⬜ Perplexity Integration
- ⬜ Vertex AI Integration
- ⬜ Deploy directly to Vercel/Netlify/other similar platforms
- ⬜ Have LLM plan the project in a MD file for better results/transparency
- ⬜ VSCode Integration with git-like confirmations
- ⬜ Upload documents for knowledge - UI design templates, a code base to reference coding style, etc.
- ⬜ Voice prompting
- ⬜ Azure Open AI API Integration
- ⬜ Perplexity Integration
- ⬜ Vertex AI Integration
## Bolt.new: AI-Powered Full-Stack Web Development in the Browser

View File

@ -22,44 +22,9 @@ import { ExportChatButton } from '~/components/chat/chatExportAndImport/ExportCh
import { ImportButtons } from '~/components/chat/chatExportAndImport/ImportButtons';
import { ExamplePrompts } from '~/components/chat/ExamplePrompts';
// @ts-ignore TODO: Introduce proper types
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const ModelSelector = ({ model, setModel, provider, setProvider, modelList, providerList, apiKeys }) => {
return (
<div className="mb-2 flex gap-2 flex-col sm:flex-row">
<select
value={provider?.name}
onChange={(e) => {
setProvider(providerList.find((p: ProviderInfo) => p.name === e.target.value));
const firstModel = [...modelList].find((m) => m.provider == e.target.value);
setModel(firstModel ? firstModel.name : '');
}}
className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all"
>
{providerList.map((provider: ProviderInfo) => (
<option key={provider.name} value={provider.name}>
{provider.name}
</option>
))}
</select>
<select
key={provider?.name}
value={model}
onChange={(e) => setModel(e.target.value)}
className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all lg:max-w-[70%]"
>
{[...modelList]
.filter((e) => e.provider == provider?.name && e.name)
.map((modelOption) => (
<option key={modelOption.name} value={modelOption.name}>
{modelOption.label}
</option>
))}
</select>
</div>
);
};
import FilePreview from './FilePreview';
import { ModelSelector } from '~/components/chat/ModelSelector';
import { SpeechRecognitionButton } from '~/components/chat/SpeechRecognition';
const TEXTAREA_MIN_HEIGHT = 76;
@ -85,6 +50,10 @@ interface BaseChatProps {
enhancePrompt?: () => void;
importChat?: (description: string, messages: Message[]) => Promise<void>;
exportChat?: () => void;
uploadedFiles?: File[];
setUploadedFiles?: (files: File[]) => void;
imageDataList?: string[];
setImageDataList?: (dataList: string[]) => void;
}
export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
@ -96,20 +65,24 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
showChat = true,
chatStarted = false,
isStreaming = false,
enhancingPrompt = false,
promptEnhanced = false,
messages,
input = '',
model,
setModel,
provider,
setProvider,
sendMessage,
input = '',
enhancingPrompt,
handleInputChange,
promptEnhanced,
enhancePrompt,
sendMessage,
handleStop,
importChat,
exportChat,
uploadedFiles = [],
setUploadedFiles,
imageDataList = [],
setImageDataList,
messages,
},
ref,
) => {
@ -117,7 +90,11 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
const [apiKeys, setApiKeys] = useState<Record<string, string>>({});
const [modelList, setModelList] = useState(MODEL_LIST);
const [isModelSettingsCollapsed, setIsModelSettingsCollapsed] = useState(false);
const [isListening, setIsListening] = useState(false);
const [recognition, setRecognition] = useState<SpeechRecognition | null>(null);
const [transcript, setTranscript] = useState('');
console.log(transcript);
useEffect(() => {
// Load API keys from cookies on component mount
try {
@ -140,8 +117,72 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
initializeModelList().then((modelList) => {
setModelList(modelList);
});
if (typeof window !== 'undefined' && ('SpeechRecognition' in window || 'webkitSpeechRecognition' in window)) {
const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
const recognition = new SpeechRecognition();
recognition.continuous = true;
recognition.interimResults = true;
recognition.onresult = (event) => {
const transcript = Array.from(event.results)
.map((result) => result[0])
.map((result) => result.transcript)
.join('');
setTranscript(transcript);
if (handleInputChange) {
const syntheticEvent = {
target: { value: transcript },
} as React.ChangeEvent<HTMLTextAreaElement>;
handleInputChange(syntheticEvent);
}
};
recognition.onerror = (event) => {
console.error('Speech recognition error:', event.error);
setIsListening(false);
};
setRecognition(recognition);
}
}, []);
const startListening = () => {
if (recognition) {
recognition.start();
setIsListening(true);
}
};
const stopListening = () => {
if (recognition) {
recognition.stop();
setIsListening(false);
}
};
const handleSendMessage = (event: React.UIEvent, messageInput?: string) => {
if (sendMessage) {
sendMessage(event, messageInput);
if (recognition) {
recognition.abort(); // Stop current recognition
setTranscript(''); // Clear transcript
setIsListening(false);
// Clear the input by triggering handleInputChange with empty value
if (handleInputChange) {
const syntheticEvent = {
target: { value: '' },
} as React.ChangeEvent<HTMLTextAreaElement>;
handleInputChange(syntheticEvent);
}
}
}
};
const updateApiKey = (provider: string, key: string) => {
try {
const updatedApiKeys = { ...apiKeys, [provider]: key };
@ -159,6 +200,58 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
}
};
const handleFileUpload = () => {
const input = document.createElement('input');
input.type = 'file';
input.accept = 'image/*';
input.onchange = async (e) => {
const file = (e.target as HTMLInputElement).files?.[0];
if (file) {
const reader = new FileReader();
reader.onload = (e) => {
const base64Image = e.target?.result as string;
setUploadedFiles?.([...uploadedFiles, file]);
setImageDataList?.([...imageDataList, base64Image]);
};
reader.readAsDataURL(file);
}
};
input.click();
};
const handlePaste = async (e: React.ClipboardEvent) => {
const items = e.clipboardData?.items;
if (!items) {
return;
}
for (const item of items) {
if (item.type.startsWith('image/')) {
e.preventDefault();
const file = item.getAsFile();
if (file) {
const reader = new FileReader();
reader.onload = (e) => {
const base64Image = e.target?.result as string;
setUploadedFiles?.([...uploadedFiles, file]);
setImageDataList?.([...imageDataList, base64Image]);
};
reader.readAsDataURL(file);
}
break;
}
}
};
const baseChat = (
<div
ref={ref}
@ -275,7 +368,14 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
)}
</div>
</div>
<FilePreview
files={uploadedFiles}
imageDataList={imageDataList}
onRemove={(index) => {
setUploadedFiles?.(uploadedFiles.filter((_, i) => i !== index));
setImageDataList?.(imageDataList.filter((_, i) => i !== index));
}}
/>
<div
className={classNames(
'relative shadow-xs border border-bolt-elements-borderColor backdrop-blur rounded-lg',
@ -283,9 +383,41 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
>
<textarea
ref={textareaRef}
className={
'w-full pl-4 pt-4 pr-16 focus:outline-none resize-none text-bolt-elements-textPrimary placeholder-bolt-elements-textTertiary bg-transparent text-sm'
}
className={classNames(
'w-full pl-4 pt-4 pr-16 focus:outline-none resize-none text-bolt-elements-textPrimary placeholder-bolt-elements-textTertiary bg-transparent text-sm',
'transition-all duration-200',
'hover:border-bolt-elements-focus',
)}
onDragEnter={(e) => {
e.preventDefault();
e.currentTarget.style.border = '2px solid #1488fc';
}}
onDragOver={(e) => {
e.preventDefault();
e.currentTarget.style.border = '2px solid #1488fc';
}}
onDragLeave={(e) => {
e.preventDefault();
e.currentTarget.style.border = '1px solid var(--bolt-elements-borderColor)';
}}
onDrop={(e) => {
e.preventDefault();
e.currentTarget.style.border = '1px solid var(--bolt-elements-borderColor)';
const files = Array.from(e.dataTransfer.files);
files.forEach((file) => {
if (file.type.startsWith('image/')) {
const reader = new FileReader();
reader.onload = (e) => {
const base64Image = e.target?.result as string;
setUploadedFiles?.([...uploadedFiles, file]);
setImageDataList?.([...imageDataList, base64Image]);
};
reader.readAsDataURL(file);
}
});
}}
onKeyDown={(event) => {
if (event.key === 'Enter') {
if (event.shiftKey) {
@ -294,13 +426,19 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
event.preventDefault();
sendMessage?.(event);
if (isStreaming) {
handleStop?.();
return;
}
handleSendMessage?.(event);
}
}}
value={input}
onChange={(event) => {
handleInputChange?.(event);
}}
onPaste={handlePaste}
style={{
minHeight: TEXTAREA_MIN_HEIGHT,
maxHeight: TEXTAREA_MAX_HEIGHT,
@ -311,7 +449,7 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
<ClientOnly>
{() => (
<SendButton
show={input.length > 0 || isStreaming}
show={input.length > 0 || isStreaming || uploadedFiles.length > 0}
isStreaming={isStreaming}
onClick={(event) => {
if (isStreaming) {
@ -319,21 +457,28 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
return;
}
sendMessage?.(event);
if (input.length > 0 || uploadedFiles.length > 0) {
handleSendMessage?.(event);
}
}}
/>
)}
</ClientOnly>
<div className="flex justify-between items-center text-sm p-4 pt-2">
<div className="flex gap-1 items-center">
<IconButton title="Upload file" className="transition-all" onClick={() => handleFileUpload()}>
<div className="i-ph:paperclip text-xl"></div>
</IconButton>
<IconButton
title="Enhance prompt"
disabled={input.length === 0 || enhancingPrompt}
className={classNames('transition-all', {
'opacity-100!': enhancingPrompt,
'text-bolt-elements-item-contentAccent! pr-1.5 enabled:hover:bg-bolt-elements-item-backgroundAccent!':
promptEnhanced,
})}
className={classNames(
'transition-all',
enhancingPrompt ? 'opacity-100' : '',
promptEnhanced ? 'text-bolt-elements-item-contentAccent' : '',
promptEnhanced ? 'pr-1.5' : '',
promptEnhanced ? 'enabled:hover:bg-bolt-elements-item-backgroundAccent' : '',
)}
onClick={() => enhancePrompt?.()}
>
{enhancingPrompt ? (
@ -348,6 +493,13 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
</>
)}
</IconButton>
<SpeechRecognitionButton
isListening={isListening}
onStart={startListening}
onStop={stopListening}
disabled={isStreaming}
/>
{chatStarted && <ClientOnly>{() => <ExportChatButton exportChat={exportChat} />}</ClientOnly>}
</div>
{input.length > 3 ? (
@ -362,7 +514,15 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
</div>
</div>
{!chatStarted && ImportButtons(importChat)}
{!chatStarted && ExamplePrompts(sendMessage)}
{!chatStarted &&
ExamplePrompts((event, messageInput) => {
if (isStreaming) {
handleStop?.();
return;
}
handleSendMessage?.(event, messageInput);
})}
</div>
<ClientOnly>{() => <Workbench chatStarted={chatStarted} isStreaming={isStreaming} />}</ClientOnly>
</div>

View File

@ -12,7 +12,6 @@ import { useMessageParser, usePromptEnhancer, useShortcuts, useSnapScroll } from
import { description, useChatHistory } from '~/lib/persistence';
import { chatStore } from '~/lib/stores/chat';
import { workbenchStore } from '~/lib/stores/workbench';
import { fileModificationsToHTML } from '~/utils/diff';
import { DEFAULT_MODEL, DEFAULT_PROVIDER, PROMPT_COOKIE_KEY, PROVIDER_LIST } from '~/utils/constants';
import { cubicEasingFn } from '~/utils/easings';
import { createScopedLogger, renderLogger } from '~/utils/logger';
@ -89,8 +88,10 @@ export const ChatImpl = memo(
useShortcuts();
const textareaRef = useRef<HTMLTextAreaElement>(null);
const [chatStarted, setChatStarted] = useState(initialMessages.length > 0);
const [uploadedFiles, setUploadedFiles] = useState<File[]>([]); // Move here
const [imageDataList, setImageDataList] = useState<string[]>([]); // Move here
const [model, setModel] = useState(() => {
const savedModel = Cookies.get('selectedModel');
return savedModel || DEFAULT_MODEL;
@ -206,8 +207,6 @@ export const ChatImpl = memo(
runAnimation();
if (fileModifications !== undefined) {
const diff = fileModificationsToHTML(fileModifications);
/**
* If we have file modifications we append a new user message manually since we have to prefix
* the user input with the file modifications and we don't want the new user input to appear
@ -215,7 +214,19 @@ export const ChatImpl = memo(
* manually reset the input and we'd have to manually pass in file attachments. However, those
* aren't relevant here.
*/
append({ role: 'user', content: `[Model: ${model}]\n\n[Provider: ${provider.name}]\n\n${diff}\n\n${_input}` });
append({
role: 'user',
content: [
{
type: 'text',
text: `[Model: ${model}]\n\n[Provider: ${provider.name}]\n\n${_input}`,
},
...imageDataList.map((imageData) => ({
type: 'image',
image: imageData,
})),
] as any, // Type assertion to bypass compiler check
});
/**
* After sending a new message we reset all modifications since the model
@ -223,12 +234,28 @@ export const ChatImpl = memo(
*/
workbenchStore.resetAllFileModifications();
} else {
append({ role: 'user', content: `[Model: ${model}]\n\n[Provider: ${provider.name}]\n\n${_input}` });
append({
role: 'user',
content: [
{
type: 'text',
text: `[Model: ${model}]\n\n[Provider: ${provider.name}]\n\n${_input}`,
},
...imageDataList.map((imageData) => ({
type: 'image',
image: imageData,
})),
] as any, // Type assertion to bypass compiler check
});
}
setInput('');
Cookies.remove(PROMPT_COOKIE_KEY);
// Add file cleanup here
setUploadedFiles([]);
setImageDataList([]);
resetEnhancer();
textareaRef.current?.blur();
@ -321,6 +348,10 @@ export const ChatImpl = memo(
apiKeys,
);
}}
uploadedFiles={uploadedFiles}
setUploadedFiles={setUploadedFiles}
imageDataList={imageDataList}
setImageDataList={setImageDataList}
/>
);
},

View File

@ -0,0 +1,35 @@
import React from 'react';
interface FilePreviewProps {
files: File[];
imageDataList: string[];
onRemove: (index: number) => void;
}
const FilePreview: React.FC<FilePreviewProps> = ({ files, imageDataList, onRemove }) => {
if (!files || files.length === 0) {
return null;
}
return (
<div className="flex flex-row overflow-x-auto -mt-2">
{files.map((file, index) => (
<div key={file.name + file.size} className="mr-2 relative">
{imageDataList[index] && (
<div className="relative pt-4 pr-4">
<img src={imageDataList[index]} alt={file.name} className="max-h-20" />
<button
onClick={() => onRemove(index)}
className="absolute top-1 right-1 z-10 bg-black rounded-full w-5 h-5 shadow-md hover:bg-gray-900 transition-colors flex items-center justify-center"
>
<div className="i-ph:x w-3 h-3 text-gray-200" />
</button>
</div>
)}
</div>
))}
</div>
);
};
export default FilePreview;

View File

@ -0,0 +1,63 @@
import type { ProviderInfo } from '~/types/model';
import type { ModelInfo } from '~/utils/types';
interface ModelSelectorProps {
model?: string;
setModel?: (model: string) => void;
provider?: ProviderInfo;
setProvider?: (provider: ProviderInfo) => void;
modelList: ModelInfo[];
providerList: ProviderInfo[];
apiKeys: Record<string, string>;
}
export const ModelSelector = ({
model,
setModel,
provider,
setProvider,
modelList,
providerList,
}: ModelSelectorProps) => {
return (
<div className="mb-2 flex gap-2 flex-col sm:flex-row">
<select
value={provider?.name ?? ''}
onChange={(e) => {
const newProvider = providerList.find((p: ProviderInfo) => p.name === e.target.value);
if (newProvider && setProvider) {
setProvider(newProvider);
}
const firstModel = [...modelList].find((m) => m.provider === e.target.value);
if (firstModel && setModel) {
setModel(firstModel.name);
}
}}
className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all"
>
{providerList.map((provider: ProviderInfo) => (
<option key={provider.name} value={provider.name}>
{provider.name}
</option>
))}
</select>
<select
key={provider?.name}
value={model}
onChange={(e) => setModel?.(e.target.value)}
className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all lg:max-w-[70%]"
>
{[...modelList]
.filter((e) => e.provider == provider?.name && e.name)
.map((modelOption) => (
<option key={modelOption.name} value={modelOption.name}>
{modelOption.label}
</option>
))}
</select>
</div>
);
};

View File

@ -4,11 +4,12 @@ interface SendButtonProps {
show: boolean;
isStreaming?: boolean;
onClick?: (event: React.MouseEvent<HTMLButtonElement, MouseEvent>) => void;
onImagesSelected?: (images: File[]) => void;
}
const customEasingFn = cubicBezier(0.4, 0, 0.2, 1);
export function SendButton({ show, isStreaming, onClick }: SendButtonProps) {
export const SendButton = ({ show, isStreaming, onClick }: SendButtonProps) => {
return (
<AnimatePresence>
{show ? (
@ -30,4 +31,4 @@ export function SendButton({ show, isStreaming, onClick }: SendButtonProps) {
) : null}
</AnimatePresence>
);
}
};

View File

@ -0,0 +1,28 @@
import { IconButton } from '~/components/ui/IconButton';
import { classNames } from '~/utils/classNames';
import React from 'react';
export const SpeechRecognitionButton = ({
isListening,
onStart,
onStop,
disabled,
}: {
isListening: boolean;
onStart: () => void;
onStop: () => void;
disabled: boolean;
}) => {
return (
<IconButton
title={isListening ? 'Stop listening' : 'Start speech recognition'}
disabled={disabled}
className={classNames('transition-all', {
'text-bolt-elements-item-contentAccent': isListening,
})}
onClick={isListening ? onStop : onStart}
>
{isListening ? <div className="i-ph:microphone-slash text-xl" /> : <div className="i-ph:microphone text-xl" />}
</IconButton>
);
};

View File

@ -2,26 +2,52 @@
* @ts-nocheck
* Preventing TS checks with files presented in the video for a better presentation.
*/
import { modificationsRegex } from '~/utils/diff';
import { MODEL_REGEX, PROVIDER_REGEX } from '~/utils/constants';
import { Markdown } from './Markdown';
interface UserMessageProps {
content: string;
content: string | Array<{ type: string; text?: string; image?: string }>;
}
export function UserMessage({ content }: UserMessageProps) {
if (Array.isArray(content)) {
const textItem = content.find((item) => item.type === 'text');
const textContent = sanitizeUserMessage(textItem?.text || '');
const images = content.filter((item) => item.type === 'image' && item.image);
return (
<div className="overflow-hidden pt-[4px]">
<div className="flex items-start gap-4">
<div className="flex-1">
<Markdown limitedMarkdown>{textContent}</Markdown>
</div>
{images.length > 0 && (
<div className="flex-shrink-0 w-[160px]">
{images.map((item, index) => (
<div key={index} className="relative">
<img
src={item.image}
alt={`Uploaded image ${index + 1}`}
className="w-full h-[160px] rounded-lg object-cover border border-bolt-elements-borderColor"
/>
</div>
))}
</div>
)}
</div>
</div>
);
}
const textContent = sanitizeUserMessage(content);
return (
<div className="overflow-hidden pt-[4px]">
<Markdown limitedMarkdown>{sanitizeUserMessage(content)}</Markdown>
<Markdown limitedMarkdown>{textContent}</Markdown>
</div>
);
}
function sanitizeUserMessage(content: string) {
return content
.replace(modificationsRegex, '')
.replace(MODEL_REGEX, 'Using: $1')
.replace(PROVIDER_REGEX, ' ($1)\n\n')
.trim();
return content.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '');
}

View File

@ -24,17 +24,19 @@ export function Header() {
<span className="i-bolt:logo-text?mask w-[46px] inline-block" />
</a>
</div>
<span className="flex-1 px-4 truncate text-center text-bolt-elements-textPrimary">
<ClientOnly>{() => <ChatDescription />}</ClientOnly>
</span>
{chat.started && (
<ClientOnly>
{() => (
<div className="mr-1">
<HeaderActionButtons />
</div>
)}
</ClientOnly>
{chat.started && ( // Display ChatDescription and HeaderActionButtons only when the chat has started.
<>
<span className="flex-1 px-4 truncate text-center text-bolt-elements-textPrimary">
<ClientOnly>{() => <ChatDescription />}</ClientOnly>
</span>
<ClientOnly>
{() => (
<div className="mr-1">
<HeaderActionButtons />
</div>
)}
</ClientOnly>
</>
)}
</header>
);

View File

@ -19,7 +19,7 @@ export function HeaderActionButtons({}: HeaderActionButtonsProps) {
<div className="flex border border-bolt-elements-borderColor rounded-md overflow-hidden">
<Button
active={showChat}
disabled={!canHideChat || isSmallViewport} // expand button is disabled on mobile as it's needed
disabled={!canHideChat || isSmallViewport} // expand button is disabled on mobile as it's not needed
onClick={() => {
if (canHideChat) {
chatStore.setKey('showChat', !showChat);

View File

@ -1,6 +1,9 @@
import { useParams } from '@remix-run/react';
import { classNames } from '~/utils/classNames';
import * as Dialog from '@radix-ui/react-dialog';
import { type ChatHistoryItem } from '~/lib/persistence';
import WithTooltip from '~/components/ui/Tooltip';
import { useEditChatDescription } from '~/lib/hooks';
interface HistoryItemProps {
item: ChatHistoryItem;
@ -10,48 +13,115 @@ interface HistoryItemProps {
}
export function HistoryItem({ item, onDelete, onDuplicate, exportChat }: HistoryItemProps) {
const { id: urlId } = useParams();
const isActiveChat = urlId === item.urlId;
const { editing, handleChange, handleBlur, handleSubmit, handleKeyDown, currentDescription, toggleEditMode } =
useEditChatDescription({
initialDescription: item.description,
customChatId: item.id,
syncWithGlobalStore: isActiveChat,
});
const renderDescriptionForm = (
<form onSubmit={handleSubmit} className="flex-1 flex items-center">
<input
type="text"
className="flex-1 bg-bolt-elements-background-depth-1 text-bolt-elements-textPrimary rounded px-2 mr-2"
autoFocus
value={currentDescription}
onChange={handleChange}
onBlur={handleBlur}
onKeyDown={handleKeyDown}
/>
<button
type="submit"
className="i-ph:check scale-110 hover:text-bolt-elements-item-contentAccent"
onMouseDown={handleSubmit}
/>
</form>
);
return (
<div className="group rounded-md text-bolt-elements-textSecondary hover:text-bolt-elements-textPrimary hover:bg-bolt-elements-background-depth-3 overflow-hidden flex justify-between items-center px-2 py-1">
<a href={`/chat/${item.urlId}`} className="flex w-full relative truncate block">
{item.description}
<div className="absolute right-0 z-1 top-0 bottom-0 bg-gradient-to-l from-bolt-elements-background-depth-2 group-hover:from-bolt-elements-background-depth-3 box-content pl-3 to-transparent w-10 flex justify-end group-hover:w-15 group-hover:from-99%">
<div className="flex items-center p-1 text-bolt-elements-textSecondary opacity-0 group-hover:opacity-100 transition-opacity">
<WithTooltip tooltip="Export chat">
<button
type="button"
className="i-ph:download-simple scale-110 mr-2 hover:text-bolt-elements-item-contentAccent"
<div
className={classNames(
'group rounded-md text-bolt-elements-textSecondary hover:text-bolt-elements-textPrimary hover:bg-bolt-elements-background-depth-3 overflow-hidden flex justify-between items-center px-2 py-1',
{ '[&&]:text-bolt-elements-textPrimary bg-bolt-elements-background-depth-3': isActiveChat },
)}
>
{editing ? (
renderDescriptionForm
) : (
<a href={`/chat/${item.urlId}`} className="flex w-full relative truncate block">
{currentDescription}
<div
className={classNames(
'absolute right-0 z-1 top-0 bottom-0 bg-gradient-to-l from-bolt-elements-background-depth-2 group-hover:from-bolt-elements-background-depth-3 box-content pl-3 to-transparent w-10 flex justify-end group-hover:w-22 group-hover:from-99%',
{ 'from-bolt-elements-background-depth-3 w-10 ': isActiveChat },
)}
>
<div className="flex items-center p-1 text-bolt-elements-textSecondary opacity-0 group-hover:opacity-100 transition-opacity">
<ChatActionButton
toolTipContent="Export chat"
icon="i-ph:download-simple"
onClick={(event) => {
event.preventDefault();
exportChat(item.id);
}}
title="Export chat"
/>
</WithTooltip>
{onDuplicate && (
<WithTooltip tooltip="Duplicate chat">
<button
type="button"
className="i-ph:copy scale-110 mr-2 hover:text-bolt-elements-item-contentAccent"
{onDuplicate && (
<ChatActionButton
toolTipContent="Duplicate chat"
icon="i-ph:copy"
onClick={() => onDuplicate?.(item.id)}
title="Duplicate chat"
/>
</WithTooltip>
)}
<Dialog.Trigger asChild>
<WithTooltip tooltip="Delete chat">
<button
type="button"
className="i-ph:trash scale-110 hover:text-bolt-elements-button-danger-text"
)}
<ChatActionButton
toolTipContent="Rename chat"
icon="i-ph:pencil-fill"
onClick={(event) => {
event.preventDefault();
toggleEditMode();
}}
/>
<Dialog.Trigger asChild>
<ChatActionButton
toolTipContent="Delete chat"
icon="i-ph:trash"
className="[&&]:hover:text-bolt-elements-button-danger-text"
onClick={(event) => {
event.preventDefault();
onDelete?.(event);
}}
/>
</WithTooltip>
</Dialog.Trigger>
</Dialog.Trigger>
</div>
</div>
</div>
</a>
</a>
)}
</div>
);
}
const ChatActionButton = ({
toolTipContent,
icon,
className,
onClick,
}: {
toolTipContent: string;
icon: string;
className?: string;
onClick: (event: React.MouseEvent<HTMLButtonElement, MouseEvent>) => void;
btnTitle?: string;
}) => {
return (
<WithTooltip tooltip={toolTipContent}>
<button
type="button"
className={`scale-110 mr-2 hover:text-bolt-elements-item-contentAccent ${icon} ${className ? className : ''}`}
onClick={onClick}
/>
</WithTooltip>
);
};

View File

@ -33,7 +33,7 @@ const menuVariants = {
type DialogContent = { type: 'delete'; item: ChatHistoryItem } | null;
export function Menu() {
export const Menu = () => {
const { duplicateCurrentChat, exportChat } = useChatHistory();
const menuRef = useRef<HTMLDivElement>(null);
const [list, setList] = useState<ChatHistoryItem[]>([]);
@ -206,4 +206,4 @@ export function Menu() {
</div>
</motion.div>
);
}
};

View File

@ -4,11 +4,16 @@ import { IconButton } from '~/components/ui/IconButton';
import { workbenchStore } from '~/lib/stores/workbench';
import { PortDropdown } from './PortDropdown';
type ResizeSide = 'left' | 'right' | null;
export const Preview = memo(() => {
const iframeRef = useRef<HTMLIFrameElement>(null);
const containerRef = useRef<HTMLDivElement>(null);
const inputRef = useRef<HTMLInputElement>(null);
const [activePreviewIndex, setActivePreviewIndex] = useState(0);
const [isPortDropdownOpen, setIsPortDropdownOpen] = useState(false);
const [isFullscreen, setIsFullscreen] = useState(false);
const hasSelectedPreview = useRef(false);
const previews = useStore(workbenchStore.previews);
const activePreview = previews[activePreviewIndex];
@ -16,6 +21,23 @@ export const Preview = memo(() => {
const [url, setUrl] = useState('');
const [iframeUrl, setIframeUrl] = useState<string | undefined>();
// Toggle between responsive mode and device mode
const [isDeviceModeOn, setIsDeviceModeOn] = useState(false);
// Use percentage for width
const [widthPercent, setWidthPercent] = useState<number>(37.5); // 375px assuming 1000px window width initially
const resizingState = useRef({
isResizing: false,
side: null as ResizeSide,
startX: 0,
startWidthPercent: 37.5,
windowWidth: window.innerWidth,
});
// Define the scaling factor
const SCALING_FACTOR = 2; // Adjust this value to increase/decrease sensitivity
useEffect(() => {
if (!activePreview) {
setUrl('');
@ -25,10 +47,9 @@ export const Preview = memo(() => {
}
const { baseUrl } = activePreview;
setUrl(baseUrl);
setIframeUrl(baseUrl);
}, [activePreview, iframeUrl]);
}, [activePreview]);
const validateUrl = useCallback(
(value: string) => {
@ -56,14 +77,13 @@ export const Preview = memo(() => {
[],
);
// when previews change, display the lowest port if user hasn't selected a preview
// When previews change, display the lowest port if user hasn't selected a preview
useEffect(() => {
if (previews.length > 1 && !hasSelectedPreview.current) {
const minPortIndex = previews.reduce(findMinPortIndex, 0);
setActivePreviewIndex(minPortIndex);
}
}, [previews]);
}, [previews, findMinPortIndex]);
const reloadPreview = () => {
if (iframeRef.current) {
@ -71,13 +91,134 @@ export const Preview = memo(() => {
}
};
const toggleFullscreen = async () => {
if (!isFullscreen && containerRef.current) {
await containerRef.current.requestFullscreen();
} else if (document.fullscreenElement) {
await document.exitFullscreen();
}
};
useEffect(() => {
const handleFullscreenChange = () => {
setIsFullscreen(!!document.fullscreenElement);
};
document.addEventListener('fullscreenchange', handleFullscreenChange);
return () => {
document.removeEventListener('fullscreenchange', handleFullscreenChange);
};
}, []);
const toggleDeviceMode = () => {
setIsDeviceModeOn((prev) => !prev);
};
const startResizing = (e: React.MouseEvent, side: ResizeSide) => {
if (!isDeviceModeOn) {
return;
}
// Prevent text selection
document.body.style.userSelect = 'none';
resizingState.current.isResizing = true;
resizingState.current.side = side;
resizingState.current.startX = e.clientX;
resizingState.current.startWidthPercent = widthPercent;
resizingState.current.windowWidth = window.innerWidth;
document.addEventListener('mousemove', onMouseMove);
document.addEventListener('mouseup', onMouseUp);
e.preventDefault(); // Prevent any text selection on mousedown
};
const onMouseMove = (e: MouseEvent) => {
if (!resizingState.current.isResizing) {
return;
}
const dx = e.clientX - resizingState.current.startX;
const windowWidth = resizingState.current.windowWidth;
// Apply scaling factor to increase sensitivity
const dxPercent = (dx / windowWidth) * 100 * SCALING_FACTOR;
let newWidthPercent = resizingState.current.startWidthPercent;
if (resizingState.current.side === 'right') {
newWidthPercent = resizingState.current.startWidthPercent + dxPercent;
} else if (resizingState.current.side === 'left') {
newWidthPercent = resizingState.current.startWidthPercent - dxPercent;
}
// Clamp the width between 10% and 90%
newWidthPercent = Math.max(10, Math.min(newWidthPercent, 90));
setWidthPercent(newWidthPercent);
};
const onMouseUp = () => {
resizingState.current.isResizing = false;
resizingState.current.side = null;
document.removeEventListener('mousemove', onMouseMove);
document.removeEventListener('mouseup', onMouseUp);
// Restore text selection
document.body.style.userSelect = '';
};
// Handle window resize to ensure widthPercent remains valid
useEffect(() => {
const handleWindowResize = () => {
/*
* Optional: Adjust widthPercent if necessary
* For now, since widthPercent is relative, no action is needed
*/
};
window.addEventListener('resize', handleWindowResize);
return () => {
window.removeEventListener('resize', handleWindowResize);
};
}, []);
// A small helper component for the handle's "grip" icon
const GripIcon = () => (
<div
style={{
display: 'flex',
justifyContent: 'center',
alignItems: 'center',
height: '100%',
pointerEvents: 'none',
}}
>
<div
style={{
color: 'rgba(0,0,0,0.5)',
fontSize: '10px',
lineHeight: '5px',
userSelect: 'none',
marginLeft: '1px',
}}
>
</div>
</div>
);
return (
<div className="w-full h-full flex flex-col">
<div ref={containerRef} className="w-full h-full flex flex-col relative">
{isPortDropdownOpen && (
<div className="z-iframe-overlay w-full h-full absolute" onClick={() => setIsPortDropdownOpen(false)} />
)}
<div className="bg-bolt-elements-background-depth-2 p-2 flex items-center gap-1.5">
<IconButton icon="i-ph:arrow-clockwise" onClick={reloadPreview} />
<div
className="flex items-center gap-1 flex-grow bg-bolt-elements-preview-addressBar-background border border-bolt-elements-borderColor text-bolt-elements-preview-addressBar-text rounded-full px-3 py-1 text-sm hover:bg-bolt-elements-preview-addressBar-backgroundHover hover:focus-within:bg-bolt-elements-preview-addressBar-backgroundActive focus-within:bg-bolt-elements-preview-addressBar-backgroundActive
focus-within-border-bolt-elements-borderColorActive focus-within:text-bolt-elements-preview-addressBar-textActive"
@ -101,6 +242,7 @@ export const Preview = memo(() => {
}}
/>
</div>
{previews.length > 1 && (
<PortDropdown
activePreviewIndex={activePreviewIndex}
@ -111,13 +253,93 @@ export const Preview = memo(() => {
previews={previews}
/>
)}
{/* Device mode toggle button */}
<IconButton
icon="i-ph:devices"
onClick={toggleDeviceMode}
title={isDeviceModeOn ? 'Switch to Responsive Mode' : 'Switch to Device Mode'}
/>
{/* Fullscreen toggle button */}
<IconButton
icon={isFullscreen ? 'i-ph:arrows-in' : 'i-ph:arrows-out'}
onClick={toggleFullscreen}
title={isFullscreen ? 'Exit Full Screen' : 'Full Screen'}
/>
</div>
<div className="flex-1 border-t border-bolt-elements-borderColor">
{activePreview ? (
<iframe ref={iframeRef} className="border-none w-full h-full bg-white" src={iframeUrl} />
) : (
<div className="flex w-full h-full justify-center items-center bg-white">No preview available</div>
)}
<div className="flex-1 border-t border-bolt-elements-borderColor flex justify-center items-center overflow-auto">
<div
style={{
width: isDeviceModeOn ? `${widthPercent}%` : '100%',
height: '100%', // Always full height
overflow: 'visible',
background: '#fff',
position: 'relative',
display: 'flex',
}}
>
{activePreview ? (
<iframe ref={iframeRef} className="border-none w-full h-full bg-white" src={iframeUrl} allowFullScreen />
) : (
<div className="flex w-full h-full justify-center items-center bg-white">No preview available</div>
)}
{isDeviceModeOn && (
<>
{/* Left handle */}
<div
onMouseDown={(e) => startResizing(e, 'left')}
style={{
position: 'absolute',
top: 0,
left: 0,
width: '15px',
marginLeft: '-15px',
height: '100%',
cursor: 'ew-resize',
background: 'rgba(255,255,255,.2)',
display: 'flex',
alignItems: 'center',
justifyContent: 'center',
transition: 'background 0.2s',
userSelect: 'none',
}}
onMouseOver={(e) => (e.currentTarget.style.background = 'rgba(255,255,255,.5)')}
onMouseOut={(e) => (e.currentTarget.style.background = 'rgba(255,255,255,.2)')}
title="Drag to resize width"
>
<GripIcon />
</div>
{/* Right handle */}
<div
onMouseDown={(e) => startResizing(e, 'right')}
style={{
position: 'absolute',
top: 0,
right: 0,
width: '15px',
marginRight: '-15px',
height: '100%',
cursor: 'ew-resize',
background: 'rgba(255,255,255,.2)',
display: 'flex',
alignItems: 'center',
justifyContent: 'center',
transition: 'background 0.2s',
userSelect: 'none',
}}
onMouseOver={(e) => (e.currentTarget.style.background = 'rgba(255,255,255,.5)')}
onMouseOut={(e) => (e.currentTarget.style.background = 'rgba(255,255,255,.2)')}
title="Drag to resize width"
>
<GripIcon />
</div>
</>
)}
</div>
</div>
</div>
);

View File

@ -128,7 +128,12 @@ export function getXAIModel(apiKey: OptionalApiKey, model: string) {
}
export function getModel(provider: string, model: string, env: Env, apiKeys?: Record<string, string>) {
const apiKey = getAPIKey(env, provider, apiKeys);
/*
* let apiKey; // Declare first
* let baseURL;
*/
const apiKey = getAPIKey(env, provider, apiKeys); // Then assign
const baseURL = getBaseURL(env, provider);
switch (provider) {

View File

@ -23,16 +23,37 @@ export type Messages = Message[];
export type StreamingOptions = Omit<Parameters<typeof _streamText>[0], 'model'>;
function extractPropertiesFromMessage(message: Message): { model: string; provider: string; content: string } {
// Extract model
const modelMatch = message.content.match(MODEL_REGEX);
const textContent = Array.isArray(message.content)
? message.content.find((item) => item.type === 'text')?.text || ''
: message.content;
const modelMatch = textContent.match(MODEL_REGEX);
const providerMatch = textContent.match(PROVIDER_REGEX);
/*
* Extract model
* const modelMatch = message.content.match(MODEL_REGEX);
*/
const model = modelMatch ? modelMatch[1] : DEFAULT_MODEL;
// Extract provider
const providerMatch = message.content.match(PROVIDER_REGEX);
/*
* Extract provider
* const providerMatch = message.content.match(PROVIDER_REGEX);
*/
const provider = providerMatch ? providerMatch[1] : DEFAULT_PROVIDER.name;
// Remove model and provider lines from content
const cleanedContent = message.content.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '').trim();
const cleanedContent = Array.isArray(message.content)
? message.content.map((item) => {
if (item.type === 'text') {
return {
type: 'text',
text: item.text?.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, ''),
};
}
return item; // Preserve image_url and other types as is
})
: textContent.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '');
return { model, provider, content: cleanedContent };
}
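To make the new multimodal handling concrete, here is a hedged sketch of what extractPropertiesFromMessage receives and returns once an image is attached (illustrative values; it assumes MODEL_REGEX and PROVIDER_REGEX capture the bracketed model and provider names, as the existing constants do):

// Hypothetical message produced by the image-attachment flow.
const message = {
  role: 'user',
  content: [
    { type: 'text', text: '[Model: gpt-4o]\n\n[Provider: OpenAI]\n\nDescribe this screenshot' },
    { type: 'image', image: 'data:image/png;base64,<...>' },
  ],
};

// extractPropertiesFromMessage(message) would then yield:
// model    -> 'gpt-4o'
// provider -> 'OpenAI'
// content  -> the same array, with the [Model: ...] and [Provider: ...] prefixes stripped
//             from the text item and the image item passed through unchanged.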

View File

@ -2,4 +2,5 @@ export * from './useMessageParser';
export * from './usePromptEnhancer';
export * from './useShortcuts';
export * from './useSnapScroll';
export * from './useEditChatDescription';
export { default } from './useViewport';

View File

@ -0,0 +1,163 @@
import { useStore } from '@nanostores/react';
import { useCallback, useEffect, useState } from 'react';
import { toast } from 'react-toastify';
import {
chatId as chatIdStore,
description as descriptionStore,
db,
updateChatDescription,
getMessages,
} from '~/lib/persistence';
interface EditChatDescriptionOptions {
initialDescription?: string;
customChatId?: string;
syncWithGlobalStore?: boolean;
}
type EditChatDescriptionHook = {
editing: boolean;
handleChange: (e: React.ChangeEvent<HTMLInputElement>) => void;
handleBlur: () => Promise<void>;
handleSubmit: (event: React.FormEvent) => Promise<void>;
handleKeyDown: (event: React.KeyboardEvent<HTMLInputElement>) => Promise<void>;
currentDescription: string;
toggleEditMode: () => void;
};
/**
* Hook to manage the state and behavior for editing chat descriptions.
*
* Offers functions to:
* - Switch between edit and view modes.
* - Manage input changes, blur, and form submission events.
* - Save updates to IndexedDB and optionally to the global application state.
*
* @param {Object} options
* @param {string} options.initialDescription - The current chat description.
* @param {string} options.customChatId - Optional ID for updating the description via the sidebar.
* @param {boolean} options.syncWithGlobalStore - Flag to indicate global description store synchronization.
* @returns {EditChatDescriptionHook} Methods and state for managing description edits.
*/
export function useEditChatDescription({
initialDescription = descriptionStore.get()!,
customChatId,
syncWithGlobalStore,
}: EditChatDescriptionOptions): EditChatDescriptionHook {
const chatIdFromStore = useStore(chatIdStore);
const [editing, setEditing] = useState(false);
const [currentDescription, setCurrentDescription] = useState(initialDescription);
const [chatId, setChatId] = useState<string>();
useEffect(() => {
setChatId(customChatId || chatIdFromStore);
}, [customChatId, chatIdFromStore]);
useEffect(() => {
setCurrentDescription(initialDescription);
}, [initialDescription]);
const toggleEditMode = useCallback(() => setEditing((prev) => !prev), []);
const handleChange = useCallback((e: React.ChangeEvent<HTMLInputElement>) => {
setCurrentDescription(e.target.value);
}, []);
const fetchLatestDescription = useCallback(async () => {
if (!db || !chatId) {
return initialDescription;
}
try {
const chat = await getMessages(db, chatId);
return chat?.description || initialDescription;
} catch (error) {
console.error('Failed to fetch latest description:', error);
return initialDescription;
}
}, [db, chatId, initialDescription]);
const handleBlur = useCallback(async () => {
const latestDescription = await fetchLatestDescription();
setCurrentDescription(latestDescription);
toggleEditMode();
}, [fetchLatestDescription, toggleEditMode]);
const isValidDescription = useCallback((desc: string): boolean => {
const trimmedDesc = desc.trim();
if (trimmedDesc === initialDescription) {
toggleEditMode();
return false; // No change, skip validation
}
const lengthValid = trimmedDesc.length > 0 && trimmedDesc.length <= 100;
const characterValid = /^[a-zA-Z0-9\s]+$/.test(trimmedDesc);
if (!lengthValid) {
toast.error('Description must be between 1 and 100 characters.');
return false;
}
if (!characterValid) {
toast.error('Description can only contain alphanumeric characters and spaces.');
return false;
}
return true;
}, []);
const handleSubmit = useCallback(
async (event: React.FormEvent) => {
event.preventDefault();
if (!isValidDescription(currentDescription)) {
return;
}
try {
if (!db) {
toast.error('Chat persistence is not available');
return;
}
if (!chatId) {
toast.error('Chat Id is not available');
return;
}
await updateChatDescription(db, chatId, currentDescription);
if (syncWithGlobalStore) {
descriptionStore.set(currentDescription);
}
toast.success('Chat description updated successfully');
} catch (error) {
toast.error('Failed to update chat description: ' + (error as Error).message);
}
toggleEditMode();
},
[currentDescription, db, chatId, initialDescription, customChatId],
);
const handleKeyDown = useCallback(
async (e: React.KeyboardEvent<HTMLInputElement>) => {
if (e.key === 'Escape') {
await handleBlur();
}
},
[handleBlur],
);
return {
editing,
handleChange,
handleBlur,
handleSubmit,
handleKeyDown,
currentDescription,
toggleEditMode,
};
}
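A minimal usage sketch of the hook (the component and markup below are hypothetical; the real wiring lives in HistoryItem and ChatDescription elsewhere in this commit):

// Hypothetical consumer mirroring how HistoryItem and ChatDescription use the hook.
function RenameableTitle({ chat }: { chat: { id: string; description?: string } }) {
  const { editing, currentDescription, handleChange, handleBlur, handleKeyDown, handleSubmit, toggleEditMode } =
    useEditChatDescription({
      initialDescription: chat.description,
      customChatId: chat.id, // omit to target the currently open chat
      syncWithGlobalStore: false, // set to true to keep the header title in sync
    });

  if (!editing) {
    return <span onDoubleClick={toggleEditMode}>{currentDescription}</span>;
  }

  return (
    <form onSubmit={handleSubmit}>
      <input value={currentDescription} onChange={handleChange} onBlur={handleBlur} onKeyDown={handleKeyDown} autoFocus />
    </form>
  );
}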

View File

@ -1,6 +1,68 @@
import { useStore } from '@nanostores/react';
import { description } from './useChatHistory';
import { TooltipProvider } from '@radix-ui/react-tooltip';
import WithTooltip from '~/components/ui/Tooltip';
import { useEditChatDescription } from '~/lib/hooks';
import { description as descriptionStore } from '~/lib/persistence';
export function ChatDescription() {
return useStore(description);
const initialDescription = useStore(descriptionStore)!;
const { editing, handleChange, handleBlur, handleSubmit, handleKeyDown, currentDescription, toggleEditMode } =
useEditChatDescription({
initialDescription,
syncWithGlobalStore: true,
});
if (!initialDescription) {
// doing this to prevent showing edit button until chat description is set
return null;
}
return (
<div className="flex items-center justify-center">
{editing ? (
<form onSubmit={handleSubmit} className="flex items-center justify-center">
<input
type="text"
className="bg-bolt-elements-background-depth-1 text-bolt-elements-textPrimary rounded px-2 mr-2 w-fit"
autoFocus
value={currentDescription}
onChange={handleChange}
onBlur={handleBlur}
onKeyDown={handleKeyDown}
style={{ width: `${Math.max(currentDescription.length * 8, 100)}px` }}
/>
<TooltipProvider>
<WithTooltip tooltip="Save title">
<div className="flex justify-between items-center p-2 rounded-md bg-bolt-elements-item-backgroundAccent">
<button
type="submit"
className="i-ph:check-bold scale-110 hover:text-bolt-elements-item-contentAccent"
onMouseDown={handleSubmit}
/>
</div>
</WithTooltip>
</TooltipProvider>
</form>
) : (
<>
{currentDescription}
<TooltipProvider>
<WithTooltip tooltip="Rename chat">
<div className="flex justify-between items-center p-2 rounded-md bg-bolt-elements-item-backgroundAccent ml-2">
<button
type="button"
className="i-ph:pencil-fill scale-110 hover:text-bolt-elements-item-contentAccent"
onClick={(event) => {
event.preventDefault();
toggleEditMode();
}}
/>
</div>
</WithTooltip>
</TooltipProvider>
</>
)}
</div>
);
}

View File

@ -52,17 +52,23 @@ export async function setMessages(
messages: Message[],
urlId?: string,
description?: string,
timestamp?: string,
): Promise<void> {
return new Promise((resolve, reject) => {
const transaction = db.transaction('chats', 'readwrite');
const store = transaction.objectStore('chats');
if (timestamp && isNaN(Date.parse(timestamp))) {
reject(new Error('Invalid timestamp'));
return;
}
const request = store.put({
id,
messages,
urlId,
description,
timestamp: new Date().toISOString(),
timestamp: timestamp ?? new Date().toISOString(),
});
request.onsuccess = () => resolve();
@ -212,3 +218,17 @@ export async function createChatFromMessages(
return newUrlId; // Return the urlId instead of id for navigation
}
export async function updateChatDescription(db: IDBDatabase, id: string, description: string): Promise<void> {
const chat = await getMessages(db, id);
if (!chat) {
throw new Error('Chat not found');
}
if (!description.trim()) {
throw new Error('Description cannot be empty');
}
await setMessages(db, id, chat.messages, chat.urlId, description, chat.timestamp);
}
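For illustration, renaming a chat through the new helper reuses the stored timestamp, which presumably keeps the rename from reordering the chat history (hypothetical id and description; db comes from the surrounding persistence module):

// Hypothetical call: rename chat 'abc123' while keeping its original timestamp.
await updateChatDescription(db, 'abc123', 'My renamed chat');

// Roughly equivalent to fetching the chat and re-saving it with the same timestamp:
// const chat = await getMessages(db, 'abc123');
// await setMessages(db, 'abc123', chat.messages, chat.urlId, 'My renamed chat', chat.timestamp);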

View File

@ -100,6 +100,10 @@ export class ActionRunner {
.catch((error) => {
console.error('Action failed:', error);
});
await this.#currentExecutionPromise;
return;
}
async #executeAction(actionId: string, isStreaming: boolean = false) {

View File

@ -212,9 +212,5 @@ function isBinaryFile(buffer: Uint8Array | undefined) {
* array buffer.
*/
function convertToBuffer(view: Uint8Array): Buffer {
const buffer = new Uint8Array(view.buffer, view.byteOffset, view.byteLength);
Object.setPrototypeOf(buffer, Buffer.prototype);
return buffer as Buffer;
return Buffer.from(view.buffer, view.byteOffset, view.byteLength);
}

View File

@ -29,8 +29,9 @@ function parseCookies(cookieHeader:string) {
}
async function chatAction({ context, request }: ActionFunctionArgs) {
const { messages } = await request.json<{
const { messages, model } = await request.json<{
messages: Messages;
model: string;
}>();
const cookieHeader = request.headers.get('Cookie');

View File

@ -44,8 +44,9 @@ async function enhancerAction({ context, request }: ActionFunctionArgs) {
content:
`[Model: ${model}]\n\n[Provider: ${providerName}]\n\n` +
stripIndents`
You are a professional prompt engineer specializing in crafting precise, effective prompts.
You are a professional prompt engineer specializing in crafting precise, effective prompts.
Your task is to enhance prompts by making them more specific, actionable, and effective.
I want you to improve the user prompt that is wrapped in \`<original_prompt>\` tags.
For valid prompts:
@ -55,12 +56,14 @@ async function enhancerAction({ context, request }: ActionFunctionArgs) {
- Maintain the core intent
- Ensure the prompt is self-contained
- Use professional language
For invalid or unclear prompts:
- Respond with a clear, professional guidance message
- Keep responses concise and actionable
- Maintain a helpful, constructive tone
- Focus on what the user should provide
- Use a standard template for consistency
IMPORTANT: Your response must ONLY contain the enhanced prompt text.
Do not include any explanations, metadata, or wrapper tags.

View File

@ -1,3 +1,5 @@
interface Window {
showDirectoryPicker(): Promise<FileSystemDirectoryHandle>;
webkitSpeechRecognition: typeof SpeechRecognition;
SpeechRecognition: typeof SpeechRecognition;
}

package-lock.json (generated): 25548 lines
File diff suppressed because it is too large.

View File

@ -101,6 +101,7 @@
"@cloudflare/workers-types": "^4.20241127.0",
"@remix-run/dev": "^2.15.0",
"@types/diff": "^5.2.3",
"@types/dom-speech-recognition": "^0.0.4",
"@types/file-saver": "^2.0.7",
"@types/js-cookie": "^3.0.6",
"@types/react": "^18.3.12",

View File

@ -222,6 +222,9 @@ importers:
'@types/diff':
specifier: ^5.2.3
version: 5.2.3
'@types/dom-speech-recognition':
specifier: ^0.0.4
version: 0.0.4
'@types/file-saver':
specifier: ^2.0.7
version: 2.0.7
@ -2039,6 +2042,9 @@ packages:
'@types/diff@5.2.3':
resolution: {integrity: sha512-K0Oqlrq3kQMaO2RhfrNQX5trmt+XLyom88zS0u84nnIcLvFnRUMRRHmrGny5GSM+kNO9IZLARsdQHDzkhAgmrQ==}
'@types/dom-speech-recognition@0.0.4':
resolution: {integrity: sha512-zf2GwV/G6TdaLwpLDcGTIkHnXf8JEf/viMux+khqKQKDa8/8BAUtXXZS563GnvJ4Fg0PBLGAaFf2GekEVSZ6GQ==}
'@types/eslint@8.56.10':
resolution: {integrity: sha512-Shavhk87gCtY2fhXDctcfS3e6FdxWkCx1iUZ9eEUbh7rTqlZT0/IzOkCOVt0fCjcFuZ9FPYfuezTBImfHCDBGQ==}
@ -7464,6 +7470,8 @@ snapshots:
'@types/diff@5.2.3': {}
'@types/dom-speech-recognition@0.0.4': {}
'@types/eslint@8.56.10':
dependencies:
'@types/estree': 1.0.6
@ -7812,7 +7820,7 @@ snapshots:
'@babel/plugin-syntax-typescript': 7.25.9(@babel/core@7.26.0)
'@vanilla-extract/babel-plugin-debug-ids': 1.1.0
'@vanilla-extract/css': 1.16.1
esbuild: 0.17.6
esbuild: 0.17.19
eval: 0.1.8
find-up: 5.0.0
javascript-stringify: 2.1.0

View File

@ -1,7 +1,7 @@
{
"compilerOptions": {
"lib": ["DOM", "DOM.Iterable", "ESNext"],
"types": ["@remix-run/cloudflare", "vite/client", "@cloudflare/workers-types/2023-07-01"],
"types": ["@remix-run/cloudflare", "vite/client", "@cloudflare/workers-types/2023-07-01", "@types/dom-speech-recognition"],
"isolatedModules": true,
"esModuleInterop": true,
"jsx": "react-jsx",

View File

@ -19,8 +19,7 @@ export default defineConfig((config) => {
future: {
v3_fetcherPersist: true,
v3_relativeSplatPath: true,
v3_throwAbortReason: true,
v3_lazyRouteDiscovery: true,
v3_throwAbortReason: true
},
}),
UnoCSS(),