Merge branch 'main' into github-import

Anirban Kar 2024-12-06 18:54:06 +05:30 committed by GitHub
commit fe2f0080a3
29 changed files with 868 additions and 135 deletions

View File

@ -40,18 +40,18 @@ https://thinktank.ottomator.ai
- ✅ Together Integration (@mouimet-infinisoft)
- ✅ Mobile friendly (@qwikode)
- ✅ Better prompt enhancing (@SujalXplores)
- ⬜ **HIGH PRIORITY** - ALMOST DONE - Attach images to prompts (@atrokhym)
- ✅ Attach images to prompts (@atrokhym)
- ⬜ **HIGH PRIORITY** - Prevent Bolt from rewriting files as often (file locking and diffs)
- ⬜ **HIGH PRIORITY** - Better prompting for smaller LLMs (code window sometimes doesn't start)
- ⬜ **HIGH PRIORITY** - Run agents in the backend as opposed to a single model call
- ⬜ Azure Open AI API Integration
- ⬜ Perplexity Integration
- ⬜ Vertex AI Integration
- ⬜ Deploy directly to Vercel/Netlify/other similar platforms
- ⬜ Have LLM plan the project in a MD file for better results/transparency
- ⬜ VSCode Integration with git-like confirmations
- ⬜ Upload documents for knowledge - UI design templates, a code base to reference coding style, etc.
- ⬜ Voice prompting
- ⬜ Azure Open AI API Integration
- ⬜ Perplexity Integration
- ⬜ Vertex AI Integration
## Bolt.new: AI-Powered Full-Stack Web Development in the Browser

View File

@ -28,6 +28,7 @@ interface ArtifactProps {
export const Artifact = memo(({ messageId }: ArtifactProps) => {
const userToggledActions = useRef(false);
const [showActions, setShowActions] = useState(false);
const [allActionFinished, setAllActionFinished] = useState(false);
const artifacts = useStore(workbenchStore.artifacts);
const artifact = artifacts[messageId];
@ -47,6 +48,11 @@ export const Artifact = memo(({ messageId }: ArtifactProps) => {
if (actions.length && !showActions && !userToggledActions.current) {
setShowActions(true);
}
if (actions.length !== 0) {
const finished = !actions.find((action) => action.status !== 'complete');
setAllActionFinished(finished);
}
}, [actions]);
return (
@ -59,6 +65,18 @@ export const Artifact = memo(({ messageId }: ArtifactProps) => {
workbenchStore.showWorkbench.set(!showWorkbench);
}}
>
{artifact.type == 'bundled' && (
<>
<div className="p-4">
{allActionFinished ? (
<div className={'i-ph:files-light'} style={{ fontSize: '2rem' }}></div>
) : (
<div className={'i-svg-spinners:90-ring-with-bg'} style={{ fontSize: '2rem' }}></div>
)}
</div>
<div className="bg-bolt-elements-artifacts-borderColor w-[1px]" />
</>
)}
<div className="px-5 p-3.5 w-full text-left">
<div className="w-full text-bolt-elements-textPrimary font-medium leading-5 text-sm">{artifact?.title}</div>
<div className="w-full w-full text-bolt-elements-textSecondary text-xs mt-0.5">Click to open Workbench</div>
@ -66,7 +84,7 @@ export const Artifact = memo(({ messageId }: ArtifactProps) => {
</button>
<div className="bg-bolt-elements-artifacts-borderColor w-[1px]" />
<AnimatePresence>
{actions.length && (
{actions.length && artifact.type !== 'bundled' && (
<motion.button
initial={{ width: 0 }}
animate={{ width: 'auto' }}
@ -83,7 +101,7 @@ export const Artifact = memo(({ messageId }: ArtifactProps) => {
</AnimatePresence>
</div>
<AnimatePresence>
{showActions && actions.length > 0 && (
{artifact.type !== 'bundled' && showActions && actions.length > 0 && (
<motion.div
className="actions"
initial={{ height: 0 }}
@ -92,6 +110,7 @@ export const Artifact = memo(({ messageId }: ArtifactProps) => {
transition={{ duration: 0.15 }}
>
<div className="bg-bolt-elements-artifacts-borderColor h-[1px]" />
<div className="p-5 text-left bg-bolt-elements-actions-background">
<ActionList actions={actions} />
</div>
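
The allActionFinished flag above is derived with a negated find(); an equivalent and arguably more direct form (a sketch, not part of this commit) is:

// Equivalent to !actions.find((action) => action.status !== 'complete'):
// true only once every action has reached 'complete'.
const finished = actions.every((action) => action.status === 'complete');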

View File

@ -23,44 +23,9 @@ import { ImportButtons } from '~/components/chat/chatExportAndImport/ImportButto
import { ExamplePrompts } from '~/components/chat/ExamplePrompts';
import GitCloneButton from './GitCloneButton';
// @ts-ignore TODO: Introduce proper types
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const ModelSelector = ({ model, setModel, provider, setProvider, modelList, providerList, apiKeys }) => {
return (
<div className="mb-2 flex gap-2 flex-col sm:flex-row">
<select
value={provider?.name}
onChange={(e) => {
setProvider(providerList.find((p: ProviderInfo) => p.name === e.target.value));
const firstModel = [...modelList].find((m) => m.provider == e.target.value);
setModel(firstModel ? firstModel.name : '');
}}
className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all"
>
{providerList.map((provider: ProviderInfo) => (
<option key={provider.name} value={provider.name}>
{provider.name}
</option>
))}
</select>
<select
key={provider?.name}
value={model}
onChange={(e) => setModel(e.target.value)}
className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all lg:max-w-[70%]"
>
{[...modelList]
.filter((e) => e.provider == provider?.name && e.name)
.map((modelOption) => (
<option key={modelOption.name} value={modelOption.name}>
{modelOption.label}
</option>
))}
</select>
</div>
);
};
import FilePreview from './FilePreview';
import { ModelSelector } from '~/components/chat/ModelSelector';
import { SpeechRecognitionButton } from '~/components/chat/SpeechRecognition';
const TEXTAREA_MIN_HEIGHT = 76;
@ -86,6 +51,10 @@ interface BaseChatProps {
enhancePrompt?: () => void;
importChat?: (description: string, messages: Message[]) => Promise<void>;
exportChat?: () => void;
uploadedFiles?: File[];
setUploadedFiles?: (files: File[]) => void;
imageDataList?: string[];
setImageDataList?: (dataList: string[]) => void;
}
export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
@ -97,20 +66,24 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
showChat = true,
chatStarted = false,
isStreaming = false,
enhancingPrompt = false,
promptEnhanced = false,
messages,
input = '',
model,
setModel,
provider,
setProvider,
sendMessage,
input = '',
enhancingPrompt,
handleInputChange,
promptEnhanced,
enhancePrompt,
sendMessage,
handleStop,
importChat,
exportChat,
uploadedFiles = [],
setUploadedFiles,
imageDataList = [],
setImageDataList,
messages,
},
ref,
) => {
@ -118,7 +91,11 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
const [apiKeys, setApiKeys] = useState<Record<string, string>>({});
const [modelList, setModelList] = useState(MODEL_LIST);
const [isModelSettingsCollapsed, setIsModelSettingsCollapsed] = useState(false);
const [isListening, setIsListening] = useState(false);
const [recognition, setRecognition] = useState<SpeechRecognition | null>(null);
const [transcript, setTranscript] = useState('');
console.log(transcript);
useEffect(() => {
// Load API keys from cookies on component mount
try {
@ -141,8 +118,72 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
initializeModelList().then((modelList) => {
setModelList(modelList);
});
if (typeof window !== 'undefined' && ('SpeechRecognition' in window || 'webkitSpeechRecognition' in window)) {
const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
const recognition = new SpeechRecognition();
recognition.continuous = true;
recognition.interimResults = true;
recognition.onresult = (event) => {
const transcript = Array.from(event.results)
.map((result) => result[0])
.map((result) => result.transcript)
.join('');
setTranscript(transcript);
if (handleInputChange) {
const syntheticEvent = {
target: { value: transcript },
} as React.ChangeEvent<HTMLTextAreaElement>;
handleInputChange(syntheticEvent);
}
};
recognition.onerror = (event) => {
console.error('Speech recognition error:', event.error);
setIsListening(false);
};
setRecognition(recognition);
}
}, []);
const startListening = () => {
if (recognition) {
recognition.start();
setIsListening(true);
}
};
const stopListening = () => {
if (recognition) {
recognition.stop();
setIsListening(false);
}
};
const handleSendMessage = (event: React.UIEvent, messageInput?: string) => {
if (sendMessage) {
sendMessage(event, messageInput);
if (recognition) {
recognition.abort(); // Stop current recognition
setTranscript(''); // Clear transcript
setIsListening(false);
// Clear the input by triggering handleInputChange with empty value
if (handleInputChange) {
const syntheticEvent = {
target: { value: '' },
} as React.ChangeEvent<HTMLTextAreaElement>;
handleInputChange(syntheticEvent);
}
}
}
};
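
Note that handleSendMessage calls recognition.abort() rather than recognition.stop(). Per the Web Speech API, stop() ends capture but still tries to deliver a final result, while abort() ends capture and discards pending results, which is the right choice here: a late onresult would otherwise repopulate the input that was just cleared. A minimal sketch of the contrast:

// stop(): capture ends, but a final onresult may still fire for audio captured so far.
recognition.stop();
// abort(): capture ends immediately and no further results are delivered.
recognition.abort();
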
const updateApiKey = (provider: string, key: string) => {
try {
const updatedApiKeys = { ...apiKeys, [provider]: key };
@ -160,6 +201,58 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
}
};
const handleFileUpload = () => {
const input = document.createElement('input');
input.type = 'file';
input.accept = 'image/*';
input.onchange = async (e) => {
const file = (e.target as HTMLInputElement).files?.[0];
if (file) {
const reader = new FileReader();
reader.onload = (e) => {
const base64Image = e.target?.result as string;
setUploadedFiles?.([...uploadedFiles, file]);
setImageDataList?.([...imageDataList, base64Image]);
};
reader.readAsDataURL(file);
}
};
input.click();
};
const handlePaste = async (e: React.ClipboardEvent) => {
const items = e.clipboardData?.items;
if (!items) {
return;
}
for (const item of items) {
if (item.type.startsWith('image/')) {
e.preventDefault();
const file = item.getAsFile();
if (file) {
const reader = new FileReader();
reader.onload = (e) => {
const base64Image = e.target?.result as string;
setUploadedFiles?.([...uploadedFiles, file]);
setImageDataList?.([...imageDataList, base64Image]);
};
reader.readAsDataURL(file);
}
break;
}
}
};
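
The same FileReader-to-data-URL sequence appears three times in this file (upload, paste, and the drop handler further down). A hypothetical shared helper (not part of this commit) could collapse the duplication:

// Hypothetical helper: resolve a File to a base64 data URL.
const fileToDataURL = (file: File): Promise<string> =>
  new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = () => resolve(reader.result as string);
    reader.onerror = () => reject(reader.error);
    reader.readAsDataURL(file);
  });

// Each handler would then reduce to:
// const base64Image = await fileToDataURL(file);
// setUploadedFiles?.([...uploadedFiles, file]);
// setImageDataList?.([...imageDataList, base64Image]);
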
const baseChat = (
<div
ref={ref}
@ -276,7 +369,14 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
)}
</div>
</div>
<FilePreview
files={uploadedFiles}
imageDataList={imageDataList}
onRemove={(index) => {
setUploadedFiles?.(uploadedFiles.filter((_, i) => i !== index));
setImageDataList?.(imageDataList.filter((_, i) => i !== index));
}}
/>
<div
className={classNames(
'relative shadow-xs border border-bolt-elements-borderColor backdrop-blur rounded-lg',
@ -284,9 +384,41 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
>
<textarea
ref={textareaRef}
className={
'w-full pl-4 pt-4 pr-16 focus:outline-none resize-none text-bolt-elements-textPrimary placeholder-bolt-elements-textTertiary bg-transparent text-sm'
}
className={classNames(
'w-full pl-4 pt-4 pr-16 focus:outline-none resize-none text-bolt-elements-textPrimary placeholder-bolt-elements-textTertiary bg-transparent text-sm',
'transition-all duration-200',
'hover:border-bolt-elements-focus',
)}
onDragEnter={(e) => {
e.preventDefault();
e.currentTarget.style.border = '2px solid #1488fc';
}}
onDragOver={(e) => {
e.preventDefault();
e.currentTarget.style.border = '2px solid #1488fc';
}}
onDragLeave={(e) => {
e.preventDefault();
e.currentTarget.style.border = '1px solid var(--bolt-elements-borderColor)';
}}
onDrop={(e) => {
e.preventDefault();
e.currentTarget.style.border = '1px solid var(--bolt-elements-borderColor)';
const files = Array.from(e.dataTransfer.files);
files.forEach((file) => {
if (file.type.startsWith('image/')) {
const reader = new FileReader();
reader.onload = (e) => {
const base64Image = e.target?.result as string;
setUploadedFiles?.([...uploadedFiles, file]);
setImageDataList?.([...imageDataList, base64Image]);
};
reader.readAsDataURL(file);
}
});
}}
onKeyDown={(event) => {
if (event.key === 'Enter') {
if (event.shiftKey) {
@ -295,13 +427,19 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
event.preventDefault();
sendMessage?.(event);
if (isStreaming) {
handleStop?.();
return;
}
handleSendMessage?.(event);
}
}}
value={input}
onChange={(event) => {
handleInputChange?.(event);
}}
onPaste={handlePaste}
style={{
minHeight: TEXTAREA_MIN_HEIGHT,
maxHeight: TEXTAREA_MAX_HEIGHT,
@ -312,7 +450,7 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
<ClientOnly>
{() => (
<SendButton
show={input.length > 0 || isStreaming}
show={input.length > 0 || isStreaming || uploadedFiles.length > 0}
isStreaming={isStreaming}
onClick={(event) => {
if (isStreaming) {
@ -320,21 +458,28 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
return;
}
sendMessage?.(event);
if (input.length > 0 || uploadedFiles.length > 0) {
handleSendMessage?.(event);
}
}}
/>
)}
</ClientOnly>
<div className="flex justify-between items-center text-sm p-4 pt-2">
<div className="flex gap-1 items-center">
<IconButton title="Upload file" className="transition-all" onClick={() => handleFileUpload()}>
<div className="i-ph:paperclip text-xl"></div>
</IconButton>
<IconButton
title="Enhance prompt"
disabled={input.length === 0 || enhancingPrompt}
className={classNames('transition-all', {
'opacity-100!': enhancingPrompt,
'text-bolt-elements-item-contentAccent! pr-1.5 enabled:hover:bg-bolt-elements-item-backgroundAccent!':
promptEnhanced,
})}
className={classNames(
'transition-all',
enhancingPrompt ? 'opacity-100' : '',
promptEnhanced ? 'text-bolt-elements-item-contentAccent' : '',
promptEnhanced ? 'pr-1.5' : '',
promptEnhanced ? 'enabled:hover:bg-bolt-elements-item-backgroundAccent' : '',
)}
onClick={() => enhancePrompt?.()}
>
{enhancingPrompt ? (
@ -349,6 +494,13 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
</>
)}
</IconButton>
<SpeechRecognitionButton
isListening={isListening}
onStart={startListening}
onStop={stopListening}
disabled={isStreaming}
/>
{chatStarted && <ClientOnly>{() => <ExportChatButton exportChat={exportChat} />}</ClientOnly>}
</div>
{input.length > 3 ? (
@ -368,7 +520,15 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
<GitCloneButton importChat={importChat} />
</div>
)}
{!chatStarted && ExamplePrompts(sendMessage)}
{!chatStarted &&
ExamplePrompts((event, messageInput) => {
if (isStreaming) {
handleStop?.();
return;
}
handleSendMessage?.(event, messageInput);
})}
</div>
<ClientOnly>{() => <Workbench chatStarted={chatStarted} isStreaming={isStreaming} />}</ClientOnly>
</div>

View File

@ -12,7 +12,6 @@ import { useMessageParser, usePromptEnhancer, useShortcuts, useSnapScroll } from
import { description, useChatHistory } from '~/lib/persistence';
import { chatStore } from '~/lib/stores/chat';
import { workbenchStore } from '~/lib/stores/workbench';
import { fileModificationsToHTML } from '~/utils/diff';
import { DEFAULT_MODEL, DEFAULT_PROVIDER, PROMPT_COOKIE_KEY, PROVIDER_LIST } from '~/utils/constants';
import { cubicEasingFn } from '~/utils/easings';
import { createScopedLogger, renderLogger } from '~/utils/logger';
@ -89,8 +88,10 @@ export const ChatImpl = memo(
useShortcuts();
const textareaRef = useRef<HTMLTextAreaElement>(null);
const [chatStarted, setChatStarted] = useState(initialMessages.length > 0);
const [uploadedFiles, setUploadedFiles] = useState<File[]>([]); // Move here
const [imageDataList, setImageDataList] = useState<string[]>([]); // Move here
const [model, setModel] = useState(() => {
const savedModel = Cookies.get('selectedModel');
return savedModel || DEFAULT_MODEL;
@ -206,8 +207,6 @@ export const ChatImpl = memo(
runAnimation();
if (fileModifications !== undefined) {
const diff = fileModificationsToHTML(fileModifications);
/**
* If we have file modifications we append a new user message manually since we have to prefix
* the user input with the file modifications and we don't want the new user input to appear
@ -215,7 +214,19 @@ export const ChatImpl = memo(
* manually reset the input and we'd have to manually pass in file attachments. However, those
* aren't relevant here.
*/
append({ role: 'user', content: `[Model: ${model}]\n\n[Provider: ${provider.name}]\n\n${diff}\n\n${_input}` });
append({
role: 'user',
content: [
{
type: 'text',
text: `[Model: ${model}]\n\n[Provider: ${provider.name}]\n\n${_input}`,
},
...imageDataList.map((imageData) => ({
type: 'image',
image: imageData,
})),
] as any, // Type assertion to bypass compiler check
});
/**
* After sending a new message we reset all modifications since the model
@ -223,12 +234,28 @@ export const ChatImpl = memo(
*/
workbenchStore.resetAllFileModifications();
} else {
append({ role: 'user', content: `[Model: ${model}]\n\n[Provider: ${provider.name}]\n\n${_input}` });
append({
role: 'user',
content: [
{
type: 'text',
text: `[Model: ${model}]\n\n[Provider: ${provider.name}]\n\n${_input}`,
},
...imageDataList.map((imageData) => ({
type: 'image',
image: imageData,
})),
] as any, // Type assertion to bypass compiler check
});
}
setInput('');
Cookies.remove(PROMPT_COOKIE_KEY);
// Add file cleanup here
setUploadedFiles([]);
setImageDataList([]);
resetEnhancer();
textareaRef.current?.blur();
@ -321,6 +348,10 @@ export const ChatImpl = memo(
apiKeys,
);
}}
uploadedFiles={uploadedFiles}
setUploadedFiles={setUploadedFiles}
imageDataList={imageDataList}
setImageDataList={setImageDataList}
/>
);
},
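
For reference, the multi-part content appended above produces a message of roughly this shape (values illustrative; the `as any` cast suggests the installed ai package's Message type does not yet model image parts):

const message = {
  role: 'user' as const,
  content: [
    {
      type: 'text',
      text: '[Model: gpt-4o]\n\n[Provider: OpenAI]\n\nDescribe this screenshot',
    },
    { type: 'image', image: 'data:image/png;base64,iVBORw0KGgo...' },
  ],
};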

View File

@ -0,0 +1,35 @@
import React from 'react';
interface FilePreviewProps {
files: File[];
imageDataList: string[];
onRemove: (index: number) => void;
}
const FilePreview: React.FC<FilePreviewProps> = ({ files, imageDataList, onRemove }) => {
if (!files || files.length === 0) {
return null;
}
return (
<div className="flex flex-row overflow-x-auto -mt-2">
{files.map((file, index) => (
<div key={file.name + file.size} className="mr-2 relative">
{imageDataList[index] && (
<div className="relative pt-4 pr-4">
<img src={imageDataList[index]} alt={file.name} className="max-h-20" />
<button
onClick={() => onRemove(index)}
className="absolute top-1 right-1 z-10 bg-black rounded-full w-5 h-5 shadow-md hover:bg-gray-900 transition-colors flex items-center justify-center"
>
<div className="i-ph:x w-3 h-3 text-gray-200" />
</button>
</div>
)}
</div>
))}
</div>
);
};
export default FilePreview;

View File

@ -79,7 +79,7 @@ ${content}
role: 'assistant',
content: `I'll help you set up these files.${binaryFilesMessage}
<boltArtifact id="imported-files" title="Imported Files">
<boltArtifact id="imported-files" title="Imported Files" type="bundled">
${fileArtifacts.join('\n\n')}
</boltArtifact>`,
id: generateId(),

View File

@ -0,0 +1,63 @@
import type { ProviderInfo } from '~/types/model';
import type { ModelInfo } from '~/utils/types';
interface ModelSelectorProps {
model?: string;
setModel?: (model: string) => void;
provider?: ProviderInfo;
setProvider?: (provider: ProviderInfo) => void;
modelList: ModelInfo[];
providerList: ProviderInfo[];
apiKeys: Record<string, string>;
}
export const ModelSelector = ({
model,
setModel,
provider,
setProvider,
modelList,
providerList,
}: ModelSelectorProps) => {
return (
<div className="mb-2 flex gap-2 flex-col sm:flex-row">
<select
value={provider?.name ?? ''}
onChange={(e) => {
const newProvider = providerList.find((p: ProviderInfo) => p.name === e.target.value);
if (newProvider && setProvider) {
setProvider(newProvider);
}
const firstModel = [...modelList].find((m) => m.provider === e.target.value);
if (firstModel && setModel) {
setModel(firstModel.name);
}
}}
className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all"
>
{providerList.map((provider: ProviderInfo) => (
<option key={provider.name} value={provider.name}>
{provider.name}
</option>
))}
</select>
<select
key={provider?.name}
value={model}
onChange={(e) => setModel?.(e.target.value)}
className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all lg:max-w-[70%]"
>
{[...modelList]
.filter((e) => e.provider == provider?.name && e.name)
.map((modelOption) => (
<option key={modelOption.name} value={modelOption.name}>
{modelOption.label}
</option>
))}
</select>
</div>
);
};
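
Note that apiKeys is declared in ModelSelectorProps but never destructured, so the component currently ignores it. An illustrative call site (prop names from this diff; the surrounding state is assumed):

<ModelSelector
  model={model}
  setModel={setModel}
  provider={provider}
  setProvider={setProvider}
  modelList={modelList}
  providerList={PROVIDER_LIST}
  apiKeys={apiKeys}
/>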

View File

@ -4,11 +4,12 @@ interface SendButtonProps {
show: boolean;
isStreaming?: boolean;
onClick?: (event: React.MouseEvent<HTMLButtonElement, MouseEvent>) => void;
onImagesSelected?: (images: File[]) => void;
}
const customEasingFn = cubicBezier(0.4, 0, 0.2, 1);
export function SendButton({ show, isStreaming, onClick }: SendButtonProps) {
export const SendButton = ({ show, isStreaming, onClick }: SendButtonProps) => {
return (
<AnimatePresence>
{show ? (
@ -30,4 +31,4 @@ export function SendButton({ show, isStreaming, onClick }: SendButtonProps) {
) : null}
</AnimatePresence>
);
}
};

View File

@ -0,0 +1,28 @@
import { IconButton } from '~/components/ui/IconButton';
import { classNames } from '~/utils/classNames';
import React from 'react';
export const SpeechRecognitionButton = ({
isListening,
onStart,
onStop,
disabled,
}: {
isListening: boolean;
onStart: () => void;
onStop: () => void;
disabled: boolean;
}) => {
return (
<IconButton
title={isListening ? 'Stop listening' : 'Start speech recognition'}
disabled={disabled}
className={classNames('transition-all', {
'text-bolt-elements-item-contentAccent': isListening,
})}
onClick={isListening ? onStop : onStart}
>
{isListening ? <div className="i-ph:microphone-slash text-xl" /> : <div className="i-ph:microphone text-xl" />}
</IconButton>
);
};

View File

@ -2,26 +2,52 @@
* @ts-nocheck
* Preventing TS checks with files presented in the video for a better presentation.
*/
import { modificationsRegex } from '~/utils/diff';
import { MODEL_REGEX, PROVIDER_REGEX } from '~/utils/constants';
import { Markdown } from './Markdown';
interface UserMessageProps {
content: string;
content: string | Array<{ type: string; text?: string; image?: string }>;
}
export function UserMessage({ content }: UserMessageProps) {
if (Array.isArray(content)) {
const textItem = content.find((item) => item.type === 'text');
const textContent = sanitizeUserMessage(textItem?.text || '');
const images = content.filter((item) => item.type === 'image' && item.image);
return (
<div className="overflow-hidden pt-[4px]">
<div className="flex items-start gap-4">
<div className="flex-1">
<Markdown limitedMarkdown>{textContent}</Markdown>
</div>
{images.length > 0 && (
<div className="flex-shrink-0 w-[160px]">
{images.map((item, index) => (
<div key={index} className="relative">
<img
src={item.image}
alt={`Uploaded image ${index + 1}`}
className="w-full h-[160px] rounded-lg object-cover border border-bolt-elements-borderColor"
/>
</div>
))}
</div>
)}
</div>
</div>
);
}
const textContent = sanitizeUserMessage(content);
return (
<div className="overflow-hidden pt-[4px]">
<Markdown limitedMarkdown>{sanitizeUserMessage(content)}</Markdown>
<Markdown limitedMarkdown>{textContent}</Markdown>
</div>
);
}
function sanitizeUserMessage(content: string) {
return content
.replace(modificationsRegex, '')
.replace(MODEL_REGEX, 'Using: $1')
.replace(PROVIDER_REGEX, ' ($1)\n\n')
.trim();
return content.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '');
}

View File

@ -33,7 +33,7 @@ const menuVariants = {
type DialogContent = { type: 'delete'; item: ChatHistoryItem } | null;
export function Menu() {
export const Menu = () => {
const { duplicateCurrentChat, exportChat } = useChatHistory();
const menuRef = useRef<HTMLDivElement>(null);
const [list, setList] = useState<ChatHistoryItem[]>([]);
@ -206,4 +206,4 @@ export function Menu() {
</div>
</motion.div>
);
}
};

View File

@ -4,11 +4,16 @@ import { IconButton } from '~/components/ui/IconButton';
import { workbenchStore } from '~/lib/stores/workbench';
import { PortDropdown } from './PortDropdown';
type ResizeSide = 'left' | 'right' | null;
export const Preview = memo(() => {
const iframeRef = useRef<HTMLIFrameElement>(null);
const containerRef = useRef<HTMLDivElement>(null);
const inputRef = useRef<HTMLInputElement>(null);
const [activePreviewIndex, setActivePreviewIndex] = useState(0);
const [isPortDropdownOpen, setIsPortDropdownOpen] = useState(false);
const [isFullscreen, setIsFullscreen] = useState(false);
const hasSelectedPreview = useRef(false);
const previews = useStore(workbenchStore.previews);
const activePreview = previews[activePreviewIndex];
@ -16,6 +21,23 @@ export const Preview = memo(() => {
const [url, setUrl] = useState('');
const [iframeUrl, setIframeUrl] = useState<string | undefined>();
// Toggle between responsive mode and device mode
const [isDeviceModeOn, setIsDeviceModeOn] = useState(false);
// Use percentage for width
const [widthPercent, setWidthPercent] = useState<number>(37.5); // 375px assuming 1000px window width initially
const resizingState = useRef({
isResizing: false,
side: null as ResizeSide,
startX: 0,
startWidthPercent: 37.5,
windowWidth: window.innerWidth,
});
// Define the scaling factor
const SCALING_FACTOR = 2; // Adjust this value to increase/decrease sensitivity
useEffect(() => {
if (!activePreview) {
setUrl('');
@ -25,10 +47,9 @@ export const Preview = memo(() => {
}
const { baseUrl } = activePreview;
setUrl(baseUrl);
setIframeUrl(baseUrl);
}, [activePreview, iframeUrl]);
}, [activePreview]);
const validateUrl = useCallback(
(value: string) => {
@ -56,14 +77,13 @@ export const Preview = memo(() => {
[],
);
// when previews change, display the lowest port if user hasn't selected a preview
// When previews change, display the lowest port if user hasn't selected a preview
useEffect(() => {
if (previews.length > 1 && !hasSelectedPreview.current) {
const minPortIndex = previews.reduce(findMinPortIndex, 0);
setActivePreviewIndex(minPortIndex);
}
}, [previews]);
}, [previews, findMinPortIndex]);
const reloadPreview = () => {
if (iframeRef.current) {
@ -71,13 +91,134 @@ export const Preview = memo(() => {
}
};
const toggleFullscreen = async () => {
if (!isFullscreen && containerRef.current) {
await containerRef.current.requestFullscreen();
} else if (document.fullscreenElement) {
await document.exitFullscreen();
}
};
useEffect(() => {
const handleFullscreenChange = () => {
setIsFullscreen(!!document.fullscreenElement);
};
document.addEventListener('fullscreenchange', handleFullscreenChange);
return () => {
document.removeEventListener('fullscreenchange', handleFullscreenChange);
};
}, []);
const toggleDeviceMode = () => {
setIsDeviceModeOn((prev) => !prev);
};
const startResizing = (e: React.MouseEvent, side: ResizeSide) => {
if (!isDeviceModeOn) {
return;
}
// Prevent text selection
document.body.style.userSelect = 'none';
resizingState.current.isResizing = true;
resizingState.current.side = side;
resizingState.current.startX = e.clientX;
resizingState.current.startWidthPercent = widthPercent;
resizingState.current.windowWidth = window.innerWidth;
document.addEventListener('mousemove', onMouseMove);
document.addEventListener('mouseup', onMouseUp);
e.preventDefault(); // Prevent any text selection on mousedown
};
const onMouseMove = (e: MouseEvent) => {
if (!resizingState.current.isResizing) {
return;
}
const dx = e.clientX - resizingState.current.startX;
const windowWidth = resizingState.current.windowWidth;
// Apply scaling factor to increase sensitivity
const dxPercent = (dx / windowWidth) * 100 * SCALING_FACTOR;
let newWidthPercent = resizingState.current.startWidthPercent;
if (resizingState.current.side === 'right') {
newWidthPercent = resizingState.current.startWidthPercent + dxPercent;
} else if (resizingState.current.side === 'left') {
newWidthPercent = resizingState.current.startWidthPercent - dxPercent;
}
// Clamp the width between 10% and 90%
newWidthPercent = Math.max(10, Math.min(newWidthPercent, 90));
setWidthPercent(newWidthPercent);
};
const onMouseUp = () => {
resizingState.current.isResizing = false;
resizingState.current.side = null;
document.removeEventListener('mousemove', onMouseMove);
document.removeEventListener('mouseup', onMouseUp);
// Restore text selection
document.body.style.userSelect = '';
};
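
A worked example of the drag math above, assuming a 1000px-wide window and a 50px drag to the right on the right-hand handle:

const SCALING_FACTOR = 2;
const dx = 50;                                             // pointer travel in px
const dxPercent = (dx / 1000) * 100 * SCALING_FACTOR;      // => 10 (percent of window)
const next = Math.max(10, Math.min(37.5 + dxPercent, 90)); // 37.5% start => 47.5%
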
// Handle window resize to ensure widthPercent remains valid
useEffect(() => {
const handleWindowResize = () => {
/*
* Optional: Adjust widthPercent if necessary
* For now, since widthPercent is relative, no action is needed
*/
};
window.addEventListener('resize', handleWindowResize);
return () => {
window.removeEventListener('resize', handleWindowResize);
};
}, []);
// A small helper component for the handle's "grip" icon
const GripIcon = () => (
<div
style={{
display: 'flex',
justifyContent: 'center',
alignItems: 'center',
height: '100%',
pointerEvents: 'none',
}}
>
<div
style={{
color: 'rgba(0,0,0,0.5)',
fontSize: '10px',
lineHeight: '5px',
userSelect: 'none',
marginLeft: '1px',
}}
>
••• •••
</div>
</div>
);
return (
<div className="w-full h-full flex flex-col">
<div ref={containerRef} className="w-full h-full flex flex-col relative">
{isPortDropdownOpen && (
<div className="z-iframe-overlay w-full h-full absolute" onClick={() => setIsPortDropdownOpen(false)} />
)}
<div className="bg-bolt-elements-background-depth-2 p-2 flex items-center gap-1.5">
<IconButton icon="i-ph:arrow-clockwise" onClick={reloadPreview} />
<div
className="flex items-center gap-1 flex-grow bg-bolt-elements-preview-addressBar-background border border-bolt-elements-borderColor text-bolt-elements-preview-addressBar-text rounded-full px-3 py-1 text-sm hover:bg-bolt-elements-preview-addressBar-backgroundHover hover:focus-within:bg-bolt-elements-preview-addressBar-backgroundActive focus-within:bg-bolt-elements-preview-addressBar-backgroundActive
focus-within-border-bolt-elements-borderColorActive focus-within:text-bolt-elements-preview-addressBar-textActive"
@ -101,6 +242,7 @@ export const Preview = memo(() => {
}}
/>
</div>
{previews.length > 1 && (
<PortDropdown
activePreviewIndex={activePreviewIndex}
@ -111,13 +253,93 @@ export const Preview = memo(() => {
previews={previews}
/>
)}
{/* Device mode toggle button */}
<IconButton
icon="i-ph:devices"
onClick={toggleDeviceMode}
title={isDeviceModeOn ? 'Switch to Responsive Mode' : 'Switch to Device Mode'}
/>
{/* Fullscreen toggle button */}
<IconButton
icon={isFullscreen ? 'i-ph:arrows-in' : 'i-ph:arrows-out'}
onClick={toggleFullscreen}
title={isFullscreen ? 'Exit Full Screen' : 'Full Screen'}
/>
</div>
<div className="flex-1 border-t border-bolt-elements-borderColor">
{activePreview ? (
<iframe ref={iframeRef} className="border-none w-full h-full bg-white" src={iframeUrl} />
) : (
<div className="flex w-full h-full justify-center items-center bg-white">No preview available</div>
)}
<div className="flex-1 border-t border-bolt-elements-borderColor flex justify-center items-center overflow-auto">
<div
style={{
width: isDeviceModeOn ? `${widthPercent}%` : '100%',
height: '100%', // Always full height
overflow: 'visible',
background: '#fff',
position: 'relative',
display: 'flex',
}}
>
{activePreview ? (
<iframe ref={iframeRef} className="border-none w-full h-full bg-white" src={iframeUrl} allowFullScreen />
) : (
<div className="flex w-full h-full justify-center items-center bg-white">No preview available</div>
)}
{isDeviceModeOn && (
<>
{/* Left handle */}
<div
onMouseDown={(e) => startResizing(e, 'left')}
style={{
position: 'absolute',
top: 0,
left: 0,
width: '15px',
marginLeft: '-15px',
height: '100%',
cursor: 'ew-resize',
background: 'rgba(255,255,255,.2)',
display: 'flex',
alignItems: 'center',
justifyContent: 'center',
transition: 'background 0.2s',
userSelect: 'none',
}}
onMouseOver={(e) => (e.currentTarget.style.background = 'rgba(255,255,255,.5)')}
onMouseOut={(e) => (e.currentTarget.style.background = 'rgba(255,255,255,.2)')}
title="Drag to resize width"
>
<GripIcon />
</div>
{/* Right handle */}
<div
onMouseDown={(e) => startResizing(e, 'right')}
style={{
position: 'absolute',
top: 0,
right: 0,
width: '15px',
marginRight: '-15px',
height: '100%',
cursor: 'ew-resize',
background: 'rgba(255,255,255,.2)',
display: 'flex',
alignItems: 'center',
justifyContent: 'center',
transition: 'background 0.2s',
userSelect: 'none',
}}
onMouseOver={(e) => (e.currentTarget.style.background = 'rgba(255,255,255,.5)')}
onMouseOut={(e) => (e.currentTarget.style.background = 'rgba(255,255,255,.2)')}
title="Drag to resize width"
>
<GripIcon />
</div>
</>
)}
</div>
</div>
</div>
);

View File

@ -51,7 +51,7 @@ export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Re
export function getBaseURL(cloudflareEnv: Env, provider: string) {
switch (provider) {
case 'Together':
return env.TOGETHER_API_BASE_URL || cloudflareEnv.TOGETHER_API_BASE_URL;
return env.TOGETHER_API_BASE_URL || cloudflareEnv.TOGETHER_API_BASE_URL || 'https://api.together.xyz/v1';
case 'OpenAILike':
return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
case 'LMStudio':

View File

@ -128,7 +128,12 @@ export function getXAIModel(apiKey: OptionalApiKey, model: string) {
}
export function getModel(provider: string, model: string, env: Env, apiKeys?: Record<string, string>) {
const apiKey = getAPIKey(env, provider, apiKeys);
/*
* let apiKey; // Declare first
* let baseURL;
*/
const apiKey = getAPIKey(env, provider, apiKeys); // Then assign
const baseURL = getBaseURL(env, provider);
switch (provider) {

View File

@ -1,11 +1,8 @@
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-nocheck TODO: Provider proper types
import { convertToCoreMessages, streamText as _streamText } from 'ai';
import { getModel } from '~/lib/.server/llm/model';
import { MAX_TOKENS } from './constants';
import { getSystemPrompt } from './prompts';
import { DEFAULT_MODEL, DEFAULT_PROVIDER, MODEL_LIST, MODEL_REGEX, PROVIDER_REGEX } from '~/utils/constants';
import { DEFAULT_MODEL, DEFAULT_PROVIDER, getModelList, MODEL_REGEX, PROVIDER_REGEX } from '~/utils/constants';
interface ToolResult<Name extends string, Args, Result> {
toolCallId: string;
@ -26,24 +23,50 @@ export type Messages = Message[];
export type StreamingOptions = Omit<Parameters<typeof _streamText>[0], 'model'>;
function extractPropertiesFromMessage(message: Message): { model: string; provider: string; content: string } {
// Extract model
const modelMatch = message.content.match(MODEL_REGEX);
const textContent = Array.isArray(message.content)
? message.content.find((item) => item.type === 'text')?.text || ''
: message.content;
const modelMatch = textContent.match(MODEL_REGEX);
const providerMatch = textContent.match(PROVIDER_REGEX);
/*
* Extract model
* const modelMatch = message.content.match(MODEL_REGEX);
*/
const model = modelMatch ? modelMatch[1] : DEFAULT_MODEL;
// Extract provider
const providerMatch = message.content.match(PROVIDER_REGEX);
const provider = providerMatch ? providerMatch[1] : DEFAULT_PROVIDER;
/*
* Extract provider
* const providerMatch = message.content.match(PROVIDER_REGEX);
*/
const provider = providerMatch ? providerMatch[1] : DEFAULT_PROVIDER.name;
// Remove model and provider lines from content
const cleanedContent = message.content.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '').trim();
const cleanedContent = Array.isArray(message.content)
? message.content.map((item) => {
if (item.type === 'text') {
return {
type: 'text',
text: item.text?.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, ''),
};
}
return item; // Preserve image_url and other types as is
})
: textContent.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '');
return { model, provider, content: cleanedContent };
}
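
Illustratively, a user message arrives with the model and provider prefixed to its text part, and the regexes (defined in ~/utils/constants; shapes assumed here for the example) pull them back out:

// Assumed shapes, for illustration only:
const MODEL_REGEX = /\[Model: (.*?)\]/;
const PROVIDER_REGEX = /\[Provider: (.*?)\]/;

const raw = '[Model: gpt-4o]\n\n[Provider: OpenAI]\n\nBuild me a todo app';
const model = raw.match(MODEL_REGEX)?.[1];        // 'gpt-4o'
const provider = raw.match(PROVIDER_REGEX)?.[1];  // 'OpenAI'
const cleaned = raw.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '').trim();
// cleaned === 'Build me a todo app'
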
export function streamText(messages: Messages, env: Env, options?: StreamingOptions, apiKeys?: Record<string, string>) {
export async function streamText(
messages: Messages,
env: Env,
options?: StreamingOptions,
apiKeys?: Record<string, string>,
) {
let currentModel = DEFAULT_MODEL;
let currentProvider = DEFAULT_PROVIDER;
let currentProvider = DEFAULT_PROVIDER.name;
const MODEL_LIST = await getModelList(apiKeys || {});
const processedMessages = messages.map((message) => {
if (message.role === 'user') {
const { model, provider, content } = extractPropertiesFromMessage(message);
@ -65,10 +88,10 @@ export function streamText(messages: Messages, env: Env, options?: StreamingOpti
const dynamicMaxTokens = modelDetails && modelDetails.maxTokenAllowed ? modelDetails.maxTokenAllowed : MAX_TOKENS;
return _streamText({
model: getModel(currentProvider, currentModel, env, apiKeys),
model: getModel(currentProvider, currentModel, env, apiKeys) as any,
system: getSystemPrompt(),
maxTokens: dynamicMaxTokens,
messages: convertToCoreMessages(processedMessages),
messages: convertToCoreMessages(processedMessages as any),
...options,
});
}

View File

@ -29,6 +29,7 @@ exports[`StreamingMessageParser > valid artifacts with actions > should correctl
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
"type": undefined,
}
`;
@ -37,6 +38,7 @@ exports[`StreamingMessageParser > valid artifacts with actions > should correctl
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
"type": undefined,
}
`;
@ -96,6 +98,7 @@ exports[`StreamingMessageParser > valid artifacts with actions > should correctl
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
"type": undefined,
}
`;
@ -104,6 +107,7 @@ exports[`StreamingMessageParser > valid artifacts with actions > should correctl
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
"type": undefined,
}
`;
@ -112,6 +116,7 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
"type": undefined,
}
`;
@ -120,6 +125,7 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
"type": undefined,
}
`;
@ -128,6 +134,7 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
"type": "bundled",
}
`;
@ -136,6 +143,7 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
"type": "bundled",
}
`;
@ -144,6 +152,7 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
"type": undefined,
}
`;
@ -152,6 +161,7 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
"type": undefined,
}
`;
@ -160,6 +170,7 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
"type": undefined,
}
`;
@ -168,6 +179,7 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
"type": undefined,
}
`;
@ -176,6 +188,7 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
"type": undefined,
}
`;
@ -184,6 +197,7 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
"type": undefined,
}
`;
@ -192,6 +206,7 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
"type": undefined,
}
`;
@ -200,6 +215,7 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
"type": undefined,
}
`;
@ -208,6 +224,7 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
"type": undefined,
}
`;
@ -216,5 +233,6 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
"type": undefined,
}
`;

View File

@ -59,7 +59,11 @@ describe('StreamingMessageParser', () => {
},
],
[
['Some text before <boltArti', 'fact', ' title="Some title" id="artifact_1">foo</boltArtifact> Some more text'],
[
'Some text before <boltArti',
'fact',
' title="Some title" id="artifact_1" type="bundled" >foo</boltArtifact> Some more text',
],
{
output: 'Some text before Some more text',
callbacks: { onArtifactOpen: 1, onArtifactClose: 1, onActionOpen: 0, onActionClose: 0 },

View File

@ -192,6 +192,7 @@ export class StreamingMessageParser {
const artifactTag = input.slice(i, openTagEnd + 1);
const artifactTitle = this.#extractAttribute(artifactTag, 'title') as string;
const type = this.#extractAttribute(artifactTag, 'type') as string;
const artifactId = this.#extractAttribute(artifactTag, 'id') as string;
if (!artifactTitle) {
@ -207,6 +208,7 @@ export class StreamingMessageParser {
const currentArtifact = {
id: artifactId,
title: artifactTitle,
type,
} satisfies BoltArtifactData;
state.currentArtifact = currentArtifact;

View File

@ -212,9 +212,5 @@ function isBinaryFile(buffer: Uint8Array | undefined) {
* array buffer.
*/
function convertToBuffer(view: Uint8Array): Buffer {
const buffer = new Uint8Array(view.buffer, view.byteOffset, view.byteLength);
Object.setPrototypeOf(buffer, Buffer.prototype);
return buffer as Buffer;
return Buffer.from(view.buffer, view.byteOffset, view.byteLength);
}
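
The rewritten convertToBuffer relies on the Buffer.from(arrayBuffer, byteOffset, length) overload, which wraps the existing memory rather than copying it, achieving the same zero-copy effect as the old prototype swap without mutating the Uint8Array. A quick sketch:

const view = new Uint8Array([0x61, 0x62, 0x63]);
const buf = Buffer.from(view.buffer, view.byteOffset, view.byteLength);
buf.toString('utf8'); // 'abc': shares the same underlying ArrayBuffer, no copy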

View File

@ -19,6 +19,7 @@ import { description } from '~/lib/persistence';
export interface ArtifactState {
id: string;
title: string;
type?: string;
closed: boolean;
runner: ActionRunner;
}
@ -230,7 +231,7 @@ export class WorkbenchStore {
// TODO: what do we wanna do and how do we wanna recover from this?
}
addArtifact({ messageId, title, id }: ArtifactCallbackData) {
addArtifact({ messageId, title, id, type }: ArtifactCallbackData) {
const artifact = this.#getArtifact(messageId);
if (artifact) {
@ -245,6 +246,7 @@ export class WorkbenchStore {
id,
title,
closed: false,
type,
runner: new ActionRunner(webcontainer, () => this.boltTerminal),
});
}

View File

@ -1,6 +1,3 @@
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-nocheck TODO: Provider proper types
import { type ActionFunctionArgs } from '@remix-run/cloudflare';
import { MAX_RESPONSE_SEGMENTS, MAX_TOKENS } from '~/lib/.server/llm/constants';
import { CONTINUE_PROMPT } from '~/lib/.server/llm/prompts';
@ -11,8 +8,8 @@ export async function action(args: ActionFunctionArgs) {
return chatAction(args);
}
function parseCookies(cookieHeader) {
const cookies = {};
function parseCookies(cookieHeader: string) {
const cookies: any = {};
// Split the cookie string by semicolons and spaces
const items = cookieHeader.split(';').map((cookie) => cookie.trim());
@ -34,19 +31,19 @@ function parseCookies(cookieHeader) {
async function chatAction({ context, request }: ActionFunctionArgs) {
const { messages } = await request.json<{
messages: Messages;
model: string;
}>();
const cookieHeader = request.headers.get('Cookie');
// Parse the cookie's value (returns an object or null if no cookie exists)
const apiKeys = JSON.parse(parseCookies(cookieHeader).apiKeys || '{}');
const apiKeys = JSON.parse(parseCookies(cookieHeader || '').apiKeys || '{}');
const stream = new SwitchableStream();
try {
const options: StreamingOptions = {
toolChoice: 'none',
apiKeys,
onFinish: async ({ text: content, finishReason }) => {
if (finishReason !== 'length') {
return stream.close();
@ -63,7 +60,7 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
messages.push({ role: 'assistant', content });
messages.push({ role: 'user', content: CONTINUE_PROMPT });
const result = await streamText(messages, context.cloudflare.env, options);
const result = await streamText(messages, context.cloudflare.env, options, apiKeys);
return stream.switchSource(result.toAIStream());
},
@ -79,7 +76,7 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
contentType: 'text/plain; charset=utf-8',
},
});
} catch (error) {
} catch (error: any) {
console.log(error);
if (error.message?.includes('API key')) {

View File

@ -1,4 +1,5 @@
export interface BoltArtifactData {
id: string;
title: string;
type?: string | undefined;
}

View File

@ -1,3 +1,5 @@
interface Window {
showDirectoryPicker(): Promise<FileSystemDirectoryHandle>;
webkitSpeechRecognition: typeof SpeechRecognition;
SpeechRecognition: typeof SpeechRecognition;
}

View File

@ -3,7 +3,7 @@ import type { ModelInfo } from '~/utils/types';
export type ProviderInfo = {
staticModels: ModelInfo[];
name: string;
getDynamicModels?: () => Promise<ModelInfo[]>;
getDynamicModels?: (apiKeys?: Record<string, string>) => Promise<ModelInfo[]>;
getApiKeyLink?: string;
labelForGetApiKey?: string;
icon?: string;

View File

@ -1,3 +1,4 @@
import Cookies from 'js-cookie';
import type { ModelInfo, OllamaApiResponse, OllamaModel } from './types';
import type { ProviderInfo } from '~/types/model';
@ -262,6 +263,7 @@ const PROVIDER_LIST: ProviderInfo[] = [
},
{
name: 'Together',
getDynamicModels: getTogetherModels,
staticModels: [
{
name: 'Qwen/Qwen2.5-Coder-32B-Instruct',
@ -293,6 +295,61 @@ const staticModels: ModelInfo[] = PROVIDER_LIST.map((p) => p.staticModels).flat(
export let MODEL_LIST: ModelInfo[] = [...staticModels];
export async function getModelList(apiKeys: Record<string, string>) {
MODEL_LIST = [
...(
await Promise.all(
PROVIDER_LIST.filter(
(p): p is ProviderInfo & { getDynamicModels: () => Promise<ModelInfo[]> } => !!p.getDynamicModels,
).map((p) => p.getDynamicModels(apiKeys)),
)
).flat(),
...staticModels,
];
return MODEL_LIST;
}
async function getTogetherModels(apiKeys?: Record<string, string>): Promise<ModelInfo[]> {
try {
const baseUrl = import.meta.env.TOGETHER_API_BASE_URL || '';
const provider = 'Together';
if (!baseUrl) {
return [];
}
let apiKey = import.meta.env.OPENAI_LIKE_API_KEY ?? '';
if (apiKeys && apiKeys[provider]) {
apiKey = apiKeys[provider];
}
if (!apiKey) {
return [];
}
const response = await fetch(`${baseUrl}/models`, {
headers: {
Authorization: `Bearer ${apiKey}`,
},
});
const res = (await response.json()) as any;
const data: any[] = (res || []).filter((model: any) => model.type == 'chat');
return data.map((m: any) => ({
name: m.id,
label: `${m.display_name} - in:$${m.pricing.input.toFixed(
2,
)} out:$${m.pricing.output.toFixed(2)} - context ${Math.floor(m.context_length / 1000)}k`,
provider,
maxTokenAllowed: 8000,
}));
} catch (e) {
console.error('Error getting Together models:', e);
return [];
}
}
const getOllamaBaseUrl = () => {
const defaultBaseUrl = import.meta.env.OLLAMA_API_BASE_URL || 'http://localhost:11434';
@ -340,7 +397,14 @@ async function getOpenAILikeModels(): Promise<ModelInfo[]> {
return [];
}
const apiKey = import.meta.env.OPENAI_LIKE_API_KEY ?? '';
let apiKey = import.meta.env.OPENAI_LIKE_API_KEY ?? '';
const apikeys = JSON.parse(Cookies.get('apiKeys') || '{}');
if (apikeys && apikeys.OpenAILike) {
apiKey = apikeys.OpenAILike;
}
const response = await fetch(`${baseUrl}/models`, {
headers: {
Authorization: `Bearer ${apiKey}`,
@ -414,16 +478,32 @@ async function getLMStudioModels(): Promise<ModelInfo[]> {
}
async function initializeModelList(): Promise<ModelInfo[]> {
let apiKeys: Record<string, string> = {};
try {
const storedApiKeys = Cookies.get('apiKeys');
if (storedApiKeys) {
const parsedKeys = JSON.parse(storedApiKeys);
if (typeof parsedKeys === 'object' && parsedKeys !== null) {
apiKeys = parsedKeys;
}
}
} catch (error: any) {
console.warn(`Failed to fetch apikeys from cookies:${error?.message}`);
}
MODEL_LIST = [
...(
await Promise.all(
PROVIDER_LIST.filter(
(p): p is ProviderInfo & { getDynamicModels: () => Promise<ModelInfo[]> } => !!p.getDynamicModels,
).map((p) => p.getDynamicModels()),
).map((p) => p.getDynamicModels(apiKeys)),
)
).flat(),
...staticModels,
];
return MODEL_LIST;
}

View File

@ -102,6 +102,7 @@
"@cloudflare/workers-types": "^4.20241127.0",
"@remix-run/dev": "^2.15.0",
"@types/diff": "^5.2.3",
"@types/dom-speech-recognition": "^0.0.4",
"@types/file-saver": "^2.0.7",
"@types/js-cookie": "^3.0.6",
"@types/react": "^18.3.12",

View File

@ -225,6 +225,9 @@ importers:
'@types/diff':
specifier: ^5.2.3
version: 5.2.3
'@types/dom-speech-recognition':
specifier: ^0.0.4
version: 0.0.4
'@types/file-saver':
specifier: ^2.0.7
version: 2.0.7
@ -2058,6 +2061,12 @@ packages:
'@types/diff@5.2.3':
resolution: {integrity: sha512-K0Oqlrq3kQMaO2RhfrNQX5trmt+XLyom88zS0u84nnIcLvFnRUMRRHmrGny5GSM+kNO9IZLARsdQHDzkhAgmrQ==}
'@types/dom-speech-recognition@0.0.4':
resolution: {integrity: sha512-zf2GwV/G6TdaLwpLDcGTIkHnXf8JEf/viMux+khqKQKDa8/8BAUtXXZS563GnvJ4Fg0PBLGAaFf2GekEVSZ6GQ==}
'@types/eslint@8.56.10':
resolution: {integrity: sha512-Shavhk87gCtY2fhXDctcfS3e6FdxWkCx1iUZ9eEUbh7rTqlZT0/IzOkCOVt0fCjcFuZ9FPYfuezTBImfHCDBGQ==}
'@types/estree-jsx@1.0.5':
resolution: {integrity: sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==}
@ -7485,6 +7494,15 @@ snapshots:
'@types/diff@5.2.3': {}
'@types/dom-speech-recognition@0.0.4': {}
'@types/eslint@8.56.10':
dependencies:
'@types/estree': 1.0.6
'@types/json-schema': 7.0.15
optional: true
'@types/estree-jsx@1.0.5':
dependencies:
'@types/estree': 1.0.6
@ -7821,7 +7839,7 @@ snapshots:
'@babel/plugin-syntax-typescript': 7.25.9(@babel/core@7.26.0)
'@vanilla-extract/babel-plugin-debug-ids': 1.1.0
'@vanilla-extract/css': 1.16.1
esbuild: 0.17.6
esbuild: 0.17.19
eval: 0.1.8
find-up: 5.0.0
javascript-stringify: 2.1.0

View File

@ -1,7 +1,7 @@
{
"compilerOptions": {
"lib": ["DOM", "DOM.Iterable", "ESNext"],
"types": ["@remix-run/cloudflare", "vite/client", "@cloudflare/workers-types/2023-07-01"],
"types": ["@remix-run/cloudflare", "vite/client", "@cloudflare/workers-types/2023-07-01", "@types/dom-speech-recognition"],
"isolatedModules": true,
"esModuleInterop": true,
"jsx": "react-jsx",

View File

@ -19,8 +19,7 @@ export default defineConfig((config) => {
future: {
v3_fetcherPersist: true,
v3_relativeSplatPath: true,
v3_throwAbortReason: true,
v3_lazyRouteDiscovery: true,
v3_throwAbortReason: true
},
}),
UnoCSS(),
@ -28,7 +27,7 @@ export default defineConfig((config) => {
chrome129IssuePlugin(),
config.mode === 'production' && optimizeCssModules({ apply: 'build' }),
],
envPrefix:["VITE_","OPENAI_LIKE_API_","OLLAMA_API_BASE_URL","LMSTUDIO_API_BASE_URL"],
envPrefix: ["VITE_", "OPENAI_LIKE_API_", "OLLAMA_API_BASE_URL", "LMSTUDIO_API_BASE_URL","TOGETHER_API_BASE_URL"],
css: {
preprocessorOptions: {
scss: {