base commit

more fixes

fix background

fix chat history

Update ci.yaml

remove hardcoded api key
Shirisha 2025-05-14 20:45:42 -07:00
parent afd60de0ef
commit 1c15d13156
8 changed files with 62 additions and 107 deletions

View File

@@ -22,6 +22,3 @@ jobs:
# - name: Run ESLint
# run: pnpm run lint
- name: Run tests
run: pnpm run test

View File

@@ -12,7 +12,7 @@ import { classNames } from '~/utils/classNames';
import { PROVIDER_LIST } from '~/utils/constants';
import { Messages } from './Messages.client';
import { SendButton } from './SendButton.client';
import { APIKeyManager, getApiKeysFromCookies } from './APIKeyManager';
import { getApiKeysFromCookies } from './APIKeyManager';
import Cookies from 'js-cookie';
import * as Tooltip from '@radix-ui/react-tooltip';
@@ -35,8 +35,6 @@ import type { ModelInfo } from '~/lib/modules/llm/types';
import ProgressCompilation from './ProgressCompilation';
import type { ProgressAnnotation } from '~/types/context';
import type { ActionRunner } from '~/lib/runtime/action-runner';
import { LOCAL_PROVIDERS } from '~/lib/stores/settings';
import { useLoaderData } from '@remix-run/react';
const TEXTAREA_MIN_HEIGHT = 76;
@@ -121,8 +119,6 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
const [isModelLoading, setIsModelLoading] = useState<string | undefined>('all');
const [progressAnnotations, setProgressAnnotations] = useState<ProgressAnnotation[]>([]);
const { hideBaseChat, canImportChat } = useLoaderData<{ hideBaseChat?: boolean; canImportChat?: boolean }>();
useEffect(() => {
if (data) {
const progressList = data.filter(
@@ -199,30 +195,32 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
}
}, [providerList, provider]);
const onApiKeysChange = async (providerName: string, apiKey: string) => {
const newApiKeys = { ...apiKeys, [providerName]: apiKey };
setApiKeys(newApiKeys);
Cookies.set('apiKeys', JSON.stringify(newApiKeys));
setIsModelLoading(providerName);
let providerModels: ModelInfo[] = [];
try {
const response = await fetch(`/code-editor/api/models/${encodeURIComponent(providerName)}`);
const data = await response.json();
providerModels = (data as { modelList: ModelInfo[] }).modelList;
} catch (error) {
console.error('Error loading dynamic models for:', providerName, error);
}
// Only update models for the specific provider
setModelList((prevModels) => {
const otherModels = prevModels.filter((model) => model.provider !== providerName);
return [...otherModels, ...providerModels];
});
setIsModelLoading(undefined);
};
/*
* const onApiKeysChange = async (providerName: string, apiKey: string) => {
* const newApiKeys = { ...apiKeys, [providerName]: apiKey };
* setApiKeys(newApiKeys);
* Cookies.set('apiKeys', JSON.stringify(newApiKeys));
*
* setIsModelLoading(providerName);
*
* let providerModels: ModelInfo[] = [];
*
* try {
* const response = await fetch(`/code-editor/api/models/${encodeURIComponent(providerName)}`);
* const data = await response.json();
* providerModels = (data as { modelList: ModelInfo[] }).modelList;
* } catch (error) {
* console.error('Error loading dynamic models for:', providerName, error);
* }
*
* // Only update models for the specific provider
* setModelList((prevModels) => {
* const otherModels = prevModels.filter((model) => model.provider !== providerName);
* return [...otherModels, ...providerModels];
* });
* setIsModelLoading(undefined);
* };
*/
const startListening = () => {
if (recognition) {
@@ -417,6 +415,7 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
apiKeys={apiKeys}
modelLoading={isModelLoading}
/>
{/*
{(providerList || []).length > 0 &&
provider &&
(!LOCAL_PROVIDERS.includes(provider.name) || 'OpenAILike') && (
@@ -427,7 +426,7 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
onApiKeysChange(provider.name, key);
}}
/>
)}
)} */}
</div>
)}
</ClientOnly>
@@ -522,7 +521,7 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
minHeight: TEXTAREA_MIN_HEIGHT,
maxHeight: TEXTAREA_MAX_HEIGHT,
}}
placeholder="How can Bolt help you today?"
placeholder="How can RapidCanvas help you today?"
translate="no"
/>
<ClientOnly>
@@ -632,20 +631,6 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
</div>
);
return (
<Tooltip.Provider delayDuration={200}>
{!hideBaseChat ? (
baseChat
) : (
<div ref={ref} className={classNames(styles.BaseChat, 'relative flex h-full w-full overflow-hidden')}>
<ClientOnly>{() => <Menu />}</ClientOnly>
<div className="flex justify-center gap-2 items-start mt-10% w-full">
{ImportButtons(importChat, canImportChat)}
<GitCloneButton importChat={importChat} />
</div>
</div>
)}
</Tooltip.Provider>
);
return <Tooltip.Provider delayDuration={200}>{baseChat}</Tooltip.Provider>;
},
);

View File

@@ -12,7 +12,7 @@ import { useMessageParser, usePromptEnhancer, useShortcuts, useSnapScroll } from
import { description, useChatHistory } from '~/lib/persistence';
import { chatStore } from '~/lib/stores/chat';
import { workbenchStore } from '~/lib/stores/workbench';
import { DEFAULT_MODEL, DEFAULT_PROVIDER, PROMPT_COOKIE_KEY, PROVIDER_LIST } from '~/utils/constants';
import { PROMPT_COOKIE_KEY, PROVIDER_LIST } from '~/utils/constants';
import { cubicEasingFn } from '~/utils/easings';
import { createScopedLogger, renderLogger } from '~/utils/logger';
import { BaseChat } from './BaseChat';
@@ -45,7 +45,9 @@ export function Chat() {
const [isLoading, setIsLoading] = useState(true);
const [error, setError] = useState<Error | null>(null);
const { ready, initialMessages, resetMessages, storeMessageHistory, importChat, exportChat } = useChatHistory();
const { ready, initialMessages, storeMessageHistory, importChat, exportChat } = useChatHistory();
console.log('initialMessages', initialMessages);
const title = useStore(description);
useEffect(() => {
@@ -55,12 +57,6 @@
const { id: mixedId } = useLoaderData<{ id?: string }>();
const token = getToken();
useEffect(() => {
if (initialMessages.length > 0) {
resetMessages();
}
}, [initialMessages]);
useEffect(() => {
if (!mixedId) {
setError(new Error('No dataApp ID'));
@@ -73,7 +69,7 @@
loadFilesFromDataApp(mixedId, token!)
.then(async (data) => {
await importChat(data.folderName, data.messages);
await importChat(data.folderName, !!initialMessages.length ? initialMessages : data.messages);
saveFilesToWorkbench({ fileArtifacts: data.updatedArtifacts.files });
removeTokenFromUrl();
setIsLoading(false);
@@ -164,8 +160,6 @@ export const ChatImpl = memo(
({ description, initialMessages, storeMessageHistory, importChat, exportChat }: ChatProps) => {
useShortcuts();
const { showChat } = useLoaderData<{ showChat: boolean }>();
const textareaRef = useRef<HTMLTextAreaElement>(null);
const [chatStarted, setChatStarted] = useState(initialMessages.length > 0);
const [uploadedFiles, setUploadedFiles] = useState<File[]>([]);
@@ -178,11 +172,10 @@
const [model, setModel] = useState(() => {
const savedModel = Cookies.get('selectedModel');
return savedModel || DEFAULT_MODEL;
return savedModel || 'gpt-4o-mini';
});
const [provider, setProvider] = useState(() => {
const savedProvider = Cookies.get('selectedProvider');
return (PROVIDER_LIST.find((p) => p.name === savedProvider) || DEFAULT_PROVIDER) as ProviderInfo;
return PROVIDER_LIST.find((p) => p.name === 'OpenAI') as ProviderInfo;
});
const [animationScope, animate] = useAnimate();
@@ -541,7 +534,7 @@
ref={animationScope}
textareaRef={textareaRef}
input={input}
showChat={showChat}
showChat={true}
chatStarted={chatStarted}
isStreaming={isLoading || fakeLoading}
onStreamingChange={(streaming) => {

View File

@@ -14,8 +14,8 @@ export default function ChatAlert({ alert, clearAlert, postMessage }: Props) {
const isPreview = source === 'preview';
const title = isPreview ? 'Preview Error' : 'Terminal Error';
const message = isPreview
? 'We encountered an error while running the preview. Would you like Bolt to analyze and help resolve this issue?'
: 'We encountered an error while running terminal commands. Would you like Bolt to analyze and help resolve this issue?';
? 'We encountered an error while running the preview. Would you like RapidCanvas to analyze and help resolve this issue?'
: 'We encountered an error while running terminal commands. Would you like RapidCanvas to analyze and help resolve this issue?';
return (
<AnimatePresence>
@@ -84,7 +84,7 @@ export default function ChatAlert({ alert, clearAlert, postMessage }: Props) {
)}
>
<div className="i-ph:chat-circle-duotone"></div>
Ask Bolt
Ask RapidCanvas
</button>
<button
onClick={clearAlert}

View File

@@ -168,6 +168,7 @@ export const ModelSelector = ({
<div className="mb-2 flex gap-2 flex-col sm:flex-row">
<select
value={provider?.name ?? ''}
disabled
onChange={(e) => {
const newProvider = providerList.find((p: ProviderInfo) => p.name === e.target.value);
@@ -256,7 +257,7 @@
<div
className={classNames(
'max-h-60 overflow-y-auto',
'max-h-[60px] overflow-y-auto',
'sm:scrollbar-none',
'[&::-webkit-scrollbar]:w-2 [&::-webkit-scrollbar]:h-2',
'[&::-webkit-scrollbar-thumb]:bg-bolt-elements-borderColor',

View File

@@ -7,7 +7,6 @@ import { workbenchStore } from '~/lib/stores/workbench';
import { logStore } from '~/lib/stores/logs'; // Import logStore
import {
getMessages,
getNextId,
getUrlId,
openDatabase,
setMessages,
@@ -84,7 +83,7 @@ export function useChatHistory() {
startingIdx = -1;
}
let filteredMessages = storedMessages.messages.slice(startingIdx + 1, endingIdx);
let filteredMessages = storedMessages.messages;
let archivedMessages: Message[] = [];
if (startingIdx >= 0) {
@@ -186,7 +185,7 @@ ${value.content}
setInitialMessages(filteredMessages);
setUrlId(storedMessages.urlId);
setUrlId(mixedId!);
description.set(storedMessages.description);
chatId.set(storedMessages.id);
chatMetadata.set(storedMessages.metadata);
@@ -319,16 +318,6 @@ ${value.content}
description.set(firstArtifact?.title);
}
if (initialMessages.length === 0 && !chatId.get()) {
const nextId = await getNextId(db);
chatId.set(nextId);
if (!urlId) {
navigateChat(nextId);
}
}
await setMessages(
db,
chatId.get() as string,
@@ -362,8 +351,10 @@ ${value.content}
const currentId = mixedId || chatId.get();
if (currentId) {
await setMessages(db, currentId, messages, urlId, description, undefined, metadata);
setInitialMessages(messages);
const chat = await getMessages(db, currentId);
const chatMessages = chat.messages.length > 0 ? chat.messages : messages;
await setMessages(db, currentId, chatMessages, urlId, description, undefined, metadata);
setInitialMessages(chatMessages);
setArchivedMessages([]);
return;

View File

@@ -19,24 +19,6 @@ export const action = withAuth(async (args: ActionFunctionArgs) => {
const logger = createScopedLogger('api.chat');
function parseCookies(cookieHeader: string): Record<string, string> {
const cookies: Record<string, string> = {};
const items = cookieHeader.split(';').map((cookie) => cookie.trim());
items.forEach((item) => {
const [name, ...rest] = item.split('=');
if (name && rest) {
const decodedName = decodeURIComponent(name.trim());
const decodedValue = decodeURIComponent(rest.join('=').trim());
cookies[decodedName] = decodedValue;
}
});
return cookies;
}
async function chatAction({ context, request }: ActionFunctionArgs) {
const { messages, files, promptId, contextOptimization } = await request.json<{
messages: Messages;
@@ -45,11 +27,15 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
contextOptimization: boolean;
}>();
const cookieHeader = request.headers.get('Cookie');
const apiKeys = JSON.parse(parseCookies(cookieHeader || '').apiKeys || '{}');
const providerSettings: Record<string, IProviderSetting> = JSON.parse(
parseCookies(cookieHeader || '').providers || '{}',
);
const apiKeys = {
OpenAI: context.cloudflare?.env?.OPENAI_API_KEY || process.env.OPENAI_API_KEY || '',
};
if (!apiKeys.OpenAI) {
throw new Error('OpenAI API key is not configured. Please set OPENAI_API_KEY in your environment variables.');
}
const providerSettings: Record<string, IProviderSetting> = { OpenAI: { enabled: true } };
const stream = new SwitchableStream();
@@ -224,6 +210,8 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
const lastUserMessage = messages.filter((x) => x.role == 'user').slice(-1)[0];
const { model, provider } = extractPropertiesFromMessage(lastUserMessage);
console.log('model', model);
console.log('provider', provider);
messages.push({ id: generateId(), role: 'assistant', content });
messages.push({
id: generateId(),

View File

@@ -49,7 +49,7 @@
--bolt-elements-actions-background: theme('colors.white');
--bolt-elements-actions-code-background: theme('colors.gray.800');
--bolt-elements-messages-background: theme('colors.gray.100');
--bolt-elements-messages-background: theme('colors.white');
--bolt-elements-messages-linkColor: theme('colors.accent.500');
--bolt-elements-messages-code-background: theme('colors.gray.800');
--bolt-elements-messages-inlineCode-background: theme('colors.gray.200');