feat: redact file contents from chat and put latest files into system prompt (#904)

Anirban Kar 2024-12-29 15:36:31 +05:30 committed by GitHub
parent aecd9b1b40
commit 3a36a4469a
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
6 changed files with 50 additions and 9 deletions
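
In short: a new contextOptimization flag is threaded from the Features settings UI (persisted in a cookie and a nanostores atom) through useSettings, the useChat request body, and the chatAction route into streamText, where it enables redacting file contents from earlier assistant messages and injecting the current files into the system prompt. A minimal sketch of the request body shape after this change, consolidated from the hunks below; the interface itself is illustrative and not part of the diff:

// Illustrative only: field names follow the api.chat.ts hunk near the end of this diff;
// the real code types messages as Messages and files as any / FileMap, and the client
// additionally sends apiKeys in the same body.
interface ChatRequestBody {
  messages: Array<{ role: 'user' | 'assistant'; content: string }>; // chat history forwarded by useChat
  files: Record<string, unknown>;   // latest workbench files
  promptId?: string;                // selected system prompt template
  contextOptimization: boolean;     // new flag wired from the Features toggle
}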

View File

@@ -118,7 +118,7 @@ export const ChatImpl = memo(
     const [searchParams, setSearchParams] = useSearchParams();
     const files = useStore(workbenchStore.files);
     const actionAlert = useStore(workbenchStore.alert);
-    const { activeProviders, promptId } = useSettings();
+    const { activeProviders, promptId, contextOptimizationEnabled } = useSettings();

     const [model, setModel] = useState(() => {
       const savedModel = Cookies.get('selectedModel');
@@ -141,6 +141,7 @@ export const ChatImpl = memo(
         apiKeys,
         files,
         promptId,
+        contextOptimization: contextOptimizationEnabled,
       },
       sendExtraMessageFields: true,
       onError: (error) => {

View File

@@ -14,6 +14,8 @@ export default function FeaturesTab() {
     enableLatestBranch,
     promptId,
     setPromptId,
+    enableContextOptimization,
+    contextOptimizationEnabled,
   } = useSettings();

   const handleToggle = (enabled: boolean) => {
@@ -39,6 +41,19 @@ export default function FeaturesTab() {
           </div>
           <Switch className="ml-auto" checked={isLatestBranch} onCheckedChange={enableLatestBranch} />
         </div>
+        <div className="flex items-center justify-between">
+          <div>
+            <span className="text-bolt-elements-textPrimary">Use Context Optimization</span>
+            <p className="text-sm text-bolt-elements-textSecondary">
+              Redacts file contents from the chat and puts the latest file contents into the system prompt
+            </p>
+          </div>
+          <Switch
+            className="ml-auto"
+            checked={contextOptimizationEnabled}
+            onCheckedChange={enableContextOptimization}
+          />
+        </div>
       </div>
     </div>

View File

@@ -150,8 +150,9 @@ export async function streamText(props: {
   files?: FileMap;
   providerSettings?: Record<string, IProviderSetting>;
   promptId?: string;
+  contextOptimization?: boolean;
 }) {
-  const { messages, env: serverEnv, options, apiKeys, files, providerSettings, promptId } = props;
+  const { messages, env: serverEnv, options, apiKeys, files, providerSettings, promptId, contextOptimization } = props;

   // console.log({serverEnv});
@@ -170,9 +171,11 @@ export async function streamText(props: {
       return { ...message, content };
     } else if (message.role == 'assistant') {
-      const content = message.content;
+      let content = message.content;

-      // content = simplifyBoltActions(content);
+      if (contextOptimization) {
+        content = simplifyBoltActions(content);
+      }

       return { ...message, content };
     }
@@ -192,11 +195,9 @@ export async function streamText(props: {
       allowedHtmlElements: allowedHTMLElements,
       modificationTagName: MODIFICATIONS_TAG_NAME,
     }) ?? getSystemPrompt();

-  let codeContext = '';
-
-  if (files) {
-    codeContext = createFilesContext(files);
-    codeContext = '';
+  if (files && contextOptimization) {
+    const codeContext = createFilesContext(files);
     systemPrompt = `${systemPrompt}\n\n ${codeContext}`;
   }
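
simplifyBoltActions and createFilesContext are imported helpers that are not part of this diff. The sketches below are hypothetical stand-ins that only illustrate the intent (names, the regex, and the file-map shape are assumptions, and the real implementations may differ): strip the bodies of file-writing boltActions in older assistant messages, and serialize the current workbench files into a block that gets appended to the system prompt.

// Hypothetical sketches, not the actual helpers used above.

// Strip the payload of file-writing boltActions so stale file versions stop consuming tokens.
function simplifyBoltActionsSketch(content: string): string {
  return content.replace(
    /(<boltAction type="file"[^>]*>)[\s\S]*?(<\/boltAction>)/g,
    '$1\n[file contents redacted]\n$2',
  );
}

// Serialize the latest workbench files into a single block for the system prompt,
// assuming a simplified { [path]: { content } } file map.
function createFilesContextSketch(files: Record<string, { content: string }>): string {
  return Object.entries(files)
    .map(([path, file]) => `<file path="${path}">\n${file.content}\n</file>`)
    .join('\n');
}

With the flag off, assistant messages pass through untouched and no file context is appended, which matches what the previous code effectively did (the old codeContext was reset to an empty string before being appended).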

View File

@@ -7,6 +7,7 @@ import {
   promptStore,
   providersStore,
   latestBranchStore,
+  enableContextOptimizationStore,
 } from '~/lib/stores/settings';
 import { useCallback, useEffect, useState } from 'react';
 import Cookies from 'js-cookie';
@@ -31,6 +32,7 @@ export function useSettings() {
   const isLocalModel = useStore(isLocalModelsEnabled);
   const isLatestBranch = useStore(latestBranchStore);
   const [activeProviders, setActiveProviders] = useState<ProviderInfo[]>([]);
+  const contextOptimizationEnabled = useStore(enableContextOptimizationStore);

   // Function to check if we're on stable version
   const checkIsStableVersion = async () => {
@@ -118,6 +120,12 @@ export function useSettings() {
     } else {
       latestBranchStore.set(savedLatestBranch === 'true');
     }
+
+    const savedContextOptimizationEnabled = Cookies.get('contextOptimizationEnabled');
+
+    if (savedContextOptimizationEnabled) {
+      enableContextOptimizationStore.set(savedContextOptimizationEnabled === 'true');
+    }
   }, []);

   // writing values to cookies on change
@@ -179,6 +187,12 @@ export function useSettings() {
     Cookies.set('isLatestBranch', String(enabled));
   }, []);

+  const enableContextOptimization = useCallback((enabled: boolean) => {
+    enableContextOptimizationStore.set(enabled);
+    logStore.logSystem(`Context optimization ${enabled ? 'enabled' : 'disabled'}`);
+    Cookies.set('contextOptimizationEnabled', String(enabled));
+  }, []);
+
   return {
     providers,
     activeProviders,
@@ -193,5 +207,7 @@ export function useSettings() {
     setPromptId,
     isLatestBranch,
     enableLatestBranch,
+    contextOptimizationEnabled,
+    enableContextOptimization,
   };
 }

View File

@@ -39,6 +39,9 @@ PROVIDER_LIST.forEach((provider) => {
     },
   };
 });
+
+//TODO: need to create one single map for all these flags
+
 export const providersStore = map<ProviderSetting>(initialProviderSettings);

 export const isDebugMode = atom(false);
@@ -50,3 +53,5 @@ export const isLocalModelsEnabled = atom(true);
 export const promptStore = atom<string>('default');

 export const latestBranchStore = atom(false);
+
+export const enableContextOptimizationStore = atom(false);

View File

@@ -29,10 +29,11 @@ function parseCookies(cookieHeader: string): Record<string, string> {
 }

 async function chatAction({ context, request }: ActionFunctionArgs) {
-  const { messages, files, promptId } = await request.json<{
+  const { messages, files, promptId, contextOptimization } = await request.json<{
     messages: Messages;
     files: any;
     promptId?: string;
+    contextOptimization: boolean;
   }>();

   const cookieHeader = request.headers.get('Cookie');
@@ -100,6 +101,7 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
           files,
           providerSettings,
           promptId,
+          contextOptimization,
         });

         return stream.switchSource(result.toDataStream());
@@ -114,6 +116,7 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
       files,
       providerSettings,
       promptId,
+      contextOptimization,
     });

     stream.switchSource(result.toDataStream());
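
End to end, once the toggle in the Features tab is switched on, every chat request carries contextOptimization: true and chatAction forwards it into streamText as shown above. A rough usage illustration; the /api/chat path and the placeholder values are assumptions, since in the app this body is assembled by useChat in Chat.client.tsx rather than by a manual fetch:

// Illustrative request only.
const body = {
  messages: [{ role: 'user', content: 'Add a dark mode toggle' }], // placeholder history
  files: {},                   // placeholder for the current workbench file map
  promptId: 'default',         // whichever prompt is selected in settings
  contextOptimization: true,   // the new Features toggle
};

const response = await fetch('/api/chat', { method: 'POST', body: JSON.stringify(body) });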