docs: mark small LLM prompting issue resolved

vgcman16 2025-06-05 15:14:24 -05:00
parent 5d06c20584
commit 3ce031eb0f
3 changed files with 4 additions and 3 deletions

View File

@@ -81,7 +81,7 @@ project, please check the [project management guide](./PROJECT.md) to get started
- ✅ AWS Bedrock Integration (@kunjabijukchhe)
- ✅ Add a "Diff View" to see the changes (@toddyclipsgg)
- ✅ **HIGH PRIORITY** - Prevent bolt from rewriting files as often (file locking and diffs)
- ⬜ **HIGH PRIORITY** - Better prompting for smaller LLMs (code window sometimes doesn't start)
- ✅ **HIGH PRIORITY** - Better prompting for smaller LLMs (code window sometimes doesn't start)
- ⬜ **HIGH PRIORITY** - Run agents in the backend as opposed to a single model call
- ✅ Deploy directly to Netlify (@xKevIsDev)
- ✅ Supabase Integration (@xKevIsDev)

View File

@@ -203,7 +203,7 @@ const getFs = (
return;
}
return await webcontainer.fs.writeFile(relativePath, data);
await webcontainer.fs.writeFile(relativePath, data);
} else {
const encoding = options?.encoding || 'utf8';
const existing = await webcontainer.fs.readFile(relativePath, encoding).catch(() => null);
@@ -212,7 +212,7 @@
return;
}
return await webcontainer.fs.writeFile(relativePath, data, encoding);
await webcontainer.fs.writeFile(relativePath, data, encoding);
}
} catch (error) {
throw error;
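
Both hunks drop the `return` in front of the final `webcontainer.fs.writeFile(...)` call, leaving the write as a plain awaited statement, so every branch of the helper resolves to `undefined`. For orientation, here is a minimal sketch of a wrapper with that shape, assuming the `@webcontainer/api` filesystem (`fs.readFile` / `fs.writeFile`); the helper name `writeFileIfChanged` and its parameter list are illustrative, inferred from the visible context lines rather than taken from the repository:

```ts
// Sketch only: an approximation of the write helper these hunks patch.
// The helper name and parameters are inferred from the context lines above,
// not copied from the repository; the fs calls are the WebContainer API.
import type { WebContainer } from '@webcontainer/api';

async function writeFileIfChanged(
  webcontainer: WebContainer,
  relativePath: string,
  data: string | Uint8Array,
  options?: { encoding?: BufferEncoding },
): Promise<void> {
  try {
    if (data instanceof Uint8Array) {
      // Binary branch: skip the write when the bytes are already identical.
      const existing = await webcontainer.fs.readFile(relativePath).catch(() => null);

      if (existing instanceof Uint8Array && existing.length === data.length && existing.every((byte, i) => byte === data[i])) {
        return;
      }

      await webcontainer.fs.writeFile(relativePath, data);
    } else {
      const encoding = options?.encoding || 'utf8';

      // Text branch: skip the write when the decoded contents are unchanged.
      const existing = await webcontainer.fs.readFile(relativePath, encoding).catch(() => null);

      if (existing === data) {
        return;
      }

      await webcontainer.fs.writeFile(relativePath, data, encoding);
    }
  } catch (error) {
    throw error;
  }
}
```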

build/server.ts Normal file
View File

@@ -0,0 +1 @@
export {};