mirror of
https://github.com/stackblitz-labs/bolt.diy
synced 2025-05-08 22:19:36 +00:00
Add 'grok-3-beta' to xAI provider and 'gemini-2.5-flash-preview-04-17' to Google provider. Also, ensure file saving when content is updated in WorkbenchStore and update streaming indicator styling in chat messages.
804 lines
24 KiB
TypeScript
import { atom, map, type MapStore, type ReadableAtom, type WritableAtom } from 'nanostores';
|
|
import type { EditorDocument, ScrollPosition } from '~/components/editor/codemirror/CodeMirrorEditor';
|
|
import { ActionRunner } from '~/lib/runtime/action-runner';
|
|
import type { ActionCallbackData, ArtifactCallbackData } from '~/lib/runtime/message-parser';
|
|
import { webcontainer } from '~/lib/webcontainer';
|
|
import type { ITerminal } from '~/types/terminal';
|
|
import { unreachable } from '~/utils/unreachable';
|
|
import { EditorStore } from './editor';
|
|
import { FilesStore, type FileMap } from './files';
|
|
import { PreviewsStore } from './previews';
|
|
import { TerminalStore } from './terminal';
|
|
import JSZip from 'jszip';
|
|
import fileSaver from 'file-saver';
|
|
import { Octokit, type RestEndpointMethodTypes } from '@octokit/rest';
|
|
import { path } from '~/utils/path';
|
|
import { extractRelativePath } from '~/utils/diff';
|
|
import { description } from '~/lib/persistence';
|
|
import Cookies from 'js-cookie';
|
|
import { createSampler } from '~/utils/sampler';
|
|
import type { ActionAlert, DeployAlert, SupabaseAlert } from '~/types/actions';
|
|
|
|
const { saveAs } = fileSaver;
|
|
|
|
/** State tracked for a single artifact streamed in by the assistant. */
export interface ArtifactState {
  // Artifact ID from the callback payload (distinct from the message ID used
  // as the key in the artifacts map).
  id: string;
  // Human-readable artifact title.
  title: string;
  // Optional artifact type tag carried through from the callback data.
  type?: string;
  // Whether the artifact has been marked closed.
  closed: boolean;
  // Runner that executes this artifact's actions.
  runner: ActionRunner;
}
|
|
|
|
/** Subset of artifact state that may be patched via `updateArtifact`. */
export type ArtifactUpdateState = Pick<ArtifactState, 'title' | 'closed'>;

/** Artifact states keyed by message ID (see `addArtifact`). */
type Artifacts = MapStore<Record<string, ArtifactState>>;

/** The three workbench panes. */
export type WorkbenchViewType = 'code' | 'diff' | 'preview';
|
|
|
|
export class WorkbenchStore {
|
|
#previewsStore = new PreviewsStore(webcontainer);
|
|
#filesStore = new FilesStore(webcontainer);
|
|
#editorStore = new EditorStore(this.#filesStore);
|
|
#terminalStore = new TerminalStore(webcontainer);
|
|
|
|
#reloadedMessages = new Set<string>();
|
|
|
|
artifacts: Artifacts = import.meta.hot?.data.artifacts ?? map({});
|
|
|
|
showWorkbench: WritableAtom<boolean> = import.meta.hot?.data.showWorkbench ?? atom(false);
|
|
currentView: WritableAtom<WorkbenchViewType> = import.meta.hot?.data.currentView ?? atom('code');
|
|
unsavedFiles: WritableAtom<Set<string>> = import.meta.hot?.data.unsavedFiles ?? atom(new Set<string>());
|
|
actionAlert: WritableAtom<ActionAlert | undefined> =
|
|
import.meta.hot?.data.unsavedFiles ?? atom<ActionAlert | undefined>(undefined);
|
|
supabaseAlert: WritableAtom<SupabaseAlert | undefined> =
|
|
import.meta.hot?.data.unsavedFiles ?? atom<ActionAlert | undefined>(undefined);
|
|
deployAlert: WritableAtom<DeployAlert | undefined> =
|
|
import.meta.hot?.data.unsavedFiles ?? atom<DeployAlert | undefined>(undefined);
|
|
modifiedFiles = new Set<string>();
|
|
artifactIdList: string[] = [];
|
|
#globalExecutionQueue = Promise.resolve();
|
|
  constructor() {
    // Stash the live store instances in Vite's HMR data bag so state survives
    // hot module reloads (the field initializers read these back).
    if (import.meta.hot) {
      import.meta.hot.data.artifacts = this.artifacts;
      import.meta.hot.data.unsavedFiles = this.unsavedFiles;
      import.meta.hot.data.showWorkbench = this.showWorkbench;
      import.meta.hot.data.currentView = this.currentView;
      import.meta.hot.data.actionAlert = this.actionAlert;
      import.meta.hot.data.supabaseAlert = this.supabaseAlert;
      import.meta.hot.data.deployAlert = this.deployAlert;

      // Ensure binary files are properly preserved across hot reloads
      const filesMap = this.files.get();

      for (const [path, dirent] of Object.entries(filesMap)) {
        if (dirent?.type === 'file' && dirent.isBinary && dirent.content) {
          // Make sure binary content is preserved: re-set the key with a
          // fresh object so the map store retains the entry.
          this.files.setKey(path, { ...dirent });
        }
      }
    }
  }
|
|
|
|
addToExecutionQueue(callback: () => Promise<void>) {
|
|
this.#globalExecutionQueue = this.#globalExecutionQueue.then(() => callback());
|
|
}
|
|
|
|
  /** Live previews exposed by the previews store. */
  get previews() {
    return this.#previewsStore.previews;
  }

  /** Map of all files known to the files store. */
  get files() {
    return this.#filesStore.files;
  }

  /** Atom for the document currently open in the editor, if any. */
  get currentDocument(): ReadableAtom<EditorDocument | undefined> {
    return this.#editorStore.currentDocument;
  }

  /** Atom for the path of the currently selected file, if any. */
  get selectedFile(): ReadableAtom<string | undefined> {
    return this.#editorStore.selectedFile;
  }

  /** The first artifact that was registered, if any. */
  get firstArtifact(): ArtifactState | undefined {
    return this.#getArtifact(this.artifactIdList[0]);
  }

  /** Number of files tracked by the files store. */
  get filesCount(): number {
    return this.#filesStore.filesCount;
  }

  /** Whether the terminal panel is shown. */
  get showTerminal() {
    return this.#terminalStore.showTerminal;
  }

  /** The dedicated bolt terminal handle. */
  get boltTerminal() {
    return this.#terminalStore.boltTerminal;
  }
|
|
  /** Atom holding the current action alert (alias for `actionAlert`). */
  get alert() {
    return this.actionAlert;
  }

  /** Dismisses the current action alert. */
  clearAlert() {
    this.actionAlert.set(undefined);
  }

  // NOTE(review): the PascalCase getter names below break the camelCase
  // convention, but they are part of the public interface and kept as-is.

  /** Atom holding the current Supabase alert. */
  get SupabaseAlert() {
    return this.supabaseAlert;
  }

  /** Dismisses the current Supabase alert. */
  clearSupabaseAlert() {
    this.supabaseAlert.set(undefined);
  }

  /** Atom holding the current deploy alert. */
  get DeployAlert() {
    return this.deployAlert;
  }

  /** Dismisses the current deploy alert. */
  clearDeployAlert() {
    this.deployAlert.set(undefined);
  }
|
|
|
|
  /** Toggles terminal visibility, or forces it when `value` is provided. */
  toggleTerminal(value?: boolean) {
    this.#terminalStore.toggleTerminal(value);
  }

  /** Attaches a regular terminal instance to the terminal store. */
  attachTerminal(terminal: ITerminal) {
    this.#terminalStore.attachTerminal(terminal);
  }

  /** Attaches the dedicated bolt terminal instance. */
  attachBoltTerminal(terminal: ITerminal) {
    this.#terminalStore.attachBoltTerminal(terminal);
  }

  /** Propagates a terminal resize to the underlying store. */
  onTerminalResize(cols: number, rows: number) {
    this.#terminalStore.onTerminalResize(cols, rows);
  }
|
|
|
|
setDocuments(files: FileMap) {
|
|
this.#editorStore.setDocuments(files);
|
|
|
|
if (this.#filesStore.filesCount > 0 && this.currentDocument.get() === undefined) {
|
|
// we find the first file and select it
|
|
for (const [filePath, dirent] of Object.entries(files)) {
|
|
if (dirent?.type === 'file') {
|
|
this.setSelectedFile(filePath);
|
|
break;
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
  /** Shows or hides the workbench panel. */
  setShowWorkbench(show: boolean) {
    this.showWorkbench.set(show);
  }
|
|
|
|
  /**
   * Replaces the content of the currently open document and keeps the
   * unsaved-files set in sync with whether the content differs from the
   * saved file. No-op when no document is open.
   */
  setCurrentDocumentContent(newContent: string) {
    const filePath = this.currentDocument.get()?.filePath;

    if (!filePath) {
      return;
    }

    // "Unsaved" means the file exists in the store and the new content differs.
    const originalContent = this.#filesStore.getFile(filePath)?.content;
    const unsavedChanges = originalContent !== undefined && originalContent !== newContent;

    this.#editorStore.updateFile(filePath, newContent);

    const currentDocument = this.currentDocument.get();

    if (currentDocument) {
      const previousUnsavedFiles = this.unsavedFiles.get();

      // Already marked unsaved and still dirty — avoid a redundant atom update.
      if (unsavedChanges && previousUnsavedFiles.has(currentDocument.filePath)) {
        return;
      }

      // Replace (not mutate) the set so atom subscribers are notified.
      const newUnsavedFiles = new Set(previousUnsavedFiles);

      if (unsavedChanges) {
        newUnsavedFiles.add(currentDocument.filePath);
      } else {
        newUnsavedFiles.delete(currentDocument.filePath);
      }

      this.unsavedFiles.set(newUnsavedFiles);
    }
  }
|
|
|
|
setCurrentDocumentScrollPosition(position: ScrollPosition) {
|
|
const editorDocument = this.currentDocument.get();
|
|
|
|
if (!editorDocument) {
|
|
return;
|
|
}
|
|
|
|
const { filePath } = editorDocument;
|
|
|
|
this.#editorStore.updateScrollPosition(filePath, position);
|
|
}
|
|
|
|
  /** Selects a file in the editor, or clears the selection with `undefined`. */
  setSelectedFile(filePath: string | undefined) {
    this.#editorStore.setSelectedFile(filePath);
  }
|
|
|
|
  /**
   * Writes the editor's in-memory content for `filePath` to the files store
   * and removes the file from the unsaved set. No-op for unknown documents.
   */
  async saveFile(filePath: string) {
    const documents = this.#editorStore.documents.get();
    const document = documents[filePath];

    if (document === undefined) {
      return;
    }

    await this.#filesStore.saveFile(filePath, document.value);

    // Replace (not mutate) the set so atom subscribers are notified.
    const newUnsavedFiles = new Set(this.unsavedFiles.get());
    newUnsavedFiles.delete(filePath);

    this.unsavedFiles.set(newUnsavedFiles);
  }
|
|
|
|
async saveCurrentDocument() {
|
|
const currentDocument = this.currentDocument.get();
|
|
|
|
if (currentDocument === undefined) {
|
|
return;
|
|
}
|
|
|
|
await this.saveFile(currentDocument.filePath);
|
|
}
|
|
|
|
resetCurrentDocument() {
|
|
const currentDocument = this.currentDocument.get();
|
|
|
|
if (currentDocument === undefined) {
|
|
return;
|
|
}
|
|
|
|
const { filePath } = currentDocument;
|
|
const file = this.#filesStore.getFile(filePath);
|
|
|
|
if (!file) {
|
|
return;
|
|
}
|
|
|
|
this.setCurrentDocumentContent(file.content);
|
|
}
|
|
|
|
async saveAllFiles() {
|
|
for (const filePath of this.unsavedFiles.get()) {
|
|
await this.saveFile(filePath);
|
|
}
|
|
}
|
|
|
|
getFileModifcations() {
|
|
return this.#filesStore.getFileModifications();
|
|
}
|
|
|
|
getModifiedFiles() {
|
|
return this.#filesStore.getModifiedFiles();
|
|
}
|
|
|
|
resetAllFileModifications() {
|
|
this.#filesStore.resetFileModifications();
|
|
}
|
|
|
|
  /**
   * Creates a file (optionally with initial content) and selects it on
   * success. Logs and rethrows on failure.
   *
   * @returns Whatever the files store reports for the create operation.
   */
  async createFile(filePath: string, content: string | Uint8Array = '') {
    try {
      const success = await this.#filesStore.createFile(filePath, content);

      if (success) {
        this.setSelectedFile(filePath);

        /*
         * For empty files, we need to ensure they're not marked as unsaved
         * Only check for empty string, not empty Uint8Array
         */
        if (typeof content === 'string' && content === '') {
          const newUnsavedFiles = new Set(this.unsavedFiles.get());
          newUnsavedFiles.delete(filePath);
          this.unsavedFiles.set(newUnsavedFiles);
        }
      }

      return success;
    } catch (error) {
      console.error('Failed to create file:', error);
      throw error;
    }
  }
|
|
|
|
async createFolder(folderPath: string) {
|
|
try {
|
|
return await this.#filesStore.createFolder(folderPath);
|
|
} catch (error) {
|
|
console.error('Failed to create folder:', error);
|
|
throw error;
|
|
}
|
|
}
|
|
|
|
  /**
   * Deletes a file, drops it from the unsaved set, and — when it was the open
   * document — selects the first remaining file (or clears the selection).
   * Logs and rethrows on failure.
   */
  async deleteFile(filePath: string) {
    try {
      // Capture before deletion so we know whether to move the selection.
      const currentDocument = this.currentDocument.get();
      const isCurrentFile = currentDocument?.filePath === filePath;

      const success = await this.#filesStore.deleteFile(filePath);

      if (success) {
        const newUnsavedFiles = new Set(this.unsavedFiles.get());

        if (newUnsavedFiles.has(filePath)) {
          newUnsavedFiles.delete(filePath);
          this.unsavedFiles.set(newUnsavedFiles);
        }

        if (isCurrentFile) {
          // Fall back to the first regular file still present, if any.
          const files = this.files.get();
          let nextFile: string | undefined = undefined;

          for (const [path, dirent] of Object.entries(files)) {
            if (dirent?.type === 'file') {
              nextFile = path;
              break;
            }
          }

          this.setSelectedFile(nextFile);
        }
      }

      return success;
    } catch (error) {
      console.error('Failed to delete file:', error);
      throw error;
    }
  }
|
|
|
|
  /**
   * Deletes a folder, drops any unsaved files under it, and — when the open
   * document lived inside it — selects the first remaining file (or clears
   * the selection). Logs and rethrows on failure.
   */
  async deleteFolder(folderPath: string) {
    try {
      // Capture before deletion so we know whether to move the selection.
      const currentDocument = this.currentDocument.get();
      const isInCurrentFolder = currentDocument?.filePath?.startsWith(folderPath + '/');

      const success = await this.#filesStore.deleteFolder(folderPath);

      if (success) {
        // Rebuild the unsaved set without files under the deleted folder.
        const unsavedFiles = this.unsavedFiles.get();
        const newUnsavedFiles = new Set<string>();

        for (const file of unsavedFiles) {
          if (!file.startsWith(folderPath + '/')) {
            newUnsavedFiles.add(file);
          }
        }

        // Only notify subscribers when something was actually removed.
        if (newUnsavedFiles.size !== unsavedFiles.size) {
          this.unsavedFiles.set(newUnsavedFiles);
        }

        if (isInCurrentFolder) {
          // Fall back to the first regular file still present, if any.
          const files = this.files.get();
          let nextFile: string | undefined = undefined;

          for (const [path, dirent] of Object.entries(files)) {
            if (dirent?.type === 'file') {
              nextFile = path;
              break;
            }
          }

          this.setSelectedFile(nextFile);
        }
      }

      return success;
    } catch (error) {
      console.error('Failed to delete folder:', error);
      throw error;
    }
  }
|
|
|
|
  /** Aborts all running actions. Currently an intentional stub. */
  abortAllActions() {
    // TODO: what do we wanna do and how do we wanna recover from this?
  }

  /**
   * Records message IDs that were replayed from history; alerts raised by
   * their action runners are suppressed (see `addArtifact`).
   */
  setReloadedMessages(messages: string[]) {
    this.#reloadedMessages = new Set(messages);
  }
|
|
|
|
  /**
   * Registers a new artifact for a message and wires up its ActionRunner.
   * Alerts from the runner are dropped for messages replayed on reload.
   * No-op when the message already has an artifact.
   */
  addArtifact({ messageId, title, id, type }: ArtifactCallbackData) {
    const artifact = this.#getArtifact(messageId);

    if (artifact) {
      return;
    }

    if (!this.artifactIdList.includes(messageId)) {
      this.artifactIdList.push(messageId);
    }

    this.artifacts.setKey(messageId, {
      id,
      title,
      closed: false,
      type,
      runner: new ActionRunner(
        webcontainer,
        () => this.boltTerminal,
        // Action alert callback.
        (alert) => {
          if (this.#reloadedMessages.has(messageId)) {
            return;
          }

          this.actionAlert.set(alert);
        },
        // Supabase alert callback.
        (alert) => {
          if (this.#reloadedMessages.has(messageId)) {
            return;
          }

          this.supabaseAlert.set(alert);
        },
        // Deploy alert callback.
        (alert) => {
          if (this.#reloadedMessages.has(messageId)) {
            return;
          }

          this.deployAlert.set(alert);
        },
      ),
    });
  }
|
|
|
|
updateArtifact({ messageId }: ArtifactCallbackData, state: Partial<ArtifactUpdateState>) {
|
|
const artifact = this.#getArtifact(messageId);
|
|
|
|
if (!artifact) {
|
|
return;
|
|
}
|
|
|
|
this.artifacts.setKey(messageId, { ...artifact, ...state });
|
|
}
|
|
  /** Queues action registration through the global serial queue. */
  addAction(data: ActionCallbackData) {
    // this._addAction(data);

    this.addToExecutionQueue(() => this._addAction(data));
  }

  /**
   * Registers an action with the owning artifact's runner.
   * Throws (via `unreachable`) when the artifact does not exist.
   */
  async _addAction(data: ActionCallbackData) {
    const { messageId } = data;

    const artifact = this.#getArtifact(messageId);

    if (!artifact) {
      unreachable('Artifact not found');
    }

    return artifact.runner.addAction(data);
  }
|
|
|
|
  /**
   * Runs an action. Streaming updates are throttled through the sampler;
   * final (non-streaming) runs go through the serial execution queue.
   */
  runAction(data: ActionCallbackData, isStreaming: boolean = false) {
    if (isStreaming) {
      this.actionStreamSampler(data, isStreaming);
    } else {
      this.addToExecutionQueue(() => this._runAction(data, isStreaming));
    }
  }
|
|
  /**
   * Executes a single action. File actions additionally mirror their content
   * into the editor, focus the file in the code view, and save it once the
   * stream has finished. Already-executed actions are skipped.
   */
  async _runAction(data: ActionCallbackData, isStreaming: boolean = false) {
    const { messageId } = data;

    const artifact = this.#getArtifact(messageId);

    if (!artifact) {
      unreachable('Artifact not found');
    }

    const action = artifact.runner.actions.get()[data.actionId];

    if (!action || action.executed) {
      return;
    }

    if (data.action.type === 'file') {
      const wc = await webcontainer;
      const fullPath = path.join(wc.workdir, data.action.filePath);

      // Bring the file being written into view.
      if (this.selectedFile.value !== fullPath) {
        this.setSelectedFile(fullPath);
      }

      if (this.currentView.value !== 'code') {
        this.currentView.set('code');
      }

      const doc = this.#editorStore.documents.get()[fullPath];

      // No editor document yet: let the runner handle the action first.
      if (!doc) {
        await artifact.runner.runAction(data, isStreaming);
      }

      this.#editorStore.updateFile(fullPath, data.action.content);

      // Persist content only once streaming has completed.
      if (!isStreaming && data.action.content) {
        await this.saveFile(fullPath);
      }

      if (!isStreaming) {
        await artifact.runner.runAction(data);
        this.resetAllFileModifications();
      }
    } else {
      await artifact.runner.runAction(data);
    }
  }
|
|
|
|
  // Throttles streamed action runs to at most one invocation per 100ms.
  actionStreamSampler = createSampler(async (data: ActionCallbackData, isStreaming: boolean = false) => {
    return await this._runAction(data, isStreaming);
  }, 100); // TODO: remove this magic number to have it configurable

  /** Looks up an artifact state by message ID. */
  #getArtifact(id: string) {
    const artifacts = this.artifacts.get();
    return artifacts[id];
  }
|
|
|
|
  /**
   * Packages all non-binary files into a zip named
   * `<project>_<timestamp-hash>.zip` and triggers a browser download.
   * NOTE(review): binary files are skipped entirely — confirm this is intended.
   */
  async downloadZip() {
    const zip = new JSZip();
    const files = this.files.get();

    // Get the project name from the description input, or use a default name
    const projectName = (description.value ?? 'project').toLocaleLowerCase().split(' ').join('_');

    // Generate a simple 6-character hash based on the current timestamp
    const timestampHash = Date.now().toString(36).slice(-6);
    const uniqueProjectName = `${projectName}_${timestampHash}`;

    for (const [filePath, dirent] of Object.entries(files)) {
      if (dirent?.type === 'file' && !dirent.isBinary) {
        const relativePath = extractRelativePath(filePath);

        // split the path into segments
        const pathSegments = relativePath.split('/');

        // if there's more than one segment, we need to create folders
        if (pathSegments.length > 1) {
          let currentFolder = zip;

          for (let i = 0; i < pathSegments.length - 1; i++) {
            currentFolder = currentFolder.folder(pathSegments[i])!;
          }
          currentFolder.file(pathSegments[pathSegments.length - 1], dirent.content);
        } else {
          // if there's only one segment, it's a file in the root
          zip.file(relativePath, dirent.content);
        }
      }
    }

    // Generate the zip file and save it
    const content = await zip.generateAsync({ type: 'blob' });
    saveAs(content, `${uniqueProjectName}.zip`);
  }
|
|
|
|
  /**
   * Mirrors all non-binary files into a local directory via the File System
   * Access API, creating intermediate directories as needed.
   *
   * @returns The relative paths of the files that were written.
   */
  async syncFiles(targetHandle: FileSystemDirectoryHandle) {
    const files = this.files.get();
    const syncedFiles = [];

    for (const [filePath, dirent] of Object.entries(files)) {
      if (dirent?.type === 'file' && !dirent.isBinary) {
        const relativePath = extractRelativePath(filePath);
        const pathSegments = relativePath.split('/');
        let currentHandle = targetHandle;

        // Walk (and create) the directory chain for nested paths.
        for (let i = 0; i < pathSegments.length - 1; i++) {
          currentHandle = await currentHandle.getDirectoryHandle(pathSegments[i], { create: true });
        }

        // create or get the file
        const fileHandle = await currentHandle.getFileHandle(pathSegments[pathSegments.length - 1], {
          create: true,
        });

        // write the file content
        const writable = await fileHandle.createWritable();
        await writable.write(dirent.content);
        await writable.close();

        syncedFiles.push(relativePath);
      }
    }

    return syncedFiles;
  }
|
|
|
|
  /**
   * Pushes the workbench's files to a GitHub repository, creating the repo
   * when it does not exist and updating its visibility when it does.
   * Retries the push up to three times with increasing delays (visibility
   * and repo-creation changes need time to propagate on GitHub's side).
   *
   * @param repoName Target repository name.
   * @param commitMessage Commit message (defaults to 'Initial commit from your app').
   * @param githubUsername Owner login; falls back to the `githubUsername` cookie.
   * @param ghToken Auth token; falls back to the `githubToken` cookie.
   * @param isPrivate Desired repository visibility.
   * @returns The repository's HTML URL.
   * @throws When credentials are missing, no files exist, or the push fails
   *   after all retries.
   */
  async pushToGitHub(
    repoName: string,
    commitMessage?: string,
    githubUsername?: string,
    ghToken?: string,
    isPrivate: boolean = false,
  ) {
    try {
      // Use cookies if username and token are not provided
      const githubToken = ghToken || Cookies.get('githubToken');
      const owner = githubUsername || Cookies.get('githubUsername');

      if (!githubToken || !owner) {
        throw new Error('GitHub token or username is not set in cookies or provided.');
      }

      // Log the isPrivate flag to verify it's being properly passed
      console.log(`pushToGitHub called with isPrivate=${isPrivate}`);

      // Initialize Octokit with the auth token
      const octokit = new Octokit({ auth: githubToken });

      // Check if the repository already exists before creating it
      let repo: RestEndpointMethodTypes['repos']['get']['response']['data'];
      let visibilityJustChanged = false;

      try {
        const resp = await octokit.repos.get({ owner, repo: repoName });
        repo = resp.data;
        console.log('Repository already exists, using existing repo');

        // Check if we need to update visibility of existing repo
        if (repo.private !== isPrivate) {
          console.log(
            `Updating repository visibility from ${repo.private ? 'private' : 'public'} to ${isPrivate ? 'private' : 'public'}`,
          );

          try {
            // Update repository visibility using the update method
            const { data: updatedRepo } = await octokit.repos.update({
              owner,
              repo: repoName,
              private: isPrivate,
            });

            console.log('Repository visibility updated successfully');
            repo = updatedRepo;
            visibilityJustChanged = true;

            // Add a delay after changing visibility to allow GitHub to fully process the change
            console.log('Waiting for visibility change to propagate...');
            await new Promise((resolve) => setTimeout(resolve, 3000)); // 3 second delay
          } catch (visibilityError) {
            console.error('Failed to update repository visibility:', visibilityError);

            // Continue with push even if visibility update fails
          }
        }
      } catch (error) {
        if (error instanceof Error && 'status' in error && error.status === 404) {
          // Repository doesn't exist, so create a new one
          console.log(`Creating new repository with private=${isPrivate}`);

          // Create new repository with specified privacy setting
          const createRepoOptions = {
            name: repoName,
            private: isPrivate,
            auto_init: true,
          };

          console.log('Create repo options:', createRepoOptions);

          const { data: newRepo } = await octokit.repos.createForAuthenticatedUser(createRepoOptions);

          console.log('Repository created:', newRepo.html_url, 'Private:', newRepo.private);
          repo = newRepo;

          // Add a small delay after creating a repository to allow GitHub to fully initialize it
          console.log('Waiting for repository to initialize...');
          await new Promise((resolve) => setTimeout(resolve, 2000)); // 2 second delay
        } else {
          console.error('Cannot create repo:', error);
          throw error; // Some other error occurred
        }
      }

      // Get all files
      const files = this.files.get();

      if (!files || Object.keys(files).length === 0) {
        throw new Error('No files found to push');
      }

      // Function to push files with retry logic
      const pushFilesToRepo = async (attempt = 1): Promise<string> => {
        const maxAttempts = 3;

        try {
          console.log(`Pushing files to repository (attempt ${attempt}/${maxAttempts})...`);

          // Create blobs for each file
          // NOTE(review): `Buffer` is a Node global — presumably polyfilled by
          // the bundler for browser use; confirm.
          const blobs = await Promise.all(
            Object.entries(files).map(async ([filePath, dirent]) => {
              if (dirent?.type === 'file' && dirent.content) {
                const { data: blob } = await octokit.git.createBlob({
                  owner: repo.owner.login,
                  repo: repo.name,
                  content: Buffer.from(dirent.content).toString('base64'),
                  encoding: 'base64',
                });
                return { path: extractRelativePath(filePath), sha: blob.sha };
              }

              return null;
            }),
          );

          const validBlobs = blobs.filter(Boolean); // Filter out any undefined blobs

          if (validBlobs.length === 0) {
            throw new Error('No valid files to push');
          }

          // Refresh repository reference to ensure we have the latest data
          const repoRefresh = await octokit.repos.get({ owner, repo: repoName });
          repo = repoRefresh.data;

          // Get the latest commit SHA (assuming main branch, update dynamically if needed)
          const { data: ref } = await octokit.git.getRef({
            owner: repo.owner.login,
            repo: repo.name,
            ref: `heads/${repo.default_branch || 'main'}`, // Handle dynamic branch
          });
          const latestCommitSha = ref.object.sha;

          // Create a new tree
          const { data: newTree } = await octokit.git.createTree({
            owner: repo.owner.login,
            repo: repo.name,
            base_tree: latestCommitSha,
            tree: validBlobs.map((blob) => ({
              path: blob!.path,
              mode: '100644',
              type: 'blob',
              sha: blob!.sha,
            })),
          });

          // Create a new commit
          const { data: newCommit } = await octokit.git.createCommit({
            owner: repo.owner.login,
            repo: repo.name,
            message: commitMessage || 'Initial commit from your app',
            tree: newTree.sha,
            parents: [latestCommitSha],
          });

          // Update the reference
          await octokit.git.updateRef({
            owner: repo.owner.login,
            repo: repo.name,
            ref: `heads/${repo.default_branch || 'main'}`, // Handle dynamic branch
            sha: newCommit.sha,
          });

          console.log('Files successfully pushed to repository');

          return repo.html_url;
        } catch (error) {
          console.error(`Error during push attempt ${attempt}:`, error);

          // If we've just changed visibility and this is not our last attempt, wait and retry
          if ((visibilityJustChanged || attempt === 1) && attempt < maxAttempts) {
            const delayMs = attempt * 2000; // Increasing delay with each attempt
            console.log(`Waiting ${delayMs}ms before retry...`);
            await new Promise((resolve) => setTimeout(resolve, delayMs));

            return pushFilesToRepo(attempt + 1);
          }

          throw error; // Rethrow if we're out of attempts
        }
      };

      // Execute the push function with retry logic
      const repoUrl = await pushFilesToRepo();

      // Return the repository URL
      return repoUrl;
    } catch (error) {
      console.error('Error pushing to GitHub:', error);
      throw error; // Rethrow the error for further handling
    }
  }
}
|
|
|
|
// Singleton workbench store shared across the app.
export const workbenchStore = new WorkbenchStore();
|