Mirror of https://github.com/Dokploy/dokploy, synced 2025-06-26 18:27:59 +00:00.
Implement web server backup and restore functionality: add new backup and restore methods for web servers, including S3 integration and improved logging. Refactor existing backup process to support web server type and streamline temporary file management.
parent: f008a45bf2
commit: ff3e067866
@@ -246,34 +246,30 @@ export const backupRouter = createTRPCRouter({
 				const s3Path = `:s3:${destination.bucket}/${backup.prefix}${backupFileName}`;

 				try {
-					// Create temp directory structure
-					console.log("Creating temp directory structure...");
 					await execAsync(`mkdir -p ${tempDir}/filesystem`);

-					// Backup database
 					const postgresCommand = `docker exec $(docker ps --filter "name=dokploy-postgres" -q) pg_dump -v -Fc -U dokploy -d dokploy > ${tempDir}/database.sql`;
-					console.log("Executing database backup command:", postgresCommand);
 					await execAsync(postgresCommand);

-					// Backup filesystem (excluding temp directory)
-					console.log("Copying filesystem...");
-					await execAsync(`cp -r /etc/dokploy/* ${tempDir}/filesystem/`);
+					await execAsync(`cp -r ${BASE_PATH}/* ${tempDir}/filesystem/`);

-					// Create zip file
-					console.log("Creating zip file...");
 					await execAsync(
 						`cd ${tempDir} && zip -r ${backupFileName} database.sql filesystem/`,
 					);

-					// Show zip contents and size
-					// console.log(`unzip -l ${tempDir}/${backupFileName}`);
+					// // Show zip contents and size
+					// console.log("Zip file contents:");
 					// await execAsync(`unzip -l ${tempDir}/${backupFileName}`);
 					// await execAsync(`du -sh ${tempDir}/${backupFileName}`);

+					// Upload to S3
+					const uploadCommand = `rclone copyto ${rcloneFlags.join(" ")} "${tempDir}/${backupFileName}" "${s3Path}"`;
+					await execAsync(uploadCommand);
 					return true;
 				} finally {
-					// Keep the temp directory for inspection
-					console.log("Backup files are in:", tempDir);
+					// Cleanup temporary files
+					console.log("Cleaning up temporary files...");
+					await execAsync(`rm -rf ${tempDir}`);
 				}
 			} catch (error) {
 				console.error("Backup error:", error);
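Both the upload here and the new backup module below lean on rclone's on-the-fly `:s3:` remote syntax, which requires the S3 credentials to be passed as flags on every invocation rather than read from a configured remote. As a rough sketch of what getS3Credentials presumably assembles (the Destination field names are assumptions; the real helper lives in packages/server/src/utils/backups/utils.ts and is not part of this diff):

import type { Destination } from "@dokploy/server/services/destination";

// Sketch only: accessKey/secretAccessKey/region/endpoint are assumed field names.
const getS3CredentialsSketch = (destination: Destination): string[] => [
	`--s3-access-key-id=${destination.accessKey}`,
	`--s3-secret-access-key=${destination.secretAccessKey}`,
	`--s3-region=${destination.region}`,
	`--s3-endpoint=${destination.endpoint}`,
];

// With the flags inlined, ":s3:<bucket>/<path>" resolves without any prior
// `rclone config`, so the upload command built above expands to roughly:
//   rclone copyto --s3-access-key-id=... "<tempDir>/<backupFileName>" ":s3:<bucket>/<prefix><backupFileName>"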
@@ -428,6 +424,17 @@ export const backupRouter = createTRPCRouter({
 						},
 					);
 				});
+			} else if (input.databaseType === "web-server") {
+				return observable<string>((emit) => {
+					restoreWebServerBackup(
+						webServer,
+						destination,
+						input.backupFile,
+						(log) => {
+							emit.next(log);
+						},
+					);
+				});
 			}

 			return true;
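The new web-server branch streams restore logs back to the caller through tRPC's observable helper, so a client consumes them as a subscription rather than a single response. A minimal client-side sketch, assuming a procedure named restoreBackupWithLogs and this input shape (neither is shown in this hunk):

// Hypothetical procedure name and input; only emit.next(log) above is from the commit.
const subscription = trpcClient.backup.restoreBackupWithLogs.subscribe(
	{ databaseType: "web-server", backupFile: "webserver-backup-<timestamp>.zip" },
	{
		onData: (log: string) => console.log(log), // each emit.next(log) arrives here
		onError: (err) => console.error(err),
		onComplete: () => console.log("restore log stream closed"),
	},
);
// subscription.unsubscribe() stops listening; it does not cancel the restore itself.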
packages/server/src/utils/backups/web-server.ts (new file, 39 lines)
@@ -0,0 +1,39 @@
+import type { BackupSchedule } from "@dokploy/server/services/backup";
+import { execAsync } from "../process/execAsync";
+import { getS3Credentials } from "./utils";
+import { findDestinationById } from "@dokploy/server/services/destination";
+import { paths } from "@dokploy/server/constants";
+
+export const runWebServerBackup = async (backup: BackupSchedule) => {
+	try {
+		const destination = await findDestinationById(backup.destinationId);
+		const rcloneFlags = getS3Credentials(destination);
+		const timestamp = new Date().toISOString().replace(/[:.]/g, "-");
+		const { BASE_PATH } = paths();
+		const tempDir = `${BASE_PATH}/temp-backup-${timestamp}`;
+		const backupFileName = `webserver-backup-${timestamp}.zip`;
+		const s3Path = `:s3:${destination.bucket}/${backup.prefix}${backupFileName}`;
+
+		try {
+			await execAsync(`mkdir -p ${tempDir}/filesystem`);
+
+			const postgresCommand = `docker exec $(docker ps --filter "name=dokploy-postgres" -q) pg_dump -v -Fc -U dokploy -d dokploy > ${tempDir}/database.sql`;
+			await execAsync(postgresCommand);
+
+			await execAsync(`cp -r ${BASE_PATH}/* ${tempDir}/filesystem/`);
+
+			await execAsync(
+				`cd ${tempDir} && zip -r ${backupFileName} database.sql filesystem/`,
+			);
+
+			const uploadCommand = `rclone copyto ${rcloneFlags.join(" ")} "${tempDir}/${backupFileName}" "${s3Path}"`;
+			await execAsync(uploadCommand);
+			return true;
+		} finally {
+			await execAsync(`rm -rf ${tempDir}`);
+		}
+	} catch (error) {
+		console.error("Backup error:", error);
+		throw error;
+	}
+};
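runWebServerBackup is self-contained: it resolves its own destination, and the inner finally guarantees the temp directory is removed even when pg_dump, zip, or the upload fails. A minimal sketch of wiring it into a recurring job, assuming node-cron and a cronExpression field on the schedule (neither appears in this commit):

import { schedule } from "node-cron"; // assumed dependency
import type { BackupSchedule } from "@dokploy/server/services/backup";
import { runWebServerBackup } from "./web-server";

export const scheduleWebServerBackup = (backup: BackupSchedule) => {
	// backup.cronExpression is a hypothetical field used for illustration.
	schedule(backup.cronExpression, async () => {
		try {
			await runWebServerBackup(backup);
		} catch {
			// runWebServerBackup already logs and rethrows; swallowing here keeps
			// one failed run from killing the recurring job.
		}
	});
};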
packages/server/src/utils/restore/web-server.ts (new file, 57 lines)
@@ -0,0 +1,57 @@
+import type { Destination } from "@dokploy/server/services/destination";
+import { getS3Credentials } from "../backups/utils";
+import {
+	getRemoteServiceContainer,
+	getServiceContainer,
+} from "../docker/utils";
+import { execAsync, execAsyncRemote } from "../process/execAsync";
+
+export const restoreWebServerBackup = async (
+	destination: Destination,
+	backupFile: string,
+	emit: (log: string) => void,
+) => {
+	try {
+		const { appName, databaseUser, serverId } = postgres;
+
+		const rcloneFlags = getS3Credentials(destination);
+		const bucketPath = `:s3:${destination.bucket}`;
+
+		const backupPath = `${bucketPath}/${backupFile}`;
+
+		const { Id: containerName } = serverId
+			? await getRemoteServiceContainer(serverId, appName)
+			: await getServiceContainer(appName);
+
+		emit("Starting restore...");
+		emit(`Backup path: ${backupPath}`);
+
+		const command = `\
+rclone cat ${rcloneFlags.join(" ")} "${backupPath}" | gunzip | docker exec -i ${containerName} pg_restore -U ${databaseUser} -d ${database} --clean --if-exists`;
+
+		emit(`Executing command: ${command}`);
+
+		if (serverId) {
+			const { stdout, stderr } = await execAsyncRemote(serverId, command);
+			emit(stdout);
+			emit(stderr);
+		} else {
+			const { stdout, stderr } = await execAsync(command);
+			console.log("stdout", stdout);
+			console.log("stderr", stderr);
+			emit(stdout);
+			emit(stderr);
+		}
+
+		emit("Restore completed successfully!");
+	} catch (error) {
+		emit(
+			`Error: ${
+				error instanceof Error
+					? error.message
+					: "Error restoring postgres backup"
+			}`,
+		);
+		throw error;
+	}
+};
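To make the assembled restore pipeline concrete: with an illustrative bucket of "backups", a backupFile of "postgres/db-2025-01-01.sql.gz", a resolved container name of "dokploy-postgres.1.abc", and "dokploy" for the destructured databaseUser and database (all made-up values), the command string comes out roughly as:

// Every concrete value below is illustrative, not taken from the commit.
const exampleCommand = `\
rclone cat --s3-access-key-id=... ":s3:backups/postgres/db-2025-01-01.sql.gz" \
| gunzip \
| docker exec -i dokploy-postgres.1.abc pg_restore -U dokploy -d dokploy --clean --if-exists`;
// rclone cat streams the object to stdout, gunzip decompresses it, and
// pg_restore replays the dump inside the container; --clean --if-exists drops
// existing objects first so a re-run restores cleanly over a non-empty database.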