Mirror of https://github.com/Dokploy/dokploy (synced 2025-06-26 18:27:59 +00:00)

Commit 297439a348 (parent ff3e067866)

Update the restore-backup component and the backup router for web server support: set the default database name based on the database type, disable the database-name input for web server restores, and streamline the backup restoration process with improved logging and error handling.
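The first two hunks below implement the form-side behavior described in the message: when the selected database type is "web-server", the database name defaults to "dokploy" and the input is disabled. As a standalone illustration only (this helper does not exist in the codebase; the commit inlines the ternary in defaultValues, and the union of database types is an assumption), the rule reduces to:

// Hypothetical helper, for illustration only — the commit writes this ternary
// directly into the form's defaultValues.
type DatabaseType = "postgres" | "mysql" | "mariadb" | "mongo" | "web-server";

export const getDefaultDatabaseName = (databaseType: DatabaseType): string =>
  // Web server restores always target Dokploy's own "dokploy" database,
  // so the field is prefilled (and disabled in the form); other types start empty.
  databaseType === "web-server" ? "dokploy" : "";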
@@ -91,7 +91,7 @@ export const RestoreBackup = ({
     defaultValues: {
       destinationId: "",
       backupFile: "",
-      databaseName: "",
+      databaseName: databaseType === "web-server" ? "dokploy" : "",
     },
     resolver: zodResolver(RestoreBackupSchema),
   });
@@ -340,7 +340,11 @@ export const RestoreBackup = ({
               <FormItem className="">
                 <FormLabel>Database Name</FormLabel>
                 <FormControl>
-                  <Input {...field} placeholder="Enter database name" />
+                  <Input
+                    disabled={databaseType === "web-server"}
+                    {...field}
+                    placeholder="Enter database name"
+                  />
                 </FormControl>
                 <FormMessage />
               </FormItem>
@@ -19,13 +19,13 @@ import {
   findPostgresByBackupId,
   findPostgresById,
   findServerById,
-  paths,
   removeBackupById,
   removeScheduleBackup,
   runMariadbBackup,
   runMongoBackup,
   runMySqlBackup,
   runPostgresBackup,
+  runWebServerBackup,
   scheduleBackup,
   updateBackupById,
 } from "@dokploy/server";
@@ -41,6 +41,7 @@ import {
   restoreMongoBackup,
   restoreMySqlBackup,
   restorePostgresBackup,
+  restoreWebServerBackup,
 } from "@dokploy/server/utils/restore";
 import { TRPCError } from "@trpc/server";
 import { observable } from "@trpc/server/observable";
@@ -235,50 +236,9 @@ export const backupRouter = createTRPCRouter({
   manualBackupWebServer: protectedProcedure
     .input(apiFindOneBackup)
     .mutation(async ({ input }) => {
-      try {
       const backup = await findBackupById(input.backupId);
-        const destination = await findDestinationById(backup.destinationId);
-        const rcloneFlags = getS3Credentials(destination);
-        const timestamp = new Date().toISOString().replace(/[:.]/g, "-");
-        const { BASE_PATH } = paths();
-        const tempDir = `${BASE_PATH}/temp-backup-${timestamp}`;
-        const backupFileName = `webserver-backup-${timestamp}.zip`;
-        const s3Path = `:s3:${destination.bucket}/${backup.prefix}${backupFileName}`;
-
-        try {
-          await execAsync(`mkdir -p ${tempDir}/filesystem`);
-
-          const postgresCommand = `docker exec $(docker ps --filter "name=dokploy-postgres" -q) pg_dump -v -Fc -U dokploy -d dokploy > ${tempDir}/database.sql`;
-          await execAsync(postgresCommand);
-
-          await execAsync(`cp -r ${BASE_PATH}/* ${tempDir}/filesystem/`);
-
-          await execAsync(
-            `cd ${tempDir} && zip -r ${backupFileName} database.sql filesystem/`,
-          );
-
-          // // Show zip contents and size
-          // console.log("Zip file contents:");
-          // await execAsync(`unzip -l ${tempDir}/${backupFileName}`);
-          // await execAsync(`du -sh ${tempDir}/${backupFileName}`);
-
-          // Upload to S3
-          const uploadCommand = `rclone copyto ${rcloneFlags.join(" ")} "${tempDir}/${backupFileName}" "${s3Path}"`;
-          await execAsync(uploadCommand);
+      await runWebServerBackup(backup);
       return true;
-        } finally {
-          // Cleanup temporary files
-          console.log("Cleaning up temporary files...");
-          await execAsync(`rm -rf ${tempDir}`);
-        }
-      } catch (error) {
-        console.error("Backup error:", error);
-        throw new TRPCError({
-          code: "BAD_REQUEST",
-          message: "Error running manual Web Server backup",
-          cause: error,
-        });
-      }
     }),
   listBackupFiles: protectedProcedure
     .input(
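The simplified mutation above delegates the heavy lifting to runWebServerBackup, which this diff imports from @dokploy/server but does not show. Judging from the inline logic it replaces, a minimal sketch of that helper could look like the following; the parameter type, import paths, and exact commands are assumptions, and the real implementation in @dokploy/server (utils/backups/web-server) may differ.

// Sketch only: reconstructs the steps the mutation above used to run inline
// (pg_dump, copy the data directory, zip, upload via rclone).
// Import paths and the backup parameter shape are approximations.
import { findDestinationById } from "../../services/destination";
import { paths } from "../../constants";
import { getS3Credentials } from "./utils";
import { execAsync } from "../process/execAsync";

export const runWebServerBackup = async (backup: {
  destinationId: string;
  prefix: string;
}) => {
  const destination = await findDestinationById(backup.destinationId);
  const rcloneFlags = getS3Credentials(destination);
  const timestamp = new Date().toISOString().replace(/[:.]/g, "-");
  const { BASE_PATH } = paths();
  const tempDir = `${BASE_PATH}/temp-backup-${timestamp}`;
  const backupFileName = `webserver-backup-${timestamp}.zip`;
  const s3Path = `:s3:${destination.bucket}/${backup.prefix}${backupFileName}`;

  try {
    await execAsync(`mkdir -p ${tempDir}/filesystem`);
    // Dump Dokploy's own postgres database...
    await execAsync(
      `docker exec $(docker ps --filter "name=dokploy-postgres" -q) pg_dump -v -Fc -U dokploy -d dokploy > ${tempDir}/database.sql`,
    );
    // ...and copy the data directory next to it.
    await execAsync(`cp -r ${BASE_PATH}/* ${tempDir}/filesystem/`);

    // Bundle both into a single zip and push it to the configured S3 destination.
    await execAsync(
      `cd ${tempDir} && zip -r ${backupFileName} database.sql filesystem/`,
    );
    await execAsync(
      `rclone copyto ${rcloneFlags.join(" ")} "${tempDir}/${backupFileName}" "${s3Path}"`,
    );
    return true;
  } finally {
    // Always clean up the temporary working directory.
    await execAsync(`rm -rf ${tempDir}`);
  }
};

Extracting the helper keeps the tRPC mutation thin and lets other callers reuse the same backup steps instead of duplicating the rclone and zip commands in the router.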
@@ -424,16 +384,12 @@ export const backupRouter = createTRPCRouter({
           },
         );
       });
-    } else if (input.databaseType === "web-server") {
+    }
+    if (input.databaseType === "web-server") {
       return observable<string>((emit) => {
-        restoreWebServerBackup(
-          webServer,
-          destination,
-          input.backupFile,
-          (log) => {
+        restoreWebServerBackup(destination, input.backupFile, (log) => {
           emit.next(log);
-          },
-        );
+        });
       });
     }
 
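The change above keeps the web-server branch on the same observable-based streaming that the database restores use: restoreWebServerBackup reports progress through its emit callback, and each emit.next(log) pushes one line to the subscriber as it happens. Below is a self-contained sketch of that pattern; fakeRestore and its log strings are placeholders, not code from this commit.

// Standalone sketch of the log-streaming pattern used in the router above.
// "fakeRestore" stands in for restoreWebServerBackup; only the shape matters.
import { observable } from "@trpc/server/observable";

const fakeRestore = async (emit: (log: string) => void) => {
  emit("Starting restore...");
  emit("Downloading backup from S3...");
  emit("Restore completed successfully!");
};

const restoreLogs = observable<string>((emit) => {
  // Kick off the long-running work and forward every progress line.
  fakeRestore((log) => emit.next(log))
    .then(() => emit.complete())
    .catch((error) => emit.error(error));
});

// A subscriber (for example the web UI) receives each line as it is emitted
// instead of waiting for the whole restore to finish.
restoreLogs.subscribe({
  next: (log) => console.log(log),
  error: (error) => console.error(error),
  complete: () => console.log("restore stream closed"),
});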
@@ -48,6 +48,7 @@ export * from "./utils/backups/mongo";
 export * from "./utils/backups/mysql";
 export * from "./utils/backups/postgres";
 export * from "./utils/backups/utils";
+export * from "./utils/backups/web-server";
 export * from "./templates/processors";
 
 export * from "./utils/notifications/build-error";
@@ -2,3 +2,4 @@ export { restorePostgresBackup } from "./postgres";
 export { restoreMySqlBackup } from "./mysql";
 export { restoreMariadbBackup } from "./mariadb";
 export { restoreMongoBackup } from "./mongo";
+export { restoreWebServerBackup } from "./web-server";
@@ -1,10 +1,7 @@
 import type { Destination } from "@dokploy/server/services/destination";
 import { getS3Credentials } from "../backups/utils";
-import {
-  getRemoteServiceContainer,
-  getServiceContainer,
-} from "../docker/utils";
-import { execAsync, execAsyncRemote } from "../process/execAsync";
+import { execAsync } from "../process/execAsync";
+import { paths } from "@dokploy/server";
 
 export const restoreWebServerBackup = async (
   destination: Destination,
@@ -12,46 +9,117 @@ export const restoreWebServerBackup = async (
   emit: (log: string) => void,
 ) => {
   try {
-    const { appName, databaseUser, serverId } = postgres;
-
     const rcloneFlags = getS3Credentials(destination);
     const bucketPath = `:s3:${destination.bucket}`;
 
     const backupPath = `${bucketPath}/${backupFile}`;
+    const { BASE_PATH } = paths();
+    const tempDir = `${BASE_PATH}/temp-restore-${new Date().toISOString().replace(/[:.]/g, "-")}`;
 
-    const { Id: containerName } = serverId
-      ? await getRemoteServiceContainer(serverId, appName)
-      : await getServiceContainer(appName);
-
+    try {
       emit("Starting restore...");
       emit(`Backup path: ${backupPath}`);
-
-    const command = `\
-rclone cat ${rcloneFlags.join(" ")} "${backupPath}" | gunzip | docker exec -i ${containerName} pg_restore -U ${databaseUser} -d ${database} --clean --if-exists`;
-
-    emit(`Executing command: ${command}`);
-
-    if (serverId) {
-      const { stdout, stderr } = await execAsyncRemote(serverId, command);
-      emit(stdout);
-      emit(stderr);
-    } else {
-      const { stdout, stderr } = await execAsync(command);
-      console.log("stdout", stdout);
-      console.log("stderr", stderr);
-      emit(stdout);
-      emit(stderr);
+      emit(`Temp directory: ${tempDir}`);
+
+      // Create temp directory
+      emit("Creating temporary directory...");
+      await execAsync(`mkdir -p ${tempDir}`);
+
+      // Download backup from S3
+      emit("Downloading backup from S3...");
+      await execAsync(
+        `rclone copyto ${rcloneFlags.join(" ")} "${backupPath}" "${tempDir}/${backupFile}"`,
+      );
+
+      // List files before extraction
+      emit("Listing files before extraction...");
+      const { stdout: beforeFiles } = await execAsync(`ls -la ${tempDir}`);
+      emit(`Files before extraction: ${beforeFiles}`);
+
+      // Extract backup
+      emit("Extracting backup...");
+      await execAsync(`cd ${tempDir} && unzip ${backupFile}`);
+
+      // Check if database.sql.gz exists and decompress it
+      const { stdout: hasGzFile } = await execAsync(
+        `ls ${tempDir}/database.sql.gz || true`,
+      );
+      if (hasGzFile.includes("database.sql.gz")) {
+        emit("Found compressed database file, decompressing...");
+        await execAsync(`cd ${tempDir} && gunzip database.sql.gz`);
       }
 
+      // Verify database file exists
+      const { stdout: hasSqlFile } = await execAsync(
+        `ls ${tempDir}/database.sql || true`,
+      );
+      if (!hasSqlFile.includes("database.sql")) {
+        throw new Error("Database file not found after extraction");
+      }
+
+      // Restore database
+      emit("Restoring database...");
+
+      // Drop and recreate database
+      emit("Disconnecting all users from database...");
+      await execAsync(
+        `docker exec $(docker ps --filter "name=dokploy-postgres" -q) psql -U dokploy postgres -c "SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity WHERE pg_stat_activity.datname = 'dokploy' AND pid <> pg_backend_pid();"`,
+      );
+
+      emit("Dropping existing database...");
+      await execAsync(
+        `docker exec $(docker ps --filter "name=dokploy-postgres" -q) psql -U dokploy postgres -c "DROP DATABASE IF EXISTS dokploy;"`,
+      );
+
+      emit("Creating fresh database...");
+      await execAsync(
+        `docker exec $(docker ps --filter "name=dokploy-postgres" -q) psql -U dokploy postgres -c "CREATE DATABASE dokploy;"`,
+      );
+
+      // Copy the backup file into the container
+      emit("Copying backup file into container...");
+      await execAsync(
+        `docker cp ${tempDir}/database.sql $(docker ps --filter "name=dokploy-postgres" -q):/tmp/database.sql`,
+      );
+
+      // Verify file in container
+      emit("Verifying file in container...");
+      await execAsync(
+        `docker exec $(docker ps --filter "name=dokploy-postgres" -q) ls -l /tmp/database.sql`,
+      );
+
+      // Restore from the copied file
+      emit("Running database restore...");
+      await execAsync(
+        `docker exec $(docker ps --filter "name=dokploy-postgres" -q) pg_restore -v -U dokploy -d dokploy /tmp/database.sql`,
+      );
+
+      // Cleanup the temporary file in the container
+      emit("Cleaning up container temp file...");
+      await execAsync(
+        `docker exec $(docker ps --filter "name=dokploy-postgres" -q) rm /tmp/database.sql`,
+      );
+
+      // Restore filesystem
+      emit("Restoring filesystem...");
+      await execAsync(`cp -r ${tempDir}/filesystem/* ${BASE_PATH}/`);
+
       emit("Restore completed successfully!");
+    } finally {
+      // Cleanup
+      emit("Cleaning up temporary files...");
+      await execAsync(`rm -rf ${tempDir}`);
+    }
   } catch (error) {
+    console.error(error);
     emit(
       `Error: ${
         error instanceof Error
           ? error.message
-          : "Error restoring postgres backup"
+          : "Error restoring web server backup"
       }`,
     );
     throw error;
   }
 };
+
+// docker exec $(docker ps --filter "name=dokploy-postgres" -q) pg_restore -v -U dokploy -d dokploy /Users/mauricio/Documents/Github/Personal/dokploy/apps/dokploy/.docker/temp-restore-2025-03-30T01-09-27-203Z/database.sql
+
+// server/webserver-backup-2025-03-30T00-38-08-836Z.zip