Implement enhanced backup logging and deployment tracking for database backups

- Refactored backup utilities for MariaDB, MongoDB, MySQL, and PostgreSQL to include detailed logging of command execution, improving traceability and easing debugging.
- Introduced deployment tracking for backups, allowing for better management of backup statuses and notifications.
- Updated backup command execution to use writable streams, ensuring efficient data handling during backup processes.
- Enhanced error handling and logging for backup operations, providing clearer feedback on success or failure.
This commit is contained in:
Mauricio Siu 2025-05-03 13:33:37 -06:00
parent 89306a7619
commit a8159e5f99
6 changed files with 201 additions and 24 deletions

View File

@ -92,8 +92,8 @@ export const runComposeBackup = async (
await execAsyncStream(
`${backupCommand} | ${rcloneCommand}`,
(data) => {
if (writeStream.write(data)) {
console.log(data);
if (writeStream.writable) {
writeStream.write(data);
}
},
{

View File

@ -3,8 +3,17 @@ import type { Mariadb } from "@dokploy/server/services/mariadb";
import { findProjectById } from "@dokploy/server/services/project";
import { getServiceContainer } from "../docker/utils";
import { sendDatabaseBackupNotifications } from "../notifications/database-backup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getS3Credentials, normalizeS3Path } from "./utils";
import { execAsyncRemote, execAsyncStream } from "../process/execAsync";
import {
getMariadbBackupCommand,
getS3Credentials,
normalizeS3Path,
} from "./utils";
import {
createDeploymentBackup,
updateDeploymentStatus,
} from "@dokploy/server/services/deployment";
import { createWriteStream } from "node:fs";
export const runMariadbBackup = async (
mariadb: Mariadb,
@ -16,7 +25,11 @@ export const runMariadbBackup = async (
const destination = backup.destination;
const backupFileName = `${new Date().toISOString()}.sql.gz`;
const bucketDestination = `${normalizeS3Path(prefix)}${backupFileName}`;
const deployment = await createDeploymentBackup({
backupId: backup.backupId,
title: "MariaDB Backup",
description: "MariaDB Backup",
});
try {
const rcloneFlags = getS3Credentials(destination);
const rcloneDestination = `:s3:${destination.bucket}/${bucketDestination}`;
@ -27,15 +40,43 @@ export const runMariadbBackup = async (
);
const rcloneCommand = `rclone rcat ${rcloneFlags.join(" ")} "${rcloneDestination}"`;
const command = getMariadbBackupCommand(
containerId,
database,
databaseUser,
databasePassword || "",
);
if (mariadb.serverId) {
const mariadbDumpCommand = `docker exec ${containerId} sh -c "mariadb-dump --user='${databaseUser}' --password='${databasePassword}' --databases ${database} | gzip"`;
await execAsyncRemote(
mariadb.serverId,
`${mariadbDumpCommand} | ${rcloneCommand}`,
`
set -e;
echo "Running command." >> ${deployment.logPath};
export RCLONE_LOG_LEVEL=DEBUG;
${command} | ${rcloneCommand} >> ${deployment.logPath} 2>> ${deployment.logPath} || {
echo "❌ Command failed" >> ${deployment.logPath};
exit 1;
}
echo "✅ Command executed successfully" >> ${deployment.logPath};
`,
);
} else {
const mariadbDumpCommand = `docker exec ${containerId} sh -c "mariadb-dump --user='${databaseUser}' --password='${databasePassword}' --databases ${database} | gzip"`;
await execAsync(`${mariadbDumpCommand} | ${rcloneCommand}`);
const writeStream = createWriteStream(deployment.logPath, { flags: "a" });
await execAsyncStream(
`${command} | ${rcloneCommand}`,
(data) => {
if (writeStream.writable) {
writeStream.write(data);
}
},
{
env: {
...process.env,
RCLONE_LOG_LEVEL: "DEBUG",
},
},
);
}
await sendDatabaseBackupNotifications({
@ -45,6 +86,7 @@ export const runMariadbBackup = async (
type: "success",
organizationId: project.organizationId,
});
await updateDeploymentStatus(deployment.deploymentId, "done");
} catch (error) {
console.log(error);
await sendDatabaseBackupNotifications({
@ -56,6 +98,7 @@ export const runMariadbBackup = async (
errorMessage: error?.message || "Error message not provided",
organizationId: project.organizationId,
});
await updateDeploymentStatus(deployment.deploymentId, "error");
throw error;
}
};

View File

@ -3,12 +3,17 @@ import type { Mongo } from "@dokploy/server/services/mongo";
import { findProjectById } from "@dokploy/server/services/project";
import { getServiceContainer } from "../docker/utils";
import { sendDatabaseBackupNotifications } from "../notifications/database-backup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { execAsyncRemote, execAsyncStream } from "../process/execAsync";
import {
getMongoBackupCommand,
getS3Credentials,
normalizeS3Path,
} from "./utils";
import {
createDeploymentBackup,
updateDeploymentStatus,
} from "@dokploy/server/services/deployment";
import { createWriteStream } from "node:fs";
export const runMongoBackup = async (mongo: Mongo, backup: BackupSchedule) => {
const { appName, databasePassword, databaseUser, projectId, name } = mongo;
@ -17,7 +22,11 @@ export const runMongoBackup = async (mongo: Mongo, backup: BackupSchedule) => {
const destination = backup.destination;
const backupFileName = `${new Date().toISOString()}.dump.gz`;
const bucketDestination = `${normalizeS3Path(prefix)}${backupFileName}`;
const deployment = await createDeploymentBackup({
backupId: backup.backupId,
title: "MongoDB Backup",
description: "MongoDB Backup",
});
try {
const rcloneFlags = getS3Credentials(destination);
const rcloneDestination = `:s3:${destination.bucket}/${bucketDestination}`;
@ -35,9 +44,37 @@ export const runMongoBackup = async (mongo: Mongo, backup: BackupSchedule) => {
databasePassword || "",
);
if (mongo.serverId) {
await execAsyncRemote(mongo.serverId, `${command} | ${rcloneCommand}`);
await execAsyncRemote(
mongo.serverId,
`
set -e;
echo "Running command." >> ${deployment.logPath};
export RCLONE_LOG_LEVEL=DEBUG;
${command} | ${rcloneCommand} >> ${deployment.logPath} 2>> ${deployment.logPath} || {
echo "❌ Command failed" >> ${deployment.logPath};
exit 1;
}
echo "✅ Command executed successfully" >> ${deployment.logPath};
`,
);
} else {
await execAsync(`${command} | ${rcloneCommand}`);
const writeStream = createWriteStream(deployment.logPath, { flags: "a" });
await execAsyncStream(
`${command} | ${rcloneCommand}`,
(data) => {
if (writeStream.writable) {
writeStream.write(data);
}
},
{
env: {
...process.env,
RCLONE_LOG_LEVEL: "DEBUG",
},
},
);
writeStream.write("Backup done✅");
writeStream.end();
}
await sendDatabaseBackupNotifications({
@ -47,6 +84,7 @@ export const runMongoBackup = async (mongo: Mongo, backup: BackupSchedule) => {
type: "success",
organizationId: project.organizationId,
});
await updateDeploymentStatus(deployment.deploymentId, "done");
} catch (error) {
console.log(error);
await sendDatabaseBackupNotifications({
@ -58,6 +96,7 @@ export const runMongoBackup = async (mongo: Mongo, backup: BackupSchedule) => {
errorMessage: error?.message || "Error message not provided",
organizationId: project.organizationId,
});
await updateDeploymentStatus(deployment.deploymentId, "error");
throw error;
}
};

View File

@ -3,12 +3,17 @@ import type { MySql } from "@dokploy/server/services/mysql";
import { findProjectById } from "@dokploy/server/services/project";
import { getServiceContainer } from "../docker/utils";
import { sendDatabaseBackupNotifications } from "../notifications/database-backup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { execAsyncRemote, execAsyncStream } from "../process/execAsync";
import {
getMysqlBackupCommand,
getS3Credentials,
normalizeS3Path,
} from "./utils";
import { createWriteStream } from "node:fs";
import {
createDeploymentBackup,
updateDeploymentStatus,
} from "@dokploy/server/services/deployment";
export const runMySqlBackup = async (mysql: MySql, backup: BackupSchedule) => {
const { appName, databaseRootPassword, projectId, name } = mysql;
@ -17,7 +22,11 @@ export const runMySqlBackup = async (mysql: MySql, backup: BackupSchedule) => {
const destination = backup.destination;
const backupFileName = `${new Date().toISOString()}.sql.gz`;
const bucketDestination = `${normalizeS3Path(prefix)}${backupFileName}`;
const deployment = await createDeploymentBackup({
backupId: backup.backupId,
title: "MySQL Backup",
description: "MySQL Backup",
});
try {
const rcloneFlags = getS3Credentials(destination);
const rcloneDestination = `:s3:${destination.bucket}/${bucketDestination}`;
@ -34,9 +43,37 @@ export const runMySqlBackup = async (mysql: MySql, backup: BackupSchedule) => {
databaseRootPassword || "",
);
if (mysql.serverId) {
await execAsyncRemote(mysql.serverId, `${command} | ${rcloneCommand}`);
await execAsyncRemote(
mysql.serverId,
`
set -e;
echo "Running command." >> ${deployment.logPath};
export RCLONE_LOG_LEVEL=DEBUG;
${command} | ${rcloneCommand} >> ${deployment.logPath} 2>> ${deployment.logPath} || {
echo "❌ Command failed" >> ${deployment.logPath};
exit 1;
}
echo "✅ Command executed successfully" >> ${deployment.logPath};
`,
);
} else {
await execAsync(`${command} | ${rcloneCommand}`);
const writeStream = createWriteStream(deployment.logPath, { flags: "a" });
await execAsyncStream(
`${command} | ${rcloneCommand}`,
(data) => {
if (writeStream.writable) {
writeStream.write(data);
}
},
{
env: {
...process.env,
RCLONE_LOG_LEVEL: "DEBUG",
},
},
);
writeStream.write("Backup done✅");
writeStream.end();
}
await sendDatabaseBackupNotifications({
applicationName: name,
@ -45,6 +82,7 @@ export const runMySqlBackup = async (mysql: MySql, backup: BackupSchedule) => {
type: "success",
organizationId: project.organizationId,
});
await updateDeploymentStatus(deployment.deploymentId, "done");
} catch (error) {
console.log(error);
await sendDatabaseBackupNotifications({
@ -56,6 +94,7 @@ export const runMySqlBackup = async (mysql: MySql, backup: BackupSchedule) => {
errorMessage: error?.message || "Error message not provided",
organizationId: project.organizationId,
});
await updateDeploymentStatus(deployment.deploymentId, "error");
throw error;
}
};

View File

@ -3,13 +3,17 @@ import type { Postgres } from "@dokploy/server/services/postgres";
import { findProjectById } from "@dokploy/server/services/project";
import { getServiceContainer } from "../docker/utils";
import { sendDatabaseBackupNotifications } from "../notifications/database-backup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { execAsyncRemote, execAsyncStream } from "../process/execAsync";
import {
getPostgresBackupCommand,
getS3Credentials,
normalizeS3Path,
} from "./utils";
import { createDeploymentBackup } from "@dokploy/server/services/deployment";
import {
createDeploymentBackup,
updateDeploymentStatus,
} from "@dokploy/server/services/deployment";
import { createWriteStream } from "node:fs";
export const runPostgresBackup = async (
postgres: Postgres,
@ -45,9 +49,37 @@ export const runPostgresBackup = async (
);
if (postgres.serverId) {
await execAsyncRemote(postgres.serverId, `${command} | ${rcloneCommand}`);
await execAsyncRemote(
postgres.serverId,
`
set -e;
echo "Running command." >> ${deployment.logPath};
export RCLONE_LOG_LEVEL=DEBUG;
${command} | ${rcloneCommand} >> ${deployment.logPath} 2>> ${deployment.logPath} || {
echo "❌ Command failed" >> ${deployment.logPath};
exit 1;
}
echo "✅ Command executed successfully" >> ${deployment.logPath};
`,
);
} else {
await execAsync(`${command} | ${rcloneCommand}`);
const writeStream = createWriteStream(deployment.logPath, { flags: "a" });
await execAsyncStream(
`${command} | ${rcloneCommand}`,
(data) => {
if (writeStream.writable) {
writeStream.write(data);
}
},
{
env: {
...process.env,
RCLONE_LOG_LEVEL: "DEBUG",
},
},
);
writeStream.write("Backup done✅");
writeStream.end();
}
await sendDatabaseBackupNotifications({
@ -57,6 +89,8 @@ export const runPostgresBackup = async (
type: "success",
organizationId: project.organizationId,
});
await updateDeploymentStatus(deployment.deploymentId, "done");
} catch (error) {
await sendDatabaseBackupNotifications({
applicationName: name,
@ -68,6 +102,8 @@ export const runPostgresBackup = async (
organizationId: project.organizationId,
});
await updateDeploymentStatus(deployment.deploymentId, "error");
throw error;
} finally {
}

View File

@ -6,12 +6,25 @@ import { IS_CLOUD, paths } from "@dokploy/server/constants";
import { mkdtemp } from "node:fs/promises";
import { join } from "node:path";
import { tmpdir } from "node:os";
import {
createDeploymentBackup,
updateDeploymentStatus,
} from "@dokploy/server/services/deployment";
import { createWriteStream } from "node:fs";
export const runWebServerBackup = async (backup: BackupSchedule) => {
if (IS_CLOUD) {
return;
}
const deployment = await createDeploymentBackup({
backupId: backup.backupId,
title: "Web Server Backup",
description: "Web Server Backup",
});
const writeStream = createWriteStream(deployment.logPath, { flags: "a" });
try {
if (IS_CLOUD) {
return;
}
const destination = await findDestinationById(backup.destinationId);
const rcloneFlags = getS3Credentials(destination);
const timestamp = new Date().toISOString().replace(/[:.]/g, "-");
@ -46,12 +59,19 @@ export const runWebServerBackup = async (backup: BackupSchedule) => {
const uploadCommand = `rclone copyto ${rcloneFlags.join(" ")} "${tempDir}/${backupFileName}" "${s3Path}"`;
await execAsync(uploadCommand);
writeStream.write("Backup done✅");
writeStream.end();
await updateDeploymentStatus(deployment.deploymentId, "done");
return true;
} finally {
await execAsync(`rm -rf ${tempDir}`);
}
} catch (error) {
console.error("Backup error:", error);
writeStream.write("Backup error❌");
writeStream.write(error instanceof Error ? error.message : "Unknown error");
writeStream.end();
await updateDeploymentStatus(deployment.deploymentId, "error");
throw error;
}
};