Merge branch 'canary' into feat/add-gitea-repo

This commit is contained in:
Jason Parks
2025-03-17 15:25:02 -06:00
committed by GitHub
28 changed files with 2109 additions and 1230 deletions

View File

@@ -0,0 +1,4 @@
// Barrel file: re-exports the per-engine restore helpers so callers can pull
// every restore function from a single entry point instead of the individual
// engine modules.
export { restorePostgresBackup } from "./postgres";
export { restoreMySqlBackup } from "./mysql";
export { restoreMariadbBackup } from "./mariadb";
export { restoreMongoBackup } from "./mongo";

View File

@@ -0,0 +1,56 @@
import type { Mariadb } from "@dokploy/server/services/mariadb";
import type { Destination } from "@dokploy/server/services/destination";
import {
getRemoteServiceContainer,
getServiceContainer,
} from "../docker/utils";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getS3Credentials } from "../backups/utils";
/**
 * Restores a MariaDB database from a gzipped SQL dump stored on an S3-compatible
 * destination.
 *
 * The backup is streamed with `rclone cat`, decompressed with `gunzip`, and piped
 * into the `mariadb` client running inside the service container — either on the
 * local docker host or on the remote server identified by `serverId`.
 *
 * @param mariadb - service record supplying appName, credentials and serverId
 * @param destination - S3 destination holding the backup object
 * @param database - name of the database to restore into
 * @param backupFile - object key of the backup inside the bucket
 * @param emit - progress/log callback (never receives the raw password)
 * @throws Error with the underlying message when any step of the pipeline fails
 */
export const restoreMariadbBackup = async (
	mariadb: Mariadb,
	destination: Destination,
	database: string,
	backupFile: string,
	emit: (log: string) => void,
) => {
	try {
		const { appName, databasePassword, databaseUser, serverId } = mariadb;
		const rcloneFlags = getS3Credentials(destination);
		const bucketPath = `:s3:${destination.bucket}`;
		const backupPath = `${bucketPath}/${backupFile}`;

		// Resolve the running container for this service, remotely when a
		// serverId is set, otherwise on the local docker host.
		const { Id: containerName } = serverId
			? await getRemoteServiceContainer(serverId, appName)
			: await getServiceContainer(appName);

		// Double-quote user/password/database so shell metacharacters in
		// credentials cannot break (or inject into) the pipeline.
		const restoreCommand = `
		rclone cat ${rcloneFlags.join(" ")} "${backupPath}" | gunzip | docker exec -i ${containerName} mariadb -u "${databaseUser}" -p"${databasePassword}" "${database}"
		`;

		emit("Starting restore...");
		// Mask the password before logging — the emitted command is shown to
		// users and must not leak secrets.
		emit(
			`Executing command: ${restoreCommand.replace(`-p"${databasePassword}"`, '-p"***"')}`,
		);

		if (serverId) {
			await execAsyncRemote(serverId, restoreCommand);
		} else {
			await execAsync(restoreCommand);
		}

		emit("Restore completed successfully!");
	} catch (error) {
		console.error(error);
		// Derive the message once so the emitted log and the thrown error agree.
		const message =
			error instanceof Error ? error.message : "Error restoring mariadb backup";
		emit(`Error: ${message}`);
		throw new Error(message);
	}
};

View File

@@ -0,0 +1,64 @@
import type { Mongo } from "@dokploy/server/services/mongo";
import type { Destination } from "@dokploy/server/services/destination";
import {
getRemoteServiceContainer,
getServiceContainer,
} from "../docker/utils";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getS3Credentials } from "../backups/utils";
/**
 * Restores a MongoDB database from a gzipped archive stored on an S3-compatible
 * destination.
 *
 * Unlike the SQL engines, the archive is first downloaded into a temp directory
 * (mongorestore here reads a decompressed archive file), decompressed with
 * `gunzip`, piped into `mongorestore` inside the service container, and the
 * temp directory is removed on success.
 *
 * NOTE(review): `tempDir` is a fixed path — two concurrent restores on the same
 * host would collide; consider a per-run unique suffix.
 *
 * @param mongo - service record supplying appName, credentials and serverId
 * @param destination - S3 destination holding the backup object
 * @param database - name of the database to restore into
 * @param backupFile - object key of the backup inside the bucket
 * @param emit - progress/log callback (never receives the raw password)
 * @throws Error with the underlying message when any step of the pipeline fails
 */
export const restoreMongoBackup = async (
	mongo: Mongo,
	destination: Destination,
	database: string,
	backupFile: string,
	emit: (log: string) => void,
) => {
	try {
		const { appName, databasePassword, databaseUser, serverId } = mongo;
		const rcloneFlags = getS3Credentials(destination);
		const bucketPath = `:s3:${destination.bucket}`;
		const backupPath = `${bucketPath}/${backupFile}`;

		// Resolve the running container for this service, remotely when a
		// serverId is set, otherwise on the local docker host.
		const { Id: containerName } = serverId
			? await getRemoteServiceContainer(serverId, appName)
			: await getServiceContainer(appName);

		const tempDir = "/tmp/dokploy-restore";
		const fileName = backupFile.split("/").pop() || "backup.dump.gz";
		// Strip only a TRAILING ".gz" — a plain replace(".gz", "") would also
		// mangle a ".gz" occurring in the middle of the file name.
		const decompressedName = fileName.replace(/\.gz$/, "");

		// Double-quote user/password/database so shell metacharacters in
		// credentials cannot break (or inject into) the pipeline.
		const downloadCommand = `\
rm -rf ${tempDir} && \
mkdir -p ${tempDir} && \
rclone copy ${rcloneFlags.join(" ")} "${backupPath}" ${tempDir} && \
cd ${tempDir} && \
gunzip -f "${fileName}" && \
docker exec -i ${containerName} mongorestore --username "${databaseUser}" --password "${databasePassword}" --authenticationDatabase admin --db "${database}" --archive < "${decompressedName}" && \
rm -rf ${tempDir}`;

		emit("Starting restore...");
		// Mask the password before logging — the emitted command is shown to
		// users and must not leak secrets.
		emit(
			`Executing command: ${downloadCommand.replace(`--password "${databasePassword}"`, '--password "***"')}`,
		);

		if (serverId) {
			await execAsyncRemote(serverId, downloadCommand);
		} else {
			await execAsync(downloadCommand);
		}

		emit("Restore completed successfully!");
	} catch (error) {
		console.error(error);
		// Derive the message once so the emitted log and the thrown error agree.
		const message =
			error instanceof Error ? error.message : "Error restoring mongo backup";
		emit(`Error: ${message}`);
		throw new Error(message);
	}
};

View File

@@ -0,0 +1,54 @@
import type { MySql } from "@dokploy/server/services/mysql";
import type { Destination } from "@dokploy/server/services/destination";
import {
getRemoteServiceContainer,
getServiceContainer,
} from "../docker/utils";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getS3Credentials } from "../backups/utils";
/**
 * Restores a MySQL database from a gzipped SQL dump stored on an S3-compatible
 * destination.
 *
 * The backup is streamed with `rclone cat`, decompressed with `gunzip`, and piped
 * into the `mysql` client (as root) running inside the service container —
 * either on the local docker host or on the remote server identified by
 * `serverId`.
 *
 * @param mysql - service record supplying appName, root password and serverId
 * @param destination - S3 destination holding the backup object
 * @param database - name of the database to restore into
 * @param backupFile - object key of the backup inside the bucket
 * @param emit - progress/log callback (never receives the raw password)
 * @throws Error with the underlying message when any step of the pipeline fails
 */
export const restoreMySqlBackup = async (
	mysql: MySql,
	destination: Destination,
	database: string,
	backupFile: string,
	emit: (log: string) => void,
) => {
	try {
		const { appName, databaseRootPassword, serverId } = mysql;
		const rcloneFlags = getS3Credentials(destination);
		const bucketPath = `:s3:${destination.bucket}`;
		const backupPath = `${bucketPath}/${backupFile}`;

		// Resolve the running container for this service, remotely when a
		// serverId is set, otherwise on the local docker host.
		const { Id: containerName } = serverId
			? await getRemoteServiceContainer(serverId, appName)
			: await getServiceContainer(appName);

		// Double-quote password/database so shell metacharacters in the root
		// password cannot break (or inject into) the pipeline.
		const restoreCommand = `
		rclone cat ${rcloneFlags.join(" ")} "${backupPath}" | gunzip | docker exec -i ${containerName} mysql -u root -p"${databaseRootPassword}" "${database}"
		`;

		emit("Starting restore...");
		// Mask the password before logging — the emitted command is shown to
		// users and must not leak secrets.
		emit(
			`Executing command: ${restoreCommand.replace(`-p"${databaseRootPassword}"`, '-p"***"')}`,
		);

		if (serverId) {
			await execAsyncRemote(serverId, restoreCommand);
		} else {
			await execAsync(restoreCommand);
		}

		emit("Restore completed successfully!");
	} catch (error) {
		console.error(error);
		// Derive the message once so the emitted log and the thrown error agree.
		const message =
			error instanceof Error ? error.message : "Error restoring mysql backup";
		emit(`Error: ${message}`);
		throw new Error(message);
	}
};

View File

@@ -0,0 +1,60 @@
import type { Postgres } from "@dokploy/server/services/postgres";
import type { Destination } from "@dokploy/server/services/destination";
import {
getRemoteServiceContainer,
getServiceContainer,
} from "../docker/utils";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getS3Credentials } from "../backups/utils";
/**
 * Restores a PostgreSQL database from a gzipped dump stored on an S3-compatible
 * destination.
 *
 * The backup is streamed with `rclone cat`, decompressed with `gunzip`, and piped
 * into `pg_restore --clean --if-exists` inside the service container — either on
 * the local docker host or on the remote server identified by `serverId`.
 * Both stdout and stderr of the restore are forwarded to `emit`.
 *
 * @param postgres - service record supplying appName, databaseUser and serverId
 * @param destination - S3 destination holding the backup object
 * @param database - name of the database to restore into
 * @param backupFile - object key of the backup inside the bucket
 * @param emit - progress/log callback
 * @throws rethrows the underlying error (stack preserved) after emitting it
 */
export const restorePostgresBackup = async (
	postgres: Postgres,
	destination: Destination,
	database: string,
	backupFile: string,
	emit: (log: string) => void,
) => {
	try {
		const { appName, databaseUser, serverId } = postgres;
		const rcloneFlags = getS3Credentials(destination);
		const bucketPath = `:s3:${destination.bucket}`;
		const backupPath = `${bucketPath}/${backupFile}`;

		// Resolve the running container for this service, remotely when a
		// serverId is set, otherwise on the local docker host.
		const { Id: containerName } = serverId
			? await getRemoteServiceContainer(serverId, appName)
			: await getServiceContainer(appName);

		emit("Starting restore...");
		emit(`Backup path: ${backupPath}`);

		// Double-quote user/database so shell metacharacters in them cannot
		// break (or inject into) the pipeline.
		const command = `\
		rclone cat ${rcloneFlags.join(" ")} "${backupPath}" | gunzip | docker exec -i ${containerName} pg_restore -U "${databaseUser}" -d "${database}" --clean --if-exists`;
		emit(`Executing command: ${command}`);

		// Forward pg_restore output to the caller; no stray console.log debug
		// statements — emit is the single reporting channel.
		const { stdout, stderr } = serverId
			? await execAsyncRemote(serverId, command)
			: await execAsync(command);
		emit(stdout);
		emit(stderr);

		emit("Restore completed successfully!");
	} catch (error) {
		emit(
			`Error: ${
				error instanceof Error
					? error.message
					: "Error restoring postgres backup"
			}`,
		);
		// Rethrow the original error so the caller keeps the full stack trace.
		throw error;
	}
};