refactor(multi-server): wip remote backups

This commit is contained in:
Mauricio Siu
2024-09-18 23:03:54 -06:00
parent a46e7759b2
commit 79f39db502
4 changed files with 63 additions and 10 deletions

View File

@@ -8,7 +8,10 @@ import {
import { runMariadbBackup } from "@/server/utils/backups/mariadb";
import { runMongoBackup } from "@/server/utils/backups/mongo";
import { runMySqlBackup } from "@/server/utils/backups/mysql";
import { runPostgresBackup } from "@/server/utils/backups/postgres";
import {
runPostgresBackup,
runRemotePostgresBackup,
} from "@/server/utils/backups/postgres";
import {
removeScheduleBackup,
scheduleBackup,
@@ -89,7 +92,12 @@ export const backupRouter = createTRPCRouter({
try {
const backup = await findBackupById(input.backupId);
const postgres = await findPostgresByBackupId(backup.backupId);
await runPostgresBackup(postgres, backup);
if (postgres.serverId) {
await runRemotePostgresBackup(postgres, backup);
} else {
await runPostgresBackup(postgres, backup);
}
return true;
} catch (error) {

View File

@@ -3,7 +3,10 @@ import path from "node:path";
import type { BackupSchedule } from "@/server/api/services/backup";
import type { Postgres } from "@/server/api/services/postgres";
import { findProjectById } from "@/server/api/services/project";
import { getServiceContainer } from "../docker/utils";
import {
getRemoteServiceContainer,
getServiceContainer,
} from "../docker/utils";
import { sendDatabaseBackupNotifications } from "../notifications/database-backup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { uploadToS3 } from "./utils";
@@ -71,9 +74,21 @@ export const runRemotePostgresBackup = async (
const bucketDestination = path.join(prefix, backupFileName);
const containerPath = `/backup/${backupFileName}`;
const hostPath = `./${backupFileName}`;
const { accessKey, secretAccessKey, bucket, region, endpoint } = destination;
const rcloneDestination = `s3:${bucket}:${prefix}/${backupFileName}`;
try {
const { Id: containerId } = await getServiceContainer(appName);
const { Id: containerId } = await getRemoteServiceContainer(
postgres.serverId,
appName,
);
const pgDumpCommand = `pg_dump -Fc --no-acl --no-owner -h localhost -U ${databaseUser} --no-password '${database}' | gzip`;
const rcloneCommand = `rclone rcat --s3-provider AWS \
--s3-access-key-id ${accessKey} \
--s3-secret-access-key ${secretAccessKey} \
--s3-region ${region} \
--s3-endpoint ${endpoint} \
--buffer-size 16M ${rcloneDestination}`;
// const rcloneCommand = `rclone rcat --buffer-size 16M ${rcloneDestination}`;
// const command = `
@@ -82,14 +97,14 @@ export const runRemotePostgresBackup = async (
await execAsyncRemote(
postgres.serverId,
`docker exec ${containerId} /bin/bash -c "rm -rf /backup && mkdir -p /backup"`,
`docker exec ${containerId} /bin/bash -c "${pgDumpCommand} | ${rcloneCommand}"`,
);
// await execAsync(
// `docker exec ${containerId} sh -c "pg_dump -Fc --no-acl --no-owner -h localhost -U ${databaseUser} --no-password '${database}' | gzip > ${containerPath}"`,
// );
await execAsync(`docker cp ${containerId}:${containerPath} ${hostPath}`);
// await execAsync(`docker cp ${containerId}:${containerPath} ${hostPath}`);
await uploadToS3(destination, bucketDestination, hostPath);
// await uploadToS3(destination, bucketDestination, hostPath);
await sendDatabaseBackupNotifications({
applicationName: name,
projectName: project.name,
@@ -97,6 +112,7 @@ export const runRemotePostgresBackup = async (
type: "success",
});
} catch (error) {
console.log(error);
await sendDatabaseBackupNotifications({
applicationName: name,
projectName: project.name,
@@ -108,6 +124,5 @@ export const runRemotePostgresBackup = async (
throw error;
} finally {
await unlink(hostPath);
}
};

View File

@@ -6,7 +6,7 @@ import { scheduleJob, scheduledJobs } from "node-schedule";
import { runMariadbBackup } from "./mariadb";
import { runMongoBackup } from "./mongo";
import { runMySqlBackup } from "./mysql";
import { runPostgresBackup } from "./postgres";
import { runPostgresBackup, runRemotePostgresBackup } from "./postgres";
export const uploadToS3 = async (
destination: Destination,
@@ -42,7 +42,11 @@ export const scheduleBackup = (backup: BackupSchedule) => {
backup;
scheduleJob(backupId, schedule, async () => {
if (databaseType === "postgres" && postgres) {
await runPostgresBackup(postgres, backup);
if (postgres.serverId) {
await runRemotePostgresBackup(postgres, backup);
} else {
await runPostgresBackup(postgres, backup);
}
} else if (databaseType === "mysql" && mysql) {
await runMySqlBackup(mysql, backup);
} else if (databaseType === "mongo" && mongo) {

View File

@@ -474,3 +474,29 @@ export const getServiceContainer = async (appName: string) => {
throw error;
}
};
/**
 * Find the running container backing a swarm service on a remote server.
 *
 * @param serverId - Identifier of the remote server whose Docker daemon is queried.
 * @param appName - Swarm service name (matched via the
 *   `com.docker.swarm.service.name` container label).
 * @returns The first matching container summary from `listContainers`.
 * @throws Error when no running container carries the service label.
 */
export const getRemoteServiceContainer = async (
	serverId: string,
	appName: string,
) => {
	// Only running containers that belong to the swarm service `appName`.
	const filter = {
		status: ["running"],
		label: [`com.docker.swarm.service.name=${appName}`],
	};
	const remoteDocker = await getRemoteDocker(serverId);
	const containers = await remoteDocker.listContainers({
		filters: JSON.stringify(filter),
	});
	// Single guarded access replaces the redundant length + index checks;
	// the original try/catch only rethrew, adding no context, so it is removed.
	const container = containers[0];
	if (!container) {
		throw new Error(`No container found with name: ${appName}`);
	}
	return container;
};