Merge pull request #1581 from Dokploy/709-back-ups-for-dokploy

709 back ups for dokploy
This commit is contained in:
Mauricio Siu
2025-03-29 23:27:29 -06:00
committed by GitHub
19 changed files with 11061 additions and 182 deletions

View File

@@ -61,7 +61,7 @@ type AddPostgresBackup = z.infer<typeof AddPostgresBackup1Schema>;
interface Props {
databaseId: string;
databaseType: "postgres" | "mariadb" | "mysql" | "mongo";
databaseType: "postgres" | "mariadb" | "mysql" | "mongo" | "web-server";
refetch: () => void;
}
@@ -85,7 +85,7 @@ export const AddBackup = ({ databaseId, databaseType, refetch }: Props) => {
useEffect(() => {
form.reset({
database: "",
database: databaseType === "web-server" ? "dokploy" : "",
destinationId: "",
enabled: true,
prefix: "/",
@@ -112,7 +112,11 @@ export const AddBackup = ({ databaseId, databaseType, refetch }: Props) => {
? {
mongoId: databaseId,
}
: undefined;
: databaseType === "web-server"
? {
userId: databaseId,
}
: undefined;
await createBackup({
destinationId: data.destinationId,
@@ -236,7 +240,11 @@ export const AddBackup = ({ databaseId, databaseType, refetch }: Props) => {
<FormItem>
<FormLabel>Database</FormLabel>
<FormControl>
<Input placeholder={"dokploy"} {...field} />
<Input
disabled={databaseType === "web-server"}
placeholder={"dokploy"}
{...field}
/>
</FormControl>
<FormMessage />
</FormItem>

View File

@@ -47,8 +47,8 @@ import { type LogLine, parseLogs } from "../../docker/logs/utils";
interface Props {
databaseId: string;
databaseType: Exclude<ServiceType, "application" | "redis">;
serverId: string | null;
databaseType: Exclude<ServiceType, "application" | "redis"> | "web-server";
serverId?: string | null;
}
const RestoreBackupSchema = z.object({
@@ -91,7 +91,7 @@ export const RestoreBackup = ({
defaultValues: {
destinationId: "",
backupFile: "",
databaseName: "",
databaseName: databaseType === "web-server" ? "dokploy" : "",
},
resolver: zodResolver(RestoreBackupSchema),
});
@@ -340,7 +340,11 @@ export const RestoreBackup = ({
<FormItem className="">
<FormLabel>Database Name</FormLabel>
<FormControl>
<Input {...field} placeholder="Enter database name" />
<Input
disabled={databaseType === "web-server"}
{...field}
placeholder="Enter database name"
/>
</FormControl>
<FormMessage />
</FormItem>

View File

@@ -14,7 +14,7 @@ import {
TooltipTrigger,
} from "@/components/ui/tooltip";
import { api } from "@/utils/api";
import { DatabaseBackup, Play, Trash2 } from "lucide-react";
import { Database, DatabaseBackup, Play, Trash2 } from "lucide-react";
import Link from "next/link";
import { useState } from "react";
import { toast } from "sonner";
@@ -25,7 +25,7 @@ import { UpdateBackup } from "./update-backup";
interface Props {
id: string;
type: Exclude<ServiceType, "application" | "redis">;
type: Exclude<ServiceType, "application" | "redis"> | "web-server";
}
export const ShowBackups = ({ id, type }: Props) => {
const [activeManualBackup, setActiveManualBackup] = useState<
@@ -38,6 +38,7 @@ export const ShowBackups = ({ id, type }: Props) => {
mariadb: () =>
api.mariadb.one.useQuery({ mariadbId: id }, { enabled: !!id }),
mongo: () => api.mongo.one.useQuery({ mongoId: id }, { enabled: !!id }),
"web-server": () => api.user.getBackups.useQuery(),
};
const { data } = api.destination.all.useQuery();
const { data: postgres, refetch } = queryMap[type]
@@ -49,6 +50,7 @@ export const ShowBackups = ({ id, type }: Props) => {
mysql: () => api.backup.manualBackupMySql.useMutation(),
mariadb: () => api.backup.manualBackupMariadb.useMutation(),
mongo: () => api.backup.manualBackupMongo.useMutation(),
"web-server": () => api.backup.manualBackupWebServer.useMutation(),
};
const { mutateAsync: manualBackup, isLoading: isManualBackup } = mutationMap[
@@ -64,7 +66,10 @@ export const ShowBackups = ({ id, type }: Props) => {
<Card className="bg-background">
<CardHeader className="flex flex-row justify-between gap-4 flex-wrap">
<div className="flex flex-col gap-0.5">
<CardTitle className="text-xl">Backups</CardTitle>
<CardTitle className="text-xl flex flex-row gap-2">
<Database className="size-6 text-muted-foreground" />
Backups
</CardTitle>
<CardDescription>
Add backups to your database to save the data to a different
provider.
@@ -73,11 +78,17 @@ export const ShowBackups = ({ id, type }: Props) => {
{postgres && postgres?.backups?.length > 0 && (
<div className="flex flex-col lg:flex-row gap-4 w-full lg:w-auto">
<AddBackup databaseId={id} databaseType={type} refetch={refetch} />
{type !== "web-server" && (
<AddBackup
databaseId={id}
databaseType={type}
refetch={refetch}
/>
)}
<RestoreBackup
databaseId={id}
databaseType={type}
serverId={postgres.serverId}
serverId={"serverId" in postgres ? postgres.serverId : undefined}
/>
</div>
)}
@@ -115,7 +126,9 @@ export const ShowBackups = ({ id, type }: Props) => {
<RestoreBackup
databaseId={id}
databaseType={type}
serverId={postgres.serverId}
serverId={
"serverId" in postgres ? postgres.serverId : undefined
}
/>
</div>
</div>

View File

@@ -0,0 +1,2 @@
ALTER TABLE "backup" ADD COLUMN "userId" text;--> statement-breakpoint
ALTER TABLE "backup" ADD CONSTRAINT "backup_userId_user_temp_id_fk" FOREIGN KEY ("userId") REFERENCES "public"."user_temp"("id") ON DELETE no action ON UPDATE no action;

View File

@@ -0,0 +1 @@
ALTER TYPE "public"."databaseType" ADD VALUE 'web-server';

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -575,6 +575,20 @@
"when": 1743281254393,
"tag": "0081_lovely_mentallo",
"breakpoints": true
},
{
"idx": 82,
"version": "7",
"when": 1743287689974,
"tag": "0082_clean_mandarin",
"breakpoints": true
},
{
"idx": 83,
"version": "7",
"when": 1743288371413,
"tag": "0083_parallel_stranger",
"breakpoints": true
}
]
}

View File

@@ -8,56 +8,21 @@ import { createServerSideHelpers } from "@trpc/react-query/server";
import type { GetServerSidePropsContext } from "next";
import type { ReactElement } from "react";
import superjson from "superjson";
import { api } from "@/utils/api";
import { ShowBackups } from "@/components/dashboard/database/backups/show-backups";
import { Card } from "@/components/ui/card";
const Page = () => {
const { data: user } = api.user.get.useQuery();
return (
<div className="w-full">
<div className="h-full rounded-xl max-w-5xl mx-auto flex flex-col gap-4">
<WebDomain />
<WebServer />
{/* <Card className="h-full bg-sidebar p-2.5 rounded-xl ">
<div className="rounded-xl bg-background shadow-md ">
<CardHeader className="">
<CardTitle className="text-xl flex flex-row gap-2">
<LayoutDashboardIcon className="size-6 text-muted-foreground self-center" />
Paid Features
</CardTitle>
<CardDescription>
Enable or disable paid features like monitoring
</CardDescription>
</CardHeader>
<CardContent>
<div className="flex flex-row gap-2 items-center">
<span className="text-sm font-medium text-muted-foreground">
Enable Paid Features:
</span>
<Switch
checked={data?.enablePaidFeatures}
onCheckedChange={() => {
update({
enablePaidFeatures: !data?.enablePaidFeatures,
})
.then(() => {
toast.success(
`Paid features ${
data?.enablePaidFeatures ? "disabled" : "enabled"
} successfully`,
);
refetch();
})
.catch(() => {
toast.error("Error updating paid features");
});
}}
/>
</div>
</CardContent>
{data?.enablePaidFeatures && <SetupMonitoring />}
</div>
</Card> */}
{/* */}
<div className="w-full flex flex-col gap-4">
<Card className="h-full bg-sidebar p-2.5 rounded-xl max-w-5xl mx-auto">
<ShowBackups id={user?.userId ?? ""} type="web-server" />
</Card>
</div>
</div>
</div>
);

View File

@@ -25,6 +25,7 @@ import {
runMongoBackup,
runMySqlBackup,
runPostgresBackup,
runWebServerBackup,
scheduleBackup,
updateBackupById,
} from "@dokploy/server";
@@ -40,6 +41,7 @@ import {
restoreMongoBackup,
restoreMySqlBackup,
restorePostgresBackup,
restoreWebServerBackup,
} from "@dokploy/server/utils/restore";
import { TRPCError } from "@trpc/server";
import { observable } from "@trpc/server/observable";
@@ -85,9 +87,13 @@ export const backupRouter = createTRPCRouter({
}
}
} catch (error) {
console.error(error);
throw new TRPCError({
code: "BAD_REQUEST",
message: "Error creating the Backup",
message:
error instanceof Error
? error.message
: "Error creating the Backup",
cause: error,
});
}
@@ -227,6 +233,13 @@ export const backupRouter = createTRPCRouter({
});
}
}),
manualBackupWebServer: protectedProcedure
.input(apiFindOneBackup)
.mutation(async ({ input }) => {
const backup = await findBackupById(input.backupId);
await runWebServerBackup(backup);
return true;
}),
listBackupFiles: protectedProcedure
.input(
z.object({
@@ -301,7 +314,13 @@ export const backupRouter = createTRPCRouter({
.input(
z.object({
databaseId: z.string(),
databaseType: z.enum(["postgres", "mysql", "mariadb", "mongo"]),
databaseType: z.enum([
"postgres",
"mysql",
"mariadb",
"mongo",
"web-server",
]),
databaseName: z.string().min(1),
backupFile: z.string().min(1),
destinationId: z.string().min(1),
@@ -366,6 +385,13 @@ export const backupRouter = createTRPCRouter({
);
});
}
if (input.databaseType === "web-server") {
return observable<string>((emit) => {
restoreWebServerBackup(destination, input.backupFile, (log) => {
emit.next(log);
});
});
}
return true;
}),

View File

@@ -91,6 +91,28 @@ export const userRouter = createTRPCRouter({
return memberResult;
}),
getBackups: adminProcedure.query(async ({ ctx }) => {
const memberResult = await db.query.member.findFirst({
where: and(
eq(member.userId, ctx.user.id),
eq(member.organizationId, ctx.session?.activeOrganizationId || ""),
),
with: {
user: {
with: {
backups: {
with: {
destination: true,
},
},
apiKeys: true,
},
},
},
});
return memberResult?.user;
}),
getServerMetrics: protectedProcedure.query(async ({ ctx }) => {
const memberResult = await db.query.member.findFirst({
where: and(

View File

@@ -15,12 +15,13 @@ import { mariadb } from "./mariadb";
import { mongo } from "./mongo";
import { mysql } from "./mysql";
import { postgres } from "./postgres";
import { users_temp } from "./user";
export const databaseType = pgEnum("databaseType", [
"postgres",
"mariadb",
"mysql",
"mongo",
"web-server",
]);
export const backups = pgTable("backup", {
@@ -58,6 +59,7 @@ export const backups = pgTable("backup", {
mongoId: text("mongoId").references((): AnyPgColumn => mongo.mongoId, {
onDelete: "cascade",
}),
userId: text("userId").references(() => users_temp.id),
});
export const backupsRelations = relations(backups, ({ one }) => ({
@@ -81,6 +83,10 @@ export const backupsRelations = relations(backups, ({ one }) => ({
fields: [backups.mongoId],
references: [mongo.mongoId],
}),
user: one(users_temp, {
fields: [backups.userId],
references: [users_temp.id],
}),
}));
const createSchema = createInsertSchema(backups, {
@@ -91,11 +97,12 @@ const createSchema = createInsertSchema(backups, {
database: z.string().min(1),
schedule: z.string(),
keepLatestCount: z.number().optional(),
databaseType: z.enum(["postgres", "mariadb", "mysql", "mongo"]),
databaseType: z.enum(["postgres", "mariadb", "mysql", "mongo", "web-server"]),
postgresId: z.string().optional(),
mariadbId: z.string().optional(),
mysqlId: z.string().optional(),
mongoId: z.string().optional(),
userId: z.string().optional(),
});
export const apiCreateBackup = createSchema.pick({
@@ -110,6 +117,7 @@ export const apiCreateBackup = createSchema.pick({
postgresId: true,
mongoId: true,
databaseType: true,
userId: true,
});
export const apiFindOneBackup = createSchema

View File

@@ -13,6 +13,7 @@ import { z } from "zod";
import { account, apikey, organization } from "./account";
import { projects } from "./project";
import { certificateType } from "./shared";
import { backups } from "./backups";
/**
* This is an example of how to use the multi-project schema feature of Drizzle ORM. Use the same
* database instance for multiple projects.
@@ -124,6 +125,7 @@ export const usersRelations = relations(users_temp, ({ one, many }) => ({
organizations: many(organization),
projects: many(projects),
apiKeys: many(apikey),
backups: many(backups),
}));
const createSchema = createInsertSchema(users_temp, {

View File

@@ -48,6 +48,7 @@ export * from "./utils/backups/mongo";
export * from "./utils/backups/mysql";
export * from "./utils/backups/postgres";
export * from "./utils/backups/utils";
export * from "./utils/backups/web-server";
export * from "./templates/processors";
export * from "./utils/notifications/build-error";

View File

@@ -10,11 +10,7 @@ import {
} from "../docker/utils";
import { sendDockerCleanupNotifications } from "../notifications/docker-cleanup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { runMariadbBackup } from "./mariadb";
import { runMongoBackup } from "./mongo";
import { runMySqlBackup } from "./mysql";
import { runPostgresBackup } from "./postgres";
import { getS3Credentials } from "./utils";
import { getS3Credentials, scheduleBackup } from "./utils";
import type { BackupSchedule } from "@dokploy/server/services/backup";
import { startLogCleanup } from "../access-log/handler";
@@ -56,126 +52,27 @@ export const initCronJobs = async () => {
}
}
const pgs = await db.query.postgres.findMany({
const backups = await db.query.backups.findMany({
with: {
backups: {
with: {
destination: true,
postgres: true,
mariadb: true,
mysql: true,
mongo: true,
},
},
},
});
for (const pg of pgs) {
for (const backup of pg.backups) {
const { schedule, backupId, enabled, database } = backup;
if (enabled) {
console.log(
`[Backup] Postgres DB ${pg.name} for ${database} Activated`,
);
scheduleJob(backupId, schedule, async () => {
console.log(
`PG-SERVER[${new Date().toLocaleString()}] Running Backup ${backupId}`,
);
await runPostgresBackup(pg, backup);
await keepLatestNBackups(backup, pg.serverId);
});
}
}
}
const mariadbs = await db.query.mariadb.findMany({
with: {
backups: {
with: {
destination: true,
postgres: true,
mariadb: true,
mysql: true,
mongo: true,
},
},
destination: true,
postgres: true,
mariadb: true,
mysql: true,
mongo: true,
user: true,
},
});
for (const maria of mariadbs) {
for (const backup of maria.backups) {
const { schedule, backupId, enabled, database } = backup;
if (enabled) {
console.log(
`[Backup] MariaDB DB ${maria.name} for ${database} Activated`,
);
scheduleJob(backupId, schedule, async () => {
console.log(
`MARIADB-SERVER[${new Date().toLocaleString()}] Running Backup ${backupId}`,
);
await runMariadbBackup(maria, backup);
await keepLatestNBackups(backup, maria.serverId);
});
}
}
}
const mongodbs = await db.query.mongo.findMany({
with: {
backups: {
with: {
destination: true,
postgres: true,
mariadb: true,
mysql: true,
mongo: true,
},
},
},
});
for (const mongo of mongodbs) {
for (const backup of mongo.backups) {
const { schedule, backupId, enabled } = backup;
if (enabled) {
console.log(`[Backup] MongoDB DB ${mongo.name} Activated`);
scheduleJob(backupId, schedule, async () => {
console.log(
`MONGO-SERVER[${new Date().toLocaleString()}] Running Backup ${backupId}`,
);
await runMongoBackup(mongo, backup);
await keepLatestNBackups(backup, mongo.serverId);
});
}
}
}
const mysqls = await db.query.mysql.findMany({
with: {
backups: {
with: {
destination: true,
postgres: true,
mariadb: true,
mysql: true,
mongo: true,
},
},
},
});
for (const mysql of mysqls) {
for (const backup of mysql.backups) {
const { schedule, backupId, enabled } = backup;
if (enabled) {
console.log(`[Backup] MySQL DB ${mysql.name} Activated`);
scheduleJob(backupId, schedule, async () => {
console.log(
`MYSQL-SERVER[${new Date().toLocaleString()}] Running Backup ${backupId}`,
);
await runMySqlBackup(mysql, backup);
await keepLatestNBackups(backup, mysql.serverId);
});
for (const backup of backups) {
try {
if (backup.enabled) {
scheduleBackup(backup);
}
console.log(
`[Backup] ${backup.databaseType} Enabled with cron: [${backup.schedule}]`,
);
} catch (error) {
console.error(`[Backup] ${backup.databaseType} Error`, error);
}
}

View File

@@ -6,6 +6,7 @@ import { runMariadbBackup } from "./mariadb";
import { runMongoBackup } from "./mongo";
import { runMySqlBackup } from "./mysql";
import { runPostgresBackup } from "./postgres";
import { runWebServerBackup } from "./web-server";
export const scheduleBackup = (backup: BackupSchedule) => {
const { schedule, backupId, databaseType, postgres, mysql, mongo, mariadb } =
@@ -23,6 +24,9 @@ export const scheduleBackup = (backup: BackupSchedule) => {
} else if (databaseType === "mariadb" && mariadb) {
await runMariadbBackup(mariadb, backup);
await keepLatestNBackups(backup, mariadb.serverId);
} else if (databaseType === "web-server") {
await runWebServerBackup(backup);
await keepLatestNBackups(backup);
}
});
};

View File

@@ -0,0 +1,45 @@
import type { BackupSchedule } from "@dokploy/server/services/backup";
import { execAsync } from "../process/execAsync";
import { getS3Credentials } from "./utils";
import { findDestinationById } from "@dokploy/server/services/destination";
import { IS_CLOUD, paths } from "@dokploy/server/constants";
import { mkdtemp } from "node:fs/promises";
import { join } from "node:path";
import { tmpdir } from "node:os";
/**
 * Creates a full backup of the Dokploy web server itself (internal Postgres
 * database + on-disk state under BASE_PATH), zips it, and uploads the archive
 * to the S3-compatible destination configured on the backup schedule.
 *
 * @param backup - the schedule describing destination, prefix, etc.
 * @returns true on success; undefined when running in cloud mode (no-op).
 * @throws re-throws any shell/upload error after logging it.
 */
export const runWebServerBackup = async (backup: BackupSchedule) => {
	try {
		// Backing up the web server only applies to self-hosted installs;
		// cloud deployments skip silently.
		if (IS_CLOUD) {
			return;
		}
		const destination = await findDestinationById(backup.destinationId);
		const rcloneFlags = getS3Credentials(destination);
		// ISO timestamp with ':' and '.' replaced so it is filename-safe.
		const timestamp = new Date().toISOString().replace(/[:.]/g, "-");
		const { BASE_PATH } = paths();
		// mkdtemp yields a unique scratch directory outside BASE_PATH.
		const tempDir = await mkdtemp(join(tmpdir(), "dokploy-backup-"));
		const backupFileName = `webserver-backup-${timestamp}.zip`;
		const s3Path = `:s3:${destination.bucket}/${backup.prefix}${backupFileName}`;

		try {
			// Quote every interpolated path so directories containing spaces or
			// shell metacharacters cannot break (or inject into) the command.
			await execAsync(`mkdir -p "${tempDir}/filesystem"`);

			// Dump the internal Postgres database in custom format (-Fc) so the
			// restore path can use pg_restore.
			const postgresCommand = `docker exec $(docker ps --filter "name=dokploy-postgres" -q) pg_dump -v -Fc -U dokploy -d dokploy > "${tempDir}/database.sql"`;
			await execAsync(postgresCommand);

			// Snapshot Dokploy's on-disk state. The glob stays outside the
			// quotes so it is still expanded by the shell.
			await execAsync(`cp -r "${BASE_PATH}/"* "${tempDir}/filesystem/"`);

			await execAsync(
				`cd "${tempDir}" && zip -r "${backupFileName}" database.sql filesystem/`,
			);

			const uploadCommand = `rclone copyto ${rcloneFlags.join(" ")} "${tempDir}/${backupFileName}" "${s3Path}"`;
			await execAsync(uploadCommand);
			return true;
		} finally {
			// Always remove the scratch directory, even when a step failed.
			await execAsync(`rm -rf "${tempDir}"`);
		}
	} catch (error) {
		console.error("Backup error:", error);
		throw error;
	}
};

View File

@@ -2,3 +2,4 @@ export { restorePostgresBackup } from "./postgres";
export { restoreMySqlBackup } from "./mysql";
export { restoreMariadbBackup } from "./mariadb";
export { restoreMongoBackup } from "./mongo";
export { restoreWebServerBackup } from "./web-server";

View File

@@ -0,0 +1,143 @@
import type { Destination } from "@dokploy/server/services/destination";
import { getS3Credentials } from "../backups/utils";
import { execAsync } from "../process/execAsync";
import { paths, IS_CLOUD } from "@dokploy/server/constants";
import { mkdtemp } from "node:fs/promises";
import { join } from "node:path";
import { tmpdir } from "node:os";
/**
 * Restores a Dokploy web-server backup previously produced by
 * runWebServerBackup: downloads the zip from the S3-compatible destination,
 * restores the filesystem under BASE_PATH, then drops and recreates the
 * internal Postgres database from the dump inside the archive.
 *
 * @param destination - S3-compatible destination holding the backup file.
 * @param backupFile - object key of the zip archive inside the bucket.
 * @param emit - progress callback; receives human-readable log lines.
 * @throws re-throws any shell/docker error after emitting it as a log line.
 */
export const restoreWebServerBackup = async (
	destination: Destination,
	backupFile: string,
	emit: (log: string) => void,
) => {
	// Restoring the web server only applies to self-hosted installs;
	// cloud deployments skip silently.
	if (IS_CLOUD) {
		return;
	}
	try {
		const rcloneFlags = getS3Credentials(destination);
		const bucketPath = `:s3:${destination.bucket}`;
		const backupPath = `${bucketPath}/${backupFile}`;
		const { BASE_PATH } = paths();

		// Shell fragment that resolves the Dokploy Postgres container id at
		// execution time; reused by every docker command below.
		const pgContainer = `$(docker ps --filter "name=dokploy-postgres" -q)`;

		// Create a temporary directory OUTSIDE of BASE_PATH so that wiping
		// BASE_PATH below cannot delete the files we are restoring from.
		const tempDir = await mkdtemp(join(tmpdir(), "dokploy-restore-"));

		try {
			emit("Starting restore...");
			emit(`Backup path: ${backupPath}`);
			emit(`Temp directory: ${tempDir}`);

			// Quote interpolated paths throughout so spaces/metacharacters in
			// tempDir or backupFile cannot break the shell commands.
			emit("Creating temporary directory...");
			await execAsync(`mkdir -p "${tempDir}"`);

			// Download backup from S3
			emit("Downloading backup from S3...");
			await execAsync(
				`rclone copyto ${rcloneFlags.join(" ")} "${backupPath}" "${tempDir}/${backupFile}"`,
			);

			// List files before extraction (diagnostic aid for failed restores)
			emit("Listing files before extraction...");
			const { stdout: beforeFiles } = await execAsync(`ls -la "${tempDir}"`);
			emit(`Files before extraction: ${beforeFiles}`);

			// Extract backup
			emit("Extracting backup...");
			await execAsync(`cd "${tempDir}" && unzip "${backupFile}"`);

			// Restore filesystem first
			emit("Restoring filesystem...");
			emit(`Copying from ${tempDir}/filesystem/* to ${BASE_PATH}/`);

			// First clean the target directory
			emit("Cleaning target directory...");
			await execAsync(`rm -rf "${BASE_PATH}/"*`);

			// Ensure the target directory exists
			emit("Setting up target directory...");
			await execAsync(`mkdir -p "${BASE_PATH}"`);

			// Copy files preserving permissions (-p)
			emit("Copying files...");
			await execAsync(`cp -rp "${tempDir}/filesystem/"* "${BASE_PATH}/"`);

			// Now handle database restore
			emit("Starting database restore...");

			// Some archives may carry the dump gzipped; decompress if present.
			// `|| true` keeps execAsync from throwing when the file is absent.
			const { stdout: hasGzFile } = await execAsync(
				`ls "${tempDir}/database.sql.gz" || true`,
			);
			if (hasGzFile.includes("database.sql.gz")) {
				emit("Found compressed database file, decompressing...");
				await execAsync(`cd "${tempDir}" && gunzip database.sql.gz`);
			}

			// Verify database file exists before destroying the live database.
			const { stdout: hasSqlFile } = await execAsync(
				`ls "${tempDir}/database.sql" || true`,
			);
			if (!hasSqlFile.includes("database.sql")) {
				throw new Error("Database file not found after extraction");
			}

			// Kick every session off the 'dokploy' database; DROP DATABASE
			// fails while connections are open.
			emit("Disconnecting all users from database...");
			await execAsync(
				`docker exec ${pgContainer} psql -U dokploy postgres -c "SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity WHERE pg_stat_activity.datname = 'dokploy' AND pid <> pg_backend_pid();"`,
			);

			emit("Dropping existing database...");
			await execAsync(
				`docker exec ${pgContainer} psql -U dokploy postgres -c "DROP DATABASE IF EXISTS dokploy;"`,
			);

			emit("Creating fresh database...");
			await execAsync(
				`docker exec ${pgContainer} psql -U dokploy postgres -c "CREATE DATABASE dokploy;"`,
			);

			// Copy the backup file into the container
			emit("Copying backup file into container...");
			await execAsync(
				`docker cp "${tempDir}/database.sql" ${pgContainer}:/tmp/database.sql`,
			);

			// Verify file in container
			emit("Verifying file in container...");
			await execAsync(
				`docker exec ${pgContainer} ls -l /tmp/database.sql`,
			);

			// Restore from the copied file (dump is pg_dump -Fc custom format,
			// so pg_restore — not psql — is required).
			emit("Running database restore...");
			await execAsync(
				`docker exec ${pgContainer} pg_restore -v -U dokploy -d dokploy /tmp/database.sql`,
			);

			// Cleanup the temporary file in the container
			emit("Cleaning up container temp file...");
			await execAsync(
				`docker exec ${pgContainer} rm /tmp/database.sql`,
			);

			emit("Restore completed successfully!");
		} finally {
			// Always remove the scratch directory, even when a step failed.
			emit("Cleaning up temporary files...");
			await execAsync(`rm -rf "${tempDir}"`);
		}
	} catch (error) {
		console.error(error);
		emit(
			`Error: ${
				error instanceof Error
					? error.message
					: "Error restoring web server backup"
			}`,
		);
		throw error;
	}
};