Implement metadata handling for database and compose backups. Update backup schemas to include metadata fields for various database types. Enhance backup creation and update processes to accommodate new metadata requirements. Modify UI components to support metadata input for different database types during backup operations.

This commit is contained in:
Mauricio Siu 2025-04-27 22:14:06 -06:00
parent 2ea2605ab1
commit 7c2eb63625
15 changed files with 6010 additions and 77 deletions

View File

@ -58,7 +58,36 @@ import { z } from "zod";
type CacheType = "cache" | "fetch"; type CacheType = "cache" | "fetch";
const AddPostgresBackup1Schema = z.object({ const getMetadataSchema = (
backupType: "database" | "compose",
databaseType: Props["databaseType"],
) => {
if (backupType !== "compose") return z.object({}).optional();
const schemas = {
postgres: z.object({
databaseUser: z.string().min(1, "Database user is required"),
}),
mariadb: z.object({
databaseUser: z.string().min(1, "Database user is required"),
databasePassword: z.string().min(1, "Database password is required"),
}),
mongo: z.object({
databaseUser: z.string().min(1, "Database user is required"),
databasePassword: z.string().min(1, "Database password is required"),
}),
mysql: z.object({
databaseRootPassword: z.string().min(1, "Root password is required"),
}),
"web-server": z.object({}),
};
return z.object({
[databaseType]: schemas[databaseType],
});
};
const Schema = z.object({
destinationId: z.string().min(1, "Destination required"), destinationId: z.string().min(1, "Destination required"),
schedule: z.string().min(1, "Schedule (Cron) required"), schedule: z.string().min(1, "Schedule (Cron) required"),
prefix: z.string().min(1, "Prefix required"), prefix: z.string().min(1, "Prefix required"),
@ -68,7 +97,7 @@ const AddPostgresBackup1Schema = z.object({
serviceName: z.string().nullable(), serviceName: z.string().nullable(),
}); });
type AddPostgresBackup = z.infer<typeof AddPostgresBackup1Schema>; type Schema = z.infer<typeof Schema>;
interface Props { interface Props {
id: string; id: string;
@ -89,7 +118,11 @@ export const AddBackup = ({
const { mutateAsync: createBackup, isLoading: isCreatingPostgresBackup } = const { mutateAsync: createBackup, isLoading: isCreatingPostgresBackup } =
api.backup.create.useMutation(); api.backup.create.useMutation();
const form = useForm<AddPostgresBackup>({ const schema = Schema.extend({
metadata: getMetadataSchema(backupType, databaseType),
});
const form = useForm<z.infer<typeof schema>>({
defaultValues: { defaultValues: {
database: "", database: "",
destinationId: "", destinationId: "",
@ -98,8 +131,9 @@ export const AddBackup = ({
schedule: "", schedule: "",
keepLatestCount: undefined, keepLatestCount: undefined,
serviceName: null, serviceName: null,
metadata: {},
}, },
resolver: zodResolver(AddPostgresBackup1Schema), resolver: zodResolver(schema),
}); });
const { const {
@ -128,10 +162,11 @@ export const AddBackup = ({
schedule: "", schedule: "",
keepLatestCount: undefined, keepLatestCount: undefined,
serviceName: null, serviceName: null,
metadata: {},
}); });
}, [form, form.reset, form.formState.isSubmitSuccessful, databaseType]); }, [form, form.reset, form.formState.isSubmitSuccessful, databaseType]);
const onSubmit = async (data: AddPostgresBackup) => { const onSubmit = async (data: Schema) => {
if (backupType === "compose" && !data.serviceName) { if (backupType === "compose" && !data.serviceName) {
form.setError("serviceName", { form.setError("serviceName", {
type: "manual", type: "manual",
@ -489,6 +524,115 @@ export const AddBackup = ({
</FormItem> </FormItem>
)} )}
/> />
{backupType === "compose" && (
<>
{databaseType === "postgres" && (
<FormField
control={form.control}
name="metadata.postgres.databaseUser"
render={({ field }) => (
<FormItem>
<FormLabel>Database User</FormLabel>
<FormControl>
<Input placeholder="postgres" {...field} />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
)}
{databaseType === "mariadb" && (
<>
<FormField
control={form.control}
name="metadata.mariadb.databaseUser"
render={({ field }) => (
<FormItem>
<FormLabel>Database User</FormLabel>
<FormControl>
<Input placeholder="mariadb" {...field} />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="metadata.mariadb.databasePassword"
render={({ field }) => (
<FormItem>
<FormLabel>Database Password</FormLabel>
<FormControl>
<Input
type="password"
placeholder="••••••••"
{...field}
/>
</FormControl>
<FormMessage />
</FormItem>
)}
/>
</>
)}
{databaseType === "mongo" && (
<>
<FormField
control={form.control}
name="metadata.mongo.databaseUser"
render={({ field }) => (
<FormItem>
<FormLabel>Database User</FormLabel>
<FormControl>
<Input placeholder="mongo" {...field} />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="metadata.mongo.databasePassword"
render={({ field }) => (
<FormItem>
<FormLabel>Database Password</FormLabel>
<FormControl>
<Input
type="password"
placeholder="••••••••"
{...field}
/>
</FormControl>
<FormMessage />
</FormItem>
)}
/>
</>
)}
{databaseType === "mysql" && (
<FormField
control={form.control}
name="metadata.mysql.databaseRootPassword"
render={({ field }) => (
<FormItem>
<FormLabel>Root Password</FormLabel>
<FormControl>
<Input
type="password"
placeholder="••••••••"
{...field}
/>
</FormControl>
<FormMessage />
</FormItem>
)}
/>
)}
</>
)}
</div> </div>
<DialogFooter> <DialogFooter>
<Button <Button

View File

@ -61,21 +61,24 @@ export const ShowBackups = ({
? query() ? query()
: api.mongo.one.useQuery({ mongoId: id }, { enabled: !!id }); : api.mongo.one.useQuery({ mongoId: id }, { enabled: !!id });
console.log(postgres); const mutationMap =
backupType === "database"
? {
postgres: api.backup.manualBackupPostgres.useMutation(),
mysql: api.backup.manualBackupMySql.useMutation(),
mariadb: api.backup.manualBackupMariadb.useMutation(),
mongo: api.backup.manualBackupMongo.useMutation(),
"web-server": api.backup.manualBackupWebServer.useMutation(),
}
: {
compose: api.backup.manualBackupCompose.useMutation(),
};
const mutationMap = { const key2 = backupType === "database" ? databaseType : "compose";
postgres: () => api.backup.manualBackupPostgres.useMutation(), const mutation = mutationMap[key2 as keyof typeof mutationMap];
mysql: () => api.backup.manualBackupMySql.useMutation(),
mariadb: () => api.backup.manualBackupMariadb.useMutation(),
mongo: () => api.backup.manualBackupMongo.useMutation(),
"web-server": () => api.backup.manualBackupWebServer.useMutation(),
compose: () => api.backup.manualBackupCompose.useMutation(),
};
const { mutateAsync: manualBackup, isLoading: isManualBackup } = mutationMap[ const { mutateAsync: manualBackup, isLoading: isManualBackup } = mutation
databaseType ? mutation
]
? mutationMap[databaseType]()
: api.backup.manualBackupMongo.useMutation(); : api.backup.manualBackupMongo.useMutation();
const { mutateAsync: deleteBackup, isLoading: isRemoving } = const { mutateAsync: deleteBackup, isLoading: isRemoving } =

View File

@ -63,7 +63,36 @@ import { z } from "zod";
type CacheType = "cache" | "fetch"; type CacheType = "cache" | "fetch";
const UpdateBackupSchema = z.object({ const getMetadataSchema = (
backupType: "database" | "compose",
databaseType: string,
) => {
if (backupType !== "compose") return z.object({}).optional();
const schemas = {
postgres: z.object({
databaseUser: z.string().min(1, "Database user is required"),
}),
mariadb: z.object({
databaseUser: z.string().min(1, "Database user is required"),
databasePassword: z.string().min(1, "Database password is required"),
}),
mongo: z.object({
databaseUser: z.string().min(1, "Database user is required"),
databasePassword: z.string().min(1, "Database password is required"),
}),
mysql: z.object({
databaseRootPassword: z.string().min(1, "Root password is required"),
}),
"web-server": z.object({}),
};
return z.object({
[databaseType]: schemas[databaseType as keyof typeof schemas],
});
};
const Schema = z.object({
destinationId: z.string().min(1, "Destination required"), destinationId: z.string().min(1, "Destination required"),
schedule: z.string().min(1, "Schedule (Cron) required"), schedule: z.string().min(1, "Schedule (Cron) required"),
prefix: z.string().min(1, "Prefix required"), prefix: z.string().min(1, "Prefix required"),
@ -71,10 +100,9 @@ const UpdateBackupSchema = z.object({
database: z.string().min(1, "Database required"), database: z.string().min(1, "Database required"),
keepLatestCount: z.coerce.number().optional(), keepLatestCount: z.coerce.number().optional(),
serviceName: z.string().nullable(), serviceName: z.string().nullable(),
metadata: z.object({}).optional(),
}); });
type UpdateBackup = z.infer<typeof UpdateBackupSchema>;
interface Props { interface Props {
backupId: string; backupId: string;
refetch: () => void; refetch: () => void;
@ -114,7 +142,13 @@ export const UpdateBackup = ({ backupId, refetch }: Props) => {
const { mutateAsync, isLoading: isLoadingUpdate } = const { mutateAsync, isLoading: isLoadingUpdate } =
api.backup.update.useMutation(); api.backup.update.useMutation();
const form = useForm<UpdateBackup>({ const schema = backup
? Schema.extend({
metadata: getMetadataSchema(backup.backupType, backup.databaseType),
})
: Schema;
const form = useForm<z.infer<typeof schema>>({
defaultValues: { defaultValues: {
database: "", database: "",
destinationId: "", destinationId: "",
@ -123,8 +157,9 @@ export const UpdateBackup = ({ backupId, refetch }: Props) => {
schedule: "", schedule: "",
keepLatestCount: undefined, keepLatestCount: undefined,
serviceName: null, serviceName: null,
metadata: {},
}, },
resolver: zodResolver(UpdateBackupSchema), resolver: zodResolver(schema),
}); });
useEffect(() => { useEffect(() => {
@ -139,11 +174,12 @@ export const UpdateBackup = ({ backupId, refetch }: Props) => {
keepLatestCount: backup.keepLatestCount keepLatestCount: backup.keepLatestCount
? Number(backup.keepLatestCount) ? Number(backup.keepLatestCount)
: undefined, : undefined,
metadata: backup.metadata || {},
}); });
} }
}, [form, form.reset, backup]); }, [form, form.reset, backup]);
const onSubmit = async (data: UpdateBackup) => { const onSubmit = async (data: z.infer<typeof schema>) => {
if (backup?.backupType === "compose" && !data.serviceName) { if (backup?.backupType === "compose" && !data.serviceName) {
form.setError("serviceName", { form.setError("serviceName", {
type: "manual", type: "manual",
@ -161,6 +197,7 @@ export const UpdateBackup = ({ backupId, refetch }: Props) => {
database: data.database, database: data.database,
serviceName: data.serviceName, serviceName: data.serviceName,
keepLatestCount: data.keepLatestCount as number | null, keepLatestCount: data.keepLatestCount as number | null,
metadata: data.metadata || {},
}) })
.then(async () => { .then(async () => {
toast.success("Backup Updated"); toast.success("Backup Updated");
@ -473,6 +510,115 @@ export const UpdateBackup = ({ backupId, refetch }: Props) => {
</FormItem> </FormItem>
)} )}
/> />
{backup?.backupType === "compose" && (
<>
{backup.databaseType === "postgres" && (
<FormField
control={form.control}
name="metadata.postgres.databaseUser"
render={({ field }) => (
<FormItem>
<FormLabel>Database User</FormLabel>
<FormControl>
<Input placeholder="postgres" {...field} />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
)}
{backup.databaseType === "mariadb" && (
<>
<FormField
control={form.control}
name="metadata.mariadb.databaseUser"
render={({ field }) => (
<FormItem>
<FormLabel>Database User</FormLabel>
<FormControl>
<Input placeholder="mariadb" {...field} />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="metadata.mariadb.databasePassword"
render={({ field }) => (
<FormItem>
<FormLabel>Database Password</FormLabel>
<FormControl>
<Input
type="password"
placeholder="••••••••"
{...field}
/>
</FormControl>
<FormMessage />
</FormItem>
)}
/>
</>
)}
{backup.databaseType === "mongo" && (
<>
<FormField
control={form.control}
name="metadata.mongo.databaseUser"
render={({ field }) => (
<FormItem>
<FormLabel>Database User</FormLabel>
<FormControl>
<Input placeholder="mongo" {...field} />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="metadata.mongo.databasePassword"
render={({ field }) => (
<FormItem>
<FormLabel>Database Password</FormLabel>
<FormControl>
<Input
type="password"
placeholder="••••••••"
{...field}
/>
</FormControl>
<FormMessage />
</FormItem>
)}
/>
</>
)}
{backup.databaseType === "mysql" && (
<FormField
control={form.control}
name="metadata.mysql.databaseRootPassword"
render={({ field }) => (
<FormItem>
<FormLabel>Root Password</FormLabel>
<FormControl>
<Input
type="password"
placeholder="••••••••"
{...field}
/>
</FormControl>
<FormMessage />
</FormItem>
)}
/>
)}
</>
)}
</div> </div>
<DialogFooter> <DialogFooter>
<Button <Button

View File

@ -0,0 +1 @@
ALTER TABLE "backup" ADD COLUMN "metadata" jsonb;

File diff suppressed because it is too large Load Diff

View File

@ -624,6 +624,13 @@
"when": 1745801614194, "when": 1745801614194,
"tag": "0088_same_ezekiel", "tag": "0088_same_ezekiel",
"breakpoints": true "breakpoints": true
},
{
"idx": 89,
"version": "7",
"when": 1745812150155,
"tag": "0089_dazzling_marrow",
"breakpoints": true
} }
] ]
} }

View File

@ -10,6 +10,7 @@ import {
IS_CLOUD, IS_CLOUD,
createBackup, createBackup,
findBackupById, findBackupById,
findComposeByBackupId,
findMariadbByBackupId, findMariadbByBackupId,
findMariadbById, findMariadbById,
findMongoByBackupId, findMongoByBackupId,
@ -31,6 +32,7 @@ import {
} from "@dokploy/server"; } from "@dokploy/server";
import { findDestinationById } from "@dokploy/server/services/destination"; import { findDestinationById } from "@dokploy/server/services/destination";
import { runComposeBackup } from "@dokploy/server/utils/backups/compose";
import { import {
getS3Credentials, getS3Credentials,
normalizeS3Path, normalizeS3Path,
@ -240,9 +242,18 @@ export const backupRouter = createTRPCRouter({
manualBackupCompose: protectedProcedure manualBackupCompose: protectedProcedure
.input(apiFindOneBackup) .input(apiFindOneBackup)
.mutation(async ({ input }) => { .mutation(async ({ input }) => {
// const backup = await findBackupById(input.backupId); try {
// await runComposeBackup(backup); const backup = await findBackupById(input.backupId);
return true; const compose = await findComposeByBackupId(backup.backupId);
await runComposeBackup(compose, backup);
return true;
} catch (error) {
throw new TRPCError({
code: "BAD_REQUEST",
message: "Error running manual Compose backup ",
cause: error,
});
}
}), }),
manualBackupMongo: protectedProcedure manualBackupMongo: protectedProcedure
.input(apiFindOneBackup) .input(apiFindOneBackup)

View File

@ -27,7 +27,6 @@ import {
createMount, createMount,
deleteMount, deleteMount,
findComposeById, findComposeById,
findDomainsByComposeId,
findProjectById, findProjectById,
findServerById, findServerById,
findUserById, findUserById,
@ -268,8 +267,7 @@ export const composeRouter = createTRPCRouter({
message: "You are not authorized to get this compose", message: "You are not authorized to get this compose",
}); });
} }
const domains = await findDomainsByComposeId(input.composeId); const composeFile = await addDomainToCompose(compose);
const composeFile = await addDomainToCompose(compose, domains);
return dump(composeFile, { return dump(composeFile, {
lineWidth: 1000, lineWidth: 1000,
}); });
@ -723,18 +721,4 @@ export const composeRouter = createTRPCRouter({
}); });
} }
}), }),
manualBackup: protectedProcedure
.input(z.object({ composeId: z.string() }))
.mutation(async ({ input, ctx }) => {
const compose = await findComposeById(input.composeId);
if (compose.project.organizationId !== ctx.session.activeOrganizationId) {
throw new TRPCError({
code: "UNAUTHORIZED",
message: "You are not authorized to backup this compose",
});
}
await createBackup({
composeId: compose.composeId,
});
}),
}); });

View File

@ -3,6 +3,7 @@ import {
type AnyPgColumn, type AnyPgColumn,
boolean, boolean,
integer, integer,
jsonb,
pgEnum, pgEnum,
pgTable, pgTable,
text, text,
@ -69,6 +70,23 @@ export const backups = pgTable("backup", {
onDelete: "cascade", onDelete: "cascade",
}), }),
userId: text("userId").references(() => users_temp.id), userId: text("userId").references(() => users_temp.id),
// Only for compose backups
metadata: jsonb("metadata").$type<{
postgres?: {
databaseUser: string;
};
mariadb?: {
databaseUser: string;
databasePassword: string;
};
mongo?: {
databaseUser: string;
databasePassword: string;
};
mysql?: {
databaseRootPassword: string;
};
}>(),
}); });
export const backupsRelations = relations(backups, ({ one }) => ({ export const backupsRelations = relations(backups, ({ one }) => ({
@ -134,6 +152,7 @@ export const apiCreateBackup = createSchema.pick({
backupType: true, backupType: true,
composeId: true, composeId: true,
serviceName: true, serviceName: true,
metadata: true,
}); });
export const apiFindOneBackup = createSchema export const apiFindOneBackup = createSchema
@ -158,5 +177,6 @@ export const apiUpdateBackup = createSchema
database: true, database: true,
keepLatestCount: true, keepLatestCount: true,
serviceName: true, serviceName: true,
metadata: true,
}) })
.required(); .required();

View File

@ -1,5 +1,10 @@
import { db } from "@dokploy/server/db"; import { db } from "@dokploy/server/db";
import { type apiCreateMongo, backups, mongo } from "@dokploy/server/db/schema"; import {
type apiCreateMongo,
backups,
compose,
mongo,
} from "@dokploy/server/db/schema";
import { buildAppName } from "@dokploy/server/db/schema"; import { buildAppName } from "@dokploy/server/db/schema";
import { generatePassword } from "@dokploy/server/templates"; import { generatePassword } from "@dokploy/server/templates";
import { buildMongo } from "@dokploy/server/utils/databases/mongo"; import { buildMongo } from "@dokploy/server/utils/databases/mongo";
@ -103,6 +108,25 @@ export const findMongoByBackupId = async (backupId: string) => {
return result[0]; return result[0];
}; };
/**
 * Resolves the compose service that owns a backup schedule.
 *
 * Joins `compose` against `backups` on `composeId` and returns the single
 * matching compose row, or throws a NOT_FOUND TRPCError when the backup id
 * does not belong to any compose.
 */
export const findComposeByBackupId = async (backupId: string) => {
	const rows = await db
		.select({
			...getTableColumns(compose),
		})
		.from(compose)
		.innerJoin(backups, eq(compose.composeId, backups.composeId))
		.where(eq(backups.backupId, backupId))
		.limit(1);

	const match = rows?.[0];
	if (!match) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Compose not found",
		});
	}
	return match;
};
export const removeMongoById = async (mongoId: string) => { export const removeMongoById = async (mongoId: string) => {
const result = await db const result = await db
.delete(mongo) .delete(mongo)

View File

@ -0,0 +1,91 @@
import type { BackupSchedule } from "@dokploy/server/services/backup";
import type { Compose } from "@dokploy/server/services/compose";
import { findProjectById } from "@dokploy/server/services/project";
import { sendDatabaseBackupNotifications } from "../notifications/database-backup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getS3Credentials, normalizeS3Path } from "./utils";
/**
 * Builds the in-container dump command for a compose-service backup.
 *
 * The command `docker exec`s into the already-located container and pipes a
 * gzip'd dump to stdout so the caller can stream it into rclone. Credentials
 * come from `backup.metadata` (per-engine shape defined in the backup schema).
 *
 * NOTE(review): user names and passwords are interpolated directly into a
 * shell string — a value containing quotes would break or inject into the
 * command. Consider escaping or passing credentials via env vars.
 */
const buildBackupCommand = (
	containerId: string,
	backup: BackupSchedule,
	database: string,
) => {
	const metadata = backup.metadata;
	switch (backup.databaseType) {
		case "postgres":
			return `docker exec ${containerId} sh -c "pg_dump -Fc --no-acl --no-owner -h localhost -U ${metadata?.postgres?.databaseUser} --no-password '${database}' | gzip"`;
		case "mariadb":
			return `docker exec ${containerId} sh -c "mariadb-dump --user='${metadata?.mariadb?.databaseUser}' --password='${metadata?.mariadb?.databasePassword}' --databases ${database} | gzip"`;
		case "mysql":
			return `docker exec ${containerId} sh -c "mysqldump --default-character-set=utf8mb4 -u 'root' --password='${metadata?.mysql?.databaseRootPassword}' --single-transaction --no-tablespaces --quick '${database}' | gzip"`;
		case "mongo":
			return `docker exec ${containerId} sh -c "mongodump -d '${database}' -u '${metadata?.mongo?.databaseUser}' -p '${metadata?.mongo?.databasePassword}' --archive --authenticationDatabase admin --gzip"`;
		default:
			// Previously an unsupported type produced an empty command that was
			// still piped into rclone; fail fast instead.
			throw new Error(
				`Unsupported database type for compose backup: ${backup.databaseType}`,
			);
	}
};

/**
 * Runs a scheduled backup for a database service inside a compose project.
 *
 * Locates the running container by the `dokploy.backup.id` label, streams a
 * gzip'd dump out of it, and uploads it to the backup's S3 destination via
 * `rclone rcat`. Executes locally or over SSH depending on
 * `compose.serverId`, then sends a success/error notification.
 *
 * @param compose - compose service owning the backup (provides serverId/project)
 * @param backup  - schedule row carrying destination, prefix, database and
 *                  per-engine credentials in `metadata`
 * @throws when no matching container is running, the database type is
 *         unsupported, or the dump/upload command fails
 */
export const runComposeBackup = async (
	compose: Compose,
	backup: BackupSchedule,
) => {
	const { projectId, name } = compose;
	const project = await findProjectById(projectId);
	const { prefix, database } = backup;
	const destination = backup.destination;
	const backupFileName = `${new Date().toISOString()}.dump.gz`;
	const bucketDestination = `${normalizeS3Path(prefix)}${backupFileName}`;

	try {
		const rcloneFlags = getS3Credentials(destination);
		const rcloneDestination = `:s3:${destination.bucket}/${bucketDestination}`;
		const rcloneCommand = `rclone rcat ${rcloneFlags.join(" ")} "${rcloneDestination}"`;
		// Find the first running container labeled with this backup's id (labels
		// are attached by addDomainToCompose via createBackupLabels).
		const findContainerCommand = `docker ps --filter "status=running" --filter "label=dokploy.backup.id=${backup.backupId}" --format "{{.ID}}" | head -n 1`;

		// Same flow either way; only the executor differs (SSH vs local shell).
		const exec = compose.serverId
			? (cmd: string) => execAsyncRemote(compose.serverId as string, cmd)
			: (cmd: string) => execAsync(cmd);

		const { stdout } = await exec(findContainerCommand);
		if (!stdout) {
			throw new Error("Container not found");
		}
		const containerId = stdout.trim();

		const backupCommand = buildBackupCommand(containerId, backup, database);
		await exec(`${backupCommand} | ${rcloneCommand}`);

		await sendDatabaseBackupNotifications({
			applicationName: name,
			projectName: project.name,
			// NOTE(review): hardcoded "mongodb" regardless of the actual engine —
			// looks like it should reflect backup.databaseType; confirm against
			// the notification type union before changing.
			databaseType: "mongodb",
			type: "success",
			organizationId: project.organizationId,
		});
	} catch (error) {
		console.log(error);
		await sendDatabaseBackupNotifications({
			applicationName: name,
			projectName: project.name,
			// NOTE(review): same hardcoded engine name as the success path.
			databaseType: "mongodb",
			type: "error",
			// @ts-ignore
			errorMessage: error?.message || "Error message not provided",
			organizationId: project.organizationId,
		});
		throw error;
	}
};
// Example restore (use real values; never commit credentials):
// mongorestore -d <database> -u <user> -p <password> --authenticationDatabase admin --gzip --archive=<timestamp>.dump.gz

View File

@ -7,26 +7,39 @@ import { runMongoBackup } from "./mongo";
import { runMySqlBackup } from "./mysql"; import { runMySqlBackup } from "./mysql";
import { runPostgresBackup } from "./postgres"; import { runPostgresBackup } from "./postgres";
import { runWebServerBackup } from "./web-server"; import { runWebServerBackup } from "./web-server";
import { runComposeBackup } from "./compose";
export const scheduleBackup = (backup: BackupSchedule) => { export const scheduleBackup = (backup: BackupSchedule) => {
const { schedule, backupId, databaseType, postgres, mysql, mongo, mariadb } = const {
backup; schedule,
backupId,
databaseType,
postgres,
mysql,
mongo,
mariadb,
compose,
} = backup;
scheduleJob(backupId, schedule, async () => { scheduleJob(backupId, schedule, async () => {
if (databaseType === "postgres" && postgres) { if (backup.backupType === "database") {
await runPostgresBackup(postgres, backup); if (databaseType === "postgres" && postgres) {
await keepLatestNBackups(backup, postgres.serverId); await runPostgresBackup(postgres, backup);
} else if (databaseType === "mysql" && mysql) { await keepLatestNBackups(backup, postgres.serverId);
await runMySqlBackup(mysql, backup); } else if (databaseType === "mysql" && mysql) {
await keepLatestNBackups(backup, mysql.serverId); await runMySqlBackup(mysql, backup);
} else if (databaseType === "mongo" && mongo) { await keepLatestNBackups(backup, mysql.serverId);
await runMongoBackup(mongo, backup); } else if (databaseType === "mongo" && mongo) {
await keepLatestNBackups(backup, mongo.serverId); await runMongoBackup(mongo, backup);
} else if (databaseType === "mariadb" && mariadb) { await keepLatestNBackups(backup, mongo.serverId);
await runMariadbBackup(mariadb, backup); } else if (databaseType === "mariadb" && mariadb) {
await keepLatestNBackups(backup, mariadb.serverId); await runMariadbBackup(mariadb, backup);
} else if (databaseType === "web-server") { await keepLatestNBackups(backup, mariadb.serverId);
await runWebServerBackup(backup); } else if (databaseType === "web-server") {
await keepLatestNBackups(backup); await runWebServerBackup(backup);
await keepLatestNBackups(backup);
}
} else if (backup.backupType === "compose" && compose) {
await runComposeBackup(compose, backup);
} }
}); });
}; };

View File

@ -22,15 +22,15 @@ import { spawnAsync } from "../process/spawnAsync";
export type ComposeNested = InferResultType< export type ComposeNested = InferResultType<
"compose", "compose",
{ project: true; mounts: true; domains: true } { project: true; mounts: true; domains: true; backups: true }
>; >;
export const buildCompose = async (compose: ComposeNested, logPath: string) => { export const buildCompose = async (compose: ComposeNested, logPath: string) => {
const writeStream = createWriteStream(logPath, { flags: "a" }); const writeStream = createWriteStream(logPath, { flags: "a" });
const { sourceType, appName, mounts, composeType, domains } = compose; const { sourceType, appName, mounts, composeType } = compose;
try { try {
const { COMPOSE_PATH } = paths(); const { COMPOSE_PATH } = paths();
const command = createCommand(compose); const command = createCommand(compose);
await writeDomainsToCompose(compose, domains); await writeDomainsToCompose(compose);
createEnvFile(compose); createEnvFile(compose);
if (compose.isolatedDeployment) { if (compose.isolatedDeployment) {

View File

@ -0,0 +1,4 @@
/**
 * Docker labels tagging a compose service's container with its backup
 * schedule id, so the backup runner can later locate the container via
 * `docker ps --filter "label=dokploy.backup.id=..."`.
 */
export const createBackupLabels = (backupId: string) => [
	`dokploy.backup.id=${backupId}`,
];

View File

@ -38,6 +38,8 @@ import type {
PropertiesNetworks, PropertiesNetworks,
} from "./types"; } from "./types";
import { encodeBase64 } from "./utils"; import { encodeBase64 } from "./utils";
import type { Backup } from "@dokploy/server/services/backup";
import { createBackupLabels } from "./backup";
export const cloneCompose = async (compose: Compose) => { export const cloneCompose = async (compose: Compose) => {
if (compose.sourceType === "github") { if (compose.sourceType === "github") {
@ -132,13 +134,13 @@ export const readComposeFile = async (compose: Compose) => {
}; };
export const writeDomainsToCompose = async ( export const writeDomainsToCompose = async (
compose: Compose, compose: Compose & { domains: Domain[]; backups: Backup[] },
domains: Domain[],
) => { ) => {
if (!domains.length) { const { domains, backups } = compose;
if (!domains.length && !backups.length) {
return; return;
} }
const composeConverted = await addDomainToCompose(compose, domains); const composeConverted = await addDomainToCompose(compose);
const path = getComposePath(compose); const path = getComposePath(compose);
const composeString = dump(composeConverted, { lineWidth: 1000 }); const composeString = dump(composeConverted, { lineWidth: 1000 });
@ -150,7 +152,7 @@ export const writeDomainsToCompose = async (
}; };
export const writeDomainsToComposeRemote = async ( export const writeDomainsToComposeRemote = async (
compose: Compose, compose: Compose & { domains: Domain[]; backups: Backup[] },
domains: Domain[], domains: Domain[],
logPath: string, logPath: string,
) => { ) => {
@ -159,7 +161,7 @@ export const writeDomainsToComposeRemote = async (
} }
try { try {
const composeConverted = await addDomainToCompose(compose, domains); const composeConverted = await addDomainToCompose(compose);
const path = getComposePath(compose); const path = getComposePath(compose);
if (!composeConverted) { if (!composeConverted) {
@ -180,22 +182,20 @@ exit 1;
`; `;
} }
}; };
// (node:59875) MaxListenersExceededWarning: Possible EventEmitter memory leak detected. 11 SIGTERM listeners added to [process]. Use emitter.setMaxListeners() to increase limit
export const addDomainToCompose = async ( export const addDomainToCompose = async (
compose: Compose, compose: Compose & { domains: Domain[]; backups: Backup[] },
domains: Domain[],
) => { ) => {
const { appName } = compose; const { appName, domains, backups } = compose;
let result: ComposeSpecification | null; let result: ComposeSpecification | null;
if (compose.serverId) { if (compose.serverId) {
result = await loadDockerComposeRemote(compose); // aca hay que ir al servidor e ir a traer el compose file al servidor result = await loadDockerComposeRemote(compose);
} else { } else {
result = await loadDockerCompose(compose); result = await loadDockerCompose(compose);
} }
if (!result || domains.length === 0) { if (!result || (domains.length === 0 && backups.length === 0)) {
return null; return null;
} }
@ -264,6 +264,37 @@ export const addDomainToCompose = async (
} }
} }
for (const backup of backups) {
const { backupId, serviceName, enabled } = backup;
if (!enabled) {
continue;
}
if (!serviceName) {
throw new Error(
"Service name not found, please check the backups to use a valid service name",
);
}
if (!result?.services?.[serviceName]) {
throw new Error(`The service ${serviceName} not found in the compose`);
}
const backupLabels = createBackupLabels(backupId);
if (!result.services[serviceName].labels) {
result.services[serviceName].labels = [];
}
result.services[serviceName].labels = [
...(Array.isArray(result.services[serviceName].labels)
? result.services[serviceName].labels
: []),
...backupLabels,
];
}
// Add dokploy-network to the root of the compose file // Add dokploy-network to the root of the compose file
if (!compose.isolatedDeployment) { if (!compose.isolatedDeployment) {
result.networks = addDokployNetworkToRoot(result.networks); result.networks = addDokployNetworkToRoot(result.networks);