Merge branch 'Dokploy:canary' into add-disable-recurse-submodules-option

This commit is contained in:
Yusoof Moh 2025-03-30 21:11:25 +07:00 committed by GitHub
commit 96e9799afb
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
36 changed files with 11964 additions and 438 deletions

View File

@ -29,7 +29,7 @@ WORKDIR /app
# Set production
ENV NODE_ENV=production
RUN apt-get update && apt-get install -y curl unzip apache2-utils iproute2 && rm -rf /var/lib/apt/lists/*
RUN apt-get update && apt-get install -y curl unzip zip apache2-utils iproute2 && rm -rf /var/lib/apt/lists/*
# Copy only the necessary files
COPY --from=build /prod/dokploy/.next ./.next

View File

@ -1006,7 +1006,7 @@ services:
volumes:
db-config-testhash:
`) as ComposeSpecification;
`);
test("Expect to change the suffix in all the possible places (4 Try)", () => {
const composeData = load(composeFileComplex) as ComposeSpecification;
@ -1115,3 +1115,60 @@ test("Expect to change the suffix in all the possible places (5 Try)", () => {
expect(updatedComposeData).toEqual(expectedDockerComposeExample1);
});
// Fixture: a compose file whose named volumes are referenced with
// subdirectory paths (e.g. "backrest/data:/data"). Used to verify that
// addSuffixToAllVolumes suffixes only the volume name and preserves the
// subpath and any bind mounts (the "/:/userdata:ro" entry must not change).
const composeFileBackrest = `
services:
backrest:
image: garethgeorge/backrest:v1.7.3
restart: unless-stopped
ports:
- 9898
environment:
- BACKREST_PORT=9898
- BACKREST_DATA=/data
- BACKREST_CONFIG=/config/config.json
- XDG_CACHE_HOME=/cache
- TZ=\${TZ}
volumes:
- backrest/data:/data
- backrest/config:/config
- backrest/cache:/cache
- /:/userdata:ro
volumes:
backrest:
backrest-cache:
`;
// Expected result for composeFileBackrest after addSuffixToAllVolumes with
// suffix "testhash": volume names gain the "-testhash" suffix (in both the
// service mounts and the top-level volumes section) while subdirectory
// paths after the volume name and the read-only bind mount stay untouched.
const expectedDockerComposeBackrest = load(`
services:
backrest:
image: garethgeorge/backrest:v1.7.3
restart: unless-stopped
ports:
- 9898
environment:
- BACKREST_PORT=9898
- BACKREST_DATA=/data
- BACKREST_CONFIG=/config/config.json
- XDG_CACHE_HOME=/cache
- TZ=\${TZ}
volumes:
- backrest-testhash/data:/data
- backrest-testhash/config:/config
- backrest-testhash/cache:/cache
- /:/userdata:ro
volumes:
backrest-testhash:
backrest-cache-testhash:
`) as ComposeSpecification;
// Regression test: volume references that include a subdirectory
// ("name/sub:/target") must keep the subdirectory when the volume name is
// suffixed. Compares the full transformed document against the fixture above.
test("Should handle volume paths with subdirectories correctly", () => {
const composeData = load(composeFileBackrest) as ComposeSpecification;
const suffix = "testhash";
const updatedComposeData = addSuffixToAllVolumes(composeData, suffix);
expect(updatedComposeData).toEqual(expectedDockerComposeBackrest);
});

View File

@ -233,6 +233,49 @@ describe("processTemplate", () => {
expect(base64Value.length).toBeGreaterThanOrEqual(42);
expect(base64Value.length).toBeLessThanOrEqual(44);
});
// Booleans/numbers supplied as pre-joined "KEY=value" strings must pass
// through processTemplate unchanged (no re-quoting or type coercion).
it("should handle boolean values in env vars when provided as an array", () => {
const template: CompleteTemplate = {
metadata: {} as any,
variables: {},
config: {
domains: [],
env: [
"ENABLE_USER_SIGN_UP=false",
"DEBUG_MODE=true",
"SOME_NUMBER=42",
],
mounts: [],
},
};
const result = processTemplate(template, mockSchema);
expect(result.envs).toHaveLength(3);
expect(result.envs).toContain("ENABLE_USER_SIGN_UP=false");
expect(result.envs).toContain("DEBUG_MODE=true");
expect(result.envs).toContain("SOME_NUMBER=42");
});
// Same expectation when env is an object map with real boolean/number
// values: processTemplate must stringify them into "KEY=value" entries.
it("should handle boolean values in env vars when provided as an object", () => {
const template: CompleteTemplate = {
metadata: {} as any,
variables: {},
config: {
domains: [],
env: {
ENABLE_USER_SIGN_UP: false,
DEBUG_MODE: true,
SOME_NUMBER: 42,
},
},
};
const result = processTemplate(template, mockSchema);
expect(result.envs).toHaveLength(3);
expect(result.envs).toContain("ENABLE_USER_SIGN_UP=false");
expect(result.envs).toContain("DEBUG_MODE=true");
expect(result.envs).toContain("SOME_NUMBER=42");
});
});
describe("mounts processing", () => {

View File

@ -20,7 +20,7 @@ import { useForm } from "react-hook-form";
import { toast } from "sonner";
import { z } from "zod";
enum BuildType {
export enum BuildType {
dockerfile = "dockerfile",
heroku_buildpacks = "heroku_buildpacks",
paketo_buildpacks = "paketo_buildpacks",
@ -29,9 +29,18 @@ enum BuildType {
railpack = "railpack",
}
// Human-readable label for each build type. NOTE: the insertion order of
// this object is significant — the radio options in ShowBuildChooseForm are
// rendered via Object.entries(buildTypeDisplayMap), so reordering these
// keys reorders the UI list.
const buildTypeDisplayMap: Record<BuildType, string> = {
[BuildType.dockerfile]: "Dockerfile",
[BuildType.railpack]: "Railpack",
[BuildType.nixpacks]: "Nixpacks",
[BuildType.heroku_buildpacks]: "Heroku Buildpacks",
[BuildType.paketo_buildpacks]: "Paketo Buildpacks",
[BuildType.static]: "Static",
};
const mySchema = z.discriminatedUnion("buildType", [
z.object({
buildType: z.literal("dockerfile"),
buildType: z.literal(BuildType.dockerfile),
dockerfile: z
.string({
required_error: "Dockerfile path is required",
@ -42,39 +51,88 @@ const mySchema = z.discriminatedUnion("buildType", [
dockerBuildStage: z.string().nullable().default(""),
}),
z.object({
buildType: z.literal("heroku_buildpacks"),
buildType: z.literal(BuildType.heroku_buildpacks),
herokuVersion: z.string().nullable().default(""),
}),
z.object({
buildType: z.literal("paketo_buildpacks"),
buildType: z.literal(BuildType.paketo_buildpacks),
}),
z.object({
buildType: z.literal("nixpacks"),
buildType: z.literal(BuildType.nixpacks),
publishDirectory: z.string().optional(),
}),
z.object({
buildType: z.literal("static"),
buildType: z.literal(BuildType.static),
}),
z.object({
buildType: z.literal("railpack"),
buildType: z.literal(BuildType.railpack),
}),
]);
type AddTemplate = z.infer<typeof mySchema>;
// Props for ShowBuildChooseForm: the application whose build type is edited.
interface Props {
applicationId: string;
}
// Subset of the application record consumed by resetData. Only buildType is
// required; the per-build-type fields are optional/nullable because the API
// returns null for fields that do not apply to the current build type.
interface ApplicationData {
buildType: BuildType;
dockerfile?: string | null;
dockerContextPath?: string | null;
dockerBuildStage?: string | null;
herokuVersion?: string | null;
publishDirectory?: string | null;
}
/**
 * Type guard: returns true when `value` is one of the BuildType enum's
 * string values, narrowing it to BuildType for the compiler.
 */
function isValidBuildType(value: string): value is BuildType {
	const knownTypes: string[] = Object.values(BuildType);
	return knownTypes.includes(value);
}
/**
 * Maps a persisted application record to the discriminated-union default
 * values used by the build-type form. Each build type carries only the
 * fields relevant to it; nullable values fall back to empty strings so the
 * form inputs stay controlled.
 */
const resetData = (data: ApplicationData): AddTemplate => {
	switch (data.buildType) {
		case BuildType.dockerfile:
			return {
				buildType: BuildType.dockerfile,
				dockerfile: data.dockerfile || "",
				dockerContextPath: data.dockerContextPath || "",
				dockerBuildStage: data.dockerBuildStage || "",
			};
		case BuildType.heroku_buildpacks:
			return {
				buildType: BuildType.heroku_buildpacks,
				herokuVersion: data.herokuVersion || "",
			};
		case BuildType.nixpacks:
			return {
				buildType: BuildType.nixpacks,
				publishDirectory: data.publishDirectory || undefined,
			};
		case BuildType.paketo_buildpacks:
			return { buildType: BuildType.paketo_buildpacks };
		case BuildType.static:
			return { buildType: BuildType.static };
		case BuildType.railpack:
			return { buildType: BuildType.railpack };
		default: {
			// Defensive fallback: unreachable when data.buildType is a valid
			// enum member (all members are handled above). The braces scope
			// the case body so no lexical declaration leaks across cases
			// (eslint no-case-declarations); data.buildType is already typed
			// as BuildType, so no extra cast is needed.
			return { buildType: data.buildType } as AddTemplate;
		}
	}
};
export const ShowBuildChooseForm = ({ applicationId }: Props) => {
const { mutateAsync, isLoading } =
api.application.saveBuildType.useMutation();
const { data, refetch } = api.application.one.useQuery(
{
applicationId,
},
{
enabled: !!applicationId,
},
{ applicationId },
{ enabled: !!applicationId },
);
const form = useForm<AddTemplate>({
@ -85,46 +143,36 @@ export const ShowBuildChooseForm = ({ applicationId }: Props) => {
});
const buildType = form.watch("buildType");
useEffect(() => {
if (data) {
if (data.buildType === "dockerfile") {
form.reset({
buildType: data.buildType,
...(data.buildType && {
dockerfile: data.dockerfile || "",
dockerContextPath: data.dockerContextPath || "",
dockerBuildStage: data.dockerBuildStage || "",
}),
});
} else if (data.buildType === "heroku_buildpacks") {
form.reset({
buildType: data.buildType,
...(data.buildType && {
herokuVersion: data.herokuVersion || "",
}),
});
} else {
form.reset({
buildType: data.buildType,
publishDirectory: data.publishDirectory || undefined,
});
}
const typedData: ApplicationData = {
...data,
buildType: isValidBuildType(data.buildType)
? (data.buildType as BuildType)
: BuildType.nixpacks, // fallback
};
form.reset(resetData(typedData));
}
}, [form.formState.isSubmitSuccessful, form.reset, data, form]);
}, [data, form]);
const onSubmit = async (data: AddTemplate) => {
await mutateAsync({
applicationId,
buildType: data.buildType,
publishDirectory:
data.buildType === "nixpacks" ? data.publishDirectory : null,
dockerfile: data.buildType === "dockerfile" ? data.dockerfile : null,
data.buildType === BuildType.nixpacks ? data.publishDirectory : null,
dockerfile:
data.buildType === BuildType.dockerfile ? data.dockerfile : null,
dockerContextPath:
data.buildType === "dockerfile" ? data.dockerContextPath : null,
data.buildType === BuildType.dockerfile ? data.dockerContextPath : null,
dockerBuildStage:
data.buildType === "dockerfile" ? data.dockerBuildStage : null,
data.buildType === BuildType.dockerfile ? data.dockerBuildStage : null,
herokuVersion:
data.buildType === "heroku_buildpacks" ? data.herokuVersion : null,
data.buildType === BuildType.heroku_buildpacks
? data.herokuVersion
: null,
})
.then(async () => {
toast.success("Build type saved");
@ -160,193 +208,143 @@ export const ShowBuildChooseForm = ({ applicationId }: Props) => {
control={form.control}
name="buildType"
defaultValue={form.control._defaultValues.buildType}
render={({ field }) => {
return (
<FormItem className="space-y-3">
<FormLabel>Build Type</FormLabel>
<FormControl>
<RadioGroup
onValueChange={field.onChange}
value={field.value}
className="flex flex-col space-y-1"
>
<FormItem className="flex items-center space-x-3 space-y-0">
<FormControl>
<RadioGroupItem value="dockerfile" />
</FormControl>
<FormLabel className="font-normal">
Dockerfile
</FormLabel>
</FormItem>
<FormItem className="flex items-center space-x-3 space-y-0">
<FormControl>
<RadioGroupItem value="railpack" />
</FormControl>
<FormLabel className="font-normal">
Railpack{" "}
<Badge className="ml-1 text-xs px-1">New</Badge>
</FormLabel>
</FormItem>
<FormItem className="flex items-center space-x-3 space-y-0">
<FormControl>
<RadioGroupItem value="nixpacks" />
</FormControl>
<FormLabel className="font-normal">
Nixpacks
</FormLabel>
</FormItem>
<FormItem className="flex items-center space-x-3 space-y-0">
<FormControl>
<RadioGroupItem value="heroku_buildpacks" />
</FormControl>
<FormLabel className="font-normal">
Heroku Buildpacks
</FormLabel>
</FormItem>
<FormItem className="flex items-center space-x-3 space-y-0">
<FormControl>
<RadioGroupItem value="paketo_buildpacks" />
</FormControl>
<FormLabel className="font-normal">
Paketo Buildpacks
</FormLabel>
</FormItem>
<FormItem className="flex items-center space-x-3 space-y-0">
<FormControl>
<RadioGroupItem value="static" />
</FormControl>
<FormLabel className="font-normal">Static</FormLabel>
</FormItem>
</RadioGroup>
</FormControl>
<FormMessage />
</FormItem>
);
}}
render={({ field }) => (
<FormItem className="space-y-3">
<FormLabel>Build Type</FormLabel>
<FormControl>
<RadioGroup
onValueChange={field.onChange}
value={field.value}
className="flex flex-col space-y-1"
>
{Object.entries(buildTypeDisplayMap).map(
([value, label]) => (
<FormItem
key={value}
className="flex items-center space-x-3 space-y-0"
>
<FormControl>
<RadioGroupItem value={value} />
</FormControl>
<FormLabel className="font-normal">
{label}
{value === BuildType.railpack && (
<Badge className="ml-2 px-1 text-xs">New</Badge>
)}
</FormLabel>
</FormItem>
),
)}
</RadioGroup>
</FormControl>
<FormMessage />
</FormItem>
)}
/>
{buildType === "heroku_buildpacks" && (
{buildType === BuildType.heroku_buildpacks && (
<FormField
control={form.control}
name="herokuVersion"
render={({ field }) => {
return (
<FormItem>
<FormLabel>Heroku Version (Optional)</FormLabel>
<FormControl>
<Input
placeholder={"Heroku Version (Default: 24)"}
{...field}
value={field.value ?? ""}
/>
</FormControl>
<FormMessage />
</FormItem>
);
}}
render={({ field }) => (
<FormItem>
<FormLabel>Heroku Version (Optional)</FormLabel>
<FormControl>
<Input
placeholder="Heroku Version (Default: 24)"
{...field}
value={field.value ?? ""}
/>
</FormControl>
<FormMessage />
</FormItem>
)}
/>
)}
{buildType === "dockerfile" && (
{buildType === BuildType.dockerfile && (
<>
<FormField
control={form.control}
name="dockerfile"
render={({ field }) => {
return (
<FormItem>
<FormLabel>Docker File</FormLabel>
<FormControl>
<Input
placeholder={"Path of your docker file"}
{...field}
value={field.value ?? ""}
/>
</FormControl>
<FormMessage />
</FormItem>
);
}}
/>
<FormField
control={form.control}
name="dockerContextPath"
render={({ field }) => {
return (
<FormItem>
<FormLabel>Docker Context Path</FormLabel>
<FormControl>
<Input
placeholder={
"Path of your docker context default: ."
}
{...field}
value={field.value ?? ""}
/>
</FormControl>
<FormMessage />
</FormItem>
);
}}
/>
<FormField
control={form.control}
name="dockerBuildStage"
render={({ field }) => {
return (
<FormItem>
<div className="space-y-0.5">
<FormLabel>Docker Build Stage</FormLabel>
<FormDescription>
Allows you to target a specific stage in a
Multi-stage Dockerfile. If empty, Docker defaults to
build the last defined stage.
</FormDescription>
</div>
<FormControl>
<Input
placeholder={"E.g. production"}
{...field}
value={field.value ?? ""}
/>
</FormControl>
</FormItem>
);
}}
/>
</>
)}
{buildType === "nixpacks" && (
<FormField
control={form.control}
name="publishDirectory"
render={({ field }) => {
return (
render={({ field }) => (
<FormItem>
<div className="space-y-0.5">
<FormLabel>Publish Directory</FormLabel>
<FormDescription>
Allows you to serve a single directory via NGINX after
the build phase. Useful if the final build assets
should be served as a static site.
</FormDescription>
</div>
<FormLabel>Docker File</FormLabel>
<FormControl>
<Input
placeholder={"Publish Directory"}
placeholder="Path of your docker file"
{...field}
value={field.value ?? ""}
/>
</FormControl>
<FormMessage />
</FormItem>
);
}}
)}
/>
<FormField
control={form.control}
name="dockerContextPath"
render={({ field }) => (
<FormItem>
<FormLabel>Docker Context Path</FormLabel>
<FormControl>
<Input
placeholder="Path of your docker context (default: .)"
{...field}
value={field.value ?? ""}
/>
</FormControl>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="dockerBuildStage"
render={({ field }) => (
<FormItem>
<div className="space-y-0.5">
<FormLabel>Docker Build Stage</FormLabel>
<FormDescription>
Allows you to target a specific stage in a Multi-stage
Dockerfile. If empty, Docker defaults to build the
last defined stage.
</FormDescription>
</div>
<FormControl>
<Input
placeholder="E.g. production"
{...field}
value={field.value ?? ""}
/>
</FormControl>
</FormItem>
)}
/>
</>
)}
{buildType === BuildType.nixpacks && (
<FormField
control={form.control}
name="publishDirectory"
render={({ field }) => (
<FormItem>
<div className="space-y-0.5">
<FormLabel>Publish Directory</FormLabel>
<FormDescription>
Allows you to serve a single directory via NGINX after
the build phase. Useful if the final build assets should
be served as a static site.
</FormDescription>
</div>
<FormControl>
<Input
placeholder="Publish Directory"
{...field}
value={field.value ?? ""}
/>
</FormControl>
<FormMessage />
</FormItem>
)}
/>
)}
<div className="flex w-full justify-end">

View File

@ -61,7 +61,7 @@ type AddPostgresBackup = z.infer<typeof AddPostgresBackup1Schema>;
interface Props {
databaseId: string;
databaseType: "postgres" | "mariadb" | "mysql" | "mongo";
databaseType: "postgres" | "mariadb" | "mysql" | "mongo" | "web-server";
refetch: () => void;
}
@ -85,7 +85,7 @@ export const AddBackup = ({ databaseId, databaseType, refetch }: Props) => {
useEffect(() => {
form.reset({
database: "",
database: databaseType === "web-server" ? "dokploy" : "",
destinationId: "",
enabled: true,
prefix: "/",
@ -112,7 +112,11 @@ export const AddBackup = ({ databaseId, databaseType, refetch }: Props) => {
? {
mongoId: databaseId,
}
: undefined;
: databaseType === "web-server"
? {
userId: databaseId,
}
: undefined;
await createBackup({
destinationId: data.destinationId,
@ -236,7 +240,11 @@ export const AddBackup = ({ databaseId, databaseType, refetch }: Props) => {
<FormItem>
<FormLabel>Database</FormLabel>
<FormControl>
<Input placeholder={"dokploy"} {...field} />
<Input
disabled={databaseType === "web-server"}
placeholder={"dokploy"}
{...field}
/>
</FormControl>
<FormMessage />
</FormItem>

View File

@ -47,8 +47,8 @@ import { type LogLine, parseLogs } from "../../docker/logs/utils";
interface Props {
databaseId: string;
databaseType: Exclude<ServiceType, "application" | "redis">;
serverId: string | null;
databaseType: Exclude<ServiceType, "application" | "redis"> | "web-server";
serverId?: string | null;
}
const RestoreBackupSchema = z.object({
@ -91,7 +91,7 @@ export const RestoreBackup = ({
defaultValues: {
destinationId: "",
backupFile: "",
databaseName: "",
databaseName: databaseType === "web-server" ? "dokploy" : "",
},
resolver: zodResolver(RestoreBackupSchema),
});
@ -340,7 +340,11 @@ export const RestoreBackup = ({
<FormItem className="">
<FormLabel>Database Name</FormLabel>
<FormControl>
<Input {...field} placeholder="Enter database name" />
<Input
disabled={databaseType === "web-server"}
{...field}
placeholder="Enter database name"
/>
</FormControl>
<FormMessage />
</FormItem>

View File

@ -14,7 +14,7 @@ import {
TooltipTrigger,
} from "@/components/ui/tooltip";
import { api } from "@/utils/api";
import { DatabaseBackup, Play, Trash2 } from "lucide-react";
import { Database, DatabaseBackup, Play, Trash2 } from "lucide-react";
import Link from "next/link";
import { useState } from "react";
import { toast } from "sonner";
@ -25,7 +25,7 @@ import { UpdateBackup } from "./update-backup";
interface Props {
id: string;
type: Exclude<ServiceType, "application" | "redis">;
type: Exclude<ServiceType, "application" | "redis"> | "web-server";
}
export const ShowBackups = ({ id, type }: Props) => {
const [activeManualBackup, setActiveManualBackup] = useState<
@ -38,6 +38,7 @@ export const ShowBackups = ({ id, type }: Props) => {
mariadb: () =>
api.mariadb.one.useQuery({ mariadbId: id }, { enabled: !!id }),
mongo: () => api.mongo.one.useQuery({ mongoId: id }, { enabled: !!id }),
"web-server": () => api.user.getBackups.useQuery(),
};
const { data } = api.destination.all.useQuery();
const { data: postgres, refetch } = queryMap[type]
@ -49,6 +50,7 @@ export const ShowBackups = ({ id, type }: Props) => {
mysql: () => api.backup.manualBackupMySql.useMutation(),
mariadb: () => api.backup.manualBackupMariadb.useMutation(),
mongo: () => api.backup.manualBackupMongo.useMutation(),
"web-server": () => api.backup.manualBackupWebServer.useMutation(),
};
const { mutateAsync: manualBackup, isLoading: isManualBackup } = mutationMap[
@ -64,7 +66,10 @@ export const ShowBackups = ({ id, type }: Props) => {
<Card className="bg-background">
<CardHeader className="flex flex-row justify-between gap-4 flex-wrap">
<div className="flex flex-col gap-0.5">
<CardTitle className="text-xl">Backups</CardTitle>
<CardTitle className="text-xl flex flex-row gap-2">
<Database className="size-6 text-muted-foreground" />
Backups
</CardTitle>
<CardDescription>
Add backups to your database to save the data to a different
provider.
@ -73,11 +78,17 @@ export const ShowBackups = ({ id, type }: Props) => {
{postgres && postgres?.backups?.length > 0 && (
<div className="flex flex-col lg:flex-row gap-4 w-full lg:w-auto">
<AddBackup databaseId={id} databaseType={type} refetch={refetch} />
{type !== "web-server" && (
<AddBackup
databaseId={id}
databaseType={type}
refetch={refetch}
/>
)}
<RestoreBackup
databaseId={id}
databaseType={type}
serverId={postgres.serverId}
serverId={"serverId" in postgres ? postgres.serverId : undefined}
/>
</div>
)}
@ -115,7 +126,9 @@ export const ShowBackups = ({ id, type }: Props) => {
<RestoreBackup
databaseId={id}
databaseType={type}
serverId={postgres.serverId}
serverId={
"serverId" in postgres ? postgres.serverId : undefined
}
/>
</div>
</div>

View File

@ -0,0 +1,172 @@
import { Button } from "@/components/ui/button";
import {
Dialog,
DialogContent,
DialogDescription,
DialogFooter,
DialogHeader,
DialogTitle,
DialogTrigger,
} from "@/components/ui/dialog";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import { api } from "@/utils/api";
import { Copy, Loader2 } from "lucide-react";
import { useRouter } from "next/router";
import { useState } from "react";
import { toast } from "sonner";
// Minimal service descriptor passed down from the project page: enough to
// identify a service (id, type) and render it (name, description, status).
export type Services = {
appName: string;
serverId?: string | null;
name: string;
type:
| "mariadb"
| "application"
| "postgres"
| "mysql"
| "mongo"
| "redis"
| "compose";
description?: string | null;
id: string;
createdAt: string;
status?: "idle" | "running" | "done" | "error";
};
// Props for DuplicateProject: the source project plus the full service list
// and the ids the user selected — only selected services are duplicated.
interface DuplicateProjectProps {
projectId: string;
services: Services[];
selectedServiceIds: string[];
}
/**
 * Dialog that duplicates a project together with the services the user
 * selected in the parent list. On success it invalidates the project list
 * cache, closes the dialog, and navigates to the new project.
 */
export const DuplicateProject = ({
	projectId,
	services,
	selectedServiceIds,
}: DuplicateProjectProps) => {
	const [open, setOpen] = useState(false);
	const [name, setName] = useState("");
	const [description, setDescription] = useState("");
	const utils = api.useUtils();
	const router = useRouter();

	// Only the services ticked in the parent list are sent to the API.
	const selectedServices = services.filter((service) =>
		selectedServiceIds.includes(service.id),
	);

	const { mutateAsync: duplicateProject, isLoading } =
		api.project.duplicate.useMutation({
			onSuccess: async (newProject) => {
				await utils.project.all.invalidate();
				toast.success("Project duplicated successfully");
				setOpen(false);
				router.push(`/dashboard/project/${newProject.projectId}`);
			},
			onError: (error) => {
				toast.error(error.message);
			},
		});

	const handleDuplicate = async () => {
		if (!name) {
			toast.error("Project name is required");
			return;
		}
		try {
			await duplicateProject({
				sourceProjectId: projectId,
				name,
				description,
				includeServices: true,
				selectedServices: selectedServices.map((service) => ({
					id: service.id,
					type: service.type,
				})),
			});
		} catch {
			// User feedback is already handled by the mutation's onError
			// callback; catching here prevents an unhandled promise rejection
			// (mutateAsync still rejects even when onError is registered).
		}
	};

	return (
		<Dialog
			open={open}
			onOpenChange={(isOpen) => {
				setOpen(isOpen);
				if (!isOpen) {
					// Reset form when closing
					setName("");
					setDescription("");
				}
			}}
		>
			<DialogTrigger asChild>
				<Button variant="ghost" className="w-full justify-start">
					<Copy className="mr-2 h-4 w-4" />
					Duplicate
				</Button>
			</DialogTrigger>
			<DialogContent>
				<DialogHeader>
					<DialogTitle>Duplicate Project</DialogTitle>
					<DialogDescription>
						Create a new project with the selected services
					</DialogDescription>
				</DialogHeader>
				<div className="grid gap-4 py-4">
					<div className="grid gap-2">
						<Label htmlFor="name">Name</Label>
						<Input
							id="name"
							value={name}
							onChange={(e) => setName(e.target.value)}
							placeholder="New project name"
						/>
					</div>
					<div className="grid gap-2">
						<Label htmlFor="description">Description</Label>
						<Input
							id="description"
							value={description}
							onChange={(e) => setDescription(e.target.value)}
							placeholder="Project description (optional)"
						/>
					</div>
					<div className="grid gap-2">
						<Label>Selected services to duplicate</Label>
						<div className="space-y-2 max-h-[200px] overflow-y-auto border rounded-md p-4">
							{selectedServices.map((service) => (
								<div key={service.id} className="flex items-center space-x-2">
									<span className="text-sm">
										{service.name} ({service.type})
									</span>
								</div>
							))}
						</div>
					</div>
				</div>
				<DialogFooter>
					<Button
						variant="outline"
						onClick={() => setOpen(false)}
						disabled={isLoading}
					>
						Cancel
					</Button>
					<Button onClick={handleDuplicate} disabled={isLoading}>
						{isLoading ? (
							<>
								<Loader2 className="mr-2 h-4 w-4 animate-spin" />
								Duplicating...
							</>
						) : (
							"Duplicate"
						)}
					</Button>
				</DialogFooter>
			</DialogContent>
		</Dialog>
	);
};

View File

@ -0,0 +1,2 @@
ALTER TABLE "backup" ADD COLUMN "userId" text;--> statement-breakpoint
ALTER TABLE "backup" ADD CONSTRAINT "backup_userId_user_temp_id_fk" FOREIGN KEY ("userId") REFERENCES "public"."user_temp"("id") ON DELETE no action ON UPDATE no action;

View File

@ -0,0 +1 @@
ALTER TYPE "public"."databaseType" ADD VALUE 'web-server';

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -575,6 +575,20 @@
"when": 1743281254393,
"tag": "0081_lovely_mentallo",
"breakpoints": true
},
{
"idx": 82,
"version": "7",
"when": 1743287689974,
"tag": "0082_clean_mandarin",
"breakpoints": true
},
{
"idx": 83,
"version": "7",
"when": 1743288371413,
"tag": "0083_parallel_stranger",
"breakpoints": true
}
]
}

View File

@ -1,6 +1,6 @@
{
"name": "dokploy",
"version": "v0.20.8",
"version": "v0.21.0",
"private": true,
"license": "Apache-2.0",
"type": "module",
@ -150,7 +150,8 @@
"ws": "8.16.0",
"xterm-addon-fit": "^0.8.0",
"zod": "^3.23.4",
"zod-form-data": "^2.0.2"
"zod-form-data": "^2.0.2",
"toml": "3.0.0"
},
"devDependencies": {
"@types/adm-zip": "^0.5.5",

View File

@ -92,6 +92,7 @@ import { useRouter } from "next/router";
import { type ReactElement, useEffect, useMemo, useState } from "react";
import { toast } from "sonner";
import superjson from "superjson";
import { DuplicateProject } from "@/components/dashboard/project/duplicate-project";
export type Services = {
appName: string;
@ -553,7 +554,7 @@ const Project = (
</CardTitle>
<CardDescription>{data?.description}</CardDescription>
</CardHeader>
{(auth?.role === "owner" || auth?.canCreateServices) && (
<div className="flex flex-row gap-4 flex-wrap justify-between items-center">
<div className="flex flex-row gap-4 flex-wrap">
<ProjectEnvironment projectId={projectId}>
<Button variant="outline">Project Environment</Button>
@ -569,7 +570,7 @@ const Project = (
className="w-[200px] space-y-2"
align="end"
>
<DropdownMenuLabel className="text-sm font-normal ">
<DropdownMenuLabel className="text-sm font-normal">
Actions
</DropdownMenuLabel>
<DropdownMenuSeparator />
@ -593,7 +594,7 @@ const Project = (
</DropdownMenuContent>
</DropdownMenu>
</div>
)}
</div>
</div>
<CardContent className="space-y-2 py-8 border-t gap-4 flex flex-col min-h-[60vh]">
{isLoading ? (
@ -670,20 +671,27 @@ const Project = (
</DialogAction>
{(auth?.role === "owner" ||
auth?.canDeleteServices) && (
<DialogAction
title="Delete Services"
description={`Are you sure you want to delete ${selectedServices.length} services? This action cannot be undone.`}
type="destructive"
onClick={handleBulkDelete}
>
<Button
variant="ghost"
className="w-full justify-start text-destructive"
<>
<DialogAction
title="Delete Services"
description={`Are you sure you want to delete ${selectedServices.length} services? This action cannot be undone.`}
type="destructive"
onClick={handleBulkDelete}
>
<Trash2 className="mr-2 h-4 w-4" />
Delete
</Button>
</DialogAction>
<Button
variant="ghost"
className="w-full justify-start text-destructive"
>
<Trash2 className="mr-2 h-4 w-4" />
Delete
</Button>
</DialogAction>
<DuplicateProject
projectId={projectId}
services={applications}
selectedServiceIds={selectedServices}
/>
</>
)}
<Dialog

View File

@ -8,56 +8,21 @@ import { createServerSideHelpers } from "@trpc/react-query/server";
import type { GetServerSidePropsContext } from "next";
import type { ReactElement } from "react";
import superjson from "superjson";
import { api } from "@/utils/api";
import { ShowBackups } from "@/components/dashboard/database/backups/show-backups";
import { Card } from "@/components/ui/card";
const Page = () => {
const { data: user } = api.user.get.useQuery();
return (
<div className="w-full">
<div className="h-full rounded-xl max-w-5xl mx-auto flex flex-col gap-4">
<WebDomain />
<WebServer />
{/* <Card className="h-full bg-sidebar p-2.5 rounded-xl ">
<div className="rounded-xl bg-background shadow-md ">
<CardHeader className="">
<CardTitle className="text-xl flex flex-row gap-2">
<LayoutDashboardIcon className="size-6 text-muted-foreground self-center" />
Paid Features
</CardTitle>
<CardDescription>
Enable or disable paid features like monitoring
</CardDescription>
</CardHeader>
<CardContent>
<div className="flex flex-row gap-2 items-center">
<span className="text-sm font-medium text-muted-foreground">
Enable Paid Features:
</span>
<Switch
checked={data?.enablePaidFeatures}
onCheckedChange={() => {
update({
enablePaidFeatures: !data?.enablePaidFeatures,
})
.then(() => {
toast.success(
`Paid features ${
data?.enablePaidFeatures ? "disabled" : "enabled"
} successfully`,
);
refetch();
})
.catch(() => {
toast.error("Error updating paid features");
});
}}
/>
</div>
</CardContent>
{data?.enablePaidFeatures && <SetupMonitoring />}
</div>
</Card> */}
{/* */}
<div className="w-full flex flex-col gap-4">
<Card className="h-full bg-sidebar p-2.5 rounded-xl mx-auto w-full">
<ShowBackups id={user?.userId ?? ""} type="web-server" />
</Card>
</div>
</div>
</div>
);

View File

@ -25,6 +25,7 @@ import {
runMongoBackup,
runMySqlBackup,
runPostgresBackup,
runWebServerBackup,
scheduleBackup,
updateBackupById,
} from "@dokploy/server";
@ -40,6 +41,7 @@ import {
restoreMongoBackup,
restoreMySqlBackup,
restorePostgresBackup,
restoreWebServerBackup,
} from "@dokploy/server/utils/restore";
import { TRPCError } from "@trpc/server";
import { observable } from "@trpc/server/observable";
@ -85,9 +87,13 @@ export const backupRouter = createTRPCRouter({
}
}
} catch (error) {
console.error(error);
throw new TRPCError({
code: "BAD_REQUEST",
message: "Error creating the Backup",
message:
error instanceof Error
? error.message
: "Error creating the Backup",
cause: error,
});
}
@ -227,6 +233,13 @@ export const backupRouter = createTRPCRouter({
});
}
}),
manualBackupWebServer: protectedProcedure
.input(apiFindOneBackup)
.mutation(async ({ input }) => {
const backup = await findBackupById(input.backupId);
await runWebServerBackup(backup);
return true;
}),
listBackupFiles: protectedProcedure
.input(
z.object({
@ -301,7 +314,13 @@ export const backupRouter = createTRPCRouter({
.input(
z.object({
databaseId: z.string(),
databaseType: z.enum(["postgres", "mysql", "mariadb", "mongo"]),
databaseType: z.enum([
"postgres",
"mysql",
"mariadb",
"mongo",
"web-server",
]),
databaseName: z.string().min(1),
backupFile: z.string().min(1),
destinationId: z.string().min(1),
@ -366,6 +385,13 @@ export const backupRouter = createTRPCRouter({
);
});
}
if (input.databaseType === "web-server") {
return observable<string>((emit) => {
restoreWebServerBackup(destination, input.backupFile, (log) => {
emit.next(log);
});
});
}
return true;
}),

View File

@ -50,7 +50,8 @@ import {
import { processTemplate } from "@dokploy/server/templates/processors";
import { TRPCError } from "@trpc/server";
import { eq } from "drizzle-orm";
import { dump, load } from "js-yaml";
import { dump } from "js-yaml";
import { parse } from "toml";
import _ from "lodash";
import { nanoid } from "nanoid";
import { z } from "zod";
@ -594,7 +595,7 @@ export const composeRouter = createTRPCRouter({
serverIp = "127.0.0.1";
}
const templateData = JSON.parse(decodedData);
const config = load(templateData.config) as CompleteTemplate;
const config = parse(templateData.config) as CompleteTemplate;
if (!templateData.compose || !config) {
throw new TRPCError({
@ -663,7 +664,8 @@ export const composeRouter = createTRPCRouter({
}
const templateData = JSON.parse(decodedData);
const config = load(templateData.config) as CompleteTemplate;
const config = parse(templateData.config) as CompleteTemplate;
if (!templateData.compose || !config) {
throw new TRPCError({
@ -678,7 +680,6 @@ export const composeRouter = createTRPCRouter({
projectName: compose.appName,
});
// Update compose file
await updateCompose(input.composeId, {
composeFile: templateData.compose,
sourceType: "raw",
@ -686,7 +687,6 @@ export const composeRouter = createTRPCRouter({
isolatedDeployment: true,
});
// Create mounts
if (processedTemplate.mounts && processedTemplate.mounts.length > 0) {
for (const mount of processedTemplate.mounts) {
await createMount({
@ -700,7 +700,6 @@ export const composeRouter = createTRPCRouter({
}
}
// Create domains
if (processedTemplate.domains && processedTemplate.domains.length > 0) {
for (const domain of processedTemplate.domains) {
await createDomain({

View File

@ -14,21 +14,44 @@ import {
projects,
redis,
} from "@/server/db/schema";
import { z } from "zod";
import {
IS_CLOUD,
addNewProject,
checkProjectAccess,
createApplication,
createCompose,
createMariadb,
createMongo,
createMysql,
createPostgres,
createProject,
createRedis,
deleteProject,
findApplicationById,
findComposeById,
findMongoById,
findMemberById,
findRedisById,
findProjectById,
findUserById,
updateProjectById,
findPostgresById,
findMariadbById,
findMySqlById,
createDomain,
createPort,
createMount,
createRedirect,
createPreviewDeployment,
createBackup,
createSecurity,
} from "@dokploy/server";
import { TRPCError } from "@trpc/server";
import { and, desc, eq, sql } from "drizzle-orm";
import type { AnyPgColumn } from "drizzle-orm/pg-core";
export const projectRouter = createTRPCRouter({
create: protectedProcedure
.input(apiCreateProject)
@ -266,7 +289,317 @@ export const projectRouter = createTRPCRouter({
throw error;
}
}),
/**
 * Duplicates a project within the caller's active organization.
 *
 * Copies the project's name/description/env into a new project, then (optionally)
 * deep-copies the selected services. For each service the primary key and all
 * child-row ids are stripped before re-insertion so fresh ids are generated.
 * Throws TRPCError UNAUTHORIZED when the source project belongs to another
 * organization, and BAD_REQUEST (wrapping the cause) on any other failure.
 */
duplicate: protectedProcedure
	.input(
		z.object({
			sourceProjectId: z.string(),
			name: z.string(),
			description: z.string().optional(),
			// When false, only the project shell (name/description/env) is copied.
			includeServices: z.boolean().default(true),
			// Explicit list of services to copy; when omitted, nothing is duplicated
			// even if includeServices is true (defaults to an empty list below).
			selectedServices: z
				.array(
					z.object({
						id: z.string(),
						type: z.enum([
							"application",
							"postgres",
							"mariadb",
							"mongo",
							"mysql",
							"redis",
							"compose",
						]),
					}),
				)
				.optional(),
		}),
	)
	.mutation(async ({ ctx, input }) => {
		try {
			// Members need explicit "create" permission; other roles pass through.
			if (ctx.user.rol === "member") {
				await checkProjectAccess(
					ctx.user.id,
					"create",
					ctx.session.activeOrganizationId,
				);
			}

			// Get source project and make sure it belongs to the caller's org.
			const sourceProject = await findProjectById(input.sourceProjectId);
			if (sourceProject.organizationId !== ctx.session.activeOrganizationId) {
				throw new TRPCError({
					code: "UNAUTHORIZED",
					message: "You are not authorized to access this project",
				});
			}

			// Create new project (env is inherited from the source project).
			const newProject = await createProject(
				{
					name: input.name,
					description: input.description,
					env: sourceProject.env,
				},
				ctx.session.activeOrganizationId,
			);

			if (input.includeServices) {
				const servicesToDuplicate = input.selectedServices || [];

				// Helper function to duplicate a single service by id/type.
				// Each case destructures the primary key (and relation arrays) off
				// the fetched row so the spread inserts a brand-new record.
				const duplicateService = async (id: string, type: string) => {
					switch (type) {
						case "application": {
							const {
								applicationId,
								domains,
								security,
								ports,
								registry,
								redirects,
								previewDeployments,
								mounts,
								...application
							} = await findApplicationById(id);
							// NOTE(review): `registry` is destructured but not re-linked,
							// so the copy loses its registry association — confirm intended.
							const newApplication = await createApplication({
								...application,
								projectId: newProject.projectId,
							});

							// Re-create each child row against the new application id.
							for (const domain of domains) {
								const { domainId, ...rest } = domain;
								await createDomain({
									...rest,
									applicationId: newApplication.applicationId,
									domainType: "application",
								});
							}

							for (const port of ports) {
								const { portId, ...rest } = port;
								await createPort({
									...rest,
									applicationId: newApplication.applicationId,
								});
							}

							for (const mount of mounts) {
								const { mountId, ...rest } = mount;
								await createMount({
									...rest,
									serviceId: newApplication.applicationId,
									serviceType: "application",
								});
							}

							for (const redirect of redirects) {
								const { redirectId, ...rest } = redirect;
								await createRedirect({
									...rest,
									applicationId: newApplication.applicationId,
								});
							}

							for (const secure of security) {
								const { securityId, ...rest } = secure;
								await createSecurity({
									...rest,
									applicationId: newApplication.applicationId,
								});
							}

							for (const previewDeployment of previewDeployments) {
								const { previewDeploymentId, ...rest } = previewDeployment;
								await createPreviewDeployment({
									...rest,
									applicationId: newApplication.applicationId,
								});
							}

							break;
						}
						case "postgres": {
							// Databases copy their mounts and backup schedules.
							const { postgresId, mounts, backups, ...postgres } =
								await findPostgresById(id);
							const newPostgres = await createPostgres({
								...postgres,
								projectId: newProject.projectId,
							});

							for (const mount of mounts) {
								const { mountId, ...rest } = mount;
								await createMount({
									...rest,
									serviceId: newPostgres.postgresId,
									serviceType: "postgres",
								});
							}

							for (const backup of backups) {
								const { backupId, ...rest } = backup;
								await createBackup({
									...rest,
									postgresId: newPostgres.postgresId,
								});
							}
							break;
						}
						case "mariadb": {
							const { mariadbId, mounts, backups, ...mariadb } =
								await findMariadbById(id);
							const newMariadb = await createMariadb({
								...mariadb,
								projectId: newProject.projectId,
							});

							for (const mount of mounts) {
								const { mountId, ...rest } = mount;
								await createMount({
									...rest,
									serviceId: newMariadb.mariadbId,
									serviceType: "mariadb",
								});
							}

							for (const backup of backups) {
								const { backupId, ...rest } = backup;
								await createBackup({
									...rest,
									mariadbId: newMariadb.mariadbId,
								});
							}
							break;
						}
						case "mongo": {
							const { mongoId, mounts, backups, ...mongo } =
								await findMongoById(id);
							const newMongo = await createMongo({
								...mongo,
								projectId: newProject.projectId,
							});

							for (const mount of mounts) {
								const { mountId, ...rest } = mount;
								await createMount({
									...rest,
									serviceId: newMongo.mongoId,
									serviceType: "mongo",
								});
							}

							for (const backup of backups) {
								const { backupId, ...rest } = backup;
								await createBackup({
									...rest,
									mongoId: newMongo.mongoId,
								});
							}
							break;
						}
						case "mysql": {
							const { mysqlId, mounts, backups, ...mysql } =
								await findMySqlById(id);
							const newMysql = await createMysql({
								...mysql,
								projectId: newProject.projectId,
							});

							for (const mount of mounts) {
								const { mountId, ...rest } = mount;
								await createMount({
									...rest,
									serviceId: newMysql.mysqlId,
									serviceType: "mysql",
								});
							}

							for (const backup of backups) {
								const { backupId, ...rest } = backup;
								await createBackup({
									...rest,
									mysqlId: newMysql.mysqlId,
								});
							}
							break;
						}
						case "redis": {
							// Redis has no backup schedules in this schema; mounts only.
							const { redisId, mounts, ...redis } = await findRedisById(id);
							const newRedis = await createRedis({
								...redis,
								projectId: newProject.projectId,
							});

							for (const mount of mounts) {
								const { mountId, ...rest } = mount;
								await createMount({
									...rest,
									serviceId: newRedis.redisId,
									serviceType: "redis",
								});
							}
							break;
						}
						case "compose": {
							const { composeId, mounts, domains, ...compose } =
								await findComposeById(id);
							const newCompose = await createCompose({
								...compose,
								projectId: newProject.projectId,
							});

							for (const mount of mounts) {
								const { mountId, ...rest } = mount;
								await createMount({
									...rest,
									serviceId: newCompose.composeId,
									serviceType: "compose",
								});
							}

							for (const domain of domains) {
								const { domainId, ...rest } = domain;
								await createDomain({
									...rest,
									composeId: newCompose.composeId,
									domainType: "compose",
								});
							}
							break;
						}
					}
				};

				// Duplicate selected services sequentially (order of inserts matters
				// only within a service; services themselves are independent).
				for (const service of servicesToDuplicate) {
					await duplicateService(service.id, service.type);
				}
			}

			// Members must also be granted access to the project they just created.
			if (ctx.user.rol === "member") {
				await addNewProject(
					ctx.user.id,
					newProject.projectId,
					ctx.session.activeOrganizationId,
				);
			}

			return newProject;
		} catch (error) {
			// Surface any failure (including the UNAUTHORIZED above) as BAD_REQUEST
			// with the original error attached as the cause.
			throw new TRPCError({
				code: "BAD_REQUEST",
				message: `Error duplicating the project: ${error instanceof Error ? error.message : error}`,
				cause: error,
			});
		}
	}),
});
function buildServiceFilter(
fieldName: AnyPgColumn,
accessedServices: string[],

View File

@ -91,6 +91,28 @@ export const userRouter = createTRPCRouter({
return memberResult;
}),
// Returns the calling admin's user record for the active organization,
// eagerly loading backup schedules (with their destinations) and API keys.
// Resolves to undefined when no membership row matches.
getBackups: adminProcedure.query(async ({ ctx }) => {
	// Membership is scoped to the active organization; fall back to "" so the
	// query simply matches nothing when no organization is active.
	const membershipFilter = and(
		eq(member.userId, ctx.user.id),
		eq(member.organizationId, ctx.session?.activeOrganizationId || ""),
	);

	const membership = await db.query.member.findFirst({
		where: membershipFilter,
		with: {
			user: {
				with: {
					// Each backup schedule is returned with its S3 destination.
					backups: { with: { destination: true } },
					apiKeys: true,
				},
			},
		},
	});

	return membership?.user;
}),
getServerMetrics: protectedProcedure.query(async ({ ctx }) => {
const memberResult = await db.query.member.findFirst({
where: and(

View File

@ -28,7 +28,7 @@
"typecheck": "tsc --noEmit"
},
"dependencies": {
"micromatch":"4.0.8",
"micromatch": "4.0.8",
"@ai-sdk/anthropic": "^1.0.6",
"@ai-sdk/azure": "^1.0.15",
"@ai-sdk/cohere": "^1.0.6",
@ -36,11 +36,11 @@
"@ai-sdk/mistral": "^1.0.6",
"@ai-sdk/openai": "^1.0.12",
"@ai-sdk/openai-compatible": "^0.0.13",
"@better-auth/utils":"0.2.3",
"@oslojs/encoding":"1.1.0",
"@oslojs/crypto":"1.0.1",
"drizzle-dbml-generator":"0.10.0",
"better-auth":"1.2.4",
"@better-auth/utils": "0.2.3",
"@oslojs/encoding": "1.1.0",
"@oslojs/crypto": "1.0.1",
"drizzle-dbml-generator": "0.10.0",
"better-auth": "1.2.4",
"@faker-js/faker": "^8.4.1",
"@octokit/auth-app": "^6.0.4",
"@react-email/components": "^0.0.21",
@ -76,7 +76,8 @@
"ws": "8.16.0",
"zod": "^3.23.4",
"ssh2": "1.15.0",
"@octokit/rest": "^20.0.2"
"@octokit/rest": "^20.0.2",
"toml": "3.0.0"
},
"devDependencies": {
"@types/micromatch": "4.0.9",

View File

@ -15,12 +15,13 @@ import { mariadb } from "./mariadb";
import { mongo } from "./mongo";
import { mysql } from "./mysql";
import { postgres } from "./postgres";
import { users_temp } from "./user";
export const databaseType = pgEnum("databaseType", [
"postgres",
"mariadb",
"mysql",
"mongo",
"web-server",
]);
export const backups = pgTable("backup", {
@ -58,6 +59,7 @@ export const backups = pgTable("backup", {
mongoId: text("mongoId").references((): AnyPgColumn => mongo.mongoId, {
onDelete: "cascade",
}),
userId: text("userId").references(() => users_temp.id),
});
export const backupsRelations = relations(backups, ({ one }) => ({
@ -81,6 +83,10 @@ export const backupsRelations = relations(backups, ({ one }) => ({
fields: [backups.mongoId],
references: [mongo.mongoId],
}),
user: one(users_temp, {
fields: [backups.userId],
references: [users_temp.id],
}),
}));
const createSchema = createInsertSchema(backups, {
@ -91,11 +97,12 @@ const createSchema = createInsertSchema(backups, {
database: z.string().min(1),
schedule: z.string(),
keepLatestCount: z.number().optional(),
databaseType: z.enum(["postgres", "mariadb", "mysql", "mongo"]),
databaseType: z.enum(["postgres", "mariadb", "mysql", "mongo", "web-server"]),
postgresId: z.string().optional(),
mariadbId: z.string().optional(),
mysqlId: z.string().optional(),
mongoId: z.string().optional(),
userId: z.string().optional(),
});
export const apiCreateBackup = createSchema.pick({
@ -110,6 +117,7 @@ export const apiCreateBackup = createSchema.pick({
postgresId: true,
mongoId: true,
databaseType: true,
userId: true,
});
export const apiFindOneBackup = createSchema

View File

@ -139,7 +139,7 @@ const createSchema = createInsertSchema(compose, {
name: z.string().min(1),
description: z.string(),
env: z.string().optional(),
composeFile: z.string().min(1),
composeFile: z.string().optional(),
projectId: z.string(),
customGitSSHKeyId: z.string().optional(),
command: z.string().optional(),
@ -155,6 +155,7 @@ export const apiCreateCompose = createSchema.pick({
composeType: true,
appName: true,
serverId: true,
composeFile: true,
});
export const apiCreateComposeByTemplate = createSchema

View File

@ -52,6 +52,7 @@ const createSchema = createInsertSchema(projects, {
export const apiCreateProject = createSchema.pick({
name: true,
description: true,
env: true,
});
export const apiFindOneProject = createSchema

View File

@ -13,6 +13,7 @@ import { z } from "zod";
import { account, apikey, organization } from "./account";
import { projects } from "./project";
import { certificateType } from "./shared";
import { backups } from "./backups";
/**
* This is an example of how to use the multi-project schema feature of Drizzle ORM. Use the same
* database instance for multiple projects.
@ -124,6 +125,7 @@ export const usersRelations = relations(users_temp, ({ one, many }) => ({
organizations: many(organization),
projects: many(projects),
apiKeys: many(apikey),
backups: many(backups),
}));
const createSchema = createInsertSchema(users_temp, {

View File

@ -48,6 +48,7 @@ export * from "./utils/backups/mongo";
export * from "./utils/backups/mysql";
export * from "./utils/backups/postgres";
export * from "./utils/backups/utils";
export * from "./utils/backups/web-server";
export * from "./templates/processors";
export * from "./utils/notifications/build-error";

View File

@ -69,7 +69,7 @@ export const createCompose = async (input: typeof apiCreateCompose._type) => {
.insert(compose)
.values({
...input,
composeFile: "",
composeFile: input.composeFile || "",
appName,
})
.returning()

View File

@ -1,4 +1,4 @@
import { load } from "js-yaml";
import { parse } from "toml";
/**
* Complete template interface that includes both metadata and configuration
@ -86,7 +86,7 @@ export async function fetchTemplateFiles(
try {
// Fetch both files in parallel
const [templateYmlResponse, dockerComposeResponse] = await Promise.all([
fetch(`${baseUrl}/blueprints/${templateId}/template.yml`),
fetch(`${baseUrl}/blueprints/${templateId}/template.toml`),
fetch(`${baseUrl}/blueprints/${templateId}/docker-compose.yml`),
]);
@ -99,7 +99,7 @@ export async function fetchTemplateFiles(
dockerComposeResponse.text(),
]);
const config = load(templateYml) as CompleteTemplate;
const config = parse(templateYml) as CompleteTemplate;
return { config, dockerCompose };
} catch (error) {

View File

@ -45,7 +45,9 @@ export interface CompleteTemplate {
variables: Record<string, string>;
config: {
domains: DomainConfig[];
env: Record<string, string> | string[];
env:
| Record<string, string | boolean | number>
| (string | Record<string, string | boolean | number>)[];
mounts?: MountConfig[];
};
}
@ -200,17 +202,27 @@ export function processEnvVars(
if (typeof env === "string") {
return processValue(env, variables, schema);
}
return env;
// If it's an object, assume it's a single key-value pair
if (typeof env === "object" && env !== null) {
const keys = Object.keys(env);
if (keys.length > 0) {
const key = keys[0];
return `${key}=${env[key as keyof typeof env]}`;
}
}
// For primitive values (boolean, number)
return String(env);
});
}
// Handle object of env vars
return Object.entries(template.config.env).map(
([key, value]: [string, string]) => {
return Object.entries(template.config.env).map(([key, value]) => {
if (typeof value === "string") {
const processedValue = processValue(value, variables, schema);
return `${key}=${processedValue}`;
},
);
}
return `${key}=${value}`;
});
}
/**

View File

@ -10,11 +10,7 @@ import {
} from "../docker/utils";
import { sendDockerCleanupNotifications } from "../notifications/docker-cleanup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { runMariadbBackup } from "./mariadb";
import { runMongoBackup } from "./mongo";
import { runMySqlBackup } from "./mysql";
import { runPostgresBackup } from "./postgres";
import { getS3Credentials } from "./utils";
import { getS3Credentials, scheduleBackup } from "./utils";
import type { BackupSchedule } from "@dokploy/server/services/backup";
import { startLogCleanup } from "../access-log/handler";
@ -56,126 +52,27 @@ export const initCronJobs = async () => {
}
}
const pgs = await db.query.postgres.findMany({
const backups = await db.query.backups.findMany({
with: {
backups: {
with: {
destination: true,
postgres: true,
mariadb: true,
mysql: true,
mongo: true,
},
},
},
});
for (const pg of pgs) {
for (const backup of pg.backups) {
const { schedule, backupId, enabled, database } = backup;
if (enabled) {
console.log(
`[Backup] Postgres DB ${pg.name} for ${database} Activated`,
);
scheduleJob(backupId, schedule, async () => {
console.log(
`PG-SERVER[${new Date().toLocaleString()}] Running Backup ${backupId}`,
);
await runPostgresBackup(pg, backup);
await keepLatestNBackups(backup, pg.serverId);
});
}
}
}
const mariadbs = await db.query.mariadb.findMany({
with: {
backups: {
with: {
destination: true,
postgres: true,
mariadb: true,
mysql: true,
mongo: true,
},
},
destination: true,
postgres: true,
mariadb: true,
mysql: true,
mongo: true,
user: true,
},
});
for (const maria of mariadbs) {
for (const backup of maria.backups) {
const { schedule, backupId, enabled, database } = backup;
if (enabled) {
console.log(
`[Backup] MariaDB DB ${maria.name} for ${database} Activated`,
);
scheduleJob(backupId, schedule, async () => {
console.log(
`MARIADB-SERVER[${new Date().toLocaleString()}] Running Backup ${backupId}`,
);
await runMariadbBackup(maria, backup);
await keepLatestNBackups(backup, maria.serverId);
});
}
}
}
const mongodbs = await db.query.mongo.findMany({
with: {
backups: {
with: {
destination: true,
postgres: true,
mariadb: true,
mysql: true,
mongo: true,
},
},
},
});
for (const mongo of mongodbs) {
for (const backup of mongo.backups) {
const { schedule, backupId, enabled } = backup;
if (enabled) {
console.log(`[Backup] MongoDB DB ${mongo.name} Activated`);
scheduleJob(backupId, schedule, async () => {
console.log(
`MONGO-SERVER[${new Date().toLocaleString()}] Running Backup ${backupId}`,
);
await runMongoBackup(mongo, backup);
await keepLatestNBackups(backup, mongo.serverId);
});
}
}
}
const mysqls = await db.query.mysql.findMany({
with: {
backups: {
with: {
destination: true,
postgres: true,
mariadb: true,
mysql: true,
mongo: true,
},
},
},
});
for (const mysql of mysqls) {
for (const backup of mysql.backups) {
const { schedule, backupId, enabled } = backup;
if (enabled) {
console.log(`[Backup] MySQL DB ${mysql.name} Activated`);
scheduleJob(backupId, schedule, async () => {
console.log(
`MYSQL-SERVER[${new Date().toLocaleString()}] Running Backup ${backupId}`,
);
await runMySqlBackup(mysql, backup);
await keepLatestNBackups(backup, mysql.serverId);
});
for (const backup of backups) {
try {
if (backup.enabled) {
scheduleBackup(backup);
}
console.log(
`[Backup] ${backup.databaseType} Enabled with cron: [${backup.schedule}]`,
);
} catch (error) {
console.error(`[Backup] ${backup.databaseType} Error`, error);
}
}

View File

@ -6,6 +6,7 @@ import { runMariadbBackup } from "./mariadb";
import { runMongoBackup } from "./mongo";
import { runMySqlBackup } from "./mysql";
import { runPostgresBackup } from "./postgres";
import { runWebServerBackup } from "./web-server";
export const scheduleBackup = (backup: BackupSchedule) => {
const { schedule, backupId, databaseType, postgres, mysql, mongo, mariadb } =
@ -23,6 +24,9 @@ export const scheduleBackup = (backup: BackupSchedule) => {
} else if (databaseType === "mariadb" && mariadb) {
await runMariadbBackup(mariadb, backup);
await keepLatestNBackups(backup, mariadb.serverId);
} else if (databaseType === "web-server") {
await runWebServerBackup(backup);
await keepLatestNBackups(backup);
}
});
};

View File

@ -0,0 +1,45 @@
import type { BackupSchedule } from "@dokploy/server/services/backup";
import { execAsync } from "../process/execAsync";
import { getS3Credentials } from "./utils";
import { findDestinationById } from "@dokploy/server/services/destination";
import { IS_CLOUD, paths } from "@dokploy/server/constants";
import { mkdtemp } from "node:fs/promises";
import { join } from "node:path";
import { tmpdir } from "node:os";
/**
 * Creates a full backup of the Dokploy web server (self-hosted only) and
 * uploads it to the backup's S3 destination via rclone.
 *
 * The backup is a zip containing:
 *  - database.sql   — pg_dump custom-format (-Fc) dump of the `dokploy` DB
 *  - filesystem/    — a copy of everything under BASE_PATH
 *
 * @param backup - schedule row providing destinationId and the S3 key prefix
 * @returns true on success; undefined when running in cloud mode (no-op)
 * @throws rethrows any shell-command failure after logging it
 */
export const runWebServerBackup = async (backup: BackupSchedule) => {
	try {
		// Web-server backups are only meaningful for self-hosted installs.
		if (IS_CLOUD) {
			return;
		}
		const destination = await findDestinationById(backup.destinationId);
		const rcloneFlags = getS3Credentials(destination);
		// ISO timestamp with ':' and '.' replaced so it is filename-safe.
		const timestamp = new Date().toISOString().replace(/[:.]/g, "-");
		const { BASE_PATH } = paths();
		// Staging area in the OS temp dir, outside BASE_PATH.
		const tempDir = await mkdtemp(join(tmpdir(), "dokploy-backup-"));
		const backupFileName = `webserver-backup-${timestamp}.zip`;
		// NOTE(review): assumes backup.prefix ends with "/" (or is empty) —
		// otherwise the prefix fuses with the filename. Confirm against callers.
		const s3Path = `:s3:${destination.bucket}/${backup.prefix}${backupFileName}`;

		try {
			await execAsync(`mkdir -p ${tempDir}/filesystem`);

			// Dump the dokploy Postgres DB from inside its container; the shell
			// redirection writes the dump to the host-side temp dir.
			const postgresCommand = `docker exec $(docker ps --filter "name=dokploy-postgres" -q) pg_dump -v -Fc -U dokploy -d dokploy > ${tempDir}/database.sql`;
			await execAsync(postgresCommand);

			// Snapshot the Dokploy data directory.
			await execAsync(`cp -r ${BASE_PATH}/* ${tempDir}/filesystem/`);

			// Bundle dump + filesystem into a single zip (cd keeps paths relative).
			await execAsync(
				`cd ${tempDir} && zip -r ${backupFileName} database.sql filesystem/`,
			);

			// Upload to S3 using per-destination rclone credential flags.
			const uploadCommand = `rclone copyto ${rcloneFlags.join(" ")} "${tempDir}/${backupFileName}" "${s3Path}"`;
			await execAsync(uploadCommand);

			return true;
		} finally {
			// Always remove the staging directory, even on failure.
			await execAsync(`rm -rf ${tempDir}`);
		}
	} catch (error) {
		console.error("Backup error:", error);
		throw error;
	}
};

View File

@ -30,12 +30,22 @@ export const addSuffixToVolumesInServices = (
// skip bind mounts and variables (e.g. $PWD)
if (
volumeName?.startsWith(".") ||
volumeName?.startsWith("/") ||
volumeName?.startsWith("$")
!volumeName ||
volumeName.startsWith(".") ||
volumeName.startsWith("/") ||
volumeName.startsWith("$")
) {
return volume;
}
// Handle volume paths with subdirectories
const parts = volumeName.split("/");
if (parts.length > 1) {
const baseName = parts[0];
const rest = parts.slice(1).join("/");
return `${baseName}-${suffix}/${rest}:${path}`;
}
return `${volumeName}-${suffix}:${path}`;
}
if (_.isObject(volume) && volume.type === "volume" && volume.source) {

View File

@ -2,3 +2,4 @@ export { restorePostgresBackup } from "./postgres";
export { restoreMySqlBackup } from "./mysql";
export { restoreMariadbBackup } from "./mariadb";
export { restoreMongoBackup } from "./mongo";
export { restoreWebServerBackup } from "./web-server";

View File

@ -0,0 +1,143 @@
import type { Destination } from "@dokploy/server/services/destination";
import { getS3Credentials } from "../backups/utils";
import { execAsync } from "../process/execAsync";
import { paths, IS_CLOUD } from "@dokploy/server/constants";
import { mkdtemp } from "node:fs/promises";
import { join } from "node:path";
import { tmpdir } from "node:os";
/**
 * Restores a web-server backup (created by runWebServerBackup) from S3.
 *
 * DESTRUCTIVE: wipes everything under BASE_PATH and drops/recreates the
 * `dokploy` Postgres database before restoring. Filesystem is restored first,
 * then the database. Self-hosted only (no-op in cloud mode).
 *
 * @param destination - S3 destination holding the backup
 * @param backupFile  - object key of the zip inside the destination bucket
 * @param emit        - progress logger; receives human-readable status lines
 * @throws rethrows any failure after emitting an error line
 */
export const restoreWebServerBackup = async (
	destination: Destination,
	backupFile: string,
	emit: (log: string) => void,
) => {
	if (IS_CLOUD) {
		return;
	}
	try {
		const rcloneFlags = getS3Credentials(destination);
		const bucketPath = `:s3:${destination.bucket}`;
		const backupPath = `${bucketPath}/${backupFile}`;
		const { BASE_PATH } = paths();

		// Create a temporary directory outside of BASE_PATH (BASE_PATH itself
		// is wiped below, so staging must live elsewhere).
		const tempDir = await mkdtemp(join(tmpdir(), "dokploy-restore-"));

		try {
			emit("Starting restore...");
			emit(`Backup path: ${backupPath}`);
			emit(`Temp directory: ${tempDir}`);

			// Create temp directory (mkdtemp already made it; this is a no-op guard).
			emit("Creating temporary directory...");
			await execAsync(`mkdir -p ${tempDir}`);

			// Download backup from S3
			emit("Downloading backup from S3...");
			await execAsync(
				`rclone copyto ${rcloneFlags.join(" ")} "${backupPath}" "${tempDir}/${backupFile}"`,
			);

			// List files before extraction (diagnostic output only)
			emit("Listing files before extraction...");
			const { stdout: beforeFiles } = await execAsync(`ls -la ${tempDir}`);
			emit(`Files before extraction: ${beforeFiles}`);

			// Extract backup
			emit("Extracting backup...");
			await execAsync(`cd ${tempDir} && unzip ${backupFile}`);

			// Restore filesystem first
			emit("Restoring filesystem...");
			emit(`Copying from ${tempDir}/filesystem/* to ${BASE_PATH}/`);

			// First clean the target directory — point of no return for BASE_PATH.
			emit("Cleaning target directory...");
			await execAsync(`rm -rf "${BASE_PATH}/"*`);

			// Ensure the target directory exists
			emit("Setting up target directory...");
			await execAsync(`mkdir -p "${BASE_PATH}"`);

			// Copy files preserving permissions (-p)
			emit("Copying files...");
			await execAsync(`cp -rp "${tempDir}/filesystem/"* "${BASE_PATH}/"`);

			// Now handle database restore
			emit("Starting database restore...");

			// Check if database.sql.gz exists and decompress it (supports older
			// backups that gzipped the dump; `|| true` keeps exec from throwing
			// when the file is absent).
			const { stdout: hasGzFile } = await execAsync(
				`ls ${tempDir}/database.sql.gz || true`,
			);
			if (hasGzFile.includes("database.sql.gz")) {
				emit("Found compressed database file, decompressing...");
				await execAsync(`cd ${tempDir} && gunzip database.sql.gz`);
			}

			// Verify database file exists before touching the live database.
			const { stdout: hasSqlFile } = await execAsync(
				`ls ${tempDir}/database.sql || true`,
			);
			if (!hasSqlFile.includes("database.sql")) {
				throw new Error("Database file not found after extraction");
			}

			// Drop and recreate database: terminate sessions so DROP cannot block.
			emit("Disconnecting all users from database...");
			await execAsync(
				`docker exec $(docker ps --filter "name=dokploy-postgres" -q) psql -U dokploy postgres -c "SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity WHERE pg_stat_activity.datname = 'dokploy' AND pid <> pg_backend_pid();"`,
			);

			emit("Dropping existing database...");
			await execAsync(
				`docker exec $(docker ps --filter "name=dokploy-postgres" -q) psql -U dokploy postgres -c "DROP DATABASE IF EXISTS dokploy;"`,
			);

			emit("Creating fresh database...");
			await execAsync(
				`docker exec $(docker ps --filter "name=dokploy-postgres" -q) psql -U dokploy postgres -c "CREATE DATABASE dokploy;"`,
			);

			// Copy the backup file into the container (pg_restore reads a local path).
			emit("Copying backup file into container...");
			await execAsync(
				`docker cp ${tempDir}/database.sql $(docker ps --filter "name=dokploy-postgres" -q):/tmp/database.sql`,
			);

			// Verify file in container
			emit("Verifying file in container...");
			await execAsync(
				`docker exec $(docker ps --filter "name=dokploy-postgres" -q) ls -l /tmp/database.sql`,
			);

			// Restore from the copied file (custom-format dump, hence pg_restore).
			emit("Running database restore...");
			await execAsync(
				`docker exec $(docker ps --filter "name=dokploy-postgres" -q) pg_restore -v -U dokploy -d dokploy /tmp/database.sql`,
			);

			// Cleanup the temporary file in the container
			emit("Cleaning up container temp file...");
			await execAsync(
				`docker exec $(docker ps --filter "name=dokploy-postgres" -q) rm /tmp/database.sql`,
			);

			emit("Restore completed successfully!");
		} finally {
			// Cleanup staging dir regardless of outcome.
			emit("Cleaning up temporary files...");
			await execAsync(`rm -rf ${tempDir}`);
		}
	} catch (error) {
		console.error(error);
		emit(
			`Error: ${
				error instanceof Error
					? error.message
					: "Error restoring web server backup"
			}`,
		);
		throw error;
	}
};

View File

@ -424,6 +424,9 @@ importers:
tailwindcss-animate:
specifier: ^1.0.7
version: 1.0.7(tailwindcss@3.4.7(ts-node@10.9.2(@types/node@18.19.42)(typescript@5.5.3)))
toml:
specifier: 3.0.0
version: 3.0.0
undici:
specifier: ^6.19.2
version: 6.19.4
@ -723,6 +726,9 @@ importers:
ssh2:
specifier: 1.15.0
version: 1.15.0
toml:
specifier: 3.0.0
version: 3.0.0
ws:
specifier: 8.16.0
version: 8.16.0
@ -6982,6 +6988,9 @@ packages:
resolution: {integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==}
engines: {node: '>=0.6'}
toml@3.0.0:
resolution: {integrity: sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w==}
tr46@0.0.3:
resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==}
@ -14061,6 +14070,8 @@ snapshots:
toidentifier@1.0.1: {}
toml@3.0.0: {}
tr46@0.0.3: {}
tree-dump@1.0.2(tslib@2.6.3):