diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 015095aa..a69fa686 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -52,7 +52,7 @@ feat: add new feature Before you start, please make the clone based on the `canary` branch, since the `main` branch is the source of truth and should always reflect the latest stable release, also the PRs will be merged to the `canary` branch. -We use Node v20.9.0 +We use Node v20.9.0 and recommend this specific version. If you have nvm installed, you can run `nvm install 20.9.0 && nvm use` in the root directory. ```bash git clone https://github.com/dokploy/dokploy.git @@ -87,6 +87,8 @@ pnpm run dokploy:dev Go to http://localhost:3000 to see the development server +Note: this project uses Biome. If your editor is configured to use another formatter such as Prettier, it's recommended to either change it to use Biome or turn it off. + ## Build ```bash @@ -145,11 +147,9 @@ curl -sSL https://railpack.com/install.sh | sh ```bash # Install Buildpacks -curl -sSL "https://github.com/buildpacks/pack/releases/download/v0.32.1/pack-v0.32.1-linux.tgz" | tar -C /usr/local/bin/ --no-same-owner -xzv pack +curl -sSL "https://github.com/buildpacks/pack/releases/download/v0.35.0/pack-v0.35.0-linux.tgz" | tar -C /usr/local/bin/ --no-same-owner -xzv pack ``` - - ## Pull Request - The `main` branch is the source of truth and should always reflect the latest stable release. @@ -167,7 +167,6 @@ Thank you for your contribution! To add a new template, go to `https://github.com/Dokploy/templates` repository and read the README.md file. - ### Recommendations - Use the same name of the folder as the id of the template. 
diff --git a/Dockerfile b/Dockerfile index ad2239b0..a9b5f951 100644 --- a/Dockerfile +++ b/Dockerfile @@ -49,7 +49,7 @@ RUN curl -fsSL https://get.docker.com -o get-docker.sh && sh get-docker.sh && rm # Install Nixpacks and tsx # | VERBOSE=1 VERSION=1.21.0 bash -ARG NIXPACKS_VERSION=1.29.1 +ARG NIXPACKS_VERSION=1.35.0 RUN curl -sSL https://nixpacks.com/install.sh -o install.sh \ && chmod +x install.sh \ && ./install.sh \ diff --git a/apps/dokploy/__test__/drop/drop.test.test.ts b/apps/dokploy/__test__/drop/drop.test.test.ts index 7dc9e560..74a4eb66 100644 --- a/apps/dokploy/__test__/drop/drop.test.test.ts +++ b/apps/dokploy/__test__/drop/drop.test.test.ts @@ -34,7 +34,9 @@ const baseApp: ApplicationNested = { giteaRepository: "", cleanCache: false, watchPaths: [], + enableSubmodules: false, applicationStatus: "done", + triggerType: "push", appName: "", autoDeploy: true, serverId: "", diff --git a/apps/dokploy/__test__/templates/config.template.test.ts b/apps/dokploy/__test__/templates/config.template.test.ts index d6e87cb7..202abdf2 100644 --- a/apps/dokploy/__test__/templates/config.template.test.ts +++ b/apps/dokploy/__test__/templates/config.template.test.ts @@ -51,6 +51,35 @@ describe("processTemplate", () => { expect(result.domains).toHaveLength(0); expect(result.mounts).toHaveLength(0); }); + + it("should allow creation of real jwt secret", () => { + const template: CompleteTemplate = { + metadata: {} as any, + variables: { + jwt_secret: "cQsdycq1hDLopQonF6jUTqgQc5WEZTwWLL02J6XJ", + anon_payload: JSON.stringify({ + role: "tester", + iss: "dockploy", + iat: "${timestamps:2025-01-01T00:00:00Z}", + exp: "${timestamps:2030-01-01T00:00:00Z}", + }), + anon_key: "${jwt:jwt_secret:anon_payload}", + }, + config: { + domains: [], + env: { + ANON_KEY: "${anon_key}", + }, + }, + }; + const result = processTemplate(template, mockSchema); + expect(result.envs).toHaveLength(1); + expect(result.envs).toContain( + 
"ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpYXQiOiIxNzM1Njg5NjAwIiwiZXhwIjoiMTg5MzQ1NjAwMCIsInJvbGUiOiJ0ZXN0ZXIiLCJpc3MiOiJkb2NrcGxveSJ9.BG5JoxL2_NaTFbPgyZdm3kRWenf_O3su_HIRKGCJ_kY", + ); + expect(result.mounts).toHaveLength(0); + expect(result.domains).toHaveLength(0); + }); }); describe("domains processing", () => { diff --git a/apps/dokploy/__test__/templates/helpers.template.test.ts b/apps/dokploy/__test__/templates/helpers.template.test.ts new file mode 100644 index 00000000..1144b65f --- /dev/null +++ b/apps/dokploy/__test__/templates/helpers.template.test.ts @@ -0,0 +1,232 @@ +import type { Schema } from "@dokploy/server/templates"; +import { processValue } from "@dokploy/server/templates/processors"; +import { describe, expect, it } from "vitest"; + +describe("helpers functions", () => { + // Mock schema for testing + const mockSchema: Schema = { + projectName: "test", + serverIp: "127.0.0.1", + }; + // some helpers to test jwt + type JWTParts = [string, string, string]; + const jwtMatchExp = /^[A-Za-z0-9-_]+\.[A-Za-z0-9-_]+\.[A-Za-z0-9-_]+$/; + const jwtBase64Decode = (str: string) => { + const base64 = str.replace(/-/g, "+").replace(/_/g, "/"); + const padding = "=".repeat((4 - (base64.length % 4)) % 4); + const decoded = Buffer.from(base64 + padding, "base64").toString("utf-8"); + return JSON.parse(decoded); + }; + const jwtCheckHeader = (jwtHeader: string) => { + const decodedHeader = jwtBase64Decode(jwtHeader); + expect(decodedHeader).toHaveProperty("alg"); + expect(decodedHeader).toHaveProperty("typ"); + expect(decodedHeader.alg).toEqual("HS256"); + expect(decodedHeader.typ).toEqual("JWT"); + }; + + describe("${domain}", () => { + it("should generate a random domain", () => { + const domain = processValue("${domain}", {}, mockSchema); + expect(domain.startsWith(`${mockSchema.projectName}-`)).toBeTruthy(); + expect( + domain.endsWith( + `${mockSchema.serverIp.replaceAll(".", "-")}.traefik.me`, + ), + ).toBeTruthy(); + }); + }); + + 
describe("${base64}", () => { + it("should generate a base64 string", () => { + const base64 = processValue("${base64}", {}, mockSchema); + expect(base64).toMatch(/^[A-Za-z0-9+=/]+={0,2}$/); + }); + it.each([ + [4, 8], + [8, 12], + [16, 24], + [32, 44], + [64, 88], + [128, 172], + ])( + "should generate a base64 string from parameter %d bytes length", + (length, finalLength) => { + const base64 = processValue(`\${base64:${length}}`, {}, mockSchema); + expect(base64).toMatch(/^[A-Za-z0-9+=/]+={0,2}$/); + expect(base64.length).toBe(finalLength); + }, + ); + }); + + describe("${password}", () => { + it("should generate a password string", () => { + const password = processValue("${password}", {}, mockSchema); + expect(password).toMatch(/^[A-Za-z0-9]+$/); + }); + it.each([6, 8, 12, 16, 32])( + "should generate a password string respecting parameter %d length", + (length) => { + const password = processValue(`\${password:${length}}`, {}, mockSchema); + expect(password).toMatch(/^[A-Za-z0-9]+$/); + expect(password.length).toBe(length); + }, + ); + }); + + describe("${hash}", () => { + it("should generate a hash string", () => { + const hash = processValue("${hash}", {}, mockSchema); + expect(hash).toMatch(/^[A-Za-z0-9]+$/); + }); + it.each([6, 8, 12, 16, 32])( + "should generate a hash string respecting parameter %d length", + (length) => { + const hash = processValue(`\${hash:${length}}`, {}, mockSchema); + expect(hash).toMatch(/^[A-Za-z0-9]+$/); + expect(hash.length).toBe(length); + }, + ); + }); + + describe("${uuid}", () => { + it("should generate a UUID string", () => { + const uuid = processValue("${uuid}", {}, mockSchema); + expect(uuid).toMatch( + /^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/, + ); + }); + }); + + describe("${timestamp}", () => { + it("should generate a timestamp string in milliseconds", () => { + const timestamp = processValue("${timestamp}", {}, mockSchema); + const nowLength = Math.floor(Date.now()).toString().length; 
+ expect(timestamp).toMatch(/^\d+$/); + expect(timestamp.length).toBe(nowLength); + }); + }); + describe("${timestampms}", () => { + it("should generate a timestamp string in milliseconds", () => { + const timestamp = processValue("${timestampms}", {}, mockSchema); + const nowLength = Date.now().toString().length; + expect(timestamp).toMatch(/^\d+$/); + expect(timestamp.length).toBe(nowLength); + }); + it("should generate a timestamp string in milliseconds from parameter", () => { + const timestamp = processValue( + "${timestampms:2025-01-01}", + {}, + mockSchema, + ); + expect(timestamp).toEqual("1735689600000"); + }); + }); + describe("${timestamps}", () => { + it("should generate a timestamp string in seconds", () => { + const timestamps = processValue("${timestamps}", {}, mockSchema); + const nowLength = Math.floor(Date.now() / 1000).toString().length; + expect(timestamps).toMatch(/^\d+$/); + expect(timestamps.length).toBe(nowLength); + }); + it("should generate a timestamp string in seconds from parameter", () => { + const timestamps = processValue( + "${timestamps:2025-01-01}", + {}, + mockSchema, + ); + expect(timestamps).toEqual("1735689600"); + }); + }); + + describe("${randomPort}", () => { + it("should generate a random port string", () => { + const randomPort = processValue("${randomPort}", {}, mockSchema); + expect(randomPort).toMatch(/^\d+$/); + expect(Number(randomPort)).toBeLessThan(65536); + }); + }); + + describe("${username}", () => { + it("should generate a username string", () => { + const username = processValue("${username}", {}, mockSchema); + expect(username).toMatch(/^[a-zA-Z0-9._-]{3,}$/); + }); + }); + + describe("${email}", () => { + it("should generate an email string", () => { + const email = processValue("${email}", {}, mockSchema); + expect(email).toMatch(/^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$/); + }); + }); + + describe("${jwt}", () => { + it("should generate a JWT string", () => { + const jwt = processValue("${jwt}", 
{}, mockSchema); + expect(jwt).toMatch(jwtMatchExp); + const parts = jwt.split(".") as JWTParts; + const decodedPayload = jwtBase64Decode(parts[1]); + jwtCheckHeader(parts[0]); + expect(decodedPayload).toHaveProperty("iat"); + expect(decodedPayload).toHaveProperty("iss"); + expect(decodedPayload).toHaveProperty("exp"); + expect(decodedPayload.iss).toEqual("dokploy"); + }); + it.each([6, 8, 12, 16, 32])( + "should generate a random hex string from parameter %d byte length", + (length) => { + const jwt = processValue(`\${jwt:${length}}`, {}, mockSchema); + expect(jwt).toMatch(/^[A-Za-z0-9-_.]+$/); + expect(jwt.length).toBeGreaterThanOrEqual(length); // bytes translated to hex can take up to 2x the length + expect(jwt.length).toBeLessThanOrEqual(length * 2); + }, + ); + }); + describe("${jwt:secret}", () => { + it("should generate a JWT string respecting parameter secret from variable", () => { + const jwt = processValue( + "${jwt:secret}", + { secret: "mysecret" }, + mockSchema, + ); + expect(jwt).toMatch(jwtMatchExp); + const parts = jwt.split(".") as JWTParts; + const decodedPayload = jwtBase64Decode(parts[1]); + jwtCheckHeader(parts[0]); + expect(decodedPayload).toHaveProperty("iat"); + expect(decodedPayload).toHaveProperty("iss"); + expect(decodedPayload).toHaveProperty("exp"); + expect(decodedPayload.iss).toEqual("dokploy"); + }); + }); + describe("${jwt:secret:payload}", () => { + it("should generate a JWT string respecting parameters secret and payload from variables", () => { + const iat = Math.floor(new Date("2025-01-01T00:00:00Z").getTime() / 1000); + const expiry = iat + 3600; + const jwt = processValue( + "${jwt:secret:payload}", + { + secret: "mysecret", + payload: `{"iss": "test-issuer", "iat": ${iat}, "exp": ${expiry}, "customprop": "customvalue"}`, + }, + mockSchema, + ); + expect(jwt).toMatch(jwtMatchExp); + const parts = jwt.split(".") as JWTParts; + jwtCheckHeader(parts[0]); + const decodedPayload = jwtBase64Decode(parts[1]); + 
expect(decodedPayload).toHaveProperty("iat"); + expect(decodedPayload.iat).toEqual(iat); + expect(decodedPayload).toHaveProperty("iss"); + expect(decodedPayload.iss).toEqual("test-issuer"); + expect(decodedPayload).toHaveProperty("exp"); + expect(decodedPayload.exp).toEqual(expiry); + expect(decodedPayload).toHaveProperty("customprop"); + expect(decodedPayload.customprop).toEqual("customvalue"); + expect(jwt).toEqual( + "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpYXQiOjE3MzU2ODk2MDAsImV4cCI6MTczNTY5MzIwMCwiaXNzIjoidGVzdC1pc3N1ZXIiLCJjdXN0b21wcm9wIjoiY3VzdG9tdmFsdWUifQ.m42U7PZSUSCf7gBOJrxJir0rQmyPq4rA59Dydr_QahI", + ); + }); + }); +}); diff --git a/apps/dokploy/__test__/traefik/server/update-server-config.test.ts b/apps/dokploy/__test__/traefik/server/update-server-config.test.ts index f33b37fd..201aee1e 100644 --- a/apps/dokploy/__test__/traefik/server/update-server-config.test.ts +++ b/apps/dokploy/__test__/traefik/server/update-server-config.test.ts @@ -14,6 +14,7 @@ import { import { beforeEach, expect, test, vi } from "vitest"; const baseAdmin: User = { + https: false, enablePaidFeatures: false, metricsConfig: { containers: { @@ -73,7 +74,6 @@ beforeEach(() => { test("Should read the configuration file", () => { const config: FileConfig = loadOrCreateConfig("dokploy"); - expect(config.http?.routers?.["dokploy-router-app"]?.service).toBe( "dokploy-service-app", ); @@ -83,6 +83,7 @@ test("Should apply redirect-to-https", () => { updateServerTraefik( { ...baseAdmin, + https: true, certificateType: "letsencrypt", }, "example.com", diff --git a/apps/dokploy/__test__/traefik/traefik.test.ts b/apps/dokploy/__test__/traefik/traefik.test.ts index d8a14ab4..5437e64d 100644 --- a/apps/dokploy/__test__/traefik/traefik.test.ts +++ b/apps/dokploy/__test__/traefik/traefik.test.ts @@ -16,6 +16,7 @@ const baseApp: ApplicationNested = { applicationStatus: "done", appName: "", autoDeploy: true, + enableSubmodules: false, serverId: "", branch: null, dockerBuildStage: "", @@ -24,6 
+25,7 @@ const baseApp: ApplicationNested = { buildArgs: null, isPreviewDeploymentsActive: false, previewBuildArgs: null, + triggerType: "push", previewCertificateType: "none", previewEnv: null, previewHttps: false, diff --git a/apps/dokploy/__test__/utils/backups.test.ts b/apps/dokploy/__test__/utils/backups.test.ts new file mode 100644 index 00000000..c7bc310c --- /dev/null +++ b/apps/dokploy/__test__/utils/backups.test.ts @@ -0,0 +1,61 @@ +import { describe, expect, test } from "vitest"; +import { normalizeS3Path } from "@dokploy/server/utils/backups/utils"; + +describe("normalizeS3Path", () => { + test("should handle empty and whitespace-only prefix", () => { + expect(normalizeS3Path("")).toBe(""); + expect(normalizeS3Path("/")).toBe(""); + expect(normalizeS3Path(" ")).toBe(""); + expect(normalizeS3Path("\t")).toBe(""); + expect(normalizeS3Path("\n")).toBe(""); + expect(normalizeS3Path(" \n \t ")).toBe(""); + }); + + test("should trim whitespace from prefix", () => { + expect(normalizeS3Path(" prefix")).toBe("prefix/"); + expect(normalizeS3Path("prefix ")).toBe("prefix/"); + expect(normalizeS3Path(" prefix ")).toBe("prefix/"); + expect(normalizeS3Path("\tprefix\t")).toBe("prefix/"); + expect(normalizeS3Path(" prefix/nested ")).toBe("prefix/nested/"); + }); + + test("should remove leading slashes", () => { + expect(normalizeS3Path("/prefix")).toBe("prefix/"); + expect(normalizeS3Path("///prefix")).toBe("prefix/"); + }); + + test("should remove trailing slashes", () => { + expect(normalizeS3Path("prefix/")).toBe("prefix/"); + expect(normalizeS3Path("prefix///")).toBe("prefix/"); + }); + + test("should remove both leading and trailing slashes", () => { + expect(normalizeS3Path("/prefix/")).toBe("prefix/"); + expect(normalizeS3Path("///prefix///")).toBe("prefix/"); + }); + + test("should handle nested paths", () => { + expect(normalizeS3Path("prefix/nested")).toBe("prefix/nested/"); + expect(normalizeS3Path("/prefix/nested/")).toBe("prefix/nested/"); + 
expect(normalizeS3Path("///prefix/nested///")).toBe("prefix/nested/"); + }); + + test("should preserve middle slashes", () => { + expect(normalizeS3Path("prefix/nested/deep")).toBe("prefix/nested/deep/"); + expect(normalizeS3Path("/prefix/nested/deep/")).toBe("prefix/nested/deep/"); + }); + + test("should handle special characters", () => { + expect(normalizeS3Path("prefix-with-dashes")).toBe("prefix-with-dashes/"); + expect(normalizeS3Path("prefix_with_underscores")).toBe( + "prefix_with_underscores/", + ); + expect(normalizeS3Path("prefix.with.dots")).toBe("prefix.with.dots/"); + }); + + test("should handle the cases from the bug report", () => { + expect(normalizeS3Path("instance-backups/")).toBe("instance-backups/"); + expect(normalizeS3Path("/instance-backups/")).toBe("instance-backups/"); + expect(normalizeS3Path("instance-backups")).toBe("instance-backups/"); + }); +}); diff --git a/apps/dokploy/components/dashboard/application/general/generic/save-bitbucket-provider.tsx b/apps/dokploy/components/dashboard/application/general/generic/save-bitbucket-provider.tsx index b506fbac..f0179d9c 100644 --- a/apps/dokploy/components/dashboard/application/general/generic/save-bitbucket-provider.tsx +++ b/apps/dokploy/components/dashboard/application/general/generic/save-bitbucket-provider.tsx @@ -31,6 +31,7 @@ import { SelectTrigger, SelectValue, } from "@/components/ui/select"; +import { Switch } from "@/components/ui/switch"; import { Tooltip, TooltipContent, @@ -58,6 +59,7 @@ const BitbucketProviderSchema = z.object({ branch: z.string().min(1, "Branch is required"), bitbucketId: z.string().min(1, "Bitbucket Provider is required"), watchPaths: z.array(z.string()).optional(), + enableSubmodules: z.boolean().optional(), }); type BitbucketProvider = z.infer; @@ -84,6 +86,7 @@ export const SaveBitbucketProvider = ({ applicationId }: Props) => { bitbucketId: "", branch: "", watchPaths: [], + enableSubmodules: false, }, resolver: zodResolver(BitbucketProviderSchema), }); @@ 
-130,6 +133,7 @@ export const SaveBitbucketProvider = ({ applicationId }: Props) => { buildPath: data.bitbucketBuildPath || "/", bitbucketId: data.bitbucketId || "", watchPaths: data.watchPaths || [], + enableSubmodules: data.enableSubmodules || false, }); } }, [form.reset, data, form]); @@ -143,6 +147,7 @@ export const SaveBitbucketProvider = ({ applicationId }: Props) => { bitbucketId: data.bitbucketId, applicationId, watchPaths: data.watchPaths || [], + enableSubmodules: data.enableSubmodules || false, }) .then(async () => { toast.success("Service Provided Saved"); @@ -467,6 +472,21 @@ export const SaveBitbucketProvider = ({ applicationId }: Props) => { )} /> + ( + + + + + Enable Submodules + + )} + />
diff --git a/apps/dokploy/components/dashboard/application/general/generic/save-gitea-provider.tsx b/apps/dokploy/components/dashboard/application/general/generic/save-gitea-provider.tsx index 0ad88945..98d8cfd7 100644 --- a/apps/dokploy/components/dashboard/application/general/generic/save-gitea-provider.tsx +++ b/apps/dokploy/components/dashboard/application/general/generic/save-gitea-provider.tsx @@ -31,6 +31,7 @@ import { SelectTrigger, SelectValue, } from "@/components/ui/select"; +import { Switch } from "@/components/ui/switch"; import { Tooltip, TooltipContent, @@ -74,6 +75,7 @@ const GiteaProviderSchema = z.object({ branch: z.string().min(1, "Branch is required"), giteaId: z.string().min(1, "Gitea Provider is required"), watchPaths: z.array(z.string()).default([]), + enableSubmodules: z.boolean().optional(), }); type GiteaProvider = z.infer; @@ -99,6 +101,7 @@ export const SaveGiteaProvider = ({ applicationId }: Props) => { giteaId: "", branch: "", watchPaths: [], + enableSubmodules: false, }, resolver: zodResolver(GiteaProviderSchema), }); @@ -152,6 +155,7 @@ export const SaveGiteaProvider = ({ applicationId }: Props) => { buildPath: data.giteaBuildPath || "/", giteaId: data.giteaId || "", watchPaths: data.watchPaths || [], + enableSubmodules: data.enableSubmodules || false, }); } }, [form.reset, data, form]); @@ -165,6 +169,7 @@ export const SaveGiteaProvider = ({ applicationId }: Props) => { giteaId: data.giteaId, applicationId, watchPaths: data.watchPaths, + enableSubmodules: data.enableSubmodules || false, }) .then(async () => { toast.success("Service Provider Saved"); @@ -498,6 +503,21 @@ export const SaveGiteaProvider = ({ applicationId }: Props) => { )} /> + ( + + + + + Enable Submodules + + )} + />
-
- + > + + + + + + )} + /> + )} + + ( + + + + + Enable Submodules )} /> diff --git a/apps/dokploy/components/dashboard/application/general/generic/save-gitlab-provider.tsx b/apps/dokploy/components/dashboard/application/general/generic/save-gitlab-provider.tsx index 0f8bb849..b4b55d3f 100644 --- a/apps/dokploy/components/dashboard/application/general/generic/save-gitlab-provider.tsx +++ b/apps/dokploy/components/dashboard/application/general/generic/save-gitlab-provider.tsx @@ -31,6 +31,7 @@ import { SelectTrigger, SelectValue, } from "@/components/ui/select"; +import { Switch } from "@/components/ui/switch"; import { Tooltip, TooltipContent, @@ -60,6 +61,7 @@ const GitlabProviderSchema = z.object({ branch: z.string().min(1, "Branch is required"), gitlabId: z.string().min(1, "Gitlab Provider is required"), watchPaths: z.array(z.string()).optional(), + enableSubmodules: z.boolean().default(false), }); type GitlabProvider = z.infer; @@ -86,6 +88,7 @@ export const SaveGitlabProvider = ({ applicationId }: Props) => { }, gitlabId: "", branch: "", + enableSubmodules: false, }, resolver: zodResolver(GitlabProviderSchema), }); @@ -135,6 +138,7 @@ export const SaveGitlabProvider = ({ applicationId }: Props) => { buildPath: data.gitlabBuildPath || "/", gitlabId: data.gitlabId || "", watchPaths: data.watchPaths || [], + enableSubmodules: data.enableSubmodules ?? false, }); } }, [form.reset, data, form]); @@ -150,6 +154,7 @@ export const SaveGitlabProvider = ({ applicationId }: Props) => { gitlabProjectId: data.repository.id, gitlabPathNamespace: data.repository.gitlabPathNamespace, watchPaths: data.watchPaths || [], + enableSubmodules: data.enableSubmodules, }) .then(async () => { toast.success("Service Provided Saved"); @@ -483,6 +488,21 @@ export const SaveGitlabProvider = ({ applicationId }: Props) => { )} /> + ( + + + + + Enable Submodules + + )} + />
diff --git a/apps/dokploy/components/dashboard/compose/general/generic/save-gitea-provider-compose.tsx b/apps/dokploy/components/dashboard/compose/general/generic/save-gitea-provider-compose.tsx index 201f9da2..6f9b50da 100644 --- a/apps/dokploy/components/dashboard/compose/general/generic/save-gitea-provider-compose.tsx +++ b/apps/dokploy/components/dashboard/compose/general/generic/save-gitea-provider-compose.tsx @@ -31,6 +31,7 @@ import { SelectTrigger, SelectValue, } from "@/components/ui/select"; +import { Switch } from "@/components/ui/switch"; import { Tooltip, TooltipContent, @@ -59,6 +60,7 @@ const GiteaProviderSchema = z.object({ branch: z.string().min(1, "Branch is required"), giteaId: z.string().min(1, "Gitea Provider is required"), watchPaths: z.array(z.string()).optional(), + enableSubmodules: z.boolean().default(false), }); type GiteaProvider = z.infer; @@ -83,6 +85,7 @@ export const SaveGiteaProviderCompose = ({ composeId }: Props) => { giteaId: "", branch: "", watchPaths: [], + enableSubmodules: false, }, resolver: zodResolver(GiteaProviderSchema), }); @@ -136,6 +139,7 @@ export const SaveGiteaProviderCompose = ({ composeId }: Props) => { composePath: data.composePath || "./docker-compose.yml", giteaId: data.giteaId || "", watchPaths: data.watchPaths || [], + enableSubmodules: data.enableSubmodules ?? false, }); } }, [form.reset, data, form]); @@ -151,6 +155,7 @@ export const SaveGiteaProviderCompose = ({ composeId }: Props) => { sourceType: "gitea", composeStatus: "idle", watchPaths: data.watchPaths, + enableSubmodules: data.enableSubmodules, } as any) .then(async () => { toast.success("Service Provider Saved"); @@ -469,6 +474,21 @@ export const SaveGiteaProviderCompose = ({ composeId }: Props) => { )} /> + ( + + + + + Enable Submodules + + )} + />
diff --git a/apps/dokploy/components/dashboard/compose/general/generic/save-github-provider-compose.tsx b/apps/dokploy/components/dashboard/compose/general/generic/save-github-provider-compose.tsx index 4f4c1d5a..97b57f0b 100644 --- a/apps/dokploy/components/dashboard/compose/general/generic/save-github-provider-compose.tsx +++ b/apps/dokploy/components/dashboard/compose/general/generic/save-github-provider-compose.tsx @@ -30,6 +30,7 @@ import { SelectTrigger, SelectValue, } from "@/components/ui/select"; +import { Switch } from "@/components/ui/switch"; import { Tooltip, TooltipContent, @@ -39,7 +40,7 @@ import { import { cn } from "@/lib/utils"; import { api } from "@/utils/api"; import { zodResolver } from "@hookform/resolvers/zod"; -import { CheckIcon, ChevronsUpDown, X } from "lucide-react"; +import { CheckIcon, ChevronsUpDown, HelpCircle, X } from "lucide-react"; import Link from "next/link"; import { useEffect } from "react"; import { useForm } from "react-hook-form"; @@ -57,6 +58,8 @@ const GithubProviderSchema = z.object({ branch: z.string().min(1, "Branch is required"), githubId: z.string().min(1, "Github Provider is required"), watchPaths: z.array(z.string()).optional(), + triggerType: z.enum(["push", "tag"]).default("push"), + enableSubmodules: z.boolean().default(false), }); type GithubProvider = z.infer; @@ -82,13 +85,15 @@ export const SaveGithubProviderCompose = ({ composeId }: Props) => { githubId: "", branch: "", watchPaths: [], + triggerType: "push", + enableSubmodules: false, }, resolver: zodResolver(GithubProviderSchema), }); const repository = form.watch("repository"); const githubId = form.watch("githubId"); - + const triggerType = form.watch("triggerType"); const { data: repositories, isLoading: isLoadingRepositories } = api.github.getGithubRepositories.useQuery( { @@ -125,6 +130,8 @@ export const SaveGithubProviderCompose = ({ composeId }: Props) => { composePath: data.composePath, githubId: data.githubId || "", watchPaths: data.watchPaths 
|| [], + triggerType: data.triggerType || "push", + enableSubmodules: data.enableSubmodules ?? false, }); } }, [form.reset, data, form]); @@ -140,6 +147,8 @@ export const SaveGithubProviderCompose = ({ composeId }: Props) => { sourceType: "github", composeStatus: "idle", watchPaths: data.watchPaths, + enableSubmodules: data.enableSubmodules, + triggerType: data.triggerType, }) .then(async () => { toast.success("Service Provided Saved"); @@ -384,79 +393,140 @@ export const SaveGithubProviderCompose = ({ composeId }: Props) => { /> (
- Watch Paths + Trigger Type - -
- ? -
+ +

- Add paths to watch for changes. When files in these - paths change, a new deployment will be triggered. + Choose when to trigger deployments: on push to the + selected branch or when a new tag is created.

-
- {field.value?.map((path, index) => ( - - {path} - { - const newPaths = [...(field.value || [])]; - newPaths.splice(index, 1); - form.setValue("watchPaths", newPaths); + + + + )} + /> + {triggerType === "push" && ( + ( + +
+ Watch Paths + + + +
+ ? +
+
+ +

+ Add paths to watch for changes. When files in + these paths change, a new deployment will be + triggered. +

+
+
+
+
+
+ {field.value?.map((path, index) => ( + + {path} + { + const newPaths = [...(field.value || [])]; + newPaths.splice(index, 1); + form.setValue("watchPaths", newPaths); + }} + /> + + ))} +
+ +
+ { + if (e.key === "Enter") { + e.preventDefault(); + const input = e.currentTarget; + const value = input.value.trim(); + if (value) { + const newPaths = [ + ...(field.value || []), + value, + ]; + form.setValue("watchPaths", newPaths); + input.value = ""; + } + } }} /> - - ))} -
- -
- { - if (e.key === "Enter") { - e.preventDefault(); - const input = e.currentTarget; + -
+ }} + > + Add + +
+ + +
+ )} + /> + )} + ( + + + - + Enable Submodules )} /> diff --git a/apps/dokploy/components/dashboard/compose/general/generic/save-gitlab-provider-compose.tsx b/apps/dokploy/components/dashboard/compose/general/generic/save-gitlab-provider-compose.tsx index c191248e..30b542ce 100644 --- a/apps/dokploy/components/dashboard/compose/general/generic/save-gitlab-provider-compose.tsx +++ b/apps/dokploy/components/dashboard/compose/general/generic/save-gitlab-provider-compose.tsx @@ -31,6 +31,7 @@ import { SelectTrigger, SelectValue, } from "@/components/ui/select"; +import { Switch } from "@/components/ui/switch"; import { Tooltip, TooltipContent, @@ -60,6 +61,7 @@ const GitlabProviderSchema = z.object({ branch: z.string().min(1, "Branch is required"), gitlabId: z.string().min(1, "Gitlab Provider is required"), watchPaths: z.array(z.string()).optional(), + enableSubmodules: z.boolean().default(false), }); type GitlabProvider = z.infer; @@ -87,6 +89,7 @@ export const SaveGitlabProviderCompose = ({ composeId }: Props) => { gitlabId: "", branch: "", watchPaths: [], + enableSubmodules: false, }, resolver: zodResolver(GitlabProviderSchema), }); @@ -136,6 +139,7 @@ export const SaveGitlabProviderCompose = ({ composeId }: Props) => { composePath: data.composePath, gitlabId: data.gitlabId || "", watchPaths: data.watchPaths || [], + enableSubmodules: data.enableSubmodules ?? false, }); } }, [form.reset, data, form]); @@ -153,6 +157,7 @@ export const SaveGitlabProviderCompose = ({ composeId }: Props) => { sourceType: "gitlab", composeStatus: "idle", watchPaths: data.watchPaths, + enableSubmodules: data.enableSubmodules, }) .then(async () => { toast.success("Service Provided Saved"); @@ -485,6 +490,21 @@ export const SaveGitlabProviderCompose = ({ composeId }: Props) => { )} /> + ( + + + + + Enable Submodules + + )} + />
) : ( - - {files.map((file) => ( + + {files?.map((file) => ( { - form.setValue("backupFile", file); + form.setValue("backupFile", file.Path); + if (file.IsDir) { + setSearch(`${file.Path}/`); + setDebouncedSearchTerm(`${file.Path}/`); + } else { + setSearch(file.Path); + setDebouncedSearchTerm(file.Path); + } }} > -
- {file} +
+
+ + {file.Path} + + + +
+
+ + Size: {formatBytes(file.Size)} + + {file.IsDir && ( + + Directory + + )} + {file.Hashes?.MD5 && ( + MD5: {file.Hashes.MD5} + )} +
- ))} diff --git a/apps/dokploy/components/dashboard/projects/handle-project.tsx b/apps/dokploy/components/dashboard/projects/handle-project.tsx index 85b8aea9..dcb81241 100644 --- a/apps/dokploy/components/dashboard/projects/handle-project.tsx +++ b/apps/dokploy/components/dashboard/projects/handle-project.tsx @@ -31,9 +31,14 @@ import { toast } from "sonner"; import { z } from "zod"; const AddProjectSchema = z.object({ - name: z.string().min(1, { - message: "Name is required", - }), + name: z + .string() + .min(1, { + message: "Name is required", + }) + .regex(/^[a-zA-Z]/, { + message: "Project name cannot start with a number", + }), description: z.string().optional(), }); @@ -97,18 +102,6 @@ export const HandleProject = ({ projectId }: Props) => { ); }); }; - // useEffect(() => { - // const getUsers = async () => { - // const users = await authClient.admin.listUsers({ - // query: { - // limit: 100, - // }, - // }); - // console.log(users); - // }; - - // getUsers(); - // }); return ( diff --git a/apps/dokploy/components/dashboard/projects/show.tsx b/apps/dokploy/components/dashboard/projects/show.tsx index 31ba80c8..03ebe7a8 100644 --- a/apps/dokploy/components/dashboard/projects/show.tsx +++ b/apps/dokploy/components/dashboard/projects/show.tsx @@ -115,7 +115,7 @@ export const ShowProjects = () => {
)} -
+
{filteredProjects?.map((project) => { const emptyServices = project?.mariadb.length === 0 && diff --git a/apps/dokploy/components/dashboard/settings/ai-form.tsx b/apps/dokploy/components/dashboard/settings/ai-form.tsx index 05ab93a4..b1923918 100644 --- a/apps/dokploy/components/dashboard/settings/ai-form.tsx +++ b/apps/dokploy/components/dashboard/settings/ai-form.tsx @@ -55,7 +55,7 @@ export const AiForm = () => { key={config.aiId} className="flex items-center justify-between bg-sidebar p-1 w-full rounded-lg" > -
+
{config.name} diff --git a/apps/dokploy/components/dashboard/settings/certificates/show-certificates.tsx b/apps/dokploy/components/dashboard/settings/certificates/show-certificates.tsx index 6aaa2563..b80c7b54 100644 --- a/apps/dokploy/components/dashboard/settings/certificates/show-certificates.tsx +++ b/apps/dokploy/components/dashboard/settings/certificates/show-certificates.tsx @@ -70,7 +70,7 @@ export const ShowCertificates = () => { key={certificate.certificateId} className="flex items-center justify-between bg-sidebar p-1 w-full rounded-lg" > -
+
diff --git a/apps/dokploy/components/dashboard/settings/certificates/utils.ts b/apps/dokploy/components/dashboard/settings/certificates/utils.ts index 34d3ad77..80f332d8 100644 --- a/apps/dokploy/components/dashboard/settings/certificates/utils.ts +++ b/apps/dokploy/components/dashboard/settings/certificates/utils.ts @@ -13,53 +13,65 @@ export const extractExpirationDate = (certData: string): Date | null => { bytes[i] = binaryStr.charCodeAt(i); } - let dateFound = 0; + // ASN.1 tag for UTCTime is 0x17, GeneralizedTime is 0x18 + // We need to find the second occurrence of either tag as it's the "not after" (expiration) date + let dateFound = false; for (let i = 0; i < bytes.length - 2; i++) { - if (bytes[i] === 0x17 || bytes[i] === 0x18) { - const dateType = bytes[i]; - const dateLength = bytes[i + 1]; - if (typeof dateLength === "undefined") continue; + // Look for sequence containing validity period (0x30) + if (bytes[i] === 0x30) { + // Check next bytes for UTCTime or GeneralizedTime + let j = i + 1; + while (j < bytes.length - 2) { + if (bytes[j] === 0x17 || bytes[j] === 0x18) { + const dateType = bytes[j]; + const dateLength = bytes[j + 1]; + if (typeof dateLength === "undefined") break; - if (dateFound === 0) { - dateFound++; - i += dateLength + 1; - continue; + if (!dateFound) { + // Skip "not before" date + dateFound = true; + j += dateLength + 2; + continue; + } + + // Found "not after" date + let dateStr = ""; + for (let k = 0; k < dateLength; k++) { + const charCode = bytes[j + 2 + k]; + if (typeof charCode === "undefined") continue; + dateStr += String.fromCharCode(charCode); + } + + if (dateType === 0x17) { + // UTCTime (YYMMDDhhmmssZ) + const year = Number.parseInt(dateStr.slice(0, 2)); + const fullYear = year >= 50 ? 
1900 + year : 2000 + year; + return new Date( + Date.UTC( + fullYear, + Number.parseInt(dateStr.slice(2, 4)) - 1, + Number.parseInt(dateStr.slice(4, 6)), + Number.parseInt(dateStr.slice(6, 8)), + Number.parseInt(dateStr.slice(8, 10)), + Number.parseInt(dateStr.slice(10, 12)), + ), + ); + } + + // GeneralizedTime (YYYYMMDDhhmmssZ) + return new Date( + Date.UTC( + Number.parseInt(dateStr.slice(0, 4)), + Number.parseInt(dateStr.slice(4, 6)) - 1, + Number.parseInt(dateStr.slice(6, 8)), + Number.parseInt(dateStr.slice(8, 10)), + Number.parseInt(dateStr.slice(10, 12)), + Number.parseInt(dateStr.slice(12, 14)), + ), + ); + } + j++; } - - let dateStr = ""; - for (let j = 0; j < dateLength; j++) { - const charCode = bytes[i + 2 + j]; - if (typeof charCode === "undefined") continue; - dateStr += String.fromCharCode(charCode); - } - - if (dateType === 0x17) { - // UTCTime (YYMMDDhhmmssZ) - const year = Number.parseInt(dateStr.slice(0, 2)); - const fullYear = year >= 50 ? 1900 + year : 2000 + year; - return new Date( - Date.UTC( - fullYear, - Number.parseInt(dateStr.slice(2, 4)) - 1, - Number.parseInt(dateStr.slice(4, 6)), - Number.parseInt(dateStr.slice(6, 8)), - Number.parseInt(dateStr.slice(8, 10)), - Number.parseInt(dateStr.slice(10, 12)), - ), - ); - } - - // GeneralizedTime (YYYYMMDDhhmmssZ) - return new Date( - Date.UTC( - Number.parseInt(dateStr.slice(0, 4)), - Number.parseInt(dateStr.slice(4, 6)) - 1, - Number.parseInt(dateStr.slice(6, 8)), - Number.parseInt(dateStr.slice(8, 10)), - Number.parseInt(dateStr.slice(10, 12)), - Number.parseInt(dateStr.slice(12, 14)), - ), - ); } } return null; diff --git a/apps/dokploy/components/dashboard/settings/cluster/registry/show-registry.tsx b/apps/dokploy/components/dashboard/settings/cluster/registry/show-registry.tsx index 08cb0381..9ae595d6 100644 --- a/apps/dokploy/components/dashboard/settings/cluster/registry/show-registry.tsx +++ b/apps/dokploy/components/dashboard/settings/cluster/registry/show-registry.tsx @@ -54,7 +54,7 
@@ export const ShowRegistry = () => { key={registry.registryId} className="flex items-center justify-between bg-sidebar p-1 w-full rounded-lg" > -
+
diff --git a/apps/dokploy/components/dashboard/settings/destination/show-destinations.tsx b/apps/dokploy/components/dashboard/settings/destination/show-destinations.tsx index 0639b0f7..014596ce 100644 --- a/apps/dokploy/components/dashboard/settings/destination/show-destinations.tsx +++ b/apps/dokploy/components/dashboard/settings/destination/show-destinations.tsx @@ -55,7 +55,7 @@ export const ShowDestinations = () => { key={destination.destinationId} className="flex items-center justify-between bg-sidebar p-1 w-full rounded-lg" > -
+
{index + 1}. {destination.name} diff --git a/apps/dokploy/components/dashboard/settings/git/gitlab/add-gitlab-provider.tsx b/apps/dokploy/components/dashboard/settings/git/gitlab/add-gitlab-provider.tsx index 4dd7da93..023e46ed 100644 --- a/apps/dokploy/components/dashboard/settings/git/gitlab/add-gitlab-provider.tsx +++ b/apps/dokploy/components/dashboard/settings/git/gitlab/add-gitlab-provider.tsx @@ -248,7 +248,9 @@ export const AddGitlabProvider = () => { name="groupName" render={({ field }) => ( - Group Name (Optional) + + Group Name (Optional, Comma-Separated List) + { name="groupName" render={({ field }) => ( - Group Name (Optional) + + Group Name (Optional, Comma-Separated List) + { key={notification.notificationId} className="flex items-center justify-between bg-sidebar p-1 w-full rounded-lg" > -
+
{notification.notificationType === "slack" && (
diff --git a/apps/dokploy/components/dashboard/settings/profile/enable-2fa.tsx b/apps/dokploy/components/dashboard/settings/profile/enable-2fa.tsx index 6cf2c6a5..afc859f4 100644 --- a/apps/dokploy/components/dashboard/settings/profile/enable-2fa.tsx +++ b/apps/dokploy/components/dashboard/settings/profile/enable-2fa.tsx @@ -36,6 +36,7 @@ const PasswordSchema = z.object({ password: z.string().min(8, { message: "Password is required", }), + issuer: z.string().optional(), }); const PinSchema = z.object({ @@ -60,12 +61,86 @@ export const Enable2FA = () => { const [isDialogOpen, setIsDialogOpen] = useState(false); const [step, setStep] = useState<"password" | "verify">("password"); const [isPasswordLoading, setIsPasswordLoading] = useState(false); + const [otpValue, setOtpValue] = useState(""); + + const handleVerifySubmit = async (e: React.FormEvent) => { + e.preventDefault(); + try { + const result = await authClient.twoFactor.verifyTotp({ + code: otpValue, + }); + + if (result.error) { + if (result.error.code === "INVALID_TWO_FACTOR_AUTHENTICATION") { + toast.error("Invalid verification code"); + return; + } + + throw result.error; + } + + if (!result.data) { + throw new Error("No response received from server"); + } + + toast.success("2FA configured successfully"); + utils.user.get.invalidate(); + setIsDialogOpen(false); + } catch (error) { + if (error instanceof Error) { + const errorMessage = + error.message === "Failed to fetch" + ? "Connection error. Please check your internet connection." + : error.message; + + toast.error(errorMessage); + } else { + toast.error("Error verifying 2FA code", { + description: error instanceof Error ? 
error.message : "Unknown error", + }); + } + } + }; + + const passwordForm = useForm({ + resolver: zodResolver(PasswordSchema), + defaultValues: { + password: "", + }, + }); + + const pinForm = useForm({ + resolver: zodResolver(PinSchema), + defaultValues: { + pin: "", + }, + }); + + useEffect(() => { + if (!isDialogOpen) { + setStep("password"); + setData(null); + setBackupCodes([]); + setOtpValue(""); + passwordForm.reset({ + password: "", + issuer: "", + }); + } + }, [isDialogOpen, passwordForm]); + + useEffect(() => { + if (step === "verify") { + setOtpValue(""); + } + }, [step]); const handlePasswordSubmit = async (formData: PasswordForm) => { setIsPasswordLoading(true); try { const { data: enableData, error } = await authClient.twoFactor.enable({ password: formData.password, + issuer: formData.issuer, }); if (!enableData) { @@ -103,75 +178,6 @@ export const Enable2FA = () => { } }; - const handleVerifySubmit = async (formData: PinForm) => { - try { - const result = await authClient.twoFactor.verifyTotp({ - code: formData.pin, - }); - - if (result.error) { - if (result.error.code === "INVALID_TWO_FACTOR_AUTHENTICATION") { - pinForm.setError("pin", { - message: "Invalid code. Please try again.", - }); - toast.error("Invalid verification code"); - return; - } - - throw result.error; - } - - if (!result.data) { - throw new Error("No response received from server"); - } - - toast.success("2FA configured successfully"); - utils.user.get.invalidate(); - setIsDialogOpen(false); - } catch (error) { - if (error instanceof Error) { - const errorMessage = - error.message === "Failed to fetch" - ? "Connection error. Please check your internet connection." 
- : error.message; - - pinForm.setError("pin", { - message: errorMessage, - }); - toast.error(errorMessage); - } else { - pinForm.setError("pin", { - message: "Error verifying code", - }); - toast.error("Error verifying 2FA code"); - } - } - }; - - const passwordForm = useForm({ - resolver: zodResolver(PasswordSchema), - defaultValues: { - password: "", - }, - }); - - const pinForm = useForm({ - resolver: zodResolver(PinSchema), - defaultValues: { - pin: "", - }, - }); - - useEffect(() => { - if (!isDialogOpen) { - setStep("password"); - setData(null); - setBackupCodes([]); - passwordForm.reset(); - pinForm.reset(); - } - }, [isDialogOpen, passwordForm, pinForm]); - return ( @@ -217,6 +223,27 @@ export const Enable2FA = () => { )} /> + ( + + Issuer + + + + + Use a custom issuer to identify the service you're + authenticating with. + + + + )} + /> diff --git a/apps/dokploy/components/dashboard/settings/profile/profile-form.tsx b/apps/dokploy/components/dashboard/settings/profile/profile-form.tsx index 32179378..9532b7d6 100644 --- a/apps/dokploy/components/dashboard/settings/profile/profile-form.tsx +++ b/apps/dokploy/components/dashboard/settings/profile/profile-form.tsx @@ -56,6 +56,7 @@ const randomImages = [ export const ProfileForm = () => { const _utils = api.useUtils(); const { data, refetch, isLoading } = api.user.get.useQuery(); + const { mutateAsync, isLoading: isUpdating, @@ -84,12 +85,17 @@ export const ProfileForm = () => { useEffect(() => { if (data) { - form.reset({ - email: data?.user?.email || "", - password: "", - image: data?.user?.image || "", - currentPassword: "", - }); + form.reset( + { + email: data?.user?.email || "", + password: form.getValues("password") || "", + image: data?.user?.image || "", + currentPassword: form.getValues("currentPassword") || "", + }, + { + keepValues: true, + }, + ); if (data.user.email) { generateSHA256Hash(data.user.email).then((hash) => { @@ -97,8 +103,7 @@ export const ProfileForm = () => { }); } } - 
form.reset(); - }, [form, form.reset, data]); + }, [form, data]); const onSubmit = async (values: Profile) => { await mutateAsync({ @@ -110,7 +115,12 @@ export const ProfileForm = () => { .then(async () => { await refetch(); toast.success("Profile Updated"); - form.reset(); + form.reset({ + email: values.email, + password: "", + image: values.image, + currentPassword: "", + }); }) .catch(() => { toast.error("Error updating the profile"); diff --git a/apps/dokploy/components/dashboard/settings/servers/actions/show-dokploy-actions.tsx b/apps/dokploy/components/dashboard/settings/servers/actions/show-dokploy-actions.tsx index f57dad3c..6850e864 100644 --- a/apps/dokploy/components/dashboard/settings/servers/actions/show-dokploy-actions.tsx +++ b/apps/dokploy/components/dashboard/settings/servers/actions/show-dokploy-actions.tsx @@ -22,6 +22,9 @@ export const ShowDokployActions = () => { const { mutateAsync: reloadServer, isLoading } = api.settings.reloadServer.useMutation(); + const { mutateAsync: cleanRedis } = api.settings.cleanRedis.useMutation(); + const { mutateAsync: reloadRedis } = api.settings.reloadRedis.useMutation(); + return ( @@ -69,6 +72,36 @@ export const ShowDokployActions = () => { {t("settings.server.webServer.updateServerIp")} + + { + await cleanRedis() + .then(async () => { + toast.success("Redis cleaned"); + }) + .catch(() => { + toast.error("Error cleaning Redis"); + }); + }} + > + Clean Redis + + + { + await reloadRedis() + .then(async () => { + toast.success("Redis reloaded"); + }) + .catch(() => { + toast.error("Error reloading Redis"); + }); + }} + > + Reload Redis + diff --git a/apps/dokploy/components/dashboard/settings/ssh-keys/show-ssh-keys.tsx b/apps/dokploy/components/dashboard/settings/ssh-keys/show-ssh-keys.tsx index 5842457b..00d685a8 100644 --- a/apps/dokploy/components/dashboard/settings/ssh-keys/show-ssh-keys.tsx +++ b/apps/dokploy/components/dashboard/settings/ssh-keys/show-ssh-keys.tsx @@ -56,7 +56,7 @@ export const 
ShowDestinations = () => { key={sshKey.sshKeyId} className="flex items-center justify-between bg-sidebar p-1 w-full rounded-lg" > -
+
diff --git a/apps/dokploy/components/dashboard/settings/web-domain.tsx b/apps/dokploy/components/dashboard/settings/web-domain.tsx index a579df39..d35dae35 100644 --- a/apps/dokploy/components/dashboard/settings/web-domain.tsx +++ b/apps/dokploy/components/dashboard/settings/web-domain.tsx @@ -9,6 +9,7 @@ import { import { Form, FormControl, + FormDescription, FormField, FormItem, FormLabel, @@ -22,6 +23,7 @@ import { SelectTrigger, SelectValue, } from "@/components/ui/select"; +import { Switch } from "@/components/ui/switch"; import { api } from "@/utils/api"; import { zodResolver } from "@hookform/resolvers/zod"; import { GlobeIcon } from "lucide-react"; @@ -33,11 +35,19 @@ import { z } from "zod"; const addServerDomain = z .object({ - domain: z.string().min(1, { message: "URL is required" }), + domain: z.string(), letsEncryptEmail: z.string(), + https: z.boolean().optional(), certificateType: z.enum(["letsencrypt", "none", "custom"]), }) .superRefine((data, ctx) => { + if (data.https && !data.certificateType) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ["certificateType"], + message: "Required", + }); + } if (data.certificateType === "letsencrypt" && !data.letsEncryptEmail) { ctx.addIssue({ code: z.ZodIssueCode.custom, @@ -61,15 +71,18 @@ export const WebDomain = () => { domain: "", certificateType: "none", letsEncryptEmail: "", + https: false, }, resolver: zodResolver(addServerDomain), }); + const https = form.watch("https"); useEffect(() => { if (data) { form.reset({ domain: data?.user?.host || "", certificateType: data?.user?.certificateType, letsEncryptEmail: data?.user?.letsEncryptEmail || "", + https: data?.user?.https || false, }); } }, [form, form.reset, data]); @@ -79,6 +92,7 @@ export const WebDomain = () => { host: data.domain, letsEncryptEmail: data.letsEncryptEmail, certificateType: data.certificateType, + https: data.https, }) .then(async () => { await refetch(); @@ -155,44 +169,67 @@ export const WebDomain = () => { /> { - return ( - 
- - {t("settings.server.domain.form.certificate.label")} - - + name="https" + render={({ field }) => ( + +
+ HTTPS + + Automatically provision SSL Certificate. + - - ); - }} +
+ + + +
+ )} /> + {https && ( + { + return ( + + + {t("settings.server.domain.form.certificate.label")} + + + + + ); + }} + /> + )}
- - - - -
- -
- ); -}; - -export default Page; - -Page.getLayout = (page: ReactElement) => { - return {page}; -}; -export async function getServerSideProps( - ctx: GetServerSidePropsContext<{ serviceId: string }>, -) { - const { req, res } = ctx; - const { user, session } = await validateRequest(ctx.req); - if (!user) { - return { - redirect: { - permanent: true, - destination: "/", - }, - }; - } - if (user.role === "member") { - return { - redirect: { - permanent: true, - destination: "/dashboard/settings/profile", - }, - }; - } - - const helpers = createServerSideHelpers({ - router: appRouter, - ctx: { - req: req as any, - res: res as any, - db: null as any, - session: session as any, - user: user as any, - }, - transformer: superjson, - }); - await helpers.user.get.prefetch(); - - return { - props: { - trpcState: helpers.dehydrate(), - }, - }; -} diff --git a/apps/dokploy/public/locales/nl/common.json b/apps/dokploy/public/locales/nl/common.json new file mode 100644 index 00000000..0967ef42 --- /dev/null +++ b/apps/dokploy/public/locales/nl/common.json @@ -0,0 +1 @@ +{} diff --git a/apps/dokploy/public/locales/nl/settings.json b/apps/dokploy/public/locales/nl/settings.json new file mode 100644 index 00000000..c76d9bb9 --- /dev/null +++ b/apps/dokploy/public/locales/nl/settings.json @@ -0,0 +1,58 @@ +{ + "settings.common.save": "Opslaan", + "settings.common.enterTerminal": "Terminal", + "settings.server.domain.title": "Server Domein", + "settings.server.domain.description": "Voeg een domein toe aan jouw server applicatie.", + "settings.server.domain.form.domain": "Domein", + "settings.server.domain.form.letsEncryptEmail": "Let's Encrypt Email", + "settings.server.domain.form.certificate.label": "Certificaat Aanbieder", + "settings.server.domain.form.certificate.placeholder": "Select een certificaat", + "settings.server.domain.form.certificateOptions.none": "Geen", + "settings.server.domain.form.certificateOptions.letsencrypt": "Let's Encrypt", + + 
"settings.server.webServer.title": "Web Server", + "settings.server.webServer.description": "Herlaad of maak de web server schoon.", + "settings.server.webServer.actions": "Acties", + "settings.server.webServer.reload": "Herladen", + "settings.server.webServer.watchLogs": "Bekijk Logs", + "settings.server.webServer.updateServerIp": "Update de Server IP", + "settings.server.webServer.server.label": "Server", + "settings.server.webServer.traefik.label": "Traefik", + "settings.server.webServer.traefik.modifyEnv": "Bewerk Omgeving", + "settings.server.webServer.traefik.managePorts": "Extra Poort Mappings", + "settings.server.webServer.traefik.managePortsDescription": "Bewerk extra Poorten voor Traefik", + "settings.server.webServer.traefik.targetPort": "Doel Poort", + "settings.server.webServer.traefik.publishedPort": "Gepubliceerde Poort", + "settings.server.webServer.traefik.addPort": "Voeg Poort toe", + "settings.server.webServer.traefik.portsUpdated": "Poorten succesvol aangepast", + "settings.server.webServer.traefik.portsUpdateError": "Poorten niet succesvol aangepast", + "settings.server.webServer.traefik.publishMode": "Publiceer Mode", + "settings.server.webServer.storage.label": "Opslag", + "settings.server.webServer.storage.cleanUnusedImages": "Maak ongebruikte images schoon", + "settings.server.webServer.storage.cleanUnusedVolumes": "Maak ongebruikte volumes schoon", + "settings.server.webServer.storage.cleanStoppedContainers": "Maak gestopte containers schoon", + "settings.server.webServer.storage.cleanDockerBuilder": "Maak Docker Builder & Systeem schoon", + "settings.server.webServer.storage.cleanMonitoring": "Maak monitoring schoon", + "settings.server.webServer.storage.cleanAll": "Maak alles schoon", + + "settings.profile.title": "Account", + "settings.profile.description": "Verander details van account.", + "settings.profile.email": "Email", + "settings.profile.password": "Wachtwoord", + "settings.profile.avatar": "Profiel Icoon", + + 
"settings.appearance.title": "Uiterlijk", + "settings.appearance.description": "Verander het thema van je dashboard.", + "settings.appearance.theme": "Thema", + "settings.appearance.themeDescription": "Selecteer een thema voor je dashboard.", + "settings.appearance.themes.light": "Licht", + "settings.appearance.themes.dark": "Donker", + "settings.appearance.themes.system": "Systeem", + "settings.appearance.language": "Taal", + "settings.appearance.languageDescription": "Selecteer een taal voor je dashboard.", + + "settings.terminal.connectionSettings": "Verbindings instellingen", + "settings.terminal.ipAddress": "IP Address", + "settings.terminal.port": "Poort", + "settings.terminal.username": "Gebruikersnaam" +} diff --git a/apps/dokploy/public/locales/zh-Hans/common.json b/apps/dokploy/public/locales/zh-Hans/common.json index 0967ef42..91af07ff 100644 --- a/apps/dokploy/public/locales/zh-Hans/common.json +++ b/apps/dokploy/public/locales/zh-Hans/common.json @@ -1 +1,78 @@ -{} +{ + "dashboard.title": "仪表盘", + "dashboard.overview": "概览", + "dashboard.projects": "项目", + "dashboard.servers": "服务器", + "dashboard.docker": "Docker", + "dashboard.monitoring": "监控", + "dashboard.settings": "设置", + "dashboard.logout": "退出登录", + "dashboard.profile": "个人资料", + "dashboard.terminal": "终端", + "dashboard.containers": "容器", + "dashboard.images": "镜像", + "dashboard.volumes": "卷", + "dashboard.networks": "网络", + "button.create": "创建", + "button.edit": "编辑", + "button.delete": "删除", + "button.cancel": "取消", + "button.save": "保存", + "button.confirm": "确认", + "button.back": "返回", + "button.next": "下一步", + "button.finish": "完成", + "status.running": "运行中", + "status.stopped": "已停止", + "status.error": "错误", + "status.pending": "等待中", + "status.success": "成功", + "status.failed": "失败", + "form.required": "必填", + "form.invalid": "无效", + "form.submit": "提交", + "form.reset": "重置", + "notification.success": "操作成功", + "notification.error": "操作失败", + "notification.warning": "警告", + 
"notification.info": "信息", + "time.now": "刚刚", + "time.minutes": "分钟前", + "time.hours": "小时前", + "time.days": "天前", + "filter.all": "全部", + "filter.active": "活跃", + "filter.inactive": "不活跃", + "sort.asc": "升序", + "sort.desc": "降序", + "search.placeholder": "搜索...", + "search.noResults": "无结果", + "pagination.prev": "上一页", + "pagination.next": "下一页", + "pagination.of": "共 {0} 页", + "error.notFound": "未找到", + "error.serverError": "服务器错误", + "error.unauthorized": "未授权", + "error.forbidden": "禁止访问", + "loading": "加载中...", + "empty": "暂无数据", + "more": "更多", + "less": "收起", + "project.create": "创建项目", + "project.edit": "编辑项目", + "project.delete": "删除项目", + "project.name": "项目名称", + "project.description": "项目描述", + "service.create": "创建服务", + "service.edit": "编辑服务", + "service.delete": "删除服务", + "service.name": "服务名称", + "service.type": "服务类型", + "domain.add": "添加域名", + "domain.remove": "移除域名", + "environment.variables": "环境变量", + "environment.add": "添加环境变量", + "environment.edit": "编辑环境变量", + "environment.name": "变量名", + "environment.value": "变量值" +} diff --git a/apps/dokploy/public/locales/zh-Hans/settings.json b/apps/dokploy/public/locales/zh-Hans/settings.json index c74fb21f..d70676d6 100644 --- a/apps/dokploy/public/locales/zh-Hans/settings.json +++ b/apps/dokploy/public/locales/zh-Hans/settings.json @@ -1,17 +1,16 @@ { "settings.common.save": "保存", - "settings.common.enterTerminal": "进入终端", - "settings.server.domain.title": "域名设置", - "settings.server.domain.description": "添加域名到服务器", + "settings.common.enterTerminal": "终端", + "settings.server.domain.title": "服务器域名", + "settings.server.domain.description": "为您的服务器应用添加域名。", "settings.server.domain.form.domain": "域名", "settings.server.domain.form.letsEncryptEmail": "Let's Encrypt 邮箱", - "settings.server.domain.form.certificate.label": "证书", - "settings.server.domain.form.certificate.placeholder": "选择一个证书", + "settings.server.domain.form.certificate.label": "证书提供商", + "settings.server.domain.form.certificate.placeholder": 
"选择证书", "settings.server.domain.form.certificateOptions.none": "无", "settings.server.domain.form.certificateOptions.letsencrypt": "Let's Encrypt", - - "settings.server.webServer.title": "服务器设置", - "settings.server.webServer.description": "管理服务器", + "settings.server.webServer.title": "Web 服务器", + "settings.server.webServer.description": "重载或清理 Web 服务器。", "settings.server.webServer.actions": "操作", "settings.server.webServer.reload": "重新加载", "settings.server.webServer.watchLogs": "查看日志", @@ -19,40 +18,50 @@ "settings.server.webServer.server.label": "服务器", "settings.server.webServer.traefik.label": "Traefik", "settings.server.webServer.traefik.modifyEnv": "修改环境变量", - "settings.server.webServer.traefik.managePorts": "端口转发", - "settings.server.webServer.traefik.managePortsDescription": "添加或删除 Traefik 的其他端口", + "settings.server.webServer.traefik.managePorts": "额外端口映射", + "settings.server.webServer.traefik.managePortsDescription": "为 Traefik 添加或删除额外端口", "settings.server.webServer.traefik.targetPort": "目标端口", - "settings.server.webServer.traefik.publishedPort": "对外端口", + "settings.server.webServer.traefik.publishedPort": "发布端口", "settings.server.webServer.traefik.addPort": "添加端口", "settings.server.webServer.traefik.portsUpdated": "端口更新成功", "settings.server.webServer.traefik.portsUpdateError": "端口更新失败", - "settings.server.webServer.traefik.publishMode": "端口映射", + "settings.server.webServer.traefik.publishMode": "发布模式", "settings.server.webServer.storage.label": "存储空间", "settings.server.webServer.storage.cleanUnusedImages": "清理未使用的镜像", "settings.server.webServer.storage.cleanUnusedVolumes": "清理未使用的卷", "settings.server.webServer.storage.cleanStoppedContainers": "清理已停止的容器", - "settings.server.webServer.storage.cleanDockerBuilder": "清理 Docker Builder 与 系统缓存", + "settings.server.webServer.storage.cleanDockerBuilder": "清理 Docker Builder 和系统", "settings.server.webServer.storage.cleanMonitoring": "清理监控数据", "settings.server.webServer.storage.cleanAll": "清理所有内容", - 
"settings.profile.title": "账户", - "settings.profile.description": "更改您的个人资料", + "settings.profile.description": "在此更改您的个人资料详情。", "settings.profile.email": "邮箱", "settings.profile.password": "密码", "settings.profile.avatar": "头像", - "settings.appearance.title": "外观", - "settings.appearance.description": "自定义面板主题", + "settings.appearance.description": "自定义您的仪表盘主题。", "settings.appearance.theme": "主题", - "settings.appearance.themeDescription": "选择面板主题", + "settings.appearance.themeDescription": "为您的仪表盘选择主题", "settings.appearance.themes.light": "明亮", - "settings.appearance.themes.dark": "黑暗", - "settings.appearance.themes.system": "系统主题", + "settings.appearance.themes.dark": "暗黑", + "settings.appearance.themes.system": "跟随系统", "settings.appearance.language": "语言", - "settings.appearance.languageDescription": "选择面板语言", - - "settings.terminal.connectionSettings": "终端设置", - "settings.terminal.ipAddress": "IP", + "settings.appearance.languageDescription": "为您的仪表盘选择语言", + "settings.terminal.connectionSettings": "连接设置", + "settings.terminal.ipAddress": "IP 地址", "settings.terminal.port": "端口", - "settings.terminal.username": "用户名" + "settings.terminal.username": "用户名", + "settings.settings": "设置", + "settings.general": "通用设置", + "settings.security": "安全", + "settings.users": "用户管理", + "settings.roles": "角色管理", + "settings.permissions": "权限", + "settings.api": "API设置", + "settings.certificates": "证书管理", + "settings.ssh": "SSH密钥", + "settings.backups": "备份", + "settings.logs": "日志", + "settings.updates": "更新", + "settings.network": "网络" } diff --git a/apps/dokploy/server/api/routers/application.ts b/apps/dokploy/server/api/routers/application.ts index 2397e4ca..ca7939ac 100644 --- a/apps/dokploy/server/api/routers/application.ts +++ b/apps/dokploy/server/api/routers/application.ts @@ -33,6 +33,7 @@ import { findApplicationById, findProjectById, getApplicationStats, + mechanizeDockerContainer, readConfig, readRemoteConfig, removeDeployments, @@ -132,28 +133,36 @@ export const 
applicationRouter = createTRPCRouter({ .input(apiReloadApplication) .mutation(async ({ input, ctx }) => { const application = await findApplicationById(input.applicationId); - if ( - application.project.organizationId !== ctx.session.activeOrganizationId - ) { + + try { + if ( + application.project.organizationId !== + ctx.session.activeOrganizationId + ) { + throw new TRPCError({ + code: "UNAUTHORIZED", + message: "You are not authorized to reload this application", + }); + } + + if (application.serverId) { + await stopServiceRemote(application.serverId, input.appName); + } else { + await stopService(input.appName); + } + + await updateApplicationStatus(input.applicationId, "idle"); + await mechanizeDockerContainer(application); + await updateApplicationStatus(input.applicationId, "done"); + return true; + } catch (error) { + await updateApplicationStatus(input.applicationId, "error"); throw new TRPCError({ - code: "UNAUTHORIZED", - message: "You are not authorized to reload this application", + code: "INTERNAL_SERVER_ERROR", + message: "Error reloading application", + cause: error, }); } - if (application.serverId) { - await stopServiceRemote(application.serverId, input.appName); - } else { - await stopService(input.appName); - } - await updateApplicationStatus(input.applicationId, "idle"); - - if (application.serverId) { - await startServiceRemote(application.serverId, input.appName); - } else { - await startService(input.appName); - } - await updateApplicationStatus(input.applicationId, "done"); - return true; }), delete: protectedProcedure @@ -346,6 +355,8 @@ export const applicationRouter = createTRPCRouter({ applicationStatus: "idle", githubId: input.githubId, watchPaths: input.watchPaths, + triggerType: input.triggerType, + enableSubmodules: input.enableSubmodules, }); return true; @@ -373,6 +384,7 @@ export const applicationRouter = createTRPCRouter({ gitlabProjectId: input.gitlabProjectId, gitlabPathNamespace: input.gitlabPathNamespace, watchPaths: 
input.watchPaths, + enableSubmodules: input.enableSubmodules, }); return true; @@ -398,6 +410,7 @@ export const applicationRouter = createTRPCRouter({ applicationStatus: "idle", bitbucketId: input.bitbucketId, watchPaths: input.watchPaths, + enableSubmodules: input.enableSubmodules, }); return true; @@ -423,6 +436,7 @@ export const applicationRouter = createTRPCRouter({ applicationStatus: "idle", giteaId: input.giteaId, watchPaths: input.watchPaths, + enableSubmodules: input.enableSubmodules, }); return true; @@ -470,6 +484,7 @@ export const applicationRouter = createTRPCRouter({ sourceType: "git", applicationStatus: "idle", watchPaths: input.watchPaths, + enableSubmodules: input.enableSubmodules, }); return true; diff --git a/apps/dokploy/server/api/routers/backup.ts b/apps/dokploy/server/api/routers/backup.ts index c691a406..0aeebb02 100644 --- a/apps/dokploy/server/api/routers/backup.ts +++ b/apps/dokploy/server/api/routers/backup.ts @@ -31,7 +31,10 @@ import { } from "@dokploy/server"; import { findDestinationById } from "@dokploy/server/services/destination"; -import { getS3Credentials } from "@dokploy/server/utils/backups/utils"; +import { + getS3Credentials, + normalizeS3Path, +} from "@dokploy/server/utils/backups/utils"; import { execAsync, execAsyncRemote, @@ -47,6 +50,18 @@ import { TRPCError } from "@trpc/server"; import { observable } from "@trpc/server/observable"; import { z } from "zod"; +interface RcloneFile { + Path: string; + Name: string; + Size: number; + IsDir: boolean; + Tier?: string; + Hashes?: { + MD5?: string; + SHA1?: string; + }; +} + export const backupRouter = createTRPCRouter({ create: protectedProcedure .input(apiCreateBackup) @@ -257,7 +272,7 @@ export const backupRouter = createTRPCRouter({ const lastSlashIndex = input.search.lastIndexOf("/"); const baseDir = lastSlashIndex !== -1 - ? input.search.slice(0, lastSlashIndex + 1) + ? 
normalizeS3Path(input.search.slice(0, lastSlashIndex + 1)) : ""; const searchTerm = lastSlashIndex !== -1 @@ -265,32 +280,47 @@ export const backupRouter = createTRPCRouter({ : input.search; const searchPath = baseDir ? `${bucketPath}/${baseDir}` : bucketPath; - const listCommand = `rclone lsf ${rcloneFlags.join(" ")} "${searchPath}" | head -n 100`; + const listCommand = `rclone lsjson ${rcloneFlags.join(" ")} "${searchPath}" --no-mimetype --no-modtime 2>/dev/null`; let stdout = ""; if (input.serverId) { - const result = await execAsyncRemote(listCommand, input.serverId); + const result = await execAsyncRemote(input.serverId, listCommand); stdout = result.stdout; } else { const result = await execAsync(listCommand); stdout = result.stdout; } - const files = stdout.split("\n").filter(Boolean); + let files: RcloneFile[] = []; + try { + files = JSON.parse(stdout) as RcloneFile[]; + } catch (error) { + console.error("Error parsing JSON response:", error); + console.error("Raw stdout:", stdout); + throw new Error("Failed to parse backup files list"); + } + + // Limit to first 100 files const results = baseDir - ? files.map((file) => `${baseDir}${file}`) + ? 
files.map((file) => ({ + ...file, + Path: `${baseDir}${file.Path}`, + })) : files; if (searchTerm) { - return results.filter((file) => - file.toLowerCase().includes(searchTerm.toLowerCase()), - ); + return results + .filter((file) => + file.Path.toLowerCase().includes(searchTerm.toLowerCase()), + ) + .slice(0, 100); } - return results; + return results.slice(0, 100); } catch (error) { + console.error("Error in listBackupFiles:", error); throw new TRPCError({ code: "BAD_REQUEST", message: diff --git a/apps/dokploy/server/api/routers/mount.ts b/apps/dokploy/server/api/routers/mount.ts index 0cfb0c07..9a189f61 100644 --- a/apps/dokploy/server/api/routers/mount.ts +++ b/apps/dokploy/server/api/routers/mount.ts @@ -31,7 +31,6 @@ export const mountRouter = createTRPCRouter({ update: protectedProcedure .input(apiUpdateMount) .mutation(async ({ input }) => { - await updateMount(input.mountId, input); - return true; + return await updateMount(input.mountId, input); }), }); diff --git a/apps/dokploy/server/api/routers/registry.ts b/apps/dokploy/server/api/routers/registry.ts index a9a6be89..5486f37c 100644 --- a/apps/dokploy/server/api/routers/registry.ts +++ b/apps/dokploy/server/api/routers/registry.ts @@ -10,8 +10,8 @@ import { import { IS_CLOUD, createRegistry, - execAsync, execAsyncRemote, + execFileAsync, findRegistryById, removeRegistry, updateRegistry, @@ -83,7 +83,13 @@ export const registryRouter = createTRPCRouter({ .input(apiTestRegistry) .mutation(async ({ input }) => { try { - const loginCommand = `echo ${input.password} | docker login ${input.registryUrl} --username ${input.username} --password-stdin`; + const args = [ + "login", + input.registryUrl, + "--username", + input.username, + "--password-stdin", + ]; if (IS_CLOUD && !input.serverId) { throw new TRPCError({ @@ -93,9 +99,14 @@ export const registryRouter = createTRPCRouter({ } if (input.serverId && input.serverId !== "none") { - await execAsyncRemote(input.serverId, loginCommand); + await 
execAsyncRemote( + input.serverId, + `echo ${input.password} | docker ${args.join(" ")}`, + ); } else { - await execAsync(loginCommand); + await execFileAsync("docker", args, { + input: Buffer.from(input.password).toString(), + }); } return true; diff --git a/apps/dokploy/server/api/routers/settings.ts b/apps/dokploy/server/api/routers/settings.ts index 70f14ec3..5ca4c106 100644 --- a/apps/dokploy/server/api/routers/settings.ts +++ b/apps/dokploy/server/api/routers/settings.ts @@ -79,6 +79,33 @@ export const settingsRouter = createTRPCRouter({ await execAsync(`docker service update --force ${stdout.trim()}`); return true; }), + cleanRedis: adminProcedure.mutation(async () => { + if (IS_CLOUD) { + return true; + } + + const { stdout: containerId } = await execAsync( + `docker ps --filter "name=dokploy-redis" --filter "status=running" -q | head -n 1`, + ); + + if (!containerId) { + throw new Error("Redis container not found"); + } + + const redisContainerId = containerId.trim(); + + await execAsync(`docker exec -i ${redisContainerId} redis-cli flushall`); + return true; + }), + reloadRedis: adminProcedure.mutation(async () => { + if (IS_CLOUD) { + return true; + } + + await execAsync("docker service scale dokploy-redis=0"); + await execAsync("docker service scale dokploy-redis=1"); + return true; + }), reloadTraefik: adminProcedure .input(apiServerSchema) .mutation(async ({ input }) => { @@ -184,6 +211,7 @@ export const settingsRouter = createTRPCRouter({ letsEncryptEmail: input.letsEncryptEmail, }), certificateType: input.certificateType, + https: input.https, }); if (!user) { diff --git a/apps/dokploy/server/server.ts b/apps/dokploy/server/server.ts index 8ec533ff..fd908245 100644 --- a/apps/dokploy/server/server.ts +++ b/apps/dokploy/server/server.ts @@ -21,6 +21,7 @@ import { setupTerminalWebSocketServer } from "./wss/terminal"; config({ path: ".env" }); const PORT = Number.parseInt(process.env.PORT || "3000", 10); +const HOST = process.env.HOST || "0.0.0.0"; 
const dev = process.env.NODE_ENV !== "production"; const app = next({ dev, turbopack: process.env.TURBOPACK === "1" }); const handle = app.getRequestHandler(); @@ -55,8 +56,8 @@ void app.prepare().then(async () => { await migration(); } - server.listen(PORT); - console.log("Server Started:", PORT); + server.listen(PORT, HOST); + console.log(`Server Started on: http://${HOST}:${PORT}`); if (!IS_CLOUD) { console.log("Starting Deployment Worker"); const { deploymentWorker } = await import("./queues/deployments-queue"); diff --git a/apps/dokploy/server/wss/drawer-logs.ts b/apps/dokploy/server/wss/drawer-logs.ts index 404dfeee..0202ae52 100644 --- a/apps/dokploy/server/wss/drawer-logs.ts +++ b/apps/dokploy/server/wss/drawer-logs.ts @@ -1,9 +1,9 @@ import type http from "node:http"; -import { validateRequest } from "@dokploy/server/index"; import { applyWSSHandler } from "@trpc/server/adapters/ws"; import { WebSocketServer } from "ws"; import { appRouter } from "../api/root"; import { createTRPCContext } from "../api/trpc"; +import { validateRequest } from "@dokploy/server/lib/auth"; export const setupDrawerLogsWebSocketServer = ( server: http.Server, @@ -13,11 +13,13 @@ export const setupDrawerLogsWebSocketServer = ( path: "/drawer-logs", }); + // Set up tRPC WebSocket handler applyWSSHandler({ wss: wssTerm, router: appRouter, createContext: createTRPCContext as any, }); + server.on("upgrade", (req, socket, head) => { const { pathname } = new URL(req.url || "", `http://${req.headers.host}`); diff --git a/packages/server/package.json b/packages/server/package.json index 1ac0c8a7..a02d7c21 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -36,11 +36,11 @@ "@ai-sdk/mistral": "^1.0.6", "@ai-sdk/openai": "^1.0.12", "@ai-sdk/openai-compatible": "^0.0.13", - "@better-auth/utils": "0.2.3", + "@better-auth/utils": "0.2.4", "@oslojs/encoding": "1.1.0", "@oslojs/crypto": "1.0.1", "drizzle-dbml-generator": "0.10.0", - "better-auth": "1.2.4", + 
"better-auth": "1.2.6", "@faker-js/faker": "^8.4.1", "@octokit/auth-app": "^6.0.4", "@react-email/components": "^0.0.21", diff --git a/packages/server/src/db/schema/application.ts b/packages/server/src/db/schema/application.ts index 5ac076fc..2741b413 100644 --- a/packages/server/src/db/schema/application.ts +++ b/packages/server/src/db/schema/application.ts @@ -24,7 +24,7 @@ import { redirects } from "./redirects"; import { registry } from "./registry"; import { security } from "./security"; import { server } from "./server"; -import { applicationStatus, certificateType } from "./shared"; +import { applicationStatus, certificateType, triggerType } from "./shared"; import { sshKeys } from "./ssh-key"; import { generateAppName } from "./utils"; @@ -149,6 +149,7 @@ export const applications = pgTable("application", { owner: text("owner"), branch: text("branch"), buildPath: text("buildPath").default("/"), + triggerType: triggerType("triggerType").default("push"), autoDeploy: boolean("autoDeploy").$defaultFn(() => true), // Gitlab gitlabProjectId: integer("gitlabProjectId"), @@ -182,6 +183,7 @@ export const applications = pgTable("application", { onDelete: "set null", }, ), + enableSubmodules: boolean("enableSubmodules").notNull().default(false), dockerfile: text("dockerfile"), dockerContextPath: text("dockerContextPath"), dockerBuildStage: text("dockerBuildStage"), @@ -470,8 +472,12 @@ export const apiSaveGithubProvider = createSchema buildPath: true, githubId: true, watchPaths: true, + enableSubmodules: true, }) - .required(); + .required() + .extend({ + triggerType: z.enum(["push", "tag"]).default("push"), + }); export const apiSaveGitlabProvider = createSchema .pick({ @@ -484,6 +490,7 @@ export const apiSaveGitlabProvider = createSchema gitlabProjectId: true, gitlabPathNamespace: true, watchPaths: true, + enableSubmodules: true, }) .required(); @@ -496,6 +503,7 @@ export const apiSaveBitbucketProvider = createSchema bitbucketId: true, applicationId: true, 
watchPaths: true, + enableSubmodules: true, }) .required(); @@ -508,6 +516,7 @@ export const apiSaveGiteaProvider = createSchema giteaRepository: true, giteaId: true, watchPaths: true, + enableSubmodules: true, }) .required(); @@ -528,6 +537,7 @@ export const apiSaveGitProvider = createSchema customGitBuildPath: true, customGitUrl: true, watchPaths: true, + enableSubmodules: true, }) .required() .merge( diff --git a/packages/server/src/db/schema/compose.ts b/packages/server/src/db/schema/compose.ts index 86f1fcaf..5e62ce55 100644 --- a/packages/server/src/db/schema/compose.ts +++ b/packages/server/src/db/schema/compose.ts @@ -12,7 +12,7 @@ import { gitlab } from "./gitlab"; import { mounts } from "./mount"; import { projects } from "./project"; import { server } from "./server"; -import { applicationStatus } from "./shared"; +import { applicationStatus, triggerType } from "./shared"; import { sshKeys } from "./ssh-key"; import { generateAppName } from "./utils"; @@ -72,10 +72,12 @@ export const compose = pgTable("compose", { ), command: text("command").notNull().default(""), // + enableSubmodules: boolean("enableSubmodules").notNull().default(false), composePath: text("composePath").notNull().default("./docker-compose.yml"), suffix: text("suffix").notNull().default(""), randomize: boolean("randomize").notNull().default(false), isolatedDeployment: boolean("isolatedDeployment").notNull().default(false), + triggerType: triggerType("triggerType").default("push"), composeStatus: applicationStatus("composeStatus").notNull().default("idle"), projectId: text("projectId") .notNull() diff --git a/packages/server/src/db/schema/shared.ts b/packages/server/src/db/schema/shared.ts index 2fdc53e2..268ef987 100644 --- a/packages/server/src/db/schema/shared.ts +++ b/packages/server/src/db/schema/shared.ts @@ -12,3 +12,5 @@ export const certificateType = pgEnum("certificateType", [ "none", "custom", ]); + +export const triggerType = pgEnum("triggerType", ["push", "tag"]); diff --git 
a/packages/server/src/db/schema/user.ts b/packages/server/src/db/schema/user.ts index 8ae2cdc8..e67e11b1 100644 --- a/packages/server/src/db/schema/user.ts +++ b/packages/server/src/db/schema/user.ts @@ -50,6 +50,7 @@ export const users_temp = pgTable("user_temp", { // Admin serverIp: text("serverIp"), certificateType: certificateType("certificateType").notNull().default("none"), + https: boolean("https").notNull().default(false), host: text("host"), letsEncryptEmail: text("letsEncryptEmail"), sshPrivateKey: text("sshPrivateKey"), @@ -203,10 +204,12 @@ export const apiAssignDomain = createSchema host: true, certificateType: true, letsEncryptEmail: true, + https: true, }) .required() .partial({ letsEncryptEmail: true, + https: true, }); export const apiUpdateDockerCleanup = createSchema diff --git a/packages/server/src/lib/auth.ts b/packages/server/src/lib/auth.ts index 7ff53117..dbe8842d 100644 --- a/packages/server/src/lib/auth.ts +++ b/packages/server/src/lib/auth.ts @@ -201,7 +201,7 @@ const { handler, api } = betterAuth({ const host = process.env.NODE_ENV === "development" ? 
"http://localhost:3000" - : "https://dokploy.com"; + : "https://app.dokploy.com"; const inviteLink = `${host}/invitation?token=${data.id}`; await sendEmail({ diff --git a/packages/server/src/services/compose.ts b/packages/server/src/services/compose.ts index 5a318c0d..d855ff9e 100644 --- a/packages/server/src/services/compose.ts +++ b/packages/server/src/services/compose.ts @@ -356,6 +356,7 @@ export const deployRemoteCompose = async ({ deployment.logPath, true, ); + console.log(command); } else if (compose.sourceType === "raw") { command += getCreateComposeFileCommand(compose, deployment.logPath); } else if (compose.sourceType === "gitea") { diff --git a/packages/server/src/services/mount.ts b/packages/server/src/services/mount.ts index 836feace..aca0db05 100644 --- a/packages/server/src/services/mount.ts +++ b/packages/server/src/services/mount.ts @@ -144,7 +144,8 @@ export const updateMount = async ( await deleteFileMount(mountId); await createFileMount(mountId); } - return mount; + + return await findMountById(mountId); }); }; diff --git a/packages/server/src/setup/server-setup.ts b/packages/server/src/setup/server-setup.ts index 6fefabe9..bb0d34d8 100644 --- a/packages/server/src/setup/server-setup.ts +++ b/packages/server/src/setup/server-setup.ts @@ -76,7 +76,7 @@ CURRENT_USER=$USER echo "Installing requirements for: OS: $OS_TYPE" if [ $EUID != 0 ]; then - echo "Please run this script as root or with sudo ❌" + echo "Please run this script as root or with sudo ❌" exit fi @@ -263,7 +263,7 @@ const setupMainDirectory = () => ` # Create the /etc/dokploy directory mkdir -p /etc/dokploy chmod 777 /etc/dokploy - + echo "Directory /etc/dokploy created ✅" fi `; @@ -276,16 +276,16 @@ export const setupSwarm = () => ` # Get IP address get_ip() { local ip="" - + # Try IPv4 with multiple services # First attempt: ifconfig.io ip=\$(curl -4s --connect-timeout 5 https://ifconfig.io 2>/dev/null) - + # Second attempt: icanhazip.com if [ -z "\$ip" ]; then ip=\$(curl -4s 
--connect-timeout 5 https://icanhazip.com 2>/dev/null) fi - + # Third attempt: ipecho.net if [ -z "\$ip" ]; then ip=\$(curl -4s --connect-timeout 5 https://ipecho.net/plain 2>/dev/null) @@ -295,12 +295,12 @@ export const setupSwarm = () => ` if [ -z "\$ip" ]; then # Try IPv6 with ifconfig.io ip=\$(curl -6s --connect-timeout 5 https://ifconfig.io 2>/dev/null) - + # Try IPv6 with icanhazip.com if [ -z "\$ip" ]; then ip=\$(curl -6s --connect-timeout 5 https://icanhazip.com 2>/dev/null) fi - + # Try IPv6 with ipecho.net if [ -z "\$ip" ]; then ip=\$(curl -6s --connect-timeout 5 https://ipecho.net/plain 2>/dev/null) @@ -549,7 +549,7 @@ export const createTraefikInstance = () => { sleep 8 echo "Traefik migrated to Standalone ✅" fi - + if docker inspect dokploy-traefik > /dev/null 2>&1; then echo "Traefik already exists ✅" else @@ -577,7 +577,7 @@ const installNixpacks = () => ` if command_exists nixpacks; then echo "Nixpacks already installed ✅" else - export NIXPACKS_VERSION=1.29.1 + export NIXPACKS_VERSION=1.35.0 bash -c "$(curl -fsSL https://nixpacks.com/install.sh)" echo "Nixpacks version $NIXPACKS_VERSION installed ✅" fi diff --git a/packages/server/src/templates/index.ts b/packages/server/src/templates/index.ts index 6ae26418..c42dd1b7 100644 --- a/packages/server/src/templates/index.ts +++ b/packages/server/src/templates/index.ts @@ -1,4 +1,4 @@ -import { randomBytes } from "node:crypto"; +import { randomBytes, createHmac } from "node:crypto"; import { existsSync } from "node:fs"; import { mkdir, readFile, writeFile } from "node:fs/promises"; import { join } from "node:path"; @@ -24,6 +24,12 @@ export interface Template { domains: DomainSchema[]; } +export interface GenerateJWTOptions { + length?: number; + secret?: string; + payload?: Record | undefined; +} + export const generateRandomDomain = ({ serverIp, projectName, @@ -61,8 +67,48 @@ export function generateBase64(bytes = 32): string { return randomBytes(bytes).toString("base64"); } -export function 
generateJwt(length = 256): string { - return randomBytes(length).toString("hex"); +function safeBase64(str: string): string { + return str.replace(/=/g, "").replace(/\+/g, "-").replace(/\//g, "_"); +} +function objToJWTBase64(obj: any): string { + return safeBase64( + Buffer.from(JSON.stringify(obj), "utf8").toString("base64"), + ); +} + +export function generateJwt(options: GenerateJWTOptions = {}): string { + let { length, secret, payload = {} } = options; + if (length) { + return randomBytes(length).toString("hex"); + } + const encodedHeader = objToJWTBase64({ + alg: "HS256", + typ: "JWT", + }); + if (!payload.iss) { + payload.iss = "dokploy"; + } + if (!payload.iat) { + payload.iat = Math.floor(Date.now() / 1000); + } + if (!payload.exp) { + payload.exp = Math.floor(new Date("2030-01-01T00:00:00Z").getTime() / 1000); + } + const encodedPayload = objToJWTBase64({ + iat: Math.floor(Date.now() / 1000), + exp: Math.floor(new Date("2030-01-01T00:00:00Z").getTime() / 1000), + ...payload, + }); + if (!secret) { + secret = randomBytes(32).toString("hex"); + } + const signature = safeBase64( + createHmac("SHA256", secret) + .update(`${encodedHeader}.${encodedPayload}`) + .digest("base64"), + ); + + return `${encodedHeader}.${encodedPayload}.${signature}`; } /** diff --git a/packages/server/src/templates/processors.ts b/packages/server/src/templates/processors.ts index 31e7861a..5d9270aa 100644 --- a/packages/server/src/templates/processors.ts +++ b/packages/server/src/templates/processors.ts @@ -65,7 +65,7 @@ export interface Template { /** * Process a string value and replace variables */ -function processValue( +export function processValue( value: string, variables: Record, schema: Schema, @@ -84,11 +84,11 @@ function processValue( const length = Number.parseInt(varName.split(":")[1], 10) || 32; return generateBase64(length); } + if (varName.startsWith("password:")) { const length = Number.parseInt(varName.split(":")[1], 10) || 16; return generatePassword(length); } 
- if (varName === "password") { return generatePassword(16); } @@ -97,14 +97,31 @@ function processValue( const length = Number.parseInt(varName.split(":")[1], 10) || 8; return generateHash(length); } + if (varName === "hash") { + return generateHash(); + } + if (varName === "uuid") { return crypto.randomUUID(); } - if (varName === "timestamp") { + if (varName === "timestamp" || varName === "timestampms") { return Date.now().toString(); } + if (varName === "timestamps") { + return Math.round(Date.now() / 1000).toString(); + } + + if (varName.startsWith("timestampms:")) { + return new Date(varName.slice(12)).getTime().toString(); + } + if (varName.startsWith("timestamps:")) { + return Math.round( + new Date(varName.slice(11)).getTime() / 1000, + ).toString(); + } + if (varName === "randomPort") { return Math.floor(Math.random() * 65535).toString(); } @@ -114,8 +131,34 @@ function processValue( } if (varName.startsWith("jwt:")) { - const length = Number.parseInt(varName.split(":")[1], 10) || 256; - return generateJwt(length); + const params: string[] = varName.split(":").slice(1); + if (params.length === 1 && params[0] && params[0].match(/^\d{1,3}$/)) { + return generateJwt({ length: Number.parseInt(params[0], 10) }); + } + let [secret, payload] = params; + if (typeof payload === "string" && variables[payload]) { + payload = variables[payload]; + } + if ( + typeof payload === "string" && + payload.startsWith("{") && + payload.endsWith("}") + ) { + try { + payload = JSON.parse(payload); + } catch (e) { + // If payload is not a valid JSON, invalid it + payload = undefined; + console.error("Invalid JWT payload", e); + } + } + if (typeof payload !== "object") { + payload = undefined; + } + return generateJwt({ + secret: secret ? 
variables[secret] || secret : undefined, + payload: payload as any, + }); } if (varName === "username") { @@ -147,7 +190,7 @@ export function processVariables( ): Record { const variables: Record = {}; - // First pass: Process variables that don't depend on other variables + // First pass: Process some variables that don't depend on other variables for (const [key, value] of Object.entries(template.variables)) { if (typeof value !== "string") continue; @@ -161,6 +204,8 @@ export function processVariables( const match = value.match(/\${password:(\d+)}/); const length = match?.[1] ? Number.parseInt(match[1], 10) : 16; variables[key] = generatePassword(length); + } else if (value === "${hash}") { + variables[key] = generateHash(); } else if (value.startsWith("${hash:")) { const match = value.match(/\${hash:(\d+)}/); const length = match?.[1] ? Number.parseInt(match[1], 10) : 8; diff --git a/packages/server/src/utils/backups/index.ts b/packages/server/src/utils/backups/index.ts index b83d8279..6c940406 100644 --- a/packages/server/src/utils/backups/index.ts +++ b/packages/server/src/utils/backups/index.ts @@ -106,8 +106,8 @@ export const keepLatestNBackups = async ( backup.prefix, ); - // --include "*.sql.gz" ensures nothing else other than the db backup files are touched by rclone - const rcloneList = `rclone lsf ${rcloneFlags.join(" ")} --include "*.sql.gz" ${backupFilesPath}`; + // --include "*.sql.gz" or "*.zip" ensures nothing else other than the dokploy backup files are touched by rclone + const rcloneList = `rclone lsf ${rcloneFlags.join(" ")} --include "*${backup.databaseType === "web-server" ? 
".zip" : ".sql.gz"}" ${backupFilesPath}`; // when we pipe the above command with this one, we only get the list of files we want to delete const sortAndPickUnwantedBackups = `sort -r | tail -n +$((${backup.keepLatestCount}+1)) | xargs -I{}`; // this command deletes the files diff --git a/packages/server/src/utils/backups/mariadb.ts b/packages/server/src/utils/backups/mariadb.ts index 56c2919c..776c5ff4 100644 --- a/packages/server/src/utils/backups/mariadb.ts +++ b/packages/server/src/utils/backups/mariadb.ts @@ -1,4 +1,3 @@ -import path from "node:path"; import type { BackupSchedule } from "@dokploy/server/services/backup"; import type { Mariadb } from "@dokploy/server/services/mariadb"; import { findProjectById } from "@dokploy/server/services/project"; @@ -8,7 +7,7 @@ import { } from "../docker/utils"; import { sendDatabaseBackupNotifications } from "../notifications/database-backup"; import { execAsync, execAsyncRemote } from "../process/execAsync"; -import { getS3Credentials } from "./utils"; +import { getS3Credentials, normalizeS3Path } from "./utils"; export const runMariadbBackup = async ( mariadb: Mariadb, @@ -19,7 +18,7 @@ export const runMariadbBackup = async ( const { prefix, database } = backup; const destination = backup.destination; const backupFileName = `${new Date().toISOString()}.sql.gz`; - const bucketDestination = path.join(prefix, backupFileName); + const bucketDestination = `${normalizeS3Path(prefix)}${backupFileName}`; try { const rcloneFlags = getS3Credentials(destination); diff --git a/packages/server/src/utils/backups/mongo.ts b/packages/server/src/utils/backups/mongo.ts index a40ec4f4..a043a5a7 100644 --- a/packages/server/src/utils/backups/mongo.ts +++ b/packages/server/src/utils/backups/mongo.ts @@ -1,4 +1,3 @@ -import path from "node:path"; import type { BackupSchedule } from "@dokploy/server/services/backup"; import type { Mongo } from "@dokploy/server/services/mongo"; import { findProjectById } from 
"@dokploy/server/services/project"; @@ -8,7 +7,7 @@ import { } from "../docker/utils"; import { sendDatabaseBackupNotifications } from "../notifications/database-backup"; import { execAsync, execAsyncRemote } from "../process/execAsync"; -import { getS3Credentials } from "./utils"; +import { getS3Credentials, normalizeS3Path } from "./utils"; // mongodb://mongo:Bqh7AQl-PRbnBu@localhost:27017/?tls=false&directConnection=true export const runMongoBackup = async (mongo: Mongo, backup: BackupSchedule) => { @@ -17,7 +16,7 @@ export const runMongoBackup = async (mongo: Mongo, backup: BackupSchedule) => { const { prefix, database } = backup; const destination = backup.destination; const backupFileName = `${new Date().toISOString()}.dump.gz`; - const bucketDestination = path.join(prefix, backupFileName); + const bucketDestination = `${normalizeS3Path(prefix)}${backupFileName}`; try { const rcloneFlags = getS3Credentials(destination); diff --git a/packages/server/src/utils/backups/mysql.ts b/packages/server/src/utils/backups/mysql.ts index 1272fc3e..d98a8ecc 100644 --- a/packages/server/src/utils/backups/mysql.ts +++ b/packages/server/src/utils/backups/mysql.ts @@ -1,4 +1,3 @@ -import path from "node:path"; import type { BackupSchedule } from "@dokploy/server/services/backup"; import type { MySql } from "@dokploy/server/services/mysql"; import { findProjectById } from "@dokploy/server/services/project"; @@ -8,7 +7,7 @@ import { } from "../docker/utils"; import { sendDatabaseBackupNotifications } from "../notifications/database-backup"; import { execAsync, execAsyncRemote } from "../process/execAsync"; -import { getS3Credentials } from "./utils"; +import { getS3Credentials, normalizeS3Path } from "./utils"; export const runMySqlBackup = async (mysql: MySql, backup: BackupSchedule) => { const { appName, databaseRootPassword, projectId, name } = mysql; @@ -16,7 +15,7 @@ export const runMySqlBackup = async (mysql: MySql, backup: BackupSchedule) => { const { prefix, database } = 
backup; const destination = backup.destination; const backupFileName = `${new Date().toISOString()}.sql.gz`; - const bucketDestination = path.join(prefix, backupFileName); + const bucketDestination = `${normalizeS3Path(prefix)}${backupFileName}`; try { const rcloneFlags = getS3Credentials(destination); diff --git a/packages/server/src/utils/backups/postgres.ts b/packages/server/src/utils/backups/postgres.ts index 5ada2aa9..cac582f7 100644 --- a/packages/server/src/utils/backups/postgres.ts +++ b/packages/server/src/utils/backups/postgres.ts @@ -1,4 +1,3 @@ -import path from "node:path"; import type { BackupSchedule } from "@dokploy/server/services/backup"; import type { Postgres } from "@dokploy/server/services/postgres"; import { findProjectById } from "@dokploy/server/services/project"; @@ -8,7 +7,7 @@ import { } from "../docker/utils"; import { sendDatabaseBackupNotifications } from "../notifications/database-backup"; import { execAsync, execAsyncRemote } from "../process/execAsync"; -import { getS3Credentials } from "./utils"; +import { getS3Credentials, normalizeS3Path } from "./utils"; export const runPostgresBackup = async ( postgres: Postgres, @@ -20,7 +19,7 @@ export const runPostgresBackup = async ( const { prefix, database } = backup; const destination = backup.destination; const backupFileName = `${new Date().toISOString()}.sql.gz`; - const bucketDestination = path.join(prefix, backupFileName); + const bucketDestination = `${normalizeS3Path(prefix)}${backupFileName}`; try { const rcloneFlags = getS3Credentials(destination); const rcloneDestination = `:s3:${destination.bucket}/${bucketDestination}`; diff --git a/packages/server/src/utils/backups/utils.ts b/packages/server/src/utils/backups/utils.ts index 1abf7be0..df3c8339 100644 --- a/packages/server/src/utils/backups/utils.ts +++ b/packages/server/src/utils/backups/utils.ts @@ -36,6 +36,13 @@ export const removeScheduleBackup = (backupId: string) => { currentJob?.cancel(); }; +export const 
normalizeS3Path = (prefix: string) => { + // Trim whitespace and remove leading/trailing slashes + const normalizedPrefix = prefix.trim().replace(/^\/+|\/+$/g, ""); + // Return empty string if prefix is empty, otherwise append trailing slash + return normalizedPrefix ? `${normalizedPrefix}/` : ""; +}; + export const getS3Credentials = (destination: Destination) => { const { accessKey, secretAccessKey, region, endpoint, provider } = destination; diff --git a/packages/server/src/utils/backups/web-server.ts b/packages/server/src/utils/backups/web-server.ts index 264ff764..2dea3d81 100644 --- a/packages/server/src/utils/backups/web-server.ts +++ b/packages/server/src/utils/backups/web-server.ts @@ -1,6 +1,6 @@ import type { BackupSchedule } from "@dokploy/server/services/backup"; import { execAsync } from "../process/execAsync"; -import { getS3Credentials } from "./utils"; +import { getS3Credentials, normalizeS3Path } from "./utils"; import { findDestinationById } from "@dokploy/server/services/destination"; import { IS_CLOUD, paths } from "@dokploy/server/constants"; import { mkdtemp } from "node:fs/promises"; @@ -18,18 +18,30 @@ export const runWebServerBackup = async (backup: BackupSchedule) => { const { BASE_PATH } = paths(); const tempDir = await mkdtemp(join(tmpdir(), "dokploy-backup-")); const backupFileName = `webserver-backup-${timestamp}.zip`; - const s3Path = `:s3:${destination.bucket}/${backup.prefix}${backupFileName}`; + const s3Path = `:s3:${destination.bucket}/${normalizeS3Path(backup.prefix)}${backupFileName}`; try { await execAsync(`mkdir -p ${tempDir}/filesystem`); - const postgresCommand = `docker exec $(docker ps --filter "name=dokploy-postgres" -q) pg_dump -v -Fc -U dokploy -d dokploy > ${tempDir}/database.sql`; + // First get the container ID + const { stdout: containerId } = await execAsync( + `docker ps --filter "name=dokploy-postgres" --filter "status=running" -q | head -n 1`, + ); + + if (!containerId) { + throw new Error("PostgreSQL container 
not found"); + } + + const postgresContainerId = containerId.trim(); + + const postgresCommand = `docker exec ${postgresContainerId} pg_dump -v -Fc -U dokploy -d dokploy > '${tempDir}/database.sql'`; await execAsync(postgresCommand); await execAsync(`cp -r ${BASE_PATH}/* ${tempDir}/filesystem/`); await execAsync( - `cd ${tempDir} && zip -r ${backupFileName} database.sql filesystem/`, + // Zip all .sql files since we created more than one + `cd ${tempDir} && zip -r ${backupFileName} *.sql filesystem/ > /dev/null 2>&1`, ); const uploadCommand = `rclone copyto ${rcloneFlags.join(" ")} "${tempDir}/${backupFileName}" "${s3Path}"`; diff --git a/packages/server/src/utils/builders/railpack.ts b/packages/server/src/utils/builders/railpack.ts index 612e02cf..55fd4049 100644 --- a/packages/server/src/utils/builders/railpack.ts +++ b/packages/server/src/utils/builders/railpack.ts @@ -84,7 +84,7 @@ export const buildRailpack = async ( for (const envVar of envVariables) { const [key, value] = envVar.split("="); if (key && value) { - buildArgs.push("--secret", `id=${key},env=${key}`); + buildArgs.push("--secret", `id=${key},env='${key}'`); env[key] = value; } } @@ -132,7 +132,7 @@ export const getRailpackCommand = ( ]; for (const env of envVariables) { - prepareArgs.push("--env", env); + prepareArgs.push("--env", `'${env}'`); } // Calculate secrets hash for layer invalidation @@ -164,7 +164,7 @@ export const getRailpackCommand = ( for (const envVar of envVariables) { const [key, value] = envVar.split("="); if (key && value) { - buildArgs.push("--secret", `id=${key},env=${key}`); + buildArgs.push("--secret", `id=${key},env='${key}'`); exportEnvs.push(`export ${key}=${value}`); } } diff --git a/packages/server/src/utils/builders/static.ts b/packages/server/src/utils/builders/static.ts index c46bdf2e..f7fc87ca 100644 --- a/packages/server/src/utils/builders/static.ts +++ b/packages/server/src/utils/builders/static.ts @@ -25,6 +25,12 @@ export const buildStatic = async ( 
].join("\n"), ); + createFile( + buildAppDirectory, + ".dockerignore", + [".git", ".env", "Dockerfile", ".dockerignore"].join("\n"), + ); + await buildCustomDocker( { ...application, diff --git a/packages/server/src/utils/docker/domain.ts b/packages/server/src/utils/docker/domain.ts index 5a68146a..4f008397 100644 --- a/packages/server/src/utils/docker/domain.ts +++ b/packages/server/src/utils/docker/domain.ts @@ -249,6 +249,11 @@ export const addDomainToCompose = async ( labels.unshift("traefik.enable=true"); } labels.unshift(...httpLabels); + if (!compose.isolatedDeployment) { + if (!labels.includes("traefik.docker.network=dokploy-network")) { + labels.unshift("traefik.docker.network=dokploy-network"); + } + } } if (!compose.isolatedDeployment) { diff --git a/packages/server/src/utils/notifications/dokploy-restart.ts b/packages/server/src/utils/notifications/dokploy-restart.ts index 5a156aff..53ccf05d 100644 --- a/packages/server/src/utils/notifications/dokploy-restart.ts +++ b/packages/server/src/utils/notifications/dokploy-restart.ts @@ -40,68 +40,84 @@ export const sendDokployRestartNotifications = async () => { const decorate = (decoration: string, text: string) => `${discord.decoration ? 
decoration : ""} ${text}`.trim(); - await sendDiscordNotification(discord, { - title: decorate(">", "`✅` Dokploy Server Restarted"), - color: 0x57f287, - fields: [ - { - name: decorate("`📅`", "Date"), - value: ``, - inline: true, + try { + await sendDiscordNotification(discord, { + title: decorate(">", "`✅` Dokploy Server Restarted"), + color: 0x57f287, + fields: [ + { + name: decorate("`📅`", "Date"), + value: ``, + inline: true, + }, + { + name: decorate("`⌚`", "Time"), + value: ``, + inline: true, + }, + { + name: decorate("`❓`", "Type"), + value: "Successful", + inline: true, + }, + ], + timestamp: date.toISOString(), + footer: { + text: "Dokploy Restart Notification", }, - { - name: decorate("`⌚`", "Time"), - value: ``, - inline: true, - }, - { - name: decorate("`❓`", "Type"), - value: "Successful", - inline: true, - }, - ], - timestamp: date.toISOString(), - footer: { - text: "Dokploy Restart Notification", - }, - }); + }); + } catch (error) { + console.log(error); + } } if (gotify) { const decorate = (decoration: string, text: string) => `${gotify.decoration ? 
decoration : ""} ${text}\n`; - await sendGotifyNotification( - gotify, - decorate("✅", "Dokploy Server Restarted"), - `${decorate("🕒", `Date: ${date.toLocaleString()}`)}`, - ); + try { + await sendGotifyNotification( + gotify, + decorate("✅", "Dokploy Server Restarted"), + `${decorate("🕒", `Date: ${date.toLocaleString()}`)}`, + ); + } catch (error) { + console.log(error); + } } if (telegram) { - await sendTelegramNotification( - telegram, - `✅ Dokploy Server Restarted\n\nDate: ${format(date, "PP")}\nTime: ${format(date, "pp")}`, - ); + try { + await sendTelegramNotification( + telegram, + `✅ Dokploy Server Restarted\n\nDate: ${format(date, "PP")}\nTime: ${format(date, "pp")}`, + ); + } catch (error) { + console.log(error); + } } if (slack) { const { channel } = slack; - await sendSlackNotification(slack, { - channel: channel, - attachments: [ - { - color: "#00FF00", - pretext: ":white_check_mark: *Dokploy Server Restarted*", - fields: [ - { - title: "Time", - value: date.toLocaleString(), - short: true, - }, - ], - }, - ], - }); + try { + await sendSlackNotification(slack, { + channel: channel, + attachments: [ + { + color: "#00FF00", + pretext: ":white_check_mark: *Dokploy Server Restarted*", + fields: [ + { + title: "Time", + value: date.toLocaleString(), + short: true, + }, + ], + }, + ], + }); + } catch (error) { + console.log(error); + } } } }; diff --git a/packages/server/src/utils/process/execAsync.ts b/packages/server/src/utils/process/execAsync.ts index aee1e821..c3e40907 100644 --- a/packages/server/src/utils/process/execAsync.ts +++ b/packages/server/src/utils/process/execAsync.ts @@ -1,9 +1,48 @@ -import { exec } from "node:child_process"; +import { exec, execFile } from "node:child_process"; import util from "node:util"; import { findServerById } from "@dokploy/server/services/server"; import { Client } from "ssh2"; + export const execAsync = util.promisify(exec); +export const execFileAsync = async ( + command: string, + args: string[], + options: { 
input?: string } = {}, +): Promise<{ stdout: string; stderr: string }> => { + const child = execFile(command, args); + + if (options.input && child.stdin) { + child.stdin.write(options.input); + child.stdin.end(); + } + + return new Promise((resolve, reject) => { + let stdout = ""; + let stderr = ""; + + child.stdout?.on("data", (data) => { + stdout += data.toString(); + }); + + child.stderr?.on("data", (data) => { + stderr += data.toString(); + }); + + child.on("close", (code) => { + if (code === 0) { + resolve({ stdout, stderr }); + } else { + reject( + new Error(`Command failed with code ${code}. Stderr: ${stderr}`), + ); + } + }); + + child.on("error", reject); + }); +}; + export const execAsyncRemote = async ( serverId: string | null, command: string, diff --git a/packages/server/src/utils/providers/bitbucket.ts b/packages/server/src/utils/providers/bitbucket.ts index 11797a45..279ef0ce 100644 --- a/packages/server/src/utils/providers/bitbucket.ts +++ b/packages/server/src/utils/providers/bitbucket.ts @@ -37,6 +37,7 @@ export const cloneBitbucketRepository = async ( bitbucketBranch, bitbucketId, bitbucket, + enableSubmodules, } = entity; if (!bitbucketId) { @@ -53,25 +54,23 @@ export const cloneBitbucketRepository = async ( const cloneUrl = `https://${bitbucket?.bitbucketUsername}:${bitbucket?.appPassword}@${repoclone}`; try { writeStream.write(`\nCloning Repo ${repoclone} to ${outputPath}: ✅\n`); - await spawnAsync( - "git", - [ - "clone", - "--branch", - bitbucketBranch!, - "--depth", - "1", - "--recurse-submodules", - cloneUrl, - outputPath, - "--progress", - ], - (data) => { - if (writeStream.writable) { - writeStream.write(data); - } - }, - ); + const cloneArgs = [ + "clone", + "--branch", + bitbucketBranch!, + "--depth", + "1", + ...(enableSubmodules ? 
["--recurse-submodules"] : []), + cloneUrl, + outputPath, + "--progress", + ]; + + await spawnAsync("git", cloneArgs, (data) => { + if (writeStream.writable) { + writeStream.write(data); + } + }); writeStream.write(`\nCloned ${repoclone} to ${outputPath}: ✅\n`); } catch (error) { writeStream.write(`ERROR Clonning: ${error}: ❌`); @@ -89,6 +88,7 @@ export const cloneRawBitbucketRepository = async (entity: Compose) => { bitbucketOwner, bitbucketBranch, bitbucketId, + enableSubmodules, } = entity; if (!bitbucketId) { @@ -106,17 +106,19 @@ export const cloneRawBitbucketRepository = async (entity: Compose) => { const cloneUrl = `https://${bitbucketProvider?.bitbucketUsername}:${bitbucketProvider?.appPassword}@${repoclone}`; try { - await spawnAsync("git", [ + const cloneArgs = [ "clone", "--branch", bitbucketBranch!, "--depth", "1", - "--recurse-submodules", + ...(enableSubmodules ? ["--recurse-submodules"] : []), cloneUrl, outputPath, "--progress", - ]); + ]; + + await spawnAsync("git", cloneArgs); } catch (error) { throw error; } @@ -131,6 +133,7 @@ export const cloneRawBitbucketRepositoryRemote = async (compose: Compose) => { bitbucketBranch, bitbucketId, serverId, + enableSubmodules, } = compose; if (!serverId) { @@ -153,11 +156,11 @@ export const cloneRawBitbucketRepositoryRemote = async (compose: Compose) => { const cloneUrl = `https://${bitbucketProvider?.bitbucketUsername}:${bitbucketProvider?.appPassword}@${repoclone}`; try { - const command = ` + const cloneCommand = ` rm -rf ${outputPath}; - git clone --branch ${bitbucketBranch} --depth 1 --recurse-submodules ${cloneUrl} ${outputPath} + git clone --branch ${bitbucketBranch} --depth 1 ${enableSubmodules ? 
"--recurse-submodules" : ""} ${cloneUrl} ${outputPath} `; - await execAsyncRemote(serverId, command); + await execAsyncRemote(serverId, cloneCommand); } catch (error) { throw error; } @@ -176,6 +179,7 @@ export const getBitbucketCloneCommand = async ( bitbucketBranch, bitbucketId, serverId, + enableSubmodules, } = entity; if (!serverId) { @@ -207,7 +211,7 @@ export const getBitbucketCloneCommand = async ( const cloneCommand = ` rm -rf ${outputPath}; mkdir -p ${outputPath}; -if ! git clone --branch ${bitbucketBranch} --depth 1 --recurse-submodules --progress ${cloneUrl} ${outputPath} >> ${logPath} 2>&1; then +if ! git clone --branch ${bitbucketBranch} --depth 1 ${enableSubmodules ? "--recurse-submodules" : ""} --progress ${cloneUrl} ${outputPath} >> ${logPath} 2>&1; then echo "❌ [ERROR] Fail to clone the repository ${repoclone}" >> ${logPath}; exit 1; fi diff --git a/packages/server/src/utils/providers/git.ts b/packages/server/src/utils/providers/git.ts index c26af3af..4bd06966 100644 --- a/packages/server/src/utils/providers/git.ts +++ b/packages/server/src/utils/providers/git.ts @@ -17,12 +17,19 @@ export const cloneGitRepository = async ( customGitUrl?: string | null; customGitBranch?: string | null; customGitSSHKeyId?: string | null; + enableSubmodules?: boolean; }, logPath: string, isCompose = false, ) => { const { SSH_PATH, COMPOSE_PATH, APPLICATIONS_PATH } = paths(); - const { appName, customGitUrl, customGitBranch, customGitSSHKeyId } = entity; + const { + appName, + customGitUrl, + customGitBranch, + customGitSSHKeyId, + enableSubmodules, + } = entity; if (!customGitUrl || !customGitBranch) { throw new TRPCError({ @@ -70,19 +77,21 @@ export const cloneGitRepository = async ( } const { port } = sanitizeRepoPathSSH(customGitUrl); + const cloneArgs = [ + "clone", + "--branch", + customGitBranch, + "--depth", + "1", + ...(enableSubmodules ? 
["--recurse-submodules"] : []), + customGitUrl, + outputPath, + "--progress", + ]; + await spawnAsync( "git", - [ - "clone", - "--branch", - customGitBranch, - "--depth", - "1", - "--recurse-submodules", - customGitUrl, - outputPath, - "--progress", - ], + cloneArgs, (data) => { if (writeStream.writable) { writeStream.write(data); @@ -114,6 +123,7 @@ export const getCustomGitCloneCommand = async ( customGitBranch?: string | null; customGitSSHKeyId?: string | null; serverId: string | null; + enableSubmodules: boolean; }, logPath: string, isCompose = false, @@ -125,6 +135,7 @@ export const getCustomGitCloneCommand = async ( customGitBranch, customGitSSHKeyId, serverId, + enableSubmodules, } = entity; if (!customGitUrl || !customGitBranch) { @@ -181,7 +192,7 @@ export const getCustomGitCloneCommand = async ( } command.push( - `if ! git clone --branch ${customGitBranch} --depth 1 --recurse-submodules --progress ${customGitUrl} ${outputPath} >> ${logPath} 2>&1; then + `if ! git clone --branch ${customGitBranch} --depth 1 ${enableSubmodules ? 
"--recurse-submodules" : ""} --progress ${customGitUrl} ${outputPath} >> ${logPath} 2>&1; then echo "❌ [ERROR] Fail to clone the repository ${customGitUrl}" >> ${logPath}; exit 1; fi @@ -261,8 +272,15 @@ export const cloneGitRawRepository = async (entity: { customGitUrl?: string | null; customGitBranch?: string | null; customGitSSHKeyId?: string | null; + enableSubmodules?: boolean; }) => { - const { appName, customGitUrl, customGitBranch, customGitSSHKeyId } = entity; + const { + appName, + customGitUrl, + customGitBranch, + customGitSSHKeyId, + enableSubmodules, + } = entity; if (!customGitUrl || !customGitBranch) { throw new TRPCError({ @@ -307,29 +325,26 @@ export const cloneGitRawRepository = async (entity: { } const { port } = sanitizeRepoPathSSH(customGitUrl); - await spawnAsync( - "git", - [ - "clone", - "--branch", - customGitBranch, - "--depth", - "1", - "--recurse-submodules", - customGitUrl, - outputPath, - "--progress", - ], - (_data) => {}, - { - env: { - ...process.env, - ...(customGitSSHKeyId && { - GIT_SSH_COMMAND: `ssh -i ${temporalKeyPath}${port ? ` -p ${port}` : ""} -o UserKnownHostsFile=${knownHostsPath}`, - }), - }, + const cloneArgs = [ + "clone", + "--branch", + customGitBranch, + "--depth", + "1", + ...(enableSubmodules ? ["--recurse-submodules"] : []), + customGitUrl, + outputPath, + "--progress", + ]; + + await spawnAsync("git", cloneArgs, (_data) => {}, { + env: { + ...process.env, + ...(customGitSSHKeyId && { + GIT_SSH_COMMAND: `ssh -i ${temporalKeyPath}${port ? ` -p ${port}` : ""} -o UserKnownHostsFile=${knownHostsPath}`, + }), }, - ); + }); } catch (error) { throw error; } @@ -342,6 +357,7 @@ export const cloneRawGitRepositoryRemote = async (compose: Compose) => { customGitUrl, customGitSSHKeyId, serverId, + enableSubmodules, } = compose; if (!serverId) { @@ -396,7 +412,7 @@ export const cloneRawGitRepositoryRemote = async (compose: Compose) => { } command.push( - `if ! 
git clone --branch ${customGitBranch} --depth 1 --recurse-submodules --progress ${customGitUrl} ${outputPath} ; then + `if ! git clone --branch ${customGitBranch} --depth 1 ${enableSubmodules ? "--recurse-submodules" : ""} --progress ${customGitUrl} ${outputPath} ; then echo "[ERROR] Fail to clone the repository "; exit 1; fi diff --git a/packages/server/src/utils/providers/gitea.ts b/packages/server/src/utils/providers/gitea.ts index 68760559..fc8e05a4 100644 --- a/packages/server/src/utils/providers/gitea.ts +++ b/packages/server/src/utils/providers/gitea.ts @@ -119,6 +119,7 @@ export const getGiteaCloneCommand = async ( giteaRepository, serverId, gitea, + enableSubmodules, } = entity; if (!serverId) { @@ -155,7 +156,7 @@ export const getGiteaCloneCommand = async ( rm -rf ${outputPath}; mkdir -p ${outputPath}; - if ! git clone --branch ${giteaBranch} --depth 1 --recurse-submodules ${cloneUrl} ${outputPath} >> ${logPath} 2>&1; then + if ! git clone --branch ${giteaBranch} --depth 1 ${enableSubmodules ? "--recurse-submodules" : ""} ${cloneUrl} ${outputPath} >> ${logPath} 2>&1; then echo "❌ [ERROR] Failed to clone the repository ${repoClone}" >> ${logPath}; exit 1; fi @@ -174,7 +175,14 @@ export const cloneGiteaRepository = async ( const { APPLICATIONS_PATH, COMPOSE_PATH } = paths(); const writeStream = createWriteStream(logPath, { flags: "a" }); - const { appName, giteaBranch, giteaId, giteaOwner, giteaRepository } = entity; + const { + appName, + giteaBranch, + giteaId, + giteaOwner, + giteaRepository, + enableSubmodules, + } = entity; if (!giteaId) { throw new TRPCError({ @@ -211,7 +219,7 @@ export const cloneGiteaRepository = async ( giteaBranch!, "--depth", "1", - "--recurse-submodules", + ...(enableSubmodules ? 
["--recurse-submodules"] : []), cloneUrl, outputPath, "--progress", @@ -232,7 +240,14 @@ export const cloneGiteaRepository = async ( }; export const cloneRawGiteaRepository = async (entity: Compose) => { - const { appName, giteaRepository, giteaOwner, giteaBranch, giteaId } = entity; + const { + appName, + giteaRepository, + giteaOwner, + giteaBranch, + giteaId, + enableSubmodules, + } = entity; const { COMPOSE_PATH } = paths(); if (!giteaId) { @@ -265,7 +280,7 @@ export const cloneRawGiteaRepository = async (entity: Compose) => { giteaBranch!, "--depth", "1", - "--recurse-submodules", + ...(enableSubmodules ? ["--recurse-submodules"] : []), cloneUrl, outputPath, "--progress", @@ -283,6 +298,7 @@ export const cloneRawGiteaRepositoryRemote = async (compose: Compose) => { giteaBranch, giteaId, serverId, + enableSubmodules, } = compose; if (!serverId) { @@ -307,7 +323,7 @@ export const cloneRawGiteaRepositoryRemote = async (compose: Compose) => { try { const command = ` rm -rf ${outputPath}; - git clone --branch ${giteaBranch} --depth 1 ${cloneUrl} ${outputPath} + git clone --branch ${giteaBranch} --depth 1 ${enableSubmodules ? 
"--recurse-submodules" : ""} ${cloneUrl} ${outputPath} `; await execAsyncRemote(serverId, command); } catch (error) { diff --git a/packages/server/src/utils/providers/github.ts b/packages/server/src/utils/providers/github.ts index c366eeba..17a5e0cc 100644 --- a/packages/server/src/utils/providers/github.ts +++ b/packages/server/src/utils/providers/github.ts @@ -83,6 +83,7 @@ interface CloneGithubRepository { repository: string | null; logPath: string; type?: "application" | "compose"; + enableSubmodules: boolean; } export const cloneGithubRepository = async ({ logPath, @@ -92,7 +93,8 @@ export const cloneGithubRepository = async ({ const isCompose = type === "compose"; const { APPLICATIONS_PATH, COMPOSE_PATH } = paths(); const writeStream = createWriteStream(logPath, { flags: "a" }); - const { appName, repository, owner, branch, githubId } = entity; + const { appName, repository, owner, branch, githubId, enableSubmodules } = + entity; if (!githubId) { throw new TRPCError({ @@ -128,25 +130,23 @@ export const cloneGithubRepository = async ({ try { writeStream.write(`\nClonning Repo ${repoclone} to ${outputPath}: ✅\n`); - await spawnAsync( - "git", - [ - "clone", - "--branch", - branch!, - "--depth", - "1", - "--recurse-submodules", - cloneUrl, - outputPath, - "--progress", - ], - (data) => { - if (writeStream.writable) { - writeStream.write(data); - } - }, - ); + const cloneArgs = [ + "clone", + "--branch", + branch!, + "--depth", + "1", + ...(enableSubmodules ? 
["--recurse-submodules"] : []), + cloneUrl, + outputPath, + "--progress", + ]; + + await spawnAsync("git", cloneArgs, (data) => { + if (writeStream.writable) { + writeStream.write(data); + } + }); writeStream.write(`\nCloned ${repoclone}: ✅\n`); } catch (error) { writeStream.write(`ERROR Clonning: ${error}: ❌`); @@ -161,7 +161,15 @@ export const getGithubCloneCommand = async ({ type = "application", ...entity }: CloneGithubRepository & { serverId: string }) => { - const { appName, repository, owner, branch, githubId, serverId } = entity; + const { + appName, + repository, + owner, + branch, + githubId, + serverId, + enableSubmodules, + } = entity; const isCompose = type === "compose"; if (!serverId) { throw new TRPCError({ @@ -216,7 +224,7 @@ export const getGithubCloneCommand = async ({ const cloneCommand = ` rm -rf ${outputPath}; mkdir -p ${outputPath}; -if ! git clone --branch ${branch} --depth 1 --recurse-submodules --progress ${cloneUrl} ${outputPath} >> ${logPath} 2>&1; then +if ! git clone --branch ${branch} --depth 1 ${enableSubmodules ? "--recurse-submodules" : ""} --progress ${cloneUrl} ${outputPath} >> ${logPath} 2>&1; then echo "❌ [ERROR] Fail to clone repository ${repoclone}" >> ${logPath}; exit 1; fi @@ -227,7 +235,8 @@ echo "Cloned ${repoclone} to ${outputPath}: ✅" >> ${logPath}; }; export const cloneRawGithubRepository = async (entity: Compose) => { - const { appName, repository, owner, branch, githubId } = entity; + const { appName, repository, owner, branch, githubId, enableSubmodules } = + entity; if (!githubId) { throw new TRPCError({ @@ -245,24 +254,33 @@ export const cloneRawGithubRepository = async (entity: Compose) => { await recreateDirectory(outputPath); const cloneUrl = `https://oauth2:${token}@${repoclone}`; try { - await spawnAsync("git", [ + const cloneArgs = [ "clone", "--branch", branch!, "--depth", "1", - "--recurse-submodules", + ...(enableSubmodules ? 
["--recurse-submodules"] : []), cloneUrl, outputPath, "--progress", - ]); + ]; + await spawnAsync("git", cloneArgs); } catch (error) { throw error; } }; export const cloneRawGithubRepositoryRemote = async (compose: Compose) => { - const { appName, repository, owner, branch, githubId, serverId } = compose; + const { + appName, + repository, + owner, + branch, + githubId, + serverId, + enableSubmodules, + } = compose; if (!serverId) { throw new TRPCError({ @@ -288,7 +306,7 @@ export const cloneRawGithubRepositoryRemote = async (compose: Compose) => { try { const command = ` rm -rf ${outputPath}; - git clone --branch ${branch} --depth 1 ${cloneUrl} ${outputPath} + git clone --branch ${branch} --depth 1 ${enableSubmodules ? "--recurse-submodules" : ""} ${cloneUrl} ${outputPath} `; await execAsyncRemote(serverId, command); } catch (error) { diff --git a/packages/server/src/utils/providers/gitlab.ts b/packages/server/src/utils/providers/gitlab.ts index facdeb59..9e848fd5 100644 --- a/packages/server/src/utils/providers/gitlab.ts +++ b/packages/server/src/utils/providers/gitlab.ts @@ -90,8 +90,14 @@ export const cloneGitlabRepository = async ( isCompose = false, ) => { const writeStream = createWriteStream(logPath, { flags: "a" }); - const { appName, gitlabBranch, gitlabId, gitlab, gitlabPathNamespace } = - entity; + const { + appName, + gitlabBranch, + gitlabId, + gitlab, + gitlabPathNamespace, + enableSubmodules, + } = entity; if (!gitlabId) { throw new TRPCError({ @@ -127,25 +133,23 @@ export const cloneGitlabRepository = async ( try { writeStream.write(`\nClonning Repo ${repoclone} to ${outputPath}: ✅\n`); - await spawnAsync( - "git", - [ - "clone", - "--branch", - gitlabBranch!, - "--depth", - "1", - "--recurse-submodules", - cloneUrl, - outputPath, - "--progress", - ], - (data) => { - if (writeStream.writable) { - writeStream.write(data); - } - }, - ); + const cloneArgs = [ + "clone", + "--branch", + gitlabBranch!, + "--depth", + "1", + ...(enableSubmodules ? 
["--recurse-submodules"] : []), + cloneUrl, + outputPath, + "--progress", + ]; + + await spawnAsync("git", cloneArgs, (data) => { + if (writeStream.writable) { + writeStream.write(data); + } + }); writeStream.write(`\nCloned ${repoclone}: ✅\n`); } catch (error) { writeStream.write(`ERROR Clonning: ${error}: ❌`); @@ -167,6 +171,7 @@ export const getGitlabCloneCommand = async ( gitlabId, serverId, gitlab, + enableSubmodules, } = entity; if (!serverId) { @@ -222,7 +227,7 @@ export const getGitlabCloneCommand = async ( const cloneCommand = ` rm -rf ${outputPath}; mkdir -p ${outputPath}; -if ! git clone --branch ${gitlabBranch} --depth 1 --recurse-submodules --progress ${cloneUrl} ${outputPath} >> ${logPath} 2>&1; then +if ! git clone --branch ${gitlabBranch} --depth 1 ${enableSubmodules ? "--recurse-submodules" : ""} --progress ${cloneUrl} ${outputPath} >> ${logPath} 2>&1; then echo "❌ [ERROR] Fail to clone the repository ${repoclone}" >> ${logPath}; exit 1; fi @@ -264,7 +269,11 @@ export const getGitlabRepositories = async (gitlabId?: string) => { const groupName = gitlabProvider.groupName?.toLowerCase(); if (groupName) { - return full_path.toLowerCase().includes(groupName) && kind === "group"; + const isIncluded = groupName + .split(",") + .some((name) => full_path.toLowerCase().includes(name)); + + return isIncluded && kind === "group"; } return kind === "user"; }); @@ -326,7 +335,13 @@ export const getGitlabBranches = async (input: { }; export const cloneRawGitlabRepository = async (entity: Compose) => { - const { appName, gitlabBranch, gitlabId, gitlabPathNamespace } = entity; + const { + appName, + gitlabBranch, + gitlabId, + gitlabPathNamespace, + enableSubmodules, + } = entity; if (!gitlabId) { throw new TRPCError({ @@ -347,24 +362,32 @@ export const cloneRawGitlabRepository = async (entity: Compose) => { const cloneUrl = `https://oauth2:${gitlabProvider?.accessToken}@${repoclone}`; try { - await spawnAsync("git", [ + const cloneArgs = [ "clone", "--branch", 
gitlabBranch!, "--depth", "1", - "--recurse-submodules", + ...(enableSubmodules ? ["--recurse-submodules"] : []), cloneUrl, outputPath, "--progress", - ]); + ]; + await spawnAsync("git", cloneArgs); } catch (error) { throw error; } }; export const cloneRawGitlabRepositoryRemote = async (compose: Compose) => { - const { appName, gitlabPathNamespace, branch, gitlabId, serverId } = compose; + const { + appName, + gitlabPathNamespace, + branch, + gitlabId, + serverId, + enableSubmodules, + } = compose; if (!serverId) { throw new TRPCError({ @@ -388,7 +411,7 @@ export const cloneRawGitlabRepositoryRemote = async (compose: Compose) => { try { const command = ` rm -rf ${outputPath}; - git clone --branch ${branch} --depth 1 --recurse-submodules ${cloneUrl} ${outputPath} + git clone --branch ${branch} --depth 1 ${enableSubmodules ? "--recurse-submodules" : ""} ${cloneUrl} ${outputPath} `; await execAsyncRemote(serverId, command); } catch (error) { @@ -431,7 +454,9 @@ export const testGitlabConnection = async ( const { full_path, kind } = repo.namespace; if (groupName) { - return full_path.toLowerCase().includes(groupName) && kind === "group"; + return groupName + .split(",") + .some((name) => full_path.toLowerCase().includes(name)); } return kind === "user"; }); diff --git a/packages/server/src/utils/restore/web-server.ts b/packages/server/src/utils/restore/web-server.ts index fb810a47..8397dcf2 100644 --- a/packages/server/src/utils/restore/web-server.ts +++ b/packages/server/src/utils/restore/web-server.ts @@ -45,7 +45,7 @@ export const restoreWebServerBackup = async ( // Extract backup emit("Extracting backup..."); - await execAsync(`cd ${tempDir} && unzip ${backupFile}`); + await execAsync(`cd ${tempDir} && unzip ${backupFile} > /dev/null 2>&1`); // Restore filesystem first emit("Restoring filesystem..."); @@ -83,44 +83,54 @@ export const restoreWebServerBackup = async ( throw new Error("Database file not found after extraction"); } + const { stdout: postgresContainer } 
= await execAsync( + `docker ps --filter "name=dokploy-postgres" --filter "status=running" -q | head -n 1`, + ); + + if (!postgresContainer) { + throw new Error("Dokploy Postgres container not found"); + } + + const postgresContainerId = postgresContainer.trim(); + // Drop and recreate database emit("Disconnecting all users from database..."); await execAsync( - `docker exec $(docker ps --filter "name=dokploy-postgres" -q) psql -U dokploy postgres -c "SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity WHERE pg_stat_activity.datname = 'dokploy' AND pid <> pg_backend_pid();"`, + `docker exec ${postgresContainerId} psql -U dokploy postgres -c "SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity WHERE pg_stat_activity.datname = 'dokploy' AND pid <> pg_backend_pid();"`, ); emit("Dropping existing database..."); await execAsync( - `docker exec $(docker ps --filter "name=dokploy-postgres" -q) psql -U dokploy postgres -c "DROP DATABASE IF EXISTS dokploy;"`, + `docker exec ${postgresContainerId} psql -U dokploy postgres -c "DROP DATABASE IF EXISTS dokploy;"`, ); emit("Creating fresh database..."); await execAsync( - `docker exec $(docker ps --filter "name=dokploy-postgres" -q) psql -U dokploy postgres -c "CREATE DATABASE dokploy;"`, + `docker exec ${postgresContainerId} psql -U dokploy postgres -c "CREATE DATABASE dokploy;"`, ); // Copy the backup file into the container emit("Copying backup file into container..."); await execAsync( - `docker cp ${tempDir}/database.sql $(docker ps --filter "name=dokploy-postgres" -q):/tmp/database.sql`, + `docker cp ${tempDir}/database.sql ${postgresContainerId}:/tmp/database.sql`, ); // Verify file in container emit("Verifying file in container..."); await execAsync( - `docker exec $(docker ps --filter "name=dokploy-postgres" -q) ls -l /tmp/database.sql`, + `docker exec ${postgresContainerId} ls -l /tmp/database.sql`, ); // Restore from the copied file emit("Running database restore..."); await 
execAsync( - `docker exec $(docker ps --filter "name=dokploy-postgres" -q) pg_restore -v -U dokploy -d dokploy /tmp/database.sql`, + `docker exec ${postgresContainerId} pg_restore -v -U dokploy -d dokploy /tmp/database.sql`, ); // Cleanup the temporary file in the container emit("Cleaning up container temp file..."); await execAsync( - `docker exec $(docker ps --filter "name=dokploy-postgres" -q) rm /tmp/database.sql`, + `docker exec ${postgresContainerId} rm /tmp/database.sql`, ); emit("Restore completed successfully!"); diff --git a/packages/server/src/utils/traefik/web-server.ts b/packages/server/src/utils/traefik/web-server.ts index 78046c67..1534e2f1 100644 --- a/packages/server/src/utils/traefik/web-server.ts +++ b/packages/server/src/utils/traefik/web-server.ts @@ -3,7 +3,11 @@ import { join } from "node:path"; import { paths } from "@dokploy/server/constants"; import type { User } from "@dokploy/server/services/user"; import { dump, load } from "js-yaml"; -import { loadOrCreateConfig, writeTraefikConfig } from "./application"; +import { + loadOrCreateConfig, + removeTraefikConfig, + writeTraefikConfig, +} from "./application"; import type { FileConfig } from "./file-types"; import type { MainTraefikConfig } from "./types"; @@ -11,32 +15,62 @@ export const updateServerTraefik = ( user: User | null, newHost: string | null, ) => { + const { https, certificateType } = user || {}; const appName = "dokploy"; const config: FileConfig = loadOrCreateConfig(appName); config.http = config.http || { routers: {}, services: {} }; config.http.routers = config.http.routers || {}; + config.http.services = config.http.services || {}; - const currentRouterConfig = config.http.routers[`${appName}-router-app`]; + const currentRouterConfig = config.http.routers[`${appName}-router-app`] || { + rule: `Host(\`${newHost}\`)`, + service: `${appName}-service-app`, + entryPoints: ["web"], + }; + config.http.routers[`${appName}-router-app`] = currentRouterConfig; - if 
(currentRouterConfig && newHost) { - currentRouterConfig.rule = `Host(\`${newHost}\`)`; + config.http.services = { + ...config.http.services, + [`${appName}-service-app`]: { + loadBalancer: { + servers: [ + { + url: `http://dokploy:${process.env.PORT || 3000}`, + }, + ], + passHostHeader: true, + }, + }, + }; - if (user?.certificateType === "letsencrypt") { + if (https) { + currentRouterConfig.middlewares = ["redirect-to-https"]; + + if (certificateType === "letsencrypt") { config.http.routers[`${appName}-router-app-secure`] = { - ...currentRouterConfig, + rule: `Host(\`${newHost}\`)`, + service: `${appName}-service-app`, entryPoints: ["websecure"], tls: { certResolver: "letsencrypt" }, }; - - currentRouterConfig.middlewares = ["redirect-to-https"]; } else { - delete config.http.routers[`${appName}-router-app-secure`]; - currentRouterConfig.middlewares = []; + config.http.routers[`${appName}-router-app-secure`] = { + rule: `Host(\`${newHost}\`)`, + service: `${appName}-service-app`, + entryPoints: ["websecure"], + }; } + } else { + delete config.http.routers[`${appName}-router-app-secure`]; + currentRouterConfig.middlewares = []; } - writeTraefikConfig(config, appName); + if (newHost) { + writeTraefikConfig(config, appName); + } else { + removeTraefikConfig(appName); + } }; export const updateLetsEncryptEmail = (newEmail: string | null) => { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 8b9558cb..50d8ecad 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -17,7 +17,7 @@ importers: version: 1.9.4 '@commitlint/cli': specifier: ^19.3.0 - version: 19.3.0(@types/node@18.19.42)(typescript@5.7.2) + version: 19.3.0(@types/node@18.19.42)(typescript@5.8.3) '@commitlint/config-conventional': specifier: ^19.2.2 version: 19.2.2 @@ -266,8 +266,8 @@ importers: specifier: 5.1.1 version: 5.1.1(encoding@0.1.13) better-auth: - specifier: 1.2.4 - version: 1.2.4(typescript@5.5.3) + specifier: 1.2.6 + version: 1.2.6 bl: specifier: 6.0.11 version: 6.0.11 @@ -686,8 +686,8 @@ 
importers: specifier: ^0.0.13 version: 0.0.13(zod@3.23.8) '@better-auth/utils': - specifier: 0.2.3 - version: 0.2.3 + specifier: 0.2.4 + version: 0.2.4 '@faker-js/faker': specifier: ^8.4.1 version: 8.4.1 @@ -719,8 +719,8 @@ importers: specifier: 5.1.1 version: 5.1.1(encoding@0.1.13) better-auth: - specifier: 1.2.4 - version: 1.2.4(typescript@5.5.3) + specifier: 1.2.6 + version: 1.2.6 bl: specifier: 6.0.11 version: 6.0.11 @@ -1003,11 +1003,11 @@ packages: '@balena/dockerignore@1.0.2': resolution: {integrity: sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q==} - '@better-auth/utils@0.2.3': - resolution: {integrity: sha512-Ap1GaSmo6JYhJhxJOpUB0HobkKPTNzfta+bLV89HfpyCAHN7p8ntCrmNFHNAVD0F6v0mywFVEUg1FUhNCc81Rw==} + '@better-auth/utils@0.2.4': + resolution: {integrity: sha512-ayiX87Xd5sCHEplAdeMgwkA0FgnXsEZBgDn890XHHwSWNqqRZDYOq3uj2Ei2leTv1I2KbG5HHn60Ah1i2JWZjQ==} - '@better-fetch/fetch@1.1.15': - resolution: {integrity: sha512-0Bl8YYj1f8qCTNHeSn5+1DWv2hy7rLBrQ8rS8Y9XYloiwZEfc3k4yspIG0llRxafxqhGCwlGRg+F8q1HZRCMXA==} + '@better-fetch/fetch@1.1.18': + resolution: {integrity: sha512-rEFOE1MYIsBmoMJtQbl32PGHHXuG2hDxvEd7rUHE0vCBoFQVSDqaVs9hkZEtHCxRoY+CljXKFCOuJ8uxqw1LcA==} '@biomejs/biome@1.9.4': resolution: {integrity: sha512-1rkd7G70+o9KkTn5KLmDYXihGoTaIGO9PIIN2ZB7UJxFrWw04CZHPYiMRjYsaDvVV7hP1dYNRLxSANLaBFGpog==} @@ -3925,11 +3925,11 @@ packages: before-after-hook@2.2.3: resolution: {integrity: sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==} - better-auth@1.2.4: - resolution: {integrity: sha512-/ZK2jbUjm8JwdeCLFrUWUBmexPyI9PkaLVXWLWtN60sMDHTY8B5G72wcHglo1QMFBaw4G0qFkP5ayl9k6XfDaA==} + better-auth@1.2.6: + resolution: {integrity: sha512-RVy6nfNCXpohx49zP2ChUO3zN0nvz5UXuETJIhWU+dshBKpFMk4P4hAQauM3xqTJdd9hfeB5y+segmG1oYGTJQ==} - better-call@1.0.3: - resolution: {integrity: sha512-DUKImKoDIy5UtCvQbHTg0wuBRse6gu1Yvznn7+1B3I5TeY8sclRPFce0HI+4WF2bcb+9PqmkET8nXZubrHQh9A==} + 
better-call@1.0.7: + resolution: {integrity: sha512-p5kEthErx3HsW9dCCvvEx+uuEdncn0ZrlqrOG3TkR1aVYgynpwYbTVU90nY8/UwfMhROzqZWs8vryainSQxrNg==} binary-extensions@2.3.0: resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==} @@ -7272,8 +7272,8 @@ packages: engines: {node: '>=14.17'} hasBin: true - typescript@5.7.2: - resolution: {integrity: sha512-i5t66RHxDvVN40HfDd1PsEThGNnlMCMT3jMUuoh9/0TaqWevNontacunWyN02LA9/fIbEWlcHZcgTKb9QoaLfg==} + typescript@5.8.3: + resolution: {integrity: sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==} engines: {node: '>=14.17'} hasBin: true @@ -7389,14 +7389,6 @@ packages: v8-compile-cache-lib@3.0.1: resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} - valibot@1.0.0-beta.15: - resolution: {integrity: sha512-BKy8XosZkDHWmYC+cJG74LBzP++Gfntwi33pP3D3RKztz2XV9jmFWnkOi21GoqARP8wAWARwhV6eTr1JcWzjGw==} - peerDependencies: - typescript: '>=5' - peerDependenciesMeta: - typescript: - optional: true - vfile-message@4.0.2: resolution: {integrity: sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==} @@ -7746,11 +7738,12 @@ snapshots: '@balena/dockerignore@1.0.2': {} - '@better-auth/utils@0.2.3': + '@better-auth/utils@0.2.4': dependencies: + typescript: 5.8.3 uncrypto: 0.1.3 - '@better-fetch/fetch@1.1.15': {} + '@better-fetch/fetch@1.1.18': {} '@biomejs/biome@1.9.4': optionalDependencies: @@ -7857,11 +7850,11 @@ snapshots: style-mod: 4.1.2 w3c-keyname: 2.2.8 - '@commitlint/cli@19.3.0(@types/node@18.19.42)(typescript@5.7.2)': + '@commitlint/cli@19.3.0(@types/node@18.19.42)(typescript@5.8.3)': dependencies: '@commitlint/format': 19.3.0 '@commitlint/lint': 19.2.2 - '@commitlint/load': 19.2.0(@types/node@18.19.42)(typescript@5.7.2) + '@commitlint/load': 19.2.0(@types/node@18.19.42)(typescript@5.8.3) '@commitlint/read': 19.2.1 
'@commitlint/types': 19.0.3 execa: 8.0.1 @@ -7908,15 +7901,15 @@ snapshots: '@commitlint/rules': 19.0.3 '@commitlint/types': 19.0.3 - '@commitlint/load@19.2.0(@types/node@18.19.42)(typescript@5.7.2)': + '@commitlint/load@19.2.0(@types/node@18.19.42)(typescript@5.8.3)': dependencies: '@commitlint/config-validator': 19.0.3 '@commitlint/execute-rule': 19.0.0 '@commitlint/resolve-extends': 19.1.0 '@commitlint/types': 19.0.3 chalk: 5.3.0 - cosmiconfig: 9.0.0(typescript@5.7.2) - cosmiconfig-typescript-loader: 5.0.0(@types/node@18.19.42)(cosmiconfig@9.0.0(typescript@5.7.2))(typescript@5.7.2) + cosmiconfig: 9.0.0(typescript@5.8.3) + cosmiconfig-typescript-loader: 5.0.0(@types/node@18.19.42)(cosmiconfig@9.0.0(typescript@5.8.3))(typescript@5.8.3) lodash.isplainobject: 4.0.6 lodash.merge: 4.6.2 lodash.uniq: 4.5.0 @@ -10745,27 +10738,24 @@ snapshots: before-after-hook@2.2.3: {} - better-auth@1.2.4(typescript@5.5.3): + better-auth@1.2.6: dependencies: - '@better-auth/utils': 0.2.3 - '@better-fetch/fetch': 1.1.15 + '@better-auth/utils': 0.2.4 + '@better-fetch/fetch': 1.1.18 '@noble/ciphers': 0.6.0 '@noble/hashes': 1.7.1 '@simplewebauthn/browser': 13.1.0 '@simplewebauthn/server': 13.1.1 - better-call: 1.0.3 + better-call: 1.0.7 defu: 6.1.4 jose: 5.9.6 kysely: 0.27.6 nanostores: 0.11.3 - valibot: 1.0.0-beta.15(typescript@5.5.3) zod: 3.24.1 - transitivePeerDependencies: - - typescript - better-call@1.0.3: + better-call@1.0.7: dependencies: - '@better-fetch/fetch': 1.1.15 + '@better-fetch/fetch': 1.1.18 rou3: 0.5.1 set-cookie-parser: 2.7.1 uncrypto: 0.1.3 @@ -11140,21 +11130,21 @@ snapshots: core-js@3.39.0: {} - cosmiconfig-typescript-loader@5.0.0(@types/node@18.19.42)(cosmiconfig@9.0.0(typescript@5.7.2))(typescript@5.7.2): + cosmiconfig-typescript-loader@5.0.0(@types/node@18.19.42)(cosmiconfig@9.0.0(typescript@5.8.3))(typescript@5.8.3): dependencies: '@types/node': 18.19.42 - cosmiconfig: 9.0.0(typescript@5.7.2) + cosmiconfig: 9.0.0(typescript@5.8.3) jiti: 1.21.6 - typescript: 
5.7.2 + typescript: 5.8.3 - cosmiconfig@9.0.0(typescript@5.7.2): + cosmiconfig@9.0.0(typescript@5.8.3): dependencies: env-paths: 2.2.1 import-fresh: 3.3.0 js-yaml: 4.1.0 parse-json: 5.2.0 optionalDependencies: - typescript: 5.7.2 + typescript: 5.8.3 cpu-features@0.0.10: dependencies: @@ -14446,7 +14436,7 @@ snapshots: typescript@5.5.3: {} - typescript@5.7.2: {} + typescript@5.8.3: {} ufo@1.5.4: {} @@ -14567,10 +14557,6 @@ snapshots: v8-compile-cache-lib@3.0.1: optional: true - valibot@1.0.0-beta.15(typescript@5.5.3): - optionalDependencies: - typescript: 5.5.3 - vfile-message@4.0.2: dependencies: '@types/unist': 3.0.3