Merge branch 'canary' into fix-mount-update

This commit is contained in:
Mauricio Siu
2025-04-26 16:44:49 -06:00
201 changed files with 40191 additions and 3332 deletions

View File

@@ -28,7 +28,7 @@
"typecheck": "tsc --noEmit"
},
"dependencies": {
"micromatch":"4.0.8",
"micromatch": "4.0.8",
"@ai-sdk/anthropic": "^1.0.6",
"@ai-sdk/azure": "^1.0.15",
"@ai-sdk/cohere": "^1.0.6",
@@ -36,13 +36,12 @@
"@ai-sdk/mistral": "^1.0.6",
"@ai-sdk/openai": "^1.0.12",
"@ai-sdk/openai-compatible": "^0.0.13",
"@better-auth/utils":"0.2.3",
"@oslojs/encoding":"1.1.0",
"@oslojs/crypto":"1.0.1",
"drizzle-dbml-generator":"0.10.0",
"better-auth":"1.2.4",
"@better-auth/utils": "0.2.4",
"@oslojs/encoding": "1.1.0",
"@oslojs/crypto": "1.0.1",
"drizzle-dbml-generator": "0.10.0",
"better-auth": "1.2.6",
"@faker-js/faker": "^8.4.1",
"@lucia-auth/adapter-drizzle": "1.0.7",
"@octokit/auth-app": "^6.0.4",
"@react-email/components": "^0.0.21",
"@trpc/server": "^10.43.6",
@@ -54,12 +53,11 @@
"date-fns": "3.6.0",
"dockerode": "4.0.2",
"dotenv": "16.4.5",
"drizzle-orm": "^0.39.1",
"drizzle-orm": "^0.39.1",
"drizzle-zod": "0.5.1",
"hi-base32": "^0.5.1",
"js-yaml": "4.1.0",
"lodash": "4.17.21",
"lucia": "^3.0.1",
"nanoid": "3",
"node-os-utils": "1.3.7",
"node-pty": "1.0.0",
@@ -78,7 +76,8 @@
"ws": "8.16.0",
"zod": "^3.23.4",
"ssh2": "1.15.0",
"@octokit/rest": "^20.0.2"
"@octokit/rest": "^20.0.2",
"toml": "3.0.0"
},
"devDependencies": {
"@types/micromatch": "4.0.9",

View File

@@ -13,6 +13,7 @@ import { z } from "zod";
import { bitbucket } from "./bitbucket";
import { deployments } from "./deployment";
import { domains } from "./domain";
import { gitea } from "./gitea";
import { github } from "./github";
import { gitlab } from "./gitlab";
import { mounts } from "./mount";
@@ -33,6 +34,7 @@ export const sourceType = pgEnum("sourceType", [
"github",
"gitlab",
"bitbucket",
"gitea",
"drop",
]);
@@ -155,6 +157,11 @@ export const applications = pgTable("application", {
gitlabBranch: text("gitlabBranch"),
gitlabBuildPath: text("gitlabBuildPath").default("/"),
gitlabPathNamespace: text("gitlabPathNamespace"),
// Gitea
giteaRepository: text("giteaRepository"),
giteaOwner: text("giteaOwner"),
giteaBranch: text("giteaBranch"),
giteaBuildPath: text("giteaBuildPath").default("/"),
// Bitbucket
bitbucketRepository: text("bitbucketRepository"),
bitbucketOwner: text("bitbucketOwner"),
@@ -212,6 +219,9 @@ export const applications = pgTable("application", {
gitlabId: text("gitlabId").references(() => gitlab.gitlabId, {
onDelete: "set null",
}),
giteaId: text("giteaId").references(() => gitea.giteaId, {
onDelete: "set null",
}),
bitbucketId: text("bitbucketId").references(() => bitbucket.bitbucketId, {
onDelete: "set null",
}),
@@ -249,6 +259,10 @@ export const applicationsRelations = relations(
fields: [applications.gitlabId],
references: [gitlab.gitlabId],
}),
gitea: one(gitea, {
fields: [applications.giteaId],
references: [gitea.giteaId],
}),
bitbucket: one(bitbucket, {
fields: [applications.bitbucketId],
references: [bitbucket.bitbucketId],
@@ -379,7 +393,9 @@ const createSchema = createInsertSchema(applications, {
customGitUrl: z.string().optional(),
buildPath: z.string().optional(),
projectId: z.string(),
sourceType: z.enum(["github", "docker", "git"]).optional(),
sourceType: z
.enum(["github", "docker", "git", "gitlab", "bitbucket", "gitea", "drop"])
.optional(),
applicationStatus: z.enum(["idle", "running", "done", "error"]),
buildType: z.enum([
"dockerfile",
@@ -483,6 +499,18 @@ export const apiSaveBitbucketProvider = createSchema
})
.required();
export const apiSaveGiteaProvider = createSchema
.pick({
applicationId: true,
giteaBranch: true,
giteaBuildPath: true,
giteaOwner: true,
giteaRepository: true,
giteaId: true,
watchPaths: true,
})
.required();
export const apiSaveDockerProvider = createSchema
.pick({
dockerImage: true,

View File

@@ -15,12 +15,13 @@ import { mariadb } from "./mariadb";
import { mongo } from "./mongo";
import { mysql } from "./mysql";
import { postgres } from "./postgres";
import { users_temp } from "./user";
export const databaseType = pgEnum("databaseType", [
"postgres",
"mariadb",
"mysql",
"mongo",
"web-server",
]);
export const backups = pgTable("backup", {
@@ -58,6 +59,7 @@ export const backups = pgTable("backup", {
mongoId: text("mongoId").references((): AnyPgColumn => mongo.mongoId, {
onDelete: "cascade",
}),
userId: text("userId").references(() => users_temp.id),
});
export const backupsRelations = relations(backups, ({ one }) => ({
@@ -81,6 +83,10 @@ export const backupsRelations = relations(backups, ({ one }) => ({
fields: [backups.mongoId],
references: [mongo.mongoId],
}),
user: one(users_temp, {
fields: [backups.userId],
references: [users_temp.id],
}),
}));
const createSchema = createInsertSchema(backups, {
@@ -91,11 +97,12 @@ const createSchema = createInsertSchema(backups, {
database: z.string().min(1),
schedule: z.string(),
keepLatestCount: z.number().optional(),
databaseType: z.enum(["postgres", "mariadb", "mysql", "mongo"]),
databaseType: z.enum(["postgres", "mariadb", "mysql", "mongo", "web-server"]),
postgresId: z.string().optional(),
mariadbId: z.string().optional(),
mysqlId: z.string().optional(),
mongoId: z.string().optional(),
userId: z.string().optional(),
});
export const apiCreateBackup = createSchema.pick({
@@ -110,6 +117,7 @@ export const apiCreateBackup = createSchema.pick({
postgresId: true,
mongoId: true,
databaseType: true,
userId: true,
});
export const apiFindOneBackup = createSchema

View File

@@ -6,6 +6,7 @@ import { z } from "zod";
import { bitbucket } from "./bitbucket";
import { deployments } from "./deployment";
import { domains } from "./domain";
import { gitea } from "./gitea";
import { github } from "./github";
import { gitlab } from "./gitlab";
import { mounts } from "./mount";
@@ -20,6 +21,7 @@ export const sourceTypeCompose = pgEnum("sourceTypeCompose", [
"github",
"gitlab",
"bitbucket",
"gitea",
"raw",
]);
@@ -55,6 +57,10 @@ export const compose = pgTable("compose", {
bitbucketRepository: text("bitbucketRepository"),
bitbucketOwner: text("bitbucketOwner"),
bitbucketBranch: text("bitbucketBranch"),
// Gitea
giteaRepository: text("giteaRepository"),
giteaOwner: text("giteaOwner"),
giteaBranch: text("giteaBranch"),
// Git
customGitUrl: text("customGitUrl"),
customGitBranch: text("customGitBranch"),
@@ -87,6 +93,9 @@ export const compose = pgTable("compose", {
bitbucketId: text("bitbucketId").references(() => bitbucket.bitbucketId, {
onDelete: "set null",
}),
giteaId: text("giteaId").references(() => gitea.giteaId, {
onDelete: "set null",
}),
serverId: text("serverId").references(() => server.serverId, {
onDelete: "cascade",
}),
@@ -116,6 +125,10 @@ export const composeRelations = relations(compose, ({ one, many }) => ({
fields: [compose.bitbucketId],
references: [bitbucket.bitbucketId],
}),
gitea: one(gitea, {
fields: [compose.giteaId],
references: [gitea.giteaId],
}),
server: one(server, {
fields: [compose.serverId],
references: [server.serverId],
@@ -126,7 +139,7 @@ const createSchema = createInsertSchema(compose, {
name: z.string().min(1),
description: z.string(),
env: z.string().optional(),
composeFile: z.string().min(1),
composeFile: z.string().optional(),
projectId: z.string(),
customGitSSHKeyId: z.string().optional(),
command: z.string().optional(),
@@ -142,6 +155,7 @@ export const apiCreateCompose = createSchema.pick({
composeType: true,
appName: true,
serverId: true,
composeFile: true,
});
export const apiCreateComposeByTemplate = createSchema

View File

@@ -5,6 +5,7 @@ import { nanoid } from "nanoid";
import { z } from "zod";
import { organization } from "./account";
import { bitbucket } from "./bitbucket";
import { gitea } from "./gitea";
import { github } from "./github";
import { gitlab } from "./gitlab";
@@ -12,6 +13,7 @@ export const gitProviderType = pgEnum("gitProviderType", [
"github",
"gitlab",
"bitbucket",
"gitea",
]);
export const gitProvider = pgTable("git_provider", {
@@ -42,6 +44,10 @@ export const gitProviderRelations = relations(gitProvider, ({ one }) => ({
fields: [gitProvider.gitProviderId],
references: [bitbucket.gitProviderId],
}),
gitea: one(gitea, {
fields: [gitProvider.gitProviderId],
references: [gitea.gitProviderId],
}),
organization: one(organization, {
fields: [gitProvider.organizationId],
references: [organization.id],

View File

@@ -0,0 +1,86 @@
import { relations } from "drizzle-orm";
import { integer, pgTable, text } from "drizzle-orm/pg-core";
import { createInsertSchema } from "drizzle-zod";
import { nanoid } from "nanoid";
import { z } from "zod";
import { gitProvider } from "./git-provider";
export const gitea = pgTable("gitea", {
giteaId: text("giteaId")
.notNull()
.primaryKey()
.$defaultFn(() => nanoid()),
giteaUrl: text("giteaUrl").default("https://gitea.com").notNull(),
redirectUri: text("redirect_uri"),
clientId: text("client_id"),
clientSecret: text("client_secret"),
gitProviderId: text("gitProviderId")
.notNull()
.references(() => gitProvider.gitProviderId, { onDelete: "cascade" }),
accessToken: text("access_token"),
refreshToken: text("refresh_token"),
expiresAt: integer("expires_at"),
scopes: text("scopes").default("repo,repo:status,read:user,read:org"),
lastAuthenticatedAt: integer("last_authenticated_at"),
});
export const giteaProviderRelations = relations(gitea, ({ one }) => ({
gitProvider: one(gitProvider, {
fields: [gitea.gitProviderId],
references: [gitProvider.gitProviderId],
}),
}));
const createSchema = createInsertSchema(gitea);
export const apiCreateGitea = createSchema.extend({
clientId: z.string().optional(),
clientSecret: z.string().optional(),
gitProviderId: z.string().optional(),
redirectUri: z.string().optional(),
name: z.string().min(1),
giteaUrl: z.string().min(1),
giteaUsername: z.string().optional(),
accessToken: z.string().optional(),
refreshToken: z.string().optional(),
expiresAt: z.number().optional(),
organizationName: z.string().optional(),
scopes: z.string().optional(),
lastAuthenticatedAt: z.number().optional(),
});
export const apiFindOneGitea = createSchema
.extend({
giteaId: z.string().min(1),
})
.pick({ giteaId: true });
export const apiGiteaTestConnection = createSchema
.extend({
organizationName: z.string().optional(),
})
.pick({ giteaId: true, organizationName: true });
export type ApiGiteaTestConnection = z.infer<typeof apiGiteaTestConnection>;
export const apiFindGiteaBranches = z.object({
owner: z.string().min(1),
repositoryName: z.string().min(1),
giteaId: z.string().optional(),
});
export const apiUpdateGitea = createSchema.extend({
clientId: z.string().optional(),
clientSecret: z.string().optional(),
redirectUri: z.string().optional(),
name: z.string().min(1),
giteaId: z.string().min(1),
giteaUrl: z.string().min(1),
giteaUsername: z.string().optional(),
accessToken: z.string().optional(),
refreshToken: z.string().optional(),
expiresAt: z.number().optional(),
organizationName: z.string().optional(),
scopes: z.string().optional(),
lastAuthenticatedAt: z.number().optional(),
});

View File

@@ -25,6 +25,7 @@ export * from "./git-provider";
export * from "./bitbucket";
export * from "./github";
export * from "./gitlab";
export * from "./gitea";
export * from "./server";
export * from "./utils";
export * from "./preview-deployments";

View File

@@ -52,6 +52,7 @@ const createSchema = createInsertSchema(projects, {
export const apiCreateProject = createSchema.pick({
name: true,
description: true,
env: true,
});
export const apiFindOneProject = createSchema

View File

@@ -46,6 +46,7 @@ enum gitProviderType {
github
gitlab
bitbucket
gitea
}
enum mountType {
@@ -98,6 +99,7 @@ enum sourceType {
github
gitlab
bitbucket
gitea
drop
}
@@ -106,6 +108,7 @@ enum sourceTypeCompose {
github
gitlab
bitbucket
gitea
raw
}
@@ -206,6 +209,7 @@ table application {
githubId text
gitlabId text
bitbucketId text
giteaId text
serverId text
}
@@ -280,6 +284,9 @@ table compose {
bitbucketRepository text
bitbucketOwner text
bitbucketBranch text
giteaRepository text
giteaOwner text
giteaBranch text
customGitUrl text
customGitBranch text
customGitSSHKeyId text
@@ -294,6 +301,7 @@ table compose {
githubId text
gitlabId text
bitbucketId text
giteaId text
serverId text
}
@@ -389,6 +397,20 @@ table gitlab {
gitProviderId text [not null]
}
table gitea {
giteaId text [pk, not null]
giteaUrl text [not null, default: 'https://gitea.com']
redirect_uri text
client_id text [not null]
client_secret text [not null]
access_token text
refresh_token text
expires_at integer
gitProviderId text [not null]
scopes text [default: 'repo,repo:status,read:user,read:org']
last_authenticated_at integer
}
table gotify {
gotifyId text [pk, not null]
serverUrl text [not null]
@@ -820,6 +842,8 @@ ref: github.gitProviderId - git_provider.gitProviderId
ref: gitlab.gitProviderId - git_provider.gitProviderId
ref: gitea.gitProviderId - git_provider.gitProviderId
ref: git_provider.userId - user.id
ref: mariadb.projectId > project.projectId

View File

@@ -10,9 +10,10 @@ import {
import { createInsertSchema } from "drizzle-zod";
import { nanoid } from "nanoid";
import { z } from "zod";
import { account, organization, apikey } from "./account";
import { account, apikey, organization } from "./account";
import { projects } from "./project";
import { certificateType } from "./shared";
import { backups } from "./backups";
/**
* This is an example of how to use the multi-project schema feature of Drizzle ORM. Use the same
* database instance for multiple projects.
@@ -49,6 +50,7 @@ export const users_temp = pgTable("user_temp", {
// Admin
serverIp: text("serverIp"),
certificateType: certificateType("certificateType").notNull().default("none"),
https: boolean("https").notNull().default(false),
host: text("host"),
letsEncryptEmail: text("letsEncryptEmail"),
sshPrivateKey: text("sshPrivateKey"),
@@ -124,6 +126,7 @@ export const usersRelations = relations(users_temp, ({ one, many }) => ({
organizations: many(organization),
projects: many(projects),
apiKeys: many(apikey),
backups: many(backups),
}));
const createSchema = createInsertSchema(users_temp, {
@@ -200,10 +203,12 @@ export const apiAssignDomain = createSchema
host: true,
certificateType: true,
letsEncryptEmail: true,
https: true,
})
.required()
.partial({
letsEncryptEmail: true,
https: true,
});
export const apiUpdateDockerCleanup = createSchema

View File

@@ -28,6 +28,7 @@ export * from "./services/git-provider";
export * from "./services/bitbucket";
export * from "./services/github";
export * from "./services/gitlab";
export * from "./services/gitea";
export * from "./services/server";
export * from "./services/application";
export * from "./utils/databases/rebuild";
@@ -47,6 +48,7 @@ export * from "./utils/backups/mongo";
export * from "./utils/backups/mysql";
export * from "./utils/backups/postgres";
export * from "./utils/backups/utils";
export * from "./utils/backups/web-server";
export * from "./templates/processors";
export * from "./utils/notifications/build-error";
@@ -90,6 +92,7 @@ export * from "./utils/providers/docker";
export * from "./utils/providers/git";
export * from "./utils/providers/github";
export * from "./utils/providers/gitlab";
export * from "./utils/providers/gitea";
export * from "./utils/providers/raw";
export * from "./utils/servers/remote-docker";

View File

@@ -2,12 +2,16 @@ import type { IncomingMessage } from "node:http";
import * as bcrypt from "bcrypt";
import { betterAuth } from "better-auth";
import { drizzleAdapter } from "better-auth/adapters/drizzle";
import { organization, twoFactor, apiKey } from "better-auth/plugins";
import { APIError } from "better-auth/api";
import { apiKey, organization, twoFactor } from "better-auth/plugins";
import { and, desc, eq } from "drizzle-orm";
import { IS_CLOUD } from "../constants";
import { db } from "../db";
import * as schema from "../db/schema";
import { getUserByToken } from "../services/admin";
import { updateUser } from "../services/user";
import { sendEmail } from "../verification/send-verification-email";
import { IS_CLOUD } from "../constants";
import { getPublicIpWithFallback } from "../wss/utils";
const { handler, api } = betterAuth({
database: drizzleAdapter(db, {
@@ -88,11 +92,40 @@ const { handler, api } = betterAuth({
databaseHooks: {
user: {
create: {
before: async (_user, context) => {
if (!IS_CLOUD) {
const xDokployToken =
context?.request?.headers?.get("x-dokploy-token");
if (xDokployToken) {
const user = await getUserByToken(xDokployToken);
if (!user) {
throw new APIError("BAD_REQUEST", {
message: "User not found",
});
}
} else {
const isAdminPresent = await db.query.member.findFirst({
where: eq(schema.member.role, "owner"),
});
if (isAdminPresent) {
throw new APIError("BAD_REQUEST", {
message: "Admin is already created",
});
}
}
}
},
after: async (user) => {
const isAdminPresent = await db.query.member.findFirst({
where: eq(schema.member.role, "owner"),
});
if (!IS_CLOUD) {
await updateUser(user.id, {
serverIp: await getPublicIpWithFallback(),
});
}
if (IS_CLOUD || !isAdminPresent) {
await db.transaction(async (tx) => {
const organization = await tx
@@ -168,7 +201,7 @@ const { handler, api } = betterAuth({
const host =
process.env.NODE_ENV === "development"
? "http://localhost:3000"
: "https://dokploy.com";
: "https://app.dokploy.com";
const inviteLink = `${host}/invitation?token=${data.id}`;
await sendEmail({

View File

@@ -6,8 +6,8 @@ import { generateObject } from "ai";
import { desc, eq } from "drizzle-orm";
import { z } from "zod";
import { IS_CLOUD } from "../constants";
import { findServerById } from "./server";
import { findOrganizationById } from "./admin";
import { findServerById } from "./server";
export const getAiSettingsByOrganizationId = async (organizationId: string) => {
const aiSettings = await db.query.ai.findMany({

View File

@@ -26,6 +26,10 @@ import {
cloneGitRepository,
getCustomGitCloneCommand,
} from "@dokploy/server/utils/providers/git";
import {
cloneGiteaRepository,
getGiteaCloneCommand,
} from "@dokploy/server/utils/providers/gitea";
import {
cloneGithubRepository,
getGithubCloneCommand,
@@ -111,6 +115,7 @@ export const findApplicationById = async (applicationId: string) => {
gitlab: true,
github: true,
bitbucket: true,
gitea: true,
server: true,
previewDeployments: true,
},
@@ -191,6 +196,9 @@ export const deployApplication = async ({
} else if (application.sourceType === "gitlab") {
await cloneGitlabRepository(application, deployment.logPath);
await buildApplication(application, deployment.logPath);
} else if (application.sourceType === "gitea") {
await cloneGiteaRepository(application, deployment.logPath);
await buildApplication(application, deployment.logPath);
} else if (application.sourceType === "bitbucket") {
await cloneBitbucketRepository(application, deployment.logPath);
await buildApplication(application, deployment.logPath);
@@ -309,6 +317,8 @@ export const deployRemoteApplication = async ({
application,
deployment.logPath,
);
} else if (application.sourceType === "gitea") {
command += await getGiteaCloneCommand(application, deployment.logPath);
} else if (application.sourceType === "git") {
command += await getCustomGitCloneCommand(
application,

View File

@@ -29,6 +29,10 @@ import {
cloneGitRepository,
getCustomGitCloneCommand,
} from "@dokploy/server/utils/providers/git";
import {
cloneGiteaRepository,
getGiteaCloneCommand,
} from "@dokploy/server/utils/providers/gitea";
import {
cloneGithubRepository,
getGithubCloneCommand,
@@ -65,7 +69,7 @@ export const createCompose = async (input: typeof apiCreateCompose._type) => {
.insert(compose)
.values({
...input,
composeFile: "",
composeFile: input.composeFile || "",
appName,
})
.returning()
@@ -125,6 +129,7 @@ export const findComposeById = async (composeId: string) => {
github: true,
gitlab: true,
bitbucket: true,
gitea: true,
server: true,
},
});
@@ -228,6 +233,8 @@ export const deployCompose = async ({
await cloneBitbucketRepository(compose, deployment.logPath, true);
} else if (compose.sourceType === "git") {
await cloneGitRepository(compose, deployment.logPath, true);
} else if (compose.sourceType === "gitea") {
await cloneGiteaRepository(compose, deployment.logPath, true);
} else if (compose.sourceType === "raw") {
await createComposeFile(compose, deployment.logPath);
}
@@ -351,6 +358,12 @@ export const deployRemoteCompose = async ({
);
} else if (compose.sourceType === "raw") {
command += getCreateComposeFileCommand(compose, deployment.logPath);
} else if (compose.sourceType === "gitea") {
command += await getGiteaCloneCommand(
compose,
deployment.logPath,
true,
);
}
await execAsyncRemote(compose.serverId, command);

View File

@@ -0,0 +1,98 @@
import { db } from "@dokploy/server/db";
import {
type apiCreateGitea,
gitProvider,
gitea,
} from "@dokploy/server/db/schema";
import { TRPCError } from "@trpc/server";
import { eq } from "drizzle-orm";
export type Gitea = typeof gitea.$inferSelect;
export const createGitea = async (
input: typeof apiCreateGitea._type,
organizationId: string,
) => {
return await db.transaction(async (tx) => {
const newGitProvider = await tx
.insert(gitProvider)
.values({
providerType: "gitea",
organizationId: organizationId,
name: input.name,
})
.returning()
.then((response) => response[0]);
if (!newGitProvider) {
throw new TRPCError({
code: "BAD_REQUEST",
message: "Error creating the Git provider",
});
}
const giteaProvider = await tx
.insert(gitea)
.values({
...input,
gitProviderId: newGitProvider?.gitProviderId,
})
.returning()
.then((response: (typeof gitea.$inferSelect)[]) => response[0]);
if (!giteaProvider) {
throw new TRPCError({
code: "BAD_REQUEST",
message: "Error creating the Gitea provider",
});
}
return {
giteaId: giteaProvider.giteaId,
clientId: giteaProvider.clientId,
giteaUrl: giteaProvider.giteaUrl,
};
});
};
export const findGiteaById = async (giteaId: string) => {
try {
const giteaProviderResult = await db.query.gitea.findFirst({
where: eq(gitea.giteaId, giteaId),
with: {
gitProvider: true,
},
});
if (!giteaProviderResult) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Gitea Provider not found",
});
}
return giteaProviderResult;
} catch (error) {
throw error;
}
};
export const updateGitea = async (giteaId: string, input: Partial<Gitea>) => {
try {
const updateResult = await db
.update(gitea)
.set(input)
.where(eq(gitea.giteaId, giteaId))
.returning();
const result = updateResult[0] as Gitea | undefined;
if (!result) {
throw new Error(`Failed to update Gitea provider with ID ${giteaId}`);
}
return result;
} catch (error) {
throw error;
}
};

View File

@@ -62,6 +62,7 @@ export const findApplicationByPreview = async (applicationId: string) => {
gitlab: true,
github: true,
bitbucket: true,
gitea: true,
server: true,
},
});

View File

@@ -14,13 +14,50 @@ const validateUfw = () => `
`;
const validateSsh = () => `
if systemctl is-active --quiet sshd; then
if systemctl is-active --quiet sshd || systemctl is-active --quiet ssh; then
isEnabled=true
hasKeyAuth=$(find "$HOME/.ssh" -type f -name "authorized_keys" 2>/dev/null | grep -q . && echo true || echo false)
permitRootLogin=$(sudo sshd -T | grep -i "^PermitRootLogin" | awk '{print $2}')
passwordAuth=$(sudo sshd -T | grep -i "^PasswordAuthentication" | awk '{print $2}')
usePam=$(sudo sshd -T | grep -i "^UsePAM" | awk '{print $2}')
echo "{\\"enabled\\": $isEnabled, \\"keyAuth\\": $hasKeyAuth, \\"permitRootLogin\\": \\"$permitRootLogin\\", \\"passwordAuth\\": \\"$passwordAuth\\", \\"usePam\\": \\"$usePam\\"}"
# Get the sshd config file path
sshd_config=$(sudo sshd -T 2>/dev/null | grep -i "^configfile" | awk '{print $2}')
# If we couldn't get the path, use the default
if [ -z "$sshd_config" ]; then
sshd_config="/etc/ssh/sshd_config"
fi
# Check for key authentication
# SSH key auth is enabled by default unless explicitly disabled
pubkey_line=$(sudo grep -i "^PubkeyAuthentication" "$sshd_config" 2>/dev/null | grep -v "#")
if [ -z "$pubkey_line" ] || echo "$pubkey_line" | grep -q -i "yes"; then
keyAuth=true
else
keyAuth=false
fi
# Get the exact PermitRootLogin value from config
# This preserves values like "prohibit-password" without normalization
permitRootLogin=$(sudo grep -i "^PermitRootLogin" "$sshd_config" 2>/dev/null | grep -v "#" | awk '{print $2}')
if [ -z "$permitRootLogin" ]; then
# Default is prohibit-password in newer versions
permitRootLogin="prohibit-password"
fi
# Get the exact PasswordAuthentication value from config
passwordAuth=$(sudo grep -i "^PasswordAuthentication" "$sshd_config" 2>/dev/null | grep -v "#" | awk '{print $2}')
if [ -z "$passwordAuth" ]; then
# Default is yes
passwordAuth="yes"
fi
# Get the exact UsePAM value from config
usePam=$(sudo grep -i "^UsePAM" "$sshd_config" 2>/dev/null | grep -v "#" | awk '{print $2}')
if [ -z "$usePam" ]; then
# Default is yes in most distros
usePam="yes"
fi
# Return the results with exact values from config file
echo "{\\"enabled\\": $isEnabled, \\"keyAuth\\": $keyAuth, \\"permitRootLogin\\": \\"$permitRootLogin\\", \\"passwordAuth\\": \\"$passwordAuth\\", \\"usePam\\": \\"$usePam\\"}"
else
echo "{\\"enabled\\": false, \\"keyAuth\\": false, \\"permitRootLogin\\": \\"unknown\\", \\"passwordAuth\\": \\"unknown\\", \\"usePam\\": \\"unknown\\"}"
fi

View File

@@ -6,10 +6,10 @@ import {
} from "@dokploy/server/services/deployment";
import { findServerById } from "@dokploy/server/services/server";
import {
TRAEFIK_HTTP3_PORT,
TRAEFIK_PORT,
TRAEFIK_SSL_PORT,
TRAEFIK_VERSION,
TRAEFIK_HTTP3_PORT,
getDefaultMiddlewares,
getDefaultServerTraefikConfig,
} from "@dokploy/server/setup/traefik-setup";
@@ -76,7 +76,7 @@ CURRENT_USER=$USER
echo "Installing requirements for: OS: $OS_TYPE"
if [ $EUID != 0 ]; then
echo "Please run this script as root or with sudo ❌"
echo "Please run this script as root or with sudo ❌"
exit
fi
@@ -263,7 +263,7 @@ const setupMainDirectory = () => `
# Create the /etc/dokploy directory
mkdir -p /etc/dokploy
chmod 777 /etc/dokploy
echo "Directory /etc/dokploy created ✅"
fi
`;
@@ -276,16 +276,16 @@ export const setupSwarm = () => `
# Get IP address
get_ip() {
local ip=""
# Try IPv4 with multiple services
# First attempt: ifconfig.io
ip=\$(curl -4s --connect-timeout 5 https://ifconfig.io 2>/dev/null)
# Second attempt: icanhazip.com
if [ -z "\$ip" ]; then
ip=\$(curl -4s --connect-timeout 5 https://icanhazip.com 2>/dev/null)
fi
# Third attempt: ipecho.net
if [ -z "\$ip" ]; then
ip=\$(curl -4s --connect-timeout 5 https://ipecho.net/plain 2>/dev/null)
@@ -295,12 +295,12 @@ export const setupSwarm = () => `
if [ -z "\$ip" ]; then
# Try IPv6 with ifconfig.io
ip=\$(curl -6s --connect-timeout 5 https://ifconfig.io 2>/dev/null)
# Try IPv6 with icanhazip.com
if [ -z "\$ip" ]; then
ip=\$(curl -6s --connect-timeout 5 https://icanhazip.com 2>/dev/null)
fi
# Try IPv6 with ipecho.net
if [ -z "\$ip" ]; then
ip=\$(curl -6s --connect-timeout 5 https://ipecho.net/plain 2>/dev/null)
@@ -361,7 +361,7 @@ const installUtilities = () => `
alpine)
sed -i '/^#.*\/community/s/^#//' /etc/apk/repositories
apk update >/dev/null
apk add curl wget git jq openssl >/dev/null
apk add curl wget git jq openssl sudo unzip tar >/dev/null
;;
ubuntu | debian | raspbian)
DEBIAN_FRONTEND=noninteractive apt-get update -y >/dev/null
@@ -549,7 +549,7 @@ export const createTraefikInstance = () => {
sleep 8
echo "Traefik migrated to Standalone ✅"
fi
if docker inspect dokploy-traefik > /dev/null 2>&1; then
echo "Traefik already exists ✅"
else
@@ -577,7 +577,7 @@ const installNixpacks = () => `
if command_exists nixpacks; then
echo "Nixpacks already installed ✅"
else
export NIXPACKS_VERSION=1.29.1
export NIXPACKS_VERSION=1.35.0
bash -c "$(curl -fsSL https://nixpacks.com/install.sh)"
echo "Nixpacks version $NIXPACKS_VERSION installed ✅"
fi

View File

@@ -101,11 +101,11 @@ export const initializeTraefik = async ({
console.log("Waiting for service cleanup...");
await new Promise((resolve) => setTimeout(resolve, 5000));
attempts++;
} catch (e) {
} catch (_e) {
break;
}
}
} catch (err) {
} catch (_err) {
console.log("No existing service to remove");
}
@@ -120,7 +120,7 @@ export const initializeTraefik = async ({
await container.remove({ force: true });
await new Promise((resolve) => setTimeout(resolve, 5000));
} catch (error) {
} catch (_err) {
console.log("No existing container to remove");
}

View File

@@ -1,4 +1,4 @@
import { load } from "js-yaml";
import { parse } from "toml";
/**
* Complete template interface that includes both metadata and configuration
@@ -86,7 +86,7 @@ export async function fetchTemplateFiles(
try {
// Fetch both files in parallel
const [templateYmlResponse, dockerComposeResponse] = await Promise.all([
fetch(`${baseUrl}/blueprints/${templateId}/template.yml`),
fetch(`${baseUrl}/blueprints/${templateId}/template.toml`),
fetch(`${baseUrl}/blueprints/${templateId}/docker-compose.yml`),
]);
@@ -99,7 +99,7 @@ export async function fetchTemplateFiles(
dockerComposeResponse.text(),
]);
const config = load(templateYml) as CompleteTemplate;
const config = parse(templateYml) as CompleteTemplate;
return { config, dockerCompose };
} catch (error) {

View File

@@ -1,4 +1,4 @@
import { randomBytes } from "node:crypto";
import { randomBytes, createHmac } from "node:crypto";
import { existsSync } from "node:fs";
import { mkdir, readFile, writeFile } from "node:fs/promises";
import { join } from "node:path";
@@ -24,6 +24,12 @@ export interface Template {
domains: DomainSchema[];
}
export interface GenerateJWTOptions {
length?: number;
secret?: string;
payload?: Record<string, unknown> | undefined;
}
export const generateRandomDomain = ({
serverIp,
projectName,
@@ -61,8 +67,48 @@ export function generateBase64(bytes = 32): string {
return randomBytes(bytes).toString("base64");
}
export function generateJwt(length = 256): string {
return randomBytes(length).toString("hex");
function safeBase64(str: string): string {
return str.replace(/=/g, "").replace(/\+/g, "-").replace(/\//g, "_");
}
function objToJWTBase64(obj: any): string {
return safeBase64(
Buffer.from(JSON.stringify(obj), "utf8").toString("base64"),
);
}
export function generateJwt(options: GenerateJWTOptions = {}): string {
let { length, secret, payload = {} } = options;
if (length) {
return randomBytes(length).toString("hex");
}
const encodedHeader = objToJWTBase64({
alg: "HS256",
typ: "JWT",
});
if (!payload.iss) {
payload.iss = "dokploy";
}
if (!payload.iat) {
payload.iat = Math.floor(Date.now() / 1000);
}
if (!payload.exp) {
payload.exp = Math.floor(new Date("2030-01-01T00:00:00Z").getTime() / 1000);
}
const encodedPayload = objToJWTBase64({
iat: Math.floor(Date.now() / 1000),
exp: Math.floor(new Date("2030-01-01T00:00:00Z").getTime() / 1000),
...payload,
});
if (!secret) {
secret = randomBytes(32).toString("hex");
}
const signature = safeBase64(
createHmac("SHA256", secret)
.update(`${encodedHeader}.${encodedPayload}`)
.digest("base64"),
);
return `${encodedHeader}.${encodedPayload}.${signature}`;
}
/**

View File

@@ -1,3 +1,4 @@
import { faker } from "@faker-js/faker";
import type { Schema } from "./index";
import {
generateBase64,
@@ -45,7 +46,9 @@ export interface CompleteTemplate {
variables: Record<string, string>;
config: {
domains: DomainConfig[];
env: Record<string, string> | string[];
env:
| Record<string, string | boolean | number>
| (string | Record<string, string | boolean | number>)[];
mounts?: MountConfig[];
};
}
@@ -62,13 +65,13 @@ export interface Template {
/**
* Process a string value and replace variables
*/
function processValue(
export function processValue(
value: string,
variables: Record<string, string>,
schema: Schema,
): string {
// First replace utility functions
let processedValue = value.replace(/\${([^}]+)}/g, (match, varName) => {
let processedValue = value?.replace(/\${([^}]+)}/g, (match, varName) => {
// Handle utility functions
if (varName === "domain") {
return generateRandomDomain(schema);
@@ -81,11 +84,11 @@ function processValue(
const length = Number.parseInt(varName.split(":")[1], 10) || 32;
return generateBase64(length);
}
if (varName.startsWith("password:")) {
const length = Number.parseInt(varName.split(":")[1], 10) || 16;
return generatePassword(length);
}
if (varName === "password") {
return generatePassword(16);
}
@@ -94,14 +97,31 @@ function processValue(
const length = Number.parseInt(varName.split(":")[1], 10) || 8;
return generateHash(length);
}
if (varName === "hash") {
return generateHash();
}
if (varName === "uuid") {
return crypto.randomUUID();
}
if (varName === "timestamp") {
if (varName === "timestamp" || varName === "timestampms") {
return Date.now().toString();
}
if (varName === "timestamps") {
return Math.round(Date.now() / 1000).toString();
}
if (varName.startsWith("timestampms:")) {
return new Date(varName.slice(12)).getTime().toString();
}
if (varName.startsWith("timestamps:")) {
return Math.round(
new Date(varName.slice(11)).getTime() / 1000,
).toString();
}
if (varName === "randomPort") {
return Math.floor(Math.random() * 65535).toString();
}
@@ -111,8 +131,42 @@ function processValue(
}
if (varName.startsWith("jwt:")) {
const length = Number.parseInt(varName.split(":")[1], 10) || 256;
return generateJwt(length);
const params: string[] = varName.split(":").slice(1);
if (params.length === 1 && params[0] && params[0].match(/^\d{1,3}$/)) {
return generateJwt({ length: Number.parseInt(params[0], 10) });
}
let [secret, payload] = params;
if (typeof payload === "string" && variables[payload]) {
payload = variables[payload];
}
if (
typeof payload === "string" &&
payload.startsWith("{") &&
payload.endsWith("}")
) {
try {
payload = JSON.parse(payload);
} catch (e) {
// If payload is not valid JSON, invalidate it
payload = undefined;
console.error("Invalid JWT payload", e);
}
}
if (typeof payload !== "object") {
payload = undefined;
}
return generateJwt({
secret: secret ? variables[secret] || secret : undefined,
payload: payload as any,
});
}
if (varName === "username") {
return faker.internet.userName().toLowerCase();
}
if (varName === "email") {
return faker.internet.email().toLowerCase();
}
// If not a utility function, try to get from variables
@@ -136,7 +190,7 @@ export function processVariables(
): Record<string, string> {
const variables: Record<string, string> = {};
// First pass: Process variables that don't depend on other variables
// First pass: Process some variables that don't depend on other variables
for (const [key, value] of Object.entries(template.variables)) {
if (typeof value !== "string") continue;
@@ -150,6 +204,8 @@ export function processVariables(
const match = value.match(/\${password:(\d+)}/);
const length = match?.[1] ? Number.parseInt(match[1], 10) : 16;
variables[key] = generatePassword(length);
} else if (value === "${hash}") {
variables[key] = generateHash();
} else if (value.startsWith("${hash:")) {
const match = value.match(/\${hash:(\d+)}/);
const length = match?.[1] ? Number.parseInt(match[1], 10) : 8;
@@ -175,7 +231,14 @@ export function processDomains(
variables: Record<string, string>,
schema: Schema,
): Template["domains"] {
if (!template?.config?.domains) return [];
if (
!template?.config?.domains ||
template.config.domains.length === 0 ||
template.config.domains.every((domain) => !domain.serviceName)
) {
return [];
}
return template?.config?.domains?.map((domain: DomainConfig) => ({
...domain,
host: domain.host
@@ -192,7 +255,9 @@ export function processEnvVars(
variables: Record<string, string>,
schema: Schema,
): Template["envs"] {
if (!template?.config?.env) return [];
if (!template?.config?.env || Object.keys(template.config.env).length === 0) {
return [];
}
// Handle array of env vars
if (Array.isArray(template.config.env)) {
@@ -200,17 +265,27 @@ export function processEnvVars(
if (typeof env === "string") {
return processValue(env, variables, schema);
}
return env;
// If it's an object, assume it is a key-value pair
if (typeof env === "object" && env !== null) {
const keys = Object.keys(env);
if (keys.length > 0) {
const key = keys[0];
return `${key}=${env[key as keyof typeof env]}`;
}
}
// For primitive values (boolean, number)
return String(env);
});
}
// Handle object of env vars
return Object.entries(template.config.env).map(
([key, value]: [string, string]) => {
return Object.entries(template.config.env).map(([key, value]) => {
if (typeof value === "string") {
const processedValue = processValue(value, variables, schema);
return `${key}=${processedValue}`;
},
);
}
return `${key}=${value}`;
});
}
/**
@@ -221,7 +296,13 @@ export function processMounts(
variables: Record<string, string>,
schema: Schema,
): Template["mounts"] {
if (!template?.config?.mounts) return [];
if (
!template?.config?.mounts ||
template.config.mounts.length === 0 ||
template.config.mounts.every((mount) => !mount.filePath && !mount.content)
) {
return [];
}
return template?.config?.mounts?.map((mount: MountConfig) => ({
filePath: processValue(mount.filePath, variables, schema),

View File

@@ -1,8 +1,8 @@
import { paths } from "@dokploy/server/constants";
import { execAsync } from "../process/execAsync";
import { findAdmin } from "@dokploy/server/services/admin";
import { updateUser } from "@dokploy/server/services/user";
import { scheduleJob, scheduledJobs } from "node-schedule";
import { execAsync } from "../process/execAsync";
const LOG_CLEANUP_JOB_NAME = "access-log-cleanup";

View File

@@ -8,23 +8,29 @@ import {
cleanUpUnusedImages,
} from "../docker/utils";
import { sendDockerCleanupNotifications } from "../notifications/docker-cleanup";
import { runMariadbBackup } from "./mariadb";
import { runMongoBackup } from "./mongo";
import { runMySqlBackup } from "./mysql";
import { runPostgresBackup } from "./postgres";
import { findAdmin } from "../../services/admin";
import { getS3Credentials } from "./utils";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getS3Credentials, scheduleBackup } from "./utils";
import type { BackupSchedule } from "@dokploy/server/services/backup";
import { startLogCleanup } from "../access-log/handler";
import { member } from "@dokploy/server/db/schema";
import { eq } from "drizzle-orm";
export const initCronJobs = async () => {
console.log("Setting up cron jobs....");
const admin = await findAdmin();
const admin = await db.query.member.findFirst({
where: eq(member.role, "owner"),
with: {
user: true,
},
});
if (admin?.user.enableDockerCleanup) {
if (!admin) {
return;
}
if (admin.user.enableDockerCleanup) {
scheduleJob("docker-cleanup", "0 0 * * *", async () => {
console.log(
`Docker Cleanup ${new Date().toLocaleString()}] Running docker cleanup`,
@@ -56,122 +62,27 @@ export const initCronJobs = async () => {
}
}
const pgs = await db.query.postgres.findMany({
const backups = await db.query.backups.findMany({
with: {
backups: {
with: {
destination: true,
postgres: true,
mariadb: true,
mysql: true,
mongo: true,
},
},
},
});
for (const pg of pgs) {
for (const backup of pg.backups) {
const { schedule, backupId, enabled, database } = backup;
if (enabled) {
console.log(
`[Backup] Postgres DB ${pg.name} for ${database} Activated`,
);
scheduleJob(backupId, schedule, async () => {
console.log(
`PG-SERVER[${new Date().toLocaleString()}] Running Backup ${backupId}`,
);
runPostgresBackup(pg, backup);
});
}
}
}
const mariadbs = await db.query.mariadb.findMany({
with: {
backups: {
with: {
destination: true,
postgres: true,
mariadb: true,
mysql: true,
mongo: true,
},
},
destination: true,
postgres: true,
mariadb: true,
mysql: true,
mongo: true,
user: true,
},
});
for (const maria of mariadbs) {
for (const backup of maria.backups) {
const { schedule, backupId, enabled, database } = backup;
if (enabled) {
console.log(
`[Backup] MariaDB DB ${maria.name} for ${database} Activated`,
);
scheduleJob(backupId, schedule, async () => {
console.log(
`MARIADB-SERVER[${new Date().toLocaleString()}] Running Backup ${backupId}`,
);
await runMariadbBackup(maria, backup);
});
}
}
}
const mongodbs = await db.query.mongo.findMany({
with: {
backups: {
with: {
destination: true,
postgres: true,
mariadb: true,
mysql: true,
mongo: true,
},
},
},
});
for (const mongo of mongodbs) {
for (const backup of mongo.backups) {
const { schedule, backupId, enabled } = backup;
if (enabled) {
console.log(`[Backup] MongoDB DB ${mongo.name} Activated`);
scheduleJob(backupId, schedule, async () => {
console.log(
`MONGO-SERVER[${new Date().toLocaleString()}] Running Backup ${backupId}`,
);
await runMongoBackup(mongo, backup);
});
}
}
}
const mysqls = await db.query.mysql.findMany({
with: {
backups: {
with: {
destination: true,
postgres: true,
mariadb: true,
mysql: true,
mongo: true,
},
},
},
});
for (const mysql of mysqls) {
for (const backup of mysql.backups) {
const { schedule, backupId, enabled } = backup;
if (enabled) {
console.log(`[Backup] MySQL DB ${mysql.name} Activated`);
scheduleJob(backupId, schedule, async () => {
console.log(
`MYSQL-SERVER[${new Date().toLocaleString()}] Running Backup ${backupId}`,
);
await runMySqlBackup(mysql, backup);
});
for (const backup of backups) {
try {
if (backup.enabled) {
scheduleBackup(backup);
}
console.log(
`[Backup] ${backup.databaseType} Enabled with cron: [${backup.schedule}]`,
);
} catch (error) {
console.error(`[Backup] ${backup.databaseType} Error`, error);
}
}
@@ -195,8 +106,8 @@ export const keepLatestNBackups = async (
backup.prefix,
);
// --include "*.sql.gz" ensures nothing else other than the db backup files are touched by rclone
const rcloneList = `rclone lsf ${rcloneFlags.join(" ")} --include "*.sql.gz" ${backupFilesPath}`;
// --include "*.sql.gz" or "*.zip" ensures nothing else other than the dokploy backup files are touched by rclone
const rcloneList = `rclone lsf ${rcloneFlags.join(" ")} --include "*${backup.databaseType === "web-server" ? ".zip" : ".sql.gz"}" ${backupFilesPath}`;
// when we pipe the above command with this one, we only get the list of files we want to delete
const sortAndPickUnwantedBackups = `sort -r | tail -n +$((${backup.keepLatestCount}+1)) | xargs -I{}`;
// this command deletes the files

View File

@@ -1,4 +1,3 @@
import path from "node:path";
import type { BackupSchedule } from "@dokploy/server/services/backup";
import type { Mariadb } from "@dokploy/server/services/mariadb";
import { findProjectById } from "@dokploy/server/services/project";
@@ -8,7 +7,7 @@ import {
} from "../docker/utils";
import { sendDatabaseBackupNotifications } from "../notifications/database-backup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getS3Credentials } from "./utils";
import { getS3Credentials, normalizeS3Path } from "./utils";
export const runMariadbBackup = async (
mariadb: Mariadb,
@@ -19,7 +18,7 @@ export const runMariadbBackup = async (
const { prefix, database } = backup;
const destination = backup.destination;
const backupFileName = `${new Date().toISOString()}.sql.gz`;
const bucketDestination = path.join(prefix, backupFileName);
const bucketDestination = `${normalizeS3Path(prefix)}${backupFileName}`;
try {
const rcloneFlags = getS3Credentials(destination);

View File

@@ -1,4 +1,3 @@
import path from "node:path";
import type { BackupSchedule } from "@dokploy/server/services/backup";
import type { Mongo } from "@dokploy/server/services/mongo";
import { findProjectById } from "@dokploy/server/services/project";
@@ -8,7 +7,7 @@ import {
} from "../docker/utils";
import { sendDatabaseBackupNotifications } from "../notifications/database-backup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getS3Credentials } from "./utils";
import { getS3Credentials, normalizeS3Path } from "./utils";
// mongodb://mongo:Bqh7AQl-PRbnBu@localhost:27017/?tls=false&directConnection=true
export const runMongoBackup = async (mongo: Mongo, backup: BackupSchedule) => {
@@ -17,7 +16,7 @@ export const runMongoBackup = async (mongo: Mongo, backup: BackupSchedule) => {
const { prefix, database } = backup;
const destination = backup.destination;
const backupFileName = `${new Date().toISOString()}.dump.gz`;
const bucketDestination = path.join(prefix, backupFileName);
const bucketDestination = `${normalizeS3Path(prefix)}${backupFileName}`;
try {
const rcloneFlags = getS3Credentials(destination);

View File

@@ -1,4 +1,3 @@
import path from "node:path";
import type { BackupSchedule } from "@dokploy/server/services/backup";
import type { MySql } from "@dokploy/server/services/mysql";
import { findProjectById } from "@dokploy/server/services/project";
@@ -8,7 +7,7 @@ import {
} from "../docker/utils";
import { sendDatabaseBackupNotifications } from "../notifications/database-backup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getS3Credentials } from "./utils";
import { getS3Credentials, normalizeS3Path } from "./utils";
export const runMySqlBackup = async (mysql: MySql, backup: BackupSchedule) => {
const { appName, databaseRootPassword, projectId, name } = mysql;
@@ -16,7 +15,7 @@ export const runMySqlBackup = async (mysql: MySql, backup: BackupSchedule) => {
const { prefix, database } = backup;
const destination = backup.destination;
const backupFileName = `${new Date().toISOString()}.sql.gz`;
const bucketDestination = path.join(prefix, backupFileName);
const bucketDestination = `${normalizeS3Path(prefix)}${backupFileName}`;
try {
const rcloneFlags = getS3Credentials(destination);

View File

@@ -1,4 +1,3 @@
import path from "node:path";
import type { BackupSchedule } from "@dokploy/server/services/backup";
import type { Postgres } from "@dokploy/server/services/postgres";
import { findProjectById } from "@dokploy/server/services/project";
@@ -8,7 +7,7 @@ import {
} from "../docker/utils";
import { sendDatabaseBackupNotifications } from "../notifications/database-backup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getS3Credentials } from "./utils";
import { getS3Credentials, normalizeS3Path } from "./utils";
export const runPostgresBackup = async (
postgres: Postgres,
@@ -20,7 +19,7 @@ export const runPostgresBackup = async (
const { prefix, database } = backup;
const destination = backup.destination;
const backupFileName = `${new Date().toISOString()}.sql.gz`;
const bucketDestination = path.join(prefix, backupFileName);
const bucketDestination = `${normalizeS3Path(prefix)}${backupFileName}`;
try {
const rcloneFlags = getS3Credentials(destination);
const rcloneDestination = `:s3:${destination.bucket}/${bucketDestination}`;

View File

@@ -1,11 +1,12 @@
import type { BackupSchedule } from "@dokploy/server/services/backup";
import type { Destination } from "@dokploy/server/services/destination";
import { scheduleJob, scheduledJobs } from "node-schedule";
import { keepLatestNBackups } from ".";
import { runMariadbBackup } from "./mariadb";
import { runMongoBackup } from "./mongo";
import { runMySqlBackup } from "./mysql";
import { runPostgresBackup } from "./postgres";
import { keepLatestNBackups } from ".";
import { runWebServerBackup } from "./web-server";
export const scheduleBackup = (backup: BackupSchedule) => {
const { schedule, backupId, databaseType, postgres, mysql, mongo, mariadb } =
@@ -23,6 +24,9 @@ export const scheduleBackup = (backup: BackupSchedule) => {
} else if (databaseType === "mariadb" && mariadb) {
await runMariadbBackup(mariadb, backup);
await keepLatestNBackups(backup, mariadb.serverId);
} else if (databaseType === "web-server") {
await runWebServerBackup(backup);
await keepLatestNBackups(backup);
}
});
};
@@ -32,6 +36,13 @@ export const removeScheduleBackup = (backupId: string) => {
currentJob?.cancel();
};
export const normalizeS3Path = (prefix: string) => {
	// Strip surrounding whitespace, then any leading/trailing slash runs.
	const stripped = prefix.trim().replace(/^\/+/, "").replace(/\/+$/, "");
	// An empty prefix stays empty; anything else gets exactly one trailing slash.
	return stripped.length === 0 ? "" : `${stripped}/`;
};
export const getS3Credentials = (destination: Destination) => {
const { accessKey, secretAccessKey, region, endpoint, provider } =
destination;

View File

@@ -0,0 +1,55 @@
import type { BackupSchedule } from "@dokploy/server/services/backup";
import { execAsync } from "../process/execAsync";
import { getS3Credentials, normalizeS3Path } from "./utils";
import { findDestinationById } from "@dokploy/server/services/destination";
import { IS_CLOUD, paths } from "@dokploy/server/constants";
import { mkdtemp } from "node:fs/promises";
import { join } from "node:path";
import { tmpdir } from "node:os";
/**
 * Backs up the dokploy web server itself: a pg_dump of the internal
 * PostgreSQL database plus a copy of the dokploy filesystem, zipped and
 * uploaded to the backup's S3 destination via rclone.
 * Returns true on success; rethrows any failure after logging it.
 */
export const runWebServerBackup = async (backup: BackupSchedule) => {
	try {
		// Web-server backups only apply to self-hosted instances.
		if (IS_CLOUD) {
			return;
		}
		const destination = await findDestinationById(backup.destinationId);
		const rcloneFlags = getS3Credentials(destination);
		// ISO timestamp with ':' and '.' replaced so it is safe in file names.
		const timestamp = new Date().toISOString().replace(/[:.]/g, "-");
		const { BASE_PATH } = paths();
		// Unique temp dir per run so concurrent backups cannot collide.
		const tempDir = await mkdtemp(join(tmpdir(), "dokploy-backup-"));
		const backupFileName = `webserver-backup-${timestamp}.zip`;
		const s3Path = `:s3:${destination.bucket}/${normalizeS3Path(backup.prefix)}${backupFileName}`;

		try {
			await execAsync(`mkdir -p ${tempDir}/filesystem`);

			// First get the container ID
			// NOTE(review): assumes exactly one container matches
			// 'dokploy-postgres'; multiple matches would yield a multi-line
			// ID and break the docker exec below — confirm.
			const { stdout: containerId } = await execAsync(
				"docker ps --filter 'name=dokploy-postgres' -q",
			);

			if (!containerId) {
				throw new Error("PostgreSQL container not found");
			}

			// Then run pg_dump with the container ID
			// (custom-format dump, redirected to a file in the temp dir)
			const postgresCommand = `docker exec ${containerId.trim()} pg_dump -v -Fc -U dokploy -d dokploy > '${tempDir}/database.sql'`;
			await execAsync(postgresCommand);

			// Snapshot the dokploy filesystem alongside the database dump.
			await execAsync(`cp -r ${BASE_PATH}/* ${tempDir}/filesystem/`);

			await execAsync(
				`cd ${tempDir} && zip -r ${backupFileName} database.sql filesystem/ > /dev/null 2>&1`,
			);

			// Upload the single zip artifact to the configured S3 destination.
			const uploadCommand = `rclone copyto ${rcloneFlags.join(" ")} "${tempDir}/${backupFileName}" "${s3Path}"`;
			await execAsync(uploadCommand);

			return true;
		} finally {
			// Always clean up the staging directory, success or failure.
			await execAsync(`rm -rf ${tempDir}`);
		}
	} catch (error) {
		console.error("Backup error:", error);
		throw error;
	}
};

View File

@@ -195,6 +195,7 @@ export const mechanizeDockerContainer = async (
try {
const service = docker.getService(appName);
const inspect = await service.inspect();
await service.update({
version: Number.parseInt(inspect.Version.Index),
...settings,
@@ -203,7 +204,7 @@ export const mechanizeDockerContainer = async (
ForceUpdate: inspect.Spec.TaskTemplate.ForceUpdate + 1,
},
});
} catch (_error) {
} catch (_error: unknown) {
await docker.createService(settings);
}
};

View File

@@ -1,11 +1,11 @@
import { createHash } from "node:crypto";
import type { WriteStream } from "node:fs";
import { nanoid } from "nanoid";
import type { ApplicationNested } from ".";
import { prepareEnvironmentVariables } from "../docker/utils";
import { getBuildAppDirectory } from "../filesystem/directory";
import { spawnAsync } from "../process/spawnAsync";
import { execAsync } from "../process/execAsync";
import { nanoid } from "nanoid";
import { createHash } from "node:crypto";
import { spawnAsync } from "../process/spawnAsync";
const calculateSecretsHash = (envVariables: string[]): string => {
const hash = createHash("sha256");
@@ -84,7 +84,7 @@ export const buildRailpack = async (
for (const envVar of envVariables) {
const [key, value] = envVar.split("=");
if (key && value) {
buildArgs.push("--secret", `id=${key},env=${key}`);
buildArgs.push("--secret", `id=${key},env='${key}'`);
env[key] = value;
}
}
@@ -132,7 +132,7 @@ export const getRailpackCommand = (
];
for (const env of envVariables) {
prepareArgs.push("--env", env);
prepareArgs.push("--env", `'${env}'`);
}
// Calculate secrets hash for layer invalidation
@@ -164,7 +164,7 @@ export const getRailpackCommand = (
for (const envVar of envVariables) {
const [key, value] = envVar.split("=");
if (key && value) {
buildArgs.push("--secret", `id=${key},env=${key}`);
buildArgs.push("--secret", `id=${key},env='${key}'`);
exportEnvs.push(`export ${key}=${value}`);
}
}

View File

@@ -25,6 +25,12 @@ export const buildStatic = async (
].join("\n"),
);
createFile(
buildAppDirectory,
".dockerignore",
[".git", ".env", "Dockerfile", ".dockerignore"].join("\n"),
);
await buildCustomDocker(
{
...application,

View File

@@ -1,20 +1,20 @@
import { deployPostgres } from "@dokploy/server/services/postgres";
import { execAsyncRemote } from "../process/execAsync";
import { execAsync } from "../process/execAsync";
import { deployMySql } from "@dokploy/server/services/mysql";
import { deployMariadb } from "@dokploy/server/services/mariadb";
import { deployMongo } from "@dokploy/server/services/mongo";
import { deployRedis } from "@dokploy/server/services/redis";
import { removeService } from "../docker/utils";
import { db } from "@dokploy/server/db";
import {
postgres,
mysql,
mariadb,
mongo,
mysql,
postgres,
redis,
} from "@dokploy/server/db/schema";
import { deployMariadb } from "@dokploy/server/services/mariadb";
import { deployMongo } from "@dokploy/server/services/mongo";
import { deployMySql } from "@dokploy/server/services/mysql";
import { deployPostgres } from "@dokploy/server/services/postgres";
import { deployRedis } from "@dokploy/server/services/redis";
import { eq } from "drizzle-orm";
import { removeService } from "../docker/utils";
import { execAsyncRemote } from "../process/execAsync";
import { execAsync } from "../process/execAsync";
type DatabaseType = "postgres" | "mysql" | "mariadb" | "mongo" | "redis";

View File

@@ -30,12 +30,22 @@ export const addSuffixToVolumesInServices = (
// skip bind mounts and variables (e.g. $PWD)
if (
volumeName?.startsWith(".") ||
volumeName?.startsWith("/") ||
volumeName?.startsWith("$")
!volumeName ||
volumeName.startsWith(".") ||
volumeName.startsWith("/") ||
volumeName.startsWith("$")
) {
return volume;
}
// Handle volume paths with subdirectories
const parts = volumeName.split("/");
if (parts.length > 1) {
const baseName = parts[0];
const rest = parts.slice(1).join("/");
return `${baseName}-${suffix}/${rest}:${path}`;
}
return `${volumeName}-${suffix}:${path}`;
}
if (_.isObject(volume) && volume.type === "volume" && volume.source) {

View File

@@ -14,6 +14,10 @@ import {
cloneGitRawRepository,
cloneRawGitRepositoryRemote,
} from "../providers/git";
import {
cloneRawGiteaRepository,
cloneRawGiteaRepositoryRemote,
} from "../providers/gitea";
import {
cloneRawGithubRepository,
cloneRawGithubRepositoryRemote,
@@ -44,6 +48,8 @@ export const cloneCompose = async (compose: Compose) => {
await cloneRawBitbucketRepository(compose);
} else if (compose.sourceType === "git") {
await cloneGitRawRepository(compose);
} else if (compose.sourceType === "gitea") {
await cloneRawGiteaRepository(compose);
} else if (compose.sourceType === "raw") {
await createComposeFileRaw(compose);
}
@@ -58,6 +64,8 @@ export const cloneComposeRemote = async (compose: Compose) => {
await cloneRawBitbucketRepositoryRemote(compose);
} else if (compose.sourceType === "git") {
await cloneRawGitRepositoryRemote(compose);
} else if (compose.sourceType === "gitea") {
await cloneRawGiteaRepositoryRemote(compose);
} else if (compose.sourceType === "raw") {
await createComposeFileRawRemote(compose);
}
@@ -241,6 +249,11 @@ export const addDomainToCompose = async (
labels.unshift("traefik.enable=true");
}
labels.unshift(...httpLabels);
if (!compose.isolatedDeployment) {
if (!labels.includes("traefik.docker.network=dokploy-network")) {
labels.unshift("traefik.docker.network=dokploy-network");
}
}
}
if (!compose.isolatedDeployment) {

View File

@@ -113,6 +113,8 @@ export const getBuildAppDirectory = (application: Application) => {
buildPath = application?.gitlabBuildPath || "";
} else if (sourceType === "bitbucket") {
buildPath = application?.bitbucketBuildPath || "";
} else if (sourceType === "gitea") {
buildPath = application?.giteaBuildPath || "";
} else if (sourceType === "drop") {
buildPath = application?.dropBuildPath || "";
} else if (sourceType === "git") {

View File

@@ -1,9 +1,48 @@
import { exec } from "node:child_process";
import { exec, execFile } from "node:child_process";
import util from "node:util";
import { findServerById } from "@dokploy/server/services/server";
import { Client } from "ssh2";
export const execAsync = util.promisify(exec);
/**
 * Promise wrapper around `execFile` that optionally pipes `options.input`
 * to the child's stdin and resolves with the collected stdout/stderr.
 * Rejects when the child exits non-zero or fails to spawn.
 */
export const execFileAsync = async (
	command: string,
	args: string[],
	options: { input?: string } = {},
): Promise<{ stdout: string; stderr: string }> => {
	// Spawn without a callback so we can stream the output ourselves.
	const proc = execFile(command, args);
	// Feed the optional stdin payload, then close the pipe so the child sees EOF.
	if (options.input && proc.stdin) {
		proc.stdin.write(options.input);
		proc.stdin.end();
	}
	return new Promise((resolve, reject) => {
		const outChunks: string[] = [];
		const errChunks: string[] = [];
		proc.stdout?.on("data", (chunk) => {
			outChunks.push(chunk.toString());
		});
		proc.stderr?.on("data", (chunk) => {
			errChunks.push(chunk.toString());
		});
		proc.on("error", reject);
		proc.on("close", (exitCode) => {
			const stdout = outChunks.join("");
			const stderr = errChunks.join("");
			if (exitCode === 0) {
				resolve({ stdout, stderr });
			} else {
				reject(
					new Error(`Command failed with code ${exitCode}. Stderr: ${stderr}`),
				);
			}
		});
	});
};
export const execAsyncRemote = async (
serverId: string | null,
command: string,

View File

@@ -0,0 +1,457 @@
import { createWriteStream } from "node:fs";
import { join } from "node:path";
import { paths } from "@dokploy/server/constants";
import type { Compose } from "@dokploy/server/services/compose";
import {
type Gitea,
findGiteaById,
updateGitea,
} from "@dokploy/server/services/gitea";
import type { InferResultType } from "@dokploy/server/types/with";
import { TRPCError } from "@trpc/server";
import { recreateDirectory } from "../filesystem/directory";
import { execAsyncRemote } from "../process/execAsync";
import { spawnAsync } from "../process/spawnAsync";
/**
 * Returns a human-readable message for every missing Gitea clone
 * prerequisite (repository, owner, branch). Empty array means clonable.
 */
export const getErrorCloneRequirements = (entity: {
	giteaRepository?: string | null;
	giteaOwner?: string | null;
	giteaBranch?: string | null;
}) => {
	const missing: string[] = [];
	if (!entity.giteaRepository) {
		missing.push("1. Repository not assigned.");
	}
	if (!entity.giteaOwner) {
		missing.push("2. Owner not specified.");
	}
	if (!entity.giteaBranch) {
		missing.push("3. Branch not defined.");
	}
	return missing;
};
/**
 * Ensures the Gitea provider has a usable OAuth access token, refreshing it
 * via the refresh-token grant when it is expired or about to expire.
 * Returns the (possibly refreshed) access token, or null when the provider
 * has no token at all. On any failure it falls back to the currently stored
 * access token rather than throwing.
 */
export const refreshGiteaToken = async (giteaProviderId: string) => {
	try {
		const giteaProvider = await findGiteaById(giteaProviderId);

		// Without full OAuth credentials we cannot refresh; return whatever
		// token is stored (may be null).
		if (
			!giteaProvider?.clientId ||
			!giteaProvider?.clientSecret ||
			!giteaProvider?.refreshToken
		) {
			return giteaProvider?.accessToken || null;
		}

		// Check if token is still valid (add some buffer time, e.g., 5 minutes)
		const currentTimeSeconds = Math.floor(Date.now() / 1000);
		const bufferTimeSeconds = 300; // 5 minutes

		if (
			giteaProvider.expiresAt &&
			giteaProvider.expiresAt > currentTimeSeconds + bufferTimeSeconds &&
			giteaProvider.accessToken
		) {
			// Token is still valid, no need to refresh
			return giteaProvider.accessToken;
		}

		// Token is expired or about to expire, refresh it
		const tokenEndpoint = `${giteaProvider.giteaUrl}/login/oauth/access_token`;

		const params = new URLSearchParams({
			grant_type: "refresh_token",
			refresh_token: giteaProvider.refreshToken,
			client_id: giteaProvider.clientId,
			client_secret: giteaProvider.clientSecret,
		});

		const response = await fetch(tokenEndpoint, {
			method: "POST",
			headers: {
				"Content-Type": "application/x-www-form-urlencoded",
				Accept: "application/json",
			},
			body: params.toString(),
		});

		// Non-2xx refresh response: keep using the stored token instead of failing.
		if (!response.ok) {
			return giteaProvider?.accessToken || null;
		}

		const data = await response.json();
		const { access_token, refresh_token, expires_in } = data;

		if (!access_token) {
			return giteaProvider?.accessToken || null;
		}

		// Persist the new token pair; default lifetime to 1 hour when the
		// server did not report expires_in.
		const expiresAt = Date.now() + (expires_in || 3600) * 1000;
		const expiresAtSeconds = Math.floor(expiresAt / 1000);

		await updateGitea(giteaProviderId, {
			accessToken: access_token,
			refreshToken: refresh_token || giteaProvider.refreshToken,
			expiresAt: expiresAtSeconds,
		});

		return access_token;
	} catch (error) {
		// Best-effort: log and fall back to the stored token so callers can
		// still attempt API calls with whatever credentials exist.
		console.error("Error refreshing Gitea token:", error);
		const giteaProvider = await findGiteaById(giteaProviderId);
		return giteaProvider?.accessToken || null;
	}
};
export type ApplicationWithGitea = InferResultType<
"applications",
{ gitea: true }
>;
export type ComposeWithGitea = InferResultType<"compose", { gitea: true }>;
/**
 * Builds the shell command that clones a Gitea repository on a remote
 * dokploy server, appending progress/errors to `logPath`.
 * Throws TRPCError NOT_FOUND when the entity has no server or no Gitea
 * provider attached. Refreshes the provider token before composing the URL.
 */
export const getGiteaCloneCommand = async (
	entity: ApplicationWithGitea | ComposeWithGitea,
	logPath: string,
	isCompose = false,
) => {
	const {
		appName,
		giteaBranch,
		giteaId,
		giteaOwner,
		giteaRepository,
		serverId,
		gitea,
	} = entity;

	if (!serverId) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Server not found",
		});
	}

	if (!giteaId) {
		// Surface the failure in the remote deployment log before throwing.
		// (Fixed copy-paste error: the message previously said "Gitlab".)
		const command = `
			echo "Error: ❌ Gitea Provider not found" >> ${logPath};
			exit 1;
		`;

		await execAsyncRemote(serverId, command);

		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Gitea Provider not found",
		});
	}

	// Use paths(true) for remote operations
	const { COMPOSE_PATH, APPLICATIONS_PATH } = paths(true);

	// Make sure the access token embedded in the clone URL is fresh.
	await refreshGiteaToken(giteaId);

	const basePath = isCompose ? COMPOSE_PATH : APPLICATIONS_PATH;
	const outputPath = join(basePath, appName, "code");
	// Strip the scheme so the token can be injected as basic credentials.
	const baseUrl = gitea?.giteaUrl.replace(/^https?:\/\//, "");
	const repoClone = `${giteaOwner}/${giteaRepository}.git`;
	const cloneUrl = `https://oauth2:${gitea?.accessToken}@${baseUrl}/${repoClone}`;

	const cloneCommand = `
rm -rf ${outputPath};
mkdir -p ${outputPath};
if ! git clone --branch ${giteaBranch} --depth 1 --recurse-submodules ${cloneUrl} ${outputPath} >> ${logPath} 2>&1; then
	echo "❌ [ERROR] Failed to clone the repository ${repoClone}" >> ${logPath};
	exit 1;
fi
echo "Cloned ${repoClone} to ${outputPath}: ✅" >> ${logPath};
	`;

	return cloneCommand;
};
/**
 * Clones a Gitea repository locally (application or compose build dir),
 * streaming git progress into the log file at `logPath`.
 * Throws TRPCError NOT_FOUND when no Gitea provider is attached or found;
 * rethrows any git failure after logging it to the stream.
 */
export const cloneGiteaRepository = async (
	entity: ApplicationWithGitea | ComposeWithGitea,
	logPath: string,
	isCompose = false,
) => {
	const { APPLICATIONS_PATH, COMPOSE_PATH } = paths();
	const writeStream = createWriteStream(logPath, { flags: "a" });
	const { appName, giteaBranch, giteaId, giteaOwner, giteaRepository } = entity;

	if (!giteaId) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Gitea Provider not found",
		});
	}

	// Refresh before reading the provider so the token we embed is current.
	await refreshGiteaToken(giteaId);

	const giteaProvider = await findGiteaById(giteaId);
	if (!giteaProvider) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Gitea provider not found in the database",
		});
	}

	const basePath = isCompose ? COMPOSE_PATH : APPLICATIONS_PATH;
	const outputPath = join(basePath, appName, "code");
	await recreateDirectory(outputPath);

	const repoClone = `${giteaOwner}/${giteaRepository}.git`;
	// Strip the scheme so the token can be injected as basic credentials.
	const baseUrl = giteaProvider.giteaUrl.replace(/^https?:\/\//, "");
	const cloneUrl = `https://oauth2:${giteaProvider.accessToken}@${baseUrl}/${repoClone}`;

	writeStream.write(`\nCloning Repo ${repoClone} to ${outputPath}...\n`);

	try {
		await spawnAsync(
			"git",
			[
				"clone",
				"--branch",
				giteaBranch!,
				"--depth",
				"1",
				"--recurse-submodules",
				cloneUrl,
				outputPath,
				"--progress",
			],
			(data) => {
				if (writeStream.writable) {
					writeStream.write(data);
				}
			},
		);
		writeStream.write(`\nCloned ${repoClone}: ✅\n`);
	} catch (error) {
		// Fixed typo in the log message ("Clonning" -> "Cloning").
		writeStream.write(`ERROR Cloning: ${error}: ❌`);
		throw error;
	} finally {
		writeStream.end();
	}
};
/**
 * Clones a Gitea repository for a compose service into the local compose
 * directory (no log streaming). Throws TRPCError NOT_FOUND when the compose
 * has no Gitea provider attached or the provider is missing from the DB.
 */
export const cloneRawGiteaRepository = async (entity: Compose) => {
	const { appName, giteaRepository, giteaOwner, giteaBranch, giteaId } = entity;
	const { COMPOSE_PATH } = paths();

	if (!giteaId) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Gitea Provider not found",
		});
	}

	// Refresh before reading the provider so the token we embed is current.
	await refreshGiteaToken(giteaId);

	const giteaProvider = await findGiteaById(giteaId);
	if (!giteaProvider) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Gitea provider not found in the database",
		});
	}

	const basePath = COMPOSE_PATH;
	const outputPath = join(basePath, appName, "code");
	await recreateDirectory(outputPath);

	const repoClone = `${giteaOwner}/${giteaRepository}.git`;
	// Strip the scheme so the token can be injected as basic credentials.
	const baseUrl = giteaProvider.giteaUrl.replace(/^https?:\/\//, "");
	const cloneUrl = `https://oauth2:${giteaProvider.accessToken}@${baseUrl}/${repoClone}`;

	// Removed a no-op `try { ... } catch (e) { throw e; }` wrapper;
	// errors from the clone simply propagate to the caller.
	await spawnAsync("git", [
		"clone",
		"--branch",
		giteaBranch!,
		"--depth",
		"1",
		"--recurse-submodules",
		cloneUrl,
		outputPath,
		"--progress",
	]);
};
/**
 * Clones a Gitea repository for a compose service on a remote dokploy
 * server over SSH. Throws TRPCError NOT_FOUND when the compose has no
 * server or Gitea provider attached, or the provider is missing from the DB.
 *
 * NOTE(review): unlike the local clone paths, this command does not pass
 * --recurse-submodules — confirm whether submodules are needed remotely.
 */
export const cloneRawGiteaRepositoryRemote = async (compose: Compose) => {
	const {
		appName,
		giteaRepository,
		giteaOwner,
		giteaBranch,
		giteaId,
		serverId,
	} = compose;

	if (!serverId) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Server not found",
		});
	}
	if (!giteaId) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Gitea Provider not found",
		});
	}

	const { COMPOSE_PATH } = paths(true);
	const giteaProvider = await findGiteaById(giteaId);
	// Guard added for consistency with the other clone helpers: fail with a
	// clear NOT_FOUND instead of a TypeError when the provider row is gone.
	if (!giteaProvider) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Gitea provider not found in the database",
		});
	}

	const basePath = COMPOSE_PATH;
	const outputPath = join(basePath, appName, "code");
	const repoClone = `${giteaOwner}/${giteaRepository}.git`;
	// Strip the scheme so the token can be injected as basic credentials.
	const baseUrl = giteaProvider.giteaUrl.replace(/^https?:\/\//, "");
	const cloneUrl = `https://oauth2:${giteaProvider.accessToken}@${baseUrl}/${repoClone}`;

	// Removed a no-op `try { ... } catch (e) { throw e; }` wrapper;
	// remote execution errors simply propagate to the caller.
	const command = `
rm -rf ${outputPath};
git clone --branch ${giteaBranch} --depth 1 ${cloneUrl} ${outputPath}
	`;
	await execAsyncRemote(serverId, command);
};
/** True when the provider has both OAuth credentials (client id + secret). */
export const haveGiteaRequirements = (giteaProvider: Gitea) => {
	const hasClientId = Boolean(giteaProvider?.clientId);
	const hasClientSecret = Boolean(giteaProvider?.clientSecret);
	return hasClientId && hasClientSecret;
};
/**
 * Verifies that a Gitea provider is reachable and authorized by listing the
 * user's repositories, and stamps `lastAuthenticatedAt` on success.
 *
 * @param input.giteaId - Id of the Gitea provider record to test.
 * @returns Number of repositories visible to the authenticated user.
 * @throws TRPCError (NOT_FOUND / UNAUTHORIZED) or Error on API failure.
 */
export const testGiteaConnection = async (input: { giteaId: string }) => {
	const { giteaId } = input;

	if (!giteaId) {
		throw new Error("Gitea provider not found");
	}

	// Confirm the provider exists before attempting a token refresh.
	const giteaProvider = await findGiteaById(giteaId);
	if (!giteaProvider) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Gitea provider not found in the database",
		});
	}

	await refreshGiteaToken(giteaId);

	// Re-read the provider: the refresh above may have rotated the token.
	const provider = await findGiteaById(giteaId);
	if (!provider || !provider.accessToken) {
		throw new TRPCError({
			code: "UNAUTHORIZED",
			message: "No access token available. Please authorize with Gitea.",
		});
	}

	const baseUrl = provider.giteaUrl.replace(/\/+$/, "");
	const url = `${baseUrl}/api/v1/user/repos`;
	const response = await fetch(url, {
		headers: {
			Accept: "application/json",
			Authorization: `token ${provider.accessToken}`,
		},
	});

	if (!response.ok) {
		throw new Error(
			`Failed to connect to Gitea API: ${response.status} ${response.statusText}`,
		);
	}

	const repos = await response.json();

	await updateGitea(giteaId, {
		lastAuthenticatedAt: Math.floor(Date.now() / 1000),
	});

	// Guard against a non-array payload: `repos.length` would be undefined.
	return Array.isArray(repos) ? repos.length : 0;
};
/**
 * Lists the repositories visible to the provider's authenticated user.
 *
 * @param giteaId - Gitea provider id; returns [] when omitted.
 * @returns Repositories normalized to `{ id, name, url, owner.username }`.
 * @throws TRPCError (NOT_FOUND) when the provider record is missing,
 *   (BAD_REQUEST) when the Gitea API call fails.
 */
export const getGiteaRepositories = async (giteaId?: string) => {
	// Nothing to list without a provider id.
	if (!giteaId) {
		return [];
	}

	await refreshGiteaToken(giteaId);

	const giteaProvider = await findGiteaById(giteaId);
	// findGiteaById can come back empty (see testGiteaConnection); guard
	// before dereferencing giteaUrl/accessToken.
	if (!giteaProvider) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Gitea provider not found in the database",
		});
	}

	const baseUrl = giteaProvider.giteaUrl.replace(/\/+$/, "");
	const url = `${baseUrl}/api/v1/user/repos`;
	const response = await fetch(url, {
		headers: {
			Accept: "application/json",
			Authorization: `token ${giteaProvider.accessToken}`,
		},
	});

	if (!response.ok) {
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: `Failed to fetch repositories: ${response.statusText}`,
		});
	}

	const repositories = await response.json();

	// Normalize the Gitea payload to the shape the UI expects.
	const mappedRepositories = repositories.map((repo: any) => ({
		id: repo.id,
		name: repo.name,
		url: repo.full_name,
		owner: {
			username: repo.owner.login,
		},
	}));

	return mappedRepositories;
};
export const getGiteaBranches = async (input: {
giteaId?: string;
owner: string;
repo: string;
}) => {
if (!input.giteaId) {
return [];
}
await refreshGiteaToken(input.giteaId);
const giteaProvider = await findGiteaById(input.giteaId);
const baseUrl = giteaProvider.giteaUrl.replace(/\/+$/, "");
const url = `${baseUrl}/api/v1/repos/${input.owner}/${input.repo}/branches`;
const response = await fetch(url, {
headers: {
Accept: "application/json",
Authorization: `token ${giteaProvider.accessToken}`,
},
});
if (!response.ok) {
throw new Error(`Failed to fetch branches: ${response.statusText}`);
}
const branches = await response.json();
return branches.map((branch: any) => ({
id: branch.name,
name: branch.name,
commit: {
id: branch.commit.id,
},
})) as {
id: string;
name: string;
commit: {
id: string;
};
}[];
};

View File

@@ -264,7 +264,11 @@ export const getGitlabRepositories = async (gitlabId?: string) => {
const groupName = gitlabProvider.groupName?.toLowerCase();
if (groupName) {
return full_path.toLowerCase().includes(groupName) && kind === "group";
const isIncluded = groupName
.split(",")
.some((name) => full_path.toLowerCase().includes(name));
return isIncluded && kind === "group";
}
return kind === "user";
});
@@ -431,7 +435,9 @@ export const testGitlabConnection = async (
const { full_path, kind } = repo.namespace;
if (groupName) {
return full_path.toLowerCase().includes(groupName) && kind === "group";
return groupName
.split(",")
.some((name) => full_path.toLowerCase().includes(name));
}
return kind === "user";
});

View File

@@ -2,3 +2,4 @@ export { restorePostgresBackup } from "./postgres";
export { restoreMySqlBackup } from "./mysql";
export { restoreMariadbBackup } from "./mariadb";
export { restoreMongoBackup } from "./mongo";
export { restoreWebServerBackup } from "./web-server";

View File

@@ -1,11 +1,11 @@
import type { Mariadb } from "@dokploy/server/services/mariadb";
import type { Destination } from "@dokploy/server/services/destination";
import type { Mariadb } from "@dokploy/server/services/mariadb";
import { getS3Credentials } from "../backups/utils";
import {
getRemoteServiceContainer,
getServiceContainer,
} from "../docker/utils";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getS3Credentials } from "../backups/utils";
export const restoreMariadbBackup = async (
mariadb: Mariadb,

View File

@@ -1,11 +1,11 @@
import type { Mongo } from "@dokploy/server/services/mongo";
import type { Destination } from "@dokploy/server/services/destination";
import type { Mongo } from "@dokploy/server/services/mongo";
import { getS3Credentials } from "../backups/utils";
import {
getRemoteServiceContainer,
getServiceContainer,
} from "../docker/utils";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getS3Credentials } from "../backups/utils";
export const restoreMongoBackup = async (
mongo: Mongo,

View File

@@ -1,11 +1,11 @@
import type { MySql } from "@dokploy/server/services/mysql";
import type { Destination } from "@dokploy/server/services/destination";
import type { MySql } from "@dokploy/server/services/mysql";
import { getS3Credentials } from "../backups/utils";
import {
getRemoteServiceContainer,
getServiceContainer,
} from "../docker/utils";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getS3Credentials } from "../backups/utils";
export const restoreMySqlBackup = async (
mysql: MySql,

View File

@@ -1,11 +1,11 @@
import type { Postgres } from "@dokploy/server/services/postgres";
import type { Destination } from "@dokploy/server/services/destination";
import type { Postgres } from "@dokploy/server/services/postgres";
import { getS3Credentials } from "../backups/utils";
import {
getRemoteServiceContainer,
getServiceContainer,
} from "../docker/utils";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getS3Credentials } from "../backups/utils";
export const restorePostgresBackup = async (
postgres: Postgres,

View File

@@ -0,0 +1,143 @@
import type { Destination } from "@dokploy/server/services/destination";
import { getS3Credentials } from "../backups/utils";
import { execAsync } from "../process/execAsync";
import { paths, IS_CLOUD } from "@dokploy/server/constants";
import { mkdtemp } from "node:fs/promises";
import { join } from "node:path";
import { tmpdir } from "node:os";
/**
 * Restores a full Dokploy web-server backup (filesystem + Postgres database)
 * from an S3-compatible destination. No-op when running in cloud mode.
 *
 * The backup is expected to be a zip archive in the destination bucket
 * containing a `filesystem/` tree and a `database.sql` (optionally gzipped).
 *
 * @param destination - S3 destination holding the backup (bucket + credentials).
 * @param backupFile - Name/path of the zip archive inside the bucket.
 * @param emit - Callback used to stream human-readable progress logs.
 * @throws Re-throws any failure after logging it via `emit`.
 */
export const restoreWebServerBackup = async (
	destination: Destination,
	backupFile: string,
	emit: (log: string) => void,
) => {
	// Cloud deployments do not restore local state; bail out silently.
	if (IS_CLOUD) {
		return;
	}
	try {
		const rcloneFlags = getS3Credentials(destination);
		const bucketPath = `:s3:${destination.bucket}`;
		const backupPath = `${bucketPath}/${backupFile}`;
		const { BASE_PATH } = paths();

		// Create a temporary directory outside of BASE_PATH
		// (BASE_PATH itself is wiped below, so the workspace must live elsewhere).
		const tempDir = await mkdtemp(join(tmpdir(), "dokploy-restore-"));

		try {
			emit("Starting restore...");
			emit(`Backup path: ${backupPath}`);
			emit(`Temp directory: ${tempDir}`);

			// Create temp directory
			emit("Creating temporary directory...");
			await execAsync(`mkdir -p ${tempDir}`);

			// Download backup from S3
			emit("Downloading backup from S3...");
			await execAsync(
				`rclone copyto ${rcloneFlags.join(" ")} "${backupPath}" "${tempDir}/${backupFile}"`,
			);

			// List files before extraction
			emit("Listing files before extraction...");
			const { stdout: beforeFiles } = await execAsync(`ls -la ${tempDir}`);
			emit(`Files before extraction: ${beforeFiles}`);

			// Extract backup
			emit("Extracting backup...");
			await execAsync(`cd ${tempDir} && unzip ${backupFile} > /dev/null 2>&1`);

			// Restore filesystem first
			emit("Restoring filesystem...");
			emit(`Copying from ${tempDir}/filesystem/* to ${BASE_PATH}/`);

			// First clean the target directory — destructive: BASE_PATH is
			// emptied before the archive contents are copied back in.
			emit("Cleaning target directory...");
			await execAsync(`rm -rf "${BASE_PATH}/"*`);

			// Ensure the target directory exists
			emit("Setting up target directory...");
			await execAsync(`mkdir -p "${BASE_PATH}"`);

			// Copy files preserving permissions (`cp -rp`)
			emit("Copying files...");
			await execAsync(`cp -rp "${tempDir}/filesystem/"* "${BASE_PATH}/"`);

			// Now handle database restore
			emit("Starting database restore...");

			// Check if database.sql.gz exists and decompress it
			// (`|| true` keeps execAsync from throwing when the file is absent).
			const { stdout: hasGzFile } = await execAsync(
				`ls ${tempDir}/database.sql.gz || true`,
			);
			if (hasGzFile.includes("database.sql.gz")) {
				emit("Found compressed database file, decompressing...");
				await execAsync(`cd ${tempDir} && gunzip database.sql.gz`);
			}

			// Verify database file exists
			const { stdout: hasSqlFile } = await execAsync(
				`ls ${tempDir}/database.sql || true`,
			);
			if (!hasSqlFile.includes("database.sql")) {
				throw new Error("Database file not found after extraction");
			}

			// Drop and recreate database: terminate live sessions first so
			// DROP DATABASE cannot fail on active connections.
			emit("Disconnecting all users from database...");
			await execAsync(
				`docker exec $(docker ps --filter "name=dokploy-postgres" -q) psql -U dokploy postgres -c "SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity WHERE pg_stat_activity.datname = 'dokploy' AND pid <> pg_backend_pid();"`,
			);

			emit("Dropping existing database...");
			await execAsync(
				`docker exec $(docker ps --filter "name=dokploy-postgres" -q) psql -U dokploy postgres -c "DROP DATABASE IF EXISTS dokploy;"`,
			);

			emit("Creating fresh database...");
			await execAsync(
				`docker exec $(docker ps --filter "name=dokploy-postgres" -q) psql -U dokploy postgres -c "CREATE DATABASE dokploy;"`,
			);

			// Copy the backup file into the container
			emit("Copying backup file into container...");
			await execAsync(
				`docker cp ${tempDir}/database.sql $(docker ps --filter "name=dokploy-postgres" -q):/tmp/database.sql`,
			);

			// Verify file in container
			emit("Verifying file in container...");
			await execAsync(
				`docker exec $(docker ps --filter "name=dokploy-postgres" -q) ls -l /tmp/database.sql`,
			);

			// Restore from the copied file
			// NOTE(review): pg_restore expects a custom-format dump, not plain
			// SQL — assumes database.sql was produced by `pg_dump -Fc`; confirm
			// against the backup writer.
			emit("Running database restore...");
			await execAsync(
				`docker exec $(docker ps --filter "name=dokploy-postgres" -q) pg_restore -v -U dokploy -d dokploy /tmp/database.sql`,
			);

			// Cleanup the temporary file in the container
			emit("Cleaning up container temp file...");
			await execAsync(
				`docker exec $(docker ps --filter "name=dokploy-postgres" -q) rm /tmp/database.sql`,
			);

			emit("Restore completed successfully!");
		} finally {
			// Cleanup — always remove the local temp workspace, even on failure.
			emit("Cleaning up temporary files...");
			await execAsync(`rm -rf ${tempDir}`);
		}
	} catch (error) {
		console.error(error);
		emit(
			`Error: ${
				error instanceof Error
					? error.message
					: "Error restoring web server backup"
			}`,
		);
		throw error;
	}
};

View File

@@ -3,7 +3,11 @@ import { join } from "node:path";
import { paths } from "@dokploy/server/constants";
import type { User } from "@dokploy/server/services/user";
import { dump, load } from "js-yaml";
import { loadOrCreateConfig, writeTraefikConfig } from "./application";
import {
loadOrCreateConfig,
removeTraefikConfig,
writeTraefikConfig,
} from "./application";
import type { FileConfig } from "./file-types";
import type { MainTraefikConfig } from "./types";
@@ -11,32 +15,62 @@ export const updateServerTraefik = (
user: User | null,
newHost: string | null,
) => {
const { https, certificateType } = user || {};
const appName = "dokploy";
const config: FileConfig = loadOrCreateConfig(appName);
config.http = config.http || { routers: {}, services: {} };
config.http.routers = config.http.routers || {};
config.http.services = config.http.services || {};
const currentRouterConfig = config.http.routers[`${appName}-router-app`];
const currentRouterConfig = config.http.routers[`${appName}-router-app`] || {
rule: `Host(\`${newHost}\`)`,
service: `${appName}-service-app`,
entryPoints: ["web"],
};
config.http.routers[`${appName}-router-app`] = currentRouterConfig;
if (currentRouterConfig && newHost) {
currentRouterConfig.rule = `Host(\`${newHost}\`)`;
config.http.services = {
...config.http.services,
[`${appName}-service-app`]: {
loadBalancer: {
servers: [
{
url: `http://dokploy:${process.env.PORT || 3000}`,
},
],
passHostHeader: true,
},
},
};
if (user?.certificateType === "letsencrypt") {
if (https) {
currentRouterConfig.middlewares = ["redirect-to-https"];
if (certificateType === "letsencrypt") {
config.http.routers[`${appName}-router-app-secure`] = {
...currentRouterConfig,
rule: `Host(\`${newHost}\`)`,
service: `${appName}-service-app`,
entryPoints: ["websecure"],
tls: { certResolver: "letsencrypt" },
};
currentRouterConfig.middlewares = ["redirect-to-https"];
} else {
delete config.http.routers[`${appName}-router-app-secure`];
currentRouterConfig.middlewares = [];
config.http.routers[`${appName}-router-app-secure`] = {
rule: `Host(\`${newHost}\`)`,
service: `${appName}-service-app`,
entryPoints: ["websecure"],
};
}
} else {
delete config.http.routers[`${appName}-router-app-secure`];
currentRouterConfig.middlewares = [];
}
writeTraefikConfig(config, appName);
if (newHost) {
writeTraefikConfig(config, appName);
} else {
removeTraefikConfig(appName);
}
};
export const updateLetsEncryptEmail = (newEmail: string | null) => {