fix: resolved merge conflicts with fork/canary

This commit is contained in:
Jason Parks
2025-03-16 03:02:15 -06:00
502 changed files with 18288 additions and 16475 deletions

View File

@@ -118,6 +118,7 @@ export const applications = pgTable("application", {
description: text("description"),
env: text("env"),
previewEnv: text("previewEnv"),
watchPaths: text("watchPaths").array(),
previewBuildArgs: text("previewBuildArgs"),
previewWildcard: text("previewWildcard"),
previewPort: integer("previewPort").default(3000),
@@ -126,6 +127,7 @@ export const applications = pgTable("application", {
previewCertificateType: certificateType("certificateType")
.notNull()
.default("none"),
previewCustomCertResolver: text("previewCustomCertResolver"),
previewLimit: integer("previewLimit").default(3),
isPreviewDeploymentsActive: boolean("isPreviewDeploymentsActive").default(
false,
@@ -420,7 +422,8 @@ const createSchema = createInsertSchema(applications, {
previewLimit: z.number().optional(),
previewHttps: z.boolean().optional(),
previewPath: z.string().optional(),
previewCertificateType: z.enum(["letsencrypt", "none"]).optional(),
previewCertificateType: z.enum(["letsencrypt", "none", "custom"]).optional(),
watchPaths: z.array(z.string()).optional(),
});
export const apiCreateApplication = createSchema.pick({
@@ -464,6 +467,7 @@ export const apiSaveGithubProvider = createSchema
owner: true,
buildPath: true,
githubId: true,
watchPaths: true,
})
.required();
@@ -477,6 +481,7 @@ export const apiSaveGitlabProvider = createSchema
gitlabId: true,
gitlabProjectId: true,
gitlabPathNamespace: true,
watchPaths: true,
})
.required();
@@ -488,6 +493,7 @@ export const apiSaveBitbucketProvider = createSchema
bitbucketRepository: true,
bitbucketId: true,
applicationId: true,
watchPaths: true,
})
.required();
@@ -520,6 +526,7 @@ export const apiSaveGitProvider = createSchema
applicationId: true,
customGitBuildPath: true,
customGitUrl: true,
watchPaths: true,
})
.required()
.merge(

View File

@@ -2,6 +2,7 @@ import { relations } from "drizzle-orm";
import {
type AnyPgColumn,
boolean,
integer,
pgEnum,
pgTable,
text,
@@ -36,6 +37,8 @@ export const backups = pgTable("backup", {
.notNull()
.references(() => destinations.destinationId, { onDelete: "cascade" }),
keepLatestCount: integer("keepLatestCount"),
databaseType: databaseType("databaseType").notNull(),
postgresId: text("postgresId").references(
(): AnyPgColumn => postgres.postgresId,
@@ -87,6 +90,7 @@ const createSchema = createInsertSchema(backups, {
prefix: z.string().min(1),
database: z.string().min(1),
schedule: z.string(),
keepLatestCount: z.number().optional(),
databaseType: z.enum(["postgres", "mariadb", "mysql", "mongo"]),
postgresId: z.string().optional(),
mariadbId: z.string().optional(),
@@ -99,6 +103,7 @@ export const apiCreateBackup = createSchema.pick({
enabled: true,
prefix: true,
destinationId: true,
keepLatestCount: true,
database: true,
mariadbId: true,
mysqlId: true,
@@ -127,5 +132,6 @@ export const apiUpdateBackup = createSchema
backupId: true,
destinationId: true,
database: true,
keepLatestCount: true,
})
.required();

View File

@@ -83,7 +83,7 @@ export const compose = pgTable("compose", {
createdAt: text("createdAt")
.notNull()
.$defaultFn(() => new Date().toISOString()),
watchPaths: text("watchPaths").array(),
githubId: text("githubId").references(() => github.githubId, {
onDelete: "set null",
}),
@@ -145,6 +145,7 @@ const createSchema = createInsertSchema(compose, {
command: z.string().optional(),
composePath: z.string().min(1),
composeType: z.enum(["docker-compose", "stack"]).optional(),
watchPaths: z.array(z.string()).optional(),
});
export const apiCreateCompose = createSchema.pick({

View File

@@ -41,6 +41,7 @@ export const domains = pgTable("domain", {
composeId: text("composeId").references(() => compose.composeId, {
onDelete: "cascade",
}),
customCertResolver: text("customCertResolver"),
applicationId: text("applicationId").references(
() => applications.applicationId,
{ onDelete: "cascade" },
@@ -76,6 +77,7 @@ export const apiCreateDomain = createSchema.pick({
https: true,
applicationId: true,
certificateType: true,
customCertResolver: true,
composeId: true,
serviceName: true,
domainType: true,
@@ -107,6 +109,7 @@ export const apiUpdateDomain = createSchema
port: true,
https: true,
certificateType: true,
customCertResolver: true,
serviceName: true,
domainType: true,
})

View File

@@ -146,3 +146,9 @@ export const apiUpdateMariaDB = createSchema
mariadbId: z.string().min(1),
})
.omit({ serverId: true });
export const apiRebuildMariadb = createSchema
.pick({
mariadbId: true,
})
.required();

View File

@@ -141,3 +141,9 @@ export const apiResetMongo = createSchema
appName: true,
})
.required();
export const apiRebuildMongo = createSchema
.pick({
mongoId: true,
})
.required();

View File

@@ -144,3 +144,9 @@ export const apiUpdateMySql = createSchema
mysqlId: z.string().min(1),
})
.omit({ serverId: true });
export const apiRebuildMysql = createSchema
.pick({
mysqlId: true,
})
.required();

View File

@@ -140,3 +140,9 @@ export const apiUpdatePostgres = createSchema
postgresId: z.string().min(1),
})
.omit({ serverId: true });
export const apiRebuildPostgres = createSchema
.pick({
postgresId: true,
})
.required();

View File

@@ -133,3 +133,9 @@ export const apiUpdateRedis = createSchema
redisId: z.string().min(1),
})
.omit({ serverId: true });
export const apiRebuildRedis = createSchema
.pick({
redisId: true,
})
.required();

View File

@@ -10,4 +10,5 @@ export const applicationStatus = pgEnum("applicationStatus", [
export const certificateType = pgEnum("certificateType", [
"letsencrypt",
"none",
"custom",
]);

View File

@@ -53,7 +53,7 @@ export const users_temp = pgTable("user_temp", {
letsEncryptEmail: text("letsEncryptEmail"),
sshPrivateKey: text("sshPrivateKey"),
enableDockerCleanup: boolean("enableDockerCleanup").notNull().default(false),
enableLogRotation: boolean("enableLogRotation").notNull().default(false),
logCleanupCron: text("logCleanupCron"),
// Metrics
enablePaidFeatures: boolean("enablePaidFeatures").notNull().default(false),
metricsConfig: jsonb("metricsConfig")
@@ -250,6 +250,12 @@ export const apiReadStatsLogs = z.object({
status: z.string().array().optional(),
search: z.string().optional(),
sort: z.object({ id: z.string(), desc: z.boolean() }).optional(),
dateRange: z
.object({
start: z.string().optional(),
end: z.string().optional(),
})
.optional(),
});
export const apiUpdateWebServerMonitoring = z.object({
@@ -305,4 +311,5 @@ export const apiUpdateUser = createSchema.partial().extend({
}),
})
.optional(),
logCleanupCron: z.string().optional().nullable(),
});

View File

@@ -1,4 +1,4 @@
import { generatePassword } from "@dokploy/server/templates/utils";
import { generatePassword } from "@dokploy/server/templates";
import { faker } from "@faker-js/faker";
import { customAlphabet } from "nanoid";

View File

@@ -10,7 +10,8 @@ export const domain = z
.max(65535, { message: "Port must be 65535 or below" })
.optional(),
https: z.boolean().optional(),
certificateType: z.enum(["letsencrypt", "none"]).optional(),
certificateType: z.enum(["letsencrypt", "none", "custom"]).optional(),
customCertResolver: z.string(),
})
.superRefine((input, ctx) => {
if (input.https && !input.certificateType) {
@@ -20,6 +21,14 @@ export const domain = z
message: "Required",
});
}
if (input.certificateType === "custom" && !input.customCertResolver) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
path: ["customCertResolver"],
message: "Required when certificate type is custom",
});
}
});
export const domainCompose = z
@@ -32,7 +41,8 @@ export const domainCompose = z
.max(65535, { message: "Port must be 65535 or below" })
.optional(),
https: z.boolean().optional(),
certificateType: z.enum(["letsencrypt", "none"]).optional(),
certificateType: z.enum(["letsencrypt", "none", "custom"]).optional(),
customCertResolver: z.string(),
serviceName: z.string().min(1, { message: "Service name is required" }),
})
.superRefine((input, ctx) => {
@@ -43,4 +53,12 @@ export const domainCompose = z
message: "Required",
});
}
if (input.certificateType === "custom" && !input.customCertResolver) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
path: ["customCertResolver"],
message: "Required when certificate type is custom",
});
}
});

View File

@@ -31,7 +31,7 @@ export * from "./services/gitlab";
export * from "./services/gitea";
export * from "./services/server";
export * from "./services/application";
export * from "./utils/databases/rebuild";
export * from "./setup/config-paths";
export * from "./setup/postgres-setup";
export * from "./setup/redis-setup";
@@ -41,13 +41,14 @@ export * from "./setup/setup";
export * from "./setup/traefik-setup";
export * from "./setup/server-validate";
export * from "./setup/server-audit";
export * from "./utils/watch-paths/should-deploy";
export * from "./utils/backups/index";
export * from "./utils/backups/mariadb";
export * from "./utils/backups/mongo";
export * from "./utils/backups/mysql";
export * from "./utils/backups/postgres";
export * from "./utils/backups/utils";
export * from "./templates/processors";
export * from "./utils/notifications/build-error";
export * from "./utils/notifications/build-success";
@@ -118,3 +119,9 @@ export * from "./db/validations/index";
export * from "./utils/gpu-setup";
export * from "./lib/auth";
export {
startLogCleanup,
stopLogCleanup,
getLogCleanupStatus,
} from "./utils/access-log/handler";

View File

@@ -28,6 +28,26 @@ const { handler, api } = betterAuth({
clientSecret: process.env.GOOGLE_CLIENT_SECRET as string,
},
},
...(!IS_CLOUD && {
async trustedOrigins() {
const admin = await db.query.member.findFirst({
where: eq(schema.member.role, "owner"),
with: {
user: true,
},
});
if (admin) {
return [
...(admin.user.serverIp
? [`http://${admin.user.serverIp}:3000`]
: []),
...(admin.user.host ? [`https://${admin.user.host}`] : []),
];
}
return [];
},
}),
emailVerification: {
sendOnSignUp: true,
autoSignInAfterVerification: true,
@@ -117,6 +137,10 @@ const { handler, api } = betterAuth({
},
},
},
session: {
expiresIn: 60 * 60 * 24 * 3,
updateAge: 60 * 60 * 24,
},
user: {
modelName: "users_temp",
additionalFields: {

View File

@@ -461,7 +461,7 @@ export const deployPreviewApplication = async ({
body: `### Dokploy Preview Deployment\n\n${buildingComment}`,
});
application.appName = previewDeployment.appName;
application.env = `${application.previewEnv}\nDOKPLOY_DEPLOY_URL=${previewDeployment?.domain}`;
application.env = `${application.previewEnv}\nDOKPLOY_DEPLOY_URL=${previewDeployment?.domain?.host}`;
application.buildArgs = application.previewBuildArgs;
// const admin = await findUserById(application.project.userId);
@@ -574,7 +574,7 @@ export const deployRemotePreviewApplication = async ({
body: `### Dokploy Preview Deployment\n\n${buildingComment}`,
});
application.appName = previewDeployment.appName;
application.env = `${application.previewEnv}\nDOKPLOY_DEPLOY_URL=${previewDeployment?.domain}`;
application.env = `${application.previewEnv}\nDOKPLOY_DEPLOY_URL=${previewDeployment?.domain?.host}`;
application.buildArgs = application.previewBuildArgs;
if (application.serverId) {

View File

@@ -296,11 +296,11 @@ export const rebuildCompose = async ({
// if (admin.cleanupCacheOnCompose) {
// await cleanupFullDocker(compose?.serverId);
// }
if (compose.serverId) {
await getBuildComposeCommand(compose, deployment.logPath);
} else {
await buildCompose(compose, deployment.logPath);
if (compose.sourceType === "raw") {
await createComposeFile(compose, deployment.logPath);
}
await buildCompose(compose, deployment.logPath);
await updateDeploymentStatus(deployment.deploymentId, "done");
await updateCompose(composeId, {
@@ -446,6 +446,10 @@ export const rebuildRemoteCompose = async ({
// if (admin.cleanupCacheOnCompose) {
// await cleanupFullDocker(compose?.serverId);
// }
if (compose.sourceType === "raw") {
const command = getCreateComposeFileCommand(compose, deployment.logPath);
await execAsyncRemote(compose.serverId, command);
}
if (compose.serverId) {
await getBuildComposeCommand(compose, deployment.logPath);
}

View File

@@ -281,13 +281,19 @@ export const getServiceContainersByAppName = async (
export const getContainersByAppLabel = async (
appName: string,
type: "standalone" | "swarm",
serverId?: string,
) => {
try {
let stdout = "";
let stderr = "";
const command = `docker ps --filter "label=com.docker.swarm.service.name=${appName}" --format 'CONTAINER ID : {{.ID}} | Name: {{.Names}} | State: {{.State}}'`;
const command =
type === "swarm"
? `docker ps --filter "label=com.docker.swarm.service.name=${appName}" --format 'CONTAINER ID : {{.ID}} | Name: {{.Names}} | State: {{.State}}'`
: type === "standalone"
? `docker ps --filter "name=${appName}" --format 'CONTAINER ID : {{.ID}} | Name: {{.Names}} | State: {{.State}}'`
: `docker ps --filter "label=com.docker.compose.project=${appName}" --format 'CONTAINER ID : {{.ID}} | Name: {{.Names}} | State: {{.State}}'`;
if (serverId) {
const result = await execAsyncRemote(serverId, command);
stdout = result.stdout;

View File

@@ -1,5 +1,5 @@
import { db } from "@dokploy/server/db";
import { generateRandomDomain } from "@dokploy/server/templates/utils";
import { generateRandomDomain } from "@dokploy/server/templates";
import { manageDomain } from "@dokploy/server/utils/traefik/domain";
import { TRPCError } from "@trpc/server";
import { eq } from "drizzle-orm";

View File

@@ -5,7 +5,7 @@ import {
mariadb,
} from "@dokploy/server/db/schema";
import { buildAppName } from "@dokploy/server/db/schema";
import { generatePassword } from "@dokploy/server/templates/utils";
import { generatePassword } from "@dokploy/server/templates";
import { buildMariadb } from "@dokploy/server/utils/databases/mariadb";
import { pullImage } from "@dokploy/server/utils/docker/utils";
import { TRPCError } from "@trpc/server";

View File

@@ -1,7 +1,7 @@
import { db } from "@dokploy/server/db";
import { type apiCreateMongo, backups, mongo } from "@dokploy/server/db/schema";
import { buildAppName } from "@dokploy/server/db/schema";
import { generatePassword } from "@dokploy/server/templates/utils";
import { generatePassword } from "@dokploy/server/templates";
import { buildMongo } from "@dokploy/server/utils/databases/mongo";
import { pullImage } from "@dokploy/server/utils/docker/utils";
import { TRPCError } from "@trpc/server";

View File

@@ -1,7 +1,7 @@
import { db } from "@dokploy/server/db";
import { type apiCreateMySql, backups, mysql } from "@dokploy/server/db/schema";
import { buildAppName } from "@dokploy/server/db/schema";
import { generatePassword } from "@dokploy/server/templates/utils";
import { generatePassword } from "@dokploy/server/templates";
import { buildMysql } from "@dokploy/server/utils/databases/mysql";
import { pullImage } from "@dokploy/server/utils/docker/utils";
import { TRPCError } from "@trpc/server";

View File

@@ -5,7 +5,7 @@ import {
postgres,
} from "@dokploy/server/db/schema";
import { buildAppName } from "@dokploy/server/db/schema";
import { generatePassword } from "@dokploy/server/templates/utils";
import { generatePassword } from "@dokploy/server/templates";
import { buildPostgres } from "@dokploy/server/utils/databases/postgres";
import { pullImage } from "@dokploy/server/utils/docker/utils";
import { TRPCError } from "@trpc/server";

View File

@@ -7,7 +7,7 @@ import {
} from "@dokploy/server/db/schema";
import { TRPCError } from "@trpc/server";
import { and, desc, eq } from "drizzle-orm";
import { generatePassword } from "../templates/utils";
import { generatePassword } from "../templates";
import { removeService } from "../utils/docker/utils";
import { removeDirectoryCode } from "../utils/filesystem/directory";
import { authGithub } from "../utils/providers/github";
@@ -204,6 +204,7 @@ export const createPreviewDeployment = async (
port: application.previewPort,
https: application.previewHttps,
certificateType: application.previewCertificateType,
customCertResolver: application.previewCustomCertResolver,
domainType: "preview",
previewDeploymentId: previewDeployment.previewDeploymentId,
});

View File

@@ -1,7 +1,7 @@
import { db } from "@dokploy/server/db";
import { type apiCreateRedis, redis } from "@dokploy/server/db/schema";
import { buildAppName } from "@dokploy/server/db/schema";
import { generatePassword } from "@dokploy/server/templates/utils";
import { generatePassword } from "@dokploy/server/templates";
import { buildRedis } from "@dokploy/server/utils/databases/redis";
import { pullImage } from "@dokploy/server/utils/docker/utils";
import { TRPCError } from "@trpc/server";

View File

@@ -9,6 +9,7 @@ import {
TRAEFIK_PORT,
TRAEFIK_SSL_PORT,
TRAEFIK_VERSION,
TRAEFIK_HTTP3_PORT,
getDefaultMiddlewares,
getDefaultServerTraefikConfig,
} from "@dokploy/server/setup/traefik-setup";
@@ -542,22 +543,28 @@ export const installRClone = () => `
export const createTraefikInstance = () => {
const command = `
# Check if dokpyloy-traefik exists
if docker service ls | grep -q 'dokploy-traefik'; then
if docker service inspect dokploy-traefik > /dev/null 2>&1; then
echo "Migrating Traefik to Standalone..."
docker service rm dokploy-traefik
sleep 8
echo "Traefik migrated to Standalone ✅"
fi
if docker inspect dokploy-traefik > /dev/null 2>&1; then
echo "Traefik already exists ✅"
else
# Create the dokploy-traefik service
# Create the dokploy-traefik container
TRAEFIK_VERSION=${TRAEFIK_VERSION}
docker service create \
docker run -d \
--name dokploy-traefik \
--replicas 1 \
--constraint 'node.role==manager' \
--network dokploy-network \
--mount type=bind,src=/etc/dokploy/traefik/traefik.yml,dst=/etc/traefik/traefik.yml \
--mount type=bind,src=/etc/dokploy/traefik/dynamic,dst=/etc/dokploy/traefik/dynamic \
--mount type=bind,src=/var/run/docker.sock,dst=/var/run/docker.sock \
--label traefik.enable=true \
--publish mode=host,target=${TRAEFIK_SSL_PORT},published=${TRAEFIK_SSL_PORT} \
--publish mode=host,target=${TRAEFIK_PORT},published=${TRAEFIK_PORT} \
--restart unless-stopped \
-v /etc/dokploy/traefik/traefik.yml:/etc/traefik/traefik.yml \
-v /etc/dokploy/traefik/dynamic:/etc/dokploy/traefik/dynamic \
-v /var/run/docker.sock:/var/run/docker.sock \
-p ${TRAEFIK_SSL_PORT}:${TRAEFIK_SSL_PORT} \
-p ${TRAEFIK_PORT}:${TRAEFIK_PORT} \
-p ${TRAEFIK_HTTP3_PORT}:${TRAEFIK_HTTP3_PORT}/udp \
traefik:v$TRAEFIK_VERSION
echo "Traefik version $TRAEFIK_VERSION installed ✅"
fi

View File

@@ -1,9 +1,8 @@
import { chmodSync, existsSync, mkdirSync, writeFileSync } from "node:fs";
import path from "node:path";
import type { ContainerTaskSpec, CreateServiceOptions } from "dockerode";
import type { ContainerCreateOptions } from "dockerode";
import { dump } from "js-yaml";
import { paths } from "../constants";
import { pullImage, pullRemoteImage } from "../utils/docker/utils";
import { getRemoteDocker } from "../utils/servers/remote-docker";
import type { FileConfig } from "../utils/traefik/file-types";
import type { MainTraefikConfig } from "../utils/traefik/types";
@@ -12,6 +11,8 @@ export const TRAEFIK_SSL_PORT =
Number.parseInt(process.env.TRAEFIK_SSL_PORT!, 10) || 443;
export const TRAEFIK_PORT =
Number.parseInt(process.env.TRAEFIK_PORT!, 10) || 80;
export const TRAEFIK_HTTP3_PORT =
Number.parseInt(process.env.TRAEFIK_HTTP3_PORT!, 10) || 443;
export const TRAEFIK_VERSION = process.env.TRAEFIK_VERSION || "3.1.2";
interface TraefikOptions {
@@ -21,8 +22,8 @@ interface TraefikOptions {
additionalPorts?: {
targetPort: number;
publishedPort: number;
publishMode?: "ingress" | "host";
}[];
force?: boolean;
}
export const initializeTraefik = async ({
@@ -30,113 +31,119 @@ export const initializeTraefik = async ({
env,
serverId,
additionalPorts = [],
force = false,
}: TraefikOptions = {}) => {
const { MAIN_TRAEFIK_PATH, DYNAMIC_TRAEFIK_PATH } = paths(!!serverId);
const imageName = `traefik:v${TRAEFIK_VERSION}`;
const containerName = "dokploy-traefik";
const settings: CreateServiceOptions = {
Name: containerName,
TaskTemplate: {
ContainerSpec: {
Image: imageName,
Env: env,
Mounts: [
{
Type: "bind",
Source: `${MAIN_TRAEFIK_PATH}/traefik.yml`,
Target: "/etc/traefik/traefik.yml",
},
{
Type: "bind",
Source: DYNAMIC_TRAEFIK_PATH,
Target: "/etc/dokploy/traefik/dynamic",
},
{
Type: "bind",
Source: "/var/run/docker.sock",
Target: "/var/run/docker.sock",
},
],
},
Networks: [{ Target: "dokploy-network" }],
Placement: {
Constraints: ["node.role==manager"],
},
},
Mode: {
Replicated: {
Replicas: 1,
},
},
EndpointSpec: {
Ports: [
{
TargetPort: 443,
PublishedPort: TRAEFIK_SSL_PORT,
PublishMode: "host",
},
{
TargetPort: 80,
PublishedPort: TRAEFIK_PORT,
PublishMode: "host",
},
...(enableDashboard
? [
{
TargetPort: 8080,
PublishedPort: 8080,
PublishMode: "host" as const,
},
]
: []),
...additionalPorts.map((port) => ({
TargetPort: port.targetPort,
PublishedPort: port.publishedPort,
PublishMode: port.publishMode || ("host" as const),
})),
],
},
const exposedPorts: Record<string, {}> = {
[`${TRAEFIK_PORT}/tcp`]: {},
[`${TRAEFIK_SSL_PORT}/tcp`]: {},
[`${TRAEFIK_HTTP3_PORT}/udp`]: {},
};
const portBindings: Record<string, Array<{ HostPort: string }>> = {
[`${TRAEFIK_PORT}/tcp`]: [{ HostPort: TRAEFIK_PORT.toString() }],
[`${TRAEFIK_SSL_PORT}/tcp`]: [{ HostPort: TRAEFIK_SSL_PORT.toString() }],
[`${TRAEFIK_HTTP3_PORT}/udp`]: [
{ HostPort: TRAEFIK_HTTP3_PORT.toString() },
],
};
if (enableDashboard) {
exposedPorts["8080/tcp"] = {};
portBindings["8080/tcp"] = [{ HostPort: "8080" }];
}
for (const port of additionalPorts) {
const portKey = `${port.targetPort}/tcp`;
exposedPorts[portKey] = {};
portBindings[portKey] = [{ HostPort: port.publishedPort.toString() }];
}
const settings: ContainerCreateOptions = {
name: containerName,
Image: imageName,
NetworkingConfig: {
EndpointsConfig: {
"dokploy-network": {},
},
},
ExposedPorts: exposedPorts,
HostConfig: {
RestartPolicy: {
Name: "always",
},
Binds: [
`${MAIN_TRAEFIK_PATH}/traefik.yml:/etc/traefik/traefik.yml`,
`${DYNAMIC_TRAEFIK_PATH}:/etc/dokploy/traefik/dynamic`,
"/var/run/docker.sock:/var/run/docker.sock",
],
PortBindings: portBindings,
},
Env: env,
};
const docker = await getRemoteDocker(serverId);
try {
if (serverId) {
await pullRemoteImage(imageName, serverId);
} else {
await pullImage(imageName);
}
const service = docker.getService(containerName);
const inspect = await service.inspect();
const existingEnv = inspect.Spec.TaskTemplate.ContainerSpec.Env || [];
const updatedEnv = !env ? existingEnv : env;
const updatedSettings = {
...settings,
TaskTemplate: {
...settings.TaskTemplate,
ContainerSpec: {
...(settings?.TaskTemplate as ContainerTaskSpec).ContainerSpec,
Env: updatedEnv,
},
},
};
await service.update({
version: Number.parseInt(inspect.Version.Index),
...updatedSettings,
});
console.log("Traefik Started ✅");
} catch (_) {
try {
await docker.createService(settings);
} catch (error: any) {
if (error?.statusCode !== 409) {
throw error;
const service = docker.getService("dokploy-traefik");
await service?.remove({ force: true });
let attempts = 0;
const maxAttempts = 5;
while (attempts < maxAttempts) {
try {
await docker.listServices({
filters: { name: ["dokploy-traefik"] },
});
console.log("Waiting for service cleanup...");
await new Promise((resolve) => setTimeout(resolve, 5000));
attempts++;
} catch (e) {
break;
}
}
console.log("Traefik service already exists, continuing...");
} catch (err) {
console.log("No existing service to remove");
}
console.log("Traefik Not Found: Starting ✅");
// Then try to remove any existing container
const container = docker.getContainer(containerName);
try {
const inspect = await container.inspect();
if (inspect.State.Status === "running" && !force) {
console.log("Traefik already running");
return;
}
await container.remove({ force: true });
await new Promise((resolve) => setTimeout(resolve, 5000));
} catch (error) {
console.log("No existing container to remove");
}
// Create and start the new container
try {
await docker.createContainer(settings);
const newContainer = docker.getContainer(containerName);
await newContainer.start();
console.log("Traefik container started successfully");
} catch (error: any) {
if (error?.json?.message?.includes("port is already allocated")) {
console.log("Ports still in use, waiting longer for cleanup...");
await new Promise((resolve) => setTimeout(resolve, 10000));
// Try one more time
await docker.createContainer(settings);
const newContainer = docker.getContainer(containerName);
await newContainer.start();
console.log("Traefik container started successfully after retry");
}
}
} catch (error) {
console.error("Failed to initialize Traefik:", error);
throw error;
}
};
@@ -212,6 +219,9 @@ export const getDefaultTraefikConfig = () => {
},
websecure: {
address: `:${TRAEFIK_SSL_PORT}`,
http3: {
advertisedPort: TRAEFIK_HTTP3_PORT,
},
...(process.env.NODE_ENV === "production" && {
http: {
tls: {
@@ -267,6 +277,9 @@ export const getDefaultServerTraefikConfig = () => {
},
websecure: {
address: `:${TRAEFIK_SSL_PORT}`,
http3: {
advertisedPort: TRAEFIK_HTTP3_PORT,
},
http: {
tls: {
certResolver: "letsencrypt",

View File

@@ -0,0 +1,109 @@
import { load } from "js-yaml";
/**
 * Complete template interface that includes both metadata and configuration
 */
export interface CompleteTemplate {
	// Catalog information describing the template itself.
	metadata: {
		id: string;
		name: string;
		description: string;
		tags: string[];
		version: string;
		logo: string;
		// Related links; only the GitHub URL is required.
		links: {
			github: string;
			website?: string;
			docs?: string;
		};
	};
	// Variable declarations as raw strings (name -> value expression).
	variables: {
		[key: string]: string;
	};
	// Deployment configuration carried by the template.
	config: {
		// Domain entries for the template's services.
		domains: Array<{
			serviceName: string;
			port: number;
			path?: string;
			host?: string;
		}>;
		// Environment variables for the deployment.
		env: Record<string, string>;
		// Optional files to create (destination path + literal content).
		mounts?: Array<{
			filePath: string;
			content: string;
		}>;
	};
}
/**
 * Shape of one entry in the remote registry's meta.json template listing
 * (see fetchTemplatesList below).
 */
interface TemplateMetadata {
	id: string;
	name: string;
	description: string;
	version: string;
	logo: string;
	// Related links; only the GitHub URL is required.
	links: {
		github: string;
		website?: string;
		docs?: string;
	};
	tags: string[];
}
/**
 * Fetches the list of available templates from the registry's meta.json.
 * Logs and rethrows any network or HTTP failure.
 */
export async function fetchTemplatesList(
	baseUrl = "https://templates.dokploy.com",
): Promise<TemplateMetadata[]> {
	try {
		const res = await fetch(`${baseUrl}/meta.json`);
		if (!res.ok) {
			throw new Error(`Failed to fetch templates: ${res.statusText}`);
		}
		const raw = (await res.json()) as TemplateMetadata[];
		// Re-map each entry so that only the known metadata fields survive;
		// any extra properties in the payload are dropped.
		return raw.map(({ id, name, description, version, logo, links, tags }) => ({
			id,
			name,
			description,
			version,
			logo,
			links,
			tags,
		}));
	} catch (error) {
		console.error("Error fetching templates list:", error);
		throw error;
	}
}
/**
 * Fetches a specific template's two files (template.yml and
 * docker-compose.yml) from the registry, parsing the former as YAML.
 * Logs and rethrows on any failure.
 */
export async function fetchTemplateFiles(
	templateId: string,
	baseUrl = "https://templates.dokploy.com",
): Promise<{ config: CompleteTemplate; dockerCompose: string }> {
	const templateUrl = `${baseUrl}/blueprints/${templateId}/template.yml`;
	const composeUrl = `${baseUrl}/blueprints/${templateId}/docker-compose.yml`;
	try {
		// Both files are requested in parallel.
		const [templateYmlResponse, dockerComposeResponse] = await Promise.all([
			fetch(templateUrl),
			fetch(composeUrl),
		]);
		if (!templateYmlResponse.ok || !dockerComposeResponse.ok) {
			throw new Error("Template files not found");
		}
		const [templateYml, dockerCompose] = await Promise.all([
			templateYmlResponse.text(),
			dockerComposeResponse.text(),
		]);
		return { config: load(templateYml) as CompleteTemplate, dockerCompose };
	} catch (error) {
		console.error(`Error fetching template ${templateId}:`, error);
		throw error;
	}
}

View File

@@ -0,0 +1,120 @@
import { randomBytes } from "node:crypto";
import { existsSync } from "node:fs";
import { mkdir, readFile, writeFile } from "node:fs/promises";
import { join } from "node:path";
import type { Domain } from "@dokploy/server/services/domain";
import { TRPCError } from "@trpc/server";
import { fetchTemplateFiles } from "./github";
// Inputs used to derive a random *.traefik.me domain (see generateRandomDomain).
export interface Schema {
	serverIp: string;
	projectName: string;
}

// Template domain entry: the base Domain fields plus an optional URL path.
export type DomainSchema = Pick<Domain, "host" | "port" | "serviceName"> & {
	path?: string;
};

// Processed template output: env entries, files to mount, domains to create.
export interface Template {
	envs: string[];
	mounts: Array<{
		filePath: string;
		content: string;
	}>;
	domains: DomainSchema[];
}
/**
 * Builds a pseudo-random *.traefik.me host from the project name, a 6-char
 * random hex suffix and, when non-empty, the dash-separated server IP.
 */
export const generateRandomDomain = ({
	serverIp,
	projectName,
}: Schema): string => {
	const suffix = randomBytes(3).toString("hex");
	const ipSlug = serverIp.split(".").join("-");
	const parts = [`${projectName}-${suffix}`];
	if (ipSlug !== "") {
		parts.push(ipSlug);
	}
	return `${parts.join("-")}.traefik.me`;
};
/**
 * Returns a random lowercase-hex string of exactly `length` characters.
 * Each random byte yields two hex digits, so ceil(length / 2) bytes are
 * drawn and the result is trimmed for odd lengths.
 */
export const generateHash = (length = 8): string => {
	const byteCount = Math.ceil(length / 2);
	const hex = randomBytes(byteCount).toString("hex");
	return hex.slice(0, length);
};
/**
 * Generates a random lowercase alphanumeric password.
 *
 * Uses crypto.randomBytes instead of Math.random: the output is used as a
 * credential, so it must come from a CSPRNG. Rejection sampling keeps the
 * character distribution uniform (no modulo bias).
 *
 * @param quantity desired password length (default 16)
 * @returns a string of `quantity` characters drawn from [a-z0-9]
 */
export const generatePassword = (quantity = 16): string => {
	const characters = "abcdefghijklmnopqrstuvwxyz0123456789";
	// Largest multiple of characters.length that fits in one byte; bytes at
	// or above this limit are discarded so every character is equally likely.
	const limit = 256 - (256 % characters.length);
	let password = "";
	while (password.length < quantity) {
		const byte = randomBytes(1).readUInt8(0);
		if (byte >= limit) continue;
		password += characters.charAt(byte % characters.length);
	}
	return password;
};
/**
 * Generate a random base64 string from N random bytes.
 * @param bytes number of random bytes drawn before base64 encoding (default: 32)
 * @returns the base64 encoding of those random bytes
 */
export function generateBase64(bytes = 32): string {
	const buffer = randomBytes(bytes);
	return buffer.toString("base64");
}
/**
 * Generate a random hex secret suitable for signing JWTs.
 * @param length number of random bytes (hex output is twice as long)
 */
export function generateJwt(length = 256): string {
	const secret = randomBytes(length);
	return secret.toString("hex");
}
/**
 * Reads a template's docker-compose.yml file.
 * First tries to fetch it from GitHub (caching the result under
 * .next/templates/<id>/); if the fetch fails, falls back to the cached copy
 * and throws a NOT_FOUND TRPCError when no cache exists either.
 */
export const readTemplateComposeFile = async (id: string) => {
	const templatePath = join(process.cwd(), ".next", "templates", id);
	const composeFilePath = join(templatePath, "docker-compose.yml");
	try {
		const { dockerCompose } = await fetchTemplateFiles(id);
		// Ensure the cache directory exists, then store a fresh copy for
		// future fallback use.
		if (!existsSync(templatePath)) {
			await mkdir(templatePath, { recursive: true });
		}
		await writeFile(composeFilePath, dockerCompose, "utf8");
		return dockerCompose;
	} catch (error) {
		console.warn(`Failed to fetch template ${id} from GitHub:`, error);
		// Fall back to the previously cached compose file, if present.
		if (existsSync(composeFilePath)) {
			console.warn(`Using cached version of template ${id}`);
			return await readFile(composeFilePath, "utf8");
		}
		console.error(`Error: Template ${id} not found in GitHub or cache`);
		throw new TRPCError({
			code: "NOT_FOUND",
			message: `Template ${id} not found or could not be fetched`,
		});
	}
};
/**
* Loads a template module from GitHub or local cache
* First tries to fetch from GitHub, falls back to local cache if fetch fails
*/

View File

@@ -0,0 +1,247 @@
import type { Schema } from "./index";
import {
generateBase64,
generateHash,
generateJwt,
generatePassword,
generateRandomDomain,
} from "./index";
/**
 * Domain configuration for one exposed service.
 */
interface DomainConfig {
	serviceName: string;
	port: number;
	// Optional URL path.
	path?: string;
	// Optional fixed host; may be absent (presumably generated elsewhere —
	// confirm with the template consumer).
	host?: string;
}

/**
 * Mount configuration: a file to create for the deployment.
 */
interface MountConfig {
	// Destination path of the file.
	filePath: string;
	// Literal file content to write.
	content: string;
}
/**
 * Complete template interface that includes both metadata and configuration
 */
export interface CompleteTemplate {
	// Catalog information describing the template itself.
	metadata: {
		id: string;
		name: string;
		description: string;
		tags: string[];
		version: string;
		logo: string;
		// Related links; only the GitHub URL is required.
		links: {
			github: string;
			website?: string;
			docs?: string;
		};
	};
	// Raw variable declarations (name -> value expression, may contain
	// ${...} placeholders resolved by processValue below).
	variables: Record<string, string>;
	config: {
		domains: DomainConfig[];
		// env may be a key -> value map or a list of entries (presumably
		// "KEY=value" lines — confirm with the consumer).
		env: Record<string, string> | string[];
		mounts?: MountConfig[];
	};
}

/**
 * Processed template output
 */
export interface Template {
	domains: Array<DomainConfig>;
	envs: string[];
	mounts: MountConfig[];
}
/**
 * Process a string value, expanding `${...}` placeholders.
 *
 * Two passes run over the input: the first resolves utility generators
 * (domain, base64[:n], password[:n], hash:n, uuid, timestamp, randomPort,
 * jwt[:n]) and plain variable lookups; the second resolves `${var}`
 * references that appeared inside substituted variable values (one level
 * of nesting). Unknown placeholders are left untouched.
 */
function processValue(
	value: string,
	variables: Record<string, string>,
	schema: Schema,
): string {
	const placeholder = /\${([^}]+)}/g;

	const expand = (match: string, varName: string): string => {
		// Utility generators take precedence over user variables.
		if (varName === "domain") {
			return generateRandomDomain(schema);
		}
		if (varName === "base64") {
			return generateBase64(32);
		}
		if (varName.startsWith("base64:")) {
			const length = Number.parseInt(varName.split(":")[1], 10) || 32;
			return generateBase64(length);
		}
		if (varName === "password") {
			return generatePassword(16);
		}
		if (varName.startsWith("password:")) {
			const length = Number.parseInt(varName.split(":")[1], 10) || 16;
			return generatePassword(length);
		}
		if (varName.startsWith("hash:")) {
			const length = Number.parseInt(varName.split(":")[1], 10) || 8;
			return generateHash(length);
		}
		if (varName === "uuid") {
			return crypto.randomUUID();
		}
		if (varName === "timestamp") {
			return Date.now().toString();
		}
		if (varName === "randomPort") {
			return Math.floor(Math.random() * 65535).toString();
		}
		if (varName === "jwt") {
			return generateJwt();
		}
		if (varName.startsWith("jwt:")) {
			const length = Number.parseInt(varName.split(":")[1], 10) || 256;
			return generateJwt(length);
		}
		// Not a utility function: fall back to user-declared variables.
		return variables[varName] || match;
	};

	// First pass: utilities plus direct variable references.
	const firstPass = value.replace(placeholder, expand);
	// Second pass: placeholders introduced by substituted variable values.
	return firstPass.replace(
		placeholder,
		(match, varName) => variables[varName] || match,
	);
}
/**
 * Resolves the template's variable map in two passes: self-contained
 * generator tokens (domain/base64/password/hash) first, then
 * cross-references between variables (one level deep).
 */
export function processVariables(
  template: CompleteTemplate,
  schema: Schema,
): Record<string, string> {
  const resolved: Record<string, string> = {};
  // Pass 1: expand generator tokens; copy everything else verbatim.
  for (const [name, raw] of Object.entries(template.variables)) {
    if (typeof raw !== "string") continue;
    if (raw === "${domain}") {
      resolved[name] = generateRandomDomain(schema);
      continue;
    }
    if (raw.startsWith("${base64:")) {
      const size = raw.match(/\${base64:(\d+)}/)?.[1];
      resolved[name] = generateBase64(size ? Number.parseInt(size, 10) : 32);
      continue;
    }
    if (raw.startsWith("${password:")) {
      const size = raw.match(/\${password:(\d+)}/)?.[1];
      resolved[name] = generatePassword(size ? Number.parseInt(size, 10) : 16);
      continue;
    }
    if (raw.startsWith("${hash:")) {
      const size = raw.match(/\${hash:(\d+)}/)?.[1];
      resolved[name] = generateHash(size ? Number.parseInt(size, 10) : 8);
      continue;
    }
    resolved[name] = raw;
  }
  // Pass 2: let variables reference each other via ${...} placeholders.
  for (const [name, value] of Object.entries(resolved)) {
    resolved[name] = processValue(value, resolved, schema);
  }
  return resolved;
}
/**
 * Expands the template's domain entries: resolves placeholder hosts and
 * generates a random domain for entries without an explicit host.
 */
export function processDomains(
  template: CompleteTemplate,
  variables: Record<string, string>,
  schema: Schema,
): Template["domains"] {
  const domains = template?.config?.domains ?? [];
  return domains.map((domain: DomainConfig) => {
    const host = domain.host
      ? processValue(domain.host, variables, schema)
      : generateRandomDomain(schema);
    return { ...domain, host };
  });
}
/**
 * Normalizes the template's env section into a list of env-line strings
 * with all placeholders resolved. Accepts either a list of raw
 * "KEY=value" lines or a key/value object.
 */
export function processEnvVars(
  template: CompleteTemplate,
  variables: Record<string, string>,
  schema: Schema,
): Template["envs"] {
  const envSection = template?.config?.env;
  if (!envSection) return [];
  // Array form: each entry is already a full env line.
  if (Array.isArray(envSection)) {
    return envSection.map((entry) =>
      typeof entry === "string"
        ? processValue(entry, variables, schema)
        : entry,
    );
  }
  // Object form: join key and resolved value into "KEY=value" syntax.
  return Object.entries(envSection).map(([key, rawValue]) => {
    const resolved = processValue(rawValue, variables, schema);
    return `${key}=${resolved}`;
  });
}
/**
 * Resolves placeholders in every mount's target path and file content.
 */
export function processMounts(
  template: CompleteTemplate,
  variables: Record<string, string>,
  schema: Schema,
): Template["mounts"] {
  const mounts = template?.config?.mounts ?? [];
  return mounts.map((mount: MountConfig) => ({
    filePath: processValue(mount.filePath, variables, schema),
    content: processValue(mount.content, variables, schema),
  }));
}
/**
 * Runs the full template pipeline: variables first (every other section
 * may reference them), then domains, env lines and mounts.
 */
export function processTemplate(
  template: CompleteTemplate,
  schema: Schema,
): Template {
  const variables = processVariables(template, schema);
  const domains = processDomains(template, variables, schema);
  const envs = processEnvVars(template, variables, schema);
  const mounts = processMounts(template, variables, schema);
  return { domains, envs, mounts };
}

View File

@@ -1,61 +0,0 @@
import { randomBytes } from "node:crypto";
import { readFile } from "node:fs/promises";
import { join } from "node:path";
import type { Domain } from "@dokploy/server/services/domain";
/** Project context used when generating template values. */
export interface Schema {
  // Target server IP; may be an empty string (handled by domain generation).
  serverIp: string;
  projectName: string;
}
/** Subset of a Domain row used by template generation. */
export type DomainSchema = Pick<Domain, "host" | "port" | "serviceName">;
/** Generated artifacts a template can produce. */
export interface Template {
  envs?: string[];
  mounts?: {
    filePath: string;
    content?: string;
  }[];
  domains?: DomainSchema[];
}
/**
 * Builds a unique traefik.me hostname for a project, embedding the
 * server IP (dots replaced by dashes) when one is configured.
 */
export const generateRandomDomain = ({
  serverIp,
  projectName,
}: Schema): string => {
  const suffix = randomBytes(3).toString("hex");
  const ipSlug = serverIp.split(".").join("-");
  const ipPart = ipSlug === "" ? "" : `-${ipSlug}`;
  return `${projectName}-${suffix}${ipPart}.traefik.me`;
};
/**
 * Produces `<projectName>-<hex>` where the hex suffix is `quantity`
 * random bytes (so 2 * quantity hex characters).
 */
export const generateHash = (projectName: string, quantity = 3): string => {
  const suffix = randomBytes(quantity).toString("hex");
  return [projectName, suffix].join("-");
};
/**
 * Generates a random lowercase alphanumeric password.
 *
 * Uses node's CSPRNG (`randomBytes`, already imported in this module)
 * instead of `Math.random`, which is not suitable for credentials. The
 * original also drew from a mixed-case alphabet and then lowercased,
 * which made letters twice as likely as digits; drawing directly from
 * [a-z0-9] keeps the documented output charset with an even spread.
 *
 * @param quantity - Desired password length (default 16).
 * @returns A string of `quantity` characters from [a-z0-9].
 */
export const generatePassword = (quantity = 16): string => {
  const alphabet = "abcdefghijklmnopqrstuvwxyz0123456789";
  const bytes = randomBytes(quantity);
  let password = "";
  for (let i = 0; i < quantity; i++) {
    // Modulo bias (256 % 36) is negligible for generated service passwords.
    password += alphabet[bytes[i] % alphabet.length];
  }
  return password;
};
/**
 * Returns the base64 encoding of `bytes` cryptographically random bytes
 * (default 32).
 */
export const generateBase64 = (bytes = 32): string => {
  const buffer = randomBytes(bytes);
  return buffer.toString("base64");
};
/**
 * Reads the cached docker-compose.yml for a template id from the
 * `.next/templates` directory under the current working directory.
 */
export const readTemplateComposeFile = async (id: string) => {
  const composePath = join(
    process.cwd(),
    ".next",
    "templates",
    id,
    "docker-compose.yml",
  );
  return await readFile(composePath, "utf8");
};

View File

@@ -0,0 +1,44 @@
/**
 * Deployable configuration of a template: variable map, exposed
 * domains, environment values and file mounts.
 */
export interface TemplateConfig {
  variables: Record<string, string>;
  // Each domain routes a compose service/port; host is generated when omitted.
  domains: Array<{
    serviceName: string;
    port: number;
    path?: string;
    host?: string;
  }>;
  env: Record<string, string>;
  // Files to be created inside the service: target path plus content.
  mounts: Array<{
    filePath: string;
    content: string;
  }>;
}
/**
 * Full template definition: catalog/display metadata plus the
 * deployable configuration (domains, env, optional mounts).
 */
export interface Template {
  metadata: {
    id: string;
    name: string;
    description: string;
    version: string;
    logo: string;
    links: {
      github: string;
      website?: string;
      docs?: string;
    };
    tags: string[];
  };
  variables: Record<string, string>;
  config: {
    // Each domain routes a compose service/port; host is generated when omitted.
    domains: Array<{
      serviceName: string;
      port: number;
      path?: string;
      host?: string;
    }>;
    env: Record<string, string>;
    mounts?: Array<{
      filePath: string;
      content: string;
    }>;
  };
}

View File

@@ -1,121 +1,77 @@
import { IS_CLOUD, paths } from "@dokploy/server/constants";
import { type RotatingFileStream, createStream } from "rotating-file-stream";
import { paths } from "@dokploy/server/constants";
import { execAsync } from "../process/execAsync";
import { findAdmin } from "@dokploy/server/services/admin";
import { updateUser } from "@dokploy/server/services/user";
import { scheduleJob, scheduledJobs } from "node-schedule";
class LogRotationManager {
private static instance: LogRotationManager;
private stream: RotatingFileStream | null = null;
const LOG_CLEANUP_JOB_NAME = "access-log-cleanup";
private constructor() {
if (IS_CLOUD) {
return;
}
this.initialize().catch(console.error);
}
public static getInstance(): LogRotationManager {
if (!LogRotationManager.instance) {
LogRotationManager.instance = new LogRotationManager();
}
return LogRotationManager.instance;
}
private async initialize(): Promise<void> {
const isActive = await this.getStateFromDB();
if (isActive) {
await this.activateStream();
}
}
private async getStateFromDB(): Promise<boolean> {
const admin = await findAdmin();
return admin?.user.enableLogRotation ?? false;
}
private async setStateInDB(active: boolean): Promise<void> {
const admin = await findAdmin();
if (!admin) {
return;
}
await updateUser(admin.user.id, {
enableLogRotation: active,
});
}
private async activateStream(): Promise<void> {
export const startLogCleanup = async (
cronExpression = "0 0 * * *",
): Promise<boolean> => {
try {
const { DYNAMIC_TRAEFIK_PATH } = paths();
if (this.stream) {
await this.deactivateStream();
const existingJob = scheduledJobs[LOG_CLEANUP_JOB_NAME];
if (existingJob) {
existingJob.cancel();
}
this.stream = createStream("access.log", {
size: "100M",
interval: "1d",
path: DYNAMIC_TRAEFIK_PATH,
rotate: 6,
compress: "gzip",
});
scheduleJob(LOG_CLEANUP_JOB_NAME, cronExpression, async () => {
try {
await execAsync(
`tail -n 1000 ${DYNAMIC_TRAEFIK_PATH}/access.log > ${DYNAMIC_TRAEFIK_PATH}/access.log.tmp && mv ${DYNAMIC_TRAEFIK_PATH}/access.log.tmp ${DYNAMIC_TRAEFIK_PATH}/access.log`,
);
this.stream.on("rotation", this.handleRotation.bind(this));
}
private async deactivateStream(): Promise<void> {
return new Promise<void>((resolve) => {
if (this.stream) {
this.stream.end(() => {
this.stream = null;
resolve();
});
} else {
resolve();
await execAsync("docker exec dokploy-traefik kill -USR1 1");
} catch (error) {
console.error("Error during log cleanup:", error);
}
});
}
public async activate(): Promise<boolean> {
const currentState = await this.getStateFromDB();
if (currentState) {
return true;
const admin = await findAdmin();
if (admin) {
await updateUser(admin.user.id, {
logCleanupCron: cronExpression,
});
}
await this.setStateInDB(true);
await this.activateStream();
return true;
} catch (_) {
return false;
}
};
public async deactivate(): Promise<boolean> {
console.log("Deactivating log rotation...");
const currentState = await this.getStateFromDB();
if (!currentState) {
console.log("Log rotation is already inactive in DB");
return true;
export const stopLogCleanup = async (): Promise<boolean> => {
try {
const existingJob = scheduledJobs[LOG_CLEANUP_JOB_NAME];
if (existingJob) {
existingJob.cancel();
}
// Update database
const admin = await findAdmin();
if (admin) {
await updateUser(admin.user.id, {
logCleanupCron: null,
});
}
await this.setStateInDB(false);
await this.deactivateStream();
console.log("Log rotation deactivated successfully");
return true;
} catch (error) {
console.error("Error stopping log cleanup:", error);
return false;
}
};
private async handleRotation() {
try {
const status = await this.getStatus();
if (!status) {
await this.deactivateStream();
}
await execAsync(
"docker kill -s USR1 $(docker ps -q --filter name=dokploy-traefik)",
);
console.log("USR1 Signal send to Traefik");
} catch (error) {
console.error("Error sending USR1 Signal to Traefik:", error);
}
}
public async getStatus(): Promise<boolean> {
const dbState = await this.getStateFromDB();
return dbState;
}
}
export const logRotationManager = LogRotationManager.getInstance();
export const getLogCleanupStatus = async (): Promise<{
enabled: boolean;
cronExpression: string | null;
}> => {
const admin = await findAdmin();
const cronExpression = admin?.user.logCleanupCron ?? null;
return {
enabled: cronExpression !== null,
cronExpression,
};
};

View File

@@ -6,14 +6,21 @@ interface HourlyData {
count: number;
}
export function processLogs(logString: string): HourlyData[] {
export function processLogs(
logString: string,
dateRange?: { start?: string; end?: string },
): HourlyData[] {
if (_.isEmpty(logString)) {
return [];
}
const hourlyData = _(logString)
.split("\n")
.compact()
.filter((line) => {
const trimmed = line.trim();
// Check if the line starts with { and ends with } to ensure it's a potential JSON object
return trimmed !== "" && trimmed.startsWith("{") && trimmed.endsWith("}");
})
.map((entry) => {
try {
const log: LogEntry = JSON.parse(entry);
@@ -21,6 +28,20 @@ export function processLogs(logString: string): HourlyData[] {
return null;
}
const date = new Date(log.StartUTC);
if (dateRange?.start || dateRange?.end) {
const logDate = date.getTime();
const start = dateRange?.start
? new Date(dateRange.start).getTime()
: 0;
const end = dateRange?.end
? new Date(dateRange.end).getTime()
: Number.POSITIVE_INFINITY;
if (logDate < start || logDate > end) {
return null;
}
}
return `${date.toISOString().slice(0, 13)}:00:00Z`;
} catch (error) {
console.error("Error parsing log entry:", error);
@@ -51,21 +72,46 @@ export function parseRawConfig(
sort?: SortInfo,
search?: string,
status?: string[],
dateRange?: { start?: string; end?: string },
): { data: LogEntry[]; totalCount: number } {
try {
if (_.isEmpty(rawConfig)) {
return { data: [], totalCount: 0 };
}
// Split logs into chunks to avoid memory issues
let parsedLogs = _(rawConfig)
.split("\n")
.filter((line) => {
const trimmed = line.trim();
return (
trimmed !== "" && trimmed.startsWith("{") && trimmed.endsWith("}")
);
})
.map((line) => {
try {
return JSON.parse(line) as LogEntry;
} catch (error) {
console.error("Error parsing log line:", error);
return null;
}
})
.compact()
.map((line) => JSON.parse(line) as LogEntry)
.value();
parsedLogs = parsedLogs.filter(
(log) => log.ServiceName !== "dokploy-service-app@file",
);
// Apply date range filter if provided
if (dateRange?.start || dateRange?.end) {
parsedLogs = parsedLogs.filter((log) => {
const logDate = new Date(log.StartUTC).getTime();
const start = dateRange?.start
? new Date(dateRange.start).getTime()
: 0;
const end = dateRange?.end
? new Date(dateRange.end).getTime()
: Number.POSITIVE_INFINITY;
return logDate >= start && logDate <= end;
});
}
if (search) {
parsedLogs = parsedLogs.filter((log) =>
@@ -78,6 +124,7 @@ export function parseRawConfig(
status.some((range) => isStatusInRange(log.DownstreamStatus, range)),
);
}
const totalCount = parsedLogs.length;
if (sort) {
@@ -101,6 +148,7 @@ export function parseRawConfig(
throw new Error("Failed to parse rawConfig");
}
}
const isStatusInRange = (status: number, range: string) => {
switch (range) {
case "info":

View File

@@ -1,3 +1,4 @@
import path from "node:path";
import { getAllServers } from "@dokploy/server/services/server";
import { scheduleJob } from "node-schedule";
import { db } from "../../db/index";
@@ -12,6 +13,11 @@ import { runMongoBackup } from "./mongo";
import { runMySqlBackup } from "./mysql";
import { runPostgresBackup } from "./postgres";
import { findAdmin } from "../../services/admin";
import { getS3Credentials } from "./utils";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import type { BackupSchedule } from "@dokploy/server/services/backup";
import { startLogCleanup } from "../access-log/handler";
export const initCronJobs = async () => {
console.log("Setting up cron jobs....");
@@ -168,4 +174,43 @@ export const initCronJobs = async () => {
}
}
}
if (admin?.user.logCleanupCron) {
await startLogCleanup(admin.user.logCleanupCron);
}
};
/**
 * Prunes old backups in the S3 destination so only the newest
 * `backup.keepLatestCount` files remain.
 *
 * A `keepLatestCount` of 0 (how an empty "keep latest" UI field is
 * stored) or null disables pruning entirely. Errors are logged, never
 * thrown, so a failed prune cannot break the backup run itself.
 */
export const keepLatestNBackups = async (
  backup: BackupSchedule,
  serverId?: string | null,
) => {
  if (!backup.keepLatestCount) return;
  try {
    const flags = getS3Credentials(backup.destination).join(" ");
    const remotePath = path.join(
      `:s3:${backup.destination.bucket}`,
      backup.prefix,
    );
    // Only *.sql.gz files are considered so unrelated objects in the
    // bucket are never touched by rclone.
    const listFiles = `rclone lsf ${flags} --include "*.sql.gz" ${remotePath}`;
    // Reverse-sort (newest first) and select everything after the first
    // keepLatestCount entries; xargs feeds each stale name to the delete.
    const selectStale = `sort -r | tail -n +$((${backup.keepLatestCount}+1)) | xargs -I{}`;
    // To test without deleting, add --dry-run before "${remotePath}/{}".
    const deleteFile = `rclone delete ${flags} ${remotePath}/{}`;
    const pruneCommand = `${listFiles} | ${selectStale} ${deleteFile}`;
    if (serverId) {
      await execAsyncRemote(serverId, pruneCommand);
    } else {
      await execAsync(pruneCommand);
    }
  } catch (error) {
    console.error(error);
  }
};

View File

@@ -5,6 +5,7 @@ import { runMariadbBackup } from "./mariadb";
import { runMongoBackup } from "./mongo";
import { runMySqlBackup } from "./mysql";
import { runPostgresBackup } from "./postgres";
import { keepLatestNBackups } from ".";
export const scheduleBackup = (backup: BackupSchedule) => {
const { schedule, backupId, databaseType, postgres, mysql, mongo, mariadb } =
@@ -12,12 +13,16 @@ export const scheduleBackup = (backup: BackupSchedule) => {
scheduleJob(backupId, schedule, async () => {
if (databaseType === "postgres" && postgres) {
await runPostgresBackup(postgres, backup);
await keepLatestNBackups(backup, postgres.serverId);
} else if (databaseType === "mysql" && mysql) {
await runMySqlBackup(mysql, backup);
await keepLatestNBackups(backup, mysql.serverId);
} else if (databaseType === "mongo" && mongo) {
await runMongoBackup(mongo, backup);
await keepLatestNBackups(backup, mongo.serverId);
} else if (databaseType === "mariadb" && mariadb) {
await runMariadbBackup(mariadb, backup);
await keepLatestNBackups(backup, mariadb.serverId);
}
});
};

View File

@@ -17,32 +17,68 @@ export const buildRailpack = async (
);
try {
// Ensure buildkit container is running, create if it doesn't exist
await execAsync(
"docker container inspect buildkit >/dev/null 2>&1 || docker run --rm --privileged -d --name buildkit moby/buildkit",
"docker buildx create --use --name builder-containerd --driver docker-container || true",
);
// Build the application using railpack
const args = ["build", buildAppDirectory, "--name", appName];
await execAsync("docker buildx use builder-containerd");
// Add environment variables
// First prepare the build plan and info
const prepareArgs = [
"prepare",
buildAppDirectory,
"--plan-out",
`${buildAppDirectory}/railpack-plan.json`,
"--info-out",
`${buildAppDirectory}/railpack-info.json`,
];
// Add environment variables to prepare command
for (const env of envVariables) {
args.push("--env", env);
prepareArgs.push("--env", env);
}
// Run prepare command
await spawnAsync("railpack", prepareArgs, (data) => {
if (writeStream.writable) {
writeStream.write(data);
}
});
// Build with BuildKit using the Railpack frontend
const buildArgs = [
"buildx",
"build",
"--build-arg",
"BUILDKIT_SYNTAX=ghcr.io/railwayapp/railpack-frontend:v0.0.55",
"-f",
`${buildAppDirectory}/railpack-plan.json`,
"--output",
`type=docker,name=${appName}`,
];
// Add secrets properly formatted
const env: { [key: string]: string } = {};
for (const envVar of envVariables) {
const [key, value] = envVar.split("=");
if (key && value) {
buildArgs.push("--secret", `id=${key},env=${key}`);
env[key] = value;
}
}
buildArgs.push(buildAppDirectory);
await spawnAsync(
"railpack",
args,
"docker",
buildArgs,
(data) => {
if (writeStream.writable) {
writeStream.write(data);
}
},
{
env: {
...process.env,
BUILDKIT_HOST: "docker-container://buildkit",
},
env: { ...process.env, ...env },
},
);
@@ -63,25 +99,65 @@ export const getRailpackCommand = (
application.project.env,
);
// Build the application using railpack
const args = ["build", buildAppDirectory, "--name", appName];
// Prepare command
const prepareArgs = [
"prepare",
buildAppDirectory,
"--plan-out",
`${buildAppDirectory}/railpack-plan.json`,
"--info-out",
`${buildAppDirectory}/railpack-info.json`,
];
// Add environment variables
for (const env of envVariables) {
args.push("--env", env);
prepareArgs.push("--env", env);
}
const command = `railpack ${args.join(" ")}`;
// Build command
const buildArgs = [
"buildx",
"build",
"--build-arg",
"BUILDKIT_SYNTAX=ghcr.io/railwayapp/railpack-frontend:v0.0.55",
"-f",
`${buildAppDirectory}/railpack-plan.json`,
"--output",
`type=docker,name=${appName}`,
];
// Add secrets properly formatted
const exportEnvs = [];
for (const envVar of envVariables) {
const [key, value] = envVar.split("=");
if (key && value) {
buildArgs.push("--secret", `id=${key},env=${key}`);
exportEnvs.push(`export ${key}=${value}`);
}
}
buildArgs.push(buildAppDirectory);
const bashCommand = `
echo "Building with Railpack..." >> "${logPath}";
docker container inspect buildkit >/dev/null 2>&1 || docker run --rm --privileged -d --name buildkit moby/buildkit;
export BUILDKIT_HOST=docker-container://buildkit;
${command} >> ${logPath} 2>> ${logPath} || {
echo " Railpack build failed" >> ${logPath};
exit 1;
}
echo "✅ Railpack build completed." >> ${logPath};
`;
# Ensure we have a builder with containerd
docker buildx create --use --name builder-containerd --driver docker-container || true
docker buildx use builder-containerd
echo "Preparing Railpack build plan..." >> "${logPath}";
railpack ${prepareArgs.join(" ")} >> ${logPath} 2>> ${logPath} || {
echo "❌ Railpack prepare failed" >> ${logPath};
exit 1;
}
echo "✅ Railpack prepare completed." >> ${logPath};
echo "Building with Railpack frontend..." >> "${logPath}";
# Export environment variables for secrets
${exportEnvs.join("\n")}
docker ${buildArgs.join(" ")} >> ${logPath} 2>> ${logPath} || {
echo "❌ Railpack build failed" >> ${logPath};
exit 1;
}
echo "✅ Railpack build completed." >> ${logPath};
`;
return bashCommand;
};

View File

@@ -31,7 +31,7 @@ export const buildMariadb = async (mariadb: MariadbNested) => {
mounts,
} = mariadb;
const defaultMariadbEnv = `MARIADB_DATABASE=${databaseName}\nMARIADB_USER=${databaseUser}\nMARIADB_PASSWORD=${databasePassword}\nMARIADB_ROOT_PASSWORD=${databaseRootPassword}${
const defaultMariadbEnv = `MARIADB_DATABASE="${databaseName}"\nMARIADB_USER="${databaseUser}"\nMARIADB_PASSWORD="${databasePassword}"\nMARIADB_ROOT_PASSWORD="${databaseRootPassword}"${
env ? `\n${env}` : ""
}`;
const resources = calculateResources({

View File

@@ -77,7 +77,7 @@ fi
${command ?? "wait $MONGOD_PID"}`;
const defaultMongoEnv = `MONGO_INITDB_ROOT_USERNAME=${databaseUser}\nMONGO_INITDB_ROOT_PASSWORD=${databasePassword}${replicaSets ? "\nMONGO_INITDB_DATABASE=admin" : ""}${
const defaultMongoEnv = `MONGO_INITDB_ROOT_USERNAME="${databaseUser}"\nMONGO_INITDB_ROOT_PASSWORD="${databasePassword}"${replicaSets ? "\nMONGO_INITDB_DATABASE=admin" : ""}${
env ? `\n${env}` : ""
}`;

View File

@@ -34,10 +34,10 @@ export const buildMysql = async (mysql: MysqlNested) => {
const defaultMysqlEnv =
databaseUser !== "root"
? `MYSQL_USER=${databaseUser}\nMYSQL_DATABASE=${databaseName}\nMYSQL_PASSWORD=${databasePassword}\nMYSQL_ROOT_PASSWORD=${databaseRootPassword}${
? `MYSQL_USER="${databaseUser}"\nMYSQL_DATABASE="${databaseName}"\nMYSQL_PASSWORD="${databasePassword}"\nMYSQL_ROOT_PASSWORD="${databaseRootPassword}"${
env ? `\n${env}` : ""
}`
: `MYSQL_DATABASE=${databaseName}\nMYSQL_ROOT_PASSWORD=${databaseRootPassword}${
: `MYSQL_DATABASE="${databaseName}"\nMYSQL_ROOT_PASSWORD="${databaseRootPassword}"${
env ? `\n${env}` : ""
}`;
const resources = calculateResources({

View File

@@ -30,7 +30,7 @@ export const buildPostgres = async (postgres: PostgresNested) => {
mounts,
} = postgres;
const defaultPostgresEnv = `POSTGRES_DB=${databaseName}\nPOSTGRES_USER=${databaseUser}\nPOSTGRES_PASSWORD=${databasePassword}${
const defaultPostgresEnv = `POSTGRES_DB="${databaseName}"\nPOSTGRES_USER="${databaseUser}"\nPOSTGRES_PASSWORD="${databasePassword}"${
env ? `\n${env}` : ""
}`;
const resources = calculateResources({

View File

@@ -0,0 +1,99 @@
import { deployPostgres } from "@dokploy/server/services/postgres";
import { execAsyncRemote } from "../process/execAsync";
import { execAsync } from "../process/execAsync";
import { deployMySql } from "@dokploy/server/services/mysql";
import { deployMariadb } from "@dokploy/server/services/mariadb";
import { deployMongo } from "@dokploy/server/services/mongo";
import { deployRedis } from "@dokploy/server/services/redis";
import { removeService } from "../docker/utils";
import { db } from "@dokploy/server/db";
import {
postgres,
mysql,
mariadb,
mongo,
redis,
} from "@dokploy/server/db/schema";
import { eq } from "drizzle-orm";
type DatabaseType = "postgres" | "mysql" | "mariadb" | "mongo" | "redis";
/**
 * Fully recreates a database service: removes the running service,
 * drops its named volumes, then redeploys the database from scratch.
 *
 * @throws Error when no database row exists for the given id/type.
 */
export const rebuildDatabase = async (
  databaseId: string,
  type: DatabaseType,
) => {
  const database = await findDatabaseById(databaseId, type);
  if (!database) {
    throw new Error("Database not found");
  }
  await removeService(database.appName, database.serverId);
  // NOTE(review): fixed 6s pause — presumably gives the orchestrator
  // time to tear the service down before volumes are removed; confirm.
  await new Promise((resolve) => setTimeout(resolve, 6000));
  const volumeMounts = database.mounts.filter(
    (mount) => mount.type === "volume",
  );
  for (const mount of volumeMounts) {
    const command = `docker volume rm ${mount?.volumeName} --force`;
    if (database.serverId) {
      await execAsyncRemote(database.serverId, command);
    } else {
      await execAsync(command);
    }
  }
  switch (type) {
    case "postgres":
      await deployPostgres(databaseId);
      break;
    case "mysql":
      await deployMySql(databaseId);
      break;
    case "mariadb":
      await deployMariadb(databaseId);
      break;
    case "mongo":
      await deployMongo(databaseId);
      break;
    case "redis":
      await deployRedis(databaseId);
      break;
  }
};
/**
 * Looks up a database row (including its mounts) by id for the given
 * engine type. Returns undefined for an unrecognized type or when no
 * row matches.
 */
const findDatabaseById = async (databaseId: string, type: DatabaseType) => {
  switch (type) {
    case "postgres":
      return await db.query.postgres.findFirst({
        where: eq(postgres.postgresId, databaseId),
        with: { mounts: true },
      });
    case "mysql":
      return await db.query.mysql.findFirst({
        where: eq(mysql.mysqlId, databaseId),
        with: { mounts: true },
      });
    case "mariadb":
      return await db.query.mariadb.findFirst({
        where: eq(mariadb.mariadbId, databaseId),
        with: { mounts: true },
      });
    case "mongo":
      return await db.query.mongo.findFirst({
        where: eq(mongo.mongoId, databaseId),
        with: { mounts: true },
      });
    case "redis":
      return await db.query.redis.findFirst({
        where: eq(redis.redisId, databaseId),
        with: { mounts: true },
      });
  }
};

View File

@@ -28,7 +28,7 @@ export const buildRedis = async (redis: RedisNested) => {
mounts,
} = redis;
const defaultRedisEnv = `REDIS_PASSWORD=${databasePassword}${
const defaultRedisEnv = `REDIS_PASSWORD="${databasePassword}"${
env ? `\n${env}` : ""
}`;
const resources = calculateResources({

View File

@@ -219,13 +219,9 @@ export const addDomainToCompose = async (
throw new Error(`The service ${serviceName} not found in the compose`);
}
const httpLabels = await createDomainLabels(appName, domain, "web");
const httpLabels = createDomainLabels(appName, domain, "web");
if (https) {
const httpsLabels = await createDomainLabels(
appName,
domain,
"websecure",
);
const httpsLabels = createDomainLabels(appName, domain, "websecure");
httpLabels.push(...httpsLabels);
}
@@ -250,9 +246,9 @@ export const addDomainToCompose = async (
if (Array.isArray(labels)) {
if (!labels.includes("traefik.enable=true")) {
labels.push("traefik.enable=true");
labels.unshift("traefik.enable=true");
}
labels.push(...httpLabels);
labels.unshift(...httpLabels);
}
if (!compose.isolatedDeployment) {
@@ -287,12 +283,20 @@ export const writeComposeFile = async (
}
};
export const createDomainLabels = async (
export const createDomainLabels = (
appName: string,
domain: Domain,
entrypoint: "web" | "websecure",
) => {
const { host, port, https, uniqueConfigKey, certificateType, path } = domain;
const {
host,
port,
https,
uniqueConfigKey,
certificateType,
path,
customCertResolver,
} = domain;
const routerName = `${appName}-${uniqueConfigKey}-${entrypoint}`;
const labels = [
`traefik.http.routers.${routerName}.rule=Host(\`${host}\`)${path && path !== "/" ? ` && PathPrefix(\`${path}\`)` : ""}`,
@@ -312,6 +316,10 @@ export const createDomainLabels = async (
labels.push(
`traefik.http.routers.${routerName}.tls.certresolver=letsencrypt`,
);
} else if (certificateType === "custom" && customCertResolver) {
labels.push(
`traefik.http.routers.${routerName}.tls.certresolver=${customCertResolver}`,
);
}
}

View File

@@ -137,12 +137,44 @@ export const readRemoteConfig = async (serverId: string, appName: string) => {
}
};
export const readMonitoringConfig = () => {
export const readMonitoringConfig = (readAll = false) => {
const { DYNAMIC_TRAEFIK_PATH } = paths();
const configPath = path.join(DYNAMIC_TRAEFIK_PATH, "access.log");
if (fs.existsSync(configPath)) {
const yamlStr = fs.readFileSync(configPath, "utf8");
return yamlStr;
if (!readAll) {
// Read first 500 lines
let content = "";
let chunk = "";
let validCount = 0;
for (const char of fs.readFileSync(configPath, "utf8")) {
chunk += char;
if (char === "\n") {
try {
const trimmed = chunk.trim();
if (
trimmed !== "" &&
trimmed.startsWith("{") &&
trimmed.endsWith("}")
) {
const log = JSON.parse(trimmed);
if (log.ServiceName !== "dokploy-service-app@file") {
content += chunk;
validCount++;
if (validCount >= 500) {
break;
}
}
}
} catch {
// Ignore invalid JSON
}
chunk = "";
}
}
return content;
}
return fs.readFileSync(configPath, "utf8");
}
return null;
};

View File

@@ -148,6 +148,8 @@ export const createRouterConfig = async (
if (entryPoint === "websecure") {
if (certificateType === "letsencrypt") {
routerConfig.tls = { certResolver: "letsencrypt" };
} else if (certificateType === "custom" && domain.customCertResolver) {
routerConfig.tls = { certResolver: domain.customCertResolver };
} else if (certificateType === "none") {
routerConfig.tls = undefined;
}

View File

@@ -0,0 +1,9 @@
import micromatch from "micromatch";
/**
 * Decides whether a push should trigger a deployment: always deploy when
 * no watch paths are configured; otherwise deploy only when at least one
 * modified file matches a watch-path glob.
 */
export const shouldDeploy = (
  watchPaths: string[] | null,
  modifiedFiles: string[],
): boolean => {
  const patterns = watchPaths ?? [];
  if (patterns.length === 0) {
    return true;
  }
  return micromatch.some(modifiedFiles, patterns);
};