Merge branch 'canary' into 1345-domain-not-working-after-server-restart-or-traefik-reload

This commit is contained in:
Mauricio Siu
2025-03-09 01:12:04 -06:00
139 changed files with 46112 additions and 1272 deletions

View File

@@ -28,6 +28,7 @@
"typecheck": "tsc --noEmit"
},
"dependencies": {
"micromatch":"4.0.8",
"@ai-sdk/anthropic": "^1.0.6",
"@ai-sdk/azure": "^1.0.15",
"@ai-sdk/cohere": "^1.0.6",
@@ -79,6 +80,7 @@
"zod": "^3.23.4"
},
"devDependencies": {
"@types/micromatch": "4.0.9",
"@types/adm-zip": "^0.5.5",
"@types/bcrypt": "5.0.2",
"@types/dockerode": "3.3.23",

View File

@@ -42,6 +42,7 @@ export const buildType = pgEnum("buildType", [
"paketo_buildpacks",
"nixpacks",
"static",
"railpack",
]);
export interface HealthCheckSwarm {
@@ -115,6 +116,7 @@ export const applications = pgTable("application", {
description: text("description"),
env: text("env"),
previewEnv: text("previewEnv"),
watchPaths: text("watchPaths").array(),
previewBuildArgs: text("previewBuildArgs"),
previewWildcard: text("previewWildcard"),
previewPort: integer("previewPort").default(3000),
@@ -123,6 +125,7 @@ export const applications = pgTable("application", {
previewCertificateType: certificateType("certificateType")
.notNull()
.default("none"),
previewCustomCertResolver: text("previewCustomCertResolver"),
previewLimit: integer("previewLimit").default(3),
isPreviewDeploymentsActive: boolean("isPreviewDeploymentsActive").default(
false,
@@ -383,6 +386,7 @@ const createSchema = createInsertSchema(applications, {
"paketo_buildpacks",
"nixpacks",
"static",
"railpack",
]),
herokuVersion: z.string().optional(),
publishDirectory: z.string().optional(),
@@ -402,7 +406,8 @@ const createSchema = createInsertSchema(applications, {
previewLimit: z.number().optional(),
previewHttps: z.boolean().optional(),
previewPath: z.string().optional(),
previewCertificateType: z.enum(["letsencrypt", "none"]).optional(),
previewCertificateType: z.enum(["letsencrypt", "none", "custom"]).optional(),
watchPaths: z.array(z.string()).optional(),
});
export const apiCreateApplication = createSchema.pick({
@@ -446,6 +451,7 @@ export const apiSaveGithubProvider = createSchema
owner: true,
buildPath: true,
githubId: true,
watchPaths: true,
})
.required();
@@ -459,6 +465,7 @@ export const apiSaveGitlabProvider = createSchema
gitlabId: true,
gitlabProjectId: true,
gitlabPathNamespace: true,
watchPaths: true,
})
.required();
@@ -470,6 +477,7 @@ export const apiSaveBitbucketProvider = createSchema
bitbucketRepository: true,
bitbucketId: true,
applicationId: true,
watchPaths: true,
})
.required();
@@ -489,6 +497,7 @@ export const apiSaveGitProvider = createSchema
applicationId: true,
customGitBuildPath: true,
customGitUrl: true,
watchPaths: true,
})
.required()
.merge(

View File

@@ -77,7 +77,7 @@ export const compose = pgTable("compose", {
createdAt: text("createdAt")
.notNull()
.$defaultFn(() => new Date().toISOString()),
watchPaths: text("watchPaths").array(),
githubId: text("githubId").references(() => github.githubId, {
onDelete: "set null",
}),
@@ -132,6 +132,7 @@ const createSchema = createInsertSchema(compose, {
command: z.string().optional(),
composePath: z.string().min(1),
composeType: z.enum(["docker-compose", "stack"]).optional(),
watchPaths: z.array(z.string()).optional(),
});
export const apiCreateCompose = createSchema.pick({

View File

@@ -1,5 +1,5 @@
import { relations } from "drizzle-orm";
import { pgTable, text } from "drizzle-orm/pg-core";
import { pgTable, text, timestamp } from "drizzle-orm/pg-core";
import { createInsertSchema } from "drizzle-zod";
import { nanoid } from "nanoid";
import { z } from "zod";
@@ -21,6 +21,7 @@ export const destinations = pgTable("destination", {
organizationId: text("organizationId")
.notNull()
.references(() => organization.id, { onDelete: "cascade" }),
createdAt: timestamp("createdAt").notNull().defaultNow(),
});
export const destinationsRelations = relations(

View File

@@ -41,6 +41,7 @@ export const domains = pgTable("domain", {
composeId: text("composeId").references(() => compose.composeId, {
onDelete: "cascade",
}),
customCertResolver: text("customCertResolver"),
applicationId: text("applicationId").references(
() => applications.applicationId,
{ onDelete: "cascade" },
@@ -76,6 +77,7 @@ export const apiCreateDomain = createSchema.pick({
https: true,
applicationId: true,
certificateType: true,
customCertResolver: true,
composeId: true,
serviceName: true,
domainType: true,
@@ -107,6 +109,7 @@ export const apiUpdateDomain = createSchema
port: true,
https: true,
certificateType: true,
customCertResolver: true,
serviceName: true,
domainType: true,
})

View File

@@ -146,3 +146,9 @@ export const apiUpdateMariaDB = createSchema
mariadbId: z.string().min(1),
})
.omit({ serverId: true });
// Input schema for the MariaDB "rebuild" endpoint: only the database id is
// required; the rebuild routine looks everything else up from the DB.
export const apiRebuildMariadb = createSchema
.pick({
mariadbId: true,
})
.required();

View File

@@ -141,3 +141,9 @@ export const apiResetMongo = createSchema
appName: true,
})
.required();
// Input schema for the MongoDB "rebuild" endpoint: only the database id is
// required; the rebuild routine looks everything else up from the DB.
export const apiRebuildMongo = createSchema
.pick({
mongoId: true,
})
.required();

View File

@@ -144,3 +144,9 @@ export const apiUpdateMySql = createSchema
mysqlId: z.string().min(1),
})
.omit({ serverId: true });
// Input schema for the MySQL "rebuild" endpoint: only the database id is
// required; the rebuild routine looks everything else up from the DB.
export const apiRebuildMysql = createSchema
.pick({
mysqlId: true,
})
.required();

View File

@@ -140,3 +140,9 @@ export const apiUpdatePostgres = createSchema
postgresId: z.string().min(1),
})
.omit({ serverId: true });
// Input schema for the Postgres "rebuild" endpoint: only the database id is
// required; the rebuild routine looks everything else up from the DB.
export const apiRebuildPostgres = createSchema
.pick({
postgresId: true,
})
.required();

View File

@@ -133,3 +133,9 @@ export const apiUpdateRedis = createSchema
redisId: z.string().min(1),
})
.omit({ serverId: true });
// Input schema for the Redis "rebuild" endpoint: only the database id is
// required; the rebuild routine looks everything else up from the DB.
export const apiRebuildRedis = createSchema
.pick({
redisId: true,
})
.required();

View File

@@ -10,4 +10,5 @@ export const applicationStatus = pgEnum("applicationStatus", [
export const certificateType = pgEnum("certificateType", [
"letsencrypt",
"none",
"custom",
]);

View File

@@ -53,7 +53,7 @@ export const users_temp = pgTable("user_temp", {
letsEncryptEmail: text("letsEncryptEmail"),
sshPrivateKey: text("sshPrivateKey"),
enableDockerCleanup: boolean("enableDockerCleanup").notNull().default(false),
enableLogRotation: boolean("enableLogRotation").notNull().default(false),
logCleanupCron: text("logCleanupCron"),
// Metrics
enablePaidFeatures: boolean("enablePaidFeatures").notNull().default(false),
metricsConfig: jsonb("metricsConfig")
@@ -250,6 +250,12 @@ export const apiReadStatsLogs = z.object({
status: z.string().array().optional(),
search: z.string().optional(),
sort: z.object({ id: z.string(), desc: z.boolean() }).optional(),
dateRange: z
.object({
start: z.string().optional(),
end: z.string().optional(),
})
.optional(),
});
export const apiUpdateWebServerMonitoring = z.object({
@@ -305,4 +311,5 @@ export const apiUpdateUser = createSchema.partial().extend({
}),
})
.optional(),
logCleanupCron: z.string().optional().nullable(),
});

View File

@@ -10,7 +10,8 @@ export const domain = z
.max(65535, { message: "Port must be 65535 or below" })
.optional(),
https: z.boolean().optional(),
certificateType: z.enum(["letsencrypt", "none"]).optional(),
certificateType: z.enum(["letsencrypt", "none", "custom"]).optional(),
customCertResolver: z.string(),
})
.superRefine((input, ctx) => {
if (input.https && !input.certificateType) {
@@ -20,6 +21,14 @@ export const domain = z
message: "Required",
});
}
if (input.certificateType === "custom" && !input.customCertResolver) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
path: ["customCertResolver"],
message: "Required when certificate type is custom",
});
}
});
export const domainCompose = z
@@ -32,7 +41,8 @@ export const domainCompose = z
.max(65535, { message: "Port must be 65535 or below" })
.optional(),
https: z.boolean().optional(),
certificateType: z.enum(["letsencrypt", "none"]).optional(),
certificateType: z.enum(["letsencrypt", "none", "custom"]).optional(),
customCertResolver: z.string(),
serviceName: z.string().min(1, { message: "Service name is required" }),
})
.superRefine((input, ctx) => {
@@ -43,4 +53,12 @@ export const domainCompose = z
message: "Required",
});
}
if (input.certificateType === "custom" && !input.customCertResolver) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
path: ["customCertResolver"],
message: "Required when certificate type is custom",
});
}
});

View File

@@ -30,7 +30,7 @@ export * from "./services/github";
export * from "./services/gitlab";
export * from "./services/server";
export * from "./services/application";
export * from "./utils/databases/rebuild";
export * from "./setup/config-paths";
export * from "./setup/postgres-setup";
export * from "./setup/redis-setup";
@@ -40,7 +40,7 @@ export * from "./setup/setup";
export * from "./setup/traefik-setup";
export * from "./setup/server-validate";
export * from "./setup/server-audit";
export * from "./utils/watch-paths/should-deploy";
export * from "./utils/backups/index";
export * from "./utils/backups/mariadb";
export * from "./utils/backups/mongo";
@@ -116,3 +116,9 @@ export * from "./db/validations/index";
export * from "./utils/gpu-setup";
export * from "./lib/auth";
export {
startLogCleanup,
stopLogCleanup,
getLogCleanupStatus,
} from "./utils/access-log/handler";

View File

@@ -136,24 +136,26 @@ export const getContainersByAppNameMatch = async (
result = stdout.trim().split("\n");
}
const containers = result.map((line) => {
const parts = line.split(" | ");
const containerId = parts[0]
? parts[0].replace("CONTAINER ID : ", "").trim()
: "No container id";
const name = parts[1]
? parts[1].replace("Name: ", "").trim()
: "No container name";
const containers = result
.map((line) => {
const parts = line.split(" | ");
const containerId = parts[0]
? parts[0].replace("CONTAINER ID : ", "").trim()
: "No container id";
const name = parts[1]
? parts[1].replace("Name: ", "").trim()
: "No container name";
const state = parts[2]
? parts[2].replace("State: ", "").trim()
: "No state";
return {
containerId,
name,
state,
};
});
const state = parts[2]
? parts[2].replace("State: ", "").trim()
: "No state";
return {
containerId,
name,
state,
};
})
.sort((a, b) => a.name.localeCompare(b.name));
return containers || [];
} catch (_error) {}
@@ -190,28 +192,30 @@ export const getStackContainersByAppName = async (
result = stdout.trim().split("\n");
}
const containers = result.map((line) => {
const parts = line.split(" | ");
const containerId = parts[0]
? parts[0].replace("CONTAINER ID : ", "").trim()
: "No container id";
const name = parts[1]
? parts[1].replace("Name: ", "").trim()
: "No container name";
const containers = result
.map((line) => {
const parts = line.split(" | ");
const containerId = parts[0]
? parts[0].replace("CONTAINER ID : ", "").trim()
: "No container id";
const name = parts[1]
? parts[1].replace("Name: ", "").trim()
: "No container name";
const state = parts[2]
? parts[2].replace("State: ", "").trim().toLowerCase()
: "No state";
const node = parts[3]
? parts[3].replace("Node: ", "").trim()
: "No specific node";
return {
containerId,
name,
state,
node,
};
});
const state = parts[2]
? parts[2].replace("State: ", "").trim().toLowerCase()
: "No state";
const node = parts[3]
? parts[3].replace("Node: ", "").trim()
: "No specific node";
return {
containerId,
name,
state,
node,
};
})
.sort((a, b) => a.name.localeCompare(b.name));
return containers || [];
} catch (_error) {}
@@ -249,29 +253,31 @@ export const getServiceContainersByAppName = async (
result = stdout.trim().split("\n");
}
const containers = result.map((line) => {
const parts = line.split(" | ");
const containerId = parts[0]
? parts[0].replace("CONTAINER ID : ", "").trim()
: "No container id";
const name = parts[1]
? parts[1].replace("Name: ", "").trim()
: "No container name";
const containers = result
.map((line) => {
const parts = line.split(" | ");
const containerId = parts[0]
? parts[0].replace("CONTAINER ID : ", "").trim()
: "No container id";
const name = parts[1]
? parts[1].replace("Name: ", "").trim()
: "No container name";
const state = parts[2]
? parts[2].replace("State: ", "").trim().toLowerCase()
: "No state";
const state = parts[2]
? parts[2].replace("State: ", "").trim().toLowerCase()
: "No state";
const node = parts[3]
? parts[3].replace("Node: ", "").trim()
: "No specific node";
return {
containerId,
name,
state,
node,
};
});
const node = parts[3]
? parts[3].replace("Node: ", "").trim()
: "No specific node";
return {
containerId,
name,
state,
node,
};
})
.sort((a, b) => a.name.localeCompare(b.name));
return containers || [];
} catch (_error) {}
@@ -312,23 +318,25 @@ export const getContainersByAppLabel = async (
const lines = stdout.trim().split("\n");
const containers = lines.map((line) => {
const parts = line.split(" | ");
const containerId = parts[0]
? parts[0].replace("CONTAINER ID : ", "").trim()
: "No container id";
const name = parts[1]
? parts[1].replace("Name: ", "").trim()
: "No container name";
const state = parts[2]
? parts[2].replace("State: ", "").trim()
: "No state";
return {
containerId,
name,
state,
};
});
const containers = lines
.map((line) => {
const parts = line.split(" | ");
const containerId = parts[0]
? parts[0].replace("CONTAINER ID : ", "").trim()
: "No container id";
const name = parts[1]
? parts[1].replace("Name: ", "").trim()
: "No container name";
const state = parts[2]
? parts[2].replace("State: ", "").trim()
: "No state";
return {
containerId,
name,
state,
};
})
.sort((a, b) => a.name.localeCompare(b.name));
return containers || [];
} catch (_error) {}

View File

@@ -203,6 +203,7 @@ export const createPreviewDeployment = async (
port: application.previewPort,
https: application.previewHttps,
certificateType: application.previewCertificateType,
customCertResolver: application.previewCustomCertResolver,
domainType: "preview",
previewDeploymentId: previewDeployment.previewDeploymentId,
});

View File

@@ -171,6 +171,9 @@ ${installNixpacks()}
echo -e "12. Installing Buildpacks"
${installBuildpacks()}
echo -e "13. Installing Railpack"
${installRailpack()}
`;
return bashCommand;
@@ -573,6 +576,16 @@ const installNixpacks = () => `
fi
`;
// Returns a bash snippet that installs Railpack via the official installer,
// pinned to a specific version, unless the binary is already on PATH.
// NOTE(review): assumes railpack's install.sh honors the RAILPACK_VERSION
// environment variable exported just before it runs — confirm against the
// installer script.
const installRailpack = () => `
if command_exists railpack; then
echo "Railpack already installed ✅"
else
export RAILPACK_VERSION=0.0.37
bash -c "$(curl -fsSL https://railpack.com/install.sh)"
echo "Railpack version $RAILPACK_VERSION installed ✅"
fi
`;
const installBuildpacks = () => `
SUFFIX=""
if [ "$SYS_ARCH" = "aarch64" ] || [ "$SYS_ARCH" = "arm64" ]; then

View File

@@ -38,6 +38,18 @@ export const validateNixpacks = () => `
fi
`;
// Returns a bash snippet that probes for Railpack on the remote server and
// echoes "<version> <enabled>" — "0.0.0 false" when the binary is missing or
// its version string cannot be parsed. The caller splits this output with awk.
// NOTE(review): assumes `railpack --version` prints the version as the third
// whitespace-separated field — confirm against the installed CLI.
export const validateRailpack = () => `
if command_exists railpack; then
version=$(railpack --version | awk '{print $3}')
if [ -n "$version" ]; then
echo "$version true"
else
echo "0.0.0 false"
fi
else
echo "0.0.0 false"
fi
`;
export const validateBuildpacks = () => `
if command_exists pack; then
version=$(pack --version | awk '{print $1}')
@@ -86,7 +98,7 @@ export const serverValidate = async (serverId: string) => {
rcloneVersionEnabled=$(${validateRClone()})
nixpacksVersionEnabled=$(${validateNixpacks()})
buildpacksVersionEnabled=$(${validateBuildpacks()})
railpackVersionEnabled=$(${validateRailpack()})
dockerVersion=$(echo $dockerVersionEnabled | awk '{print $1}')
dockerEnabled=$(echo $dockerVersionEnabled | awk '{print $2}')
@@ -96,6 +108,9 @@ export const serverValidate = async (serverId: string) => {
nixpacksVersion=$(echo $nixpacksVersionEnabled | awk '{print $1}')
nixpacksEnabled=$(echo $nixpacksVersionEnabled | awk '{print $2}')
railpackVersion=$(echo $railpackVersionEnabled | awk '{print $1}')
railpackEnabled=$(echo $railpackVersionEnabled | awk '{print $2}')
buildpacksVersion=$(echo $buildpacksVersionEnabled | awk '{print $1}')
buildpacksEnabled=$(echo $buildpacksVersionEnabled | awk '{print $2}')
@@ -103,7 +118,7 @@ export const serverValidate = async (serverId: string) => {
isSwarmInstalled=$(${validateSwarm()})
isMainDirectoryInstalled=$(${validateMainDirectory()})
echo "{\\"docker\\": {\\"version\\": \\"$dockerVersion\\", \\"enabled\\": $dockerEnabled}, \\"rclone\\": {\\"version\\": \\"$rcloneVersion\\", \\"enabled\\": $rcloneEnabled}, \\"nixpacks\\": {\\"version\\": \\"$nixpacksVersion\\", \\"enabled\\": $nixpacksEnabled}, \\"buildpacks\\": {\\"version\\": \\"$buildpacksVersion\\", \\"enabled\\": $buildpacksEnabled}, \\"isDokployNetworkInstalled\\": $isDokployNetworkInstalled, \\"isSwarmInstalled\\": $isSwarmInstalled, \\"isMainDirectoryInstalled\\": $isMainDirectoryInstalled}"
echo "{\\"docker\\": {\\"version\\": \\"$dockerVersion\\", \\"enabled\\": $dockerEnabled}, \\"rclone\\": {\\"version\\": \\"$rcloneVersion\\", \\"enabled\\": $rcloneEnabled}, \\"nixpacks\\": {\\"version\\": \\"$nixpacksVersion\\", \\"enabled\\": $nixpacksEnabled}, \\"buildpacks\\": {\\"version\\": \\"$buildpacksVersion\\", \\"enabled\\": $buildpacksEnabled}, \\"railpack\\": {\\"version\\": \\"$railpackVersion\\", \\"enabled\\": $railpackEnabled}, \\"isDokployNetworkInstalled\\": $isDokployNetworkInstalled, \\"isSwarmInstalled\\": $isSwarmInstalled, \\"isMainDirectoryInstalled\\": $isMainDirectoryInstalled}"
`;
client.exec(bashCommand, (err, stream) => {
if (err) {

View File

@@ -1,121 +1,77 @@
import { IS_CLOUD, paths } from "@dokploy/server/constants";
import { type RotatingFileStream, createStream } from "rotating-file-stream";
import { paths } from "@dokploy/server/constants";
import { execAsync } from "../process/execAsync";
import { findAdmin } from "@dokploy/server/services/admin";
import { updateUser } from "@dokploy/server/services/user";
import { scheduleJob, scheduledJobs } from "node-schedule";
class LogRotationManager {
private static instance: LogRotationManager;
private stream: RotatingFileStream | null = null;
const LOG_CLEANUP_JOB_NAME = "access-log-cleanup";
private constructor() {
if (IS_CLOUD) {
return;
}
this.initialize().catch(console.error);
}
public static getInstance(): LogRotationManager {
if (!LogRotationManager.instance) {
LogRotationManager.instance = new LogRotationManager();
}
return LogRotationManager.instance;
}
private async initialize(): Promise<void> {
const isActive = await this.getStateFromDB();
if (isActive) {
await this.activateStream();
}
}
private async getStateFromDB(): Promise<boolean> {
const admin = await findAdmin();
return admin?.user.enableLogRotation ?? false;
}
private async setStateInDB(active: boolean): Promise<void> {
const admin = await findAdmin();
if (!admin) {
return;
}
await updateUser(admin.user.id, {
enableLogRotation: active,
});
}
private async activateStream(): Promise<void> {
export const startLogCleanup = async (
cronExpression = "0 0 * * *",
): Promise<boolean> => {
try {
const { DYNAMIC_TRAEFIK_PATH } = paths();
if (this.stream) {
await this.deactivateStream();
const existingJob = scheduledJobs[LOG_CLEANUP_JOB_NAME];
if (existingJob) {
existingJob.cancel();
}
this.stream = createStream("access.log", {
size: "100M",
interval: "1d",
path: DYNAMIC_TRAEFIK_PATH,
rotate: 6,
compress: "gzip",
});
scheduleJob(LOG_CLEANUP_JOB_NAME, cronExpression, async () => {
try {
await execAsync(
`tail -n 1000 ${DYNAMIC_TRAEFIK_PATH}/access.log > ${DYNAMIC_TRAEFIK_PATH}/access.log.tmp && mv ${DYNAMIC_TRAEFIK_PATH}/access.log.tmp ${DYNAMIC_TRAEFIK_PATH}/access.log`,
);
this.stream.on("rotation", this.handleRotation.bind(this));
}
private async deactivateStream(): Promise<void> {
return new Promise<void>((resolve) => {
if (this.stream) {
this.stream.end(() => {
this.stream = null;
resolve();
});
} else {
resolve();
await execAsync("docker exec dokploy-traefik kill -USR1 1");
} catch (error) {
console.error("Error during log cleanup:", error);
}
});
}
public async activate(): Promise<boolean> {
const currentState = await this.getStateFromDB();
if (currentState) {
return true;
const admin = await findAdmin();
if (admin) {
await updateUser(admin.user.id, {
logCleanupCron: cronExpression,
});
}
await this.setStateInDB(true);
await this.activateStream();
return true;
} catch (_) {
return false;
}
};
public async deactivate(): Promise<boolean> {
console.log("Deactivating log rotation...");
const currentState = await this.getStateFromDB();
if (!currentState) {
console.log("Log rotation is already inactive in DB");
return true;
export const stopLogCleanup = async (): Promise<boolean> => {
try {
const existingJob = scheduledJobs[LOG_CLEANUP_JOB_NAME];
if (existingJob) {
existingJob.cancel();
}
// Update database
const admin = await findAdmin();
if (admin) {
await updateUser(admin.user.id, {
logCleanupCron: null,
});
}
await this.setStateInDB(false);
await this.deactivateStream();
console.log("Log rotation deactivated successfully");
return true;
} catch (error) {
console.error("Error stopping log cleanup:", error);
return false;
}
};
private async handleRotation() {
try {
const status = await this.getStatus();
if (!status) {
await this.deactivateStream();
}
await execAsync(
"docker kill -s USR1 $(docker ps -q --filter name=dokploy-traefik)",
);
console.log("USR1 Signal send to Traefik");
} catch (error) {
console.error("Error sending USR1 Signal to Traefik:", error);
}
}
public async getStatus(): Promise<boolean> {
const dbState = await this.getStateFromDB();
return dbState;
}
}
export const logRotationManager = LogRotationManager.getInstance();
export const getLogCleanupStatus = async (): Promise<{
enabled: boolean;
cronExpression: string | null;
}> => {
const admin = await findAdmin();
const cronExpression = admin?.user.logCleanupCron ?? null;
return {
enabled: cronExpression !== null,
cronExpression,
};
};

View File

@@ -6,14 +6,21 @@ interface HourlyData {
count: number;
}
export function processLogs(logString: string): HourlyData[] {
export function processLogs(
logString: string,
dateRange?: { start?: string; end?: string },
): HourlyData[] {
if (_.isEmpty(logString)) {
return [];
}
const hourlyData = _(logString)
.split("\n")
.compact()
.filter((line) => {
const trimmed = line.trim();
// Check if the line starts with { and ends with } to ensure it's a potential JSON object
return trimmed !== "" && trimmed.startsWith("{") && trimmed.endsWith("}");
})
.map((entry) => {
try {
const log: LogEntry = JSON.parse(entry);
@@ -21,6 +28,20 @@ export function processLogs(logString: string): HourlyData[] {
return null;
}
const date = new Date(log.StartUTC);
if (dateRange?.start || dateRange?.end) {
const logDate = date.getTime();
const start = dateRange?.start
? new Date(dateRange.start).getTime()
: 0;
const end = dateRange?.end
? new Date(dateRange.end).getTime()
: Number.POSITIVE_INFINITY;
if (logDate < start || logDate > end) {
return null;
}
}
return `${date.toISOString().slice(0, 13)}:00:00Z`;
} catch (error) {
console.error("Error parsing log entry:", error);
@@ -51,21 +72,46 @@ export function parseRawConfig(
sort?: SortInfo,
search?: string,
status?: string[],
dateRange?: { start?: string; end?: string },
): { data: LogEntry[]; totalCount: number } {
try {
if (_.isEmpty(rawConfig)) {
return { data: [], totalCount: 0 };
}
// Split logs into chunks to avoid memory issues
let parsedLogs = _(rawConfig)
.split("\n")
.filter((line) => {
const trimmed = line.trim();
return (
trimmed !== "" && trimmed.startsWith("{") && trimmed.endsWith("}")
);
})
.map((line) => {
try {
return JSON.parse(line) as LogEntry;
} catch (error) {
console.error("Error parsing log line:", error);
return null;
}
})
.compact()
.map((line) => JSON.parse(line) as LogEntry)
.value();
parsedLogs = parsedLogs.filter(
(log) => log.ServiceName !== "dokploy-service-app@file",
);
// Apply date range filter if provided
if (dateRange?.start || dateRange?.end) {
parsedLogs = parsedLogs.filter((log) => {
const logDate = new Date(log.StartUTC).getTime();
const start = dateRange?.start
? new Date(dateRange.start).getTime()
: 0;
const end = dateRange?.end
? new Date(dateRange.end).getTime()
: Number.POSITIVE_INFINITY;
return logDate >= start && logDate <= end;
});
}
if (search) {
parsedLogs = parsedLogs.filter((log) =>
@@ -78,6 +124,7 @@ export function parseRawConfig(
status.some((range) => isStatusInRange(log.DownstreamStatus, range)),
);
}
const totalCount = parsedLogs.length;
if (sort) {
@@ -101,6 +148,7 @@ export function parseRawConfig(
throw new Error("Failed to parse rawConfig");
}
}
const isStatusInRange = (status: number, range: string) => {
switch (range) {
case "info":

View File

@@ -17,7 +17,7 @@ function getProviderName(apiUrl: string) {
if (apiUrl.includes("localhost:11434") || apiUrl.includes("ollama"))
return "ollama";
if (apiUrl.includes("api.deepinfra.com")) return "deepinfra";
throw new Error(`Unsupported AI provider for URL: ${apiUrl}`);
return "custom";
}
export function selectAIProvider(config: { apiUrl: string; apiKey: string }) {
@@ -67,7 +67,46 @@ export function selectAIProvider(config: { apiUrl: string; apiKey: string }) {
baseURL: config.apiUrl,
apiKey: config.apiKey,
});
case "custom":
return createOpenAICompatible({
name: "custom",
baseURL: config.apiUrl,
headers: {
Authorization: `Bearer ${config.apiKey}`,
},
});
default:
throw new Error(`Unsupported AI provider: ${providerName}`);
}
}
/**
 * Builds the auth headers for a raw HTTP call to an AI provider, inferred
 * from the API base URL.
 *
 * - Anthropic uses its own header scheme (`x-api-key` + API version).
 * - Mistral URLs get the key passed through as-is in `Authorization`.
 * - Everything else falls back to the OpenAI-style `Bearer` token.
 */
export const getProviderHeaders = (
	apiUrl: string,
	apiKey: string,
): Record<string, string> => {
	const isAnthropic = apiUrl.includes("anthropic");
	if (isAnthropic) {
		return { "x-api-key": apiKey, "anthropic-version": "2023-06-01" };
	}

	// NOTE(review): Mistral's public API docs show "Authorization: Bearer <key>";
	// this sends the key verbatim — confirm callers include the prefix themselves.
	const isMistral = apiUrl.includes("mistral");
	return isMistral
		? { Authorization: apiKey }
		: { Authorization: `Bearer ${apiKey}` };
};
// Shape of one model entry as returned by OpenAI-style `/models` listings.
export interface Model {
// Model identifier (e.g. passed back when selecting a model).
id: string;
// Object type discriminator from the API payload.
object: string;
// Creation timestamp as provided by the API (unix seconds, per OpenAI's schema).
created: number;
// Owning organization/account reported by the API.
owned_by: string;
}

View File

@@ -12,6 +12,7 @@ import { runMongoBackup } from "./mongo";
import { runMySqlBackup } from "./mysql";
import { runPostgresBackup } from "./postgres";
import { findAdmin } from "../../services/admin";
import { startLogCleanup } from "../access-log/handler";
export const initCronJobs = async () => {
console.log("Setting up cron jobs....");
@@ -168,4 +169,8 @@ export const initCronJobs = async () => {
}
}
}
if (admin?.user.logCleanupCron) {
await startLogCleanup(admin.user.logCleanupCron);
}
};

View File

@@ -16,6 +16,7 @@ import { buildCustomDocker, getDockerCommand } from "./docker-file";
import { buildHeroku, getHerokuCommand } from "./heroku";
import { buildNixpacks, getNixpacksCommand } from "./nixpacks";
import { buildPaketo, getPaketoCommand } from "./paketo";
import { buildRailpack, getRailpackCommand } from "./railpack";
import { buildStatic, getStaticCommand } from "./static";
// NIXPACKS codeDirectory = where is the path of the code directory
@@ -55,6 +56,8 @@ export const buildApplication = async (
await buildCustomDocker(application, writeStream);
} else if (buildType === "static") {
await buildStatic(application, writeStream);
} else if (buildType === "railpack") {
await buildRailpack(application, writeStream);
}
if (application.registryId) {
@@ -96,6 +99,9 @@ export const getBuildCommand = (
case "dockerfile":
command = getDockerCommand(application, logPath);
break;
case "railpack":
command = getRailpackCommand(application, logPath);
break;
}
if (registry) {
command += uploadImageRemoteCommand(application, logPath);

View File

@@ -0,0 +1,87 @@
import type { WriteStream } from "node:fs";
import type { ApplicationNested } from ".";
import { prepareEnvironmentVariables } from "../docker/utils";
import { getBuildAppDirectory } from "../filesystem/directory";
import { spawnAsync } from "../process/spawnAsync";
import { execAsync } from "../process/execAsync";
/**
 * Builds an application image with Railpack, streaming build output into the
 * deployment log.
 *
 * Ensures a BuildKit daemon container exists (Railpack delegates the image
 * build to BuildKit via BUILDKIT_HOST), then spawns `railpack build` with the
 * project + application environment passed as `--env` flags. Rejects if either
 * the BuildKit bootstrap or the build itself fails.
 *
 * @param application application plus its nested project (for env vars)
 * @param writeStream deployment log stream; build output is piped into it
 * @returns true on success
 */
export const buildRailpack = async (
	application: ApplicationNested,
	writeStream: WriteStream,
) => {
	const { env, appName } = application;
	const buildAppDirectory = getBuildAppDirectory(application);
	const envVariables = prepareEnvironmentVariables(
		env,
		application.project.env,
	);

	// Ensure the buildkit container is running; create it if it doesn't exist.
	await execAsync(
		"docker container inspect buildkit >/dev/null 2>&1 || docker run --rm --privileged -d --name buildkit moby/buildkit",
	);

	const args = ["build", buildAppDirectory, "--name", appName];
	// `variable` (not `env`) so we don't shadow the destructured `env` above.
	for (const variable of envVariables) {
		args.push("--env", variable);
	}

	// No quoting needed here: spawnAsync passes args as an array, not a shell
	// string. (The original wrapped this in `try { … } catch (e) { throw e; }`,
	// which was a no-op and has been removed.)
	await spawnAsync(
		"railpack",
		args,
		(data) => {
			if (writeStream.writable) {
				writeStream.write(data);
			}
		},
		{
			env: {
				...process.env,
				BUILDKIT_HOST: "docker-container://buildkit",
			},
		},
	);
	return true;
};
/**
 * Builds the bash snippet used to run a Railpack build on a remote server,
 * appending all output to the given log file.
 *
 * Unlike buildRailpack (which passes args as an array to spawn), this path
 * interpolates arguments into a shell command line, so every argument is
 * single-quoted: env values routinely contain spaces and shell
 * metacharacters, and unquoted interpolation would break or inject into the
 * command.
 *
 * @param application application plus its nested project (for env vars)
 * @param logPath absolute path of the deployment log on the remote host
 * @returns a multi-line bash script (exits 1 on build failure)
 */
export const getRailpackCommand = (
	application: ApplicationNested,
	logPath: string,
) => {
	const { env, appName } = application;
	const buildAppDirectory = getBuildAppDirectory(application);
	const envVariables = prepareEnvironmentVariables(
		env,
		application.project.env,
	);

	// POSIX single-quoting: wrap in '…' and escape embedded single quotes
	// with the standard '\'' sequence.
	const shellQuote = (value: string) => `'${value.replace(/'/g, "'\\''")}'`;

	const args = ["build", buildAppDirectory, "--name", appName];
	for (const variable of envVariables) {
		args.push("--env", variable);
	}

	const command = `railpack ${args.map(shellQuote).join(" ")}`;
	const bashCommand = `
echo "Building with Railpack..." >> "${logPath}";
docker container inspect buildkit >/dev/null 2>&1 || docker run --rm --privileged -d --name buildkit moby/buildkit;
export BUILDKIT_HOST=docker-container://buildkit;
${command} >> ${logPath} 2>> ${logPath} || {
  echo "❌ Railpack build failed" >> ${logPath};
  exit 1;
}
echo "✅ Railpack build completed." >> ${logPath};
`;
	return bashCommand;
};

View File

@@ -31,7 +31,7 @@ export const buildMariadb = async (mariadb: MariadbNested) => {
mounts,
} = mariadb;
const defaultMariadbEnv = `MARIADB_DATABASE=${databaseName}\nMARIADB_USER=${databaseUser}\nMARIADB_PASSWORD=${databasePassword}\nMARIADB_ROOT_PASSWORD=${databaseRootPassword}${
const defaultMariadbEnv = `MARIADB_DATABASE="${databaseName}"\nMARIADB_USER="${databaseUser}"\nMARIADB_PASSWORD="${databasePassword}"\nMARIADB_ROOT_PASSWORD="${databaseRootPassword}"${
env ? `\n${env}` : ""
}`;
const resources = calculateResources({

View File

@@ -77,7 +77,7 @@ fi
${command ?? "wait $MONGOD_PID"}`;
const defaultMongoEnv = `MONGO_INITDB_ROOT_USERNAME=${databaseUser}\nMONGO_INITDB_ROOT_PASSWORD=${databasePassword}${replicaSets ? "\nMONGO_INITDB_DATABASE=admin" : ""}${
const defaultMongoEnv = `MONGO_INITDB_ROOT_USERNAME="${databaseUser}"\nMONGO_INITDB_ROOT_PASSWORD="${databasePassword}"${replicaSets ? "\nMONGO_INITDB_DATABASE=admin" : ""}${
env ? `\n${env}` : ""
}`;

View File

@@ -34,10 +34,10 @@ export const buildMysql = async (mysql: MysqlNested) => {
const defaultMysqlEnv =
databaseUser !== "root"
? `MYSQL_USER=${databaseUser}\nMYSQL_DATABASE=${databaseName}\nMYSQL_PASSWORD=${databasePassword}\nMYSQL_ROOT_PASSWORD=${databaseRootPassword}${
? `MYSQL_USER="${databaseUser}"\nMYSQL_DATABASE="${databaseName}"\nMYSQL_PASSWORD="${databasePassword}"\nMYSQL_ROOT_PASSWORD="${databaseRootPassword}"${
env ? `\n${env}` : ""
}`
: `MYSQL_DATABASE=${databaseName}\nMYSQL_ROOT_PASSWORD=${databaseRootPassword}${
: `MYSQL_DATABASE="${databaseName}"\nMYSQL_ROOT_PASSWORD="${databaseRootPassword}"${
env ? `\n${env}` : ""
}`;
const resources = calculateResources({

View File

@@ -30,7 +30,7 @@ export const buildPostgres = async (postgres: PostgresNested) => {
mounts,
} = postgres;
const defaultPostgresEnv = `POSTGRES_DB=${databaseName}\nPOSTGRES_USER=${databaseUser}\nPOSTGRES_PASSWORD=${databasePassword}${
const defaultPostgresEnv = `POSTGRES_DB="${databaseName}"\nPOSTGRES_USER="${databaseUser}"\nPOSTGRES_PASSWORD="${databasePassword}"${
env ? `\n${env}` : ""
}`;
const resources = calculateResources({

View File

@@ -0,0 +1,99 @@
import { deployPostgres } from "@dokploy/server/services/postgres";
import { execAsyncRemote } from "../process/execAsync";
import { execAsync } from "../process/execAsync";
import { deployMySql } from "@dokploy/server/services/mysql";
import { deployMariadb } from "@dokploy/server/services/mariadb";
import { deployMongo } from "@dokploy/server/services/mongo";
import { deployRedis } from "@dokploy/server/services/redis";
import { removeService } from "../docker/utils";
import { db } from "@dokploy/server/db";
import {
postgres,
mysql,
mariadb,
mongo,
redis,
} from "@dokploy/server/db/schema";
import { eq } from "drizzle-orm";
type DatabaseType = "postgres" | "mysql" | "mariadb" | "mongo" | "redis";
/**
 * Rebuilds a database service from scratch: removes the running service,
 * deletes its named volumes, then redeploys it with its stored configuration.
 * Throws when no database with the given id exists for the given type.
 */
export const rebuildDatabase = async (
	databaseId: string,
	type: DatabaseType,
) => {
	const record = await findDatabaseById(databaseId, type);
	if (!record) {
		throw new Error("Database not found");
	}

	// Tear down the existing service before wiping its data.
	await removeService(record.appName, record.serverId);
	// Wait so the removed service can fully release its resources
	// before the volumes are deleted.
	await new Promise((resolve) => setTimeout(resolve, 6000));

	// Delete every named volume so the database starts from a clean slate.
	for (const mount of record.mounts) {
		if (mount.type !== "volume") {
			continue;
		}
		const removeVolume = `docker volume rm ${mount?.volumeName} --force`;
		if (record.serverId) {
			// Database lives on a remote server — run the cleanup there.
			await execAsyncRemote(record.serverId, removeVolume);
		} else {
			await execAsync(removeVolume);
		}
	}

	// Redeploy the matching database engine with its original configuration.
	switch (type) {
		case "postgres":
			await deployPostgres(databaseId);
			break;
		case "mysql":
			await deployMySql(databaseId);
			break;
		case "mariadb":
			await deployMariadb(databaseId);
			break;
		case "mongo":
			await deployMongo(databaseId);
			break;
		case "redis":
			await deployRedis(databaseId);
			break;
	}
};
/**
 * Looks up a database row by id in the table matching the requested type,
 * eagerly loading its mounts so callers can clean up volumes.
 * Returns undefined when no matching row exists.
 */
const findDatabaseById = async (databaseId: string, type: DatabaseType) => {
	switch (type) {
		case "postgres":
			return await db.query.postgres.findFirst({
				where: eq(postgres.postgresId, databaseId),
				with: {
					mounts: true,
				},
			});
		case "mysql":
			return await db.query.mysql.findFirst({
				where: eq(mysql.mysqlId, databaseId),
				with: {
					mounts: true,
				},
			});
		case "mariadb":
			return await db.query.mariadb.findFirst({
				where: eq(mariadb.mariadbId, databaseId),
				with: {
					mounts: true,
				},
			});
		case "mongo":
			return await db.query.mongo.findFirst({
				where: eq(mongo.mongoId, databaseId),
				with: {
					mounts: true,
				},
			});
		case "redis":
			return await db.query.redis.findFirst({
				where: eq(redis.redisId, databaseId),
				with: {
					mounts: true,
				},
			});
	}
};

View File

@@ -28,7 +28,7 @@ export const buildRedis = async (redis: RedisNested) => {
mounts,
} = redis;
const defaultRedisEnv = `REDIS_PASSWORD=${databasePassword}${
const defaultRedisEnv = `REDIS_PASSWORD="${databasePassword}"${
env ? `\n${env}` : ""
}`;
const resources = calculateResources({

View File

@@ -211,13 +211,9 @@ export const addDomainToCompose = async (
throw new Error(`The service ${serviceName} not found in the compose`);
}
const httpLabels = await createDomainLabels(appName, domain, "web");
const httpLabels = createDomainLabels(appName, domain, "web");
if (https) {
const httpsLabels = await createDomainLabels(
appName,
domain,
"websecure",
);
const httpsLabels = createDomainLabels(appName, domain, "websecure");
httpLabels.push(...httpsLabels);
}
@@ -279,12 +275,20 @@ export const writeComposeFile = async (
}
};
export const createDomainLabels = async (
export const createDomainLabels = (
appName: string,
domain: Domain,
entrypoint: "web" | "websecure",
) => {
const { host, port, https, uniqueConfigKey, certificateType, path } = domain;
const {
host,
port,
https,
uniqueConfigKey,
certificateType,
path,
customCertResolver,
} = domain;
const routerName = `${appName}-${uniqueConfigKey}-${entrypoint}`;
const labels = [
`traefik.http.routers.${routerName}.rule=Host(\`${host}\`)${path && path !== "/" ? ` && PathPrefix(\`${path}\`)` : ""}`,
@@ -304,6 +308,10 @@ export const createDomainLabels = async (
labels.push(
`traefik.http.routers.${routerName}.tls.certresolver=letsencrypt`,
);
} else if (certificateType === "custom" && customCertResolver) {
labels.push(
`traefik.http.routers.${routerName}.tls.certresolver=${customCertResolver}`,
);
}
}

View File

@@ -275,7 +275,7 @@ export const getBitbucketBranches = async (
}
const bitbucketProvider = await findBitbucketById(input.bitbucketId);
const { owner, repo } = input;
const url = `https://api.bitbucket.org/2.0/repositories/${owner}/${repo}/refs/branches`;
const url = `https://api.bitbucket.org/2.0/repositories/${owner}/${repo}/refs/branches?pagelen=100`;
try {
const response = await fetch(url, {

View File

@@ -266,7 +266,7 @@ export const getGitlabRepositories = async (gitlabId?: string) => {
if (groupName) {
return full_path.toLowerCase().includes(groupName) && kind === "group";
}
return kind === "member";
return kind === "user";
});
const mappedRepositories = filteredRepos.map((repo: any) => {
return {
@@ -433,7 +433,7 @@ export const testGitlabConnection = async (
if (groupName) {
return full_path.toLowerCase().includes(groupName) && kind === "group";
}
return kind === "member";
return kind === "user";
});
return filteredRepos.length;

View File

@@ -137,12 +137,44 @@ export const readRemoteConfig = async (serverId: string, appName: string) => {
}
};
export const readMonitoringConfig = () => {
export const readMonitoringConfig = (readAll = false) => {
const { DYNAMIC_TRAEFIK_PATH } = paths();
const configPath = path.join(DYNAMIC_TRAEFIK_PATH, "access.log");
if (fs.existsSync(configPath)) {
const yamlStr = fs.readFileSync(configPath, "utf8");
return yamlStr;
if (!readAll) {
// Read first 500 lines
let content = "";
let chunk = "";
let validCount = 0;
for (const char of fs.readFileSync(configPath, "utf8")) {
chunk += char;
if (char === "\n") {
try {
const trimmed = chunk.trim();
if (
trimmed !== "" &&
trimmed.startsWith("{") &&
trimmed.endsWith("}")
) {
const log = JSON.parse(trimmed);
if (log.ServiceName !== "dokploy-service-app@file") {
content += chunk;
validCount++;
if (validCount >= 500) {
break;
}
}
}
} catch {
// Ignore invalid JSON
}
chunk = "";
}
}
return content;
}
return fs.readFileSync(configPath, "utf8");
}
return null;
};

View File

@@ -148,6 +148,8 @@ export const createRouterConfig = async (
if (entryPoint === "websecure") {
if (certificateType === "letsencrypt") {
routerConfig.tls = { certResolver: "letsencrypt" };
} else if (certificateType === "custom" && domain.customCertResolver) {
routerConfig.tls = { certResolver: domain.customCertResolver };
} else if (certificateType === "none") {
routerConfig.tls = undefined;
}

View File

@@ -0,0 +1,9 @@
import micromatch from "micromatch";
/**
 * Decides whether a push should trigger a deployment based on the
 * application's configured watch paths.
 *
 * @param watchPaths - glob patterns to match against; null or empty means
 *   "no filter configured" and every change deploys.
 * @param modifiedFiles - repository-relative paths changed by the push.
 * @returns true when no watch paths are configured, or when at least one
 *   modified file matches any of the glob patterns.
 */
export const shouldDeploy = (
	watchPaths: string[] | null,
	modifiedFiles: string[],
): boolean => {
	// Redundant `watchPaths?.length` removed: `!watchPaths` already
	// guarantees the array is non-nullish past this guard.
	if (!watchPaths || watchPaths.length === 0) {
		return true;
	}
	return micromatch.some(modifiedFiles, watchPaths);
};