refactor: rename builders to server

Mauricio Siu
2024-10-05 22:15:47 -06:00
parent 43555cdabe
commit f3ce69b656
361 changed files with 551 additions and 562 deletions

View File

@@ -0,0 +1,117 @@
import { IS_CLOUD, paths } from "@/server/constants";
import { findAdmin, updateAdmin } from "@/server/services/admin";
import { type RotatingFileStream, createStream } from "rotating-file-stream";
import { execAsync } from "../process/execAsync";
class LogRotationManager {
private static instance: LogRotationManager;
private stream: RotatingFileStream | null = null;
private constructor() {
if (IS_CLOUD) {
return;
}
this.initialize().catch(console.error);
}
public static getInstance(): LogRotationManager {
if (!LogRotationManager.instance) {
LogRotationManager.instance = new LogRotationManager();
}
return LogRotationManager.instance;
}
private async initialize(): Promise<void> {
const isActive = await this.getStateFromDB();
if (isActive) {
await this.activateStream();
}
}
private async getStateFromDB(): Promise<boolean> {
const setting = await findAdmin();
return setting?.enableLogRotation ?? false;
}
private async setStateInDB(active: boolean): Promise<void> {
const admin = await findAdmin();
await updateAdmin(admin.authId, {
enableLogRotation: active,
});
}
private async activateStream(): Promise<void> {
const { DYNAMIC_TRAEFIK_PATH } = paths();
if (this.stream) {
await this.deactivateStream();
}
this.stream = createStream("access.log", {
size: "100M",
interval: "1d",
path: DYNAMIC_TRAEFIK_PATH,
rotate: 6,
compress: "gzip",
});
this.stream.on("rotation", this.handleRotation.bind(this));
}
private async deactivateStream(): Promise<void> {
return new Promise<void>((resolve) => {
if (this.stream) {
this.stream.end(() => {
this.stream = null;
resolve();
});
} else {
resolve();
}
});
}
public async activate(): Promise<boolean> {
const currentState = await this.getStateFromDB();
if (currentState) {
return true;
}
await this.setStateInDB(true);
await this.activateStream();
return true;
}
public async deactivate(): Promise<boolean> {
console.log("Deactivating log rotation...");
const currentState = await this.getStateFromDB();
if (!currentState) {
console.log("Log rotation is already inactive in DB");
return true;
}
await this.setStateInDB(false);
await this.deactivateStream();
console.log("Log rotation deactivated successfully");
return true;
}
private async handleRotation() {
try {
const status = await this.getStatus();
if (!status) {
await this.deactivateStream();
}
await execAsync(
"docker kill -s USR1 $(docker ps -q --filter name=dokploy-traefik)",
);
console.log("USR1 Signal send to Traefik");
} catch (error) {
console.error("Error to send USR1 Signal to Traefik:", error);
}
}
public async getStatus(): Promise<boolean> {
const dbState = await this.getStateFromDB();
return dbState;
}
}
export const logRotationManager = LogRotationManager.getInstance();
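
For context, a minimal sketch of how this singleton might be driven from an admin settings update. The import path and handler name are hypothetical and not part of this commit:

// Hypothetical handler that toggles Traefik access-log rotation from the admin settings.
import { logRotationManager } from "@/server/utils/access-log/handler";

export const setLogRotation = async (enable: boolean) => {
  // activate()/deactivate() persist the flag via updateAdmin() and
  // start or stop the rotating-file-stream accordingly.
  const ok = enable
    ? await logRotationManager.activate()
    : await logRotationManager.deactivate();
  return ok && (await logRotationManager.getStatus());
};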

View File

@@ -0,0 +1,48 @@
export interface LogEntry {
ClientAddr: string;
ClientHost: string;
ClientPort: string;
ClientUsername: string;
DownstreamContentSize: number;
DownstreamStatus: number;
Duration: number;
OriginContentSize: number;
OriginDuration: number;
OriginStatus: number;
Overhead: number;
RequestAddr: string;
RequestContentSize: number;
RequestCount: number;
RequestHost: string;
RequestMethod: string;
RequestPath: string;
RequestPort: string;
RequestProtocol: string;
RequestScheme: string;
RetryAttempts: number;
RouterName: string;
ServiceAddr: string;
ServiceName: string;
ServiceURL: {
Scheme: string;
Opaque: string;
User: null;
Host: string;
Path: string;
RawPath: string;
ForceQuery: boolean;
RawQuery: string;
Fragment: string;
RawFragment: string;
};
StartLocal: string;
StartUTC: string;
downstream_Content_Type: string;
entryPointName: string;
level: string;
msg: string;
origin_Content_Type: string;
request_Content_Type: string;
request_User_Agent: string;
time: string;
}
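
For orientation, a single line of Traefik's JSON access log modeled by this interface looks roughly like the following (values are illustrative, and only a subset of fields is shown):

const sample = {
  ClientHost: "203.0.113.7",
  DownstreamStatus: 200,
  RequestMethod: "GET",
  RequestPath: "/api/health",
  RequestHost: "app.example.com",
  ServiceName: "myapp-service@docker",
  StartUTC: "2024-10-05T22:15:47.123456789Z",
  time: "2024-10-05T22:15:47-06:00",
} satisfies Partial<LogEntry>;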

View File

@@ -0,0 +1,119 @@
import _ from "lodash";
import type { LogEntry } from "./types";
interface HourlyData {
hour: string;
count: number;
}
export function processLogs(logString: string): HourlyData[] {
if (_.isEmpty(logString)) {
return [];
}
const hourlyData = _(logString)
.split("\n")
.compact()
.map((entry) => {
try {
const log: LogEntry = JSON.parse(entry);
if (log.ServiceName === "dokploy-service-app@file") {
return null;
}
const date = new Date(log.StartUTC);
return `${date.toISOString().slice(0, 13)}:00:00Z`;
} catch (error) {
console.error("Error parsing log entry:", error);
return null;
}
})
.compact()
.countBy()
.map((count, hour) => ({ hour, count }))
.value();
return _.sortBy(hourlyData, (entry) => new Date(entry.hour).getTime());
}
interface PageInfo {
pageIndex: number;
pageSize: number;
}
interface SortInfo {
id: string;
desc: boolean;
}
export function parseRawConfig(
rawConfig: string,
page?: PageInfo,
sort?: SortInfo,
search?: string,
status?: string[],
): { data: LogEntry[]; totalCount: number } {
try {
if (_.isEmpty(rawConfig)) {
return { data: [], totalCount: 0 };
}
let parsedLogs = _(rawConfig)
.split("\n")
.compact()
.map((line) => JSON.parse(line) as LogEntry)
.value();
parsedLogs = parsedLogs.filter(
(log) => log.ServiceName !== "dokploy-service-app@file",
);
if (search) {
parsedLogs = parsedLogs.filter((log) =>
log.RequestPath.toLowerCase().includes(search.toLowerCase()),
);
}
if (status && status.length > 0) {
parsedLogs = parsedLogs.filter((log) =>
status.some((range) => isStatusInRange(log.DownstreamStatus, range)),
);
}
const totalCount = parsedLogs.length;
if (sort) {
parsedLogs = _.orderBy(
parsedLogs,
[sort.id],
[sort.desc ? "desc" : "asc"],
);
} else {
parsedLogs = _.orderBy(parsedLogs, ["time"], ["desc"]);
}
if (page) {
const startIndex = page.pageIndex * page.pageSize;
parsedLogs = parsedLogs.slice(startIndex, startIndex + page.pageSize);
}
return { data: parsedLogs, totalCount };
} catch (error) {
console.error("Error parsing rawConfig:", error);
throw new Error("Failed to parse rawConfig");
}
}
const isStatusInRange = (status: number, range: string) => {
switch (range) {
case "info":
return status >= 100 && status <= 199;
case "success":
return status >= 200 && status <= 299;
case "redirect":
return status >= 300 && status <= 399;
case "client":
return status >= 400 && status <= 499;
case "server":
return status >= 500 && status <= 599;
default:
return false;
}
};
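
A short usage sketch of these helpers, assuming rawLogs holds the raw contents of Traefik's JSON access log (the variable name and values are illustrative):

// First page of 25 rows, newest first, only 4xx/5xx entries whose path contains "/api".
const { data, totalCount } = parseRawConfig(
  rawLogs,
  { pageIndex: 0, pageSize: 25 },
  { id: "time", desc: true },
  "/api",
  ["client", "server"],
);
// Hourly request counts for a chart, e.g. [{ hour: "2024-10-05T22:00:00Z", count: 42 }].
const requestsPerHour = processLogs(rawLogs);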

View File

@@ -0,0 +1,157 @@
import { findAdmin } from "@/server/services/admin";
import { getAllServers } from "@/server/services/server";
import { scheduleJob } from "node-schedule";
import { db } from "../../db/index";
import {
cleanUpDockerBuilder,
cleanUpSystemPrune,
cleanUpUnusedImages,
} from "../docker/utils";
import { runMariadbBackup } from "./mariadb";
import { runMongoBackup } from "./mongo";
import { runMySqlBackup } from "./mysql";
import { runPostgresBackup } from "./postgres";
export const initCronJobs = async () => {
console.log("Setting up cron jobs....");
const admin = await findAdmin();
if (admin?.enableDockerCleanup) {
scheduleJob("docker-cleanup", "0 0 * * *", async () => {
console.log(
`Docker Cleanup [${new Date().toLocaleString()}] Running docker cleanup`,
);
await cleanUpUnusedImages();
await cleanUpDockerBuilder();
await cleanUpSystemPrune();
});
}
const servers = await getAllServers();
for (const server of servers) {
const { appName, serverId } = server;
if (serverId) {
scheduleJob(serverId, "0 0 * * *", async () => {
console.log(
`SERVER-BACKUP[${new Date().toLocaleString()}] Running Cleanup ${appName}`,
);
await cleanUpUnusedImages(serverId);
await cleanUpDockerBuilder(serverId);
await cleanUpSystemPrune(serverId);
});
}
}
const pgs = await db.query.postgres.findMany({
with: {
backups: {
with: {
destination: true,
postgres: true,
mariadb: true,
mysql: true,
mongo: true,
},
},
},
});
for (const pg of pgs) {
for (const backup of pg.backups) {
const { schedule, backupId, enabled } = backup;
if (enabled) {
scheduleJob(backupId, schedule, async () => {
console.log(
`PG-SERVER[${new Date().toLocaleString()}] Running Backup ${backupId}`,
);
await runPostgresBackup(pg, backup);
});
}
}
}
const mariadbs = await db.query.mariadb.findMany({
with: {
backups: {
with: {
destination: true,
postgres: true,
mariadb: true,
mysql: true,
mongo: true,
},
},
},
});
for (const maria of mariadbs) {
for (const backup of maria.backups) {
const { schedule, backupId, enabled } = backup;
if (enabled) {
scheduleJob(backupId, schedule, async () => {
console.log(
`MARIADB-SERVER[${new Date().toLocaleString()}] Running Backup ${backupId}`,
);
await runMariadbBackup(maria, backup);
});
}
}
}
const mongodbs = await db.query.mongo.findMany({
with: {
backups: {
with: {
destination: true,
postgres: true,
mariadb: true,
mysql: true,
mongo: true,
},
},
},
});
for (const mongo of mongodbs) {
for (const backup of mongo.backups) {
const { schedule, backupId, enabled } = backup;
if (enabled) {
scheduleJob(backupId, schedule, async () => {
console.log(
`MONGO-SERVER[${new Date().toLocaleString()}] Running Backup ${backupId}`,
);
await runMongoBackup(mongo, backup);
});
}
}
}
const mysqls = await db.query.mysql.findMany({
with: {
backups: {
with: {
destination: true,
postgres: true,
mariadb: true,
mysql: true,
mongo: true,
},
},
},
});
for (const mysql of mysqls) {
for (const backup of mysql.backups) {
const { schedule, backupId, enabled } = backup;
if (enabled) {
scheduleJob(backupId, schedule, async () => {
console.log(
`MYSQL-SERVER[${new Date().toLocaleString()}] Running Backup ${backupId}`,
);
await runMySqlBackup(mysql, backup);
});
}
}
}
};

View File

@@ -0,0 +1,65 @@
import path from "node:path";
import type { BackupSchedule } from "@/server/services/backup";
import type { Mariadb } from "@/server/services/mariadb";
import { findProjectById } from "@/server/services/project";
import {
getRemoteServiceContainer,
getServiceContainer,
} from "../docker/utils";
import { sendDatabaseBackupNotifications } from "../notifications/database-backup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getS3Credentials } from "./utils";
export const runMariadbBackup = async (
mariadb: Mariadb,
backup: BackupSchedule,
) => {
const { appName, databasePassword, databaseUser, projectId, name } = mariadb;
const project = await findProjectById(projectId);
const { prefix, database } = backup;
const destination = backup.destination;
const backupFileName = `${new Date().toISOString()}.sql.gz`;
const bucketDestination = path.join(prefix, backupFileName);
try {
const rcloneFlags = getS3Credentials(destination);
const rcloneDestination = `:s3:${destination.bucket}/${bucketDestination}`;
const rcloneCommand = `rclone rcat ${rcloneFlags.join(" ")} "${rcloneDestination}"`;
if (mariadb.serverId) {
const { Id: containerId } = await getRemoteServiceContainer(
mariadb.serverId,
appName,
);
const mariadbDumpCommand = `docker exec ${containerId} sh -c "mariadb-dump --user='${databaseUser}' --password='${databasePassword}' --databases ${database} | gzip"`;
await execAsyncRemote(
mariadb.serverId,
`${mariadbDumpCommand} | ${rcloneCommand}`,
);
} else {
const { Id: containerId } = await getServiceContainer(appName);
const mariadbDumpCommand = `docker exec ${containerId} sh -c "mariadb-dump --user='${databaseUser}' --password='${databasePassword}' --databases ${database} | gzip"`;
await execAsync(`${mariadbDumpCommand} | ${rcloneCommand}`);
}
await sendDatabaseBackupNotifications({
applicationName: name,
projectName: project.name,
databaseType: "mariadb",
type: "success",
});
} catch (error) {
console.log(error);
await sendDatabaseBackupNotifications({
applicationName: name,
projectName: project.name,
databaseType: "mariadb",
type: "error",
// @ts-ignore
errorMessage: error?.message || "Error message not provided",
});
throw error;
}
};

View File

@@ -0,0 +1,63 @@
import path from "node:path";
import type { BackupSchedule } from "@/server/services/backup";
import type { Mongo } from "@/server/services/mongo";
import { findProjectById } from "@/server/services/project";
import {
getRemoteServiceContainer,
getServiceContainer,
} from "../docker/utils";
import { sendDatabaseBackupNotifications } from "../notifications/database-backup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getS3Credentials } from "./utils";
// mongodb://mongo:Bqh7AQl-PRbnBu@localhost:27017/?tls=false&directConnection=true
export const runMongoBackup = async (mongo: Mongo, backup: BackupSchedule) => {
const { appName, databasePassword, databaseUser, projectId, name } = mongo;
const project = await findProjectById(projectId);
const { prefix, database } = backup;
const destination = backup.destination;
const backupFileName = `${new Date().toISOString()}.dump.gz`;
const bucketDestination = path.join(prefix, backupFileName);
try {
const rcloneFlags = getS3Credentials(destination);
const rcloneDestination = `:s3:${destination.bucket}/${bucketDestination}`;
const rcloneCommand = `rclone rcat ${rcloneFlags.join(" ")} "${rcloneDestination}"`;
if (mongo.serverId) {
const { Id: containerId } = await getRemoteServiceContainer(
mongo.serverId,
appName,
);
const mongoDumpCommand = `docker exec ${containerId} sh -c "mongodump -d '${database}' -u '${databaseUser}' -p '${databasePassword}' --authenticationDatabase=admin --gzip"`;
await execAsyncRemote(
mongo.serverId,
`${mongoDumpCommand} | ${rcloneCommand}`,
);
} else {
const { Id: containerId } = await getServiceContainer(appName);
const mongoDumpCommand = `docker exec ${containerId} sh -c "mongodump -d '${database}' -u '${databaseUser}' -p '${databasePassword}' --authenticationDatabase=admin --gzip"`;
await execAsync(`${mongoDumpCommand} | ${rcloneCommand}`);
}
await sendDatabaseBackupNotifications({
applicationName: name,
projectName: project.name,
databaseType: "mongodb",
type: "success",
});
} catch (error) {
console.log(error);
await sendDatabaseBackupNotifications({
applicationName: name,
projectName: project.name,
databaseType: "mongodb",
type: "error",
// @ts-ignore
errorMessage: error?.message || "Error message not provided",
});
throw error;
}
};
// mongorestore -d monguito -u mongo -p Bqh7AQl-PRbnBu --authenticationDatabase admin --gzip --archive=2024-04-13T05:03:58.937Z.dump.gz

View File

@@ -0,0 +1,62 @@
import { unlink } from "node:fs/promises";
import path from "node:path";
import type { BackupSchedule } from "@/server/services/backup";
import type { MySql } from "@/server/services/mysql";
import { findProjectById } from "@/server/services/project";
import {
getRemoteServiceContainer,
getServiceContainer,
} from "../docker/utils";
import { sendDatabaseBackupNotifications } from "../notifications/database-backup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getS3Credentials } from "./utils";
export const runMySqlBackup = async (mysql: MySql, backup: BackupSchedule) => {
const { appName, databaseRootPassword, projectId, name } = mysql;
const project = await findProjectById(projectId);
const { prefix, database } = backup;
const destination = backup.destination;
const backupFileName = `${new Date().toISOString()}.sql.gz`;
const bucketDestination = path.join(prefix, backupFileName);
try {
const rcloneFlags = getS3Credentials(destination);
const rcloneDestination = `:s3:${destination.bucket}/${bucketDestination}`;
const rcloneCommand = `rclone rcat ${rcloneFlags.join(" ")} "${rcloneDestination}"`;
if (mysql.serverId) {
const { Id: containerId } = await getRemoteServiceContainer(
mysql.serverId,
appName,
);
const mysqlDumpCommand = `docker exec ${containerId} sh -c "mysqldump --default-character-set=utf8mb4 -u 'root' --password='${databaseRootPassword}' --single-transaction --no-tablespaces --quick '${database}' | gzip"`;
await execAsyncRemote(
mysql.serverId,
`${mysqlDumpCommand} | ${rcloneCommand}`,
);
} else {
const { Id: containerId } = await getServiceContainer(appName);
const mysqlDumpCommand = `docker exec ${containerId} sh -c "mysqldump --default-character-set=utf8mb4 -u 'root' --password='${databaseRootPassword}' --single-transaction --no-tablespaces --quick '${database}' | gzip"`;
await execAsync(`${mysqlDumpCommand} | ${rcloneCommand}`);
}
await sendDatabaseBackupNotifications({
applicationName: name,
projectName: project.name,
databaseType: "mysql",
type: "success",
});
} catch (error) {
console.log(error);
await sendDatabaseBackupNotifications({
applicationName: name,
projectName: project.name,
databaseType: "mysql",
type: "error",
// @ts-ignore
errorMessage: error?.message || "Error message not provided",
});
throw error;
}
};

View File

@@ -0,0 +1,69 @@
import path from "node:path";
import type { BackupSchedule } from "@/server/services/backup";
import type { Postgres } from "@/server/services/postgres";
import { findProjectById } from "@/server/services/project";
import {
getRemoteServiceContainer,
getServiceContainer,
} from "../docker/utils";
import { sendDatabaseBackupNotifications } from "../notifications/database-backup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getS3Credentials } from "./utils";
export const runPostgresBackup = async (
postgres: Postgres,
backup: BackupSchedule,
) => {
const { appName, databaseUser, name, projectId } = postgres;
const project = await findProjectById(projectId);
const { prefix, database } = backup;
const destination = backup.destination;
const backupFileName = `${new Date().toISOString()}.sql.gz`;
const bucketDestination = path.join(prefix, backupFileName);
try {
const rcloneFlags = getS3Credentials(destination);
const rcloneDestination = `:s3:${destination.bucket}/${bucketDestination}`;
const rcloneCommand = `rclone rcat ${rcloneFlags.join(" ")} "${rcloneDestination}"`;
if (postgres.serverId) {
const { Id: containerId } = await getRemoteServiceContainer(
postgres.serverId,
appName,
);
const pgDumpCommand = `docker exec ${containerId} sh -c "pg_dump -Fc --no-acl --no-owner -h localhost -U ${databaseUser} --no-password '${database}' | gzip"`;
await execAsyncRemote(
postgres.serverId,
`${pgDumpCommand} | ${rcloneCommand}`,
);
} else {
const { Id: containerId } = await getServiceContainer(appName);
const pgDumpCommand = `docker exec ${containerId} sh -c "pg_dump -Fc --no-acl --no-owner -h localhost -U ${databaseUser} --no-password '${database}' | gzip"`;
await execAsync(`${pgDumpCommand} | ${rcloneCommand}`);
}
await sendDatabaseBackupNotifications({
applicationName: name,
projectName: project.name,
databaseType: "postgres",
type: "success",
});
} catch (error) {
await sendDatabaseBackupNotifications({
applicationName: name,
projectName: project.name,
databaseType: "postgres",
type: "error",
// @ts-ignore
errorMessage: error?.message || "Error message not provided",
});
throw error;
}
};
// Restore
// /Applications/pgAdmin 4.app/Contents/SharedSupport/pg_restore --host "localhost" --port "5432" --username "mauricio" --no-password --dbname "postgres" --verbose "/Users/mauricio/Downloads/_databases_2024-04-12T07_02_05.234Z.sql"

View File

@@ -0,0 +1,43 @@
import type { BackupSchedule } from "@/server/services/backup";
import type { Destination } from "@/server/services/destination";
import { scheduleJob, scheduledJobs } from "node-schedule";
import { runMariadbBackup } from "./mariadb";
import { runMongoBackup } from "./mongo";
import { runMySqlBackup } from "./mysql";
import { runPostgresBackup } from "./postgres";
export const scheduleBackup = (backup: BackupSchedule) => {
const { schedule, backupId, databaseType, postgres, mysql, mongo, mariadb } =
backup;
scheduleJob(backupId, schedule, async () => {
if (databaseType === "postgres" && postgres) {
await runPostgresBackup(postgres, backup);
} else if (databaseType === "mysql" && mysql) {
await runMySqlBackup(mysql, backup);
} else if (databaseType === "mongo" && mongo) {
await runMongoBackup(mongo, backup);
} else if (databaseType === "mariadb" && mariadb) {
await runMariadbBackup(mariadb, backup);
}
});
};
export const removeScheduleBackup = (backupId: string) => {
const currentJob = scheduledJobs[backupId];
currentJob?.cancel();
};
export const getS3Credentials = (destination: Destination) => {
const { accessKey, secretAccessKey, bucket, region, endpoint } = destination;
const rcloneFlags = [
// `--s3-provider=Cloudflare`,
`--s3-access-key-id=${accessKey}`,
`--s3-secret-access-key=${secretAccessKey}`,
`--s3-region=${region}`,
`--s3-endpoint=${endpoint}`,
"--s3-no-check-bucket",
"--s3-force-path-style",
];
return rcloneFlags;
};
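
For reference, a sketch of how these flags compose with the dump commands used by the runners above. The destination values are placeholders, not real credentials:

const flags = getS3Credentials({
  accessKey: "AKIAEXAMPLE",
  secretAccessKey: "example-secret",
  bucket: "backups",
  region: "us-east-1",
  endpoint: "https://s3.amazonaws.com",
} as Destination);
// Each runner streams a gzipped dump from the database container straight into S3:
//   docker exec <containerId> sh -c "<dump command> | gzip" | rclone rcat <flags> ":s3:backups/<prefix>/<timestamp>.sql.gz"
console.log(`rclone rcat ${flags.join(" ")} ":s3:backups/daily/2024-10-05T00:00:00.000Z.sql.gz"`);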

View File

@@ -0,0 +1,216 @@
import {
createWriteStream,
existsSync,
mkdirSync,
writeFileSync,
} from "node:fs";
import { dirname, join } from "node:path";
import { paths } from "@/server/constants";
import type { InferResultType } from "@/server/types/with";
import boxen from "boxen";
import {
writeDomainsToCompose,
writeDomainsToComposeRemote,
} from "../docker/domain";
import { encodeBase64, prepareEnvironmentVariables } from "../docker/utils";
import { execAsyncRemote } from "../process/execAsync";
import { spawnAsync } from "../process/spawnAsync";
export type ComposeNested = InferResultType<
"compose",
{ project: true; mounts: true; domains: true }
>;
export const buildCompose = async (compose: ComposeNested, logPath: string) => {
const writeStream = createWriteStream(logPath, { flags: "a" });
const { sourceType, appName, mounts, composeType, domains } = compose;
try {
const { COMPOSE_PATH } = paths();
const command = createCommand(compose);
await writeDomainsToCompose(compose, domains);
createEnvFile(compose);
const logContent = `
App Name: ${appName}
Build Compose 🐳
Detected: ${mounts.length} mounts 📂
Command: docker ${command}
Source Type: ${sourceType}
Compose Type: ${composeType}`;
const logBox = boxen(logContent, {
padding: {
left: 1,
right: 1,
bottom: 1,
},
width: 80,
borderStyle: "double",
});
writeStream.write(`\n${logBox}\n`);
const projectPath = join(COMPOSE_PATH, compose.appName, "code");
await spawnAsync(
"docker",
[...command.split(" ")],
(data) => {
if (writeStream.writable) {
writeStream.write(data.toString());
}
},
{
cwd: projectPath,
env: {
NODE_ENV: process.env.NODE_ENV,
PATH: process.env.PATH,
},
},
);
writeStream.write("Docker Compose Deployed: ✅");
} catch (error) {
writeStream.write("Error ❌");
throw error;
} finally {
writeStream.end();
}
};
export const getBuildComposeCommand = async (
compose: ComposeNested,
logPath: string,
) => {
const { COMPOSE_PATH } = paths(true);
const { sourceType, appName, mounts, composeType, domains, composePath } =
compose;
const command = createCommand(compose);
const envCommand = getCreateEnvFileCommand(compose);
const projectPath = join(COMPOSE_PATH, compose.appName, "code");
const newCompose = await writeDomainsToComposeRemote(
compose,
domains,
logPath,
);
const logContent = `
App Name: ${appName}
Build Compose 🐳
Detected: ${mounts.length} mounts 📂
Command: docker ${command}
Source Type: ${sourceType}
Compose Type: ${composeType}`;
const logBox = boxen(logContent, {
padding: {
left: 1,
right: 1,
bottom: 1,
},
width: 80,
borderStyle: "double",
});
const bashCommand = `
set -e
{
echo "${logBox}" >> "${logPath}"
${newCompose}
${envCommand}
cd "${projectPath}";
docker ${command} >> "${logPath}" 2>&1 || { echo "Error: ❌ Docker command failed" >> "${logPath}"; exit 1; }
echo "Docker Compose Deployed: ✅" >> "${logPath}"
} || {
echo "Error: ❌ Script execution failed" >> "${logPath}"
exit 1
}
`;
return await execAsyncRemote(compose.serverId, bashCommand);
};
const sanitizeCommand = (command: string) => {
const sanitizedCommand = command.trim();
const parts = sanitizedCommand.split(/\s+/);
const restCommand = parts.map((arg) => arg.replace(/^"(.*)"$/, "$1"));
return restCommand.join(" ");
};
export const createCommand = (compose: ComposeNested) => {
const { composeType, appName, sourceType } = compose;
const path =
sourceType === "raw" ? "docker-compose.yml" : compose.composePath;
let command = "";
if (composeType === "docker-compose") {
command = `compose -p ${appName} -f ${path} up -d --build --remove-orphans`;
} else if (composeType === "stack") {
command = `stack deploy -c ${path} ${appName} --prune`;
}
const customCommand = sanitizeCommand(compose.command);
if (customCommand) {
command = `${command} ${customCommand}`;
}
return command;
};
const createEnvFile = (compose: ComposeNested) => {
const { COMPOSE_PATH } = paths();
const { env, composePath, appName } = compose;
const composeFilePath = composePath
? join(COMPOSE_PATH, appName, "code", composePath)
: join(COMPOSE_PATH, appName, "code", "docker-compose.yml");
const envFilePath = join(dirname(composeFilePath), ".env");
let envContent = env || "";
if (!envContent.includes("DOCKER_CONFIG")) {
envContent += "\nDOCKER_CONFIG=/root/.docker/config.json";
}
if (compose.randomize) {
envContent += `\nCOMPOSE_PREFIX=${compose.suffix}`;
}
const envFileContent = prepareEnvironmentVariables(envContent).join("\n");
if (!existsSync(dirname(envFilePath))) {
mkdirSync(dirname(envFilePath), { recursive: true });
}
writeFileSync(envFilePath, envFileContent);
};
export const getCreateEnvFileCommand = (compose: ComposeNested) => {
const { COMPOSE_PATH } = paths(true);
const { env, composePath, appName } = compose;
const composeFilePath = composePath
? join(COMPOSE_PATH, appName, "code", composePath)
: join(COMPOSE_PATH, appName, "code", "docker-compose.yml");
const envFilePath = join(dirname(composeFilePath), ".env");
let envContent = env || "";
if (!envContent.includes("DOCKER_CONFIG")) {
envContent += "\nDOCKER_CONFIG=/root/.docker/config.json";
}
if (compose.randomize) {
envContent += `\nCOMPOSE_PREFIX=${compose.suffix}`;
}
const envFileContent = prepareEnvironmentVariables(envContent).join("\n");
const encodedContent = encodeBase64(envFileContent);
return `
touch ${envFilePath};
echo "${encodedContent}" | base64 -d > "${envFilePath}";
`;
};
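
To make the two deployment paths concrete, a sketch of what createCommand yields (the app name is illustrative):

// composeType === "docker-compose":
//   compose -p myapp -f docker-compose.yml up -d --build --remove-orphans
// composeType === "stack" (Swarm):
//   stack deploy -c docker-compose.yml myapp --prune
// Any user-supplied compose.command is sanitized and appended afterwards.
const exampleCommand = (compose: ComposeNested) =>
  createCommand({
    ...compose,
    composeType: "docker-compose",
    sourceType: "raw",
    appName: "myapp",
  });
// buildCompose() then runs `docker ${command}` from the project's code directory,
// while getBuildComposeCommand() wraps the same command in a bash script for remote servers.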

View File

@@ -0,0 +1,116 @@
import type { WriteStream } from "node:fs";
import { prepareEnvironmentVariables } from "@/server/utils/docker/utils";
import type { ApplicationNested } from ".";
import {
getBuildAppDirectory,
getDockerContextPath,
} from "../filesystem/directory";
import { spawnAsync } from "../process/spawnAsync";
import { createEnvFile, createEnvFileCommand } from "./utils";
export const buildCustomDocker = async (
application: ApplicationNested,
writeStream: WriteStream,
) => {
const { appName, env, publishDirectory, buildArgs, dockerBuildStage } =
application;
const dockerFilePath = getBuildAppDirectory(application);
try {
const image = `${appName}`;
const defaultContextPath =
dockerFilePath.substring(0, dockerFilePath.lastIndexOf("/") + 1) || ".";
const args = prepareEnvironmentVariables(buildArgs);
const dockerContextPath = getDockerContextPath(application);
const commandArgs = ["build", "-t", image, "-f", dockerFilePath, "."];
if (dockerBuildStage) {
commandArgs.push("--target", dockerBuildStage);
}
for (const arg of args) {
commandArgs.push("--build-arg", arg);
}
/*
Do not generate an environment file when publishDirectory is specified,
as it could be publicly exposed.
*/
if (!publishDirectory) {
createEnvFile(dockerFilePath, env);
}
await spawnAsync(
"docker",
commandArgs,
(data) => {
if (writeStream.writable) {
writeStream.write(data);
}
},
{
cwd: dockerContextPath || defaultContextPath,
},
);
} catch (error) {
throw error;
}
};
export const getDockerCommand = (
application: ApplicationNested,
logPath: string,
) => {
const { appName, env, publishDirectory, buildArgs, dockerBuildStage } =
application;
const dockerFilePath = getBuildAppDirectory(application);
try {
const image = `${appName}`;
const defaultContextPath =
dockerFilePath.substring(0, dockerFilePath.lastIndexOf("/") + 1) || ".";
const args = prepareEnvironmentVariables(buildArgs);
const dockerContextPath =
getDockerContextPath(application) || defaultContextPath;
const commandArgs = ["build", "-t", image, "-f", dockerFilePath, "."];
if (dockerBuildStage) {
commandArgs.push("--target", dockerBuildStage);
}
for (const arg of args) {
commandArgs.push("--build-arg", arg);
}
/*
Do not generate an environment file when publishDirectory is specified,
as it could be publicly exposed.
*/
let command = "";
if (!publishDirectory) {
command += createEnvFileCommand(dockerFilePath, env);
}
command = `
echo "Building ${appName}" >> ${logPath};
cd ${dockerContextPath} >> ${logPath} 2>> ${logPath} || {
echo "❌ The path ${dockerContextPath} does not exist" >> ${logPath};
exit 1;
}
docker ${commandArgs.join(" ")} >> ${logPath} 2>> ${logPath} || {
echo "❌ Docker build failed" >> ${logPath};
exit 1;
}
echo "✅ Docker build completed." >> ${logPath};
`;
return command;
} catch (error) {
throw error;
}
};

View File

@@ -0,0 +1,122 @@
import fs from "node:fs/promises";
import path, { join } from "node:path";
import { paths } from "@/server/constants";
import type { Application } from "@/server/services/application";
import { findServerById } from "@/server/services/server";
import AdmZip from "adm-zip";
import { Client, type SFTPWrapper } from "ssh2";
import {
recreateDirectory,
recreateDirectoryRemote,
} from "../filesystem/directory";
import { execAsyncRemote } from "../process/execAsync";
export const unzipDrop = async (zipFile: File, application: Application) => {
let sftp: SFTPWrapper | null = null;
try {
const { appName } = application;
const { APPLICATIONS_PATH } = paths(!!application.serverId);
const outputPath = join(APPLICATIONS_PATH, appName, "code");
if (application.serverId) {
await recreateDirectoryRemote(outputPath, application.serverId);
} else {
await recreateDirectory(outputPath);
}
const arrayBuffer = await zipFile.arrayBuffer();
const buffer = Buffer.from(arrayBuffer);
const zip = new AdmZip(buffer);
const zipEntries = zip.getEntries();
const rootEntries = zipEntries.filter(
(entry) =>
entry.entryName.split("/").length === 1 ||
(entry.entryName.split("/").length === 2 &&
entry.entryName.endsWith("/")),
);
const hasSingleRootFolder = !!(
rootEntries.length === 1 && rootEntries[0]?.isDirectory
);
const rootFolderName = hasSingleRootFolder
? rootEntries[0]?.entryName.split("/")[0]
: "";
if (application.serverId) {
sftp = await getSFTPConnection(application.serverId);
}
for (const entry of zipEntries) {
let filePath = entry.entryName;
if (
hasSingleRootFolder &&
rootFolderName &&
filePath.startsWith(`${rootFolderName}/`)
) {
filePath = filePath.slice(rootFolderName?.length + 1);
}
if (!filePath) continue;
const fullPath = path.join(outputPath, filePath);
if (application.serverId) {
if (entry.isDirectory) {
await execAsyncRemote(application.serverId, `mkdir -p ${fullPath}`);
} else {
if (sftp === null) throw new Error("No SFTP connection available");
await uploadFileToServer(sftp, entry.getData(), fullPath);
}
} else {
if (entry.isDirectory) {
await fs.mkdir(fullPath, { recursive: true });
} else {
await fs.mkdir(path.dirname(fullPath), { recursive: true });
await fs.writeFile(fullPath, entry.getData());
}
}
}
} catch (error) {
console.error("Error processing ZIP file:", error);
throw error;
} finally {
sftp?.end();
}
};
const getSFTPConnection = async (serverId: string): Promise<SFTPWrapper> => {
const server = await findServerById(serverId);
if (!server.sshKeyId) throw new Error("No SSH key available for this server");
return new Promise((resolve, reject) => {
const conn = new Client();
conn
.on("ready", () => {
conn.sftp((err, sftp) => {
if (err) return reject(err);
resolve(sftp);
});
})
.connect({
host: server.ipAddress,
port: server.port,
username: server.username,
privateKey: server.sshKey?.privateKey,
timeout: 99999,
});
});
};
const uploadFileToServer = (
sftp: SFTPWrapper,
data: Buffer,
remotePath: string,
): Promise<void> => {
return new Promise((resolve, reject) => {
sftp.writeFile(remotePath, data, (err) => {
if (err) return reject(err);
resolve();
});
});
};

View File

@@ -0,0 +1,73 @@
import type { WriteStream } from "node:fs";
import type { ApplicationNested } from ".";
import { prepareEnvironmentVariables } from "../docker/utils";
import { getBuildAppDirectory } from "../filesystem/directory";
import { spawnAsync } from "../process/spawnAsync";
// TODO: integrate on the VPS: sudo chown -R $(whoami) ~/.docker
export const buildHeroku = async (
application: ApplicationNested,
writeStream: WriteStream,
) => {
const { env, appName } = application;
const buildAppDirectory = getBuildAppDirectory(application);
const envVariables = prepareEnvironmentVariables(env);
try {
const args = [
"build",
appName,
"--path",
buildAppDirectory,
"--builder",
"heroku/builder:24",
];
for (const env of envVariables) {
args.push("--env", env);
}
await spawnAsync("pack", args, (data) => {
if (writeStream.writable) {
writeStream.write(data);
}
});
return true;
} catch (e) {
throw e;
}
};
export const getHerokuCommand = (
application: ApplicationNested,
logPath: string,
) => {
const { env, appName } = application;
const buildAppDirectory = getBuildAppDirectory(application);
const envVariables = prepareEnvironmentVariables(env);
const args = [
"build",
appName,
"--path",
buildAppDirectory,
"--builder",
"heroku/builder:24",
];
for (const env of envVariables) {
args.push("--env", env);
}
const command = `pack ${args.join(" ")}`;
const bashCommand = `
echo "Starting heroku build..." >> ${logPath};
${command} >> ${logPath} 2>> ${logPath} || {
echo "❌ Heroku build failed" >> ${logPath};
exit 1;
}
echo "✅ Heroku build completed." >> ${logPath};
`;
return bashCommand;
};

View File

@@ -0,0 +1,216 @@
import { createWriteStream } from "node:fs";
import type { InferResultType } from "@/server/types/with";
import type { CreateServiceOptions } from "dockerode";
import { uploadImage } from "../cluster/upload";
import {
calculateResources,
generateBindMounts,
generateConfigContainer,
generateFileMounts,
generateVolumeMounts,
prepareEnvironmentVariables,
} from "../docker/utils";
import { getRemoteDocker } from "../servers/remote-docker";
import { buildCustomDocker, getDockerCommand } from "./docker-file";
import { buildHeroku, getHerokuCommand } from "./heroku";
import { buildNixpacks, getNixpacksCommand } from "./nixpacks";
import { buildPaketo, getPaketoCommand } from "./paketo";
import { buildStatic, getStaticCommand } from "./static";
// NIXPACKS: codeDirectory = path of the code directory
// HEROKU: codeDirectory = path of the code directory
// PAKETO: codeDirectory = path of the code directory
// DOCKERFILE: codeDirectory = exact path of the Dockerfile
export type ApplicationNested = InferResultType<
"applications",
{ mounts: true; security: true; redirects: true; ports: true; registry: true }
>;
export const buildApplication = async (
application: ApplicationNested,
logPath: string,
) => {
const writeStream = createWriteStream(logPath, { flags: "a" });
const { buildType, sourceType } = application;
try {
writeStream.write(
`\nBuild ${buildType}: ✅\nSource Type: ${sourceType}: ✅\n`,
);
console.log(`Build ${buildType}: ✅`);
if (buildType === "nixpacks") {
await buildNixpacks(application, writeStream);
} else if (buildType === "heroku_buildpacks") {
await buildHeroku(application, writeStream);
} else if (buildType === "paketo_buildpacks") {
await buildPaketo(application, writeStream);
} else if (buildType === "dockerfile") {
await buildCustomDocker(application, writeStream);
} else if (buildType === "static") {
await buildStatic(application, writeStream);
}
if (application.registryId) {
await uploadImage(application, writeStream);
}
await mechanizeDockerContainer(application);
writeStream.write("Docker Deployed: ✅");
} catch (error) {
if (error instanceof Error) {
writeStream.write(`Error ❌\n${error?.message}`);
} else {
writeStream.write("Error ❌");
}
throw error;
} finally {
writeStream.end();
}
};
export const getBuildCommand = (
application: ApplicationNested,
logPath: string,
) => {
const { buildType } = application;
switch (buildType) {
case "nixpacks":
return getNixpacksCommand(application, logPath);
case "heroku_buildpacks":
return getHerokuCommand(application, logPath);
case "paketo_buildpacks":
return getPaketoCommand(application, logPath);
case "static":
return getStaticCommand(application, logPath);
case "dockerfile":
return getDockerCommand(application, logPath);
}
};
export const mechanizeDockerContainer = async (
application: ApplicationNested,
) => {
const {
appName,
env,
mounts,
cpuLimit,
memoryLimit,
memoryReservation,
cpuReservation,
command,
ports,
} = application;
const resources = calculateResources({
memoryLimit,
memoryReservation,
cpuLimit,
cpuReservation,
});
const volumesMount = generateVolumeMounts(mounts);
const {
HealthCheck,
RestartPolicy,
Placement,
Labels,
Mode,
RollbackConfig,
UpdateConfig,
Networks,
} = generateConfigContainer(application);
const bindsMount = generateBindMounts(mounts);
const filesMount = generateFileMounts(appName, application);
const envVariables = prepareEnvironmentVariables(env);
const image = getImageName(application);
const authConfig = getAuthConfig(application);
const docker = await getRemoteDocker(application.serverId);
const settings: CreateServiceOptions = {
authconfig: authConfig,
Name: appName,
TaskTemplate: {
ContainerSpec: {
HealthCheck,
Image: image,
Env: envVariables,
Mounts: [...volumesMount, ...bindsMount, ...filesMount],
...(command
? {
Command: ["/bin/sh"],
Args: ["-c", command],
}
: {}),
Labels,
},
Networks,
RestartPolicy,
Placement,
Resources: {
...resources,
},
},
Mode,
RollbackConfig,
EndpointSpec: {
Ports: ports.map((port) => ({
Protocol: port.protocol,
TargetPort: port.targetPort,
PublishedPort: port.publishedPort,
})),
},
UpdateConfig,
};
try {
const service = docker.getService(appName);
const inspect = await service.inspect();
await service.update({
version: Number.parseInt(inspect.Version.Index),
...settings,
TaskTemplate: {
...settings.TaskTemplate,
ForceUpdate: inspect.Spec.TaskTemplate.ForceUpdate + 1,
},
});
} catch (error) {
await docker.createService(settings);
}
};
const getImageName = (application: ApplicationNested) => {
const { appName, sourceType, dockerImage, registry } = application;
if (sourceType === "docker") {
return dockerImage || "ERROR-NO-IMAGE-PROVIDED";
}
const registryUrl = registry?.registryUrl || "";
const imagePrefix = registry?.imagePrefix ? `${registry.imagePrefix}/` : "";
return registry
? `${registryUrl}/${imagePrefix}${appName}`
: `${appName}:latest`;
};
const getAuthConfig = (application: ApplicationNested) => {
const { registry, username, password, sourceType } = application;
if (sourceType === "docker") {
if (username && password) {
return {
password,
username,
serveraddress: "https://index.docker.io/v1/",
};
}
} else if (registry) {
return {
password: registry.password,
username: registry.username,
serveraddress: registry.registryUrl,
};
}
return undefined;
};
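
A sketch of the intended split between the two entry points: buildApplication streams a local build into the log file, while getBuildCommand returns a bash script that a remote server runs over SSH. The wrapper function below is hypothetical:

import { execAsyncRemote } from "../process/execAsync";

const deployApplication = async (app: ApplicationNested, logPath: string) => {
  if (app.serverId) {
    // Remote server: the build is serialized into one script and executed via SSH.
    const command = getBuildCommand(app, logPath);
    await execAsyncRemote(app.serverId, command);
  } else {
    // Local (Dokploy host): build tools are invoked directly and piped into logPath.
    await buildApplication(app, logPath);
  }
};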

View File

@@ -0,0 +1,138 @@
import { type WriteStream, existsSync, mkdirSync } from "node:fs";
import path from "node:path";
import { buildStatic, getStaticCommand } from "@/server/utils/builders/static";
import { nanoid } from "nanoid";
import type { ApplicationNested } from ".";
import { prepareEnvironmentVariables } from "../docker/utils";
import { getBuildAppDirectory } from "../filesystem/directory";
import { spawnAsync } from "../process/spawnAsync";
export const buildNixpacks = async (
application: ApplicationNested,
writeStream: WriteStream,
) => {
const { env, appName, publishDirectory, serverId } = application;
const buildAppDirectory = getBuildAppDirectory(application);
const buildContainerId = `${appName}-${nanoid(10)}`;
const envVariables = prepareEnvironmentVariables(env);
const writeToStream = (data: string) => {
if (writeStream.writable) {
writeStream.write(data);
}
};
try {
const args = ["build", buildAppDirectory, "--name", appName];
for (const env of envVariables) {
args.push("--env", env);
}
if (publishDirectory) {
/* No need for any start command, since we'll use nginx later on */
args.push("--no-error-without-start");
}
await spawnAsync("nixpacks", args, writeToStream);
/*
Run the container with the image created by nixpacks,
and copy the artifacts on the host filesystem.
Then, remove the container and create a static build.
*/
if (publishDirectory) {
await spawnAsync(
"docker",
["create", "--name", buildContainerId, appName],
writeToStream,
);
const localPath = path.join(buildAppDirectory, publishDirectory);
if (!existsSync(path.dirname(localPath))) {
mkdirSync(path.dirname(localPath), { recursive: true });
}
// https://docs.docker.com/reference/cli/docker/container/cp/
const isDirectory =
publishDirectory.endsWith("/") || !path.extname(publishDirectory);
await spawnAsync(
"docker",
[
"cp",
`${buildContainerId}:/app/${publishDirectory}${isDirectory ? "/." : ""}`,
localPath,
],
writeToStream,
);
await spawnAsync("docker", ["rm", buildContainerId], writeToStream);
await buildStatic(application, writeStream);
}
return true;
} catch (e) {
await spawnAsync("docker", ["rm", buildContainerId], writeToStream);
throw e;
}
};
export const getNixpacksCommand = (
application: ApplicationNested,
logPath: string,
) => {
const { env, appName, publishDirectory, serverId } = application;
const buildAppDirectory = getBuildAppDirectory(application);
const buildContainerId = `${appName}-${nanoid(10)}`;
const envVariables = prepareEnvironmentVariables(env);
const args = ["build", buildAppDirectory, "--name", appName];
for (const env of envVariables) {
args.push("--env", env);
}
if (publishDirectory) {
/* No need for any start command, since we'll use nginx later on */
args.push("--no-error-without-start");
}
const command = `nixpacks ${args.join(" ")}`;
let bashCommand = `
echo "Starting nixpacks build..." >> ${logPath};
${command} >> ${logPath} 2>> ${logPath} || {
echo "❌ Nixpacks build failed" >> ${logPath};
exit 1;
}
echo "✅ Nixpacks build completed." >> ${logPath};
`;
/*
Run the container with the image created by nixpacks,
and copy the artifacts on the host filesystem.
Then, remove the container and create a static build.
*/
if (publishDirectory) {
const localPath = path.join(buildAppDirectory, publishDirectory);
const isDirectory =
publishDirectory.endsWith("/") || !path.extname(publishDirectory);
bashCommand += `
docker create --name ${buildContainerId} ${appName}
mkdir -p ${localPath}
docker cp ${buildContainerId}:/app/${publishDirectory}${isDirectory ? "/." : ""} ${path.join(buildAppDirectory, publishDirectory)} >> ${logPath} 2>> ${logPath} || {
docker rm ${buildContainerId}
echo "❌ Copying ${publishDirectory} to ${path.join(buildAppDirectory, publishDirectory)} failed" >> ${logPath};
exit 1;
}
docker rm ${buildContainerId}
${getStaticCommand(application, logPath)}
`;
}
return bashCommand;
};

View File

@@ -0,0 +1,72 @@
import type { WriteStream } from "node:fs";
import type { ApplicationNested } from ".";
import { prepareEnvironmentVariables } from "../docker/utils";
import { getBuildAppDirectory } from "../filesystem/directory";
import { spawnAsync } from "../process/spawnAsync";
export const buildPaketo = async (
application: ApplicationNested,
writeStream: WriteStream,
) => {
const { env, appName } = application;
const buildAppDirectory = getBuildAppDirectory(application);
const envVariables = prepareEnvironmentVariables(env);
try {
const args = [
"build",
appName,
"--path",
buildAppDirectory,
"--builder",
"paketobuildpacks/builder-jammy-full",
];
for (const env of envVariables) {
args.push("--env", env);
}
await spawnAsync("pack", args, (data) => {
if (writeStream.writable) {
writeStream.write(data);
}
});
return true;
} catch (e) {
throw e;
}
};
export const getPaketoCommand = (
application: ApplicationNested,
logPath: string,
) => {
const { env, appName } = application;
const buildAppDirectory = getBuildAppDirectory(application);
const envVariables = prepareEnvironmentVariables(env);
const args = [
"build",
appName,
"--path",
buildAppDirectory,
"--builder",
"paketobuildpacks/builder-jammy-full",
];
for (const env of envVariables) {
args.push("--env", env);
}
const command = `pack ${args.join(" ")}`;
const bashCommand = `
echo "Starting Paketo build..." >> ${logPath};
${command} >> ${logPath} 2>> ${logPath} || {
echo "❌ Paketo build failed" >> ${logPath};
exit 1;
}
echo "✅ Paketo build completed." >> ${logPath};
`;
return bashCommand;
};

View File

@@ -0,0 +1,69 @@
import type { WriteStream } from "node:fs";
import {
buildCustomDocker,
getDockerCommand,
} from "@/server/utils/builders/docker-file";
import type { ApplicationNested } from ".";
import { createFile, getCreateFileCommand } from "../docker/utils";
import { getBuildAppDirectory } from "../filesystem/directory";
export const buildStatic = async (
application: ApplicationNested,
writeStream: WriteStream,
) => {
const { publishDirectory } = application;
const buildAppDirectory = getBuildAppDirectory(application);
try {
createFile(
buildAppDirectory,
"Dockerfile",
[
"FROM nginx:alpine",
"WORKDIR /usr/share/nginx/html/",
`COPY ${publishDirectory || "."} .`,
].join("\n"),
);
await buildCustomDocker(
{
...application,
buildType: "dockerfile",
dockerfile: "Dockerfile",
},
writeStream,
);
return true;
} catch (e) {
throw e;
}
};
export const getStaticCommand = (
application: ApplicationNested,
logPath: string,
) => {
const { publishDirectory } = application;
const buildAppDirectory = getBuildAppDirectory(application);
let command = getCreateFileCommand(
buildAppDirectory,
"Dockerfile",
[
"FROM nginx:alpine",
"WORKDIR /usr/share/nginx/html/",
`COPY ${publishDirectory || "."} .`,
].join("\n"),
);
command += getDockerCommand(
{
...application,
buildType: "dockerfile",
dockerfile: "Dockerfile",
},
logPath,
);
return command;
};

View File

@@ -0,0 +1,21 @@
import { existsSync, mkdirSync, writeFileSync } from "node:fs";
import { dirname, join } from "node:path";
import { prepareEnvironmentVariables } from "../docker/utils";
export const createEnvFile = (directory: string, env: string | null) => {
const envFilePath = join(dirname(directory), ".env");
if (!existsSync(dirname(envFilePath))) {
mkdirSync(dirname(envFilePath), { recursive: true });
}
const envFileContent = prepareEnvironmentVariables(env).join("\n");
writeFileSync(envFilePath, envFileContent);
};
export const createEnvFileCommand = (directory: string, env: string | null) => {
const envFilePath = join(dirname(directory), ".env");
if (!existsSync(dirname(envFilePath))) {
mkdirSync(dirname(envFilePath), { recursive: true });
}
const envFileContent = prepareEnvironmentVariables(env).join("\n");
return `echo "${envFileContent}" > ${envFilePath}`;
};
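
Worth noting: both helpers take the path of the Dockerfile itself, not its directory, and place the .env file next to it. A quick illustration (the path is hypothetical):

// Writes <...>/myapp/code/.env with the output of prepareEnvironmentVariables("FOO=bar").
createEnvFile("/etc/dokploy/applications/myapp/code/Dockerfile", "FOO=bar");
// The remote variant returns an equivalent echo command so the file can be created over SSH:
const shellCommand = createEnvFileCommand("/etc/dokploy/applications/myapp/code/Dockerfile", "FOO=bar");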

View File

@@ -0,0 +1,65 @@
import type { WriteStream } from "node:fs";
import type { ApplicationNested } from "../builders";
import { spawnAsync } from "../process/spawnAsync";
export const uploadImage = async (
application: ApplicationNested,
writeStream: WriteStream,
) => {
const registry = application.registry;
if (!registry) {
throw new Error("Registry not found");
}
const { registryUrl, imagePrefix, registryType } = registry;
const { appName } = application;
const imageName = `${appName}:latest`;
const finalURL =
registryType === "selfHosted"
? process.env.NODE_ENV === "development"
? "localhost:5000"
: registryUrl
: registryUrl;
const registryTag = imagePrefix
? `${finalURL}/${imagePrefix}/${imageName}`
: `${finalURL}/${imageName}`;
try {
console.log(finalURL, registryTag);
writeStream.write(
`📦 [Enabled Registry] Uploading image to ${registry.registryType} | ${registryTag} | ${finalURL}\n`,
);
await spawnAsync(
"docker",
["login", finalURL, "-u", registry.username, "-p", registry.password],
(data) => {
if (writeStream.writable) {
writeStream.write(data);
}
},
);
await spawnAsync("docker", ["tag", imageName, registryTag], (data) => {
if (writeStream.writable) {
writeStream.write(data);
}
});
await spawnAsync("docker", ["push", registryTag], (data) => {
if (writeStream.writable) {
writeStream.write(data);
}
});
} catch (error) {
console.log(error);
throw error;
}
};
// docker:
// endpoint: "unix:///var/run/docker.sock"
// exposedByDefault: false
// swarmMode: true

View File

@@ -0,0 +1,98 @@
import type { InferResultType } from "@/server/types/with";
import type { CreateServiceOptions } from "dockerode";
import {
calculateResources,
generateBindMounts,
generateFileMounts,
generateVolumeMounts,
prepareEnvironmentVariables,
} from "../docker/utils";
import { getRemoteDocker } from "../servers/remote-docker";
export type MariadbNested = InferResultType<"mariadb", { mounts: true }>;
export const buildMariadb = async (mariadb: MariadbNested) => {
const {
appName,
env,
externalPort,
dockerImage,
memoryLimit,
memoryReservation,
databaseName,
databaseUser,
databasePassword,
databaseRootPassword,
cpuLimit,
cpuReservation,
command,
mounts,
} = mariadb;
const defaultMariadbEnv = `MARIADB_DATABASE=${databaseName}\nMARIADB_USER=${databaseUser}\nMARIADB_PASSWORD=${databasePassword}\nMARIADB_ROOT_PASSWORD=${databaseRootPassword}${
env ? `\n${env}` : ""
}`;
const resources = calculateResources({
memoryLimit,
memoryReservation,
cpuLimit,
cpuReservation,
});
const envVariables = prepareEnvironmentVariables(defaultMariadbEnv);
const volumesMount = generateVolumeMounts(mounts);
const bindsMount = generateBindMounts(mounts);
const filesMount = generateFileMounts(appName, mariadb);
const docker = await getRemoteDocker(mariadb.serverId);
const settings: CreateServiceOptions = {
Name: appName,
TaskTemplate: {
ContainerSpec: {
Image: dockerImage,
Env: envVariables,
Mounts: [...volumesMount, ...bindsMount, ...filesMount],
...(command
? {
Command: ["/bin/sh"],
Args: ["-c", command],
}
: {}),
},
Networks: [{ Target: "dokploy-network" }],
Resources: {
...resources,
},
Placement: {
Constraints: ["node.role==manager"],
},
},
Mode: {
Replicated: {
Replicas: 1,
},
},
EndpointSpec: {
Mode: "dnsrr",
Ports: externalPort
? [
{
Protocol: "tcp",
TargetPort: 3306,
PublishedPort: externalPort,
PublishMode: "host",
},
]
: [],
},
};
try {
const service = docker.getService(appName);
const inspect = await service.inspect();
await service.update({
version: Number.parseInt(inspect.Version.Index),
...settings,
});
} catch (error) {
await docker.createService(settings);
}
};

View File

@@ -0,0 +1,97 @@
import type { InferResultType } from "@/server/types/with";
import type { CreateServiceOptions } from "dockerode";
import {
calculateResources,
generateBindMounts,
generateFileMounts,
generateVolumeMounts,
prepareEnvironmentVariables,
} from "../docker/utils";
import { getRemoteDocker } from "../servers/remote-docker";
export type MongoNested = InferResultType<"mongo", { mounts: true }>;
export const buildMongo = async (mongo: MongoNested) => {
const {
appName,
env,
externalPort,
dockerImage,
memoryLimit,
memoryReservation,
cpuLimit,
cpuReservation,
databaseUser,
databasePassword,
command,
mounts,
} = mongo;
const defaultMongoEnv = `MONGO_INITDB_ROOT_USERNAME=${databaseUser}\nMONGO_INITDB_ROOT_PASSWORD=${databasePassword}${
env ? `\n${env}` : ""
}`;
const resources = calculateResources({
memoryLimit,
memoryReservation,
cpuLimit,
cpuReservation,
});
const envVariables = prepareEnvironmentVariables(defaultMongoEnv);
const volumesMount = generateVolumeMounts(mounts);
const bindsMount = generateBindMounts(mounts);
const filesMount = generateFileMounts(appName, mongo);
const docker = await getRemoteDocker(mongo.serverId);
const settings: CreateServiceOptions = {
Name: appName,
TaskTemplate: {
ContainerSpec: {
Image: dockerImage,
Env: envVariables,
Mounts: [...volumesMount, ...bindsMount, ...filesMount],
...(command
? {
Command: ["/bin/sh"],
Args: ["-c", command],
}
: {}),
},
Networks: [{ Target: "dokploy-network" }],
Resources: {
...resources,
},
Placement: {
Constraints: ["node.role==manager"],
},
},
Mode: {
Replicated: {
Replicas: 1,
},
},
EndpointSpec: {
Mode: "dnsrr",
Ports: externalPort
? [
{
Protocol: "tcp",
TargetPort: 27017,
PublishedPort: externalPort,
PublishMode: "host",
},
]
: [],
},
};
try {
const service = docker.getService(appName);
const inspect = await service.inspect();
await service.update({
version: Number.parseInt(inspect.Version.Index),
...settings,
});
} catch (error) {
await docker.createService(settings);
}
};

View File

@@ -0,0 +1,104 @@
import type { InferResultType } from "@/server/types/with";
import type { CreateServiceOptions } from "dockerode";
import {
calculateResources,
generateBindMounts,
generateFileMounts,
generateVolumeMounts,
prepareEnvironmentVariables,
} from "../docker/utils";
import { getRemoteDocker } from "../servers/remote-docker";
export type MysqlNested = InferResultType<"mysql", { mounts: true }>;
export const buildMysql = async (mysql: MysqlNested) => {
const {
appName,
env,
externalPort,
dockerImage,
memoryLimit,
memoryReservation,
databaseName,
databaseUser,
databasePassword,
databaseRootPassword,
cpuLimit,
cpuReservation,
command,
mounts,
} = mysql;
const defaultMysqlEnv =
databaseUser !== "root"
? `MYSQL_USER=${databaseUser}\nMYSQL_DATABASE=${databaseName}\nMYSQL_PASSWORD=${databasePassword}\nMYSQL_ROOT_PASSWORD=${databaseRootPassword}${
env ? `\n${env}` : ""
}`
: `MYSQL_DATABASE=${databaseName}\nMYSQL_ROOT_PASSWORD=${databaseRootPassword}${
env ? `\n${env}` : ""
}`;
const resources = calculateResources({
memoryLimit,
memoryReservation,
cpuLimit,
cpuReservation,
});
const envVariables = prepareEnvironmentVariables(defaultMysqlEnv);
const volumesMount = generateVolumeMounts(mounts);
const bindsMount = generateBindMounts(mounts);
const filesMount = generateFileMounts(appName, mysql);
const docker = await getRemoteDocker(mysql.serverId);
const settings: CreateServiceOptions = {
Name: appName,
TaskTemplate: {
ContainerSpec: {
Image: dockerImage,
Env: envVariables,
Mounts: [...volumesMount, ...bindsMount, ...filesMount],
...(command
? {
Command: ["/bin/sh"],
Args: ["-c", command],
}
: {}),
},
Networks: [{ Target: "dokploy-network" }],
Resources: {
...resources,
},
Placement: {
Constraints: ["node.role==manager"],
},
},
Mode: {
Replicated: {
Replicas: 1,
},
},
EndpointSpec: {
Mode: "dnsrr",
Ports: externalPort
? [
{
Protocol: "tcp",
TargetPort: 3306,
PublishedPort: externalPort,
PublishMode: "host",
},
]
: [],
},
};
try {
const service = docker.getService(appName);
const inspect = await service.inspect();
await service.update({
version: Number.parseInt(inspect.Version.Index),
...settings,
});
} catch (error) {
await docker.createService(settings);
}
};

View File

@@ -0,0 +1,98 @@
import type { InferResultType } from "@/server/types/with";
import type { CreateServiceOptions } from "dockerode";
import {
calculateResources,
generateBindMounts,
generateFileMounts,
generateVolumeMounts,
prepareEnvironmentVariables,
} from "../docker/utils";
import { getRemoteDocker } from "../servers/remote-docker";
export type PostgresNested = InferResultType<"postgres", { mounts: true }>;
export const buildPostgres = async (postgres: PostgresNested) => {
const {
appName,
env,
externalPort,
dockerImage,
memoryLimit,
memoryReservation,
cpuLimit,
cpuReservation,
databaseName,
databaseUser,
databasePassword,
command,
mounts,
} = postgres;
const defaultPostgresEnv = `POSTGRES_DB=${databaseName}\nPOSTGRES_USER=${databaseUser}\nPOSTGRES_PASSWORD=${databasePassword}${
env ? `\n${env}` : ""
}`;
const resources = calculateResources({
memoryLimit,
memoryReservation,
cpuLimit,
cpuReservation,
});
const envVariables = prepareEnvironmentVariables(defaultPostgresEnv);
const volumesMount = generateVolumeMounts(mounts);
const bindsMount = generateBindMounts(mounts);
const filesMount = generateFileMounts(appName, postgres);
const docker = await getRemoteDocker(postgres.serverId);
const settings: CreateServiceOptions = {
Name: appName,
TaskTemplate: {
ContainerSpec: {
Image: dockerImage,
Env: envVariables,
Mounts: [...volumesMount, ...bindsMount, ...filesMount],
...(command
? {
Command: ["/bin/sh"],
Args: ["-c", command],
}
: {}),
},
Networks: [{ Target: "dokploy-network" }],
Resources: {
...resources,
},
Placement: {
Constraints: ["node.role==manager"],
},
},
Mode: {
Replicated: {
Replicas: 1,
},
},
EndpointSpec: {
Mode: "dnsrr",
Ports: externalPort
? [
{
Protocol: "tcp",
TargetPort: 5432,
PublishedPort: externalPort,
PublishMode: "host",
},
]
: [],
},
};
try {
const service = docker.getService(appName);
const inspect = await service.inspect();
await service.update({
version: Number.parseInt(inspect.Version.Index),
...settings,
});
} catch (error) {
console.log("error", error);
await docker.createService(settings);
}
};

View File

@@ -0,0 +1,95 @@
import type { InferResultType } from "@/server/types/with";
import type { CreateServiceOptions } from "dockerode";
import {
calculateResources,
generateBindMounts,
generateFileMounts,
generateVolumeMounts,
prepareEnvironmentVariables,
} from "../docker/utils";
import { getRemoteDocker } from "../servers/remote-docker";
export type RedisNested = InferResultType<"redis", { mounts: true }>;
export const buildRedis = async (redis: RedisNested) => {
const {
appName,
env,
externalPort,
dockerImage,
memoryLimit,
memoryReservation,
databasePassword,
cpuLimit,
cpuReservation,
command,
mounts,
} = redis;
const defaultRedisEnv = `REDIS_PASSWORD=${databasePassword}${
env ? `\n${env}` : ""
}`;
const resources = calculateResources({
memoryLimit,
memoryReservation,
cpuLimit,
cpuReservation,
});
const envVariables = prepareEnvironmentVariables(defaultRedisEnv);
const volumesMount = generateVolumeMounts(mounts);
const bindsMount = generateBindMounts(mounts);
const filesMount = generateFileMounts(appName, redis);
const docker = await getRemoteDocker(redis.serverId);
const settings: CreateServiceOptions = {
Name: appName,
TaskTemplate: {
ContainerSpec: {
Image: dockerImage,
Env: envVariables,
Mounts: [...volumesMount, ...bindsMount, ...filesMount],
Command: ["/bin/sh"],
Args: [
"-c",
command ? command : `redis-server --requirepass ${databasePassword}`,
],
},
Networks: [{ Target: "dokploy-network" }],
Resources: {
...resources,
},
Placement: {
Constraints: ["node.role==manager"],
},
},
Mode: {
Replicated: {
Replicas: 1,
},
},
EndpointSpec: {
Mode: "dnsrr",
Ports: externalPort
? [
{
Protocol: "tcp",
TargetPort: 6379,
PublishedPort: externalPort,
PublishMode: "host",
},
]
: [],
},
};
try {
const service = docker.getService(appName);
const inspect = await service.inspect();
await service.update({
version: Number.parseInt(inspect.Version.Index),
...settings,
});
} catch (error) {
await docker.createService(settings);
}
};

View File

@@ -0,0 +1,56 @@
import crypto from "node:crypto";
import { findComposeById } from "@/server/services/compose";
import { dump, load } from "js-yaml";
import { addSuffixToAllConfigs } from "./compose/configs";
import { addSuffixToAllNetworks } from "./compose/network";
import { addSuffixToAllSecrets } from "./compose/secrets";
import { addSuffixToAllServiceNames } from "./compose/service";
import { addSuffixToAllVolumes } from "./compose/volume";
import type { ComposeSpecification } from "./types";
export const generateRandomHash = (): string => {
return crypto.randomBytes(4).toString("hex");
};
export const randomizeComposeFile = async (
composeId: string,
suffix?: string,
) => {
const compose = await findComposeById(composeId);
const composeFile = compose.composeFile;
const composeData = load(composeFile) as ComposeSpecification;
const randomSuffix = suffix || generateRandomHash();
const newComposeFile = addSuffixToAllProperties(composeData, randomSuffix);
return dump(newComposeFile);
};
export const randomizeSpecificationFile = (
composeSpec: ComposeSpecification,
suffix?: string,
) => {
if (!suffix) {
return composeSpec;
}
const newComposeFile = addSuffixToAllProperties(composeSpec, suffix);
return newComposeFile;
};
export const addSuffixToAllProperties = (
composeData: ComposeSpecification,
suffix: string,
): ComposeSpecification => {
let updatedComposeData = { ...composeData };
updatedComposeData = addSuffixToAllServiceNames(updatedComposeData, suffix);
updatedComposeData = addSuffixToAllVolumes(updatedComposeData, suffix);
updatedComposeData = addSuffixToAllNetworks(updatedComposeData, suffix);
updatedComposeData = addSuffixToAllConfigs(updatedComposeData, suffix);
updatedComposeData = addSuffixToAllSecrets(updatedComposeData, suffix);
return updatedComposeData;
};
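
A minimal usage sketch of the suffixing flow above; the spec, the service and volume names, and the suffix are illustrative, not part of this commit:

import { randomizeSpecificationFile } from "./compose";
import type { ComposeSpecification } from "./types";

// Hypothetical spec with one service and one named volume.
const spec: ComposeSpecification = {
	services: { web: { image: "nginx", volumes: ["data:/var/www"] } },
	volumes: { data: {} },
};

// With a suffix, every service, volume, network, config and secret name gets
// "-<suffix>" appended; without a suffix the spec is returned unchanged.
const randomized = randomizeSpecificationFile(spec, "abc123");
// randomized.services -> { "web-abc123": { image: "nginx", volumes: ["data-abc123:/var/www"] } }
// randomized.volumes  -> { "data-abc123": {} }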

View File

@@ -0,0 +1,73 @@
import _ from "lodash";
import type {
ComposeSpecification,
DefinitionsConfig,
DefinitionsService,
} from "../types";
export const addSuffixToConfigsRoot = (
configs: { [key: string]: DefinitionsConfig },
suffix: string,
): { [key: string]: DefinitionsConfig } => {
const newConfigs: { [key: string]: DefinitionsConfig } = {};
_.forEach(configs, (config, configName) => {
const newConfigName = `${configName}-${suffix}`;
newConfigs[newConfigName] = _.cloneDeep(config);
});
return newConfigs;
};
export const addSuffixToConfigsInServices = (
services: { [key: string]: DefinitionsService },
suffix: string,
): { [key: string]: DefinitionsService } => {
const newServices: { [key: string]: DefinitionsService } = {};
_.forEach(services, (serviceConfig, serviceName) => {
const newServiceConfig = _.cloneDeep(serviceConfig);
// Replace config names referenced in the service's configs section
if (_.has(newServiceConfig, "configs")) {
newServiceConfig.configs = _.map(newServiceConfig.configs, (config) => {
if (_.isString(config)) {
return `${config}-${suffix}`;
}
if (_.isObject(config) && config.source) {
return {
...config,
source: `${config.source}-${suffix}`,
};
}
return config;
});
}
newServices[serviceName] = newServiceConfig;
});
return newServices;
};
export const addSuffixToAllConfigs = (
composeData: ComposeSpecification,
suffix: string,
): ComposeSpecification => {
const updatedComposeData = { ...composeData };
if (composeData?.configs) {
updatedComposeData.configs = addSuffixToConfigsRoot(
composeData.configs,
suffix,
);
}
if (composeData?.services) {
updatedComposeData.services = addSuffixToConfigsInServices(
composeData.services,
suffix,
);
}
return updatedComposeData;
};

View File

@@ -0,0 +1,83 @@
import _ from "lodash";
import type {
ComposeSpecification,
DefinitionsNetwork,
DefinitionsService,
} from "../types";
export const addSuffixToNetworksRoot = (
networks: { [key: string]: DefinitionsNetwork },
suffix: string,
): { [key: string]: DefinitionsNetwork } => {
return _.mapKeys(networks, (_value, key) => {
if (key === "dokploy-network") {
return "dokploy-network";
}
return `${key}-${suffix}`;
});
};
export const addSuffixToServiceNetworks = (
services: { [key: string]: DefinitionsService },
suffix: string,
): { [key: string]: DefinitionsService } => {
return _.mapValues(services, (service) => {
if (service.networks) {
// Case 1: list form (the most common)
if (Array.isArray(service.networks)) {
service.networks = service.networks.map((network: string) => {
if (network === "dokploy-network") {
return "dokploy-network";
}
return `${network}-${suffix}`;
});
} else {
// Case 2: object form keyed by network name
service.networks = _.mapKeys(service.networks, (_value, key) => {
if (key === "dokploy-network") {
return "dokploy-network";
}
return `${key}-${suffix}`;
});
// Case 3: per-network option objects (keys other than "aliases" get the suffix)
service.networks = _.mapValues(service.networks, (value) => {
if (value && typeof value === "object") {
return _.mapKeys(value, (_val, innerKey) => {
if (innerKey === "aliases") {
return "aliases";
}
return `${innerKey}-${suffix}`;
});
}
return value;
});
}
}
return service;
});
};
export const addSuffixToAllNetworks = (
composeData: ComposeSpecification,
suffix: string,
): ComposeSpecification => {
const updatedComposeData = { ...composeData };
if (updatedComposeData.networks) {
updatedComposeData.networks = addSuffixToNetworksRoot(
updatedComposeData.networks,
suffix,
);
}
if (updatedComposeData.services) {
updatedComposeData.services = addSuffixToServiceNetworks(
updatedComposeData.services,
suffix,
);
}
return updatedComposeData;
};

View File

@@ -0,0 +1,68 @@
import _ from "lodash";
import type { ComposeSpecification, DefinitionsService } from "../types";
export const addSuffixToSecretsRoot = (
secrets: ComposeSpecification["secrets"],
suffix: string,
): ComposeSpecification["secrets"] => {
const newSecrets: ComposeSpecification["secrets"] = {};
_.forEach(secrets, (secretConfig, secretName) => {
const newSecretName = `${secretName}-${suffix}`;
newSecrets[newSecretName] = _.cloneDeep(secretConfig);
});
return newSecrets;
};
export const addSuffixToSecretsInServices = (
services: { [key: string]: DefinitionsService },
suffix: string,
): { [key: string]: DefinitionsService } => {
const newServices: { [key: string]: DefinitionsService } = {};
_.forEach(services, (serviceConfig, serviceName) => {
const newServiceConfig = _.cloneDeep(serviceConfig);
// Replace secret names in secrets
if (_.has(newServiceConfig, "secrets")) {
newServiceConfig.secrets = _.map(newServiceConfig.secrets, (secret) => {
if (_.isString(secret)) {
return `${secret}-${suffix}`;
}
if (_.isObject(secret) && secret.source) {
return {
...secret,
source: `${secret.source}-${suffix}`,
};
}
return secret;
});
}
newServices[serviceName] = newServiceConfig;
});
return newServices;
};
export const addSuffixToAllSecrets = (
composeData: ComposeSpecification,
suffix: string,
): ComposeSpecification => {
const updatedComposeData = { ...composeData };
if (composeData?.secrets) {
updatedComposeData.secrets = addSuffixToSecretsRoot(
composeData.secrets,
suffix,
);
}
if (composeData?.services) {
updatedComposeData.services = addSuffixToSecretsInServices(
composeData.services,
suffix,
);
}
return updatedComposeData;
};

View File

@@ -0,0 +1,90 @@
// Places where a service name can be referenced and therefore must also be suffixed:
// In the depends_on section of other services: to define dependencies between services.
// In the networks section of other services: uncommon, but services can be referenced in custom networks.
// In the volumes_from section of other services: to reuse volumes defined by another service.
// In the links section of other services: to create links between services.
// In the extends section of other services: to extend another service's configuration.
import _ from "lodash";
import type { ComposeSpecification, DefinitionsService } from "../types";
type DependsOnObject = NonNullable<
Exclude<DefinitionsService["depends_on"], string[]> extends infer T
? { [K in keyof T]: T[K] }
: never
>;
export const addSuffixToServiceNames = (
services: { [key: string]: DefinitionsService },
suffix: string,
): { [key: string]: DefinitionsService } => {
const newServices: { [key: string]: DefinitionsService } = {};
for (const [serviceName, serviceConfig] of Object.entries(services)) {
const newServiceName = `${serviceName}-${suffix}`;
const newServiceConfig = _.cloneDeep(serviceConfig);
// Replace service names in depends_on
if (newServiceConfig.depends_on) {
if (Array.isArray(newServiceConfig.depends_on)) {
newServiceConfig.depends_on = newServiceConfig.depends_on.map(
(dep) => `${dep}-${suffix}`,
);
} else {
const newDependsOn: DependsOnObject = {};
for (const [depName, depConfig] of Object.entries(
newServiceConfig.depends_on,
)) {
newDependsOn[`${depName}-${suffix}`] = depConfig;
}
newServiceConfig.depends_on = newDependsOn;
}
}
// Rename container_name
if (newServiceConfig.container_name) {
newServiceConfig.container_name = `${newServiceConfig.container_name}-${suffix}`;
}
// Replace service names in links
if (newServiceConfig.links) {
newServiceConfig.links = newServiceConfig.links.map(
(link) => `${link}-${suffix}`,
);
}
// Replace service names in extends
if (newServiceConfig.extends) {
if (typeof newServiceConfig.extends === "string") {
newServiceConfig.extends = `${newServiceConfig.extends}-${suffix}`;
} else {
newServiceConfig.extends.service = `${newServiceConfig.extends.service}-${suffix}`;
}
}
// Replace service names in volumes_from
if (newServiceConfig.volumes_from) {
newServiceConfig.volumes_from = newServiceConfig.volumes_from.map(
(vol) => `${vol}-${suffix}`,
);
}
newServices[newServiceName] = newServiceConfig;
}
return newServices;
};
export const addSuffixToAllServiceNames = (
composeData: ComposeSpecification,
suffix: string,
): ComposeSpecification => {
const updatedComposeData = { ...composeData };
if (updatedComposeData.services) {
updatedComposeData.services = addSuffixToServiceNames(
updatedComposeData.services,
suffix,
);
}
return updatedComposeData;
};
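
A sketch of how the depends_on, links, and container_name rewrites above behave; the service names and suffix are hypothetical, and the import path assumes the same layout used by the utilities earlier in this commit:

import { addSuffixToAllServiceNames } from "./compose/service";

const result = addSuffixToAllServiceNames(
	{
		services: {
			// "api" references "db" through depends_on (object form) and links.
			api: {
				depends_on: { db: { condition: "service_healthy" } },
				links: ["db"],
			},
			db: { container_name: "db" },
		},
	},
	"abc123",
);
// result.services["api-abc123"].depends_on -> { "db-abc123": { condition: "service_healthy" } }
// result.services["api-abc123"].links      -> ["db-abc123"]
// result.services["db-abc123"].container_name -> "db-abc123"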

View File

@@ -0,0 +1,78 @@
import _ from "lodash";
import type {
ComposeSpecification,
DefinitionsService,
DefinitionsVolume,
} from "../types";
// Helpers to append the suffix to volume names
export const addSuffixToVolumesRoot = (
volumes: { [key: string]: DefinitionsVolume },
suffix: string,
): { [key: string]: DefinitionsVolume } => {
return _.mapKeys(volumes, (_value, key) => `${key}-${suffix}`);
};
export const addSuffixToVolumesInServices = (
services: { [key: string]: DefinitionsService },
suffix: string,
): { [key: string]: DefinitionsService } => {
const newServices: { [key: string]: DefinitionsService } = {};
_.forEach(services, (serviceConfig, serviceName) => {
const newServiceConfig = _.cloneDeep(serviceConfig);
// Replace volume names in the service's volumes section
if (_.has(newServiceConfig, "volumes")) {
newServiceConfig.volumes = _.map(newServiceConfig.volumes, (volume) => {
if (_.isString(volume)) {
const [volumeName, path] = volume.split(":");
// skip bind mounts and variables (e.g. $PWD)
if (
volumeName?.startsWith(".") ||
volumeName?.startsWith("/") ||
volumeName?.startsWith("$")
) {
return volume;
}
return `${volumeName}-${suffix}:${path}`;
}
if (_.isObject(volume) && volume.type === "volume" && volume.source) {
return {
...volume,
source: `${volume.source}-${suffix}`,
};
}
return volume;
});
}
newServices[serviceName] = newServiceConfig;
});
return newServices;
};
export const addSuffixToAllVolumes = (
composeData: ComposeSpecification,
suffix: string,
): ComposeSpecification => {
const updatedComposeData = { ...composeData };
if (updatedComposeData.volumes) {
updatedComposeData.volumes = addSuffixToVolumesRoot(
updatedComposeData.volumes,
suffix,
);
}
if (updatedComposeData.services) {
updatedComposeData.services = addSuffixToVolumesInServices(
updatedComposeData.services,
suffix,
);
}
return updatedComposeData;
};
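
A sketch of the skip rules above: only named volumes are suffixed, while bind mounts and variable-based paths pass through unchanged. The service name, paths, and suffix are illustrative:

import { addSuffixToVolumesInServices } from "./compose/volume";

const services = addSuffixToVolumesInServices(
	{
		app: {
			volumes: [
				"data:/var/lib/data", // named volume -> "data-abc123:/var/lib/data"
				"./conf:/etc/conf", // relative bind mount -> left as-is
				"/host/logs:/logs", // absolute bind mount -> left as-is
				"$PWD/src:/src", // variable-based path -> left as-is
			],
		},
	},
	"abc123",
);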

View File

@@ -0,0 +1,327 @@
import fs, { existsSync, readFileSync } from "node:fs";
import { writeFile } from "node:fs/promises";
import { join } from "node:path";
import { paths } from "@/server/constants";
import type { Compose } from "@/server/services/compose";
import type { Domain } from "@/server/services/domain";
import { dump, load } from "js-yaml";
import { execAsyncRemote } from "../process/execAsync";
import {
cloneRawBitbucketRepository,
cloneRawBitbucketRepositoryRemote,
} from "../providers/bitbucket";
import {
cloneGitRawRepository,
cloneRawGitRepositoryRemote,
} from "../providers/git";
import {
cloneRawGithubRepository,
cloneRawGithubRepositoryRemote,
} from "../providers/github";
import {
cloneRawGitlabRepository,
cloneRawGitlabRepositoryRemote,
} from "../providers/gitlab";
import {
createComposeFileRaw,
createComposeFileRawRemote,
} from "../providers/raw";
import { randomizeSpecificationFile } from "./compose";
import type {
ComposeSpecification,
DefinitionsService,
PropertiesNetworks,
} from "./types";
import { encodeBase64 } from "./utils";
export const cloneCompose = async (compose: Compose) => {
if (compose.sourceType === "github") {
await cloneRawGithubRepository(compose);
} else if (compose.sourceType === "gitlab") {
await cloneRawGitlabRepository(compose);
} else if (compose.sourceType === "bitbucket") {
await cloneRawBitbucketRepository(compose);
} else if (compose.sourceType === "git") {
await cloneGitRawRepository(compose);
} else if (compose.sourceType === "raw") {
await createComposeFileRaw(compose);
}
};
export const cloneComposeRemote = async (compose: Compose) => {
if (compose.sourceType === "github") {
await cloneRawGithubRepositoryRemote(compose);
} else if (compose.sourceType === "gitlab") {
await cloneRawGitlabRepositoryRemote(compose);
} else if (compose.sourceType === "bitbucket") {
await cloneRawBitbucketRepositoryRemote(compose);
} else if (compose.sourceType === "git") {
await cloneRawGitRepositoryRemote(compose);
} else if (compose.sourceType === "raw") {
await createComposeFileRawRemote(compose);
}
};
export const getComposePath = (compose: Compose) => {
const { COMPOSE_PATH } = paths(!!compose.serverId);
const { appName, sourceType, composePath } = compose;
let path = "";
if (sourceType === "raw") {
path = "docker-compose.yml";
} else {
path = composePath;
}
return join(COMPOSE_PATH, appName, "code", path);
};
export const loadDockerCompose = async (
compose: Compose,
): Promise<ComposeSpecification | null> => {
const path = getComposePath(compose);
if (existsSync(path)) {
const yamlStr = readFileSync(path, "utf8");
const parsedConfig = load(yamlStr) as ComposeSpecification;
return parsedConfig;
}
return null;
};
export const loadDockerComposeRemote = async (
compose: Compose,
): Promise<ComposeSpecification | null> => {
const path = getComposePath(compose);
try {
if (!compose.serverId) {
return null;
}
const { stdout, stderr } = await execAsyncRemote(
compose.serverId,
`cat ${path}`,
);
if (stderr) {
return null;
}
if (!stdout) return null;
const parsedConfig = load(stdout) as ComposeSpecification;
return parsedConfig;
} catch (err) {
return null;
}
};
export const readComposeFile = async (compose: Compose) => {
const path = getComposePath(compose);
if (existsSync(path)) {
const yamlStr = readFileSync(path, "utf8");
return yamlStr;
}
return null;
};
export const writeDomainsToCompose = async (
compose: Compose,
domains: Domain[],
) => {
if (!domains.length) {
return;
}
const composeConverted = await addDomainToCompose(compose, domains);
const path = getComposePath(compose);
const composeString = dump(composeConverted, { lineWidth: 1000 });
try {
await writeFile(path, composeString, "utf8");
} catch (error) {
throw error;
}
};
export const writeDomainsToComposeRemote = async (
compose: Compose,
domains: Domain[],
logPath: string,
) => {
if (!domains.length) {
return "";
}
try {
const composeConverted = await addDomainToCompose(compose, domains);
const path = getComposePath(compose);
if (!composeConverted) {
return `
echo "❌ Error: Compose file not found" >> ${logPath};
exit 1;
`;
}
if (compose.serverId) {
const composeString = dump(composeConverted, { lineWidth: 1000 });
const encodedContent = encodeBase64(composeString);
return `echo "${encodedContent}" | base64 -d > "${path}";`;
}
} catch (error) {
// @ts-ignore
return `echo "❌ An error has occurred: ${error?.message || error}" >> ${logPath};
exit 1;
`;
}
};
// (node:59875) MaxListenersExceededWarning: Possible EventEmitter memory leak detected. 11 SIGTERM listeners added to [process]. Use emitter.setMaxListeners() to increase limit
export const addDomainToCompose = async (
compose: Compose,
domains: Domain[],
) => {
const { appName } = compose;
let result: ComposeSpecification | null;
if (compose.serverId) {
result = await loadDockerComposeRemote(compose); // for remote deployments the compose file has to be fetched from the remote server
} else {
result = await loadDockerCompose(compose);
}
if (!result || domains.length === 0) {
return null;
}
if (compose.randomize) {
const randomized = randomizeSpecificationFile(result, compose.suffix);
result = randomized;
}
for (const domain of domains) {
const { serviceName, https } = domain;
if (!serviceName) {
throw new Error("Service name not found");
}
if (!result?.services?.[serviceName]) {
throw new Error(`The service ${serviceName} was not found in the compose file`);
}
if (!result.services[serviceName].labels) {
result.services[serviceName].labels = [];
}
const httpLabels = await createDomainLabels(appName, domain, "web");
if (https) {
const httpsLabels = await createDomainLabels(
appName,
domain,
"websecure",
);
httpLabels.push(...httpsLabels);
}
const labels = result.services[serviceName].labels;
if (Array.isArray(labels)) {
if (!labels.includes("traefik.enable=true")) {
labels.push("traefik.enable=true");
}
labels.push(...httpLabels);
}
// Add the dokploy-network to the service
result.services[serviceName].networks = addDokployNetworkToService(
result.services[serviceName].networks,
);
}
// Add dokploy-network to the root of the compose file
result.networks = addDokployNetworkToRoot(result.networks);
return result;
};
export const writeComposeFile = async (
compose: Compose,
composeSpec: ComposeSpecification,
) => {
const path = getComposePath(compose);
try {
const composeFile = dump(composeSpec, {
lineWidth: 1000,
});
fs.writeFileSync(path, composeFile, "utf8");
} catch (e) {
console.error("Error saving the YAML config file:", e);
}
};
export const createDomainLabels = async (
appName: string,
domain: Domain,
entrypoint: "web" | "websecure",
) => {
const { host, port, https, uniqueConfigKey, certificateType } = domain;
const routerName = `${appName}-${uniqueConfigKey}-${entrypoint}`;
const labels = [
`traefik.http.routers.${routerName}.rule=Host(\`${host}\`)`,
`traefik.http.routers.${routerName}.entrypoints=${entrypoint}`,
`traefik.http.services.${routerName}.loadbalancer.server.port=${port}`,
`traefik.http.routers.${routerName}.service=${routerName}`,
];
if (entrypoint === "web" && https) {
labels.push(
`traefik.http.routers.${routerName}.middlewares=redirect-to-https@file`,
);
}
if (entrypoint === "websecure") {
if (certificateType === "letsencrypt") {
labels.push(
`traefik.http.routers.${routerName}.tls.certresolver=letsencrypt`,
);
}
}
return labels;
};
export const addDokployNetworkToService = (
networkService: DefinitionsService["networks"],
) => {
let networks = networkService;
const network = "dokploy-network";
if (!networks) {
networks = [];
}
if (Array.isArray(networks)) {
if (!networks.includes(network)) {
networks.push(network);
}
} else if (networks && typeof networks === "object") {
if (!(network in networks)) {
networks[network] = {};
}
}
return networks;
};
export const addDokployNetworkToRoot = (
networkRoot: PropertiesNetworks | undefined,
) => {
let networks = networkRoot;
const network = "dokploy-network";
if (!networks) {
networks = {};
}
	// dokploy-network is always declared as external at the root of the compose file
	networks[network] = {
		external: true,
	};
return networks;
};
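
A sketch of the Traefik labels produced by createDomainLabels for a hypothetical HTTPS domain. Only the fields the helper actually reads are filled in, and the import path for the helper is assumed rather than taken from this commit:

import type { Domain } from "@/server/services/domain";
// Adjust this import to wherever the compose domain helpers above live (path assumed).
import { createDomainLabels } from "./domains";

const exampleDomainLabels = async () => {
	// Hypothetical domain: only the fields createDomainLabels reads are filled in.
	const domain = {
		host: "example.com",
		port: 3000,
		https: true,
		uniqueConfigKey: 1,
		certificateType: "letsencrypt",
		serviceName: "web",
	} as Domain;
	const labels = await createDomainLabels("my-app", domain, "websecure");
	// labels ->
	//   traefik.http.routers.my-app-1-websecure.rule=Host(`example.com`)
	//   traefik.http.routers.my-app-1-websecure.entrypoints=websecure
	//   traefik.http.services.my-app-1-websecure.loadbalancer.server.port=3000
	//   traefik.http.routers.my-app-1-websecure.service=my-app-1-websecure
	//   traefik.http.routers.my-app-1-websecure.tls.certresolver=letsencrypt
	return labels;
};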

View File

@@ -0,0 +1,879 @@
export type DefinitionsInclude =
| string
| {
path?: StringOrList;
env_file?: StringOrList;
project_directory?: string;
};
export type StringOrList = string | ListOfStrings;
export type ListOfStrings = string[];
export type DefinitionsDevelopment = {
watch?: {
ignore?: string[];
path: string;
action: "rebuild" | "sync" | "sync+restart";
target?: string;
[k: string]: unknown;
}[];
[k: string]: unknown;
} & Development;
export type Development = {
watch?: {
ignore?: string[];
path: string;
action: "rebuild" | "sync" | "sync+restart";
target?: string;
[k: string]: unknown;
}[];
[k: string]: unknown;
} | null;
export type DefinitionsDeployment = {
mode?: string;
endpoint_mode?: string;
replicas?: number;
labels?: ListOrDict;
rollback_config?: {
parallelism?: number;
delay?: string;
failure_action?: string;
monitor?: string;
max_failure_ratio?: number;
order?: "start-first" | "stop-first";
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
update_config?: {
parallelism?: number;
delay?: string;
failure_action?: string;
monitor?: string;
max_failure_ratio?: number;
order?: "start-first" | "stop-first";
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
resources?: {
limits?: {
cpus?: number | string;
memory?: string;
pids?: number;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
reservations?: {
cpus?: number | string;
memory?: string;
generic_resources?: DefinitionsGenericResources;
devices?: DefinitionsDevices;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
restart_policy?: {
condition?: string;
delay?: string;
max_attempts?: number;
window?: string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
placement?: {
constraints?: string[];
preferences?: {
spread?: string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}[];
max_replicas_per_node?: number;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
} & Deployment;
export type ListOrDict =
| {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` ".+".
*/
[k: string]: string | number | boolean | null;
}
| string[];
export type DefinitionsGenericResources = {
discrete_resource_spec?: {
kind?: string;
value?: number;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}[];
export type DefinitionsDevices = {
capabilities?: ListOfStrings;
count?: string | number;
device_ids?: ListOfStrings;
driver?: string;
options?: ListOrDict;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}[];
type Deployment = {
mode?: string;
endpoint_mode?: string;
replicas?: number;
labels?: ListOrDict;
rollback_config?: {
parallelism?: number;
delay?: string;
failure_action?: string;
monitor?: string;
max_failure_ratio?: number;
order?: "start-first" | "stop-first";
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
update_config?: {
parallelism?: number;
delay?: string;
failure_action?: string;
monitor?: string;
max_failure_ratio?: number;
order?: "start-first" | "stop-first";
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
resources?: {
limits?: {
cpus?: number | string;
memory?: string;
pids?: number;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
reservations?: {
cpus?: number | string;
memory?: string;
generic_resources?: DefinitionsGenericResources;
devices?: DefinitionsDevices;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
restart_policy?: {
condition?: string;
delay?: string;
max_attempts?: number;
window?: string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
placement?: {
constraints?: string[];
preferences?: {
spread?: string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}[];
max_replicas_per_node?: number;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
} | null;
export type ServiceConfigOrSecret = (
| string
| {
source?: string;
target?: string;
uid?: string;
gid?: string;
mode?: number;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}
)[];
export type Command = null | string | string[];
export type EnvFile =
| string
| (
| string
| {
path: string;
required?: boolean;
}
)[];
/**
* This interface was referenced by `PropertiesNetworks`'s JSON-Schema definition
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
*/
export type DefinitionsNetwork = {
name?: string;
driver?: string;
driver_opts?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^.+$".
*/
[k: string]: string | number;
};
ipam?: {
driver?: string;
config?: {
subnet?: string;
ip_range?: string;
gateway?: string;
aux_addresses?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^.+$".
*/
[k: string]: string;
};
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}[];
options?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^.+$".
*/
[k: string]: string;
};
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
external?:
| boolean
| {
name?: string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
internal?: boolean;
enable_ipv6?: boolean;
attachable?: boolean;
labels?: ListOrDict;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
} & Network;
export type Network = {
name?: string;
driver?: string;
driver_opts?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^.+$".
*/
[k: string]: string | number;
};
ipam?: {
driver?: string;
config?: {
subnet?: string;
ip_range?: string;
gateway?: string;
aux_addresses?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^.+$".
*/
[k: string]: string;
};
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}[];
options?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^.+$".
*/
[k: string]: string;
};
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
external?:
| boolean
| {
name?: string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
internal?: boolean;
enable_ipv6?: boolean;
attachable?: boolean;
labels?: ListOrDict;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
} | null;
/**
* This interface was referenced by `PropertiesVolumes`'s JSON-Schema definition
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
*/
export type DefinitionsVolume = {
name?: string;
driver?: string;
driver_opts?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^.+$".
*/
[k: string]: string | number;
};
external?:
| boolean
| {
name?: string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
labels?: ListOrDict;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
} & Volume;
export type Volume = {
name?: string;
driver?: string;
driver_opts?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^.+$".
*/
[k: string]: string | number;
};
external?:
| boolean
| {
name?: string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
labels?: ListOrDict;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
} | null;
/**
* The Compose file is a YAML file defining a multi-containers based application.
*/
export interface ComposeSpecification {
/**
* declared for backward compatibility, ignored.
*/
version?: string;
/**
* define the Compose project name, until user defines one explicitly.
*/
name?: string;
/**
* compose sub-projects to be included.
*/
include?: DefinitionsInclude[];
services?: PropertiesServices;
networks?: PropertiesNetworks;
volumes?: PropertiesVolumes;
secrets?: PropertiesSecrets;
configs?: PropertiesConfigs;
/**
* This interface was referenced by `ComposeSpecification`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}
export interface PropertiesServices {
[k: string]: DefinitionsService;
}
/**
* This interface was referenced by `PropertiesServices`'s JSON-Schema definition
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
*/
export interface DefinitionsService {
develop?: DefinitionsDevelopment;
deploy?: DefinitionsDeployment;
annotations?: ListOrDict;
attach?: boolean;
build?:
| string
| {
context?: string;
dockerfile?: string;
dockerfile_inline?: string;
entitlements?: string[];
args?: ListOrDict;
ssh?: ListOrDict;
labels?: ListOrDict;
cache_from?: string[];
cache_to?: string[];
no_cache?: boolean;
additional_contexts?: ListOrDict;
network?: string;
pull?: boolean;
target?: string;
shm_size?: number | string;
extra_hosts?: ListOrDict;
isolation?: string;
privileged?: boolean;
secrets?: ServiceConfigOrSecret;
tags?: string[];
ulimits?: Ulimits;
platforms?: string[];
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
blkio_config?: {
device_read_bps?: BlkioLimit[];
device_read_iops?: BlkioLimit[];
device_write_bps?: BlkioLimit[];
device_write_iops?: BlkioLimit[];
weight?: number;
weight_device?: BlkioWeight[];
};
cap_add?: string[];
cap_drop?: string[];
cgroup?: "host" | "private";
cgroup_parent?: string;
command?: Command;
configs?: ServiceConfigOrSecret;
container_name?: string;
cpu_count?: number;
cpu_percent?: number;
cpu_shares?: number | string;
cpu_quota?: number | string;
cpu_period?: number | string;
cpu_rt_period?: number | string;
cpu_rt_runtime?: number | string;
cpus?: number | string;
cpuset?: string;
credential_spec?: {
config?: string;
file?: string;
registry?: string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
depends_on?:
| ListOfStrings
| {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
*/
[k: string]: {
restart?: boolean;
required?: boolean;
condition:
| "service_started"
| "service_healthy"
| "service_completed_successfully";
};
};
device_cgroup_rules?: ListOfStrings;
devices?: string[];
dns?: StringOrList;
dns_opt?: string[];
dns_search?: StringOrList;
domainname?: string;
entrypoint?: Command;
env_file?: EnvFile;
environment?: ListOrDict;
expose?: (string | number)[];
extends?:
| string
| {
service: string;
file?: string;
};
external_links?: string[];
extra_hosts?: ListOrDict;
group_add?: (string | number)[];
healthcheck?: DefinitionsHealthcheck;
hostname?: string;
image?: string;
init?: boolean;
ipc?: string;
isolation?: string;
labels?: ListOrDict;
links?: string[];
logging?: {
driver?: string;
options?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^.+$".
*/
[k: string]: string | number | null;
};
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
mac_address?: string;
mem_limit?: number | string;
mem_reservation?: string | number;
mem_swappiness?: number;
memswap_limit?: number | string;
network_mode?: string;
networks?:
| ListOfStrings
| {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
*/
[k: string]: {
aliases?: ListOfStrings;
ipv4_address?: string;
ipv6_address?: string;
link_local_ips?: ListOfStrings;
mac_address?: string;
driver_opts?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^.+$".
*/
[k: string]: string | number;
};
priority?: number;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
} | null;
};
oom_kill_disable?: boolean;
oom_score_adj?: number;
pid?: string | null;
pids_limit?: number | string;
platform?: string;
ports?: (
| number
| string
| {
name?: string;
mode?: string;
host_ip?: string;
target?: number;
published?: string | number;
protocol?: string;
app_protocol?: string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}
)[];
privileged?: boolean;
profiles?: ListOfStrings;
pull_policy?: "always" | "never" | "if_not_present" | "build" | "missing";
read_only?: boolean;
restart?: string;
runtime?: string;
scale?: number;
security_opt?: string[];
shm_size?: number | string;
secrets?: ServiceConfigOrSecret;
sysctls?: ListOrDict;
stdin_open?: boolean;
stop_grace_period?: string;
stop_signal?: string;
storage_opt?: {
[k: string]: unknown;
};
tmpfs?: StringOrList;
tty?: boolean;
ulimits?: Ulimits;
user?: string;
uts?: string;
userns_mode?: string;
volumes?: (
| string
| {
type: string;
source?: string;
target?: string;
read_only?: boolean;
consistency?: string;
bind?: {
propagation?: string;
create_host_path?: boolean;
selinux?: "z" | "Z";
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
volume?: {
nocopy?: boolean;
subpath?: string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
tmpfs?: {
size?: number | string;
mode?: number;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}
)[];
volumes_from?: string[];
working_dir?: string;
/**
* This interface was referenced by `DefinitionsService`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}
export interface Ulimits {
/**
* This interface was referenced by `Ulimits`'s JSON-Schema definition
* via the `patternProperty` "^[a-z]+$".
*/
[k: string]:
| number
| {
hard: number;
soft: number;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
}
export interface BlkioLimit {
path?: string;
rate?: number | string;
}
export interface BlkioWeight {
path?: string;
weight?: number;
}
export interface DefinitionsHealthcheck {
disable?: boolean;
interval?: string;
retries?: number;
test?: string | string[];
timeout?: string;
start_period?: string;
start_interval?: string;
/**
* This interface was referenced by `DefinitionsHealthcheck`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}
export interface PropertiesNetworks {
[k: string]: DefinitionsNetwork;
}
export interface PropertiesVolumes {
[k: string]: DefinitionsVolume;
}
export interface PropertiesSecrets {
[k: string]: DefinitionsSecret;
}
/**
* This interface was referenced by `PropertiesSecrets`'s JSON-Schema definition
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
*/
export interface DefinitionsSecret {
name?: string;
environment?: string;
file?: string;
external?:
| boolean
| {
name?: string;
[k: string]: unknown;
};
labels?: ListOrDict;
driver?: string;
driver_opts?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^.+$".
*/
[k: string]: string | number;
};
template_driver?: string;
/**
* This interface was referenced by `DefinitionsSecret`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}
export interface PropertiesConfigs {
[k: string]: DefinitionsConfig;
}
/**
* This interface was referenced by `PropertiesConfigs`'s JSON-Schema definition
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
*/
export interface DefinitionsConfig {
name?: string;
content?: string;
environment?: string;
file?: string;
external?:
| boolean
| {
name?: string;
[k: string]: unknown;
};
labels?: ListOrDict;
template_driver?: string;
/**
* This interface was referenced by `DefinitionsConfig`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}

View File

@@ -0,0 +1,525 @@
import fs from "node:fs";
import path from "node:path";
import type { Readable } from "node:stream";
import { docker, paths } from "@/server/constants";
import type { ContainerInfo, ResourceRequirements } from "dockerode";
import { parse } from "dotenv";
import type { ApplicationNested } from "../builders";
import type { MariadbNested } from "../databases/mariadb";
import type { MongoNested } from "../databases/mongo";
import type { MysqlNested } from "../databases/mysql";
import type { PostgresNested } from "../databases/postgres";
import type { RedisNested } from "../databases/redis";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getRemoteDocker } from "../servers/remote-docker";
interface RegistryAuth {
username: string;
password: string;
serveraddress: string;
}
export const pullImage = async (
dockerImage: string,
onData?: (data: any) => void,
authConfig?: Partial<RegistryAuth>,
): Promise<void> => {
try {
if (!dockerImage) {
throw new Error("Docker image not found");
}
await new Promise((resolve, reject) => {
docker.pull(dockerImage, { authconfig: authConfig }, (err, stream) => {
if (err) {
reject(err);
return;
}
docker.modem.followProgress(
stream as Readable,
(err: Error | null, res) => {
if (!err) {
resolve(res);
}
if (err) {
reject(err);
}
},
(event) => {
onData?.(event);
},
);
});
});
} catch (error) {
throw error;
}
};
export const pullRemoteImage = async (
dockerImage: string,
serverId: string,
onData?: (data: any) => void,
authConfig?: Partial<RegistryAuth>,
): Promise<void> => {
try {
if (!dockerImage) {
throw new Error("Docker image not found");
}
const remoteDocker = await getRemoteDocker(serverId);
await new Promise((resolve, reject) => {
remoteDocker.pull(
dockerImage,
{ authconfig: authConfig },
(err, stream) => {
if (err) {
reject(err);
return;
}
remoteDocker.modem.followProgress(
stream as Readable,
(err: Error | null, res) => {
if (!err) {
resolve(res);
}
if (err) {
reject(err);
}
},
(event) => {
onData?.(event);
},
);
},
);
});
} catch (error) {
throw error;
}
};
export const containerExists = async (containerName: string) => {
const container = docker.getContainer(containerName);
try {
await container.inspect();
return true;
} catch (error) {
return false;
}
};
export const stopService = async (appName: string) => {
try {
await execAsync(`docker service scale ${appName}=0 `);
} catch (error) {
console.error(error);
return error;
}
};
export const stopServiceRemote = async (serverId: string, appName: string) => {
try {
await execAsyncRemote(serverId, `docker service scale ${appName}=0 `);
} catch (error) {
console.error(error);
return error;
}
};
export const getContainerByName = (name: string): Promise<ContainerInfo> => {
const opts = {
limit: 1,
filters: {
name: [name],
},
};
return new Promise((resolve, reject) => {
docker.listContainers(opts, (err, containers) => {
if (err) {
reject(err);
} else if (containers?.length === 0) {
reject(new Error(`No container found with name: ${name}`));
} else if (containers && containers?.length > 0 && containers[0]) {
resolve(containers[0]);
}
});
});
};
export const cleanUpUnusedImages = async (serverId?: string) => {
try {
if (serverId) {
await execAsyncRemote(serverId, "docker image prune --all --force");
} else {
await execAsync("docker image prune --all --force");
}
} catch (error) {
console.error(error);
throw error;
}
};
export const cleanStoppedContainers = async (serverId?: string) => {
try {
if (serverId) {
await execAsyncRemote(serverId, "docker container prune --force");
} else {
await execAsync("docker container prune --force");
}
} catch (error) {
console.error(error);
throw error;
}
};
export const cleanUpUnusedVolumes = async (serverId?: string) => {
try {
if (serverId) {
await execAsyncRemote(serverId, "docker volume prune --all --force");
} else {
await execAsync("docker volume prune --all --force");
}
} catch (error) {
console.error(error);
throw error;
}
};
export const cleanUpInactiveContainers = async () => {
try {
const containers = await docker.listContainers({ all: true });
const inactiveContainers = containers.filter(
(container) => container.State !== "running",
);
for (const container of inactiveContainers) {
await docker.getContainer(container.Id).remove({ force: true });
console.log(`Cleaning up inactive container: ${container.Id}`);
}
} catch (error) {
console.error("Error cleaning up inactive containers:", error);
throw error;
}
};
export const cleanUpDockerBuilder = async (serverId?: string) => {
if (serverId) {
await execAsyncRemote(serverId, "docker builder prune --all --force");
} else {
await execAsync("docker builder prune --all --force");
}
};
export const cleanUpSystemPrune = async (serverId?: string) => {
if (serverId) {
await execAsyncRemote(
serverId,
"docker system prune --all --force --volumes",
);
} else {
await execAsync("docker system prune --all --force --volumes");
}
};
export const startService = async (appName: string) => {
try {
await execAsync(`docker service scale ${appName}=1 `);
} catch (error) {
console.error(error);
throw error;
}
};
export const startServiceRemote = async (serverId: string, appName: string) => {
try {
await execAsyncRemote(serverId, `docker service scale ${appName}=1 `);
} catch (error) {
console.error(error);
throw error;
}
};
export const removeService = async (
appName: string,
serverId?: string | null,
) => {
try {
const command = `docker service rm ${appName}`;
if (serverId) {
await execAsyncRemote(serverId, command);
} else {
await execAsync(command);
}
} catch (error) {
return error;
}
};
export const prepareEnvironmentVariables = (env: string | null) =>
Object.entries(parse(env ?? "")).map(([key, value]) => `${key}=${value}`);
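// Example (illustrative) for prepareEnvironmentVariables:
//   prepareEnvironmentVariables("PORT=3000\nNODE_ENV=production")
//   -> ["PORT=3000", "NODE_ENV=production"]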
export const prepareBuildArgs = (input: string | null) => {
const pairs = (input ?? "").split("\n");
const jsonObject: Record<string, string> = {};
for (const pair of pairs) {
const [key, value] = pair.split("=");
if (key && value) {
jsonObject[key] = value;
}
}
return jsonObject;
};
export const generateVolumeMounts = (mounts: ApplicationNested["mounts"]) => {
if (!mounts || mounts.length === 0) {
return [];
}
return mounts
.filter((mount) => mount.type === "volume")
.map((mount) => ({
Type: "volume" as const,
Source: mount.volumeName || "",
Target: mount.mountPath,
}));
};
type Resources = {
memoryLimit: number | null;
memoryReservation: number | null;
cpuLimit: number | null;
cpuReservation: number | null;
};
export const calculateResources = ({
memoryLimit,
memoryReservation,
cpuLimit,
cpuReservation,
}: Resources): ResourceRequirements => {
	// Memory values are interpreted as MiB and CPU values as whole cores; each field is
	// only applied when the corresponding value is provided.
	return {
		Limits: {
			MemoryBytes: memoryLimit ? memoryLimit * 1024 * 1024 : undefined,
			NanoCPUs: cpuLimit ? cpuLimit * 1000 * 1000 * 1000 : undefined,
		},
		Reservations: {
			MemoryBytes: memoryReservation
				? memoryReservation * 1024 * 1024
				: undefined,
			NanoCPUs: cpuReservation
				? cpuReservation * 1000 * 1000 * 1000
				: undefined,
		},
	};
};
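// Example (illustrative) for calculateResources: memory values are treated as MiB and CPU
// values as whole cores, converted to Docker's MemoryBytes / NanoCPUs units.
//   calculateResources({ memoryLimit: 512, memoryReservation: 256, cpuLimit: 2, cpuReservation: 1 })
//   -> Limits:       { MemoryBytes: 536870912, NanoCPUs: 2000000000 }
//      Reservations: { MemoryBytes: 268435456, NanoCPUs: 1000000000 }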
export const generateConfigContainer = (application: ApplicationNested) => {
const {
healthCheckSwarm,
restartPolicySwarm,
placementSwarm,
updateConfigSwarm,
rollbackConfigSwarm,
modeSwarm,
labelsSwarm,
replicas,
mounts,
networkSwarm,
} = application;
const haveMounts = mounts.length > 0;
return {
...(healthCheckSwarm && {
HealthCheck: healthCheckSwarm,
}),
...(restartPolicySwarm
? {
RestartPolicy: restartPolicySwarm,
}
: {}),
...(placementSwarm
? {
Placement: placementSwarm,
}
: {
// if the app has mounts, keep the manager node as a placement constraint
Placement: {
Constraints: haveMounts ? ["node.role==manager"] : [],
},
}),
...(labelsSwarm && {
Labels: labelsSwarm,
}),
...(modeSwarm
? {
Mode: modeSwarm,
}
: {
// use replicas value if no modeSwarm provided
Mode: {
Replicated: {
Replicas: replicas,
},
},
}),
...(rollbackConfigSwarm && {
RollbackConfig: rollbackConfigSwarm,
}),
...(updateConfigSwarm
? { UpdateConfig: updateConfigSwarm }
: {
// default config if no updateConfigSwarm provided
UpdateConfig: {
Parallelism: 1,
Order: "start-first",
},
}),
...(networkSwarm
? {
Networks: networkSwarm,
}
: {
Networks: [{ Target: "dokploy-network" }],
}),
};
};
export const generateBindMounts = (mounts: ApplicationNested["mounts"]) => {
if (!mounts || mounts.length === 0) {
return [];
}
return mounts
.filter((mount) => mount.type === "bind")
.map((mount) => ({
Type: "bind" as const,
Source: mount.hostPath || "",
Target: mount.mountPath,
}));
};
export const generateFileMounts = (
appName: string,
service:
| ApplicationNested
| MongoNested
| MariadbNested
| MysqlNested
| PostgresNested
| RedisNested,
) => {
const { mounts } = service;
const { APPLICATIONS_PATH } = paths(!!service.serverId);
if (!mounts || mounts.length === 0) {
return [];
}
return mounts
.filter((mount) => mount.type === "file")
.map((mount) => {
const fileName = mount.filePath;
const absoluteBasePath = path.resolve(APPLICATIONS_PATH);
const directory = path.join(absoluteBasePath, appName, "files");
const sourcePath = path.join(directory, fileName || "");
return {
Type: "bind" as const,
Source: sourcePath,
Target: mount.mountPath,
};
});
};
export const createFile = async (
outputPath: string,
filePath: string,
content: string,
) => {
try {
const fullPath = path.join(outputPath, filePath);
if (fullPath.endsWith(path.sep) || filePath.endsWith("/")) {
fs.mkdirSync(fullPath, { recursive: true });
return;
}
const directory = path.dirname(fullPath);
fs.mkdirSync(directory, { recursive: true });
fs.writeFileSync(fullPath, content || "");
} catch (error) {
throw error;
}
};
export const encodeBase64 = (content: string) =>
Buffer.from(content, "utf-8").toString("base64");
export const getCreateFileCommand = (
outputPath: string,
filePath: string,
content: string,
) => {
const fullPath = path.join(outputPath, filePath);
if (fullPath.endsWith(path.sep) || filePath.endsWith("/")) {
return `mkdir -p ${fullPath};`;
}
const directory = path.dirname(fullPath);
const encodedContent = encodeBase64(content);
return `
mkdir -p ${directory};
echo "${encodedContent}" | base64 -d > "${fullPath}";
`;
};
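// Example (illustrative, paths hypothetical) for getCreateFileCommand: content is shipped
// base64-encoded so it survives shell quoting when the command is executed over SSH.
//   getCreateFileCommand("/etc/dokploy/applications/my-app/files", "config/app.env", "PORT=3000")
//   ->
//   mkdir -p /etc/dokploy/applications/my-app/files/config;
//   echo "UE9SVD0zMDAw" | base64 -d > "/etc/dokploy/applications/my-app/files/config/app.env";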
export const getServiceContainer = async (appName: string) => {
try {
const filter = {
status: ["running"],
label: [`com.docker.swarm.service.name=${appName}`],
};
const containers = await docker.listContainers({
filters: JSON.stringify(filter),
});
if (containers.length === 0 || !containers[0]) {
throw new Error(`No container found with name: ${appName}`);
}
const container = containers[0];
return container;
} catch (error) {
throw error;
}
};
export const getRemoteServiceContainer = async (
serverId: string,
appName: string,
) => {
try {
const filter = {
status: ["running"],
label: [`com.docker.swarm.service.name=${appName}`],
};
const remoteDocker = await getRemoteDocker(serverId);
const containers = await remoteDocker.listContainers({
filters: JSON.stringify(filter),
});
if (containers.length === 0 || !containers[0]) {
throw new Error(`No container found with name: ${appName}`);
}
const container = containers[0];
return container;
} catch (error) {
throw error;
}
};

View File

@@ -0,0 +1,142 @@
import fs, { promises as fsPromises } from "node:fs";
import path from "node:path";
import { paths } from "@/server/constants";
import type { Application } from "@/server/services/application";
import { execAsync, execAsyncRemote } from "../process/execAsync";
export const recreateDirectory = async (pathFolder: string): Promise<void> => {
try {
await removeDirectoryIfExistsContent(pathFolder);
await fsPromises.mkdir(pathFolder, { recursive: true });
} catch (error) {
console.error(`Error recreating directory '${pathFolder}':`, error);
}
};
export const recreateDirectoryRemote = async (
pathFolder: string,
serverId: string | null,
): Promise<void> => {
try {
await execAsyncRemote(
serverId,
`rm -rf ${pathFolder}; mkdir -p ${pathFolder}`,
);
} catch (error) {
console.error(`Error recreating directory '${pathFolder}':`, error);
}
};
export const removeDirectoryIfExistsContent = async (
path: string,
): Promise<void> => {
if (fs.existsSync(path) && fs.readdirSync(path).length !== 0) {
await execAsync(`rm -rf ${path}`);
}
};
export const removeFileOrDirectory = async (path: string) => {
try {
await execAsync(`rm -rf ${path}`);
} catch (error) {
console.error(`Error removing ${path}: ${error}`);
throw error;
}
};
export const removeDirectoryCode = async (
appName: string,
serverId?: string | null,
) => {
const { APPLICATIONS_PATH } = paths(!!serverId);
const directoryPath = path.join(APPLICATIONS_PATH, appName);
const command = `rm -rf ${directoryPath}`;
try {
if (serverId) {
await execAsyncRemote(serverId, command);
} else {
await execAsync(command);
}
} catch (error) {
console.error(`Error removing ${directoryPath}: ${error}`);
throw error;
}
};
export const removeComposeDirectory = async (
appName: string,
serverId?: string | null,
) => {
const { COMPOSE_PATH } = paths(!!serverId);
const directoryPath = path.join(COMPOSE_PATH, appName);
const command = `rm -rf ${directoryPath}`;
try {
if (serverId) {
await execAsyncRemote(serverId, command);
} else {
await execAsync(command);
}
} catch (error) {
console.error(`Error removing ${directoryPath}: ${error}`);
throw error;
}
};
export const removeMonitoringDirectory = async (
appName: string,
serverId?: string | null,
) => {
const { MONITORING_PATH } = paths(!!serverId);
const directoryPath = path.join(MONITORING_PATH, appName);
const command = `rm -rf ${directoryPath}`;
try {
if (serverId) {
await execAsyncRemote(serverId, command);
} else {
await execAsync(command);
}
} catch (error) {
console.error(`Error removing ${directoryPath}: ${error}`);
throw error;
}
};
export const getBuildAppDirectory = (application: Application) => {
const { APPLICATIONS_PATH } = paths(!!application.serverId);
const { appName, buildType, sourceType, customGitBuildPath, dockerfile } =
application;
let buildPath = "";
if (sourceType === "github") {
buildPath = application?.buildPath || "";
} else if (sourceType === "gitlab") {
buildPath = application?.gitlabBuildPath || "";
} else if (sourceType === "bitbucket") {
buildPath = application?.bitbucketBuildPath || "";
} else if (sourceType === "drop") {
buildPath = application?.dropBuildPath || "";
} else if (sourceType === "git") {
buildPath = customGitBuildPath || "";
}
if (buildType === "dockerfile") {
return path.join(
APPLICATIONS_PATH,
appName,
"code",
buildPath ?? "",
dockerfile || "",
);
}
return path.join(APPLICATIONS_PATH, appName, "code", buildPath ?? "");
};
export const getDockerContextPath = (application: Application) => {
const { APPLICATIONS_PATH } = paths(!!application.serverId);
const { appName, dockerContextPath } = application;
if (!dockerContextPath) {
return null;
}
return path.join(APPLICATIONS_PATH, appName, "code", dockerContextPath);
};

View File

@@ -0,0 +1,26 @@
import * as ssh2 from "ssh2";
export const generateSSHKey = async (type: "rsa" | "ed25519" = "rsa") => {
try {
if (type === "rsa") {
const keys = ssh2.utils.generateKeyPairSync("rsa", {
bits: 4096,
comment: "dokploy",
});
return {
privateKey: keys.private,
publicKey: keys.public,
};
}
const keys = ssh2.utils.generateKeyPairSync("ed25519", {
comment: "dokploy",
});
return {
privateKey: keys.private,
publicKey: keys.public,
};
} catch (error) {
throw error;
}
};

View File

@@ -0,0 +1,157 @@
import { db } from "@/server/db";
import { notifications } from "@/server/db/schema";
import BuildFailedEmail from "@/server/emails/emails/build-failed";
import { renderAsync } from "@react-email/components";
import { eq } from "drizzle-orm";
import {
sendDiscordNotification,
sendEmailNotification,
sendSlackNotification,
sendTelegramNotification,
} from "./utils";
interface Props {
projectName: string;
applicationName: string;
applicationType: string;
errorMessage: string;
buildLink: string;
}
export const sendBuildErrorNotifications = async ({
projectName,
applicationName,
applicationType,
errorMessage,
buildLink,
}: Props) => {
const date = new Date();
const notificationList = await db.query.notifications.findMany({
where: eq(notifications.appBuildError, true),
with: {
email: true,
discord: true,
telegram: true,
slack: true,
},
});
for (const notification of notificationList) {
const { email, discord, telegram, slack } = notification;
if (email) {
const template = await renderAsync(
BuildFailedEmail({
projectName,
applicationName,
applicationType,
errorMessage: errorMessage,
buildLink,
date: date.toLocaleString(),
}),
).catch();
await sendEmailNotification(email, "Build failed for dokploy", template);
}
if (discord) {
await sendDiscordNotification(discord, {
title: "⚠️ Build Failed",
color: 0xff0000,
fields: [
{
name: "Project",
value: projectName,
inline: true,
},
{
name: "Application",
value: applicationName,
inline: true,
},
{
name: "Type",
value: applicationType,
inline: true,
},
{
name: "Error",
value: errorMessage,
},
{
name: "Build Link",
value: buildLink,
},
],
timestamp: date.toISOString(),
footer: {
text: "Dokploy Build Notification",
},
});
}
if (telegram) {
await sendTelegramNotification(
telegram,
`
<b>⚠️ Build Failed</b>
<b>Project:</b> ${projectName}
<b>Application:</b> ${applicationName}
<b>Type:</b> ${applicationType}
<b>Time:</b> ${date.toLocaleString()}
<b>Error:</b>
<pre>${errorMessage}</pre>
<b>Build Details:</b> ${buildLink}
`,
);
}
if (slack) {
const { channel } = slack;
await sendSlackNotification(slack, {
channel: channel,
attachments: [
{
color: "#FF0000",
pretext: ":warning: *Build Failed*",
fields: [
{
title: "Project",
value: projectName,
short: true,
},
{
title: "Application",
value: applicationName,
short: true,
},
{
title: "Type",
value: applicationType,
short: true,
},
{
title: "Time",
value: date.toLocaleString(),
short: true,
},
{
title: "Error",
value: `\`\`\`${errorMessage}\`\`\``,
short: false,
},
],
actions: [
{
type: "button",
text: "View Build Details",
url: buildLink,
},
],
},
],
});
}
}
};

View File

@@ -0,0 +1,143 @@
import { db } from "@/server/db";
import { notifications } from "@/server/db/schema";
import BuildSuccessEmail from "@/server/emails/emails/build-success";
import { renderAsync } from "@react-email/components";
import { eq } from "drizzle-orm";
import {
sendDiscordNotification,
sendEmailNotification,
sendSlackNotification,
sendTelegramNotification,
} from "./utils";
interface Props {
projectName: string;
applicationName: string;
applicationType: string;
buildLink: string;
}
export const sendBuildSuccessNotifications = async ({
projectName,
applicationName,
applicationType,
buildLink,
}: Props) => {
const date = new Date();
const notificationList = await db.query.notifications.findMany({
where: eq(notifications.appDeploy, true),
with: {
email: true,
discord: true,
telegram: true,
slack: true,
},
});
for (const notification of notificationList) {
const { email, discord, telegram, slack } = notification;
if (email) {
const template = await renderAsync(
BuildSuccessEmail({
projectName,
applicationName,
applicationType,
buildLink,
date: date.toLocaleString(),
}),
).catch();
await sendEmailNotification(email, "Build success for dokploy", template);
}
if (discord) {
await sendDiscordNotification(discord, {
title: "✅ Build Success",
color: 0x00ff00,
fields: [
{
name: "Project",
value: projectName,
inline: true,
},
{
name: "Application",
value: applicationName,
inline: true,
},
{
name: "Type",
value: applicationType,
inline: true,
},
{
name: "Build Link",
value: buildLink,
},
],
timestamp: date.toISOString(),
footer: {
text: "Dokploy Build Notification",
},
});
}
if (telegram) {
await sendTelegramNotification(
telegram,
`
<b>✅ Build Success</b>
<b>Project:</b> ${projectName}
<b>Application:</b> ${applicationName}
<b>Type:</b> ${applicationType}
<b>Time:</b> ${date.toLocaleString()}
<b>Build Details:</b> ${buildLink}
`,
);
}
if (slack) {
const { channel } = slack;
await sendSlackNotification(slack, {
channel: channel,
attachments: [
{
color: "#00FF00",
pretext: ":white_check_mark: *Build Success*",
fields: [
{
title: "Project",
value: projectName,
short: true,
},
{
title: "Application",
value: applicationName,
short: true,
},
{
title: "Type",
value: applicationType,
short: true,
},
{
title: "Time",
value: date.toLocaleString(),
short: true,
},
],
actions: [
{
type: "button",
text: "View Build Details",
url: buildLink,
},
],
},
],
});
}
}
};

View File

@@ -0,0 +1,177 @@
import { db } from "@/server/db";
import { notifications } from "@/server/db/schema";
import DatabaseBackupEmail from "@/server/emails/emails/database-backup";
import { renderAsync } from "@react-email/components";
import { eq } from "drizzle-orm";
import {
sendDiscordNotification,
sendEmailNotification,
sendSlackNotification,
sendTelegramNotification,
} from "./utils";
export const sendDatabaseBackupNotifications = async ({
projectName,
applicationName,
databaseType,
type,
errorMessage,
}: {
projectName: string;
applicationName: string;
databaseType: "postgres" | "mysql" | "mongodb" | "mariadb";
type: "error" | "success";
errorMessage?: string;
}) => {
const date = new Date();
const notificationList = await db.query.notifications.findMany({
where: eq(notifications.databaseBackup, true),
with: {
email: true,
discord: true,
telegram: true,
slack: true,
},
});
for (const notification of notificationList) {
const { email, discord, telegram, slack } = notification;
if (email) {
const template = await renderAsync(
DatabaseBackupEmail({
projectName,
applicationName,
databaseType,
type,
errorMessage,
date: date.toLocaleString(),
}),
).catch();
await sendEmailNotification(
email,
"Database backup for dokploy",
template,
);
}
if (discord) {
await sendDiscordNotification(discord, {
title:
type === "success"
? "✅ Database Backup Successful"
: "❌ Database Backup Failed",
color: type === "success" ? 0x00ff00 : 0xff0000,
fields: [
{
name: "Project",
value: projectName,
inline: true,
},
{
name: "Application",
value: applicationName,
inline: true,
},
{
name: "Type",
value: databaseType,
inline: true,
},
{
name: "Time",
value: date.toLocaleString(),
inline: true,
},
{
name: "Type",
value: type,
},
...(type === "error" && errorMessage
? [
{
name: "Error Message",
value: errorMessage,
},
]
: []),
],
timestamp: date.toISOString(),
footer: {
text: "Dokploy Database Backup Notification",
},
});
}
if (telegram) {
const statusEmoji = type === "success" ? "✅" : "❌";
const messageText = `
<b>${statusEmoji} Database Backup ${type === "success" ? "Successful" : "Failed"}</b>
<b>Project:</b> ${projectName}
<b>Application:</b> ${applicationName}
<b>Type:</b> ${databaseType}
<b>Time:</b> ${date.toLocaleString()}
<b>Status:</b> ${type === "success" ? "Successful" : "Failed"}
${type === "error" && errorMessage ? `<b>Error:</b> ${errorMessage}` : ""}
`;
await sendTelegramNotification(telegram, messageText);
}
if (slack) {
const { channel } = slack;
await sendSlackNotification(slack, {
channel: channel,
attachments: [
{
color: type === "success" ? "#00FF00" : "#FF0000",
pretext:
type === "success"
? ":white_check_mark: *Database Backup Successful*"
: ":x: *Database Backup Failed*",
fields: [
...(type === "error" && errorMessage
? [
{
title: "Error Message",
value: errorMessage,
short: false,
},
]
: []),
{
title: "Project",
value: projectName,
short: true,
},
{
title: "Application",
value: applicationName,
short: true,
},
{
title: "Type",
value: databaseType,
short: true,
},
{
title: "Time",
value: date.toLocaleString(),
short: true,
},
{
title: "Type",
value: type,
},
{
title: "Status",
value: type === "success" ? "Successful" : "Failed",
},
],
},
],
});
}
}
};

View File

@@ -0,0 +1,94 @@
import { db } from "@/server/db";
import { notifications } from "@/server/db/schema";
import DockerCleanupEmail from "@/server/emails/emails/docker-cleanup";
import { renderAsync } from "@react-email/components";
import { eq } from "drizzle-orm";
import {
sendDiscordNotification,
sendEmailNotification,
sendSlackNotification,
sendTelegramNotification,
} from "./utils";
export const sendDockerCleanupNotifications = async (
message = "Docker cleanup for dokploy",
) => {
const date = new Date();
const notificationList = await db.query.notifications.findMany({
where: eq(notifications.dockerCleanup, true),
with: {
email: true,
discord: true,
telegram: true,
slack: true,
},
});
for (const notification of notificationList) {
const { email, discord, telegram, slack } = notification;
if (email) {
const template = await renderAsync(
DockerCleanupEmail({ message, date: date.toLocaleString() }),
).catch();
await sendEmailNotification(
email,
"Docker cleanup for dokploy",
template,
);
}
if (discord) {
await sendDiscordNotification(discord, {
title: "✅ Docker Cleanup",
color: 0x00ff00,
fields: [
{
name: "Message",
value: message,
},
],
timestamp: date.toISOString(),
footer: {
text: "Dokploy Docker Cleanup Notification",
},
});
}
if (telegram) {
await sendTelegramNotification(
telegram,
`
<b>✅ Docker Cleanup</b>
<b>Message:</b> ${message}
<b>Time:</b> ${date.toLocaleString()}
`,
);
}
if (slack) {
const { channel } = slack;
await sendSlackNotification(slack, {
channel: channel,
attachments: [
{
color: "#00FF00",
pretext: ":white_check_mark: *Docker Cleanup*",
fields: [
{
title: "Message",
value: message,
},
{
title: "Time",
value: date.toLocaleString(),
short: true,
},
],
},
],
});
}
}
};

View File

@@ -0,0 +1,83 @@
import { db } from "@/server/db";
import { notifications } from "@/server/db/schema";
import DokployRestartEmail from "@/server/emails/emails/dokploy-restart";
import { renderAsync } from "@react-email/components";
import { eq } from "drizzle-orm";
import {
sendDiscordNotification,
sendEmailNotification,
sendSlackNotification,
sendTelegramNotification,
} from "./utils";
export const sendDokployRestartNotifications = async () => {
const date = new Date();
const notificationList = await db.query.notifications.findMany({
where: eq(notifications.dokployRestart, true),
with: {
email: true,
discord: true,
telegram: true,
slack: true,
},
});
for (const notification of notificationList) {
const { email, discord, telegram, slack } = notification;
if (email) {
const template = await renderAsync(
DokployRestartEmail({ date: date.toLocaleString() }),
).catch();
await sendEmailNotification(email, "Dokploy Server Restarted", template);
}
if (discord) {
await sendDiscordNotification(discord, {
title: "✅ Dokploy Server Restarted",
color: 0x00ff00,
fields: [
{
name: "Time",
value: date.toLocaleString(),
inline: true,
},
],
timestamp: date.toISOString(),
footer: {
text: "Dokploy Restart Notification",
},
});
}
if (telegram) {
await sendTelegramNotification(
telegram,
`
			<b>✅ Dokploy Server Restarted</b>
<b>Time:</b> ${date.toLocaleString()}
`,
);
}
if (slack) {
const { channel } = slack;
await sendSlackNotification(slack, {
channel: channel,
attachments: [
{
color: "#00FF00",
pretext: ":white_check_mark: *Dokploy Server Restarted*",
fields: [
{
title: "Time",
value: date.toLocaleString(),
short: true,
},
],
},
],
});
}
}
};

View File

@@ -0,0 +1,84 @@
import type { discord, email, slack, telegram } from "@/server/db/schema";
import nodemailer from "nodemailer";
export const sendEmailNotification = async (
connection: typeof email.$inferInsert,
subject: string,
htmlContent: string,
) => {
try {
const {
smtpServer,
smtpPort,
username,
password,
fromAddress,
toAddresses,
} = connection;
const transporter = nodemailer.createTransport({
host: smtpServer,
port: smtpPort,
auth: { user: username, pass: password },
});
await transporter.sendMail({
from: fromAddress,
to: toAddresses.join(", "),
subject,
html: htmlContent,
});
} catch (err) {
console.log(err);
}
};
export const sendDiscordNotification = async (
connection: typeof discord.$inferInsert,
embed: any,
) => {
try {
await fetch(connection.webhookUrl, {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ embeds: [embed] }),
});
} catch (err) {
console.log(err);
}
};
export const sendTelegramNotification = async (
connection: typeof telegram.$inferInsert,
messageText: string,
) => {
try {
const url = `https://api.telegram.org/bot${connection.botToken}/sendMessage`;
await fetch(url, {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
chat_id: connection.chatId,
text: messageText,
parse_mode: "HTML",
disable_web_page_preview: true,
}),
});
} catch (err) {
console.log(err);
}
};
export const sendSlackNotification = async (
connection: typeof slack.$inferInsert,
message: any,
) => {
try {
await fetch(connection.webhookUrl, {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify(message),
});
} catch (err) {
console.log(err);
}
};

View File

@@ -0,0 +1,70 @@
import { exec } from "node:child_process";
import util from "node:util";
import { findServerById } from "@/server/services/server";
import { Client } from "ssh2";
export const execAsync = util.promisify(exec);
export const execAsyncRemote = async (
serverId: string | null,
command: string,
): Promise<{ stdout: string; stderr: string }> => {
if (!serverId) return { stdout: "", stderr: "" };
const server = await findServerById(serverId);
if (!server.sshKeyId) throw new Error("No SSH key available for this server");
let stdout = "";
let stderr = "";
return new Promise((resolve, reject) => {
const conn = new Client();
conn
.once("ready", () => {
conn.exec(command, (err, stream) => {
if (err) throw err;
stream
.on("close", (code: number, signal: string) => {
conn.end();
if (code === 0) {
resolve({ stdout, stderr });
} else {
reject(
new Error(
`Command exited with code ${code}. Stderr: ${stderr}, command: ${command}`,
),
);
}
})
.on("data", (data: string) => {
stdout += data.toString();
})
.stderr.on("data", (data) => {
stderr += data.toString();
});
});
})
.on("error", (err) => {
conn.end();
if (err.level === "client-authentication") {
reject(
new Error(
`Authentication failed: Invalid SSH private key. ❌ Error: ${err.message} ${err.level}`,
),
);
} else {
reject(new Error(`SSH connection error: ${err.message}`));
}
})
.connect({
host: server.ipAddress,
port: server.port,
username: server.username,
privateKey: server.sshKey?.privateKey,
timeout: 99999,
});
});
};
export const sleep = (ms: number) => {
return new Promise((resolve) => setTimeout(resolve, ms));
};
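// Usage sketch (illustrative): the same shell command can run locally or over
// SSH depending on whether a serverId is available in the calling context.
// const cmd = "docker ps -q --filter name=myapp";
// const { stdout } = serverId ? await execAsyncRemote(serverId, cmd) : await execAsync(cmd);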

View File

@@ -0,0 +1,58 @@
import {
type ChildProcess,
type SpawnOptions,
spawn,
} from "node:child_process";
import BufferList from "bl";
export const spawnAsync = (
command: string,
args?: string[] | undefined,
	onData?: (data: string) => void, // Optional callback to handle output in real time
options?: SpawnOptions,
): Promise<BufferList> & { child: ChildProcess } => {
const child = spawn(command, args ?? [], options ?? {});
	const stdout = new BufferList();
	const stderr = new BufferList();
if (child.stdout) {
child.stdout.on("data", (data) => {
stdout.append(data);
if (onData) {
onData(data.toString());
}
});
}
if (child.stderr) {
child.stderr.on("data", (data) => {
stderr.append(data);
if (onData) {
onData(data.toString());
}
});
}
const promise = new Promise<BufferList>((resolve, reject) => {
child.on("error", reject);
child.on("close", (code) => {
if (code === 0) {
resolve(stdout);
} else {
const err = new Error(`${stderr.toString()}`) as Error & {
code: number;
stderr: BufferList;
stdout: BufferList;
};
err.code = code || -1;
err.stderr = stderr;
err.stdout = stdout;
reject(err);
}
});
}) as Promise<BufferList> & { child: ChildProcess };
promise.child = child;
return promise;
};
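// Usage sketch (illustrative): stream a long-running command's output into a
// deployment log while still awaiting its completion.
// await spawnAsync("git", ["clone", "--depth", "1", cloneUrl, outputPath, "--progress"], (chunk) => {
//   if (writeStream.writable) writeStream.write(chunk);
// });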

View File

@@ -0,0 +1,361 @@
import { createWriteStream } from "node:fs";
import { join } from "node:path";
import { paths } from "@/server/constants";
import type {
apiBitbucketTestConnection,
apiFindBitbucketBranches,
} from "@/server/db/schema";
import { findBitbucketById } from "@/server/services/bitbucket";
import type { Compose } from "@/server/services/compose";
import type { InferResultType } from "@/server/types/with";
import { TRPCError } from "@trpc/server";
import { recreateDirectory } from "../filesystem/directory";
import { execAsyncRemote } from "../process/execAsync";
import { spawnAsync } from "../process/spawnAsync";
export type ApplicationWithBitbucket = InferResultType<
"applications",
{ bitbucket: true }
>;
export type ComposeWithBitbucket = InferResultType<
"compose",
{ bitbucket: true }
>;
export const cloneBitbucketRepository = async (
entity: ApplicationWithBitbucket | ComposeWithBitbucket,
logPath: string,
isCompose = false,
) => {
const { COMPOSE_PATH, APPLICATIONS_PATH } = paths();
const writeStream = createWriteStream(logPath, { flags: "a" });
const {
appName,
bitbucketRepository,
bitbucketOwner,
bitbucketBranch,
bitbucketId,
bitbucket,
} = entity;
if (!bitbucketId) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Bitbucket Provider not found",
});
}
const basePath = isCompose ? COMPOSE_PATH : APPLICATIONS_PATH;
const outputPath = join(basePath, appName, "code");
await recreateDirectory(outputPath);
const repoclone = `bitbucket.org/${bitbucketOwner}/${bitbucketRepository}.git`;
const cloneUrl = `https://${bitbucket?.bitbucketUsername}:${bitbucket?.appPassword}@${repoclone}`;
try {
writeStream.write(`\nCloning Repo ${repoclone} to ${outputPath}: ✅\n`);
await spawnAsync(
"git",
[
"clone",
"--branch",
bitbucketBranch!,
"--depth",
"1",
cloneUrl,
outputPath,
"--progress",
],
(data) => {
if (writeStream.writable) {
writeStream.write(data);
}
},
);
writeStream.write(`\nCloned ${repoclone} to ${outputPath}: ✅\n`);
} catch (error) {
		writeStream.write(`ERROR Cloning: ${error}: ❌`);
throw error;
} finally {
writeStream.end();
}
};
export const cloneRawBitbucketRepository = async (entity: Compose) => {
const { COMPOSE_PATH } = paths();
const {
appName,
bitbucketRepository,
bitbucketOwner,
bitbucketBranch,
bitbucketId,
} = entity;
if (!bitbucketId) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Bitbucket Provider not found",
});
}
const bitbucketProvider = await findBitbucketById(bitbucketId);
const basePath = COMPOSE_PATH;
const outputPath = join(basePath, appName, "code");
await recreateDirectory(outputPath);
const repoclone = `bitbucket.org/${bitbucketOwner}/${bitbucketRepository}.git`;
const cloneUrl = `https://${bitbucketProvider?.bitbucketUsername}:${bitbucketProvider?.appPassword}@${repoclone}`;
try {
await spawnAsync("git", [
"clone",
"--branch",
bitbucketBranch!,
"--depth",
"1",
cloneUrl,
outputPath,
"--progress",
]);
} catch (error) {
throw error;
}
};
export const cloneRawBitbucketRepositoryRemote = async (compose: Compose) => {
const { COMPOSE_PATH } = paths(true);
const {
appName,
bitbucketRepository,
bitbucketOwner,
bitbucketBranch,
bitbucketId,
serverId,
} = compose;
if (!serverId) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Server not found",
});
}
if (!bitbucketId) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Bitbucket Provider not found",
});
}
const bitbucketProvider = await findBitbucketById(bitbucketId);
const basePath = COMPOSE_PATH;
const outputPath = join(basePath, appName, "code");
const repoclone = `bitbucket.org/${bitbucketOwner}/${bitbucketRepository}.git`;
const cloneUrl = `https://${bitbucketProvider?.bitbucketUsername}:${bitbucketProvider?.appPassword}@${repoclone}`;
try {
const command = `
rm -rf ${outputPath};
git clone --branch ${bitbucketBranch} --depth 1 ${cloneUrl} ${outputPath}
`;
await execAsyncRemote(serverId, command);
} catch (error) {
throw error;
}
};
export const getBitbucketCloneCommand = async (
entity: ApplicationWithBitbucket | ComposeWithBitbucket,
logPath: string,
isCompose = false,
) => {
const { COMPOSE_PATH, APPLICATIONS_PATH } = paths(true);
const {
appName,
bitbucketRepository,
bitbucketOwner,
bitbucketBranch,
bitbucketId,
serverId,
bitbucket,
} = entity;
if (!serverId) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Server not found",
});
}
if (!bitbucketId) {
const command = `
echo "Error: ❌ Bitbucket Provider not found" >> ${logPath};
exit 1;
`;
await execAsyncRemote(serverId, command);
throw new TRPCError({
code: "NOT_FOUND",
message: "Bitbucket Provider not found",
});
}
const bitbucketProvider = await findBitbucketById(bitbucketId);
const basePath = isCompose ? COMPOSE_PATH : APPLICATIONS_PATH;
const outputPath = join(basePath, appName, "code");
await recreateDirectory(outputPath);
const repoclone = `bitbucket.org/${bitbucketOwner}/${bitbucketRepository}.git`;
const cloneUrl = `https://${bitbucketProvider?.bitbucketUsername}:${bitbucketProvider?.appPassword}@${repoclone}`;
const cloneCommand = `
rm -rf ${outputPath};
mkdir -p ${outputPath};
if ! git clone --branch ${bitbucketBranch} --depth 1 --progress ${cloneUrl} ${outputPath} >> ${logPath} 2>&1; then
			echo "❌ [ERROR] Failed to clone the repository ${repoclone}" >> ${logPath};
exit 1;
fi
echo "Cloned ${repoclone} to ${outputPath}: ✅" >> ${logPath};
`;
return cloneCommand;
};
export const getBitbucketRepositories = async (bitbucketId?: string) => {
if (!bitbucketId) {
return [];
}
const bitbucketProvider = await findBitbucketById(bitbucketId);
const username =
bitbucketProvider.bitbucketWorkspaceName ||
bitbucketProvider.bitbucketUsername;
const url = `https://api.bitbucket.org/2.0/repositories/${username}?pagelen=100`;
try {
const response = await fetch(url, {
method: "GET",
headers: {
Authorization: `Basic ${Buffer.from(`${bitbucketProvider.bitbucketUsername}:${bitbucketProvider.appPassword}`).toString("base64")}`,
},
});
if (!response.ok) {
throw new TRPCError({
code: "BAD_REQUEST",
message: `Failed to fetch repositories: ${response.statusText}`,
});
}
const data = await response.json();
const mappedData = data.values.map((repo: any) => {
return {
name: repo.name,
url: repo.links.html.href,
owner: {
username: repo.workspace.slug,
},
};
});
return mappedData as {
name: string;
url: string;
owner: {
username: string;
};
}[];
} catch (error) {
throw error;
}
};
export const getBitbucketBranches = async (
input: typeof apiFindBitbucketBranches._type,
) => {
if (!input.bitbucketId) {
return [];
}
const bitbucketProvider = await findBitbucketById(input.bitbucketId);
const { owner, repo } = input;
const url = `https://api.bitbucket.org/2.0/repositories/${owner}/${repo}/refs/branches`;
try {
const response = await fetch(url, {
method: "GET",
headers: {
Authorization: `Basic ${Buffer.from(`${bitbucketProvider.bitbucketUsername}:${bitbucketProvider.appPassword}`).toString("base64")}`,
},
});
if (!response.ok) {
throw new TRPCError({
code: "BAD_REQUEST",
message: `HTTP error! status: ${response.status}`,
});
}
const data = await response.json();
const mappedData = data.values.map((branch: any) => {
return {
name: branch.name,
commit: {
sha: branch.target.hash,
},
};
});
return mappedData as {
name: string;
commit: {
sha: string;
};
}[];
} catch (error) {
throw error;
}
};
export const testBitbucketConnection = async (
input: typeof apiBitbucketTestConnection._type,
) => {
const bitbucketProvider = await findBitbucketById(input.bitbucketId);
if (!bitbucketProvider) {
throw new Error("Bitbucket provider not found");
}
const { bitbucketUsername, workspaceName } = input;
const username = workspaceName || bitbucketUsername;
const url = `https://api.bitbucket.org/2.0/repositories/${username}`;
try {
const response = await fetch(url, {
method: "GET",
headers: {
Authorization: `Basic ${Buffer.from(`${bitbucketProvider.bitbucketUsername}:${bitbucketProvider.appPassword}`).toString("base64")}`,
},
});
if (!response.ok) {
throw new TRPCError({
code: "BAD_REQUEST",
message: `Failed to fetch repositories: ${response.statusText}`,
});
}
const data = await response.json();
const mappedData = data.values.map((repo: any) => {
return {
name: repo.name,
url: repo.links.html.href,
owner: {
username: repo.workspace.slug,
},
};
}) as [];
return mappedData.length;
} catch (error) {
throw error;
}
};

View File

@@ -0,0 +1,86 @@
import { createWriteStream } from "node:fs";
import { type ApplicationNested, mechanizeDockerContainer } from "../builders";
import { pullImage } from "../docker/utils";
interface RegistryAuth {
username: string;
password: string;
serveraddress: string;
}
export const buildDocker = async (
application: ApplicationNested,
logPath: string,
): Promise<void> => {
const { buildType, dockerImage, username, password } = application;
const authConfig: Partial<RegistryAuth> = {
username: username || "",
password: password || "",
};
const writeStream = createWriteStream(logPath, { flags: "a" });
writeStream.write(`\nBuild ${buildType}\n`);
writeStream.write(`Pulling ${dockerImage}: ✅\n`);
try {
if (!dockerImage) {
throw new Error("Docker image not found");
}
await pullImage(
dockerImage,
(data) => {
if (writeStream.writable) {
writeStream.write(`${data.status}\n`);
}
},
authConfig,
);
await mechanizeDockerContainer(application);
writeStream.write("\nDocker Deployed: ✅\n");
} catch (error) {
writeStream.write(`ERROR: ${error}: ❌`);
throw error;
} finally {
writeStream.end();
}
};
export const buildRemoteDocker = async (
application: ApplicationNested,
logPath: string,
) => {
const { sourceType, dockerImage, username, password } = application;
try {
if (!dockerImage) {
throw new Error("Docker image not found");
}
let command = `
echo "Pulling ${dockerImage}" >> ${logPath};
`;
if (username && password) {
command += `
if ! docker login --username ${username} --password ${password} https://index.docker.io/v1/ >> ${logPath} 2>&1; then
echo "❌ Login failed" >> ${logPath};
exit 1;
fi
`;
}
command += `
docker pull ${dockerImage} >> ${logPath} 2>> ${logPath} || {
echo "❌ Pulling image failed" >> ${logPath};
exit 1;
}
echo "✅ Pulling image completed." >> ${logPath};
`;
return command;
} catch (error) {
throw error;
}
};

View File

@@ -0,0 +1,347 @@
import { createWriteStream } from "node:fs";
import path, { join } from "node:path";
import { paths } from "@/server/constants";
import type { Compose } from "@/server/services/compose";
import { updateSSHKeyById } from "@/server/services/ssh-key";
import { TRPCError } from "@trpc/server";
import { recreateDirectory } from "../filesystem/directory";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { spawnAsync } from "../process/spawnAsync";
export const cloneGitRepository = async (
entity: {
appName: string;
customGitUrl?: string | null;
customGitBranch?: string | null;
customGitSSHKeyId?: string | null;
},
logPath: string,
isCompose = false,
) => {
const { SSH_PATH, COMPOSE_PATH, APPLICATIONS_PATH } = paths();
const { appName, customGitUrl, customGitBranch, customGitSSHKeyId } = entity;
if (!customGitUrl || !customGitBranch) {
throw new TRPCError({
code: "BAD_REQUEST",
message: "Error: Repository not found",
});
}
const writeStream = createWriteStream(logPath, { flags: "a" });
const keyPath = path.join(SSH_PATH, `${customGitSSHKeyId}_rsa`);
const basePath = isCompose ? COMPOSE_PATH : APPLICATIONS_PATH;
const outputPath = join(basePath, appName, "code");
const knownHostsPath = path.join(SSH_PATH, "known_hosts");
try {
if (!isHttpOrHttps(customGitUrl)) {
await addHostToKnownHosts(customGitUrl);
}
await recreateDirectory(outputPath);
// const command = `GIT_SSH_COMMAND="ssh -i ${keyPath} -o UserKnownHostsFile=${knownHostsPath}" git clone --branch ${customGitBranch} --depth 1 ${customGitUrl} ${gitCopyPath} --progress`;
// const { stdout, stderr } = await execAsync(command);
writeStream.write(
`\nCloning Repo Custom ${customGitUrl} to ${outputPath}: ✅\n`,
);
if (customGitSSHKeyId) {
await updateSSHKeyById({
sshKeyId: customGitSSHKeyId,
lastUsedAt: new Date().toISOString(),
});
}
await spawnAsync(
"git",
[
"clone",
"--branch",
customGitBranch,
"--depth",
"1",
"--recurse-submodules",
customGitUrl,
outputPath,
"--progress",
],
(data) => {
if (writeStream.writable) {
writeStream.write(data);
}
},
{
env: {
...process.env,
...(customGitSSHKeyId && {
GIT_SSH_COMMAND: `ssh -i ${keyPath} -o UserKnownHostsFile=${knownHostsPath}`,
}),
},
},
);
writeStream.write(`\nCloned Custom Git ${customGitUrl}: ✅\n`);
} catch (error) {
writeStream.write(`\nERROR Cloning Custom Git: ${error}: ❌\n`);
throw error;
} finally {
writeStream.end();
}
};
export const getCustomGitCloneCommand = async (
entity: {
appName: string;
customGitUrl?: string | null;
customGitBranch?: string | null;
customGitSSHKeyId?: string | null;
serverId: string | null;
},
logPath: string,
isCompose = false,
) => {
const { SSH_PATH, COMPOSE_PATH, APPLICATIONS_PATH } = paths(true);
const {
appName,
customGitUrl,
customGitBranch,
customGitSSHKeyId,
serverId,
} = entity;
if (!customGitUrl || !customGitBranch) {
const command = `
echo "Error: ❌ Repository not found" >> ${logPath};
exit 1;
`;
await execAsyncRemote(serverId, command);
throw new TRPCError({
code: "BAD_REQUEST",
message: "Error: Repository not found",
});
}
const keyPath = path.join(SSH_PATH, `${customGitSSHKeyId}_rsa`);
const basePath = isCompose ? COMPOSE_PATH : APPLICATIONS_PATH;
const outputPath = join(basePath, appName, "code");
const knownHostsPath = path.join(SSH_PATH, "known_hosts");
if (customGitSSHKeyId) {
await updateSSHKeyById({
sshKeyId: customGitSSHKeyId,
lastUsedAt: new Date().toISOString(),
});
}
try {
const command = [];
if (!isHttpOrHttps(customGitUrl)) {
command.push(addHostToKnownHostsCommand(customGitUrl));
}
command.push(`rm -rf ${outputPath};`);
command.push(`mkdir -p ${outputPath};`);
command.push(
			`echo "Cloning Custom Git ${customGitUrl} to ${outputPath}: ✅" >> ${logPath};`,
);
if (customGitSSHKeyId) {
command.push(
`GIT_SSH_COMMAND="ssh -i ${keyPath} -o UserKnownHostsFile=${knownHostsPath}"`,
);
}
command.push(
`if ! git clone --branch ${customGitBranch} --depth 1 --progress ${customGitUrl} ${outputPath} >> ${logPath} 2>&1; then
				echo "❌ [ERROR] Failed to clone the repository ${customGitUrl}" >> ${logPath};
exit 1;
fi
`,
);
command.push(`echo "Cloned Custom Git ${customGitUrl}: ✅" >> ${logPath};`);
return command.join("\n");
} catch (error) {
console.log(error);
throw error;
}
};
const isHttpOrHttps = (url: string): boolean => {
const regex = /^https?:\/\//;
return regex.test(url);
};
const addHostToKnownHosts = async (repositoryURL: string) => {
const { SSH_PATH } = paths();
const { domain, port } = sanitizeRepoPathSSH(repositoryURL);
const knownHostsPath = path.join(SSH_PATH, "known_hosts");
const command = `ssh-keyscan -p ${port} ${domain} >> ${knownHostsPath}`;
try {
await execAsync(command);
} catch (error) {
console.error(`Error adding host to known_hosts: ${error}`);
throw error;
}
};
const addHostToKnownHostsCommand = (repositoryURL: string) => {
const { SSH_PATH } = paths();
const { domain, port } = sanitizeRepoPathSSH(repositoryURL);
const knownHostsPath = path.join(SSH_PATH, "known_hosts");
return `ssh-keyscan -p ${port} ${domain} >> ${knownHostsPath};`;
};
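// Parses SSH-style remote URLs such as "git@github.com:owner/repo.git" or
// "ssh://git@host:2222/owner/repo.git" into user, domain, port, owner and repo,
// defaulting the user to "git" and the port to 22 when they are omitted.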
const sanitizeRepoPathSSH = (input: string) => {
const SSH_PATH_RE = new RegExp(
[
/^\s*/,
/(?:(?<proto>[a-z]+):\/\/)?/,
/(?:(?<user>[a-z_][a-z0-9_-]+)@)?/,
/(?<domain>[^\s\/\?#:]+)/,
/(?::(?<port>[0-9]{1,5}))?/,
/(?:[\/:](?<owner>[^\s\/\?#:]+))?/,
/(?:[\/:](?<repo>(?:[^\s\?#:.]|\.(?!git\/?\s*$))+))/,
/(?:.git)?\/?\s*$/,
]
.map((r) => r.source)
.join(""),
"i",
);
const found = input.match(SSH_PATH_RE);
if (!found) {
throw new Error(`Malformatted SSH path: ${input}`);
}
return {
user: found.groups?.user ?? "git",
domain: found.groups?.domain,
port: Number(found.groups?.port ?? 22),
owner: found.groups?.owner ?? "",
repo: found.groups?.repo,
get repoPath() {
return `ssh://${this.user}@${this.domain}:${this.port}/${this.owner}${
this.owner && "/"
}${this.repo}.git`;
},
};
};
export const cloneGitRawRepository = async (entity: {
appName: string;
customGitUrl?: string | null;
customGitBranch?: string | null;
customGitSSHKeyId?: string | null;
}) => {
const { appName, customGitUrl, customGitBranch, customGitSSHKeyId } = entity;
if (!customGitUrl || !customGitBranch) {
throw new TRPCError({
code: "BAD_REQUEST",
message: "Error: Repository not found",
});
}
const { SSH_PATH, COMPOSE_PATH } = paths();
const keyPath = path.join(SSH_PATH, `${customGitSSHKeyId}_rsa`);
const basePath = COMPOSE_PATH;
const outputPath = join(basePath, appName, "code");
const knownHostsPath = path.join(SSH_PATH, "known_hosts");
try {
await addHostToKnownHosts(customGitUrl);
await recreateDirectory(outputPath);
if (customGitSSHKeyId) {
await updateSSHKeyById({
sshKeyId: customGitSSHKeyId,
lastUsedAt: new Date().toISOString(),
});
}
await spawnAsync(
"git",
[
"clone",
"--branch",
customGitBranch,
"--depth",
"1",
customGitUrl,
outputPath,
"--progress",
],
(data) => {},
{
env: {
...process.env,
...(customGitSSHKeyId && {
GIT_SSH_COMMAND: `ssh -i ${keyPath} -o UserKnownHostsFile=${knownHostsPath}`,
}),
},
},
);
} catch (error) {
throw error;
}
};
export const cloneRawGitRepositoryRemote = async (compose: Compose) => {
const {
appName,
customGitBranch,
customGitUrl,
customGitSSHKeyId,
serverId,
} = compose;
if (!serverId) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Server not found",
});
}
if (!customGitUrl) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Git Provider not found",
});
}
const { SSH_PATH, COMPOSE_PATH } = paths(true);
const keyPath = path.join(SSH_PATH, `${customGitSSHKeyId}_rsa`);
const basePath = COMPOSE_PATH;
const outputPath = join(basePath, appName, "code");
const knownHostsPath = path.join(SSH_PATH, "known_hosts");
if (customGitSSHKeyId) {
await updateSSHKeyById({
sshKeyId: customGitSSHKeyId,
lastUsedAt: new Date().toISOString(),
});
}
try {
const command = [];
if (!isHttpOrHttps(customGitUrl)) {
command.push(addHostToKnownHostsCommand(customGitUrl));
}
command.push(`rm -rf ${outputPath};`);
command.push(`mkdir -p ${outputPath};`);
if (customGitSSHKeyId) {
command.push(
`GIT_SSH_COMMAND="ssh -i ${keyPath} -o UserKnownHostsFile=${knownHostsPath}"`,
);
}
command.push(
`if ! git clone --branch ${customGitBranch} --depth 1 --progress ${customGitUrl} ${outputPath} ; then
				echo "[ERROR] Failed to clone the repository";
exit 1;
fi
`,
);
await execAsyncRemote(serverId, command.join("\n"));
} catch (error) {
throw error;
}
};

View File

@@ -0,0 +1,336 @@
import { createWriteStream } from "node:fs";
import { join } from "node:path";
import { paths } from "@/server/constants";
import type { InferResultType } from "@/server/types/with";
import { createAppAuth } from "@octokit/auth-app";
import { TRPCError } from "@trpc/server";
import { Octokit } from "octokit";
import { recreateDirectory } from "../filesystem/directory";
import { spawnAsync } from "../process/spawnAsync";
import type { apiFindGithubBranches } from "@/server/db/schema";
import type { Compose } from "@/server/services/compose";
import { type Github, findGithubById } from "@/server/services/github";
import { execAsyncRemote } from "../process/execAsync";
export const authGithub = (githubProvider: Github): Octokit => {
if (!haveGithubRequirements(githubProvider)) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Github Account not configured correctly",
});
}
const octokit: Octokit = new Octokit({
authStrategy: createAppAuth,
auth: {
appId: githubProvider?.githubAppId || 0,
privateKey: githubProvider?.githubPrivateKey || "",
installationId: githubProvider?.githubInstallationId,
},
});
return octokit;
};
export const getGithubToken = async (
octokit: ReturnType<typeof authGithub>,
) => {
const installation = (await octokit.auth({
type: "installation",
})) as {
token: string;
};
return installation.token;
};
export const haveGithubRequirements = (githubProvider: Github) => {
return !!(
githubProvider?.githubAppId &&
githubProvider?.githubPrivateKey &&
githubProvider?.githubInstallationId
);
};
const getErrorCloneRequirements = (entity: {
repository?: string | null;
owner?: string | null;
branch?: string | null;
}) => {
const reasons: string[] = [];
const { repository, owner, branch } = entity;
if (!repository) reasons.push("1. Repository not assigned.");
if (!owner) reasons.push("2. Owner not specified.");
if (!branch) reasons.push("3. Branch not defined.");
return reasons;
};
export type ApplicationWithGithub = InferResultType<
"applications",
{ github: true }
>;
export type ComposeWithGithub = InferResultType<"compose", { github: true }>;
export const cloneGithubRepository = async (
entity: ApplicationWithGithub | ComposeWithGithub,
logPath: string,
isCompose = false,
) => {
const { APPLICATIONS_PATH, COMPOSE_PATH } = paths();
const writeStream = createWriteStream(logPath, { flags: "a" });
const { appName, repository, owner, branch, githubId } = entity;
if (!githubId) {
throw new TRPCError({
code: "NOT_FOUND",
message: "GitHub Provider not found",
});
}
const requirements = getErrorCloneRequirements(entity);
// Check if requirements are met
if (requirements.length > 0) {
writeStream.write(
`\nGitHub Repository configuration failed for application: ${appName}\n`,
);
writeStream.write("Reasons:\n");
writeStream.write(requirements.join("\n"));
writeStream.end();
throw new TRPCError({
code: "BAD_REQUEST",
message: "Error: GitHub repository information is incomplete.",
});
}
const githubProvider = await findGithubById(githubId);
const basePath = isCompose ? COMPOSE_PATH : APPLICATIONS_PATH;
const outputPath = join(basePath, appName, "code");
const octokit = authGithub(githubProvider);
const token = await getGithubToken(octokit);
const repoclone = `github.com/${owner}/${repository}.git`;
await recreateDirectory(outputPath);
const cloneUrl = `https://oauth2:${token}@${repoclone}`;
try {
		writeStream.write(`\nCloning Repo ${repoclone} to ${outputPath}: ✅\n`);
await spawnAsync(
"git",
[
"clone",
"--branch",
branch!,
"--depth",
"1",
cloneUrl,
outputPath,
"--progress",
],
(data) => {
if (writeStream.writable) {
writeStream.write(data);
}
},
);
writeStream.write(`\nCloned ${repoclone}: ✅\n`);
} catch (error) {
		writeStream.write(`ERROR Cloning: ${error}: ❌`);
throw error;
} finally {
writeStream.end();
}
};
export const getGithubCloneCommand = async (
entity: ApplicationWithGithub | ComposeWithGithub,
logPath: string,
isCompose = false,
) => {
const { appName, repository, owner, branch, githubId, serverId } = entity;
if (!serverId) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Server not found",
});
}
if (!githubId) {
const command = `
echo "Error: ❌ Github Provider not found" >> ${logPath};
exit 1;
`;
await execAsyncRemote(serverId, command);
throw new TRPCError({
code: "NOT_FOUND",
message: "GitHub Provider not found",
});
}
const requirements = getErrorCloneRequirements(entity);
// Build log messages
let logMessages = "";
if (requirements.length > 0) {
logMessages += `\nGitHub Repository configuration failed for application: ${appName}\n`;
logMessages += "Reasons:\n";
logMessages += requirements.join("\n");
const escapedLogMessages = logMessages
.replace(/\\/g, "\\\\")
.replace(/"/g, '\\"')
.replace(/\n/g, "\\n");
const bashCommand = `
echo "${escapedLogMessages}" >> ${logPath};
exit 1; # Exit with error code
`;
await execAsyncRemote(serverId, bashCommand);
return;
}
const { COMPOSE_PATH, APPLICATIONS_PATH } = paths(true);
const githubProvider = await findGithubById(githubId);
const basePath = isCompose ? COMPOSE_PATH : APPLICATIONS_PATH;
const outputPath = join(basePath, appName, "code");
const octokit = authGithub(githubProvider);
const token = await getGithubToken(octokit);
const repoclone = `github.com/${owner}/${repository}.git`;
const cloneUrl = `https://oauth2:${token}@${repoclone}`;
const cloneCommand = `
rm -rf ${outputPath};
mkdir -p ${outputPath};
if ! git clone --branch ${branch} --depth 1 --progress ${cloneUrl} ${outputPath} >> ${logPath} 2>&1; then
			echo "❌ [ERROR] Failed to clone the repository ${repoclone}" >> ${logPath};
exit 1;
fi
echo "Cloned ${repoclone} to ${outputPath}: ✅" >> ${logPath};
`;
return cloneCommand;
};
export const cloneRawGithubRepository = async (entity: Compose) => {
const { appName, repository, owner, branch, githubId } = entity;
if (!githubId) {
throw new TRPCError({
code: "NOT_FOUND",
message: "GitHub Provider not found",
});
}
const { COMPOSE_PATH } = paths();
const githubProvider = await findGithubById(githubId);
const basePath = COMPOSE_PATH;
const outputPath = join(basePath, appName, "code");
const octokit = authGithub(githubProvider);
const token = await getGithubToken(octokit);
const repoclone = `github.com/${owner}/${repository}.git`;
await recreateDirectory(outputPath);
const cloneUrl = `https://oauth2:${token}@${repoclone}`;
try {
await spawnAsync("git", [
"clone",
"--branch",
branch!,
"--depth",
"1",
cloneUrl,
outputPath,
"--progress",
]);
} catch (error) {
throw error;
}
};
export const cloneRawGithubRepositoryRemote = async (compose: Compose) => {
const { appName, repository, owner, branch, githubId, serverId } = compose;
if (!serverId) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Server not found",
});
}
if (!githubId) {
throw new TRPCError({
code: "NOT_FOUND",
message: "GitHub Provider not found",
});
}
const { COMPOSE_PATH } = paths(true);
const githubProvider = await findGithubById(githubId);
const basePath = COMPOSE_PATH;
const outputPath = join(basePath, appName, "code");
const octokit = authGithub(githubProvider);
const token = await getGithubToken(octokit);
const repoclone = `github.com/${owner}/${repository}.git`;
const cloneUrl = `https://oauth2:${token}@${repoclone}`;
try {
const command = `
rm -rf ${outputPath};
git clone --branch ${branch} --depth 1 ${cloneUrl} ${outputPath}
`;
await execAsyncRemote(serverId, command);
} catch (error) {
throw error;
}
};
export const getGithubRepositories = async (githubId?: string) => {
if (!githubId) {
return [];
}
const githubProvider = await findGithubById(githubId);
const octokit = new Octokit({
authStrategy: createAppAuth,
auth: {
appId: githubProvider.githubAppId,
privateKey: githubProvider.githubPrivateKey,
installationId: githubProvider.githubInstallationId,
},
});
const repositories = (await octokit.paginate(
octokit.rest.apps.listReposAccessibleToInstallation,
)) as unknown as Awaited<
ReturnType<typeof octokit.rest.apps.listReposAccessibleToInstallation>
>["data"]["repositories"];
return repositories;
};
export const getGithubBranches = async (
input: typeof apiFindGithubBranches._type,
) => {
if (!input.githubId) {
return [];
}
const githubProvider = await findGithubById(input.githubId);
const octokit = new Octokit({
authStrategy: createAppAuth,
auth: {
appId: githubProvider.githubAppId,
privateKey: githubProvider.githubPrivateKey,
installationId: githubProvider.githubInstallationId,
},
});
const branches = (await octokit.paginate(octokit.rest.repos.listBranches, {
owner: input.owner,
repo: input.repo,
})) as unknown as Awaited<
ReturnType<typeof octokit.rest.repos.listBranches>
>["data"];
return branches;
};

View File

@@ -0,0 +1,447 @@
import { createWriteStream } from "node:fs";
import { join } from "node:path";
import { paths } from "@/server/constants";
import type { apiGitlabTestConnection } from "@/server/db/schema";
import type { Compose } from "@/server/services/compose";
import {
type Gitlab,
findGitlabById,
updateGitlab,
} from "@/server/services/gitlab";
import type { InferResultType } from "@/server/types/with";
import { TRPCError } from "@trpc/server";
import { recreateDirectory } from "../filesystem/directory";
import { execAsyncRemote } from "../process/execAsync";
import { spawnAsync } from "../process/spawnAsync";
export const refreshGitlabToken = async (gitlabProviderId: string) => {
const gitlabProvider = await findGitlabById(gitlabProviderId);
const currentTime = Math.floor(Date.now() / 1000);
const safetyMargin = 60;
if (
gitlabProvider.expiresAt &&
currentTime + safetyMargin < gitlabProvider.expiresAt
) {
return;
}
const response = await fetch("https://gitlab.com/oauth/token", {
method: "POST",
headers: {
"Content-Type": "application/x-www-form-urlencoded",
},
body: new URLSearchParams({
grant_type: "refresh_token",
refresh_token: gitlabProvider.refreshToken as string,
client_id: gitlabProvider.applicationId as string,
client_secret: gitlabProvider.secret as string,
}),
});
if (!response.ok) {
throw new Error(`Failed to refresh token: ${response.statusText}`);
}
const data = await response.json();
const expiresAt = Math.floor(Date.now() / 1000) + data.expires_in;
console.log("Refreshed token");
await updateGitlab(gitlabProviderId, {
accessToken: data.access_token,
refreshToken: data.refresh_token,
expiresAt,
});
return data;
};
export const haveGitlabRequirements = (gitlabProvider: Gitlab) => {
return !!(gitlabProvider?.accessToken && gitlabProvider?.refreshToken);
};
const getErrorCloneRequirements = (entity: {
gitlabRepository?: string | null;
gitlabOwner?: string | null;
gitlabBranch?: string | null;
gitlabPathNamespace?: string | null;
}) => {
const reasons: string[] = [];
const { gitlabBranch, gitlabOwner, gitlabRepository, gitlabPathNamespace } =
entity;
if (!gitlabRepository) reasons.push("1. Repository not assigned.");
if (!gitlabOwner) reasons.push("2. Owner not specified.");
if (!gitlabBranch) reasons.push("3. Branch not defined.");
if (!gitlabPathNamespace) reasons.push("4. Path namespace not defined.");
return reasons;
};
export type ApplicationWithGitlab = InferResultType<
"applications",
{ gitlab: true }
>;
export type ComposeWithGitlab = InferResultType<"compose", { gitlab: true }>;
export const cloneGitlabRepository = async (
entity: ApplicationWithGitlab | ComposeWithGitlab,
logPath: string,
isCompose = false,
) => {
const writeStream = createWriteStream(logPath, { flags: "a" });
const { appName, gitlabBranch, gitlabId, gitlab, gitlabPathNamespace } =
entity;
if (!gitlabId) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Gitlab Provider not found",
});
}
await refreshGitlabToken(gitlabId);
const requirements = getErrorCloneRequirements(entity);
// Check if requirements are met
if (requirements.length > 0) {
writeStream.write(
`\nGitLab Repository configuration failed for application: ${appName}\n`,
);
writeStream.write("Reasons:\n");
writeStream.write(requirements.join("\n"));
writeStream.end();
throw new TRPCError({
code: "BAD_REQUEST",
message: "Error: GitLab repository information is incomplete.",
});
}
const { COMPOSE_PATH, APPLICATIONS_PATH } = paths();
const basePath = isCompose ? COMPOSE_PATH : APPLICATIONS_PATH;
const outputPath = join(basePath, appName, "code");
await recreateDirectory(outputPath);
const repoclone = `gitlab.com/${gitlabPathNamespace}.git`;
const cloneUrl = `https://oauth2:${gitlab?.accessToken}@${repoclone}`;
try {
		writeStream.write(`\nCloning Repo ${repoclone} to ${outputPath}: ✅\n`);
await spawnAsync(
"git",
[
"clone",
"--branch",
gitlabBranch!,
"--depth",
"1",
cloneUrl,
outputPath,
"--progress",
],
(data) => {
if (writeStream.writable) {
writeStream.write(data);
}
},
);
writeStream.write(`\nCloned ${repoclone}: ✅\n`);
} catch (error) {
		writeStream.write(`ERROR Cloning: ${error}: ❌`);
throw error;
} finally {
writeStream.end();
}
};
export const getGitlabCloneCommand = async (
entity: ApplicationWithGitlab | ComposeWithGitlab,
logPath: string,
isCompose = false,
) => {
const {
appName,
gitlabRepository,
gitlabOwner,
gitlabPathNamespace,
gitlabBranch,
gitlabId,
serverId,
gitlab,
} = entity;
if (!serverId) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Server not found",
});
}
if (!gitlabId) {
const command = `
echo "Error: ❌ Gitlab Provider not found" >> ${logPath};
exit 1;
`;
await execAsyncRemote(serverId, command);
throw new TRPCError({
code: "NOT_FOUND",
message: "Gitlab Provider not found",
});
}
const requirements = getErrorCloneRequirements(entity);
// Build log messages
let logMessages = "";
if (requirements.length > 0) {
logMessages += `\nGitLab Repository configuration failed for application: ${appName}\n`;
logMessages += "Reasons:\n";
logMessages += requirements.join("\n");
const escapedLogMessages = logMessages
.replace(/\\/g, "\\\\")
.replace(/"/g, '\\"')
.replace(/\n/g, "\\n");
const bashCommand = `
echo "${escapedLogMessages}" >> ${logPath};
exit 1; # Exit with error code
`;
await execAsyncRemote(serverId, bashCommand);
return;
}
const { COMPOSE_PATH, APPLICATIONS_PATH } = paths(true);
await refreshGitlabToken(gitlabId);
const basePath = isCompose ? COMPOSE_PATH : APPLICATIONS_PATH;
const outputPath = join(basePath, appName, "code");
await recreateDirectory(outputPath);
const repoclone = `gitlab.com/${gitlabPathNamespace}.git`;
const cloneUrl = `https://oauth2:${gitlab?.accessToken}@${repoclone}`;
const cloneCommand = `
rm -rf ${outputPath};
mkdir -p ${outputPath};
if ! git clone --branch ${gitlabBranch} --depth 1 --progress ${cloneUrl} ${outputPath} >> ${logPath} 2>&1; then
			echo "❌ [ERROR] Failed to clone the repository ${repoclone}" >> ${logPath};
exit 1;
fi
echo "Cloned ${repoclone} to ${outputPath}: ✅" >> ${logPath};
`;
return cloneCommand;
};
export const getGitlabRepositories = async (gitlabId?: string) => {
if (!gitlabId) {
return [];
}
await refreshGitlabToken(gitlabId);
const gitlabProvider = await findGitlabById(gitlabId);
const response = await fetch(
`https://gitlab.com/api/v4/projects?membership=true&owned=true&page=${0}&per_page=${100}`,
{
headers: {
Authorization: `Bearer ${gitlabProvider.accessToken}`,
},
},
);
if (!response.ok) {
throw new TRPCError({
code: "BAD_REQUEST",
message: `Failed to fetch repositories: ${response.statusText}`,
});
}
const repositories = await response.json();
const filteredRepos = repositories.filter((repo: any) => {
const { full_path, kind } = repo.namespace;
const groupName = gitlabProvider.groupName?.toLowerCase();
if (groupName) {
return full_path.toLowerCase().includes(groupName) && kind === "group";
}
return kind === "user";
});
const mappedRepositories = filteredRepos.map((repo: any) => {
return {
id: repo.id,
name: repo.name,
url: repo.path_with_namespace,
owner: {
username: repo.namespace.path,
},
};
});
return mappedRepositories as {
id: number;
name: string;
url: string;
owner: {
username: string;
};
}[];
};
export const getGitlabBranches = async (input: {
id?: number;
gitlabId?: string;
owner: string;
repo: string;
}) => {
if (!input.gitlabId || !input.id || input.id === 0) {
return [];
}
const gitlabProvider = await findGitlabById(input.gitlabId);
const branchesResponse = await fetch(
`https://gitlab.com/api/v4/projects/${input.id}/repository/branches`,
{
headers: {
Authorization: `Bearer ${gitlabProvider.accessToken}`,
},
},
);
if (!branchesResponse.ok) {
throw new Error(`Failed to fetch branches: ${branchesResponse.statusText}`);
}
const branches = await branchesResponse.json();
return branches as {
id: string;
name: string;
commit: {
id: string;
};
}[];
};
export const cloneRawGitlabRepository = async (entity: Compose) => {
const {
appName,
gitlabRepository,
gitlabOwner,
gitlabBranch,
gitlabId,
gitlabPathNamespace,
} = entity;
if (!gitlabId) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Gitlab Provider not found",
});
}
const gitlabProvider = await findGitlabById(gitlabId);
const { COMPOSE_PATH } = paths();
await refreshGitlabToken(gitlabId);
const basePath = COMPOSE_PATH;
const outputPath = join(basePath, appName, "code");
await recreateDirectory(outputPath);
const repoclone = `gitlab.com/${gitlabPathNamespace}.git`;
const cloneUrl = `https://oauth2:${gitlabProvider?.accessToken}@${repoclone}`;
try {
await spawnAsync("git", [
"clone",
"--branch",
gitlabBranch!,
"--depth",
"1",
cloneUrl,
outputPath,
"--progress",
]);
} catch (error) {
throw error;
}
};
export const cloneRawGitlabRepositoryRemote = async (compose: Compose) => {
const { appName, gitlabPathNamespace, branch, gitlabId, serverId } = compose;
if (!serverId) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Server not found",
});
}
if (!gitlabId) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Gitlab Provider not found",
});
}
const gitlabProvider = await findGitlabById(gitlabId);
const { COMPOSE_PATH } = paths(true);
await refreshGitlabToken(gitlabId);
const basePath = COMPOSE_PATH;
const outputPath = join(basePath, appName, "code");
const repoclone = `gitlab.com/${gitlabPathNamespace}.git`;
const cloneUrl = `https://oauth2:${gitlabProvider?.accessToken}@${repoclone}`;
try {
const command = `
rm -rf ${outputPath};
git clone --branch ${branch} --depth 1 ${cloneUrl} ${outputPath}
`;
await execAsyncRemote(serverId, command);
} catch (error) {
throw error;
}
};
export const testGitlabConnection = async (
input: typeof apiGitlabTestConnection._type,
) => {
const { gitlabId, groupName } = input;
if (!gitlabId) {
throw new Error("Gitlab provider not found");
}
await refreshGitlabToken(gitlabId);
const gitlabProvider = await findGitlabById(gitlabId);
const response = await fetch(
`https://gitlab.com/api/v4/projects?membership=true&owned=true&page=${0}&per_page=${100}`,
{
headers: {
Authorization: `Bearer ${gitlabProvider.accessToken}`,
},
},
);
if (!response.ok) {
throw new TRPCError({
code: "BAD_REQUEST",
message: `Failed to fetch repositories: ${response.statusText}`,
});
}
const repositories = await response.json();
const filteredRepos = repositories.filter((repo: any) => {
const { full_path, kind } = repo.namespace;
if (groupName) {
return full_path.toLowerCase().includes(groupName) && kind === "group";
}
return kind === "user";
});
return filteredRepos.length;
};

View File

@@ -0,0 +1,81 @@
import { createWriteStream } from "node:fs";
import { writeFile } from "node:fs/promises";
import { join } from "node:path";
import { paths } from "@/server/constants";
import type { Compose } from "@/server/services/compose";
import { encodeBase64 } from "../docker/utils";
import { recreateDirectory } from "../filesystem/directory";
import { execAsyncRemote } from "../process/execAsync";
export const createComposeFile = async (compose: Compose, logPath: string) => {
const { COMPOSE_PATH } = paths();
const { appName, composeFile } = compose;
const writeStream = createWriteStream(logPath, { flags: "a" });
const outputPath = join(COMPOSE_PATH, appName, "code");
try {
await recreateDirectory(outputPath);
writeStream.write(
`\nCreating File 'docker-compose.yml' to ${outputPath}: ✅\n`,
);
await writeFile(join(outputPath, "docker-compose.yml"), composeFile);
writeStream.write(`\nFile 'docker-compose.yml' created: ✅\n`);
} catch (error) {
writeStream.write(`\nERROR Creating Compose File: ${error}: ❌\n`);
throw error;
} finally {
writeStream.end();
}
};
export const getCreateComposeFileCommand = (
compose: Compose,
logPath: string,
) => {
const { COMPOSE_PATH } = paths(true);
const { appName, composeFile } = compose;
const outputPath = join(COMPOSE_PATH, appName, "code");
const filePath = join(outputPath, "docker-compose.yml");
const encodedContent = encodeBase64(composeFile);
const bashCommand = `
rm -rf ${outputPath};
mkdir -p ${outputPath};
echo "${encodedContent}" | base64 -d > "${filePath}";
echo "File 'docker-compose.yml' created: ✅" >> ${logPath};
`;
return bashCommand;
};
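// Design note: the compose file content is shipped base64-encoded so arbitrary
// YAML (quotes, backticks, shell metacharacters) survives the remote shell
// unmodified and is decoded back into docker-compose.yml on the target host.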
export const createComposeFileRaw = async (compose: Compose) => {
const { COMPOSE_PATH } = paths();
const { appName, composeFile } = compose;
const outputPath = join(COMPOSE_PATH, appName, "code");
const filePath = join(outputPath, "docker-compose.yml");
try {
await recreateDirectory(outputPath);
await writeFile(filePath, composeFile);
} catch (error) {
throw error;
}
};
export const createComposeFileRawRemote = async (compose: Compose) => {
const { COMPOSE_PATH } = paths(true);
const { appName, composeFile, serverId } = compose;
const outputPath = join(COMPOSE_PATH, appName, "code");
const filePath = join(outputPath, "docker-compose.yml");
try {
const encodedContent = encodeBase64(composeFile);
const command = `
rm -rf ${outputPath};
mkdir -p ${outputPath};
echo "${encodedContent}" | base64 -d > "${filePath}";
`;
await execAsyncRemote(serverId, command);
} catch (error) {
throw error;
}
};

View File

@@ -0,0 +1,21 @@
import { docker } from "@/server/constants";
import { findServerById } from "@/server/services/server";
import Dockerode from "dockerode";
export const getRemoteDocker = async (serverId?: string | null) => {
if (!serverId) return docker;
const server = await findServerById(serverId);
if (!server.sshKeyId) return docker;
const dockerode = new Dockerode({
host: server.ipAddress,
port: server.port,
username: server.username,
protocol: "ssh",
// @ts-ignore
sshOptions: {
privateKey: server.sshKey?.privateKey,
},
});
return dockerode;
};

View File

@@ -0,0 +1,250 @@
import fs, { writeFileSync } from "node:fs";
import path from "node:path";
import { paths } from "@/server/constants";
import type { Domain } from "@/server/services/domain";
import { dump, load } from "js-yaml";
import { encodeBase64 } from "../docker/utils";
import { execAsyncRemote } from "../process/execAsync";
import type { FileConfig, HttpLoadBalancerService } from "./file-types";
export const createTraefikConfig = (appName: string) => {
const defaultPort = 3000;
const serviceURLDefault = `http://${appName}:${defaultPort}`;
const domainDefault = `Host(\`${appName}.docker.localhost\`)`;
const config: FileConfig = {
http: {
routers: {
...(process.env.NODE_ENV === "production"
? {}
: {
[`${appName}-router-1`]: {
rule: domainDefault,
service: `${appName}-service-1`,
entryPoints: ["web"],
},
}),
},
services: {
...(process.env.NODE_ENV === "production"
? {}
: {
[`${appName}-service-1`]: {
loadBalancer: {
servers: [{ url: serviceURLDefault }],
passHostHeader: true,
},
},
}),
},
},
};
const yamlStr = dump(config);
const { DYNAMIC_TRAEFIK_PATH } = paths();
fs.mkdirSync(DYNAMIC_TRAEFIK_PATH, { recursive: true });
writeFileSync(
path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`),
yamlStr,
"utf8",
);
};
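/**
 * Deletes the application's dynamic Traefik config file, over SSH when a
 * serverId is given, otherwise from the local filesystem. Missing files are
 * ignored.
 */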
export const removeTraefikConfig = async (
appName: string,
serverId?: string | null,
) => {
try {
const { DYNAMIC_TRAEFIK_PATH } = paths(!!serverId);
const configPath = path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`);
if (serverId) {
await execAsyncRemote(serverId, `rm ${configPath}`);
} else if (fs.existsSync(configPath)) {
await fs.promises.unlink(configPath);
}
} catch (error) {
// ignore: the config file may already be gone
}
};
export const removeTraefikConfigRemote = async (
appName: string,
serverId: string,
) => {
try {
const { DYNAMIC_TRAEFIK_PATH } = paths(true);
const configPath = path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`);
await execAsyncRemote(serverId, `rm ${configPath}`);
} catch (error) {}
};
export const loadOrCreateConfig = (appName: string): FileConfig => {
const { DYNAMIC_TRAEFIK_PATH } = paths();
const configPath = path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`);
if (fs.existsSync(configPath)) {
const yamlStr = fs.readFileSync(configPath, "utf8");
const parsedConfig = (load(yamlStr) as FileConfig) || {
http: { routers: {}, services: {} },
};
return parsedConfig;
}
return { http: { routers: {}, services: {} } };
};
export const loadOrCreateConfigRemote = async (
serverId: string,
appName: string,
) => {
const { DYNAMIC_TRAEFIK_PATH } = paths(true);
const fileConfig: FileConfig = { http: { routers: {}, services: {} } };
const configPath = path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`);
try {
const { stdout } = await execAsyncRemote(serverId, `cat ${configPath}`);
if (!stdout) return fileConfig;
const parsedConfig = (load(stdout) as FileConfig) || {
http: { routers: {}, services: {} },
};
return parsedConfig;
} catch (err) {
return fileConfig;
}
};
export const readConfig = (appName: string) => {
const { DYNAMIC_TRAEFIK_PATH } = paths();
const configPath = path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`);
if (fs.existsSync(configPath)) {
const yamlStr = fs.readFileSync(configPath, "utf8");
return yamlStr;
}
return null;
};
export const readRemoteConfig = async (serverId: string, appName: string) => {
const { DYNAMIC_TRAEFIK_PATH } = paths(true);
const configPath = path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`);
try {
const { stdout } = await execAsyncRemote(serverId, `cat ${configPath}`);
if (!stdout) return null;
return stdout;
} catch (err) {
return null;
}
};
export const readMonitoringConfig = () => {
const { DYNAMIC_TRAEFIK_PATH } = paths();
const configPath = path.join(DYNAMIC_TRAEFIK_PATH, "access.log");
if (fs.existsSync(configPath)) {
const yamlStr = fs.readFileSync(configPath, "utf8");
return yamlStr;
}
return null;
};
export const readConfigInPath = async (pathFile: string, serverId?: string) => {
const configPath = path.join(pathFile);
if (serverId) {
const { stdout } = await execAsyncRemote(serverId, `cat ${configPath}`);
if (!stdout) return null;
return stdout;
}
if (fs.existsSync(configPath)) {
const yamlStr = fs.readFileSync(configPath, "utf8");
return yamlStr;
}
return null;
};
export const writeConfig = (appName: string, traefikConfig: string) => {
try {
const { DYNAMIC_TRAEFIK_PATH } = paths();
const configPath = path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`);
fs.writeFileSync(configPath, traefikConfig, "utf8");
} catch (e) {
console.error("Error saving the YAML config file:", e);
}
};
export const writeConfigRemote = async (
serverId: string,
appName: string,
traefikConfig: string,
) => {
try {
const { DYNAMIC_TRAEFIK_PATH } = paths(true);
const configPath = path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`);
await execAsyncRemote(serverId, `echo '${traefikConfig}' > ${configPath}`);
} catch (e) {
console.error("Error saving the YAML config file:", e);
}
};
export const writeTraefikConfigInPath = async (
pathFile: string,
traefikConfig: string,
serverId?: string,
) => {
try {
const configPath = path.join(pathFile);
if (serverId) {
const encoded = encodeBase64(traefikConfig);
await execAsyncRemote(
serverId,
`echo "${encoded}" | base64 -d > "${configPath}"`,
);
} else {
fs.writeFileSync(configPath, traefikConfig, "utf8");
}
} catch (e) {
console.error("Error saving the YAML config file:", e);
}
};
export const writeTraefikConfig = (
traefikConfig: FileConfig,
appName: string,
) => {
try {
const { DYNAMIC_TRAEFIK_PATH } = paths();
const configPath = path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`);
const yamlStr = dump(traefikConfig);
fs.writeFileSync(configPath, yamlStr, "utf8");
} catch (e) {
console.error("Error saving the YAML config file:", e);
}
};
export const writeTraefikConfigRemote = async (
traefikConfig: FileConfig,
appName: string,
serverId: string,
) => {
try {
const { DYNAMIC_TRAEFIK_PATH } = paths(true);
const configPath = path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`);
const yamlStr = dump(traefikConfig);
await execAsyncRemote(serverId, `echo '${yamlStr}' > ${configPath}`);
} catch (e) {
console.error("Error saving the YAML config file:", e);
}
};
export const createServiceConfig = (
appName: string,
domain: Domain,
): {
loadBalancer: HttpLoadBalancerService;
} => ({
loadBalancer: {
servers: [{ url: `http://${appName}:${domain.port || 80}` }],
passHostHeader: true,
},
});

View File

@@ -0,0 +1,145 @@
import type { Domain } from "@/server/services/domain";
import type { ApplicationNested } from "../builders";
import {
createServiceConfig,
loadOrCreateConfig,
loadOrCreateConfigRemote,
removeTraefikConfig,
removeTraefikConfigRemote,
writeTraefikConfig,
writeTraefikConfigRemote,
} from "./application";
import type { FileConfig, HttpRouter } from "./file-types";
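/**
 * Creates or updates the Traefik router(s) and service for a domain in the
 * application's dynamic config, adding a websecure router when HTTPS is
 * enabled, then persists the config locally or on the remote server.
 */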
export const manageDomain = async (app: ApplicationNested, domain: Domain) => {
const { appName } = app;
let config: FileConfig;
if (app.serverId) {
config = await loadOrCreateConfigRemote(app.serverId, appName);
} else {
config = loadOrCreateConfig(appName);
}
const serviceName = `${appName}-service-${domain.uniqueConfigKey}`;
const routerName = `${appName}-router-${domain.uniqueConfigKey}`;
const routerNameSecure = `${appName}-router-websecure-${domain.uniqueConfigKey}`;
config.http = config.http || { routers: {}, services: {} };
config.http.routers = config.http.routers || {};
config.http.services = config.http.services || {};
config.http.routers[routerName] = await createRouterConfig(
app,
domain,
"web",
);
if (domain.https) {
config.http.routers[routerNameSecure] = await createRouterConfig(
app,
domain,
"websecure",
);
} else {
delete config.http.routers[routerNameSecure];
}
config.http.services[serviceName] = createServiceConfig(appName, domain);
if (app.serverId) {
await writeTraefikConfigRemote(config, appName, app.serverId);
} else {
writeTraefikConfig(config, appName);
}
};
export const removeDomain = async (
application: ApplicationNested,
uniqueKey: number,
) => {
const { appName, serverId } = application;
let config: FileConfig;
if (serverId) {
config = await loadOrCreateConfigRemote(serverId, appName);
} else {
config = loadOrCreateConfig(appName);
}
const routerKey = `${appName}-router-${uniqueKey}`;
const routerSecureKey = `${appName}-router-websecure-${uniqueKey}`;
const serviceKey = `${appName}-service-${uniqueKey}`;
if (config.http?.routers?.[routerKey]) {
delete config.http.routers[routerKey];
}
if (config.http?.routers?.[routerSecureKey]) {
delete config.http.routers[routerSecureKey];
}
if (config.http?.services?.[serviceKey]) {
delete config.http.services[serviceKey];
}
// if this was the last router, remove the whole config file
if (
config?.http?.routers &&
Object.keys(config?.http?.routers).length === 0
) {
if (serverId) {
await removeTraefikConfigRemote(appName, serverId);
} else {
await removeTraefikConfig(appName);
}
} else {
if (serverId) {
await writeTraefikConfigRemote(config, appName, serverId);
} else {
writeTraefikConfig(config, appName);
}
}
};
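/**
 * Builds the router definition for a domain on the given entry point,
 * attaching redirect/auth middlewares and the Let's Encrypt cert resolver
 * where applicable.
 */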
export const createRouterConfig = async (
app: ApplicationNested,
domain: Domain,
entryPoint: "web" | "websecure",
) => {
const { appName, redirects, security } = app;
const { certificateType } = domain;
const { host, path, https, uniqueConfigKey } = domain;
const routerConfig: HttpRouter = {
rule: `Host(\`${host}\`)${path !== null && path !== "/" ? ` && PathPrefix(\`${path}\`)` : ""}`,
service: `${appName}-service-${uniqueConfigKey}`,
middlewares: [],
entryPoints: [entryPoint],
};
if (entryPoint === "web" && https) {
routerConfig.middlewares = ["redirect-to-https"];
}
if ((entryPoint === "websecure" && https) || !https) {
// redirects
for (const redirect of redirects) {
const middlewareName = `redirect-${appName}-${redirect.uniqueConfigKey}`;
routerConfig.middlewares?.push(middlewareName);
}
// security
if (security.length > 0) {
const middlewareName = `auth-${appName}`;
routerConfig.middlewares?.push(middlewareName);
}
}
if (entryPoint === "websecure") {
if (certificateType === "letsencrypt") {
routerConfig.tls = { certResolver: "letsencrypt" };
} else if (certificateType === "none") {
routerConfig.tls = undefined;
}
}
return routerConfig;
};

File diff suppressed because it is too large

View File

@@ -0,0 +1,107 @@
import { existsSync, readFileSync, writeFileSync } from "node:fs";
import { join } from "node:path";
import { paths } from "@/server/constants";
import { dump, load } from "js-yaml";
import type { ApplicationNested } from "../builders";
import { execAsyncRemote } from "../process/execAsync";
import { writeTraefikConfigRemote } from "./application";
import type { FileConfig } from "./file-types";
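/**
 * Attaches a middleware to every router in the given file config, skipping
 * routers that already include it.
 */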
export const addMiddleware = (config: FileConfig, middlewareName: string) => {
if (config.http?.routers) {
const values = Object.keys(config.http.routers);
for (const routerName of values) {
const router = config.http.routers[routerName];
if (router) {
if (!router.middlewares) {
router.middlewares = [];
}
if (!router.middlewares.includes(middlewareName)) {
router.middlewares.push(middlewareName);
}
}
}
}
};
export const deleteMiddleware = (
config: FileConfig,
middlewareName: string,
) => {
if (config.http?.routers) {
const values = Object.keys(config?.http?.routers);
for (const routerName of values) {
const router = config.http.routers[routerName];
if (router?.middlewares) {
router.middlewares = router.middlewares.filter(
(m) => m !== middlewareName,
);
}
}
}
};
export const deleteAllMiddlewares = async (application: ApplicationNested) => {
const config = loadMiddlewares<FileConfig>();
const { security, appName, redirects } = application;
if (config.http?.middlewares) {
if (security.length > 0) {
const middlewareName = `auth-${appName}`;
delete config.http.middlewares[middlewareName];
}
for (const redirect of redirects) {
const middlewareName = `redirect-${appName}-${redirect.uniqueConfigKey}`;
delete config.http.middlewares[middlewareName];
}
}
if (application.serverId) {
await writeTraefikConfigRemote(config, "middlewares", application.serverId);
} else {
writeMiddleware(config);
}
};
export const loadMiddlewares = <T>() => {
const { DYNAMIC_TRAEFIK_PATH } = paths();
const configPath = join(DYNAMIC_TRAEFIK_PATH, "middlewares.yml");
if (!existsSync(configPath)) {
throw new Error(`File not found: ${configPath}`);
}
const yamlStr = readFileSync(configPath, "utf8");
const config = load(yamlStr) as T;
return config;
};
export const loadRemoteMiddlewares = async (serverId: string) => {
const { DYNAMIC_TRAEFIK_PATH } = paths(true);
const configPath = join(DYNAMIC_TRAEFIK_PATH, "middlewares.yml");
try {
const { stdout, stderr } = await execAsyncRemote(
serverId,
`cat ${configPath}`,
);
if (stderr) {
console.error(`Error: ${stderr}`);
throw new Error(`File not found: ${configPath}`);
}
const config = load(stdout) as FileConfig;
return config;
} catch (error) {
throw new Error(`File not found: ${configPath}`);
}
};
export const writeMiddleware = <T>(config: T) => {
const { DYNAMIC_TRAEFIK_PATH } = paths();
const configPath = join(DYNAMIC_TRAEFIK_PATH, "middlewares.yml");
const newYamlContent = dump(config);
writeFileSync(configPath, newYamlContent, "utf8");
};

View File

@@ -0,0 +1,132 @@
import type { Redirect } from "@/server/services/redirect";
import type { ApplicationNested } from "../builders";
import {
loadOrCreateConfig,
loadOrCreateConfigRemote,
writeTraefikConfig,
writeTraefikConfigRemote,
} from "./application";
import type { FileConfig } from "./file-types";
import {
addMiddleware,
deleteMiddleware,
loadMiddlewares,
loadRemoteMiddlewares,
writeMiddleware,
} from "./middleware";
export const updateRedirectMiddleware = async (
application: ApplicationNested,
data: Redirect,
) => {
const { appName, serverId } = application;
let config: FileConfig;
if (serverId) {
config = await loadRemoteMiddlewares(serverId);
} else {
config = loadMiddlewares<FileConfig>();
}
const middlewareName = `redirect-${appName}-${data.uniqueConfigKey}`;
if (config?.http?.middlewares?.[middlewareName]) {
config.http.middlewares[middlewareName] = {
redirectRegex: {
regex: data.regex,
replacement: data.replacement,
permanent: data.permanent,
},
};
}
if (serverId) {
await writeTraefikConfigRemote(config, "middlewares", serverId);
} else {
writeMiddleware(config);
}
};
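/**
 * Registers a redirectRegex middleware for the application and attaches it to
 * the app's routers, persisting both the shared middlewares file and the
 * app's dynamic config.
 */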
export const createRedirectMiddleware = async (
application: ApplicationNested,
data: Redirect,
) => {
const { appName, serverId } = application;
let config: FileConfig;
if (serverId) {
config = await loadRemoteMiddlewares(serverId);
} else {
config = loadMiddlewares<FileConfig>();
}
const middlewareName = `redirect-${appName}-${data.uniqueConfigKey}`;
const newMiddleware = {
[middlewareName]: {
redirectRegex: {
regex: data.regex,
replacement: data.replacement,
permanent: data.permanent,
},
},
};
if (config?.http) {
config.http.middlewares = {
...config.http.middlewares,
...newMiddleware,
};
}
let appConfig: FileConfig;
if (serverId) {
appConfig = await loadOrCreateConfigRemote(serverId, appName);
} else {
appConfig = loadOrCreateConfig(appName);
}
addMiddleware(appConfig, middlewareName);
if (serverId) {
await writeTraefikConfigRemote(config, "middlewares", serverId);
await writeTraefikConfigRemote(appConfig, appName, serverId);
} else {
writeMiddleware(config);
writeTraefikConfig(appConfig, appName);
}
};
export const removeRedirectMiddleware = async (
application: ApplicationNested,
data: Redirect,
) => {
const { appName, serverId } = application;
let config: FileConfig;
if (serverId) {
config = await loadRemoteMiddlewares(serverId);
} else {
config = loadMiddlewares<FileConfig>();
}
const middlewareName = `redirect-${appName}-${data.uniqueConfigKey}`;
if (config?.http?.middlewares?.[middlewareName]) {
delete config.http.middlewares[middlewareName];
}
let appConfig: FileConfig;
if (serverId) {
appConfig = await loadOrCreateConfigRemote(serverId, appName);
} else {
appConfig = loadOrCreateConfig(appName);
}
deleteMiddleware(appConfig, middlewareName);
if (serverId) {
await writeTraefikConfigRemote(config, "middlewares", serverId);
await writeTraefikConfigRemote(appConfig, appName, serverId);
} else {
writeTraefikConfig(appConfig, appName);
writeMiddleware(config);
}
};

View File

@@ -0,0 +1,75 @@
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
import { join } from "node:path";
import { paths } from "@/server/constants";
import type { Registry } from "@/server/services/registry";
import { dump, load } from "js-yaml";
import { removeDirectoryIfExistsContent } from "../filesystem/directory";
import type { FileConfig, HttpRouter } from "./file-types";
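/**
 * Writes the Traefik config for the self-hosted registry (router plus a
 * service on port 5000) into the registry directory, enabling the websecure
 * entry point and Let's Encrypt TLS in production.
 */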
export const manageRegistry = async (registry: Registry) => {
const { REGISTRY_PATH } = paths();
if (!existsSync(REGISTRY_PATH)) {
mkdirSync(REGISTRY_PATH, { recursive: true });
}
const appName = "dokploy-registry";
const config: FileConfig = loadOrCreateConfig();
const serviceName = `${appName}-service`;
const routerName = `${appName}-router`;
config.http = config.http || { routers: {}, services: {} };
config.http.routers = config.http.routers || {};
config.http.services = config.http.services || {};
config.http.routers[routerName] = await createRegistryRouterConfig(registry);
config.http.services[serviceName] = {
loadBalancer: {
servers: [{ url: `http://${appName}:5000` }],
passHostHeader: true,
},
};
const yamlConfig = dump(config);
const configFile = join(REGISTRY_PATH, "registry.yml");
writeFileSync(configFile, yamlConfig);
};
export const removeSelfHostedRegistry = async () => {
const { REGISTRY_PATH } = paths();
await removeDirectoryIfExistsContent(REGISTRY_PATH);
};
const createRegistryRouterConfig = async (registry: Registry) => {
const { registryUrl } = registry;
const routerConfig: HttpRouter = {
rule: `Host(\`${registryUrl}\`)`,
service: "dokploy-registry-service",
middlewares: ["redirect-to-https"],
entryPoints: [
"web",
...(process.env.NODE_ENV === "production" ? ["websecure"] : []),
],
...(process.env.NODE_ENV === "production"
? {
tls: { certResolver: "letsencrypt" },
}
: {}),
};
return routerConfig;
};
const loadOrCreateConfig = (): FileConfig => {
const { REGISTRY_PATH } = paths();
const configPath = join(REGISTRY_PATH, "registry.yml");
if (existsSync(configPath)) {
const yamlStr = readFileSync(configPath, "utf8");
const parsedConfig = (load(yamlStr) as FileConfig) || {
http: { routers: {}, services: {} },
};
return parsedConfig;
}
return { http: { routers: {}, services: {} } };
};

View File

@@ -0,0 +1,129 @@
import type { Security } from "@/server/services/security";
import * as bcrypt from "bcrypt";
import type { ApplicationNested } from "../builders";
import {
loadOrCreateConfig,
loadOrCreateConfigRemote,
writeTraefikConfig,
writeTraefikConfigRemote,
} from "./application";
import type {
BasicAuthMiddleware,
FileConfig,
HttpMiddleware,
} from "./file-types";
import {
addMiddleware,
deleteMiddleware,
loadMiddlewares,
loadRemoteMiddlewares,
writeMiddleware,
} from "./middleware";
export const createSecurityMiddleware = async (
application: ApplicationNested,
data: Security,
) => {
const { appName, serverId } = application;
let config: FileConfig;
if (serverId) {
config = await loadRemoteMiddlewares(serverId);
} else {
config = loadMiddlewares<FileConfig>();
}
const middlewareName = `auth-${appName}`;
const user = `${data.username}:${await bcrypt.hash(data.password, 10)}`;
if (config.http?.middlewares) {
const currentMiddleware = config.http.middlewares[middlewareName];
if (isBasicAuthMiddleware(currentMiddleware)) {
currentMiddleware.basicAuth.users = [
...(currentMiddleware.basicAuth.users || []),
user,
];
} else {
config.http.middlewares[middlewareName] = {
basicAuth: {
removeHeader: true,
users: [user],
},
};
}
}
let appConfig: FileConfig;
if (serverId) {
appConfig = await loadOrCreateConfigRemote(serverId, appName);
} else {
appConfig = loadOrCreateConfig(appName);
}
addMiddleware(appConfig, middlewareName);
if (serverId) {
await writeTraefikConfigRemote(config, "middlewares", serverId);
await writeTraefikConfigRemote(appConfig, appName, serverId);
} else {
writeTraefikConfig(appConfig, appName);
writeMiddleware(config);
}
};
export const removeSecurityMiddleware = async (
application: ApplicationNested,
data: Security,
) => {
const { appName, serverId } = application;
let config: FileConfig;
if (serverId) {
config = await loadRemoteMiddlewares(serverId);
} else {
config = loadMiddlewares<FileConfig>();
}
let appConfig: FileConfig;
if (serverId) {
appConfig = await loadOrCreateConfigRemote(serverId, appName);
} else {
appConfig = loadOrCreateConfig(appName);
}
const middlewareName = `auth-${appName}`;
if (config.http?.middlewares) {
const currentMiddleware = config.http.middlewares[middlewareName];
if (isBasicAuthMiddleware(currentMiddleware)) {
const users = currentMiddleware.basicAuth.users;
const filteredUsers =
users?.filter((user) => {
const [username] = user.split(":");
return username !== data.username;
}) || [];
currentMiddleware.basicAuth.users = filteredUsers;
if (filteredUsers.length === 0) {
if (config?.http?.middlewares?.[middlewareName]) {
delete config.http.middlewares[middlewareName];
}
deleteMiddleware(appConfig, middlewareName);
if (serverId) {
await writeTraefikConfigRemote(appConfig, appName, serverId);
} else {
writeTraefikConfig(appConfig, appName);
}
}
}
}
if (serverId) {
await writeTraefikConfigRemote(config, "middlewares", serverId);
} else {
writeMiddleware(config);
}
};
const isBasicAuthMiddleware = (
middleware: HttpMiddleware | undefined,
): middleware is { basicAuth: BasicAuthMiddleware } => {
return !!middleware && "basicAuth" in middleware;
};

View File

@@ -0,0 +1,574 @@
/* eslint-disable */
export interface MainTraefikConfig {
accessLog?: {
filePath?: string;
format?: string;
filters?: {
statusCodes?: string[];
retryAttempts?: boolean;
minDuration?: string;
[k: string]: unknown;
};
fields?: {
defaultMode?: string;
names?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "[a-zA-Z0-9-_]+".
*/
[k: string]: string;
};
headers?: {
defaultMode?: string;
names?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "[a-zA-Z0-9-_]+".
*/
[k: string]: string;
};
[k: string]: unknown;
};
[k: string]: unknown;
};
bufferingSize?: number;
};
api?: {
insecure?: boolean;
dashboard?: boolean;
debug?: boolean;
};
certificatesResolvers?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "[a-zA-Z0-9-_]+".
*/
[k: string]: {
acme?: {
email?: string;
caServer?: string;
certificatesDuration?: number;
preferredChain?: string;
storage?: string;
keyType?: string;
eab?: {
kid?: string;
hmacEncoded?: string;
[k: string]: unknown;
};
dnsChallenge?: {
provider?: string;
delayBeforeCheck?: string;
resolvers?: string[];
disablePropagationCheck?: boolean;
[k: string]: unknown;
};
httpChallenge?: {
entryPoint?: string;
[k: string]: unknown;
};
tlsChallenge?: {
[k: string]: unknown;
};
[k: string]: unknown;
};
};
};
entryPoints?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "[a-zA-Z0-9-_]+".
*/
[k: string]: {
address?: string;
transport?: {
lifeCycle?: {
requestAcceptGraceTimeout?: string;
graceTimeOut?: string;
[k: string]: unknown;
};
respondingTimeouts?: {
readTimeout?: string;
writeTimeout?: string;
idleTimeout?: string;
[k: string]: unknown;
};
[k: string]: unknown;
};
proxyProtocol?: {
insecure?: boolean;
trustedIPs?: string[];
[k: string]: unknown;
};
forwardedHeaders?: {
insecure?: boolean;
trustedIPs?: string[];
[k: string]: unknown;
};
http?: {
redirections?: {
entryPoint?: {
to?: string;
scheme?: string;
permanent?: boolean;
priority?: number;
[k: string]: unknown;
};
[k: string]: unknown;
};
middlewares?: string[];
tls?: {
options?: string;
certResolver?: string;
domains?: {
main?: string;
sans?: string[];
[k: string]: unknown;
}[];
[k: string]: unknown;
};
[k: string]: unknown;
};
http2?: {
maxConcurrentStreams?: number;
[k: string]: unknown;
};
http3?: {
advertisedPort?: number;
[k: string]: unknown;
};
udp?: {
timeout?: string;
[k: string]: unknown;
};
};
};
experimental?: {
kubernetesGateway?: boolean;
http3?: boolean;
hub?: boolean;
plugins?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "[a-zA-Z0-9-_]+".
*/
[k: string]: {
moduleName?: string;
version?: string;
};
};
localPlugins?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "[a-zA-Z0-9-_]+".
*/
[k: string]: {
moduleName?: string;
};
};
};
global?: {
checkNewVersion?: boolean;
sendAnonymousUsage?: boolean;
};
hostResolver?: {
cnameFlattening?: boolean;
resolvConfig?: string;
resolvDepth?: number;
};
hub?: {
tls?: {
insecure?: boolean;
ca?: string;
cert?: string;
key?: string;
[k: string]: unknown;
};
};
log?: {
level?: string;
filePath?: string;
format?: string;
};
metrics?: {
prometheus?: {
buckets?: number[];
addEntryPointsLabels?: boolean;
addRoutersLabels?: boolean;
addServicesLabels?: boolean;
entryPoint?: string;
manualRouting?: boolean;
};
datadog?: {
address?: string;
pushInterval?: string;
addEntryPointsLabels?: boolean;
addRoutersLabels?: boolean;
addServicesLabels?: boolean;
prefix?: string;
};
statsD?: {
address?: string;
pushInterval?: string;
addEntryPointsLabels?: boolean;
addRoutersLabels?: boolean;
addServicesLabels?: boolean;
prefix?: string;
};
influxDB?: {
address?: string;
protocol?: string;
pushInterval?: string;
database?: string;
retentionPolicy?: string;
username?: string;
password?: string;
addEntryPointsLabels?: boolean;
addRoutersLabels?: boolean;
addServicesLabels?: boolean;
additionalLabels?: {
[k: string]: unknown;
};
};
influxDB2?: {
address?: string;
token?: string;
pushInterval?: string;
org?: string;
bucket?: string;
addEntryPointsLabels?: boolean;
addRoutersLabels?: boolean;
addServicesLabels?: boolean;
additionalLabels?: {
[k: string]: unknown;
};
};
};
pilot?: {
token?: string;
dashboard?: boolean;
};
ping?: {
entryPoint?: string;
manualRouting?: boolean;
terminatingStatusCode?: number;
};
providers?: {
providersThrottleDuration?: string;
docker?: {
allowEmptyServices?: boolean;
constraints?: string;
defaultRule?: string;
endpoint?: string;
exposedByDefault?: boolean;
httpClientTimeout?: number;
network?: string;
swarmMode?: boolean;
swarmModeRefreshSeconds?: string;
tls?: {
ca?: string;
caOptional?: boolean;
cert?: string;
key?: string;
insecureSkipVerify?: boolean;
};
useBindPortIP?: boolean;
watch?: boolean;
};
file?: {
directory?: string;
watch?: boolean;
filename?: string;
debugLogGeneratedTemplate?: boolean;
};
marathon?: {
constraints?: string;
trace?: boolean;
watch?: boolean;
endpoint?: string;
defaultRule?: string;
exposedByDefault?: boolean;
dcosToken?: string;
tls?: {
ca?: string;
caOptional?: boolean;
cert?: string;
key?: string;
insecureSkipVerify?: boolean;
};
dialerTimeout?: string;
responseHeaderTimeout?: string;
tlsHandshakeTimeout?: string;
keepAlive?: string;
forceTaskHostname?: boolean;
basic?: {
httpBasicAuthUser?: string;
httpBasicPassword?: string;
};
respectReadinessChecks?: boolean;
};
kubernetesIngress?: {
endpoint?: string;
token?: string;
certAuthFilePath?: string;
namespaces?: string[];
labelSelector?: string;
ingressClass?: string;
throttleDuration?: string;
allowEmptyServices?: boolean;
allowExternalNameServices?: boolean;
ingressEndpoint?: {
ip?: string;
hostname?: string;
publishedService?: string;
};
};
kubernetesCRD?: {
endpoint?: string;
token?: string;
certAuthFilePath?: string;
namespaces?: string[];
allowCrossNamespace?: boolean;
allowExternalNameServices?: boolean;
labelSelector?: string;
ingressClass?: string;
throttleDuration?: string;
allowEmptyServices?: boolean;
};
kubernetesGateway?: {
endpoint?: string;
token?: string;
certAuthFilePath?: string;
namespaces?: string[];
labelSelector?: string;
throttleDuration?: string;
};
rest?: {
insecure?: boolean;
};
rancher?: {
constraints?: string;
watch?: boolean;
defaultRule?: string;
exposedByDefault?: boolean;
enableServiceHealthFilter?: boolean;
refreshSeconds?: number;
intervalPoll?: boolean;
prefix?: string;
};
consulCatalog?: {
constraints?: string;
prefix?: string;
refreshInterval?: string;
requireConsistent?: boolean;
stale?: boolean;
cache?: boolean;
exposedByDefault?: boolean;
defaultRule?: string;
connectAware?: boolean;
connectByDefault?: boolean;
serviceName?: string;
namespace?: string;
namespaces?: string[];
watch?: boolean;
endpoint?: {
address?: string;
scheme?: string;
datacenter?: string;
token?: string;
endpointWaitTime?: string;
tls?: {
ca?: string;
caOptional?: boolean;
cert?: string;
key?: string;
insecureSkipVerify?: boolean;
};
httpAuth?: {
username?: string;
password?: string;
};
};
[k: string]: unknown;
};
nomad?: {
constraints?: string;
prefix?: string;
refreshInterval?: string;
stale?: boolean;
exposedByDefault?: boolean;
defaultRule?: string;
namespace?: string;
endpoint?: {
address?: string;
region?: string;
token?: string;
endpointWaitTime?: string;
tls?: {
ca?: string;
caOptional?: boolean;
cert?: string;
key?: string;
insecureSkipVerify?: boolean;
};
};
};
ecs?: {
constraints?: string;
exposedByDefault?: boolean;
ecsAnywhere?: boolean;
refreshSeconds?: number;
defaultRule?: string;
clusters?: string[];
autoDiscoverClusters?: boolean;
region?: string;
accessKeyID?: string;
secretAccessKey?: string;
};
consul?: {
rootKey?: string;
endpoints?: string[];
token?: string;
namespace?: string;
namespaces?: string[];
tls?: {
ca?: string;
caOptional?: boolean;
cert?: string;
key?: string;
insecureSkipVerify?: boolean;
};
};
etcd?: {
rootKey?: string;
endpoints?: string[];
username?: string;
password?: string;
tls?: {
ca?: string;
caOptional?: boolean;
cert?: string;
key?: string;
insecureSkipVerify?: boolean;
};
};
zooKeeper?: {
rootKey?: string;
endpoints?: string[];
username?: string;
password?: string;
};
redis?: {
rootKey?: string;
endpoints?: string[];
username?: string;
password?: string;
db?: number;
tls?: {
ca?: string;
caOptional?: boolean;
cert?: string;
key?: string;
insecureSkipVerify?: boolean;
};
};
http?: {
endpoint?: string;
pollInterval?: string;
pollTimeout?: string;
tls?: {
ca?: string;
caOptional?: boolean;
cert?: string;
key?: string;
insecureSkipVerify?: boolean;
};
};
plugin?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "[a-zA-Z0-9-_]+".
*/
[k: string]: {
[k: string]: unknown;
};
};
[k: string]: unknown;
};
serversTransport?: {
insecureSkipVerify?: boolean;
rootCAs?: string[];
maxIdleConnsPerHost?: number;
forwardingTimeouts?: {
dialTimeout?: string;
responseHeaderTimeout?: string;
idleConnTimeout?: string;
};
};
tracing?: {
serviceName?: string;
spanNameLimit?: number;
jaeger?: {
samplingServerURL?: string;
samplingType?: string;
samplingParam?: number;
localAgentHostPort?: string;
gen128Bit?: boolean;
propagation?: string;
traceContextHeaderName?: string;
disableAttemptReconnecting?: boolean;
collector?: {
endpoint?: string;
user?: string;
password?: string;
};
};
zipkin?: {
httpEndpoint?: string;
sameSpan?: boolean;
id128Bit?: boolean;
sampleRate?: number;
};
datadog?: {
localAgentHostPort?: string;
globalTag?: string;
/**
* Sets a list of key:value tags on all spans.
*/
globalTags?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "[a-zA-Z0-9-_]+".
*/
[k: string]: string;
};
debug?: boolean;
prioritySampling?: boolean;
traceIDHeaderName?: string;
parentIDHeaderName?: string;
samplingPriorityHeaderName?: string;
bagagePrefixHeaderName?: string;
};
instana?: {
localAgentHost?: string;
localAgentPort?: number;
logLevel?: string;
enableAutoProfile?: boolean;
};
haystack?: {
localAgentHost?: string;
localAgentPort?: number;
globalTag?: string;
traceIDHeaderName?: string;
parentIDHeaderName?: string;
spanIDHeaderName?: string;
baggagePrefixHeaderName?: string;
};
elastic?: {
serverURL?: string;
secretToken?: string;
serviceEnvironment?: string;
};
};
}

View File

@@ -0,0 +1,79 @@
import { existsSync, readFileSync, writeFileSync } from "node:fs";
import { join } from "node:path";
import { paths } from "@/server/constants";
import type { Admin } from "@/server/services/admin";
import { dump, load } from "js-yaml";
import { loadOrCreateConfig, writeTraefikConfig } from "./application";
import type { FileConfig } from "./file-types";
import type { MainTraefikConfig } from "./types";
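/**
 * Updates the Host rule for the Dokploy UI router and, when the admin has
 * Let's Encrypt enabled, adds a websecure router plus an HTTP-to-HTTPS
 * redirect; otherwise the secure router is removed.
 */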
export const updateServerTraefik = (
admin: Admin | null,
newHost: string | null,
) => {
const appName = "dokploy";
const config: FileConfig = loadOrCreateConfig(appName);
config.http = config.http || { routers: {}, services: {} };
config.http.routers = config.http.routers || {};
const currentRouterConfig = config.http.routers[`${appName}-router-app`];
if (currentRouterConfig && newHost) {
currentRouterConfig.rule = `Host(\`${newHost}\`)`;
if (admin?.certificateType === "letsencrypt") {
config.http.routers[`${appName}-router-app-secure`] = {
...currentRouterConfig,
entryPoints: ["websecure"],
tls: { certResolver: "letsencrypt" },
};
currentRouterConfig.middlewares = ["redirect-to-https"];
} else {
delete config.http.routers[`${appName}-router-app-secure`];
currentRouterConfig.middlewares = [];
}
}
writeTraefikConfig(config, appName);
};
export const updateLetsEncryptEmail = (newEmail: string | null) => {
if (!newEmail) return;
const { MAIN_TRAEFIK_PATH } = paths();
const configPath = join(MAIN_TRAEFIK_PATH, "traefik.yml");
const configContent = readFileSync(configPath, "utf8");
const config = load(configContent) as MainTraefikConfig;
if (config?.certificatesResolvers?.letsencrypt?.acme) {
config.certificatesResolvers.letsencrypt.acme.email = newEmail;
} else {
throw new Error("Invalid Let's Encrypt configuration structure.");
}
const newYamlContent = dump(config);
writeFileSync(configPath, newYamlContent, "utf8");
};
export const readMainConfig = () => {
const { MAIN_TRAEFIK_PATH } = paths();
const configPath = join(MAIN_TRAEFIK_PATH, "traefik.yml");
if (existsSync(configPath)) {
const yamlStr = readFileSync(configPath, "utf8");
return yamlStr;
}
return null;
};
export const writeMainConfig = (traefikConfig: string) => {
try {
const { MAIN_TRAEFIK_PATH } = paths();
const configPath = join(MAIN_TRAEFIK_PATH, "traefik.yml");
writeFileSync(configPath, traefikConfig, "utf8");
} catch (e) {
console.error("Error saving the YAML config file:", e);
}
};