refactor(server): remove files

This commit is contained in:
Mauricio Siu
2024-09-29 13:28:24 -06:00
parent e4b998c608
commit 2ae7e562bb
67 changed files with 21 additions and 8678 deletions

View File

@@ -1,151 +0,0 @@
import { randomBytes } from "node:crypto";
import { db } from "@/server/db";
import {
admins,
type apiCreateUserInvitation,
auth,
users,
} from "@/server/db/schema";
import { TRPCError } from "@trpc/server";
import * as bcrypt from "bcrypt";
import { eq } from "drizzle-orm";
export type Admin = typeof admins.$inferSelect;

/**
 * Creates an invitation for a new user: inserts an `auth` row carrying a
 * placeholder password plus a `users` row holding a one-time token that
 * expires 24 hours from now. Runs inside a single transaction.
 */
export const createInvitation = async (
	input: typeof apiCreateUserInvitation._type,
) => {
	await db.transaction(async (tx) => {
		const admin = await findAdmin();

		const [createdAuth] = await tx
			.insert(auth)
			.values({
				email: input.email,
				rol: "user",
				// Placeholder credential; the invited user sets a real
				// password when completing registration.
				password: bcrypt.hashSync("01231203012312", 10),
			})
			.returning();

		if (!createdAuth) {
			throw new TRPCError({
				code: "BAD_REQUEST",
				message: "Error to create the user",
			});
		}

		const expirationDate = new Date();
		expirationDate.setDate(expirationDate.getDate() + 1);

		const invitationToken = randomBytes(32).toString("hex");

		await tx
			.insert(users)
			.values({
				adminId: admin.adminId,
				authId: createdAuth.id,
				token: invitationToken,
				expirationDate: expirationDate.toISOString(),
			})
			.returning();
	});
};
export const findAdminById = async (adminId: string) => {
const admin = await db.query.admins.findFirst({
where: eq(admins.adminId, adminId),
});
if (!admin) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Admin not found",
});
}
return admin;
};
export const updateAdmin = async (
authId: string,
adminData: Partial<Admin>,
) => {
const admin = await db
.update(admins)
.set({
...adminData,
})
.where(eq(admins.authId, authId))
.returning()
.then((res) => res[0]);
return admin;
};
/** True when at least one admin row exists (used for first-run setup checks). */
export const isAdminPresent = async () => {
	const existing = await db.query.admins.findFirst();
	return existing !== undefined;
};
export const findAdminByAuthId = async (authId: string) => {
const admin = await db.query.admins.findFirst({
where: eq(admins.authId, authId),
});
if (!admin) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Admin not found",
});
}
return admin;
};
/** Returns the first (single-tenant) admin row; throws NOT_FOUND when none exists. */
export const findAdmin = async () => {
	const record = await db.query.admins.findFirst({});
	if (record === undefined) {
		throw new TRPCError({ code: "NOT_FOUND", message: "Admin not found" });
	}
	return record;
};
/**
 * Fetches the invitation/user row for a one-time token, joining the auth
 * record with its password column excluded. Throws NOT_FOUND when the
 * token does not match any user.
 *
 * Fix: `isExpired` previously just mirrored `isRegistered`, ignoring the
 * 24h `expirationDate` written by `createInvitation`. It now reports true
 * when the invitation was already consumed OR its expiration has passed.
 */
export const getUserByToken = async (token: string) => {
	const user = await db.query.users.findFirst({
		where: eq(users.token, token),
		with: {
			auth: {
				columns: {
					password: false,
				},
			},
		},
	});
	if (!user) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Invitation not found",
		});
	}
	// expirationDate is cleared after registration; guard before parsing.
	const expiration = user.expirationDate
		? new Date(user.expirationDate)
		: null;
	return {
		...user,
		isExpired:
			user.isRegistered ||
			(expiration !== null && expiration.getTime() <= Date.now()),
	};
};
/** Deletes the auth row for `authId`; invited-user rows cascade per schema. Resolves to void. */
export const removeUserByAuthId = async (authId: string) => {
	await db.delete(auth).where(eq(auth.id, authId)).returning();
};
/** Public base URL of this Dokploy instance: the configured host over HTTPS, else server IP + PORT over HTTP. */
export const getDokployUrl = async () => {
	const admin = await findAdmin();
	return admin.host
		? `https://${admin.host}`
		: `http://${admin.serverIp}:${process.env.PORT}`;
};

View File

@@ -1,394 +0,0 @@
import { docker } from "@/server/constants";
import { db } from "@/server/db";
import { type apiCreateApplication, applications } from "@/server/db/schema";
import { generateAppName } from "@/server/db/schema";
import { getAdvancedStats } from "@/server/monitoring/utilts";
import {
buildApplication,
getBuildCommand,
mechanizeDockerContainer,
} from "@/server/utils/builders";
import { sendBuildErrorNotifications } from "@/server/utils/notifications/build-error";
import { sendBuildSuccessNotifications } from "@/server/utils/notifications/build-success";
import { execAsyncRemote } from "@/server/utils/process/execAsync";
import {
cloneBitbucketRepository,
getBitbucketCloneCommand,
} from "@/server/utils/providers/bitbucket";
import {
buildDocker,
buildRemoteDocker,
} from "@/server/utils/providers/docker";
import {
cloneGitRepository,
getCustomGitCloneCommand,
} from "@/server/utils/providers/git";
import {
cloneGithubRepository,
getGithubCloneCommand,
} from "@/server/utils/providers/github";
import {
cloneGitlabRepository,
getGitlabCloneCommand,
} from "@/server/utils/providers/gitlab";
import { createTraefikConfig } from "@/server/utils/traefik/application";
import { generatePassword } from "@/templates/utils";
import { TRPCError } from "@trpc/server";
import { eq } from "drizzle-orm";
import { getDokployUrl } from "./admin";
import { createDeployment, updateDeploymentStatus } from "./deployment";
import { validUniqueServerAppName } from "./project";
export type Application = typeof applications.$inferSelect;

/**
 * Creates an application row inside a transaction, suffixing the app name
 * with 6 random characters to avoid collisions.
 *
 * Fix: the old expression `` `${input.appName}-${generatePassword(6)}` ||
 * generateAppName("app") `` could never fall back to a generated name
 * (a template literal is always truthy) and produced names like
 * "-abc123" for empty input. The fallback is now an explicit conditional,
 * and uniqueness is validated for generated names as well.
 */
export const createApplication = async (
	input: typeof apiCreateApplication._type,
) => {
	input.appName = input.appName
		? `${input.appName}-${generatePassword(6)}`
		: generateAppName("app");

	const valid = await validUniqueServerAppName(input.appName);
	if (!valid) {
		throw new TRPCError({
			code: "CONFLICT",
			message: "Application with this 'AppName' already exists",
		});
	}

	return await db.transaction(async (tx) => {
		const newApplication = await tx
			.insert(applications)
			.values({
				...input,
			})
			.returning()
			.then((value) => value[0]);

		if (!newApplication) {
			throw new TRPCError({
				code: "BAD_REQUEST",
				message: "Error to create the application",
			});
		}

		// In development the Traefik config is written directly to disk.
		if (process.env.NODE_ENV === "development") {
			createTraefikConfig(newApplication.appName);
		}

		return newApplication;
	});
};
export const findApplicationById = async (applicationId: string) => {
const application = await db.query.applications.findFirst({
where: eq(applications.applicationId, applicationId),
with: {
project: true,
domains: true,
deployments: true,
mounts: true,
redirects: true,
security: true,
ports: true,
registry: true,
gitlab: true,
github: true,
bitbucket: true,
server: true,
},
});
if (!application) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Application not found",
});
}
return application;
};
export const findApplicationByName = async (appName: string) => {
const application = await db.query.applications.findFirst({
where: eq(applications.appName, appName),
});
return application;
};
export const updateApplication = async (
applicationId: string,
applicationData: Partial<Application>,
) => {
const application = await db
.update(applications)
.set({
...applicationData,
})
.where(eq(applications.applicationId, applicationId))
.returning();
return application[0];
};
export const updateApplicationStatus = async (
applicationId: string,
applicationStatus: Application["applicationStatus"],
) => {
const application = await db
.update(applications)
.set({
applicationStatus: applicationStatus,
})
.where(eq(applications.applicationId, applicationId))
.returning();
return application;
};
/**
 * Runs a full local deployment: creates a deployment record, clones the
 * configured source (where applicable), builds, and records the final
 * status on both the deployment and the application. Sends a success or
 * error notification either way; on failure the error is rethrown after
 * statuses are set to "error".
 */
export const deployApplication = async ({
	applicationId,
	titleLog = "Manual deployment",
	descriptionLog = "",
}: {
	applicationId: string;
	titleLog: string;
	descriptionLog: string;
}) => {
	const application = await findApplicationById(applicationId);
	// Deep link to the deployments tab, embedded in notifications.
	const buildLink = `${await getDokployUrl()}/dashboard/project/${application.projectId}/services/application/${application.applicationId}?tab=deployments`;
	const deployment = await createDeployment({
		applicationId: applicationId,
		title: titleLog,
		description: descriptionLog,
	});
	try {
		// Clone step depends on the source type; "docker" builds from an
		// image and "drop" builds from previously uploaded files, so
		// neither clones a repository.
		if (application.sourceType === "github") {
			await cloneGithubRepository(application, deployment.logPath);
			await buildApplication(application, deployment.logPath);
		} else if (application.sourceType === "gitlab") {
			await cloneGitlabRepository(application, deployment.logPath);
			await buildApplication(application, deployment.logPath);
		} else if (application.sourceType === "bitbucket") {
			await cloneBitbucketRepository(application, deployment.logPath);
			await buildApplication(application, deployment.logPath);
		} else if (application.sourceType === "docker") {
			await buildDocker(application, deployment.logPath);
		} else if (application.sourceType === "git") {
			await cloneGitRepository(application, deployment.logPath);
			await buildApplication(application, deployment.logPath);
		} else if (application.sourceType === "drop") {
			await buildApplication(application, deployment.logPath);
		}
		await updateDeploymentStatus(deployment.deploymentId, "done");
		await updateApplicationStatus(applicationId, "done");
		await sendBuildSuccessNotifications({
			projectName: application.project.name,
			applicationName: application.name,
			applicationType: "application",
			buildLink,
		});
	} catch (error) {
		await updateDeploymentStatus(deployment.deploymentId, "error");
		await updateApplicationStatus(applicationId, "error");
		await sendBuildErrorNotifications({
			projectName: application.project.name,
			applicationName: application.name,
			applicationType: "application",
			// @ts-ignore
			errorMessage: error?.message || "Error to build",
			buildLink,
		});
		console.log(
			"Error on ",
			application.buildType,
			"/",
			application.sourceType,
			error,
		);
		throw error;
	}
	return true;
};
/**
 * Rebuilds an application locally from already-present sources (no fresh
 * clone). Records "done"/"error" on both the deployment and the
 * application; rethrows any build error.
 */
export const rebuildApplication = async ({
	applicationId,
	titleLog = "Rebuild deployment",
	descriptionLog = "",
}: {
	applicationId: string;
	titleLog: string;
	descriptionLog: string;
}) => {
	const application = await findApplicationById(applicationId);
	const deployment = await createDeployment({
		applicationId: applicationId,
		title: titleLog,
		description: descriptionLog,
	});
	try {
		switch (application.sourceType) {
			case "github":
			case "gitlab":
			case "bitbucket":
			case "git":
			case "drop":
				await buildApplication(application, deployment.logPath);
				break;
			case "docker":
				await buildDocker(application, deployment.logPath);
				break;
		}
		await updateDeploymentStatus(deployment.deploymentId, "done");
		await updateApplicationStatus(applicationId, "done");
	} catch (error) {
		await updateDeploymentStatus(deployment.deploymentId, "error");
		await updateApplicationStatus(applicationId, "error");
		throw error;
	}
	return true;
};
/**
 * Deploys an application on its remote server: assembles a single shell
 * script (clone + build), executes it over the remote exec channel, then
 * (re)creates the Docker container. Statuses and notifications mirror
 * deployApplication. No-op build when `serverId` is unset, but statuses
 * are still recorded.
 */
export const deployRemoteApplication = async ({
	applicationId,
	titleLog = "Manual deployment",
	descriptionLog = "",
}: {
	applicationId: string;
	titleLog: string;
	descriptionLog: string;
}) => {
	const application = await findApplicationById(applicationId);
	// Deep link to the deployments tab, embedded in notifications.
	const buildLink = `${await getDokployUrl()}/dashboard/project/${application.projectId}/services/application/${application.applicationId}?tab=deployments`;
	const deployment = await createDeployment({
		applicationId: applicationId,
		title: titleLog,
		description: descriptionLog,
	});
	try {
		if (application.serverId) {
			// Abort the whole remote script at the first failing step.
			let command = "set -e;";
			if (application.sourceType === "github") {
				command += await getGithubCloneCommand(application, deployment.logPath);
			} else if (application.sourceType === "gitlab") {
				command += await getGitlabCloneCommand(application, deployment.logPath);
			} else if (application.sourceType === "bitbucket") {
				command += await getBitbucketCloneCommand(
					application,
					deployment.logPath,
				);
			} else if (application.sourceType === "git") {
				command += await getCustomGitCloneCommand(
					application,
					deployment.logPath,
				);
			} else if (application.sourceType === "docker") {
				command += await buildRemoteDocker(application, deployment.logPath);
			}
			// Docker sources already appended their build step above.
			if (application.sourceType !== "docker") {
				command += getBuildCommand(application, deployment.logPath);
			}
			await execAsyncRemote(application.serverId, command);
			await mechanizeDockerContainer(application);
		}
		await updateDeploymentStatus(deployment.deploymentId, "done");
		await updateApplicationStatus(applicationId, "done");
		await sendBuildSuccessNotifications({
			projectName: application.project.name,
			applicationName: application.name,
			applicationType: "application",
			buildLink,
		});
	} catch (error) {
		await updateDeploymentStatus(deployment.deploymentId, "error");
		await updateApplicationStatus(applicationId, "error");
		await sendBuildErrorNotifications({
			projectName: application.project.name,
			applicationName: application.name,
			applicationType: "application",
			// @ts-ignore
			errorMessage: error?.message || "Error to build",
			buildLink,
		});
		console.log(
			"Error on ",
			application.buildType,
			"/",
			application.sourceType,
			error,
		);
		throw error;
	}
	return true;
};
/**
 * Rebuilds an application on its remote server from already-present
 * sources; docker-sourced apps skip the build command and only have the
 * container recreated. Records statuses like the other deploy helpers.
 */
export const rebuildRemoteApplication = async ({
	applicationId,
	titleLog = "Rebuild deployment",
	descriptionLog = "",
}: {
	applicationId: string;
	titleLog: string;
	descriptionLog: string;
}) => {
	const application = await findApplicationById(applicationId);
	const deployment = await createDeployment({
		applicationId: applicationId,
		title: titleLog,
		description: descriptionLog,
	});
	try {
		if (application.serverId) {
			if (application.sourceType !== "docker") {
				const script = `set -e;${getBuildCommand(application, deployment.logPath)}`;
				await execAsyncRemote(application.serverId, script);
			}
			await mechanizeDockerContainer(application);
		}
		await updateDeploymentStatus(deployment.deploymentId, "done");
		await updateApplicationStatus(applicationId, "done");
	} catch (error) {
		await updateDeploymentStatus(deployment.deploymentId, "error");
		await updateApplicationStatus(applicationId, "error");
		throw error;
	}
	return true;
};
/**
 * Returns advanced stats for the app's running swarm container, or null
 * when no matching container is currently running.
 */
export const getApplicationStats = async (appName: string) => {
	const containers = await docker.listContainers({
		filters: JSON.stringify({
			status: ["running"],
			label: [`com.docker.swarm.service.name=${appName}`],
		}),
	});
	const [container] = containers;
	if (!container || container.State !== "running") {
		return null;
	}
	return await getAdvancedStats(appName);
};

View File

@@ -1,180 +0,0 @@
import { randomBytes } from "node:crypto";
import { db } from "@/server/db";
import {
admins,
type apiCreateAdmin,
type apiCreateUser,
auth,
users,
} from "@/server/db/schema";
import { getPublicIpWithFallback } from "@/server/wss/terminal";
import { TRPCError } from "@trpc/server";
import * as bcrypt from "bcrypt";
import { eq } from "drizzle-orm";
import encode from "hi-base32";
import { TOTP } from "otpauth";
import QRCode from "qrcode";
export type Auth = typeof auth.$inferSelect;

/**
 * Registers an admin: creates the auth row (bcrypt-hashed password) and
 * its admin profile pointing at the detected public server IP, all in
 * one transaction. Returns the new auth row.
 */
export const createAdmin = async (input: typeof apiCreateAdmin._type) => {
	return await db.transaction(async (tx) => {
		const [newAuth] = await tx
			.insert(auth)
			.values({
				email: input.email,
				password: bcrypt.hashSync(input.password, 10),
				rol: "admin",
			})
			.returning();
		if (!newAuth) {
			throw new TRPCError({
				code: "BAD_REQUEST",
				message: "Error to create the user",
			});
		}
		await tx
			.insert(admins)
			.values({
				authId: newAuth.id,
				serverIp: await getPublicIpWithFallback(),
			})
			.returning();
		return newAuth;
	});
};
/**
 * Completes an invited user's registration: sets their real password on
 * the auth row, then marks the invitation row (matched by token) as
 * registered and clears its expiration. Returns the updated user row
 * (undefined when the token matched nothing).
 */
export const createUser = async (input: typeof apiCreateUser._type) => {
	return await db.transaction(async (tx) => {
		const [updatedAuth] = await tx
			.update(auth)
			.set({ password: bcrypt.hashSync(input.password, 10) })
			.where(eq(auth.id, input.id))
			.returning();
		if (!updatedAuth) {
			throw new TRPCError({
				code: "BAD_REQUEST",
				message: "Error to create the user",
			});
		}
		const [user] = await tx
			.update(users)
			.set({
				isRegistered: true,
				expirationDate: undefined,
			})
			.where(eq(users.token, input.token))
			.returning();
		return user;
	});
};
/** Looks up an auth record (including password hash) by email; throws NOT_FOUND when absent. */
export const findAuthByEmail = async (email: string) => {
	const record = await db.query.auth.findFirst({
		where: eq(auth.email, email),
	});
	if (record === undefined) {
		throw new TRPCError({ code: "NOT_FOUND", message: "Auth not found" });
	}
	return record;
};
/** Looks up an auth record by id with the password column excluded; throws NOT_FOUND when absent. */
export const findAuthById = async (authId: string) => {
	const record = await db.query.auth.findFirst({
		where: eq(auth.id, authId),
		columns: {
			password: false,
		},
	});
	if (record === undefined) {
		throw new TRPCError({ code: "NOT_FOUND", message: "Auth not found" });
	}
	return record;
};
/** Applies a partial update to an auth row; returns the updated row (or undefined when no row matched). */
export const updateAuthById = async (
	authId: string,
	authData: Partial<Auth>,
) => {
	const [updated] = await db
		.update(auth)
		.set({ ...authData })
		.where(eq(auth.id, authId))
		.returning();
	return updated;
};
/**
 * Generates a fresh TOTP secret for the given auth record and renders a
 * QR code (data URL) for authenticator enrollment.
 * Local renamed from `auth` to avoid shadowing the imported table.
 */
export const generate2FASecret = async (authId: string) => {
	const authRecord = await findAuthById(authId);
	const secret = generateBase32Secret();
	const totp = new TOTP({
		issuer: "Dokploy",
		label: `${authRecord?.email}`,
		algorithm: "SHA1",
		digits: 6,
		secret,
	});
	const qrCodeUrl = await QRCode.toDataURL(totp.toString());
	return {
		qrCodeUrl,
		secret,
	};
};
/**
 * Validates a 6-digit TOTP pin against the stored secret; throws
 * BAD_REQUEST on mismatch, otherwise returns the auth record unchanged.
 */
export const verify2FA = async (
	auth: Omit<Auth, "password">,
	secret: string,
	pin: string,
) => {
	const totp = new TOTP({
		issuer: "Dokploy",
		label: `${auth?.email}`,
		algorithm: "SHA1",
		digits: 6,
		secret,
	});
	// validate() returns null when the token doesn't match within the window.
	if (totp.validate({ token: pin }) === null) {
		throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid 2FA code" });
	}
	return auth;
};
/** 15 random bytes → base32, padding stripped, truncated to 24 chars. */
const generateBase32Secret = () => {
	return encode.encode(randomBytes(15)).replace(/=/g, "").substring(0, 24);
};

View File

@@ -1,71 +0,0 @@
import { db } from "@/server/db";
import { type apiCreateBackup, backups } from "@/server/db/schema";
import { TRPCError } from "@trpc/server";
import { eq } from "drizzle-orm";
export type Backup = typeof backups.$inferSelect;
export type BackupSchedule = Awaited<ReturnType<typeof findBackupById>>;

/** Inserts a backup schedule row; throws BAD_REQUEST when the insert yields nothing. */
export const createBackup = async (input: typeof apiCreateBackup._type) => {
	const [newBackup] = await db
		.insert(backups)
		.values({ ...input })
		.returning();
	if (!newBackup) {
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: "Error to create the Backup",
		});
	}
	return newBackup;
};
export const findBackupById = async (backupId: string) => {
const backup = await db.query.backups.findFirst({
where: eq(backups.backupId, backupId),
with: {
postgres: true,
mysql: true,
mariadb: true,
mongo: true,
destination: true,
},
});
if (!backup) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Backup not found",
});
}
return backup;
};
/** Applies a partial update to a backup row; returns the updated row (or undefined when no row matched). */
export const updateBackupById = async (
	backupId: string,
	backupData: Partial<Backup>,
) => {
	const [updated] = await db
		.update(backups)
		.set({ ...backupData })
		.where(eq(backups.backupId, backupId))
		.returning();
	return updated;
};
/** Deletes a backup row by id; returns the deleted row (or undefined when nothing matched). */
export const removeBackupById = async (backupId: string) => {
	const [removed] = await db
		.delete(backups)
		.where(eq(backups.backupId, backupId))
		.returning();
	return removed;
};

View File

@@ -1,88 +0,0 @@
import { db } from "@/server/db";
import {
type apiCreateBitbucket,
type apiUpdateBitbucket,
bitbucket,
gitProvider,
} from "@/server/db/schema";
import { TRPCError } from "@trpc/server";
import { eq } from "drizzle-orm";
export type Bitbucket = typeof bitbucket.$inferSelect;

/**
 * Creates a Bitbucket provider in one transaction: first the generic
 * gitProvider row, then the bitbucket-specific row referencing it.
 *
 * Fix: the created bitbucket row was previously computed and discarded,
 * so the function always resolved to undefined; it now returns the
 * inserted row (backward-compatible — callers that ignored the result
 * are unaffected).
 */
export const createBitbucket = async (
	input: typeof apiCreateBitbucket._type,
) => {
	return await db.transaction(async (tx) => {
		const newGitProvider = await tx
			.insert(gitProvider)
			.values({
				providerType: "bitbucket",
				authId: input.authId,
				name: input.name,
			})
			.returning()
			.then((response) => response[0]);
		if (!newGitProvider) {
			throw new TRPCError({
				code: "BAD_REQUEST",
				message: "Error to create the git provider",
			});
		}
		return await tx
			.insert(bitbucket)
			.values({
				...input,
				gitProviderId: newGitProvider.gitProviderId,
			})
			.returning()
			.then((response) => response[0]);
	});
};
/** Loads a bitbucket provider together with its generic gitProvider row; throws NOT_FOUND when missing. */
export const findBitbucketById = async (bitbucketId: string) => {
	const provider = await db.query.bitbucket.findFirst({
		where: eq(bitbucket.bitbucketId, bitbucketId),
		with: {
			gitProvider: true,
		},
	});
	if (provider === undefined) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Bitbucket Provider not found",
		});
	}
	return provider;
};
/**
 * Updates a bitbucket provider; when the name changes, the display name
 * on the parent gitProvider row is kept in sync within the same
 * transaction. Returns the updated bitbucket row.
 */
export const updateBitbucket = async (
	bitbucketId: string,
	input: typeof apiUpdateBitbucket._type,
) => {
	return await db.transaction(async (tx) => {
		const [updated] = await tx
			.update(bitbucket)
			.set({ ...input })
			.where(eq(bitbucket.bitbucketId, bitbucketId))
			.returning();
		if (input.name) {
			await tx
				.update(gitProvider)
				.set({ name: input.name })
				.where(eq(gitProvider.gitProviderId, input.gitProviderId))
				.returning();
		}
		return updated;
	});
};

View File

@@ -1,108 +0,0 @@
import fs from "node:fs";
import path from "node:path";
import { paths } from "@/server/constants";
import { db } from "@/server/db";
import { type apiCreateCertificate, certificates } from "@/server/db/schema";
import { removeDirectoryIfExistsContent } from "@/server/utils/filesystem/directory";
import { TRPCError } from "@trpc/server";
import { eq } from "drizzle-orm";
import { dump } from "js-yaml";
import type { z } from "zod";
export type Certificate = typeof certificates.$inferSelect;
export const findCertificateById = async (certificateId: string) => {
const certificate = await db.query.certificates.findFirst({
where: eq(certificates.certificateId, certificateId),
});
if (!certificate) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Certificate not found",
});
}
return certificate;
};
export const createCertificate = async (
certificateData: z.infer<typeof apiCreateCertificate>,
) => {
const certificate = await db
.insert(certificates)
.values({
...certificateData,
})
.returning();
if (!certificate || certificate[0] === undefined) {
throw new TRPCError({
code: "BAD_REQUEST",
message: "Failed to create the certificate",
});
}
const cer = certificate[0];
createCertificateFiles(cer);
return cer;
};
/** Deletes a certificate's on-disk directory and its database row. */
export const removeCertificateById = async (certificateId: string) => {
	const { CERTIFICATES_PATH } = paths();
	const certificate = await findCertificateById(certificateId);
	// Remove files first so a DB failure doesn't orphan them.
	await removeDirectoryIfExistsContent(
		path.join(CERTIFICATES_PATH, certificate.certificatePath),
	);
	const result = await db
		.delete(certificates)
		.where(eq(certificates.certificateId, certificateId))
		.returning();
	if (!result) {
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: "Failed to delete the certificate",
		});
	}
	return result;
};
/** Lists every certificate row. */
export const findCertificates = async () => {
	return db.query.certificates.findMany();
};
/**
 * Writes a certificate's files to its directory under CERTIFICATES_PATH:
 * the chain (chain.crt), the private key (privkey.key), and a Traefik
 * TLS config (certificate.yml) whose paths are prefixed with
 * /etc/traefik — presumably the mount point of the certificates
 * directory inside the Traefik container; TODO confirm against the
 * Traefik container's volume mapping.
 */
const createCertificateFiles = (certificate: Certificate) => {
	const { CERTIFICATES_PATH } = paths();
	const dockerPath = "/etc/traefik";
	const certDir = path.join(CERTIFICATES_PATH, certificate.certificatePath);
	const crtPath = path.join(certDir, "chain.crt");
	const keyPath = path.join(certDir, "privkey.key");
	// Container-side paths referenced from the generated Traefik config.
	const chainPath = path.join(dockerPath, certDir, "chain.crt");
	const keyPathDocker = path.join(dockerPath, certDir, "privkey.key");
	if (!fs.existsSync(certDir)) {
		fs.mkdirSync(certDir, { recursive: true });
	}
	fs.writeFileSync(crtPath, certificate.certificateData);
	fs.writeFileSync(keyPath, certificate.privateKey);
	const traefikConfig = {
		tls: {
			certificates: [
				{
					certFile: chainPath,
					keyFile: keyPathDocker,
				},
			],
		},
	};
	const yamlConfig = dump(traefikConfig);
	const configFile = path.join(certDir, "certificate.yml");
	fs.writeFileSync(configFile, yamlConfig);
};

View File

@@ -1,41 +0,0 @@
/**
 * Shape of a swarm node entry as consumed by this codebase (PascalCase
 * field names matching the Docker Engine API's node objects).
 */
export interface DockerNode {
	ID: string;
	Version: {
		// NOTE(review): presumably the object version index used for
		// optimistic concurrency on node updates — confirm against the
		// Docker Engine API reference.
		Index: number;
	};
	CreatedAt: string;
	UpdatedAt: string;
	// Desired configuration of the node.
	Spec: {
		Name: string;
		Labels: Record<string, string>;
		Role: "worker" | "manager";
		Availability: "active" | "pause" | "drain";
	};
	// Static information the node reports about itself.
	Description: {
		Hostname: string;
		Platform: {
			Architecture: string;
			OS: string;
		};
		Resources: {
			NanoCPUs: number;
			MemoryBytes: number;
		};
		Engine: {
			EngineVersion: string;
			Plugins: Array<{
				Type: string;
				Name: string;
			}>;
		};
	};
	// Current observed state of the node.
	Status: {
		State: "unknown" | "down" | "ready" | "disconnected";
		Message: string;
		Addr: string;
	};
	// Present only on manager nodes.
	ManagerStatus?: {
		Leader: boolean;
		Addr: string;
	};
}

View File

@@ -1,467 +0,0 @@
import { join } from "node:path";
import { paths } from "@/server/constants";
import { db } from "@/server/db";
import { type apiCreateCompose, compose } from "@/server/db/schema";
import { generateAppName } from "@/server/db/schema";
import {
buildCompose,
getBuildComposeCommand,
} from "@/server/utils/builders/compose";
import { randomizeSpecificationFile } from "@/server/utils/docker/compose";
import {
cloneCompose,
cloneComposeRemote,
loadDockerCompose,
loadDockerComposeRemote,
} from "@/server/utils/docker/domain";
import type { ComposeSpecification } from "@/server/utils/docker/types";
import { sendBuildErrorNotifications } from "@/server/utils/notifications/build-error";
import { sendBuildSuccessNotifications } from "@/server/utils/notifications/build-success";
import { execAsync, execAsyncRemote } from "@/server/utils/process/execAsync";
import {
cloneBitbucketRepository,
getBitbucketCloneCommand,
} from "@/server/utils/providers/bitbucket";
import {
cloneGitRepository,
getCustomGitCloneCommand,
} from "@/server/utils/providers/git";
import {
cloneGithubRepository,
getGithubCloneCommand,
} from "@/server/utils/providers/github";
import {
cloneGitlabRepository,
getGitlabCloneCommand,
} from "@/server/utils/providers/gitlab";
import {
createComposeFile,
getCreateComposeFileCommand,
} from "@/server/utils/providers/raw";
import { generatePassword } from "@/templates/utils";
import { TRPCError } from "@trpc/server";
import { eq } from "drizzle-orm";
import { getDokployUrl } from "./admin";
import { createDeploymentCompose, updateDeploymentStatus } from "./deployment";
import { validUniqueServerAppName } from "./project";
export type Compose = typeof compose.$inferSelect;

/**
 * Creates a compose service row, suffixing the app name with 6 random
 * characters; the compose file starts empty and is populated later.
 *
 * Fix: the old `` `${input.appName}-${generatePassword(6)}` ||
 * generateAppName("compose") `` could never fall back to a generated name
 * (a template literal is always truthy) and produced names like
 * "-abc123" for empty input. The fallback is now explicit and uniqueness
 * is validated for generated names as well.
 */
export const createCompose = async (input: typeof apiCreateCompose._type) => {
	input.appName = input.appName
		? `${input.appName}-${generatePassword(6)}`
		: generateAppName("compose");

	const valid = await validUniqueServerAppName(input.appName);
	if (!valid) {
		throw new TRPCError({
			code: "CONFLICT",
			message: "Service with this 'AppName' already exists",
		});
	}

	const newDestination = await db
		.insert(compose)
		.values({
			...input,
			composeFile: "",
		})
		.returning()
		.then((value) => value[0]);

	if (!newDestination) {
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: "Error input: Inserting compose",
		});
	}
	return newDestination;
};
/**
 * Inserts a compose row from a template (caller supplies the full insert
 * shape, including any compose file). Validates app-name uniqueness when
 * a name is provided.
 */
export const createComposeByTemplate = async (
	input: typeof compose.$inferInsert,
) => {
	if (input.appName) {
		const isUnique = await validUniqueServerAppName(input.appName);
		if (!isUnique) {
			throw new TRPCError({
				code: "CONFLICT",
				message: "Service with this 'AppName' already exists",
			});
		}
	}
	const [created] = await db
		.insert(compose)
		.values({ ...input })
		.returning();
	if (!created) {
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: "Error input: Inserting compose",
		});
	}
	return created;
};
/** Loads a compose service with all of its relations; throws NOT_FOUND when missing. */
export const findComposeById = async (composeId: string) => {
	const record = await db.query.compose.findFirst({
		where: eq(compose.composeId, composeId),
		with: {
			project: true,
			deployments: true,
			mounts: true,
			domains: true,
			github: true,
			gitlab: true,
			bitbucket: true,
			server: true,
		},
	});
	if (record === undefined) {
		throw new TRPCError({ code: "NOT_FOUND", message: "Compose not found" });
	}
	return record;
};
/**
 * Returns the service names declared in a compose file. With
 * type="fetch" the sources are (re)cloned first; "cache" reuses what is
 * already on disk. Randomized specs are suffixed before extracting names.
 */
export const loadServices = async (
	composeId: string,
	type: "fetch" | "cache" = "fetch",
) => {
	const composeRecord = await findComposeById(composeId);
	const onRemote = Boolean(composeRecord.serverId);

	if (type === "fetch") {
		if (onRemote) {
			await cloneComposeRemote(composeRecord);
		} else {
			await cloneCompose(composeRecord);
		}
	}

	let spec: ComposeSpecification | null = onRemote
		? await loadDockerComposeRemote(composeRecord)
		: await loadDockerCompose(composeRecord);

	if (composeRecord.randomize && spec) {
		spec = randomizeSpecificationFile(spec, composeRecord.suffix);
	}

	if (!spec?.services) {
		throw new TRPCError({ code: "NOT_FOUND", message: "Services not found" });
	}
	return [...Object.keys(spec.services)];
};
/** Applies a partial update to a compose row; returns the updated row (or undefined when no row matched). */
export const updateCompose = async (
	composeId: string,
	composeData: Partial<Compose>,
) => {
	const [updated] = await db
		.update(compose)
		.set({ ...composeData })
		.where(eq(compose.composeId, composeId))
		.returning();
	return updated;
};
/**
 * Runs a full local deployment of a compose service: clones/creates the
 * sources per source type, builds, records the deployment and compose
 * statuses, and sends a success/error notification. Rethrows on failure.
 */
export const deployCompose = async ({
	composeId,
	titleLog = "Manual deployment",
	descriptionLog = "",
}: {
	composeId: string;
	titleLog: string;
	descriptionLog: string;
}) => {
	const compose = await findComposeById(composeId);
	// Deep link to the deployments tab, embedded in notifications.
	const buildLink = `${await getDokployUrl()}/dashboard/project/${compose.projectId}/services/compose/${compose.composeId}?tab=deployments`;
	const deployment = await createDeploymentCompose({
		composeId: composeId,
		title: titleLog,
		description: descriptionLog,
	});
	try {
		// "raw" sources have their compose file written directly; the
		// rest clone a repository (third arg flags compose mode).
		if (compose.sourceType === "github") {
			await cloneGithubRepository(compose, deployment.logPath, true);
		} else if (compose.sourceType === "gitlab") {
			await cloneGitlabRepository(compose, deployment.logPath, true);
		} else if (compose.sourceType === "bitbucket") {
			await cloneBitbucketRepository(compose, deployment.logPath, true);
		} else if (compose.sourceType === "git") {
			await cloneGitRepository(compose, deployment.logPath, true);
		} else if (compose.sourceType === "raw") {
			await createComposeFile(compose, deployment.logPath);
		}
		await buildCompose(compose, deployment.logPath);
		await updateDeploymentStatus(deployment.deploymentId, "done");
		await updateCompose(composeId, {
			composeStatus: "done",
		});
		await sendBuildSuccessNotifications({
			projectName: compose.project.name,
			applicationName: compose.name,
			applicationType: "compose",
			buildLink,
		});
	} catch (error) {
		await updateDeploymentStatus(deployment.deploymentId, "error");
		await updateCompose(composeId, {
			composeStatus: "error",
		});
		await sendBuildErrorNotifications({
			projectName: compose.project.name,
			applicationName: compose.name,
			applicationType: "compose",
			// @ts-ignore
			errorMessage: error?.message || "Error to build",
			buildLink,
		});
		throw error;
	}
};
/**
 * Rebuilds a compose service from already-present sources (no fresh
 * clone): remote services go through the command helper, local ones
 * build directly. Records statuses on both rows and rethrows on failure.
 */
export const rebuildCompose = async ({
	composeId,
	titleLog = "Rebuild deployment",
	descriptionLog = "",
}: {
	composeId: string;
	titleLog: string;
	descriptionLog: string;
}) => {
	const composeRecord = await findComposeById(composeId);
	const deployment = await createDeploymentCompose({
		composeId: composeId,
		title: titleLog,
		description: descriptionLog,
	});
	try {
		if (composeRecord.serverId) {
			await getBuildComposeCommand(composeRecord, deployment.logPath);
		} else {
			await buildCompose(composeRecord, deployment.logPath);
		}
		await updateDeploymentStatus(deployment.deploymentId, "done");
		await updateCompose(composeId, { composeStatus: "done" });
	} catch (error) {
		await updateDeploymentStatus(deployment.deploymentId, "error");
		await updateCompose(composeId, { composeStatus: "error" });
		throw error;
	}
	return true;
};
/**
 * Deploys a compose service on its remote server: assembles a single
 * shell script (clone or raw-file creation), executes it remotely, then
 * runs the compose build command. Statuses and notifications mirror
 * deployCompose; the build is skipped when `serverId` is unset, but
 * statuses are still recorded.
 */
export const deployRemoteCompose = async ({
	composeId,
	titleLog = "Manual deployment",
	descriptionLog = "",
}: {
	composeId: string;
	titleLog: string;
	descriptionLog: string;
}) => {
	const compose = await findComposeById(composeId);
	// Deep link to the deployments tab, embedded in notifications.
	const buildLink = `${await getDokployUrl()}/dashboard/project/${compose.projectId}/services/compose/${compose.composeId}?tab=deployments`;
	const deployment = await createDeploymentCompose({
		composeId: composeId,
		title: titleLog,
		description: descriptionLog,
	});
	try {
		if (compose.serverId) {
			// Abort the whole remote script at the first failing step.
			let command = "set -e;";
			if (compose.sourceType === "github") {
				command += await getGithubCloneCommand(
					compose,
					deployment.logPath,
					true,
				);
			} else if (compose.sourceType === "gitlab") {
				command += await getGitlabCloneCommand(
					compose,
					deployment.logPath,
					true,
				);
			} else if (compose.sourceType === "bitbucket") {
				command += await getBitbucketCloneCommand(
					compose,
					deployment.logPath,
					true,
				);
			} else if (compose.sourceType === "git") {
				command += await getCustomGitCloneCommand(
					compose,
					deployment.logPath,
					true,
				);
			} else if (compose.sourceType === "raw") {
				command += getCreateComposeFileCommand(compose, deployment.logPath);
			}
			await execAsyncRemote(compose.serverId, command);
			await getBuildComposeCommand(compose, deployment.logPath);
		}
		await updateDeploymentStatus(deployment.deploymentId, "done");
		await updateCompose(composeId, {
			composeStatus: "done",
		});
		await sendBuildSuccessNotifications({
			projectName: compose.project.name,
			applicationName: compose.name,
			applicationType: "compose",
			buildLink,
		});
	} catch (error) {
		console.log(error);
		await updateDeploymentStatus(deployment.deploymentId, "error");
		await updateCompose(composeId, {
			composeStatus: "error",
		});
		await sendBuildErrorNotifications({
			projectName: compose.project.name,
			applicationName: compose.name,
			applicationType: "compose",
			// @ts-ignore
			errorMessage: error?.message || "Error to build",
			buildLink,
		});
		throw error;
	}
};
/**
 * Re-runs only the build step for a remote compose service (sources are
 * assumed to already be present on the remote host). Records a deployment
 * and keeps compose/deployment statuses in sync.
 */
export const rebuildRemoteCompose = async ({
	composeId,
	titleLog = "Rebuild deployment",
	descriptionLog = "",
}: {
	composeId: string;
	titleLog: string;
	descriptionLog: string;
}) => {
	const compose = await findComposeById(composeId);
	const deployment = await createDeploymentCompose({
		composeId: composeId,
		title: titleLog,
		description: descriptionLog,
	});
	try {
		// No clone/fetch here — a rebuild reuses the checked-out sources.
		if (compose.serverId) {
			await getBuildComposeCommand(compose, deployment.logPath);
		}
		await updateDeploymentStatus(deployment.deploymentId, "done");
		await updateCompose(composeId, { composeStatus: "done" });
	} catch (error) {
		await updateDeploymentStatus(deployment.deploymentId, "error");
		await updateCompose(composeId, { composeStatus: "error" });
		throw error;
	}
	return true;
};
/**
 * Tears down a compose service (stack or docker-compose project) and deletes
 * its checked-out project directory, locally or on the remote server.
 *
 * Fix: in the "stack" branch the command was executed a second time via a
 * stray `execAsync(command, { cwd: projectPath })` after the if/else — for
 * local stacks the directory had already been `rm -rf`ed, so the duplicate
 * `cd` failed and removeCompose always threw; for remote stacks it ran the
 * teardown on the wrong (local) host. The duplicate call is removed.
 * The old `try { ... } catch (error) { throw error; }` wrapper was a no-op
 * and is dropped as well.
 */
export const removeCompose = async (compose: Compose) => {
	const { COMPOSE_PATH } = paths(!!compose.serverId);
	const projectPath = join(COMPOSE_PATH, compose.appName);
	if (compose.composeType === "stack") {
		const command = `cd ${projectPath} && docker stack rm ${compose.appName} && rm -rf ${projectPath}`;
		if (compose.serverId) {
			await execAsyncRemote(compose.serverId, command);
		} else {
			await execAsync(command);
		}
	} else {
		const command = `cd ${projectPath} && docker compose -p ${compose.appName} down && rm -rf ${projectPath}`;
		if (compose.serverId) {
			await execAsyncRemote(compose.serverId, command);
		} else {
			await execAsync(command, {
				cwd: projectPath,
			});
		}
	}
	return true;
};
/**
 * Stops the containers of a docker-compose project (stack-type services are
 * left untouched) and marks the compose record "idle"; on failure the record
 * is marked "error" and the original error is rethrown.
 */
export const stopCompose = async (composeId: string) => {
	const compose = await findComposeById(composeId);
	try {
		const { COMPOSE_PATH } = paths(!!compose.serverId);
		if (compose.composeType === "docker-compose") {
			const projectDir = join(COMPOSE_PATH, compose.appName);
			if (compose.serverId) {
				await execAsyncRemote(
					compose.serverId,
					`cd ${projectDir} && docker compose -p ${compose.appName} stop`,
				);
			} else {
				await execAsync(`docker compose -p ${compose.appName} stop`, {
					cwd: projectDir,
				});
			}
		}
		await updateCompose(composeId, { composeStatus: "idle" });
	} catch (error) {
		await updateCompose(composeId, { composeStatus: "error" });
		throw error;
	}
	return true;
};

View File

@@ -1,372 +0,0 @@
import { existsSync, promises as fsPromises } from "node:fs";
import path from "node:path";
import { paths } from "@/server/constants";
import { db } from "@/server/db";
import {
type apiCreateDeployment,
type apiCreateDeploymentCompose,
type apiCreateDeploymentServer,
deployments,
} from "@/server/db/schema";
import { removeDirectoryIfExistsContent } from "@/server/utils/filesystem/directory";
import { TRPCError } from "@trpc/server";
import { format } from "date-fns";
import { desc, eq } from "drizzle-orm";
import {
type Application,
findApplicationById,
updateApplicationStatus,
} from "./application";
import { type Compose, findComposeById, updateCompose } from "./compose";
import { type Server, findServerById } from "./server";
import { execAsyncRemote } from "@/server/utils/process/execAsync";
export type Deployment = typeof deployments.$inferSelect;
// Fetches the first deployment for an application, with its application relation.
// NOTE(review): despite the name, this filters on `deployments.applicationId`
// (the parameter is an application id), not `deployments.deploymentId` — it
// returns the first matching deployment. Confirm callers expect this.
export const findDeploymentById = async (applicationId: string) => {
	const application = await db.query.deployments.findFirst({
		where: eq(deployments.applicationId, applicationId),
		with: {
			application: true,
		},
	});
	// No match at all -> surface as NOT_FOUND for the API layer.
	if (!application) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Deployment not found",
		});
	}
	return application;
};
/**
 * Creates a "running" deployment row for an application and provisions its
 * log file — locally, or over SSH when the application targets a remote
 * server. On any failure the application is marked "error" and a
 * BAD_REQUEST TRPCError is thrown.
 */
export const createDeployment = async (
	deployment: Omit<
		typeof apiCreateDeployment._type,
		"deploymentId" | "createdAt" | "status" | "logPath"
	>,
) => {
	const application = await findApplicationById(deployment.applicationId);
	try {
		// await removeLastTenDeployments(deployment.applicationId);
		const { LOGS_PATH } = paths(!!application.serverId);
		const stamp = format(new Date(), "yyyy-MM-dd:HH:mm:ss");
		const logFilePath = path.join(
			LOGS_PATH,
			application.appName,
			`${application.appName}-${stamp}.log`,
		);
		if (application.serverId) {
			// Remote target: create the log dir/file on the server over SSH.
			const server = await findServerById(application.serverId);
			const command = `
mkdir -p ${LOGS_PATH}/${application.appName};
echo "Initializing deployment" >> ${logFilePath};
`;
			await execAsyncRemote(server.serverId, command);
		} else {
			await fsPromises.mkdir(path.join(LOGS_PATH, application.appName), {
				recursive: true,
			});
			await fsPromises.writeFile(logFilePath, "Initializing deployment");
		}
		const created = await db
			.insert(deployments)
			.values({
				applicationId: deployment.applicationId,
				title: deployment.title || "Deployment",
				status: "running",
				logPath: logFilePath,
				description: deployment.description || "",
			})
			.returning()
			.then((rows) => rows[0]);
		if (!created) {
			throw new TRPCError({
				code: "BAD_REQUEST",
				message: "Error to create the deployment",
			});
		}
		return created;
	} catch (error) {
		await updateApplicationStatus(application.applicationId, "error");
		console.log(error);
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: "Error to create the deployment",
		});
	}
};
/**
 * Creates a "running" deployment row for a compose service and provisions
 * its log file — locally, or over SSH when the compose targets a remote
 * server. On failure the compose is marked "error" and a BAD_REQUEST
 * TRPCError is thrown.
 */
export const createDeploymentCompose = async (
	deployment: Omit<
		typeof apiCreateDeploymentCompose._type,
		"deploymentId" | "createdAt" | "status" | "logPath"
	>,
) => {
	const compose = await findComposeById(deployment.composeId);
	try {
		// await removeLastTenComposeDeployments(deployment.composeId);
		const { LOGS_PATH } = paths(!!compose.serverId);
		const stamp = format(new Date(), "yyyy-MM-dd:HH:mm:ss");
		const logFilePath = path.join(
			LOGS_PATH,
			compose.appName,
			`${compose.appName}-${stamp}.log`,
		);
		if (compose.serverId) {
			// Remote target: create the log dir/file on the server over SSH.
			const server = await findServerById(compose.serverId);
			const command = `
mkdir -p ${LOGS_PATH}/${compose.appName};
echo "Initializing deployment" >> ${logFilePath};
`;
			await execAsyncRemote(server.serverId, command);
		} else {
			await fsPromises.mkdir(path.join(LOGS_PATH, compose.appName), {
				recursive: true,
			});
			await fsPromises.writeFile(logFilePath, "Initializing deployment");
		}
		const created = await db
			.insert(deployments)
			.values({
				composeId: deployment.composeId,
				title: deployment.title || "Deployment",
				description: deployment.description || "",
				status: "running",
				logPath: logFilePath,
			})
			.returning()
			.then((rows) => rows[0]);
		if (!created) {
			throw new TRPCError({
				code: "BAD_REQUEST",
				message: "Error to create the deployment",
			});
		}
		return created;
	} catch (error) {
		await updateCompose(compose.composeId, { composeStatus: "error" });
		console.log(error);
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: "Error to create the deployment",
		});
	}
};
export const removeDeployment = async (deploymentId: string) => {
try {
const deployment = await db
.delete(deployments)
.where(eq(deployments.deploymentId, deploymentId))
.returning();
return deployment[0];
} catch (error) {
throw new TRPCError({
code: "BAD_REQUEST",
message: "Error to delete this deployment",
});
}
};
/** Deletes every deployment row belonging to the given application. */
export const removeDeploymentsByApplicationId = async (
	applicationId: string,
) => {
	await db
		.delete(deployments)
		.where(eq(deployments.applicationId, applicationId))
		.returning();
};
const removeLastTenDeployments = async (applicationId: string) => {
const deploymentList = await db.query.deployments.findMany({
where: eq(deployments.applicationId, applicationId),
orderBy: desc(deployments.createdAt),
});
if (deploymentList.length > 10) {
const deploymentsToDelete = deploymentList.slice(10);
for (const oldDeployment of deploymentsToDelete) {
const logPath = path.join(oldDeployment.logPath);
if (existsSync(logPath)) {
await fsPromises.unlink(logPath);
}
await removeDeployment(oldDeployment.deploymentId);
}
}
};
const removeLastTenComposeDeployments = async (composeId: string) => {
const deploymentList = await db.query.deployments.findMany({
where: eq(deployments.composeId, composeId),
orderBy: desc(deployments.createdAt),
});
if (deploymentList.length > 10) {
const deploymentsToDelete = deploymentList.slice(10);
for (const oldDeployment of deploymentsToDelete) {
const logPath = path.join(oldDeployment.logPath);
if (existsSync(logPath)) {
await fsPromises.unlink(logPath);
}
await removeDeployment(oldDeployment.deploymentId);
}
}
};
/** Removes an application's log directory (locally or over SSH) and all of its deployment rows. */
export const removeDeployments = async (application: Application) => {
	const { appName, applicationId, serverId } = application;
	const { LOGS_PATH } = paths(!!serverId);
	const logsPath = path.join(LOGS_PATH, appName);
	if (serverId) {
		await execAsyncRemote(serverId, `rm -rf ${logsPath}`);
	} else {
		await removeDirectoryIfExistsContent(logsPath);
	}
	await removeDeploymentsByApplicationId(applicationId);
};
/** Removes a compose service's log directory (locally or over SSH) and all of its deployment rows. */
export const removeDeploymentsByComposeId = async (compose: Compose) => {
	const { appName, composeId, serverId } = compose;
	const { LOGS_PATH } = paths(!!serverId);
	const logsPath = path.join(LOGS_PATH, appName);
	if (serverId) {
		await execAsyncRemote(serverId, `rm -rf ${logsPath}`);
	} else {
		await removeDirectoryIfExistsContent(logsPath);
	}
	await db
		.delete(deployments)
		.where(eq(deployments.composeId, composeId))
		.returning();
};
/** Lists an application's deployments, newest first. */
export const findAllDeploymentsByApplicationId = async (
	applicationId: string,
) => {
	return await db.query.deployments.findMany({
		where: eq(deployments.applicationId, applicationId),
		orderBy: desc(deployments.createdAt),
	});
};
/** Lists a compose service's deployments, newest first. */
export const findAllDeploymentsByComposeId = async (composeId: string) => {
	return await db.query.deployments.findMany({
		where: eq(deployments.composeId, composeId),
		orderBy: desc(deployments.createdAt),
	});
};
/** Applies a partial update to a deployment; returns the updated rows (array). */
export const updateDeployment = async (
	deploymentId: string,
	deploymentData: Partial<Deployment>,
) => {
	return await db
		.update(deployments)
		.set({ ...deploymentData })
		.where(eq(deployments.deploymentId, deploymentId))
		.returning();
};
/** Sets only the status column of a deployment; returns the updated rows (array). */
export const updateDeploymentStatus = async (
	deploymentId: string,
	deploymentStatus: Deployment["status"],
) => {
	return await db
		.update(deployments)
		.set({ status: deploymentStatus })
		.where(eq(deployments.deploymentId, deploymentId))
		.returning();
};
/**
 * Creates a "running" deployment row for a server-setup run and provisions
 * its local log file. Old server deployments are pruned first. Failures are
 * wrapped in a BAD_REQUEST TRPCError.
 */
export const createServerDeployment = async (
	deployment: Omit<
		typeof apiCreateDeploymentServer._type,
		"deploymentId" | "createdAt" | "status" | "logPath"
	>,
) => {
	try {
		const { LOGS_PATH } = paths();
		const server = await findServerById(deployment.serverId);
		await removeLastFiveDeployments(deployment.serverId);
		const stamp = format(new Date(), "yyyy-MM-dd:HH:mm:ss");
		const logFilePath = path.join(
			LOGS_PATH,
			server.appName,
			`${server.appName}-${stamp}.log`,
		);
		await fsPromises.mkdir(path.join(LOGS_PATH, server.appName), {
			recursive: true,
		});
		await fsPromises.writeFile(logFilePath, "Initializing Setup Server");
		const created = await db
			.insert(deployments)
			.values({
				serverId: server.serverId,
				title: deployment.title || "Deployment",
				description: deployment.description || "",
				status: "running",
				logPath: logFilePath,
			})
			.returning()
			.then((rows) => rows[0]);
		if (!created) {
			throw new TRPCError({
				code: "BAD_REQUEST",
				message: "Error to create the deployment",
			});
		}
		return created;
	} catch (error) {
		console.log(error);
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: "Error to create the deployment",
		});
	}
};
// Prunes a server's deployment history: deletes older deployment rows and
// their on-disk log files, keeping only the newest entries.
// NOTE(review): with `length >= 5` and `slice(4)` this keeps the 4 newest
// deployments once a 5th exists, despite the "five" in the name — confirm
// the intended retention count (sibling helpers keep 10 via `> 10`/`slice(10)`).
export const removeLastFiveDeployments = async (serverId: string) => {
	const deploymentList = await db.query.deployments.findMany({
		where: eq(deployments.serverId, serverId),
		orderBy: desc(deployments.createdAt), // newest first
	});
	if (deploymentList.length >= 5) {
		const deploymentsToDelete = deploymentList.slice(4);
		for (const oldDeployment of deploymentsToDelete) {
			const logPath = path.join(oldDeployment.logPath);
			// Remove the log file first (if it still exists), then the DB row.
			if (existsSync(logPath)) {
				await fsPromises.unlink(logPath);
			}
			await removeDeployment(oldDeployment.deploymentId);
		}
	}
};
/** Removes a server's local log directory and all of its deployment rows. */
export const removeDeploymentsByServerId = async (server: Server) => {
	const { LOGS_PATH } = paths();
	const logsPath = path.join(LOGS_PATH, server.appName);
	await removeDirectoryIfExistsContent(logsPath);
	await db
		.delete(deployments)
		.where(eq(deployments.serverId, server.serverId))
		.returning();
};
/** Lists a server's deployments, newest first. */
export const findAllDeploymentsByServerId = async (serverId: string) => {
	return await db.query.deployments.findMany({
		where: eq(deployments.serverId, serverId),
		orderBy: desc(deployments.createdAt),
	});
};

View File

@@ -1,67 +0,0 @@
import { db } from "@/server/db";
import { type apiCreateDestination, destinations } from "@/server/db/schema";
import { TRPCError } from "@trpc/server";
import { eq } from "drizzle-orm";
import { findAdmin } from "./admin";
export type Destination = typeof destinations.$inferSelect;

/**
 * Inserts a destination record owned by the admin and returns it.
 * NOTE(review): the exported name carries a typo ("Destintation") but is
 * kept unchanged for compatibility with existing callers.
 */
export const createDestintation = async (
	input: typeof apiCreateDestination._type,
) => {
	const admin = await findAdmin();
	const created = await db
		.insert(destinations)
		.values({ ...input, adminId: admin.adminId })
		.returning()
		.then((rows) => rows[0]);
	if (!created) {
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: "Error input: Inserting destination",
		});
	}
	return created;
};
/** Looks up a destination by id; throws NOT_FOUND when it does not exist. */
export const findDestinationById = async (destinationId: string) => {
	const destination = await db.query.destinations.findFirst({
		where: eq(destinations.destinationId, destinationId),
	});
	if (destination) {
		return destination;
	}
	throw new TRPCError({
		code: "NOT_FOUND",
		message: "Destination not found",
	});
};
/** Deletes a destination row and returns the deleted record (or undefined). */
export const removeDestinationById = async (destinationId: string) => {
	return await db
		.delete(destinations)
		.where(eq(destinations.destinationId, destinationId))
		.returning()
		.then((rows) => rows[0]);
};
/** Applies a partial update to a destination and returns the updated record. */
export const updateDestinationById = async (
	destinationId: string,
	destinationData: Partial<Destination>,
) => {
	return await db
		.update(destinations)
		.set({ ...destinationData })
		.where(eq(destinations.destinationId, destinationId))
		.returning()
		.then((rows) => rows[0]);
};

View File

@@ -1,223 +0,0 @@
import { execAsync, execAsyncRemote } from "@/server/utils/process/execAsync";
/**
 * Lists all containers (any state) on the local host or a remote server,
 * excluding dokploy's own containers. Returns [] when the command fails and
 * undefined when docker reports on stderr (preserved best-effort semantics).
 */
export const getContainers = async (serverId?: string | null) => {
	try {
		const command =
			"docker ps -a --format 'CONTAINER ID : {{.ID}} | Name: {{.Names}} | Image: {{.Image}} | Ports: {{.Ports}} | State: {{.State}} | Status: {{.Status}}'";
		const { stdout, stderr } = serverId
			? await execAsyncRemote(serverId, command)
			: await execAsync(command);
		if (stderr) {
			console.error(`Error: ${stderr}`);
			return;
		}
		// Each line: "CONTAINER ID : x | Name: y | Image: z | Ports: p | State: s | Status: t"
		const field = (part: string | undefined, label: string, fallback: string) =>
			part ? part.replace(label, "").trim() : fallback;
		return stdout
			.trim()
			.split("\n")
			.map((line) => {
				const parts = line.split(" | ");
				return {
					containerId: field(parts[0], "CONTAINER ID : ", "No container id"),
					name: field(parts[1], "Name: ", "No container name"),
					image: field(parts[2], "Image: ", "No image"),
					ports: field(parts[3], "Ports: ", "No ports"),
					state: field(parts[4], "State: ", "No state"),
					status: field(parts[5], "Status: ", "No status"),
					serverId,
				};
			})
			.filter((container) => !container.name.includes("dokploy"));
	} catch (error) {
		console.error(error);
		return [];
	}
};
/**
 * Returns the parsed `docker inspect` JSON for a container, locally or on a
 * remote server. Best-effort: resolves to undefined on stderr or any thrown
 * error (the empty catch is intentional, preserved from the original).
 */
export const getConfig = async (
	containerId: string,
	serverId?: string | null,
) => {
	try {
		const command = `docker inspect ${containerId} --format='{{json .}}'`;
		const { stdout, stderr } = serverId
			? await execAsyncRemote(serverId, command)
			: await execAsync(command);
		if (stderr) {
			console.error(`Error: ${stderr}`);
			return;
		}
		return JSON.parse(stdout);
	} catch (error) {}
};
/**
 * Finds containers for an app: compose projects are matched precisely via the
 * com.docker.compose.project label; otherwise falls back to a name grep.
 * Returns [] on any failure, empty output, or stderr.
 */
export const getContainersByAppNameMatch = async (
	appName: string,
	appType?: "stack" | "docker-compose",
	serverId?: string,
) => {
	try {
		const base =
			"docker ps -a --format 'CONTAINER ID : {{.ID}} | Name: {{.Names}} | State: {{.State}}'";
		const command =
			appType === "docker-compose"
				? `${base} --filter='label=com.docker.compose.project=${appName}'`
				: `${base} | grep ${appName}`;
		const { stdout, stderr } = serverId
			? await execAsyncRemote(serverId, command)
			: await execAsync(command);
		if (stderr || !stdout) {
			return [];
		}
		const field = (part: string | undefined, label: string, fallback: string) =>
			part ? part.replace(label, "").trim() : fallback;
		return stdout
			.trim()
			.split("\n")
			.map((line) => {
				const parts = line.split(" | ");
				return {
					containerId: field(parts[0], "CONTAINER ID : ", "No container id"),
					name: field(parts[1], "Name: ", "No container name"),
					state: field(parts[2], "State: ", "No state"),
				};
			});
	} catch (error) {}
	return [];
};
/**
 * Finds running containers of a swarm service via its
 * com.docker.swarm.service.name label, locally or on a remote server.
 * Returns [] when output is empty or an error is thrown; undefined on stderr
 * (preserved best-effort semantics).
 */
export const getContainersByAppLabel = async (
	appName: string,
	serverId?: string,
) => {
	try {
		const command = `docker ps --filter "label=com.docker.swarm.service.name=${appName}" --format 'CONTAINER ID : {{.ID}} | Name: {{.Names}} | State: {{.State}}'`;
		const { stdout, stderr } = serverId
			? await execAsyncRemote(serverId, command)
			: await execAsync(command);
		if (stderr) {
			console.error(`Error: ${stderr}`);
			return;
		}
		if (!stdout) return [];
		const field = (part: string | undefined, label: string, fallback: string) =>
			part ? part.replace(label, "").trim() : fallback;
		return stdout
			.trim()
			.split("\n")
			.map((line) => {
				const parts = line.split(" | ");
				return {
					containerId: field(parts[0], "CONTAINER ID : ", "No container id"),
					name: field(parts[1], "Name: ", "No container name"),
					state: field(parts[2], "State: ", "No state"),
				};
			});
	} catch (error) {}
	return [];
};
/**
 * Restarts a container and returns its id (docker echoes the name/id given).
 *
 * Fix: `docker container restart` prints the container identifier as plain
 * text, not JSON — the previous `JSON.parse(stdout)` always threw into the
 * empty catch, so this function silently returned undefined every time.
 * Best-effort semantics are preserved: stderr or a thrown error still yields
 * undefined.
 */
export const containerRestart = async (containerId: string) => {
	try {
		const { stdout, stderr } = await execAsync(
			`docker container restart ${containerId}`,
		);
		if (stderr) {
			console.error(`Error: ${stderr}`);
			return;
		}
		return stdout.trim();
	} catch (error) {}
};

View File

@@ -1,136 +0,0 @@
import { db } from "@/server/db";
import { type apiCreateDomain, domains } from "@/server/db/schema";
import { manageDomain } from "@/server/utils/traefik/domain";
import { generateRandomDomain } from "@/templates/utils";
import { TRPCError } from "@trpc/server";
import { eq } from "drizzle-orm";
import { findAdmin, findAdminById } from "./admin";
import { findApplicationById } from "./application";
import { findServerById } from "./server";
export type Domain = typeof domains.$inferSelect;

/**
 * Inserts a domain inside a transaction; when the domain belongs to an
 * application, the Traefik router config is synced before committing.
 */
export const createDomain = async (input: typeof apiCreateDomain._type) => {
	return await db.transaction(async (tx) => {
		const domain = await tx
			.insert(domains)
			.values({ ...input })
			.returning()
			.then((rows) => rows[0]);
		if (!domain) {
			throw new TRPCError({
				code: "BAD_REQUEST",
				message: "Error creating domain",
			});
		}
		if (domain.applicationId) {
			const application = await findApplicationById(domain.applicationId);
			await manageDomain(application, domain);
		}
		return domain;
	});
};
/**
 * Builds a random domain for an app: uses the remote server's IP when a
 * serverId is given, an empty IP in development, and otherwise the admin's
 * configured serverIp.
 */
export const generateTraefikMeDomain = async (
	appName: string,
	adminId: string,
	serverId?: string,
) => {
	if (serverId) {
		const server = await findServerById(serverId);
		return generateRandomDomain({
			serverIp: server.ipAddress,
			projectName: appName,
		});
	}
	if (process.env.NODE_ENV === "development") {
		return generateRandomDomain({ serverIp: "", projectName: appName });
	}
	const admin = await findAdminById(adminId);
	return generateRandomDomain({
		serverIp: admin?.serverIp || "",
		projectName: appName,
	});
};
/** Joins an app name and a wildcard server domain as "<app>-<domain>". */
export const generateWildcardDomain = (
	appName: string,
	serverDomain: string,
) => `${appName}-${serverDomain}`;
/** Looks up a domain (with its application relation); throws NOT_FOUND when missing. */
export const findDomainById = async (domainId: string) => {
	const domain = await db.query.domains.findFirst({
		where: eq(domains.domainId, domainId),
		with: {
			application: true,
		},
	});
	if (domain) {
		return domain;
	}
	throw new TRPCError({
		code: "NOT_FOUND",
		message: "Domain not found",
	});
};
/** Lists all domains attached to an application, including the application relation. */
export const findDomainsByApplicationId = async (applicationId: string) => {
	return await db.query.domains.findMany({
		where: eq(domains.applicationId, applicationId),
		with: {
			application: true,
		},
	});
};
/** Lists all domains attached to a compose service, including the compose relation. */
export const findDomainsByComposeId = async (composeId: string) => {
	return await db.query.domains.findMany({
		where: eq(domains.composeId, composeId),
		with: {
			compose: true,
		},
	});
};
export const updateDomainById = async (
domainId: string,
domainData: Partial<Domain>,
) => {
const domain = await db
.update(domains)
.set({
...domainData,
})
.where(eq(domains.domainId, domainId))
.returning();
return domain[0];
};
/** Deletes a domain after verifying it exists (findDomainById throws NOT_FOUND otherwise). */
export const removeDomainById = async (domainId: string) => {
	await findDomainById(domainId);
	// TODO: fix order
	return await db
		.delete(domains)
		.where(eq(domains.domainId, domainId))
		.returning()
		.then((rows) => rows[0]);
};

View File

@@ -1,29 +0,0 @@
import { db } from "@/server/db";
import { type apiCreateGithub, gitProvider, github } from "@/server/db/schema";
import { TRPCError } from "@trpc/server";
import { eq } from "drizzle-orm";
export type GitProvider = typeof gitProvider.$inferSelect;

/** Deletes a git-provider row and returns the deleted record (or undefined). */
export const removeGitProvider = async (gitProviderId: string) => {
	return await db
		.delete(gitProvider)
		.where(eq(gitProvider.gitProviderId, gitProviderId))
		.returning()
		.then((rows) => rows[0]);
};
/** Applies a partial update to a git provider and returns the updated record. */
export const updateGitProvider = async (
	gitProviderId: string,
	input: Partial<GitProvider>,
) => {
	const updated = await db
		.update(gitProvider)
		.set({ ...input })
		.where(eq(gitProvider.gitProviderId, gitProviderId))
		.returning();
	return updated[0];
};

View File

@@ -1,75 +0,0 @@
import { db } from "@/server/db";
import { type apiCreateGithub, gitProvider, github } from "@/server/db/schema";
import { TRPCError } from "@trpc/server";
import { eq } from "drizzle-orm";
export type Github = typeof github.$inferSelect;
export const createGithub = async (input: typeof apiCreateGithub._type) => {
return await db.transaction(async (tx) => {
const newGitProvider = await tx
.insert(gitProvider)
.values({
providerType: "github",
authId: input.authId,
name: input.name,
})
.returning()
.then((response) => response[0]);
if (!newGitProvider) {
throw new TRPCError({
code: "BAD_REQUEST",
message: "Error to create the git provider",
});
}
return await tx
.insert(github)
.values({
...input,
gitProviderId: newGitProvider?.gitProviderId,
})
.returning()
.then((response) => response[0]);
});
};
/** Looks up a github provider (with its gitProvider relation); throws NOT_FOUND when missing. */
export const findGithubById = async (githubId: string) => {
	const found = await db.query.github.findFirst({
		where: eq(github.githubId, githubId),
		with: {
			gitProvider: true,
		},
	});
	if (found) {
		return found;
	}
	throw new TRPCError({
		code: "NOT_FOUND",
		message: "Github Provider not found",
	});
};
/** True when the github record has all three credentials needed to operate: app id, private key, and installation id. */
export const haveGithubRequirements = (github: Github) => {
	const { githubAppId, githubPrivateKey, githubInstallationId } = github ?? {};
	return Boolean(githubAppId && githubPrivateKey && githubInstallationId);
};
/** Applies a partial update to a github provider record and returns the updated row. */
export const updateGithub = async (
	githubId: string,
	input: Partial<Github>,
) => {
	const rows = await db
		.update(github)
		.set({ ...input })
		.where(eq(github.githubId, githubId))
		.returning();
	return rows[0];
};

View File

@@ -1,76 +0,0 @@
import { db } from "@/server/db";
import {
type apiCreateGitlab,
type bitbucket,
gitProvider,
type github,
gitlab,
} from "@/server/db/schema";
import { TRPCError } from "@trpc/server";
import { eq } from "drizzle-orm";
export type Github = typeof github.$inferSelect;
export type Bitbucket = typeof bitbucket.$inferSelect;
export type Gitlab = typeof gitlab.$inferSelect;

/**
 * Atomically creates a "gitlab" git-provider row plus its gitlab detail row
 * and returns the created gitlab record.
 *
 * Fix: the inner insert's result was not returned from the transaction
 * callback, so createGitlab always resolved to undefined — unlike its
 * sibling createGithub. Adding `return` restores the created record.
 */
export const createGitlab = async (input: typeof apiCreateGitlab._type) => {
	return await db.transaction(async (tx) => {
		const newGitProvider = await tx
			.insert(gitProvider)
			.values({
				providerType: "gitlab",
				authId: input.authId,
				name: input.name,
			})
			.returning()
			.then((response) => response[0]);
		if (!newGitProvider) {
			throw new TRPCError({
				code: "BAD_REQUEST",
				message: "Error to create the git provider",
			});
		}
		return await tx
			.insert(gitlab)
			.values({
				...input,
				gitProviderId: newGitProvider.gitProviderId,
			})
			.returning()
			.then((response) => response[0]);
	});
};
/** Looks up a gitlab provider (with its gitProvider relation); throws NOT_FOUND when missing. */
export const findGitlabById = async (gitlabId: string) => {
	const found = await db.query.gitlab.findFirst({
		where: eq(gitlab.gitlabId, gitlabId),
		with: {
			gitProvider: true,
		},
	});
	if (found) {
		return found;
	}
	throw new TRPCError({
		code: "NOT_FOUND",
		message: "Gitlab Provider not found",
	});
};
/** Applies a partial update to a gitlab provider record and returns the updated row. */
export const updateGitlab = async (
	gitlabId: string,
	input: Partial<Gitlab>,
) => {
	const rows = await db
		.update(gitlab)
		.set({ ...input })
		.where(eq(gitlab.gitlabId, gitlabId))
		.returning();
	return rows[0];
};

View File

@@ -1,148 +0,0 @@
import { generateRandomPassword } from "@/server/auth/random-password";
import { db } from "@/server/db";
import { type apiCreateMariaDB, backups, mariadb } from "@/server/db/schema";
import { generateAppName } from "@/server/db/schema";
import { buildMariadb } from "@/server/utils/databases/mariadb";
import { pullImage } from "@/server/utils/docker/utils";
import { generatePassword } from "@/templates/utils";
import { TRPCError } from "@trpc/server";
import { eq, getTableColumns } from "drizzle-orm";
import { validUniqueServerAppName } from "./project";
import { execAsyncRemote } from "@/server/utils/process/execAsync";
export type Mariadb = typeof mariadb.$inferSelect;

/**
 * Creates a MariaDB service: derives a unique appName, checks it is not
 * already taken, and inserts the row (generating passwords when none given).
 *
 * Fix: the appName fallback was dead code — a template literal is always
 * truthy, so `` `${input.appName}-...` || generateAppName("mariadb") `` never
 * reached the fallback and an empty appName produced names like "-a1b2c3".
 * The ternary restores the intended behavior.
 */
export const createMariadb = async (input: typeof apiCreateMariaDB._type) => {
	input.appName = input.appName
		? `${input.appName}-${generatePassword(6)}` // suffix for uniqueness
		: generateAppName("mariadb");
	if (input.appName) {
		const valid = await validUniqueServerAppName(input.appName);
		if (!valid) {
			throw new TRPCError({
				code: "CONFLICT",
				message: "Service with this 'AppName' already exists",
			});
		}
	}
	const newMariadb = await db
		.insert(mariadb)
		.values({
			...input,
			// Generate credentials server-side when the caller supplied none.
			databasePassword: input.databasePassword
				? input.databasePassword
				: (await generateRandomPassword()).randomPassword,
			databaseRootPassword: input.databaseRootPassword
				? input.databaseRootPassword
				: (await generateRandomPassword()).randomPassword,
		})
		.returning()
		.then((value) => value[0]);
	if (!newMariadb) {
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: "Error input: Inserting mariadb database",
		});
	}
	return newMariadb;
};
// https://github.com/drizzle-team/drizzle-orm/discussions/1483#discussioncomment-7523881
/** Looks up a MariaDB service with project, mounts, server, and backups (incl. destinations); throws NOT_FOUND when missing. */
export const findMariadbById = async (mariadbId: string) => {
	const found = await db.query.mariadb.findFirst({
		where: eq(mariadb.mariadbId, mariadbId),
		with: {
			project: true,
			mounts: true,
			server: true,
			backups: {
				with: {
					destination: true,
				},
			},
		},
	});
	if (found) {
		return found;
	}
	throw new TRPCError({
		code: "NOT_FOUND",
		message: "Mariadb not found",
	});
};
/** Applies a partial update to a MariaDB service and returns the updated record. */
export const updateMariadbById = async (
	mariadbId: string,
	mariadbData: Partial<Mariadb>,
) => {
	return await db
		.update(mariadb)
		.set({ ...mariadbData })
		.where(eq(mariadb.mariadbId, mariadbId))
		.returning()
		.then((rows) => rows[0]);
};
/** Deletes a MariaDB service row and returns the deleted record (or undefined). */
export const removeMariadbById = async (mariadbId: string) => {
	return await db
		.delete(mariadb)
		.where(eq(mariadb.mariadbId, mariadbId))
		.returning()
		.then((rows) => rows[0]);
};
/** Resolves the MariaDB service that owns a backup; throws NOT_FOUND when the backup has no MariaDB. */
export const findMariadbByBackupId = async (backupId: string) => {
	const rows = await db
		.select({ ...getTableColumns(mariadb) })
		.from(mariadb)
		.innerJoin(backups, eq(mariadb.mariadbId, backups.mariadbId))
		.where(eq(backups.backupId, backupId))
		.limit(1);
	const found = rows[0];
	if (!found) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "MariaDB not found",
		});
	}
	return found;
};
/**
 * Deploys a MariaDB service: pulls its image (locally or on the remote
 * server), builds the service, and tracks applicationStatus ("done"/"error").
 */
export const deployMariadb = async (mariadbId: string) => {
	const service = await findMariadbById(mariadbId);
	try {
		// Make sure the image is present on the target host before building.
		if (service.serverId) {
			await execAsyncRemote(
				service.serverId,
				`docker pull ${service.dockerImage}`,
			);
		} else {
			await pullImage(service.dockerImage);
		}
		await buildMariadb(service);
		await updateMariadbById(mariadbId, { applicationStatus: "done" });
	} catch (error) {
		await updateMariadbById(mariadbId, { applicationStatus: "error" });
		throw new TRPCError({
			code: "INTERNAL_SERVER_ERROR",
			message: `Error on deploy mariadb${error}`,
		});
	}
	return service;
};

View File

@@ -1,141 +0,0 @@
import { generateRandomPassword } from "@/server/auth/random-password";
import { db } from "@/server/db";
import { type apiCreateMongo, backups, mongo } from "@/server/db/schema";
import { generateAppName } from "@/server/db/schema";
import { buildMongo } from "@/server/utils/databases/mongo";
import { pullImage } from "@/server/utils/docker/utils";
import { generatePassword } from "@/templates/utils";
import { TRPCError } from "@trpc/server";
import { eq, getTableColumns } from "drizzle-orm";
import { validUniqueServerAppName } from "./project";
import { execAsyncRemote } from "@/server/utils/process/execAsync";
export type Mongo = typeof mongo.$inferSelect;

/**
 * Creates a Mongo service: derives a unique appName, checks it is not
 * already taken, and inserts the row (generating a password when none given).
 *
 * Fixes: (1) the appName fallback was dead code — a template literal is
 * always truthy, so the `||` branch never ran and an empty appName produced
 * names like "-a1b2c3"; the ternary restores the fallback. (2) the fallback
 * called generateAppName("postgres") — a copy-paste from the postgres
 * module — now "mongo" (the branch was unreachable before, so no caller
 * could have observed the old value).
 */
export const createMongo = async (input: typeof apiCreateMongo._type) => {
	input.appName = input.appName
		? `${input.appName}-${generatePassword(6)}` // suffix for uniqueness
		: generateAppName("mongo");
	if (input.appName) {
		const valid = await validUniqueServerAppName(input.appName);
		if (!valid) {
			throw new TRPCError({
				code: "CONFLICT",
				message: "Service with this 'AppName' already exists",
			});
		}
	}
	const newMongo = await db
		.insert(mongo)
		.values({
			...input,
			// Generate a credential server-side when the caller supplied none.
			databasePassword: input.databasePassword
				? input.databasePassword
				: (await generateRandomPassword()).randomPassword,
		})
		.returning()
		.then((value) => value[0]);
	if (!newMongo) {
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: "Error input: Inserting mongo database",
		});
	}
	return newMongo;
};
/** Looks up a Mongo service with project, mounts, server, and backups (incl. destinations); throws NOT_FOUND when missing. */
export const findMongoById = async (mongoId: string) => {
	const found = await db.query.mongo.findFirst({
		where: eq(mongo.mongoId, mongoId),
		with: {
			project: true,
			mounts: true,
			server: true,
			backups: {
				with: {
					destination: true,
				},
			},
		},
	});
	if (found) {
		return found;
	}
	throw new TRPCError({
		code: "NOT_FOUND",
		message: "Mongo not found",
	});
};
/** Applies a partial update to a Mongo service and returns the updated record. */
export const updateMongoById = async (
	mongoId: string,
	mongoData: Partial<Mongo>, // renamed from copy-pasted "postgresData"
) => {
	return await db
		.update(mongo)
		.set({ ...mongoData })
		.where(eq(mongo.mongoId, mongoId))
		.returning()
		.then((rows) => rows[0]);
};
/** Resolves the Mongo service that owns a backup; throws NOT_FOUND when the backup has no Mongo. */
export const findMongoByBackupId = async (backupId: string) => {
	const rows = await db
		.select({ ...getTableColumns(mongo) })
		.from(mongo)
		.innerJoin(backups, eq(mongo.mongoId, backups.mongoId))
		.where(eq(backups.backupId, backupId))
		.limit(1);
	const found = rows[0];
	if (!found) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Mongo not found",
		});
	}
	return found;
};
/** Deletes a Mongo service row and returns the deleted record (or undefined). */
export const removeMongoById = async (mongoId: string) => {
	return await db
		.delete(mongo)
		.where(eq(mongo.mongoId, mongoId))
		.returning()
		.then((rows) => rows[0]);
};
/**
 * Deploys a Mongo service: pulls its image (locally or on the remote
 * server), builds the service, and tracks applicationStatus ("done"/"error").
 */
export const deployMongo = async (mongoId: string) => {
	const service = await findMongoById(mongoId);
	try {
		// Make sure the image is present on the target host before building.
		if (service.serverId) {
			await execAsyncRemote(
				service.serverId,
				`docker pull ${service.dockerImage}`,
			);
		} else {
			await pullImage(service.dockerImage);
		}
		await buildMongo(service);
		await updateMongoById(mongoId, { applicationStatus: "done" });
	} catch (error) {
		await updateMongoById(mongoId, { applicationStatus: "error" });
		throw new TRPCError({
			code: "INTERNAL_SERVER_ERROR",
			message: `Error on deploy mongo${error}`,
		});
	}
	return service;
};

View File

@@ -1,280 +0,0 @@
import path from "node:path";
import { paths } from "@/server/constants";
import { db } from "@/server/db";
import {
type ServiceType,
type apiCreateMount,
mounts,
} from "@/server/db/schema";
import { createFile, getCreateFileCommand } from "@/server/utils/docker/utils";
import { removeFileOrDirectory } from "@/server/utils/filesystem/directory";
import { execAsyncRemote } from "@/server/utils/process/execAsync";
import { TRPCError } from "@trpc/server";
import { type SQL, eq, sql } from "drizzle-orm";
export type Mount = typeof mounts.$inferSelect;
/**
 * Persists a mount attached to exactly one service (chosen by serviceType)
 * and, for file mounts, materializes the file on disk immediately.
 *
 * @throws TRPCError BAD_REQUEST when the insert or file creation fails.
 */
export const createMount = async (input: typeof apiCreateMount._type) => {
	try {
		const { serviceId, ...rest } = input;
		// Map the polymorphic serviceType onto its dedicated FK column.
		const columnByType = {
			application: "applicationId",
			postgres: "postgresId",
			mariadb: "mariadbId",
			mongo: "mongoId",
			mysql: "mysqlId",
			redis: "redisId",
			compose: "composeId",
		} as const;
		const column = columnByType[input.serviceType];
		const [created] = await db
			.insert(mounts)
			.values({
				...rest,
				...(column ? { [column]: serviceId } : {}),
			})
			.returning();
		if (!created) {
			throw new TRPCError({
				code: "BAD_REQUEST",
				message: "Error input: Inserting mount",
			});
		}
		if (created.type === "file") {
			await createFileMount(created.mountId);
		}
		return created;
	} catch (error) {
		console.log(error);
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: "Error to create the mount",
			cause: error,
		});
	}
};
export const createFileMount = async (mountId: string) => {
try {
const mount = await findMountById(mountId);
const baseFilePath = await getBaseFilesPath(mountId);
const serverId = await getServerId(mount);
if (serverId) {
const command = getCreateFileCommand(
baseFilePath,
mount.filePath || "",
mount.content || "",
);
await execAsyncRemote(serverId, command);
} else {
await createFile(baseFilePath, mount.filePath || "", mount.content || "");
}
} catch (error) {
console.log(`Error to create the file mount: ${error}`);
throw new TRPCError({
code: "BAD_REQUEST",
message: "Error to create the mount",
cause: error,
});
}
};
export const findMountById = async (mountId: string) => {
const mount = await db.query.mounts.findFirst({
where: eq(mounts.mountId, mountId),
with: {
application: true,
postgres: true,
mariadb: true,
mongo: true,
mysql: true,
redis: true,
compose: true,
},
});
if (!mount) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Mount not found",
});
}
return mount;
};
/**
 * Updates a mount inside a transaction; file mounts are re-materialized
 * (old file removed, new one written) after the row update.
 *
 * @throws TRPCError NOT_FOUND when the mount does not exist.
 */
export const updateMount = async (
	mountId: string,
	mountData: Partial<Mount>,
) => {
	return await db.transaction(async (transaction) => {
		// Bug fix: the update previously ran on `db` instead of the transaction
		// handle, so it escaped the surrounding transaction entirely.
		const mount = await transaction
			.update(mounts)
			.set({
				...mountData,
			})
			.where(eq(mounts.mountId, mountId))
			.returning()
			.then((value) => value[0]);
		if (!mount) {
			throw new TRPCError({
				code: "NOT_FOUND",
				message: "Mount not found",
			});
		}
		if (mount.type === "file") {
			await deleteFileMount(mountId);
			await createFileMount(mountId);
		}
		return mount;
	});
};
/**
 * Lists all mounts attached to the service of the given type.
 *
 * @throws Error for an unrecognized service type.
 */
export const findMountsByApplicationId = async (
	serviceId: string,
	serviceType: ServiceType,
) => {
	const sqlChunks: SQL[] = [];
	switch (serviceType) {
		case "application":
			sqlChunks.push(eq(mounts.applicationId, serviceId));
			break;
		case "postgres":
			sqlChunks.push(eq(mounts.postgresId, serviceId));
			break;
		case "mariadb":
			sqlChunks.push(eq(mounts.mariadbId, serviceId));
			break;
		case "mongo":
			sqlChunks.push(eq(mounts.mongoId, serviceId));
			break;
		case "mysql":
			sqlChunks.push(eq(mounts.mysqlId, serviceId));
			break;
		case "redis":
			sqlChunks.push(eq(mounts.redisId, serviceId));
			break;
		case "compose":
			// Previously missing: compose services also own mounts (createMount
			// has a composeId branch), but querying them hit the throw below.
			sqlChunks.push(eq(mounts.composeId, serviceId));
			break;
		default:
			throw new Error(`Unknown service type: ${serviceType}`);
	}
	const mount = await db.query.mounts.findMany({
		where: sql.join(sqlChunks, sql.raw(" ")),
	});
	return mount;
};
export const deleteMount = async (mountId: string) => {
const { type } = await findMountById(mountId);
if (type === "file") {
await deleteFileMount(mountId);
}
const deletedMount = await db
.delete(mounts)
.where(eq(mounts.mountId, mountId))
.returning();
return deletedMount[0];
};
/**
 * Best-effort removal of a file mount's backing file (local or remote).
 * Failures are logged but never propagated, so callers can still proceed
 * with deleting or replacing the mount row.
 */
export const deleteFileMount = async (mountId: string) => {
	const mount = await findMountById(mountId);
	if (!mount.filePath) return;
	const basePath = await getBaseFilesPath(mountId);
	const fullPath = path.join(basePath, mount.filePath);
	try {
		const serverId = await getServerId(mount);
		if (serverId) {
			const command = `rm -rf ${fullPath}`;
			await execAsyncRemote(serverId, command);
		} else {
			await removeFileOrDirectory(fullPath);
		}
	} catch (error) {
		// Was a silent `catch {}`: keep the best-effort contract but leave a
		// trace so failed cleanups are diagnosable.
		console.log(`Error removing file mount ${mountId}: ${error}`);
	}
};
/**
 * Resolves the absolute `files/` directory for the service that owns a mount.
 * Compose services live under COMPOSE_PATH; all other service types under
 * APPLICATIONS_PATH. When no owning service is attached, the path resolves
 * relative to empty base segments (matching the original behavior).
 */
export const getBaseFilesPath = async (mountId: string) => {
	const mount = await findMountById(mountId);
	const candidates = [
		["application", mount.application, "APPLICATIONS_PATH"],
		["postgres", mount.postgres, "APPLICATIONS_PATH"],
		["mariadb", mount.mariadb, "APPLICATIONS_PATH"],
		["mongo", mount.mongo, "APPLICATIONS_PATH"],
		["mysql", mount.mysql, "APPLICATIONS_PATH"],
		["redis", mount.redis, "APPLICATIONS_PATH"],
		["compose", mount.compose, "COMPOSE_PATH"],
	] as const;
	let absoluteBasePath = "";
	let appName = "";
	for (const [serviceType, service, pathKey] of candidates) {
		if (mount.serviceType === serviceType && service) {
			// paths() switches between host paths and remote-server paths.
			absoluteBasePath = path.resolve(paths(!!service.serverId)[pathKey]);
			appName = service.appName;
			break;
		}
	}
	return path.join(absoluteBasePath, appName, "files");
};
type MountNested = Awaited<ReturnType<typeof findMountById>>;
/**
 * Returns the serverId of the service owning this mount, or null when the
 * mount is local (no owning service, or the service has no server).
 */
export const getServerId = async (mount: MountNested) => {
	const serviceByType = {
		application: mount.application,
		postgres: mount.postgres,
		mariadb: mount.mariadb,
		mongo: mount.mongo,
		mysql: mount.mysql,
		redis: mount.redis,
		compose: mount.compose,
	};
	const service =
		serviceByType[mount.serviceType as keyof typeof serviceByType];
	// `|| null` (not `??`) to mirror the original truthiness check.
	return service?.serverId || null;
};

View File

@@ -1,145 +0,0 @@
import { generateRandomPassword } from "@/server/auth/random-password";
import { db } from "@/server/db";
import { type apiCreateMySql, backups, mysql } from "@/server/db/schema";
import { generateAppName } from "@/server/db/schema";
import { buildMysql } from "@/server/utils/databases/mysql";
import { pullImage } from "@/server/utils/docker/utils";
import { generatePassword } from "@/templates/utils";
import { TRPCError } from "@trpc/server";
import { eq, getTableColumns } from "drizzle-orm";
import { validUniqueServerAppName } from "./project";
import { execAsyncRemote } from "@/server/utils/process/execAsync";
export type MySql = typeof mysql.$inferSelect;
/**
 * Creates a mysql service. The provided appName gets a random 6-char suffix
 * for uniqueness; when omitted a name is generated. Missing passwords fall
 * back to random values.
 *
 * @throws TRPCError CONFLICT when the resulting appName is already taken.
 */
export const createMysql = async (input: typeof apiCreateMySql._type) => {
	// Bug fix: `` `${...}-${...}` || generateAppName(...) `` never fell back
	// (a template literal is always truthy), producing names like
	// "undefined-abc123" when appName was missing.
	input.appName = input.appName
		? `${input.appName}-${generatePassword(6)}`
		: generateAppName("mysql");
	const valid = await validUniqueServerAppName(input.appName);
	if (!valid) {
		throw new TRPCError({
			code: "CONFLICT",
			message: "Service with this 'AppName' already exists",
		});
	}
	const newMysql = await db
		.insert(mysql)
		.values({
			...input,
			databasePassword: input.databasePassword
				? input.databasePassword
				: (await generateRandomPassword()).randomPassword,
			databaseRootPassword: input.databaseRootPassword
				? input.databaseRootPassword
				: (await generateRandomPassword()).randomPassword,
		})
		.returning()
		.then((value) => value[0]);
	if (!newMysql) {
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: "Error input: Inserting mysql database",
		});
	}
	return newMysql;
};
// https://github.com/drizzle-team/drizzle-orm/discussions/1483#discussioncomment-7523881
/**
 * Loads a mysql service with project, mounts, server and backup destinations.
 *
 * @throws TRPCError NOT_FOUND when no row matches.
 */
export const findMySqlById = async (mysqlId: string) => {
	const found = await db.query.mysql.findFirst({
		where: eq(mysql.mysqlId, mysqlId),
		with: {
			project: true,
			mounts: true,
			server: true,
			backups: {
				with: {
					destination: true,
				},
			},
		},
	});
	if (found) {
		return found;
	}
	throw new TRPCError({
		code: "NOT_FOUND",
		message: "MySql not found",
	});
};
/** Applies a partial update to a mysql row and returns the updated record. */
export const updateMySqlById = async (
	mysqlId: string,
	mysqlData: Partial<MySql>,
) => {
	const [updated] = await db
		.update(mysql)
		.set({ ...mysqlData })
		.where(eq(mysql.mysqlId, mysqlId))
		.returning();
	return updated;
};
/**
 * Resolves the mysql service that owns the given backup.
 *
 * @throws TRPCError NOT_FOUND when the backup has no mysql owner.
 */
export const findMySqlByBackupId = async (backupId: string) => {
	const rows = await db
		.select({ ...getTableColumns(mysql) })
		.from(mysql)
		.innerJoin(backups, eq(mysql.mysqlId, backups.mysqlId))
		.where(eq(backups.backupId, backupId))
		.limit(1);
	const found = rows?.[0];
	if (!found) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Mysql not found",
		});
	}
	return found;
};
/** Deletes a mysql row and returns the removed record (if any). */
export const removeMySqlById = async (mysqlId: string) => {
	const [removed] = await db
		.delete(mysql)
		.where(eq(mysql.mysqlId, mysqlId))
		.returning();
	return removed;
};
/**
 * Pulls the configured docker image (locally or on the remote server) and
 * (re)builds the mysql service, recording applicationStatus on the row.
 *
 * @throws TRPCError INTERNAL_SERVER_ERROR when pull or build fails.
 */
export const deployMySql = async (mysqlId: string) => {
	// Local renamed from `mysql` to stop shadowing the imported table schema.
	const service = await findMySqlById(mysqlId);
	try {
		if (service.serverId) {
			await execAsyncRemote(
				service.serverId,
				`docker pull ${service.dockerImage}`,
			);
		} else {
			await pullImage(service.dockerImage);
		}
		await buildMysql(service);
		await updateMySqlById(mysqlId, {
			applicationStatus: "done",
		});
	} catch (error) {
		await updateMySqlById(mysqlId, {
			applicationStatus: "error",
		});
		// Fixed message: the error was previously concatenated with no separator.
		throw new TRPCError({
			code: "INTERNAL_SERVER_ERROR",
			message: `Error on deploy mysql: ${error}`,
		});
	}
	return service;
};

View File

@@ -1,409 +0,0 @@
import { db } from "@/server/db";
import {
type apiCreateDiscord,
type apiCreateEmail,
type apiCreateSlack,
type apiCreateTelegram,
type apiUpdateDiscord,
type apiUpdateEmail,
type apiUpdateSlack,
type apiUpdateTelegram,
discord,
email,
notifications,
slack,
telegram,
} from "@/server/db/schema";
import { TRPCError } from "@trpc/server";
import { eq } from "drizzle-orm";
export type Notification = typeof notifications.$inferSelect;
/**
 * Creates a slack channel config plus its notification row in one transaction.
 *
 * @returns the inserted notification row.
 */
export const createSlackNotification = async (
	input: typeof apiCreateSlack._type,
) => {
	// Bug fix: the transaction's result was discarded, so the function always
	// resolved to undefined; propagate the created notification instead.
	return await db.transaction(async (tx) => {
		const newSlack = await tx
			.insert(slack)
			.values({
				channel: input.channel,
				webhookUrl: input.webhookUrl,
			})
			.returning()
			.then((value) => value[0]);
		if (!newSlack) {
			throw new TRPCError({
				code: "BAD_REQUEST",
				message: "Error input: Inserting slack",
			});
		}
		const newDestination = await tx
			.insert(notifications)
			.values({
				slackId: newSlack.slackId,
				name: input.name,
				appDeploy: input.appDeploy,
				appBuildError: input.appBuildError,
				databaseBackup: input.databaseBackup,
				dokployRestart: input.dokployRestart,
				dockerCleanup: input.dockerCleanup,
				notificationType: "slack",
			})
			.returning()
			.then((value) => value[0]);
		if (!newDestination) {
			throw new TRPCError({
				code: "BAD_REQUEST",
				message: "Error input: Inserting notification",
			});
		}
		return newDestination;
	});
};
/**
 * Updates a notification's shared flags and its slack channel config.
 *
 * @returns the updated notification row.
 */
export const updateSlackNotification = async (
	input: typeof apiUpdateSlack._type,
) => {
	// Bug fix: the transaction's result was discarded; return it to callers.
	return await db.transaction(async (tx) => {
		const newDestination = await tx
			.update(notifications)
			.set({
				name: input.name,
				appDeploy: input.appDeploy,
				appBuildError: input.appBuildError,
				databaseBackup: input.databaseBackup,
				dokployRestart: input.dokployRestart,
				dockerCleanup: input.dockerCleanup,
			})
			.where(eq(notifications.notificationId, input.notificationId))
			.returning()
			.then((value) => value[0]);
		if (!newDestination) {
			throw new TRPCError({
				code: "BAD_REQUEST",
				message: "Error Updating notification",
			});
		}
		await tx
			.update(slack)
			.set({
				channel: input.channel,
				webhookUrl: input.webhookUrl,
			})
			.where(eq(slack.slackId, input.slackId))
			.returning()
			.then((value) => value[0]);
		return newDestination;
	});
};
/**
 * Creates a telegram bot config plus its notification row in one transaction.
 *
 * @returns the inserted notification row.
 */
export const createTelegramNotification = async (
	input: typeof apiCreateTelegram._type,
) => {
	// Bug fix: the transaction's result was discarded; return it to callers.
	return await db.transaction(async (tx) => {
		const newTelegram = await tx
			.insert(telegram)
			.values({
				botToken: input.botToken,
				chatId: input.chatId,
			})
			.returning()
			.then((value) => value[0]);
		if (!newTelegram) {
			throw new TRPCError({
				code: "BAD_REQUEST",
				message: "Error input: Inserting telegram",
			});
		}
		const newDestination = await tx
			.insert(notifications)
			.values({
				telegramId: newTelegram.telegramId,
				name: input.name,
				appDeploy: input.appDeploy,
				appBuildError: input.appBuildError,
				databaseBackup: input.databaseBackup,
				dokployRestart: input.dokployRestart,
				dockerCleanup: input.dockerCleanup,
				notificationType: "telegram",
			})
			.returning()
			.then((value) => value[0]);
		if (!newDestination) {
			throw new TRPCError({
				code: "BAD_REQUEST",
				message: "Error input: Inserting notification",
			});
		}
		return newDestination;
	});
};
/**
 * Updates a notification's shared flags and its telegram bot config.
 *
 * @returns the updated notification row.
 */
export const updateTelegramNotification = async (
	input: typeof apiUpdateTelegram._type,
) => {
	// Bug fix: the transaction's result was discarded; return it to callers.
	return await db.transaction(async (tx) => {
		const newDestination = await tx
			.update(notifications)
			.set({
				name: input.name,
				appDeploy: input.appDeploy,
				appBuildError: input.appBuildError,
				databaseBackup: input.databaseBackup,
				dokployRestart: input.dokployRestart,
				dockerCleanup: input.dockerCleanup,
			})
			.where(eq(notifications.notificationId, input.notificationId))
			.returning()
			.then((value) => value[0]);
		if (!newDestination) {
			throw new TRPCError({
				code: "BAD_REQUEST",
				message: "Error Updating notification",
			});
		}
		await tx
			.update(telegram)
			.set({
				botToken: input.botToken,
				chatId: input.chatId,
			})
			.where(eq(telegram.telegramId, input.telegramId))
			.returning()
			.then((value) => value[0]);
		return newDestination;
	});
};
/**
 * Creates a discord webhook config plus its notification row in one
 * transaction.
 *
 * @returns the inserted notification row.
 */
export const createDiscordNotification = async (
	input: typeof apiCreateDiscord._type,
) => {
	// Bug fix: the transaction's result was discarded; return it to callers.
	return await db.transaction(async (tx) => {
		const newDiscord = await tx
			.insert(discord)
			.values({
				webhookUrl: input.webhookUrl,
			})
			.returning()
			.then((value) => value[0]);
		if (!newDiscord) {
			throw new TRPCError({
				code: "BAD_REQUEST",
				message: "Error input: Inserting discord",
			});
		}
		const newDestination = await tx
			.insert(notifications)
			.values({
				discordId: newDiscord.discordId,
				name: input.name,
				appDeploy: input.appDeploy,
				appBuildError: input.appBuildError,
				databaseBackup: input.databaseBackup,
				dokployRestart: input.dokployRestart,
				dockerCleanup: input.dockerCleanup,
				notificationType: "discord",
			})
			.returning()
			.then((value) => value[0]);
		if (!newDestination) {
			throw new TRPCError({
				code: "BAD_REQUEST",
				message: "Error input: Inserting notification",
			});
		}
		return newDestination;
	});
};
/**
 * Updates a notification's shared flags and its discord webhook config.
 *
 * @returns the updated notification row.
 */
export const updateDiscordNotification = async (
	input: typeof apiUpdateDiscord._type,
) => {
	// Bug fix: the transaction's result was discarded; return it to callers.
	return await db.transaction(async (tx) => {
		const newDestination = await tx
			.update(notifications)
			.set({
				name: input.name,
				appDeploy: input.appDeploy,
				appBuildError: input.appBuildError,
				databaseBackup: input.databaseBackup,
				dokployRestart: input.dokployRestart,
				dockerCleanup: input.dockerCleanup,
			})
			.where(eq(notifications.notificationId, input.notificationId))
			.returning()
			.then((value) => value[0]);
		if (!newDestination) {
			throw new TRPCError({
				code: "BAD_REQUEST",
				message: "Error Updating notification",
			});
		}
		await tx
			.update(discord)
			.set({
				webhookUrl: input.webhookUrl,
			})
			.where(eq(discord.discordId, input.discordId))
			.returning()
			.then((value) => value[0]);
		return newDestination;
	});
};
/**
 * Creates an SMTP email config plus its notification row in one transaction.
 *
 * @returns the inserted notification row.
 */
export const createEmailNotification = async (
	input: typeof apiCreateEmail._type,
) => {
	// Bug fix: the transaction's result was discarded; return it to callers.
	return await db.transaction(async (tx) => {
		const newEmail = await tx
			.insert(email)
			.values({
				smtpServer: input.smtpServer,
				smtpPort: input.smtpPort,
				username: input.username,
				password: input.password,
				fromAddress: input.fromAddress,
				toAddresses: input.toAddresses,
			})
			.returning()
			.then((value) => value[0]);
		if (!newEmail) {
			throw new TRPCError({
				code: "BAD_REQUEST",
				message: "Error input: Inserting email",
			});
		}
		const newDestination = await tx
			.insert(notifications)
			.values({
				emailId: newEmail.emailId,
				name: input.name,
				appDeploy: input.appDeploy,
				appBuildError: input.appBuildError,
				databaseBackup: input.databaseBackup,
				dokployRestart: input.dokployRestart,
				dockerCleanup: input.dockerCleanup,
				notificationType: "email",
			})
			.returning()
			.then((value) => value[0]);
		if (!newDestination) {
			throw new TRPCError({
				code: "BAD_REQUEST",
				message: "Error input: Inserting notification",
			});
		}
		return newDestination;
	});
};
/**
 * Updates a notification's shared flags and its SMTP email config.
 *
 * @returns the updated notification row.
 */
export const updateEmailNotification = async (
	input: typeof apiUpdateEmail._type,
) => {
	// Bug fix: the transaction's result was discarded; return it to callers.
	return await db.transaction(async (tx) => {
		const newDestination = await tx
			.update(notifications)
			.set({
				name: input.name,
				appDeploy: input.appDeploy,
				appBuildError: input.appBuildError,
				databaseBackup: input.databaseBackup,
				dokployRestart: input.dokployRestart,
				dockerCleanup: input.dockerCleanup,
			})
			.where(eq(notifications.notificationId, input.notificationId))
			.returning()
			.then((value) => value[0]);
		if (!newDestination) {
			throw new TRPCError({
				code: "BAD_REQUEST",
				message: "Error Updating notification",
			});
		}
		await tx
			.update(email)
			.set({
				smtpServer: input.smtpServer,
				smtpPort: input.smtpPort,
				username: input.username,
				password: input.password,
				fromAddress: input.fromAddress,
				toAddresses: input.toAddresses,
			})
			.where(eq(email.emailId, input.emailId))
			.returning()
			.then((value) => value[0]);
		return newDestination;
	});
};
export const findNotificationById = async (notificationId: string) => {
const notification = await db.query.notifications.findFirst({
where: eq(notifications.notificationId, notificationId),
with: {
slack: true,
telegram: true,
discord: true,
email: true,
},
});
if (!notification) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Notification not found",
});
}
return notification;
};
/** Deletes a notification row and returns the removed record (if any). */
export const removeNotificationById = async (notificationId: string) => {
	const [removed] = await db
		.delete(notifications)
		.where(eq(notifications.notificationId, notificationId))
		.returning();
	return removed;
};
/**
 * Applies a partial update to a notification row.
 * (The `updateDestinationById` name is kept for caller compatibility even
 * though it operates on the notifications table.)
 */
export const updateDestinationById = async (
	notificationId: string,
	notificationData: Partial<Notification>,
) => {
	const [updated] = await db
		.update(notifications)
		.set({ ...notificationData })
		.where(eq(notifications.notificationId, notificationId))
		.returning();
	return updated;
};

View File

@@ -1,62 +0,0 @@
import { db } from "@/server/db";
import { type apiCreatePort, ports } from "@/server/db/schema";
import { TRPCError } from "@trpc/server";
import { eq } from "drizzle-orm";
export type Port = typeof ports.$inferSelect;
/**
 * Inserts a port mapping.
 *
 * @throws TRPCError BAD_REQUEST when the insert yields no row.
 */
export const createPort = async (input: typeof apiCreatePort._type) => {
	const [newPort] = await db
		.insert(ports)
		.values({ ...input })
		.returning();
	if (!newPort) {
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: "Error input: Inserting port",
		});
	}
	return newPort;
};
export const finPortById = async (portId: string) => {
const result = await db.query.ports.findFirst({
where: eq(ports.portId, portId),
});
if (!result) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Port not found",
});
}
return result;
};
/** Deletes a port row and returns the removed record (if any). */
export const removePortById = async (portId: string) => {
	const [removed] = await db
		.delete(ports)
		.where(eq(ports.portId, portId))
		.returning();
	return removed;
};
/** Applies a partial update to a port row and returns the updated record. */
export const updatePortById = async (
	portId: string,
	portData: Partial<Port>,
) => {
	const [updated] = await db
		.update(ports)
		.set({ ...portData })
		.where(eq(ports.portId, portId))
		.returning();
	return updated;
};

View File

@@ -1,143 +0,0 @@
import { generateRandomPassword } from "@/server/auth/random-password";
import { db } from "@/server/db";
import { type apiCreatePostgres, backups, postgres } from "@/server/db/schema";
import { generateAppName } from "@/server/db/schema";
import { buildPostgres } from "@/server/utils/databases/postgres";
import { pullImage } from "@/server/utils/docker/utils";
import { generatePassword } from "@/templates/utils";
import { TRPCError } from "@trpc/server";
import { eq, getTableColumns } from "drizzle-orm";
import { validUniqueServerAppName } from "./project";
import { execAsyncRemote } from "@/server/utils/process/execAsync";
export type Postgres = typeof postgres.$inferSelect;
/**
 * Creates a postgres service. The provided appName gets a random 6-char
 * suffix for uniqueness; when omitted a name is generated. A missing
 * password falls back to a random value.
 *
 * @throws TRPCError CONFLICT when the resulting appName is already taken.
 */
export const createPostgres = async (input: typeof apiCreatePostgres._type) => {
	// Bug fix: `` `${...}-${...}` || generateAppName(...) `` never fell back
	// (a template literal is always truthy), producing names like
	// "undefined-abc123" when appName was missing.
	input.appName = input.appName
		? `${input.appName}-${generatePassword(6)}`
		: generateAppName("postgres");
	const valid = await validUniqueServerAppName(input.appName);
	if (!valid) {
		throw new TRPCError({
			code: "CONFLICT",
			message: "Service with this 'AppName' already exists",
		});
	}
	const newPostgres = await db
		.insert(postgres)
		.values({
			...input,
			databasePassword: input.databasePassword
				? input.databasePassword
				: (await generateRandomPassword()).randomPassword,
		})
		.returning()
		.then((value) => value[0]);
	if (!newPostgres) {
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: "Error input: Inserting postgresql database",
		});
	}
	return newPostgres;
};
/**
 * Loads a postgres service with project, mounts, server and backup
 * destinations.
 *
 * @throws TRPCError NOT_FOUND when no row matches.
 */
export const findPostgresById = async (postgresId: string) => {
	const found = await db.query.postgres.findFirst({
		where: eq(postgres.postgresId, postgresId),
		with: {
			project: true,
			mounts: true,
			server: true,
			backups: {
				with: {
					destination: true,
				},
			},
		},
	});
	if (found) {
		return found;
	}
	throw new TRPCError({
		code: "NOT_FOUND",
		message: "Postgres not found",
	});
};
/**
 * Resolves the postgres service that owns the given backup.
 *
 * @throws TRPCError NOT_FOUND when the backup has no postgres owner.
 */
export const findPostgresByBackupId = async (backupId: string) => {
	const rows = await db
		.select({ ...getTableColumns(postgres) })
		.from(postgres)
		.innerJoin(backups, eq(postgres.postgresId, backups.postgresId))
		.where(eq(backups.backupId, backupId))
		.limit(1);
	const found = rows?.[0];
	if (!found) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Postgres not found",
		});
	}
	return found;
};
/** Applies a partial update to a postgres row and returns the updated record. */
export const updatePostgresById = async (
	postgresId: string,
	postgresData: Partial<Postgres>,
) => {
	const [updated] = await db
		.update(postgres)
		.set({ ...postgresData })
		.where(eq(postgres.postgresId, postgresId))
		.returning();
	return updated;
};
/** Deletes a postgres row and returns the removed record (if any). */
export const removePostgresById = async (postgresId: string) => {
	const [removed] = await db
		.delete(postgres)
		.where(eq(postgres.postgresId, postgresId))
		.returning();
	return removed;
};
/**
 * Pulls the configured docker image (locally or on the remote server) and
 * (re)builds the postgres service, recording applicationStatus on the row.
 *
 * @throws TRPCError INTERNAL_SERVER_ERROR when pull or build fails.
 */
export const deployPostgres = async (postgresId: string) => {
	// Local renamed from `postgres` to stop shadowing the imported table schema.
	const service = await findPostgresById(postgresId);
	try {
		// Removed dead code: an unused `promises` array and an unused
		// `result` binding for the remote exec.
		if (service.serverId) {
			await execAsyncRemote(
				service.serverId,
				`docker pull ${service.dockerImage}`,
			);
		} else {
			await pullImage(service.dockerImage);
		}
		await buildPostgres(service);
		await updatePostgresById(postgresId, {
			applicationStatus: "done",
		});
	} catch (error) {
		await updatePostgresById(postgresId, {
			applicationStatus: "error",
		});
		// Fixed message: the error was previously concatenated with no separator.
		throw new TRPCError({
			code: "INTERNAL_SERVER_ERROR",
			message: `Error on deploy postgres: ${error}`,
		});
	}
	return service;
};

View File

@@ -1,124 +0,0 @@
import { db } from "@/server/db";
import {
type apiCreateProject,
applications,
mariadb,
mongo,
mysql,
postgres,
projects,
redis,
} from "@/server/db/schema";
import { TRPCError } from "@trpc/server";
import { eq } from "drizzle-orm";
export type Project = typeof projects.$inferSelect;
/**
 * Creates a project owned by the given admin.
 *
 * @throws TRPCError BAD_REQUEST when the insert yields no row.
 */
export const createProject = async (
	input: typeof apiCreateProject._type,
	adminId: string,
) => {
	const [newProject] = await db
		.insert(projects)
		.values({ ...input, adminId })
		.returning();
	if (!newProject) {
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: "Error to create the project",
		});
	}
	return newProject;
};
export const findProjectById = async (projectId: string) => {
const project = await db.query.projects.findFirst({
where: eq(projects.projectId, projectId),
with: {
applications: true,
mariadb: true,
mongo: true,
mysql: true,
postgres: true,
redis: true,
compose: true,
},
});
if (!project) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Project not found",
});
}
return project;
};
export const deleteProject = async (projectId: string) => {
const project = await db
.delete(projects)
.where(eq(projects.projectId, projectId))
.returning()
.then((value) => value[0]);
return project;
};
/** Applies a partial update to a project row and returns the updated record. */
export const updateProjectById = async (
	projectId: string,
	projectData: Partial<Project>,
) => {
	const [updated] = await db
		.update(projects)
		.set({ ...projectData })
		.where(eq(projects.projectId, projectId))
		.returning();
	return updated;
};
/**
 * Returns true when no service (application or database) in any project
 * already uses `appName`.
 */
export const validUniqueServerAppName = async (appName: string) => {
	// Each relation is filtered to rows matching appName, so a non-empty
	// collection means the name is already taken by that service type.
	const projectRows = await db.query.projects.findMany({
		with: {
			applications: { where: eq(applications.appName, appName) },
			mariadb: { where: eq(mariadb.appName, appName) },
			mongo: { where: eq(mongo.appName, appName) },
			mysql: { where: eq(mysql.appName, appName) },
			postgres: { where: eq(postgres.appName, appName) },
			redis: { where: eq(redis.appName, appName) },
		},
	});
	const taken = projectRows.some(
		(project) =>
			project.applications.length > 0 ||
			project.mariadb.length > 0 ||
			project.mongo.length > 0 ||
			project.mysql.length > 0 ||
			project.postgres.length > 0 ||
			project.redis.length > 0,
	);
	return !taken;
};

View File

@@ -1,123 +0,0 @@
import { db } from "@/server/db";
import { type apiCreateRedirect, redirects } from "@/server/db/schema";
import {
createRedirectMiddleware,
removeRedirectMiddleware,
updateRedirectMiddleware,
} from "@/server/utils/traefik/redirect";
import { TRPCError } from "@trpc/server";
import { desc, eq } from "drizzle-orm";
import type { z } from "zod";
import { findApplicationById } from "./application";
export type Redirect = typeof redirects.$inferSelect;
export const findRedirectById = async (redirectId: string) => {
const application = await db.query.redirects.findFirst({
where: eq(redirects.redirectId, redirectId),
});
if (!application) {
throw new TRPCError({
code: "NOT_FOUND",
message: "Redirect not found",
});
}
return application;
};
/**
 * Creates a redirect row and writes its traefik middleware inside one
 * transaction.
 *
 * @returns true on success.
 * @throws TRPCError BAD_REQUEST when the insert or middleware write fails.
 */
export const createRedirect = async (
	redirectData: z.infer<typeof apiCreateRedirect>,
) => {
	try {
		await db.transaction(async (tx) => {
			const redirect = await tx
				.insert(redirects)
				.values({
					...redirectData,
				})
				.returning()
				.then((res) => res[0]);
			if (!redirect) {
				throw new TRPCError({
					code: "BAD_REQUEST",
					message: "Error to create the redirect",
				});
			}
			const application = await findApplicationById(redirect.applicationId);
			// Bug fix: this call was not awaited, so middleware-write failures
			// escaped the transaction and the catch below as unhandled
			// rejections.
			await createRedirectMiddleware(application, redirect);
		});
		return true;
	} catch (error) {
		console.log(error);
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: "Error to create this redirect",
			cause: error,
		});
	}
};
/**
 * Deletes a redirect and tears down its traefik middleware.
 *
 * @throws TRPCError BAD_REQUEST when removal fails (wrapping NOT_FOUND too).
 */
export const removeRedirectById = async (redirectId: string) => {
	try {
		const [removed] = await db
			.delete(redirects)
			.where(eq(redirects.redirectId, redirectId))
			.returning();
		if (!removed) {
			throw new TRPCError({
				code: "NOT_FOUND",
				message: "Redirect not found",
			});
		}
		const application = await findApplicationById(removed.applicationId);
		await removeRedirectMiddleware(application, removed);
		return removed;
	} catch (error) {
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: "Error to remove this redirect",
			cause: error,
		});
	}
};
/**
 * Updates a redirect row and re-applies its traefik middleware.
 *
 * @throws TRPCError BAD_REQUEST when the update or middleware write fails.
 */
export const updateRedirectById = async (
	redirectId: string,
	redirectData: Partial<Redirect>,
) => {
	try {
		const redirect = await db
			.update(redirects)
			.set({
				...redirectData,
			})
			.where(eq(redirects.redirectId, redirectId))
			.returning()
			.then((res) => res[0]);
		if (!redirect) {
			throw new TRPCError({
				code: "NOT_FOUND",
				message: "Redirect not found",
			});
		}
		const application = await findApplicationById(redirect.applicationId);
		await updateRedirectMiddleware(application, redirect);
		return redirect;
	} catch (error) {
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: "Error to update this redirect",
			// Consistency fix: propagate the cause like createRedirect and
			// removeRedirectById do, instead of swallowing the original error.
			cause: error,
		});
	}
};

View File

@@ -1,118 +0,0 @@
import { generateRandomPassword } from "@/server/auth/random-password";
import { db } from "@/server/db";
import { type apiCreateRedis, redis } from "@/server/db/schema";
import { generateAppName } from "@/server/db/schema";
import { buildRedis } from "@/server/utils/databases/redis";
import { pullImage } from "@/server/utils/docker/utils";
import { generatePassword } from "@/templates/utils";
import { TRPCError } from "@trpc/server";
import { eq } from "drizzle-orm";
import { validUniqueServerAppName } from "./project";
import { execAsyncRemote } from "@/server/utils/process/execAsync";
export type Redis = typeof redis.$inferSelect;
// https://github.com/drizzle-team/drizzle-orm/discussions/1483#discussioncomment-7523881
/**
 * Creates a redis service. The provided appName gets a random 6-char suffix
 * for uniqueness; when omitted a name is generated. A missing password falls
 * back to a random value.
 *
 * @throws TRPCError CONFLICT when the resulting appName is already taken.
 */
export const createRedis = async (input: typeof apiCreateRedis._type) => {
	// Bug fix: `` `${...}-${...}` || generateAppName(...) `` never fell back
	// (a template literal is always truthy), producing names like
	// "undefined-abc123" when appName was missing.
	input.appName = input.appName
		? `${input.appName}-${generatePassword(6)}`
		: generateAppName("redis");
	const valid = await validUniqueServerAppName(input.appName);
	if (!valid) {
		throw new TRPCError({
			code: "CONFLICT",
			message: "Service with this 'AppName' already exists",
		});
	}
	const newRedis = await db
		.insert(redis)
		.values({
			...input,
			databasePassword: input.databasePassword
				? input.databasePassword
				: (await generateRandomPassword()).randomPassword,
		})
		.returning()
		.then((value) => value[0]);
	if (!newRedis) {
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: "Error input: Inserting redis database",
		});
	}
	return newRedis;
};
/**
 * Loads a redis service with its project, mounts and server attached.
 *
 * @throws TRPCError NOT_FOUND when no row matches.
 */
export const findRedisById = async (redisId: string) => {
	const found = await db.query.redis.findFirst({
		where: eq(redis.redisId, redisId),
		with: {
			project: true,
			mounts: true,
			server: true,
		},
	});
	if (found) {
		return found;
	}
	throw new TRPCError({
		code: "NOT_FOUND",
		message: "Redis not found",
	});
};
/** Applies a partial update to a redis row and returns the updated record. */
export const updateRedisById = async (
	redisId: string,
	redisData: Partial<Redis>,
) => {
	const [updated] = await db
		.update(redis)
		.set({ ...redisData })
		.where(eq(redis.redisId, redisId))
		.returning();
	return updated;
};
/** Deletes a redis row and returns the removed record (if any). */
export const removeRedisById = async (redisId: string) => {
	const [removed] = await db
		.delete(redis)
		.where(eq(redis.redisId, redisId))
		.returning();
	return removed;
};
/**
 * Pulls the configured docker image (locally or on the remote server) and
 * (re)builds the redis service, recording applicationStatus on the row.
 *
 * @throws TRPCError INTERNAL_SERVER_ERROR when pull or build fails.
 */
export const deployRedis = async (redisId: string) => {
	// Local renamed from `redis` to stop shadowing the imported table schema.
	const service = await findRedisById(redisId);
	try {
		if (service.serverId) {
			await execAsyncRemote(
				service.serverId,
				`docker pull ${service.dockerImage}`,
			);
		} else {
			await pullImage(service.dockerImage);
		}
		await buildRedis(service);
		await updateRedisById(redisId, {
			applicationStatus: "done",
		});
	} catch (error) {
		await updateRedisById(redisId, {
			applicationStatus: "error",
		});
		// Fixed message: the error was previously concatenated with no separator.
		throw new TRPCError({
			code: "INTERNAL_SERVER_ERROR",
			message: `Error on deploy redis: ${error}`,
		});
	}
	return service;
};

View File

@@ -1,124 +0,0 @@
import { db } from "@/server/db";
import { type apiCreateRegistry, registry } from "@/server/db/schema";
import { initializeRegistry } from "@/server/setup/registry-setup";
import { removeService } from "@/server/utils/docker/utils";
import { execAsync } from "@/server/utils/process/execAsync";
import {
manageRegistry,
removeSelfHostedRegistry,
} from "@/server/utils/traefik/registry";
import { TRPCError } from "@trpc/server";
import { eq } from "drizzle-orm";
import { findAdmin } from "./admin";
export type Registry = typeof registry.$inferSelect;
/**
 * Registers a docker registry for the admin account. Cloud registries are
 * logged into immediately so subsequent pushes/pulls are authenticated.
 *
 * @throws TRPCError BAD_REQUEST when the insert yields no row.
 */
export const createRegistry = async (input: typeof apiCreateRegistry._type) => {
	const admin = await findAdmin();
	return await db.transaction(async (tx) => {
		const newRegistry = await tx
			.insert(registry)
			.values({
				...input,
				adminId: admin.adminId,
			})
			.returning()
			.then((value) => value[0]);
		if (!newRegistry) {
			throw new TRPCError({
				code: "BAD_REQUEST",
				message: "Error input: Inserting registry",
			});
		}
		if (newRegistry.registryType === "cloud") {
			// Security fix: single-quote the credentials so passwords/usernames
			// containing shell metacharacters ($, ;, spaces, quotes, ...) are
			// neither interpreted by the shell nor break the command.
			const quote = (value: string) => `'${value.replace(/'/g, `'\\''`)}'`;
			const loginCommand = `echo ${quote(input.password)} | docker login ${input.registryUrl} --username ${quote(input.username)} --password-stdin`;
			await execAsync(loginCommand);
		}
		return newRegistry;
	});
};
/**
 * Deletes a registry; self-hosted registries also get their traefik config
 * and docker service torn down. Finishes by logging docker out of the
 * registry URL.
 *
 * @throws TRPCError NOT_FOUND when the registry does not exist,
 *         BAD_REQUEST on any other failure.
 */
export const removeRegistry = async (registryId: string) => {
  try {
    const response = await db
      .delete(registry)
      .where(eq(registry.registryId, registryId))
      .returning()
      .then((res) => res[0]);

    if (!response) {
      throw new TRPCError({
        code: "NOT_FOUND",
        message: "Registry not found",
      });
    }

    if (response.registryType === "selfHosted") {
      await removeSelfHostedRegistry();
      await removeService("dokploy-registry");
    }

    await execAsync(`docker logout ${response.registryUrl}`);
    return response;
  } catch (error) {
    // Fixed: the NOT_FOUND above was previously re-wrapped as BAD_REQUEST,
    // hiding the real error code from callers.
    if (error instanceof TRPCError) {
      throw error;
    }
    throw new TRPCError({
      code: "BAD_REQUEST",
      message: "Error to remove this registry",
      cause: error,
    });
  }
};
/**
 * Updates a registry row; when the result is a self-hosted registry, the
 * traefik config and the registry container are re-synced with the new data.
 *
 * @throws TRPCError BAD_REQUEST on failure (cause attached).
 */
export const updateRegistry = async (
  registryId: string,
  registryData: Partial<Registry>,
) => {
  try {
    const response = await db
      .update(registry)
      .set({
        ...registryData,
      })
      .where(eq(registry.registryId, registryId))
      .returning()
      .then((res) => res[0]);

    if (response?.registryType === "selfHosted") {
      await manageRegistry(response);
      await initializeRegistry(response.username, response.password);
    }

    return response;
  } catch (error) {
    throw new TRPCError({
      code: "BAD_REQUEST",
      message: "Error to update this registry",
      // Fixed: the underlying error was previously discarded, making
      // failures impossible to diagnose (siblings attach `cause`).
      cause: error,
    });
  }
};
// Looks up a registry by id; the password column is excluded so credentials
// never leave the server layer.
export const findRegistryById = async (registryId: string) => {
  const found = await db.query.registry.findFirst({
    where: eq(registry.registryId, registryId),
    columns: { password: false },
  });
  if (!found) {
    throw new TRPCError({
      code: "NOT_FOUND",
      message: "Registry not found",
    });
  }
  return found;
};
// Lists every configured registry (all columns, no filtering).
export const findAllRegistry = async () => {
  return db.query.registry.findMany();
};

View File

@@ -1,107 +0,0 @@
import { db } from "@/server/db";
import { type apiCreateSecurity, security } from "@/server/db/schema";
import {
createSecurityMiddleware,
removeSecurityMiddleware,
} from "@/server/utils/traefik/security";
import { TRPCError } from "@trpc/server";
import { eq } from "drizzle-orm";
import type { z } from "zod";
import { findApplicationById } from "./application";
export type Security = typeof security.$inferSelect;
// Fetches a security (basic-auth) entry by id or throws NOT_FOUND.
export const findSecurityById = async (securityId: string) => {
  // Renamed the misleading local (`application`) — the row is a security record.
  const record = await db.query.security.findFirst({
    where: eq(security.securityId, securityId),
  });
  if (!record) {
    throw new TRPCError({
      code: "NOT_FOUND",
      message: "Security not found",
    });
  }
  return record;
};
/**
 * Creates a security (basic-auth) entry for an application and registers the
 * matching traefik middleware inside one transaction.
 *
 * @throws TRPCError — NOT_FOUND from findApplicationById is preserved;
 *         anything else surfaces as BAD_REQUEST with the cause attached.
 */
export const createSecurity = async (
  data: z.infer<typeof apiCreateSecurity>,
) => {
  try {
    await db.transaction(async (tx) => {
      const application = await findApplicationById(data.applicationId);
      const securityResponse = await tx
        .insert(security)
        .values({
          ...data,
        })
        .returning()
        .then((res) => res[0]);
      if (!securityResponse) {
        throw new TRPCError({
          code: "BAD_REQUEST",
          message: "Error to create the security",
        });
      }
      // Sync traefik so the middleware takes effect immediately.
      await createSecurityMiddleware(application, securityResponse);
      return true;
    });
  } catch (error) {
    console.log(error);
    // Fixed: intentional TRPC errors (e.g. application NOT_FOUND) were
    // previously masked as a generic BAD_REQUEST.
    if (error instanceof TRPCError) {
      throw error;
    }
    throw new TRPCError({
      code: "BAD_REQUEST",
      message: "Error to create this security",
      cause: error,
    });
  }
};
/**
 * Deletes a security entry and removes its traefik middleware from the
 * owning application.
 *
 * @throws TRPCError NOT_FOUND when the entry does not exist,
 *         BAD_REQUEST (with cause) on any other failure.
 */
export const deleteSecurityById = async (securityId: string) => {
  try {
    const result = await db
      .delete(security)
      .where(eq(security.securityId, securityId))
      .returning()
      .then((res) => res[0]);
    if (!result) {
      throw new TRPCError({
        code: "NOT_FOUND",
        message: "Security not found",
      });
    }
    const application = await findApplicationById(result.applicationId);
    await removeSecurityMiddleware(application, result);
    return result;
  } catch (error) {
    // Fixed: NOT_FOUND was previously re-wrapped as BAD_REQUEST and the
    // underlying error discarded.
    if (error instanceof TRPCError) {
      throw error;
    }
    throw new TRPCError({
      code: "BAD_REQUEST",
      message: "Error to remove this security",
      cause: error,
    });
  }
};
/**
 * Patches a security entry and returns the updated row
 * (undefined when no row matched).
 *
 * @throws TRPCError BAD_REQUEST on failure (cause attached).
 */
export const updateSecurityById = async (
  securityId: string,
  data: Partial<Security>,
) => {
  try {
    const response = await db
      .update(security)
      .set({
        ...data,
      })
      .where(eq(security.securityId, securityId))
      .returning();
    return response[0];
  } catch (error) {
    throw new TRPCError({
      code: "BAD_REQUEST",
      message: "Error to update this security",
      // Fixed: the underlying error was previously discarded.
      cause: error,
    });
  }
};

View File

@@ -1,121 +0,0 @@
import { db } from "@/server/db";
import { type apiCreateServer, server } from "@/server/db/schema";
import { TRPCError } from "@trpc/server";
import { desc, eq } from "drizzle-orm";
export type Server = typeof server.$inferSelect;
// Registers a new remote server owned by the given admin; throws BAD_REQUEST
// when the insert yields no row.
export const createServer = async (
  input: typeof apiCreateServer._type,
  adminId: string,
) => {
  const [created] = await db
    .insert(server)
    .values({ ...input, adminId })
    .returning();
  if (!created) {
    throw new TRPCError({
      code: "BAD_REQUEST",
      message: "Error to create the server",
    });
  }
  return created;
};
// Fetches a server with its deployments and the SSH key used to reach the
// host; throws NOT_FOUND when absent.
export const findServerById = async (serverId: string) => {
  const found = await db.query.server.findFirst({
    where: eq(server.serverId, serverId),
    with: {
      deployments: true,
      sshKey: true,
    },
  });
  if (!found) {
    throw new TRPCError({
      code: "NOT_FOUND",
      message: "Server not found",
    });
  }
  return found;
};
export const findServersByAdminId = async (adminId: string) => {
const servers = await db.query.server.findMany({
where: eq(server.adminId, adminId),
orderBy: desc(server.createdAt),
});
return servers;
};
// Deletes a server row and returns the removed record
// (undefined when no row matched).
export const deleteServer = async (serverId: string) => {
  const [removed] = await db
    .delete(server)
    .where(eq(server.serverId, serverId))
    .returning();
  return removed;
};
// True when any service of any kind (app, compose, or database) is still
// attached to the server; false when the server does not exist.
export const haveActiveServices = async (serverId: string) => {
  const found = await db.query.server.findFirst({
    where: eq(server.serverId, serverId),
    with: {
      applications: true,
      compose: true,
      redis: true,
      mariadb: true,
      mongo: true,
      mysql: true,
      postgres: true,
    },
  });
  if (!found) {
    return false;
  }
  const serviceLists = [
    found.applications,
    found.compose,
    found.redis,
    found.mariadb,
    found.mongo,
    found.mysql,
    found.postgres,
  ];
  // Equivalent to summing all lengths and comparing the total to zero.
  return serviceLists.some((list) => (list?.length ?? 0) > 0);
};
// Patches a server row and returns the updated record
// (undefined when no row matched).
export const updateServerById = async (
  serverId: string,
  serverData: Partial<Server>,
) => {
  const [updated] = await db
    .update(server)
    .set({ ...serverData })
    .where(eq(server.serverId, serverId))
    .returning();
  return updated;
};
// Lists every registered remote server, no filtering.
export const getAllServers = async () => {
  return db.query.server.findMany();
};

View File

@@ -1,148 +0,0 @@
import { readdirSync } from "node:fs";
import { join } from "node:path";
import { docker } from "@/server/constants";
import { getServiceContainer } from "@/server/utils/docker/utils";
import { execAsyncRemote } from "@/server/utils/process/execAsync";
import packageInfo from "../../../package.json";
// Compares the locally pulled dokploy image against the image the running
// "dokploy" service was started from. Best-effort: any failure (docker
// unreachable, service missing) is reported as "no update".
const updateIsAvailable = async () => {
  try {
    const service = await getServiceContainer("dokploy");
    const localImage = await docker.getImage(getDokployImage()).inspect();
    // Differing image IDs mean a newer image has been pulled locally.
    return localImage.Id !== service?.ImageID;
  } catch (error) {
    return false;
  }
};
// Full image reference for the dokploy container; RELEASE_TAG pins a tag,
// empty/unset falls back to "latest".
export const getDokployImage = () => {
  const tag = process.env.RELEASE_TAG || "latest";
  return `dokploy/dokploy:${tag}`;
};
// Pulls the latest dokploy image and reports whether it now differs from the
// image the running service uses (i.e. a restart would pick up an update).
export const pullLatestRelease = async () => {
  try {
    const stream = await docker.pull(getDokployImage(), {});
    // docker.pull resolves before layers finish downloading; wait for the
    // progress stream to complete (or fail).
    await new Promise((resolve, reject) => {
      docker.modem.followProgress(stream, (err, res) =>
        err ? reject(err) : resolve(res),
      );
    });
    const newUpdateIsAvailable = await updateIsAvailable();
    return newUpdateIsAvailable;
  } catch (error) {}
  // NOTE(review): pull failures are swallowed silently and reported as
  // "no update available" — consider at least logging the error.
  return false;
};
// Version string taken from package.json at build time.
export const getDokployVersion = () => packageInfo.version;
// One node of the file tree produced by readDirectory.
interface TreeDataItem {
  // Absolute filesystem path; doubles as a unique id for UI trees.
  id: string;
  name: string;
  type: "file" | "directory";
  // Present only for directories.
  children?: TreeDataItem[];
}
/**
 * Builds a TreeDataItem tree for `dirPath`.
 *
 * With a `serverId`, the listing is produced on the remote host by a
 * generated shell script that prints the tree as JSON; otherwise the local
 * filesystem is walked iteratively.
 *
 * Fixed: removed a dead `readdirSync(dirPath, …)` whose result was never
 * used (it was immediately shadowed inside the walk loop), which also
 * performed a redundant directory read.
 */
export const readDirectory = async (
  dirPath: string,
  serverId?: string,
): Promise<TreeDataItem[]> => {
  if (serverId) {
    // NOTE(review): dirPath is interpolated unquoted into the remote script —
    // paths containing spaces or shell metacharacters will break it.
    const { stdout } = await execAsyncRemote(
      serverId,
      `
process_items() {
local parent_dir="$1"
local __resultvar=$2
local items_json=""
local first=true
for item in "$parent_dir"/*; do
[ -e "$item" ] || continue
process_item "$item" item_json
if [ "$first" = true ]; then
first=false
items_json="$item_json"
else
items_json="$items_json,$item_json"
fi
done
eval $__resultvar="'[$items_json]'"
}
process_item() {
local item_path="$1"
local __resultvar=$2
local item_name=$(basename "$item_path")
local escaped_name=$(echo "$item_name" | sed 's/"/\\"/g')
local escaped_path=$(echo "$item_path" | sed 's/"/\\"/g')
if [ -d "$item_path" ]; then
# Is directory
process_items "$item_path" children_json
local json='{"id":"'"$escaped_path"'","name":"'"$escaped_name"'","type":"directory","children":'"$children_json"'}'
else
# Is file
local json='{"id":"'"$escaped_path"'","name":"'"$escaped_name"'","type":"file"}'
fi
eval $__resultvar="'$json'"
}
root_dir=${dirPath}
process_items "$root_dir" json_output
echo "$json_output"
`,
    );
    const result = JSON.parse(stdout);
    return result;
  }

  // Iterative walk with an explicit stack; parentMap maps a directory path
  // to the `children` array of the node created for it, so entries found
  // later can be attached to the right parent.
  const stack = [dirPath];
  const result: TreeDataItem[] = [];
  const parentMap: Record<string, TreeDataItem[]> = {};

  while (stack.length > 0) {
    const currentPath = stack.pop();
    if (!currentPath) continue;
    const items = readdirSync(currentPath, { withFileTypes: true });
    const currentDirectoryResult: TreeDataItem[] = [];
    for (const item of items) {
      const fullPath = join(currentPath, item.name);
      if (item.isDirectory()) {
        stack.push(fullPath);
        const directoryItem: TreeDataItem = {
          id: fullPath,
          name: item.name,
          type: "directory",
          children: [],
        };
        currentDirectoryResult.push(directoryItem);
        parentMap[fullPath] = directoryItem.children as TreeDataItem[];
      } else {
        const fileItem: TreeDataItem = {
          id: fullPath,
          name: item.name,
          type: "file",
        };
        currentDirectoryResult.push(fileItem);
      }
    }
    if (parentMap[currentPath]) {
      parentMap[currentPath].push(...currentDirectoryResult);
    } else {
      // Entries directly under dirPath have no parent node — they are roots.
      result.push(...currentDirectoryResult);
    }
  }
  return result;
};

View File

@@ -1,78 +0,0 @@
import { db } from "@/server/db";
import {
type apiCreateSshKey,
type apiFindOneSshKey,
type apiRemoveSshKey,
type apiUpdateSshKey,
sshKeys,
} from "@/server/db/schema";
import { removeSSHKey, saveSSHKey } from "@/server/utils/filesystem/ssh";
import { TRPCError } from "@trpc/server";
import { eq } from "drizzle-orm";
/**
 * Creates an SSH key row (public part only) and persists both halves to disk
 * via saveSSHKey — the private key never touches the database.
 *
 * Fixed: the transaction result was discarded, so the function always
 * resolved `undefined`; it now returns the created row (backward-compatible,
 * previous callers ignored the value).
 *
 * @throws TRPCError BAD_REQUEST when the insert yields no row.
 */
export const createSshKey = async ({
  privateKey,
  ...input
}: typeof apiCreateSshKey._type) => {
  return await db.transaction(async (tx) => {
    const sshKey = await tx
      .insert(sshKeys)
      .values(input)
      .returning()
      .then((response) => response[0])
      // Log and fall through to the explicit BAD_REQUEST below.
      .catch((e) => console.error(e));
    if (!sshKey) {
      throw new TRPCError({
        code: "BAD_REQUEST",
        message: "Error to create the ssh key",
      });
    }
    saveSSHKey(sshKey.sshKeyId, sshKey.publicKey, privateKey);
    return sshKey;
  });
};
// Deletes an SSH key row, then its on-disk key material; returns the removed
// record (undefined when no row matched).
export const removeSSHKeyById = async (
  sshKeyId: (typeof apiRemoveSshKey._type)["sshKeyId"],
) => {
  const [removed] = await db
    .delete(sshKeys)
    .where(eq(sshKeys.sshKeyId, sshKeyId))
    .returning();
  removeSSHKey(sshKeyId);
  return removed;
};
// Patches an SSH key row and returns the updated record
// (undefined when no row matched).
export const updateSSHKeyById = async ({
  sshKeyId,
  ...input
}: typeof apiUpdateSshKey._type) => {
  const [updated] = await db
    .update(sshKeys)
    .set(input)
    .where(eq(sshKeys.sshKeyId, sshKeyId))
    .returning();
  return updated;
};
export const findSSHKeyById = async (
sshKeyId: (typeof apiFindOneSshKey._type)["sshKeyId"],
) => {
const sshKey = await db.query.sshKeys.findFirst({
where: eq(sshKeys.sshKeyId, sshKeyId),
});
if (!sshKey) {
throw new TRPCError({
code: "NOT_FOUND",
message: "SSH Key not found",
});
}
return sshKey;
};

View File

@@ -1,207 +0,0 @@
import { db } from "@/server/db";
import { users } from "@/server/db/schema";
import { TRPCError } from "@trpc/server";
import { eq } from "drizzle-orm";
export type User = typeof users.$inferSelect;
export const findUserById = async (userId: string) => {
const user = await db.query.users.findFirst({
where: eq(users.userId, userId),
});
if (!user) {
throw new TRPCError({
code: "NOT_FOUND",
message: "User not found",
});
}
return user;
};
export const findUserByAuthId = async (authId: string) => {
const user = await db.query.users.findFirst({
where: eq(users.authId, authId),
with: {
auth: true,
},
});
if (!user) {
throw new TRPCError({
code: "NOT_FOUND",
message: "User not found",
});
}
return user;
};
/**
 * Lists every user with its auth record; the auth `secret` column is
 * stripped so it never reaches clients.
 *
 * Fixed: the local was named `users`, shadowing the imported `users` table
 * schema — renamed for clarity.
 */
export const findUsers = async () => {
  const userList = await db.query.users.findMany({
    with: {
      auth: {
        columns: {
          secret: false,
        },
      },
    },
  });
  return userList;
};
export const addNewProject = async (authId: string, projectId: string) => {
const user = await findUserByAuthId(authId);
await db
.update(users)
.set({
accesedProjects: [...user.accesedProjects, projectId],
})
.where(eq(users.authId, authId));
};
export const addNewService = async (authId: string, serviceId: string) => {
const user = await findUserByAuthId(authId);
await db
.update(users)
.set({
accesedServices: [...user.accesedServices, serviceId],
})
.where(eq(users.authId, authId));
};
/**
 * True when the user may create services AND already has access to the
 * target project.
 *
 * Fixed: the first parameter was named `userId` but is an auth id (it is
 * passed to findUserByAuthId) — renamed to match the sibling helpers.
 */
export const canPerformCreationService = async (
  authId: string,
  projectId: string,
) => {
  const { accesedProjects, canCreateServices } = await findUserByAuthId(authId);
  return canCreateServices ? accesedProjects.includes(projectId) : false;
};
/**
 * True when the service id is in the user's accessible-services list.
 *
 * Fixed: the first parameter was named `userId` but is an auth id (it is
 * passed to findUserByAuthId) — renamed to match the sibling helpers.
 */
export const canPerformAccessService = async (
  authId: string,
  serviceId: string,
) => {
  const { accesedServices } = await findUserByAuthId(authId);
  return accesedServices.includes(serviceId);
};
// True when the user may delete services AND has access to this service.
// NOTE: the "Peform" typo is preserved — callers reference this exported name.
export const canPeformDeleteService = async (
  authId: string,
  serviceId: string,
) => {
  const { accesedServices, canDeleteServices } = await findUserByAuthId(authId);
  return canDeleteServices ? accesedServices.includes(serviceId) : false;
};
// True when the user's canCreateProjects flag is set.
export const canPerformCreationProject = async (authId: string) => {
  const { canCreateProjects } = await findUserByAuthId(authId);
  return canCreateProjects ? true : false;
};
// True when the user's canDeleteProjects flag is set.
export const canPerformDeleteProject = async (authId: string) => {
  const { canDeleteProjects } = await findUserByAuthId(authId);
  return canDeleteProjects ? true : false;
};
// True when the project id is in the user's accessible-projects list.
export const canPerformAccessProject = async (
  authId: string,
  projectId: string,
) => {
  const { accesedProjects } = await findUserByAuthId(authId);
  return accesedProjects.includes(projectId);
};
// Mirrors the user's canAccessToTraefikFiles flag.
export const canAccessToTraefikFiles = async (authId: string) => {
  const user = await findUserByAuthId(authId);
  return user.canAccessToTraefikFiles;
};
// Throws UNAUTHORIZED unless the user may perform `action` on the service.
// Unknown actions are denied by default.
export const checkServiceAccess = async (
  authId: string,
  serviceId: string,
  action = "access" as "access" | "create" | "delete",
) => {
  let allowed: boolean;
  if (action === "create") {
    allowed = await canPerformCreationService(authId, serviceId);
  } else if (action === "access") {
    allowed = await canPerformAccessService(authId, serviceId);
  } else if (action === "delete") {
    allowed = await canPeformDeleteService(authId, serviceId);
  } else {
    allowed = false;
  }
  if (!allowed) {
    throw new TRPCError({
      code: "UNAUTHORIZED",
      message: "Permission denied",
    });
  }
};
// Throws UNAUTHORIZED unless the user may perform `action` on projects.
// `projectId` is only consulted for the "access" action; unknown actions
// are denied by default.
export const checkProjectAccess = async (
  authId: string,
  action: "create" | "delete" | "access",
  projectId?: string,
) => {
  let allowed: boolean;
  if (action === "access") {
    allowed = await canPerformAccessProject(authId, projectId as string);
  } else if (action === "create") {
    allowed = await canPerformCreationProject(authId);
  } else if (action === "delete") {
    allowed = await canPerformDeleteProject(authId);
  } else {
    allowed = false;
  }
  if (!allowed) {
    throw new TRPCError({
      code: "UNAUTHORIZED",
      message: "Permission denied",
    });
  }
};

View File

@@ -4,8 +4,7 @@ import { DrizzlePostgreSQLAdapter } from "@lucia-auth/adapter-drizzle";
import { TimeSpan } from "lucia";
import { Lucia } from "lucia/dist/core.js";
import type { Session, User } from "lucia/dist/core.js";
import { findAdminByAuthId } from "../api/services/admin";
import { findUserByAuthId } from "../api/services/user";
import { findAdminByAuthId, findUserByAuthId } from "@dokploy/builders";
import { db } from "../db";
import { type DatabaseUser, auth, sessionTable } from "../db/schema";

View File

@@ -1,18 +1,23 @@
import { type Job, Worker } from "bullmq";
// import {
// deployApplication,
// deployRemoteApplication,
// rebuildApplication,
// rebuildRemoteApplication,
// updateApplicationStatus,
// } from "../api/services/application";
import {
deployApplication,
deployRemoteApplication,
rebuildApplication,
rebuildRemoteApplication,
updateApplicationStatus,
} from "../api/services/application";
import {
deployCompose,
deployRemoteCompose,
rebuildCompose,
rebuildRemoteCompose,
updateCompose,
} from "../api/services/compose";
} from "@dokploy/builders";
import { myQueue, redisConfig } from "./queueSetup";
type DeployJob =

View File

@@ -4,18 +4,17 @@ import { config } from "dotenv";
import next from "next";
// import { IS_CLOUD } from "./constants";
import { deploymentWorker } from "./queues/deployments-queue";
// import { deploymentWorker } from "./queues/deployments-queue";
import { setupDirectories } from "./setup/config-paths";
import { initializePostgres } from "./setup/postgres-setup";
import { initializeRedis } from "./setup/redis-setup";
import { initializeNetwork } from "./setup/setup";
import {
setupDirectories,
initializePostgres,
initializeRedis,
initializeNetwork,
createDefaultMiddlewares,
createDefaultServerTraefikConfig,
createDefaultTraefikConfig,
initializeTraefik,
} from "./setup/traefik-setup";
import { initCronJobs } from "./utils/backups";
initCronJobs,
} from "@dokploy/builders";
import { sendDokployRestartNotifications } from "./utils/notifications/dokploy-restart";
import { setupDockerContainerLogsWebSocketServer } from "./wss/docker-container-logs";
import { setupDockerContainerTerminalWebSocketServer } from "./wss/docker-container-terminal";

View File

@@ -1,157 +0,0 @@
import { findAdmin } from "@/server/api/services/admin";
import { getAllServers } from "@/server/api/services/server";
import { scheduleJob } from "node-schedule";
import { db } from "../../db/index";
import {
cleanUpDockerBuilder,
cleanUpSystemPrune,
cleanUpUnusedImages,
} from "../docker/utils";
import { runMariadbBackup } from "./mariadb";
import { runMongoBackup } from "./mongo";
import { runMySqlBackup } from "./mysql";
import { runPostgresBackup } from "./postgres";
/**
 * Registers all recurring jobs at startup:
 * - nightly docker cleanup on the main host (opt-in via admin settings)
 * - nightly docker cleanup on every registered remote server
 * - every enabled database backup (postgres / mariadb / mongo / mysql)
 *
 * Fixes: the postgres backup runner is now awaited like the other runners
 * (failures previously escaped as unhandled rejections), and the
 * docker-cleanup log line has balanced brackets.
 */
export const initCronJobs = async () => {
  console.log("Setting up cron jobs....");

  const admin = await findAdmin();
  if (admin?.enableDockerCleanup) {
    scheduleJob("docker-cleanup", "0 0 * * *", async () => {
      console.log(
        `Docker Cleanup [${new Date().toLocaleString()}] Running docker cleanup`,
      );
      await cleanUpUnusedImages();
      await cleanUpDockerBuilder();
      await cleanUpSystemPrune();
    });
  }

  const servers = await getAllServers();
  for (const server of servers) {
    const { appName, serverId } = server;
    if (serverId) {
      scheduleJob(serverId, "0 0 * * *", async () => {
        console.log(
          `SERVER-BACKUP[${new Date().toLocaleString()}] Running Cleanup ${appName}`,
        );
        await cleanUpUnusedImages(serverId);
        await cleanUpDockerBuilder(serverId);
        await cleanUpSystemPrune(serverId);
      });
    }
  }

  // Each query eagerly loads the backups with their destination and sibling
  // relations — the shape the run*Backup helpers expect.
  const pgs = await db.query.postgres.findMany({
    with: {
      backups: {
        with: {
          destination: true,
          postgres: true,
          mariadb: true,
          mysql: true,
          mongo: true,
        },
      },
    },
  });
  for (const pg of pgs) {
    for (const backup of pg.backups) {
      const { schedule, backupId, enabled } = backup;
      if (enabled) {
        scheduleJob(backupId, schedule, async () => {
          console.log(
            `PG-SERVER[${new Date().toLocaleString()}] Running Backup ${backupId}`,
          );
          // Previously fire-and-forget; awaited for consistency so failures
          // reject inside the job callback.
          await runPostgresBackup(pg, backup);
        });
      }
    }
  }

  const mariadbs = await db.query.mariadb.findMany({
    with: {
      backups: {
        with: {
          destination: true,
          postgres: true,
          mariadb: true,
          mysql: true,
          mongo: true,
        },
      },
    },
  });
  for (const maria of mariadbs) {
    for (const backup of maria.backups) {
      const { schedule, backupId, enabled } = backup;
      if (enabled) {
        scheduleJob(backupId, schedule, async () => {
          console.log(
            `MARIADB-SERVER[${new Date().toLocaleString()}] Running Backup ${backupId}`,
          );
          await runMariadbBackup(maria, backup);
        });
      }
    }
  }

  const mongodbs = await db.query.mongo.findMany({
    with: {
      backups: {
        with: {
          destination: true,
          postgres: true,
          mariadb: true,
          mysql: true,
          mongo: true,
        },
      },
    },
  });
  for (const mongo of mongodbs) {
    for (const backup of mongo.backups) {
      const { schedule, backupId, enabled } = backup;
      if (enabled) {
        scheduleJob(backupId, schedule, async () => {
          console.log(
            `MONGO-SERVER[${new Date().toLocaleString()}] Running Backup ${backupId}`,
          );
          await runMongoBackup(mongo, backup);
        });
      }
    }
  }

  const mysqls = await db.query.mysql.findMany({
    with: {
      backups: {
        with: {
          destination: true,
          postgres: true,
          mariadb: true,
          mysql: true,
          mongo: true,
        },
      },
    },
  });
  for (const mysql of mysqls) {
    for (const backup of mysql.backups) {
      const { schedule, backupId, enabled } = backup;
      if (enabled) {
        scheduleJob(backupId, schedule, async () => {
          console.log(
            `MYSQL-SERVER[${new Date().toLocaleString()}] Running Backup ${backupId}`,
          );
          await runMySqlBackup(mysql, backup);
        });
      }
    }
  }
};

View File

@@ -1,65 +0,0 @@
import path from "node:path";
import type { BackupSchedule } from "@/server/api/services/backup";
import type { Mariadb } from "@/server/api/services/mariadb";
import { findProjectById } from "@/server/api/services/project";
import {
getRemoteServiceContainer,
getServiceContainer,
} from "../docker/utils";
import { sendDatabaseBackupNotifications } from "../notifications/database-backup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getS3Credentials } from "./utils";
// Dumps a MariaDB database and streams the gzipped dump straight into the
// configured S3 destination via rclone (no local temp file), then sends a
// success/error notification. Rethrows on failure.
export const runMariadbBackup = async (
  mariadb: Mariadb,
  backup: BackupSchedule,
) => {
  const { appName, databasePassword, databaseUser, projectId, name } = mariadb;
  const project = await findProjectById(projectId);
  const { prefix, database } = backup;
  const destination = backup.destination;
  // Timestamped object key, e.g. <prefix>/2024-01-01T00:00:00.000Z.sql.gz
  const backupFileName = `${new Date().toISOString()}.sql.gz`;
  const bucketDestination = path.join(prefix, backupFileName);
  try {
    const rcloneFlags = getS3Credentials(destination);
    const rcloneDestination = `:s3:${destination.bucket}/${bucketDestination}`;
    // rclone rcat reads the dump from stdin and writes it to the bucket.
    const rcloneCommand = `rclone rcat ${rcloneFlags.join(" ")} "${rcloneDestination}"`;
    if (mariadb.serverId) {
      // Remote database: run dump + upload over SSH on that server.
      const { Id: containerId } = await getRemoteServiceContainer(
        mariadb.serverId,
        appName,
      );
      const mariadbDumpCommand = `docker exec ${containerId} sh -c "mariadb-dump --user='${databaseUser}' --password='${databasePassword}' --databases ${database} | gzip"`;
      await execAsyncRemote(
        mariadb.serverId,
        `${mariadbDumpCommand} | ${rcloneCommand}`,
      );
    } else {
      const { Id: containerId } = await getServiceContainer(appName);
      const mariadbDumpCommand = `docker exec ${containerId} sh -c "mariadb-dump --user='${databaseUser}' --password='${databasePassword}' --databases ${database} | gzip"`;
      await execAsync(`${mariadbDumpCommand} | ${rcloneCommand}`);
    }
    await sendDatabaseBackupNotifications({
      applicationName: name,
      projectName: project.name,
      databaseType: "mariadb",
      type: "success",
    });
  } catch (error) {
    console.log(error);
    await sendDatabaseBackupNotifications({
      applicationName: name,
      projectName: project.name,
      databaseType: "mariadb",
      type: "error",
      // @ts-ignore
      errorMessage: error?.message || "Error message not provided",
    });
    throw error;
  }
};

View File

@@ -1,63 +0,0 @@
import path from "node:path";
import type { BackupSchedule } from "@/server/api/services/backup";
import type { Mongo } from "@/server/api/services/mongo";
import { findProjectById } from "@/server/api/services/project";
import {
getRemoteServiceContainer,
getServiceContainer,
} from "../docker/utils";
import { sendDatabaseBackupNotifications } from "../notifications/database-backup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getS3Credentials } from "./utils";
// mongodb://mongo:Bqh7AQl-PRbnBu@localhost:27017/?tls=false&directConnection=true
// Dumps a MongoDB database (gzipped mongodump archive) and streams it to the
// configured S3 destination via rclone, then sends a success/error
// notification. Rethrows on failure.
export const runMongoBackup = async (mongo: Mongo, backup: BackupSchedule) => {
  const { appName, databasePassword, databaseUser, projectId, name } = mongo;
  const project = await findProjectById(projectId);
  const { prefix, database } = backup;
  const destination = backup.destination;
  // Timestamped object key, e.g. <prefix>/2024-01-01T00:00:00.000Z.dump.gz
  const backupFileName = `${new Date().toISOString()}.dump.gz`;
  const bucketDestination = path.join(prefix, backupFileName);
  try {
    const rcloneFlags = getS3Credentials(destination);
    const rcloneDestination = `:s3:${destination.bucket}/${bucketDestination}`;
    // rclone rcat reads the dump from stdin and writes it to the bucket.
    const rcloneCommand = `rclone rcat ${rcloneFlags.join(" ")} "${rcloneDestination}"`;
    if (mongo.serverId) {
      // Remote database: run dump + upload over SSH on that server.
      const { Id: containerId } = await getRemoteServiceContainer(
        mongo.serverId,
        appName,
      );
      const mongoDumpCommand = `docker exec ${containerId} sh -c "mongodump -d '${database}' -u '${databaseUser}' -p '${databasePassword}' --authenticationDatabase=admin --gzip"`;
      await execAsyncRemote(
        mongo.serverId,
        `${mongoDumpCommand} | ${rcloneCommand}`,
      );
    } else {
      const { Id: containerId } = await getServiceContainer(appName);
      const mongoDumpCommand = `docker exec ${containerId} sh -c "mongodump -d '${database}' -u '${databaseUser}' -p '${databasePassword}' --authenticationDatabase=admin --gzip"`;
      await execAsync(`${mongoDumpCommand} | ${rcloneCommand}`);
    }
    await sendDatabaseBackupNotifications({
      applicationName: name,
      projectName: project.name,
      databaseType: "mongodb",
      type: "success",
    });
  } catch (error) {
    console.log(error);
    await sendDatabaseBackupNotifications({
      applicationName: name,
      projectName: project.name,
      databaseType: "mongodb",
      type: "error",
      // @ts-ignore
      errorMessage: error?.message || "Error message not provided",
    });
    throw error;
  }
};
// mongorestore -d monguito -u mongo -p Bqh7AQl-PRbnBu --authenticationDatabase admin --gzip --archive=2024-04-13T05:03:58.937Z.dump.gz

View File

@@ -1,62 +0,0 @@
import { unlink } from "node:fs/promises";
import path from "node:path";
import type { BackupSchedule } from "@/server/api/services/backup";
import type { MySql } from "@/server/api/services/mysql";
import { findProjectById } from "@/server/api/services/project";
import {
getRemoteServiceContainer,
getServiceContainer,
} from "../docker/utils";
import { sendDatabaseBackupNotifications } from "../notifications/database-backup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getS3Credentials } from "./utils";
// Dumps a MySQL database (as root, gzipped) and streams it to the configured
// S3 destination via rclone, then sends a success/error notification.
// Rethrows on failure.
export const runMySqlBackup = async (mysql: MySql, backup: BackupSchedule) => {
  const { appName, databaseRootPassword, projectId, name } = mysql;
  const project = await findProjectById(projectId);
  const { prefix, database } = backup;
  const destination = backup.destination;
  // Timestamped object key, e.g. <prefix>/2024-01-01T00:00:00.000Z.sql.gz
  const backupFileName = `${new Date().toISOString()}.sql.gz`;
  const bucketDestination = path.join(prefix, backupFileName);
  try {
    const rcloneFlags = getS3Credentials(destination);
    const rcloneDestination = `:s3:${destination.bucket}/${bucketDestination}`;
    // rclone rcat reads the dump from stdin and writes it to the bucket.
    const rcloneCommand = `rclone rcat ${rcloneFlags.join(" ")} "${rcloneDestination}"`;
    if (mysql.serverId) {
      // Remote database: run dump + upload over SSH on that server.
      const { Id: containerId } = await getRemoteServiceContainer(
        mysql.serverId,
        appName,
      );
      const mysqlDumpCommand = `docker exec ${containerId} sh -c "mysqldump --default-character-set=utf8mb4 -u 'root' --password='${databaseRootPassword}' --single-transaction --no-tablespaces --quick '${database}' | gzip"`;
      await execAsyncRemote(
        mysql.serverId,
        `${mysqlDumpCommand} | ${rcloneCommand}`,
      );
    } else {
      const { Id: containerId } = await getServiceContainer(appName);
      const mysqlDumpCommand = `docker exec ${containerId} sh -c "mysqldump --default-character-set=utf8mb4 -u 'root' --password='${databaseRootPassword}' --single-transaction --no-tablespaces --quick '${database}' | gzip"`;
      await execAsync(`${mysqlDumpCommand} | ${rcloneCommand}`);
    }
    await sendDatabaseBackupNotifications({
      applicationName: name,
      projectName: project.name,
      databaseType: "mysql",
      type: "success",
    });
  } catch (error) {
    console.log(error);
    await sendDatabaseBackupNotifications({
      applicationName: name,
      projectName: project.name,
      databaseType: "mysql",
      type: "error",
      // @ts-ignore
      errorMessage: error?.message || "Error message not provided",
    });
    throw error;
  }
};

View File

@@ -1,69 +0,0 @@
import path from "node:path";
import type { BackupSchedule } from "@/server/api/services/backup";
import type { Postgres } from "@/server/api/services/postgres";
import { findProjectById } from "@/server/api/services/project";
import {
getRemoteServiceContainer,
getServiceContainer,
} from "../docker/utils";
import { sendDatabaseBackupNotifications } from "../notifications/database-backup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getS3Credentials } from "./utils";
/**
 * Dumps a Postgres database (pg_dump custom format, gzipped) and streams it
 * to the configured S3 destination via rclone, then sends a success/error
 * notification. Rethrows on failure.
 *
 * Fixes: removed a dead empty `finally {}` block, and the error is now
 * logged before notifying — matching the other backup runners.
 */
export const runPostgresBackup = async (
  postgres: Postgres,
  backup: BackupSchedule,
) => {
  const { appName, databaseUser, name, projectId } = postgres;
  const project = await findProjectById(projectId);
  const { prefix, database } = backup;
  const destination = backup.destination;
  // Timestamped object key, e.g. <prefix>/2024-01-01T00:00:00.000Z.sql.gz
  const backupFileName = `${new Date().toISOString()}.sql.gz`;
  const bucketDestination = path.join(prefix, backupFileName);
  try {
    const rcloneFlags = getS3Credentials(destination);
    const rcloneDestination = `:s3:${destination.bucket}/${bucketDestination}`;
    // rclone rcat reads the dump from stdin and writes it to the bucket.
    const rcloneCommand = `rclone rcat ${rcloneFlags.join(" ")} "${rcloneDestination}"`;
    if (postgres.serverId) {
      // Remote database: run dump + upload over SSH on that server.
      const { Id: containerId } = await getRemoteServiceContainer(
        postgres.serverId,
        appName,
      );
      const pgDumpCommand = `docker exec ${containerId} sh -c "pg_dump -Fc --no-acl --no-owner -h localhost -U ${databaseUser} --no-password '${database}' | gzip"`;
      await execAsyncRemote(
        postgres.serverId,
        `${pgDumpCommand} | ${rcloneCommand}`,
      );
    } else {
      const { Id: containerId } = await getServiceContainer(appName);
      const pgDumpCommand = `docker exec ${containerId} sh -c "pg_dump -Fc --no-acl --no-owner -h localhost -U ${databaseUser} --no-password '${database}' | gzip"`;
      await execAsync(`${pgDumpCommand} | ${rcloneCommand}`);
    }
    await sendDatabaseBackupNotifications({
      applicationName: name,
      projectName: project.name,
      databaseType: "postgres",
      type: "success",
    });
  } catch (error) {
    console.log(error);
    await sendDatabaseBackupNotifications({
      applicationName: name,
      projectName: project.name,
      databaseType: "postgres",
      type: "error",
      // @ts-ignore
      errorMessage: error?.message || "Error message not provided",
    });
    throw error;
  }
};
// Restore
// /Applications/pgAdmin 4.app/Contents/SharedSupport/pg_restore --host "localhost" --port "5432" --username "mauricio" --no-password --dbname "postgres" --verbose "/Users/mauricio/Downloads/_databases_2024-04-12T07_02_05.234Z.sql"

View File

@@ -1,43 +0,0 @@
import type { BackupSchedule } from "@/server/api/services/backup";
import type { Destination } from "@/server/api/services/destination";
import { scheduleJob, scheduledJobs } from "node-schedule";
import { runMariadbBackup } from "./mariadb";
import { runMongoBackup } from "./mongo";
import { runMySqlBackup } from "./mysql";
import { runPostgresBackup } from "./postgres";
// Registers a cron job (keyed by backupId) that dispatches to the runner
// matching the backup's database type; entries whose relation is missing
// are skipped silently.
export const scheduleBackup = (backup: BackupSchedule) => {
  const { schedule, backupId, databaseType, postgres, mysql, mongo, mariadb } =
    backup;
  scheduleJob(backupId, schedule, async () => {
    if (databaseType === "postgres" && postgres) {
      await runPostgresBackup(postgres, backup);
      return;
    }
    if (databaseType === "mysql" && mysql) {
      await runMySqlBackup(mysql, backup);
      return;
    }
    if (databaseType === "mongo" && mongo) {
      await runMongoBackup(mongo, backup);
      return;
    }
    if (databaseType === "mariadb" && mariadb) {
      await runMariadbBackup(mariadb, backup);
    }
  });
};
// Cancels the scheduled backup job, if any; node-schedule keeps a global
// registry of jobs keyed by name.
export const removeScheduleBackup = (backupId: string) => {
  scheduledJobs[backupId]?.cancel();
};
// Builds the rclone flags for an on-the-fly ":s3:" remote from a
// destination's stored credentials.
export const getS3Credentials = (destination: Destination) => {
  const { accessKey, secretAccessKey, bucket, region, endpoint } = destination;
  return [
    // `--s3-provider=Cloudflare`,
    `--s3-access-key-id=${accessKey}`,
    `--s3-secret-access-key=${secretAccessKey}`,
    `--s3-region=${region}`,
    `--s3-endpoint=${endpoint}`,
    "--s3-no-check-bucket",
    "--s3-force-path-style",
  ];
};

View File

@@ -1,216 +0,0 @@
import {
createWriteStream,
existsSync,
mkdirSync,
writeFileSync,
} from "node:fs";
import { dirname, join } from "node:path";
import { paths } from "@/server/constants";
import type { InferResultType } from "@/server/types/with";
import boxen from "boxen";
import {
writeDomainsToCompose,
writeDomainsToComposeRemote,
} from "../docker/domain";
import { encodeBase64, prepareEnvironmentVariables } from "../docker/utils";
import { execAsyncRemote } from "../process/execAsync";
import { spawnAsync } from "../process/spawnAsync";
export type ComposeNested = InferResultType<
"compose",
{ project: true; mounts: true; domains: true }
>;
// Builds and deploys a Docker Compose project on the local host: injects the
// configured domains into the compose file, writes the .env file, then runs
// `docker compose`/`docker stack` inside the project's code directory,
// appending all output to the log file at logPath. Rethrows on failure; the
// log stream is always closed.
export const buildCompose = async (compose: ComposeNested, logPath: string) => {
  // Append mode so successive deployments accumulate in the same log file.
  const writeStream = createWriteStream(logPath, { flags: "a" });
  const { sourceType, appName, mounts, composeType, domains } = compose;
  try {
    const { COMPOSE_PATH } = paths();
    // Either `compose ... up` or `stack deploy ...` depending on composeType.
    const command = createCommand(compose);
    await writeDomainsToCompose(compose, domains);
    createEnvFile(compose);
    const logContent = `
App Name: ${appName}
Build Compose 🐳
Detected: ${mounts.length} mounts 📂
Command: docker ${command}
Source Type: docker ${sourceType}
Compose Type: ${composeType}`;
    // Pretty banner at the top of this deployment's log section.
    const logBox = boxen(logContent, {
      padding: {
        left: 1,
        right: 1,
        bottom: 1,
      },
      width: 80,
      borderStyle: "double",
    });
    writeStream.write(`\n${logBox}\n`);
    const projectPath = join(COMPOSE_PATH, compose.appName, "code");
    await spawnAsync(
      "docker",
      [...command.split(" ")],
      (data) => {
        if (writeStream.writable) {
          writeStream.write(data.toString());
        }
      },
      {
        cwd: projectPath,
        // Minimal environment for the docker CLI invocation.
        env: {
          NODE_ENV: process.env.NODE_ENV,
          PATH: process.env.PATH,
        },
      },
    );
    writeStream.write("Docker Compose Deployed: ✅");
  } catch (error) {
    writeStream.write("Error ❌");
    throw error;
  } finally {
    // Always close the log stream, success or failure.
    writeStream.end();
  }
};
// Remote counterpart of buildCompose: assembles one bash script (logging to
// logPath on the remote host) that writes domains into the compose file,
// creates the .env file, and runs the docker command, then executes it on the
// compose's server over SSH.
export const getBuildComposeCommand = async (
  compose: ComposeNested,
  logPath: string,
) => {
  // paths(true) resolves directory layout as it exists on the remote server.
  const { COMPOSE_PATH } = paths(true);
  const { sourceType, appName, mounts, composeType, domains, composePath } =
    compose;
  const command = createCommand(compose);
  const envCommand = getCreateEnvFileCommand(compose);
  const projectPath = join(COMPOSE_PATH, compose.appName, "code");
  // Shell snippet that rewrites the remote compose file with the domains.
  const newCompose = await writeDomainsToComposeRemote(
    compose,
    domains,
    logPath,
  );
  const logContent = `
App Name: ${appName}
Build Compose 🐳
Detected: ${mounts.length} mounts 📂
Command: docker ${command}
Source Type: docker ${sourceType}
Compose Type: ${composeType}`;
  const logBox = boxen(logContent, {
    padding: {
      left: 1,
      right: 1,
      bottom: 1,
    },
    width: 80,
    borderStyle: "double",
  });
  // NOTE(review): logBox is interpolated unescaped into a double-quoted echo —
  // assumes boxen output contains no `"`/backticks/`$`; verify.
  const bashCommand = `
set -e
{
echo "${logBox}" >> "${logPath}"
${newCompose}
${envCommand}
cd "${projectPath}";
docker ${command.split(" ").join(" ")} >> "${logPath}" 2>&1 || { echo "Error: ❌ Docker command failed" >> "${logPath}"; exit 1; }
echo "Docker Compose Deployed: ✅" >> "${logPath}"
} || {
echo "Error: ❌ Script execution failed" >> "${logPath}"
exit 1
}
`;
  return await execAsyncRemote(compose.serverId, bashCommand);
};
/**
 * Normalizes a user-supplied command string: trims it, collapses whitespace
 * runs to single spaces, and strips one pair of surrounding double quotes
 * from each whitespace-delimited token.
 */
const sanitizeCommand = (command: string) => {
  const tokens = command.trim().split(/\s+/);
  return tokens.map((token) => token.replace(/^"(.*)"$/, "$1")).join(" ");
};
/**
 * Builds the docker CLI argument string (without the leading `docker`) that
 * deploys this compose project: `compose ... up` or `stack deploy ...`,
 * with any sanitized user-supplied extra arguments appended.
 */
export const createCommand = (compose: ComposeNested) => {
  const { composeType, appName, sourceType } = compose;
  const path =
    sourceType === "raw" ? "docker-compose.yml" : compose.composePath;
  let command = "";
  switch (composeType) {
    case "docker-compose":
      command = `compose -p ${appName} -f ${path} up -d --build --remove-orphans`;
      break;
    case "stack":
      command = `stack deploy -c ${path} ${appName} --prune`;
      break;
  }
  const extra = sanitizeCommand(compose.command);
  return extra ? `${command} ${extra}` : command;
};
// Writes the project's .env file next to its compose file, always ensuring
// DOCKER_CONFIG is set and, for randomized deployments, a COMPOSE_PREFIX.
const createEnvFile = (compose: ComposeNested) => {
  const { COMPOSE_PATH } = paths();
  const { env, composePath, appName } = compose;
  // BUG FIX: the previous `join(...) || join(...)` fallback was dead code —
  // join() always returns a non-empty string — so an empty composePath
  // resolved to ".../code" and the .env landed one directory too high.
  // Default the file name inside the join instead.
  const composeFilePath = join(
    COMPOSE_PATH,
    appName,
    "code",
    composePath || "docker-compose.yml",
  );
  const envFilePath = join(dirname(composeFilePath), ".env");
  let envContent = env || "";
  // Guarantee the docker CLI credential config is reachable in-container.
  if (!envContent.includes("DOCKER_CONFIG")) {
    envContent += "\nDOCKER_CONFIG=/root/.docker/config.json";
  }
  if (compose.randomize) {
    envContent += `\nCOMPOSE_PREFIX=${compose.suffix}`;
  }
  const envFileContent = prepareEnvironmentVariables(envContent).join("\n");
  if (!existsSync(dirname(envFilePath))) {
    mkdirSync(dirname(envFilePath), { recursive: true });
  }
  writeFileSync(envFilePath, envFileContent);
};
// Remote counterpart of createEnvFile: returns a shell snippet that writes
// the .env (base64-transported so arbitrary env values survive the shell)
// next to the compose file on the remote host.
export const getCreateEnvFileCommand = (compose: ComposeNested) => {
  const { COMPOSE_PATH } = paths(true);
  const { env, composePath, appName } = compose;
  // BUG FIX: the previous `join(...) || join(...)` fallback was dead code —
  // join() always returns a non-empty string — so an empty composePath
  // resolved to ".../code" and the .env landed one directory too high.
  const composeFilePath = join(
    COMPOSE_PATH,
    appName,
    "code",
    composePath || "docker-compose.yml",
  );
  const envFilePath = join(dirname(composeFilePath), ".env");
  let envContent = env || "";
  // Guarantee the docker CLI credential config is reachable in-container.
  if (!envContent.includes("DOCKER_CONFIG")) {
    envContent += "\nDOCKER_CONFIG=/root/.docker/config.json";
  }
  if (compose.randomize) {
    envContent += `\nCOMPOSE_PREFIX=${compose.suffix}`;
  }
  const envFileContent = prepareEnvironmentVariables(envContent).join("\n");
  const encodedContent = encodeBase64(envFileContent);
  return `
touch ${envFilePath};
echo "${encodedContent}" | base64 -d > "${envFilePath}";
`;
};

View File

@@ -1,116 +0,0 @@
import type { WriteStream } from "node:fs";
import { prepareEnvironmentVariables } from "@/server/utils/docker/utils";
import type { ApplicationNested } from ".";
import {
getBuildAppDirectory,
getDockerContextPath,
} from "../filesystem/directory";
import { spawnAsync } from "../process/spawnAsync";
import { createEnvFile, createEnvFileCommand } from "./utils";
/**
 * Builds the application's Docker image (`docker build -t <appName> …`) from
 * its Dockerfile, streaming all build output to writeStream.
 * Skips .env generation when publishDirectory is set, since those artifacts
 * may be served publicly. Rejects if the docker build fails.
 * (The original wrapped the body in a try/catch that only rethrew — a no-op
 * removed here.)
 */
export const buildCustomDocker = async (
  application: ApplicationNested,
  writeStream: WriteStream,
) => {
  const { appName, env, publishDirectory, buildArgs, dockerBuildStage } =
    application;
  const dockerFilePath = getBuildAppDirectory(application);
  const image = `${appName}`;
  // Fallback build context: the directory containing the Dockerfile.
  const defaultContextPath =
    dockerFilePath.substring(0, dockerFilePath.lastIndexOf("/") + 1) || ".";
  const args = prepareEnvironmentVariables(buildArgs);
  const dockerContextPath = getDockerContextPath(application);
  const commandArgs = ["build", "-t", image, "-f", dockerFilePath, "."];
  if (dockerBuildStage) {
    commandArgs.push("--target", dockerBuildStage);
  }
  for (const arg of args) {
    commandArgs.push("--build-arg", arg);
  }
  /*
  Do not generate an environment file when publishDirectory is specified,
  as it could be publicly exposed.
  */
  if (!publishDirectory) {
    createEnvFile(dockerFilePath, env);
  }
  await spawnAsync(
    "docker",
    commandArgs,
    (data) => {
      if (writeStream.writable) {
        writeStream.write(data);
      }
    },
    {
      cwd: dockerContextPath || defaultContextPath,
    },
  );
};
/**
 * Remote counterpart of buildCustomDocker: returns a bash snippet (logging to
 * logPath) that writes the .env file (unless publishDirectory is set), cds
 * into the build context, and runs `docker build`.
 *
 * Fixes two defects in the original:
 *  - `command = \`...\`` clobbered the env-file command appended just above,
 *    so the .env was never created remotely; now concatenated with `+=`.
 *  - a try/catch that only rethrew has been removed.
 */
export const getDockerCommand = (
  application: ApplicationNested,
  logPath: string,
) => {
  const { appName, env, publishDirectory, buildArgs, dockerBuildStage } =
    application;
  const dockerFilePath = getBuildAppDirectory(application);
  const image = `${appName}`;
  // Fallback build context: the directory containing the Dockerfile.
  const defaultContextPath =
    dockerFilePath.substring(0, dockerFilePath.lastIndexOf("/") + 1) || ".";
  const args = prepareEnvironmentVariables(buildArgs);
  const dockerContextPath =
    getDockerContextPath(application) || defaultContextPath;
  const commandArgs = ["build", "-t", image, "-f", dockerFilePath, "."];
  if (dockerBuildStage) {
    commandArgs.push("--target", dockerBuildStage);
  }
  for (const arg of args) {
    commandArgs.push("--build-arg", arg);
  }
  /*
  Do not generate an environment file when publishDirectory is specified,
  as it could be publicly exposed.
  */
  let command = "";
  if (!publishDirectory) {
    command += createEnvFileCommand(dockerFilePath, env);
  }
  command += `
echo "Building ${appName}" >> ${logPath};
cd ${dockerContextPath} >> ${logPath} 2>> ${logPath} || {
echo "❌ The path ${dockerContextPath} does not exist" >> ${logPath};
exit 1;
}
docker ${commandArgs.join(" ")} >> ${logPath} 2>> ${logPath} || {
echo "❌ Docker build failed" >> ${logPath};
exit 1;
}
echo "✅ Docker build completed." >> ${logPath};
`;
  return command;
};

View File

@@ -1,124 +0,0 @@
import fs from "node:fs/promises";
import path, { join } from "node:path";
import type { Application } from "@/server/api/services/application";
import { findServerById } from "@/server/api/services/server";
import { paths } from "@/server/constants";
import AdmZip from "adm-zip";
import { Client, type SFTPWrapper } from "ssh2";
import {
recreateDirectory,
recreateDirectoryRemote,
} from "../filesystem/directory";
import { readSSHKey } from "../filesystem/ssh";
import { execAsyncRemote } from "../process/execAsync";
// Extracts an uploaded ZIP into <APPLICATIONS_PATH>/<appName>/code, either on
// the local filesystem or — when the application lives on a remote server —
// over SFTP. If the archive wraps everything in a single root folder, that
// folder is stripped so its contents land at the top level.
export const unzipDrop = async (zipFile: File, application: Application) => {
  let sftp: SFTPWrapper | null = null;
  try {
    const { appName } = application;
    const { APPLICATIONS_PATH } = paths(!!application.serverId);
    const outputPath = join(APPLICATIONS_PATH, appName, "code");
    // Start from a clean code directory (local or remote).
    if (application.serverId) {
      await recreateDirectoryRemote(outputPath, application.serverId);
    } else {
      await recreateDirectory(outputPath);
    }
    const arrayBuffer = await zipFile.arrayBuffer();
    const buffer = Buffer.from(arrayBuffer);
    const zip = new AdmZip(buffer);
    const zipEntries = zip.getEntries();
    // Entries at the archive root: plain files, or "dir/" directory markers.
    const rootEntries = zipEntries.filter(
      (entry) =>
        entry.entryName.split("/").length === 1 ||
        (entry.entryName.split("/").length === 2 &&
          entry.entryName.endsWith("/")),
    );
    const hasSingleRootFolder = !!(
      rootEntries.length === 1 && rootEntries[0]?.isDirectory
    );
    const rootFolderName = hasSingleRootFolder
      ? rootEntries[0]?.entryName.split("/")[0]
      : "";
    if (application.serverId) {
      sftp = await getSFTPConnection(application.serverId);
    }
    for (const entry of zipEntries) {
      let filePath = entry.entryName;
      // Strip the single wrapping folder, if any.
      if (
        hasSingleRootFolder &&
        rootFolderName &&
        filePath.startsWith(`${rootFolderName}/`)
      ) {
        filePath = filePath.slice(rootFolderName?.length + 1);
      }
      // Skip the root-folder marker itself (empty after stripping).
      if (!filePath) continue;
      const fullPath = path.join(outputPath, filePath);
      if (application.serverId) {
        if (entry.isDirectory) {
          await execAsyncRemote(application.serverId, `mkdir -p ${fullPath}`);
        } else {
          if (sftp === null) throw new Error("No SFTP connection available");
          await uploadFileToServer(sftp, entry.getData(), fullPath);
        }
      } else {
        if (entry.isDirectory) {
          await fs.mkdir(fullPath, { recursive: true });
        } else {
          // Parent directories may not exist when the zip has no dir entries.
          await fs.mkdir(path.dirname(fullPath), { recursive: true });
          await fs.writeFile(fullPath, entry.getData());
        }
      }
    }
  } catch (error) {
    console.error("Error processing ZIP file:", error);
    throw error;
  } finally {
    // Close the SFTP session if one was opened.
    sftp?.end();
  }
};
/**
 * Opens an SSH connection to the given server (authenticating with its stored
 * private key) and resolves with a ready SFTP session. Rejects if the SFTP
 * subsystem cannot be started.
 */
const getSFTPConnection = async (serverId: string): Promise<SFTPWrapper> => {
  const server = await findServerById(serverId);
  if (!server.sshKeyId) throw new Error("No SSH key available for this server");
  const keys = await readSSHKey(server.sshKeyId);
  return new Promise<SFTPWrapper>((resolve, reject) => {
    const conn = new Client();
    conn.on("ready", () => {
      conn.sftp((err, sftp) => (err ? reject(err) : resolve(sftp)));
    });
    conn.connect({
      host: server.ipAddress,
      port: server.port,
      username: server.username,
      privateKey: keys.privateKey,
      timeout: 99999,
    });
  });
};
/** Writes a buffer to remotePath over an open SFTP session. */
const uploadFileToServer = (
  sftp: SFTPWrapper,
  data: Buffer,
  remotePath: string,
): Promise<void> =>
  new Promise((resolve, reject) => {
    sftp.writeFile(remotePath, data, (err) => (err ? reject(err) : resolve()));
  });

View File

@@ -1,73 +0,0 @@
import type { WriteStream } from "node:fs";
import type { ApplicationNested } from ".";
import { prepareEnvironmentVariables } from "../docker/utils";
import { getBuildAppDirectory } from "../filesystem/directory";
import { spawnAsync } from "../process/spawnAsync";
// TODO: integrate in the vps sudo chown -R $(whoami) ~/.docker
/**
 * Runs a Heroku buildpacks build (`pack build` with heroku/builder:24) for
 * the application, streaming pack's output to writeStream. Returns true on
 * success; rejects if the build fails.
 * (A try/catch that only rethrew has been removed.)
 */
export const buildHeroku = async (
  application: ApplicationNested,
  writeStream: WriteStream,
) => {
  const { env, appName } = application;
  const buildAppDirectory = getBuildAppDirectory(application);
  const envVariables = prepareEnvironmentVariables(env);
  const args = [
    "build",
    appName,
    "--path",
    buildAppDirectory,
    "--builder",
    "heroku/builder:24",
  ];
  // Forward each environment variable to the buildpack.
  for (const env of envVariables) {
    args.push("--env", env);
  }
  await spawnAsync("pack", args, (data) => {
    if (writeStream.writable) {
      writeStream.write(data);
    }
  });
  return true;
};
/**
 * Remote counterpart of buildHeroku: returns a bash snippet (logging to
 * logPath) that runs the Heroku buildpacks build via `pack`.
 */
export const getHerokuCommand = (
  application: ApplicationNested,
  logPath: string,
) => {
  const { env, appName } = application;
  const buildAppDirectory = getBuildAppDirectory(application);
  // Each env var becomes a `--env KEY=VALUE` flag pair.
  const envFlags = prepareEnvironmentVariables(env).flatMap((variable) => [
    "--env",
    variable,
  ]);
  const args = [
    "build",
    appName,
    "--path",
    buildAppDirectory,
    "--builder",
    "heroku/builder:24",
    ...envFlags,
  ];
  const command = `pack ${args.join(" ")}`;
  return `
echo "Starting heroku build..." >> ${logPath};
${command} >> ${logPath} 2>> ${logPath} || {
echo "❌ Heroku build failed" >> ${logPath};
exit 1;
}
echo "✅ Heroku build completed." >> ${logPath};
`;
};

View File

@@ -1,216 +0,0 @@
import { createWriteStream } from "node:fs";
import type { InferResultType } from "@/server/types/with";
import type { CreateServiceOptions } from "dockerode";
import { uploadImage } from "../cluster/upload";
import {
calculateResources,
generateBindMounts,
generateConfigContainer,
generateFileMounts,
generateVolumeMounts,
prepareEnvironmentVariables,
} from "../docker/utils";
import { getRemoteDocker } from "../servers/remote-docker";
import { buildCustomDocker, getDockerCommand } from "./docker-file";
import { buildHeroku, getHerokuCommand } from "./heroku";
import { buildNixpacks, getNixpacksCommand } from "./nixpacks";
import { buildPaketo, getPaketoCommand } from "./paketo";
import { buildStatic, getStaticCommand } from "./static";
// NIXPACKS codeDirectory = where is the path of the code directory
// HEROKU codeDirectory = where is the path of the code directory
// PAKETO codeDirectory = where is the path of the code directory
// DOCKERFILE codeDirectory = where is the exact path of the (Dockerfile)
export type ApplicationNested = InferResultType<
"applications",
{ mounts: true; security: true; redirects: true; ports: true; registry: true }
>;
// Runs the full local deployment pipeline for an application: build (per
// buildType), optional registry upload, then swarm service create/update.
// All output is appended to the log file at logPath; errors are logged and
// rethrown, and the log stream is always closed.
export const buildApplication = async (
  application: ApplicationNested,
  logPath: string,
) => {
  const writeStream = createWriteStream(logPath, { flags: "a" });
  const { buildType, sourceType } = application;
  try {
    writeStream.write(
      `\nBuild ${buildType}: ✅\nSource Type: ${sourceType}: ✅\n`,
    );
    console.log(`Build ${buildType}: ✅`);
    // Dispatch to the builder matching the configured build type.
    if (buildType === "nixpacks") {
      await buildNixpacks(application, writeStream);
    } else if (buildType === "heroku_buildpacks") {
      await buildHeroku(application, writeStream);
    } else if (buildType === "paketo_buildpacks") {
      await buildPaketo(application, writeStream);
    } else if (buildType === "dockerfile") {
      await buildCustomDocker(application, writeStream);
    } else if (buildType === "static") {
      await buildStatic(application, writeStream);
    }
    // Push the built image when a registry is configured.
    if (application.registryId) {
      await uploadImage(application, writeStream);
    }
    // Create/update the swarm service that runs the image.
    await mechanizeDockerContainer(application);
    writeStream.write("Docker Deployed: ✅");
  } catch (error) {
    if (error instanceof Error) {
      writeStream.write(`Error ❌\n${error?.message}`);
    } else {
      writeStream.write("Error ❌");
    }
    throw error;
  } finally {
    writeStream.end();
  }
};
/**
 * Returns the remote-execution bash command matching the application's build
 * type (counterpart of buildApplication's local dispatch).
 */
export const getBuildCommand = (
  application: ApplicationNested,
  logPath: string,
) => {
  const { buildType } = application;
  if (buildType === "nixpacks") {
    return getNixpacksCommand(application, logPath);
  }
  if (buildType === "heroku_buildpacks") {
    return getHerokuCommand(application, logPath);
  }
  if (buildType === "paketo_buildpacks") {
    return getPaketoCommand(application, logPath);
  }
  if (buildType === "static") {
    return getStaticCommand(application, logPath);
  }
  if (buildType === "dockerfile") {
    return getDockerCommand(application, logPath);
  }
};
// Creates (or updates in place) the Docker Swarm service backing the
// application: image, env, mounts, resources, networking, published ports and
// rollback/update policies.
export const mechanizeDockerContainer = async (
  application: ApplicationNested,
) => {
  const {
    appName,
    env,
    mounts,
    cpuLimit,
    memoryLimit,
    memoryReservation,
    cpuReservation,
    command,
    ports,
  } = application;
  const resources = calculateResources({
    memoryLimit,
    memoryReservation,
    cpuLimit,
    cpuReservation,
  });
  const volumesMount = generateVolumeMounts(mounts);
  // Service-level config (health checks, policies, networks, labels) derived
  // from the application record.
  const {
    HealthCheck,
    RestartPolicy,
    Placement,
    Labels,
    Mode,
    RollbackConfig,
    UpdateConfig,
    Networks,
  } = generateConfigContainer(application);
  const bindsMount = generateBindMounts(mounts);
  const filesMount = generateFileMounts(appName, application);
  const envVariables = prepareEnvironmentVariables(env);
  const image = getImageName(application);
  const authConfig = getAuthConfig(application);
  // Local dockerode when serverId is null, remote daemon otherwise.
  const docker = await getRemoteDocker(application.serverId);
  const settings: CreateServiceOptions = {
    authconfig: authConfig,
    Name: appName,
    TaskTemplate: {
      ContainerSpec: {
        HealthCheck,
        Image: image,
        Env: envVariables,
        Mounts: [...volumesMount, ...bindsMount, ...filesMount],
        // Optional custom start command, run through a shell.
        ...(command
          ? {
              Command: ["/bin/sh"],
              Args: ["-c", command],
            }
          : {}),
        Labels,
      },
      Networks,
      RestartPolicy,
      Placement,
      Resources: {
        ...resources,
      },
    },
    Mode,
    RollbackConfig,
    EndpointSpec: {
      Ports: ports.map((port) => ({
        Protocol: port.protocol,
        TargetPort: port.targetPort,
        PublishedPort: port.publishedPort,
      })),
    },
    UpdateConfig,
  };
  try {
    // If the service already exists, update it in place; bumping ForceUpdate
    // makes swarm redeploy even when the spec is otherwise unchanged.
    const service = docker.getService(appName);
    const inspect = await service.inspect();
    await service.update({
      version: Number.parseInt(inspect.Version.Index),
      ...settings,
      TaskTemplate: {
        ...settings.TaskTemplate,
        ForceUpdate: inspect.Spec.TaskTemplate.ForceUpdate + 1,
      },
    });
  } catch (error) {
    // Service does not exist yet (or inspect failed) — create it.
    await docker.createService(settings);
  }
};
/**
 * Resolves the image reference to deploy: the raw image for docker-sourced
 * apps, a registry-qualified tag when a registry is configured, otherwise
 * the locally built `<appName>:latest`.
 */
const getImageName = (application: ApplicationNested) => {
  const { appName, sourceType, dockerImage, registry } = application;
  if (sourceType === "docker") {
    return dockerImage || "ERROR-NO-IMAGE-PROVIDED";
  }
  if (!registry) {
    return `${appName}:latest`;
  }
  const imagePrefix = registry.imagePrefix ? `${registry.imagePrefix}/` : "";
  return `${registry.registryUrl || ""}/${imagePrefix}${appName}`;
};
/**
 * Builds the dockerode auth config for pulling the application's image:
 * Docker Hub credentials for docker-sourced apps, the configured registry's
 * credentials otherwise, or undefined when no auth applies.
 */
const getAuthConfig = (application: ApplicationNested) => {
  const { registry, username, password, sourceType } = application;
  if (sourceType === "docker") {
    if (username && password) {
      return {
        password,
        username,
        serveraddress: "https://index.docker.io/v1/",
      };
    }
    return undefined;
  }
  if (registry) {
    return {
      password: registry.password,
      username: registry.username,
      serveraddress: registry.registryUrl,
    };
  }
  return undefined;
};

View File

@@ -1,124 +0,0 @@
import type { WriteStream } from "node:fs";
import path from "node:path";
import { buildStatic, getStaticCommand } from "@/server/utils/builders/static";
import { nanoid } from "nanoid";
import type { ApplicationNested } from ".";
import { prepareEnvironmentVariables } from "../docker/utils";
import { getBuildAppDirectory } from "../filesystem/directory";
import { spawnAsync } from "../process/spawnAsync";
// Builds the application image with nixpacks, streaming output to
// writeStream. When publishDirectory is set, the built image is run through a
// throwaway container to extract the static artifacts onto the host, and a
// static (nginx) build is chained afterwards. Returns true on success.
export const buildNixpacks = async (
  application: ApplicationNested,
  writeStream: WriteStream,
) => {
  const { env, appName, publishDirectory, serverId } = application;
  const buildAppDirectory = getBuildAppDirectory(application);
  // Random suffix avoids clashes with a concurrent build of the same app.
  const buildContainerId = `${appName}-${nanoid(10)}`;
  const envVariables = prepareEnvironmentVariables(env);
  const writeToStream = (data: string) => {
    if (writeStream.writable) {
      writeStream.write(data);
    }
  };
  try {
    const args = ["build", buildAppDirectory, "--name", appName];
    for (const env of envVariables) {
      args.push("--env", env);
    }
    if (publishDirectory) {
      /* No need for any start command, since we'll use nginx later on */
      args.push("--no-error-without-start");
    }
    await spawnAsync("nixpacks", args, writeToStream);
    /*
    Run the container with the image created by nixpacks,
    and copy the artifacts on the host filesystem.
    Then, remove the container and create a static build.
    */
    if (publishDirectory) {
      await spawnAsync(
        "docker",
        ["create", "--name", buildContainerId, appName],
        writeToStream,
      );
      await spawnAsync(
        "docker",
        [
          "cp",
          `${buildContainerId}:/app/${publishDirectory}`,
          path.join(buildAppDirectory, publishDirectory),
        ],
        writeToStream,
      );
      // The extraction container is only needed for the copy.
      await spawnAsync("docker", ["rm", buildContainerId], writeToStream);
      await buildStatic(application, writeStream);
    }
    return true;
  } catch (e) {
    // Best-effort cleanup of the extraction container before rethrowing.
    await spawnAsync("docker", ["rm", buildContainerId], writeToStream);
    throw e;
  }
};
// Remote counterpart of buildNixpacks: returns a bash script (logging to
// logPath) that runs the nixpacks build and, when publishDirectory is set,
// extracts the artifacts from a throwaway container and chains the static
// build command.
export const getNixpacksCommand = (
  application: ApplicationNested,
  logPath: string,
) => {
  const { env, appName, publishDirectory, serverId } = application;
  const buildAppDirectory = getBuildAppDirectory(application);
  // Random suffix avoids clashes with a concurrent build of the same app.
  const buildContainerId = `${appName}-${nanoid(10)}`;
  const envVariables = prepareEnvironmentVariables(env);
  const args = ["build", buildAppDirectory, "--name", appName];
  for (const env of envVariables) {
    args.push("--env", env);
  }
  if (publishDirectory) {
    /* No need for any start command, since we'll use nginx later on */
    args.push("--no-error-without-start");
  }
  const command = `nixpacks ${args.join(" ")}`;
  let bashCommand = `
echo "Starting nixpacks build..." >> ${logPath};
${command} >> ${logPath} 2>> ${logPath} || {
echo "❌ Nixpacks build failed" >> ${logPath};
exit 1;
}
echo "✅ Nixpacks build completed." >> ${logPath};
`;
  /*
  Run the container with the image created by nixpacks,
  and copy the artifacts on the host filesystem.
  Then, remove the container and create a static build.
  */
  if (publishDirectory) {
    bashCommand += `
docker create --name ${buildContainerId} ${appName}
docker cp ${buildContainerId}:/app/${publishDirectory} ${path.join(buildAppDirectory, publishDirectory)} >> ${logPath} 2>> ${logPath} || {
docker rm ${buildContainerId}
echo "❌ Copying ${publishDirectory} to ${path.join(buildAppDirectory, publishDirectory)} failed" >> ${logPath};
exit 1;
}
docker rm ${buildContainerId}
${getStaticCommand(application, logPath)}
`;
  }
  return bashCommand;
};

View File

@@ -1,72 +0,0 @@
import type { WriteStream } from "node:fs";
import type { ApplicationNested } from ".";
import { prepareEnvironmentVariables } from "../docker/utils";
import { getBuildAppDirectory } from "../filesystem/directory";
import { spawnAsync } from "../process/spawnAsync";
/**
 * Runs a Paketo buildpacks build (`pack build` with
 * paketobuildpacks/builder-jammy-full) for the application, streaming pack's
 * output to writeStream. Returns true on success; rejects if the build fails.
 * (A try/catch that only rethrew has been removed.)
 */
export const buildPaketo = async (
  application: ApplicationNested,
  writeStream: WriteStream,
) => {
  const { env, appName } = application;
  const buildAppDirectory = getBuildAppDirectory(application);
  const envVariables = prepareEnvironmentVariables(env);
  const args = [
    "build",
    appName,
    "--path",
    buildAppDirectory,
    "--builder",
    "paketobuildpacks/builder-jammy-full",
  ];
  // Forward each environment variable to the buildpack.
  for (const env of envVariables) {
    args.push("--env", env);
  }
  await spawnAsync("pack", args, (data) => {
    if (writeStream.writable) {
      writeStream.write(data);
    }
  });
  return true;
};
/**
 * Remote counterpart of buildPaketo: returns a bash snippet (logging to
 * logPath) that runs the Paketo buildpacks build via `pack`.
 */
export const getPaketoCommand = (
  application: ApplicationNested,
  logPath: string,
) => {
  const { env, appName } = application;
  const buildAppDirectory = getBuildAppDirectory(application);
  // Each env var becomes a `--env KEY=VALUE` flag pair.
  const envFlags = prepareEnvironmentVariables(env).flatMap((variable) => [
    "--env",
    variable,
  ]);
  const args = [
    "build",
    appName,
    "--path",
    buildAppDirectory,
    "--builder",
    "paketobuildpacks/builder-jammy-full",
    ...envFlags,
  ];
  const command = `pack ${args.join(" ")}`;
  return `
echo "Starting Paketo build..." >> ${logPath};
${command} >> ${logPath} 2>> ${logPath} || {
echo "❌ Paketo build failed" >> ${logPath};
exit 1;
}
echo "✅ Paketo build completed." >> ${logPath};
`;
};

View File

@@ -1,69 +0,0 @@
import type { WriteStream } from "node:fs";
import {
buildCustomDocker,
getDockerCommand,
} from "@/server/utils/builders/docker-file";
import type { ApplicationNested } from ".";
import { createFile, getCreateFileCommand } from "../docker/utils";
import { getBuildAppDirectory } from "../filesystem/directory";
/**
 * Packages the application's published assets into an nginx image: writes a
 * minimal Dockerfile into the build directory, then delegates to
 * buildCustomDocker. Returns true on success.
 * (A try/catch that only rethrew has been removed.)
 */
export const buildStatic = async (
  application: ApplicationNested,
  writeStream: WriteStream,
) => {
  const { publishDirectory } = application;
  const buildAppDirectory = getBuildAppDirectory(application);
  // Minimal nginx image that serves the published directory (or everything
  // when publishDirectory is not set).
  createFile(
    buildAppDirectory,
    "Dockerfile",
    [
      "FROM nginx:alpine",
      "WORKDIR /usr/share/nginx/html/",
      `COPY ${publishDirectory || "."} .`,
    ].join("\n"),
  );
  await buildCustomDocker(
    {
      ...application,
      buildType: "dockerfile",
      dockerfile: "Dockerfile",
    },
    writeStream,
  );
  return true;
};
/**
 * Remote counterpart of buildStatic: returns a shell command that writes the
 * generated nginx Dockerfile and appends the remote docker build command.
 */
export const getStaticCommand = (
  application: ApplicationNested,
  logPath: string,
) => {
  const { publishDirectory } = application;
  const buildAppDirectory = getBuildAppDirectory(application);
  const dockerfileContent = [
    "FROM nginx:alpine",
    "WORKDIR /usr/share/nginx/html/",
    `COPY ${publishDirectory || "."} .`,
  ].join("\n");
  let command = getCreateFileCommand(
    buildAppDirectory,
    "Dockerfile",
    dockerfileContent,
  );
  command += getDockerCommand(
    {
      ...application,
      buildType: "dockerfile",
      dockerfile: "Dockerfile",
    },
    logPath,
  );
  return command;
};

View File

@@ -1,21 +0,0 @@
import { existsSync, mkdirSync, writeFileSync } from "node:fs";
import { dirname, join } from "node:path";
import { prepareEnvironmentVariables } from "../docker/utils";
/**
 * Writes a `.env` file (normalized via prepareEnvironmentVariables) into the
 * directory containing `directory`, creating that directory if needed.
 */
export const createEnvFile = (directory: string, env: string | null) => {
  const envFilePath = join(dirname(directory), ".env");
  const envDir = dirname(envFilePath);
  if (!existsSync(envDir)) {
    mkdirSync(envDir, { recursive: true });
  }
  writeFileSync(envFilePath, prepareEnvironmentVariables(env).join("\n"));
};
/**
 * Returns a shell snippet that writes the `.env` file on the host where the
 * snippet runs (typically a remote server).
 *
 * Fixes two defects in the original:
 *  - the env content was interpolated raw into `echo "…"`, so values with
 *    quotes, `$` or backticks corrupted the file (and allowed injection);
 *    the content is now base64-transported, matching the compose-side
 *    getCreateEnvFileCommand.
 *  - mkdirSync created the directory on the LOCAL filesystem even though the
 *    command may run remotely; the directory is now created by the command
 *    itself with `mkdir -p`.
 */
export const createEnvFileCommand = (directory: string, env: string | null) => {
  const envFilePath = join(dirname(directory), ".env");
  const envFileContent = prepareEnvironmentVariables(env).join("\n");
  // Base64 survives the shell regardless of the content's characters.
  const encodedContent = Buffer.from(envFileContent, "utf8").toString("base64");
  return `mkdir -p "${dirname(envFilePath)}";
echo "${encodedContent}" | base64 -d > "${envFilePath}";
`;
};

View File

@@ -1,65 +0,0 @@
import type { WriteStream } from "node:fs";
import type { ApplicationNested } from "../builders";
import { spawnAsync } from "../process/spawnAsync";
// Tags the locally built `<appName>:latest` image for the application's
// configured registry and pushes it, streaming docker output to writeStream.
// Throws if no registry is configured or any docker step fails.
export const uploadImage = async (
  application: ApplicationNested,
  writeStream: WriteStream,
) => {
  const registry = application.registry;
  if (!registry) {
    throw new Error("Registry not found");
  }
  const { registryUrl, imagePrefix, registryType } = registry;
  const { appName } = application;
  const imageName = `${appName}:latest`;
  // A self-hosted registry is reached at localhost:5000 during development.
  const finalURL =
    registryType === "selfHosted"
      ? process.env.NODE_ENV === "development"
        ? "localhost:5000"
        : registryUrl
      : registryUrl;
  const registryTag = imagePrefix
    ? `${finalURL}/${imagePrefix}/${imageName}`
    : `${finalURL}/${imageName}`;
  try {
    console.log(finalURL, registryTag);
    writeStream.write(
      `📦 [Enabled Registry] Uploading image to ${registry.registryType} | ${registryTag} | ${finalURL}\n`,
    );
    // NOTE(review): passing the password via `-p` exposes it in the process
    // list; consider `--password-stdin` — confirm spawnAsync supports stdin.
    await spawnAsync(
      "docker",
      ["login", finalURL, "-u", registry.username, "-p", registry.password],
      (data) => {
        if (writeStream.writable) {
          writeStream.write(data);
        }
      },
    );
    await spawnAsync("docker", ["tag", imageName, registryTag], (data) => {
      if (writeStream.writable) {
        writeStream.write(data);
      }
    });
    await spawnAsync("docker", ["push", registryTag], (data) => {
      if (writeStream.writable) {
        writeStream.write(data);
      }
    });
  } catch (error) {
    console.log(error);
    throw error;
  }
};
// docker:
// endpoint: "unix:///var/run/docker.sock"
// exposedByDefault: false
// swarmMode: true

View File

@@ -1,98 +0,0 @@
import type { InferResultType } from "@/server/types/with";
import type { CreateServiceOptions } from "dockerode";
import {
calculateResources,
generateBindMounts,
generateFileMounts,
generateVolumeMounts,
prepareEnvironmentVariables,
} from "../docker/utils";
import { getRemoteDocker } from "../servers/remote-docker";
export type MariadbNested = InferResultType<"mariadb", { mounts: true }>;
// Creates (or updates in place) the Docker Swarm service that runs this
// MariaDB instance: bootstrap env, mounts, resource limits, the overlay
// network, and an optional host-published port.
export const buildMariadb = async (mariadb: MariadbNested) => {
  const {
    appName,
    env,
    externalPort,
    dockerImage,
    memoryLimit,
    memoryReservation,
    databaseName,
    databaseUser,
    databasePassword,
    databaseRootPassword,
    cpuLimit,
    cpuReservation,
    command,
    mounts,
  } = mariadb;
  // Standard MARIADB_* bootstrap variables, with user-provided env appended.
  const defaultMariadbEnv = `MARIADB_DATABASE=${databaseName}\nMARIADB_USER=${databaseUser}\nMARIADB_PASSWORD=${databasePassword}\nMARIADB_ROOT_PASSWORD=${databaseRootPassword}${
    env ? `\n${env}` : ""
  }`;
  const resources = calculateResources({
    memoryLimit,
    memoryReservation,
    cpuLimit,
    cpuReservation,
  });
  const envVariables = prepareEnvironmentVariables(defaultMariadbEnv);
  const volumesMount = generateVolumeMounts(mounts);
  const bindsMount = generateBindMounts(mounts);
  const filesMount = generateFileMounts(appName, mariadb);
  const docker = await getRemoteDocker(mariadb.serverId);
  const settings: CreateServiceOptions = {
    Name: appName,
    TaskTemplate: {
      ContainerSpec: {
        Image: dockerImage,
        Env: envVariables,
        Mounts: [...volumesMount, ...bindsMount, ...filesMount],
        // Optional custom entrypoint, run through a shell.
        ...(command
          ? {
              Command: ["/bin/sh"],
              Args: ["-c", command],
            }
          : {}),
      },
      Networks: [{ Target: "dokploy-network" }],
      Resources: {
        ...resources,
      },
      Placement: {
        Constraints: ["node.role==manager"],
      },
    },
    Mode: {
      Replicated: {
        Replicas: 1,
      },
    },
    EndpointSpec: {
      Mode: "dnsrr",
      // Publish 3306 on the host only when an external port is configured.
      Ports: externalPort
        ? [
            {
              Protocol: "tcp",
              TargetPort: 3306,
              PublishedPort: externalPort,
              PublishMode: "host",
            },
          ]
        : [],
    },
  };
  try {
    // Update the service if it already exists…
    const service = docker.getService(appName);
    const inspect = await service.inspect();
    await service.update({
      version: Number.parseInt(inspect.Version.Index),
      ...settings,
    });
  } catch (error) {
    // …otherwise create it from scratch.
    await docker.createService(settings);
  }
};

View File

@@ -1,97 +0,0 @@
import type { InferResultType } from "@/server/types/with";
import type { CreateServiceOptions } from "dockerode";
import {
calculateResources,
generateBindMounts,
generateFileMounts,
generateVolumeMounts,
prepareEnvironmentVariables,
} from "../docker/utils";
import { getRemoteDocker } from "../servers/remote-docker";
export type MongoNested = InferResultType<"mongo", { mounts: true }>;
// Creates (or updates in place) the Docker Swarm service that runs this
// MongoDB instance: root-user bootstrap env, mounts, resource limits, the
// overlay network, and an optional host-published port.
export const buildMongo = async (mongo: MongoNested) => {
  const {
    appName,
    env,
    externalPort,
    dockerImage,
    memoryLimit,
    memoryReservation,
    cpuLimit,
    cpuReservation,
    databaseUser,
    databasePassword,
    command,
    mounts,
  } = mongo;
  // Standard MONGO_INITDB_* bootstrap variables, with user env appended.
  const defaultMongoEnv = `MONGO_INITDB_ROOT_USERNAME=${databaseUser}\nMONGO_INITDB_ROOT_PASSWORD=${databasePassword}${
    env ? `\n${env}` : ""
  }`;
  const resources = calculateResources({
    memoryLimit,
    memoryReservation,
    cpuLimit,
    cpuReservation,
  });
  const envVariables = prepareEnvironmentVariables(defaultMongoEnv);
  const volumesMount = generateVolumeMounts(mounts);
  const bindsMount = generateBindMounts(mounts);
  const filesMount = generateFileMounts(appName, mongo);
  const docker = await getRemoteDocker(mongo.serverId);
  const settings: CreateServiceOptions = {
    Name: appName,
    TaskTemplate: {
      ContainerSpec: {
        Image: dockerImage,
        Env: envVariables,
        Mounts: [...volumesMount, ...bindsMount, ...filesMount],
        // Optional custom entrypoint, run through a shell.
        ...(command
          ? {
              Command: ["/bin/sh"],
              Args: ["-c", command],
            }
          : {}),
      },
      Networks: [{ Target: "dokploy-network" }],
      Resources: {
        ...resources,
      },
      Placement: {
        Constraints: ["node.role==manager"],
      },
    },
    Mode: {
      Replicated: {
        Replicas: 1,
      },
    },
    EndpointSpec: {
      Mode: "dnsrr",
      // Publish 27017 on the host only when an external port is configured.
      Ports: externalPort
        ? [
            {
              Protocol: "tcp",
              TargetPort: 27017,
              PublishedPort: externalPort,
              PublishMode: "host",
            },
          ]
        : [],
    },
  };
  try {
    // Update the service if it already exists…
    const service = docker.getService(appName);
    const inspect = await service.inspect();
    await service.update({
      version: Number.parseInt(inspect.Version.Index),
      ...settings,
    });
  } catch (error) {
    // …otherwise create it from scratch.
    await docker.createService(settings);
  }
};

View File

@@ -1,104 +0,0 @@
import type { InferResultType } from "@/server/types/with";
import type { CreateServiceOptions } from "dockerode";
import {
calculateResources,
generateBindMounts,
generateFileMounts,
generateVolumeMounts,
prepareEnvironmentVariables,
} from "../docker/utils";
import { getRemoteDocker } from "../servers/remote-docker";
export type MysqlNested = InferResultType<"mysql", { mounts: true }>;
// Creates (or updates in place) the Docker Swarm service that runs this MySQL
// instance: bootstrap env (MYSQL_USER is omitted when the configured user is
// root, which the official image forbids), mounts, resource limits, the
// overlay network, and an optional host-published port.
export const buildMysql = async (mysql: MysqlNested) => {
  const {
    appName,
    env,
    externalPort,
    dockerImage,
    memoryLimit,
    memoryReservation,
    databaseName,
    databaseUser,
    databasePassword,
    databaseRootPassword,
    cpuLimit,
    cpuReservation,
    command,
    mounts,
  } = mysql;
  // MYSQL_USER/MYSQL_PASSWORD are only valid for non-root users; root is
  // configured solely through MYSQL_ROOT_PASSWORD.
  const defaultMysqlEnv =
    databaseUser !== "root"
      ? `MYSQL_USER=${databaseUser}\nMYSQL_DATABASE=${databaseName}\nMYSQL_PASSWORD=${databasePassword}\nMYSQL_ROOT_PASSWORD=${databaseRootPassword}${
          env ? `\n${env}` : ""
        }`
      : `MYSQL_DATABASE=${databaseName}\nMYSQL_ROOT_PASSWORD=${databaseRootPassword}${
          env ? `\n${env}` : ""
        }`;
  const resources = calculateResources({
    memoryLimit,
    memoryReservation,
    cpuLimit,
    cpuReservation,
  });
  const envVariables = prepareEnvironmentVariables(defaultMysqlEnv);
  const volumesMount = generateVolumeMounts(mounts);
  const bindsMount = generateBindMounts(mounts);
  const filesMount = generateFileMounts(appName, mysql);
  const docker = await getRemoteDocker(mysql.serverId);
  const settings: CreateServiceOptions = {
    Name: appName,
    TaskTemplate: {
      ContainerSpec: {
        Image: dockerImage,
        Env: envVariables,
        Mounts: [...volumesMount, ...bindsMount, ...filesMount],
        // Optional custom entrypoint, run through a shell.
        ...(command
          ? {
              Command: ["/bin/sh"],
              Args: ["-c", command],
            }
          : {}),
      },
      Networks: [{ Target: "dokploy-network" }],
      Resources: {
        ...resources,
      },
      Placement: {
        Constraints: ["node.role==manager"],
      },
    },
    Mode: {
      Replicated: {
        Replicas: 1,
      },
    },
    EndpointSpec: {
      Mode: "dnsrr",
      // Publish 3306 on the host only when an external port is configured.
      Ports: externalPort
        ? [
            {
              Protocol: "tcp",
              TargetPort: 3306,
              PublishedPort: externalPort,
              PublishMode: "host",
            },
          ]
        : [],
    },
  };
  try {
    // Update the service if it already exists…
    const service = docker.getService(appName);
    const inspect = await service.inspect();
    await service.update({
      version: Number.parseInt(inspect.Version.Index),
      ...settings,
    });
  } catch (error) {
    // …otherwise create it from scratch.
    await docker.createService(settings);
  }
};

View File

@@ -1,98 +0,0 @@
import type { InferResultType } from "@/server/types/with";
import type { CreateServiceOptions } from "dockerode";
import {
calculateResources,
generateBindMounts,
generateFileMounts,
generateVolumeMounts,
prepareEnvironmentVariables,
} from "../docker/utils";
import { getRemoteDocker } from "../servers/remote-docker";
// Fully-loaded Postgres row including its mount relations.
export type PostgresNested = InferResultType<"postgres", { mounts: true }>;

/**
 * Creates (or updates in place) the Docker Swarm service that runs a
 * PostgreSQL database: default POSTGRES_* variables from the stored
 * credentials, user env appended, volume/bind/file mounts, resource limits,
 * and an optional host-published port 5432.
 */
export const buildPostgres = async (postgres: PostgresNested) => {
	const {
		appName,
		env,
		externalPort,
		dockerImage,
		memoryLimit,
		memoryReservation,
		cpuLimit,
		cpuReservation,
		databaseName,
		databaseUser,
		databasePassword,
		command,
		mounts,
	} = postgres;

	// Default credentials first, then any user-provided variables.
	const defaultPostgresEnv = `POSTGRES_DB=${databaseName}\nPOSTGRES_USER=${databaseUser}\nPOSTGRES_PASSWORD=${databasePassword}${
		env ? `\n${env}` : ""
	}`;
	const resources = calculateResources({
		memoryLimit,
		memoryReservation,
		cpuLimit,
		cpuReservation,
	});
	const envVariables = prepareEnvironmentVariables(defaultPostgresEnv);
	const volumesMount = generateVolumeMounts(mounts);
	const bindsMount = generateBindMounts(mounts);
	const filesMount = generateFileMounts(appName, postgres);
	const docker = await getRemoteDocker(postgres.serverId);

	const settings: CreateServiceOptions = {
		Name: appName,
		TaskTemplate: {
			ContainerSpec: {
				Image: dockerImage,
				Env: envVariables,
				Mounts: [...volumesMount, ...bindsMount, ...filesMount],
				// Only override the image entrypoint when a custom command
				// was configured.
				...(command
					? {
							Command: ["/bin/sh"],
							Args: ["-c", command],
						}
					: {}),
			},
			Networks: [{ Target: "dokploy-network" }],
			Resources: {
				...resources,
			},
			Placement: {
				Constraints: ["node.role==manager"],
			},
		},
		Mode: {
			Replicated: {
				Replicas: 1,
			},
		},
		EndpointSpec: {
			Mode: "dnsrr",
			// Publish Postgres on the host only when an external port is set.
			Ports: externalPort
				? [
						{
							Protocol: "tcp",
							TargetPort: 5432,
							PublishedPort: externalPort,
							PublishMode: "host",
						},
					]
				: [],
		},
	};
	try {
		// Service already exists: update in place with the current version
		// index required by the Swarm API.
		const service = docker.getService(appName);
		const inspect = await service.inspect();
		await service.update({
			version: Number.parseInt(inspect.Version.Index),
			...settings,
		});
	} catch (error) {
		// Service does not exist yet — create it. (Leftover debug
		// console.log removed for consistency with the other builders.)
		await docker.createService(settings);
	}
};

View File

@@ -1,95 +0,0 @@
import type { InferResultType } from "@/server/types/with";
import type { CreateServiceOptions } from "dockerode";
import {
calculateResources,
generateBindMounts,
generateFileMounts,
generateVolumeMounts,
prepareEnvironmentVariables,
} from "../docker/utils";
import { getRemoteDocker } from "../servers/remote-docker";
// Fully-loaded Redis row including its mount relations.
export type RedisNested = InferResultType<"redis", { mounts: true }>;

/**
 * Creates (or updates in place) the Docker Swarm service that runs a Redis
 * instance. Unlike the other database builders, Redis always starts through
 * `/bin/sh -c` so the stored password can be injected via `--requirepass`
 * when no custom command is configured.
 */
export const buildRedis = async (redis: RedisNested) => {
	const {
		appName,
		env,
		externalPort,
		dockerImage,
		memoryLimit,
		memoryReservation,
		databasePassword,
		cpuLimit,
		cpuReservation,
		command,
		mounts,
	} = redis;

	// REDIS_PASSWORD first, then any user-provided variables.
	const baseEnv = env
		? `REDIS_PASSWORD=${databasePassword}\n${env}`
		: `REDIS_PASSWORD=${databasePassword}`;
	const serviceResources = calculateResources({
		memoryLimit,
		memoryReservation,
		cpuLimit,
		cpuReservation,
	});
	const environment = prepareEnvironmentVariables(baseEnv);
	const volumeMounts = generateVolumeMounts(mounts);
	const bindMounts = generateBindMounts(mounts);
	const fileMounts = generateFileMounts(appName, redis);
	const docker = await getRemoteDocker(redis.serverId);

	// Fall back to redis-server with the stored password when no custom
	// command was configured.
	const shellCommand = command
		? command
		: `redis-server --requirepass ${databasePassword}`;

	const settings: CreateServiceOptions = {
		Name: appName,
		TaskTemplate: {
			ContainerSpec: {
				Image: dockerImage,
				Env: environment,
				Mounts: [...volumeMounts, ...bindMounts, ...fileMounts],
				Command: ["/bin/sh"],
				Args: ["-c", shellCommand],
			},
			Networks: [{ Target: "dokploy-network" }],
			Resources: { ...serviceResources },
			Placement: { Constraints: ["node.role==manager"] },
		},
		Mode: { Replicated: { Replicas: 1 } },
		EndpointSpec: {
			Mode: "dnsrr",
			// Publish Redis on the host only when an external port is set.
			Ports: externalPort
				? [
						{
							Protocol: "tcp",
							TargetPort: 6379,
							PublishedPort: externalPort,
							PublishMode: "host",
						},
					]
				: [],
		},
	};

	try {
		// Service already exists: update in place with the current version
		// index required by the Swarm API.
		const existing = docker.getService(appName);
		const info = await existing.inspect();
		await existing.update({
			version: Number.parseInt(info.Version.Index),
			...settings,
		});
	} catch (error) {
		// getService/inspect throws when the service is missing — create it.
		await docker.createService(settings);
	}
};

View File

@@ -1,6 +1,6 @@
import { exec } from "node:child_process";
import util from "node:util";
import { findServerById } from "@/server/api/services/server";
import { findServerById } from "@dokploy/builders";
import { Client } from "ssh2";
import { readSSHKey } from "../filesystem/ssh";
// Promisified child_process.exec so callers can `await` shell commands.
export const execAsync = util.promisify(exec);

View File

@@ -1,361 +0,0 @@
import { createWriteStream } from "node:fs";
import { join } from "node:path";
import { findBitbucketById } from "@/server/api/services/bitbucket";
import type { Compose } from "@/server/api/services/compose";
import { paths } from "@/server/constants";
import type {
apiBitbucketTestConnection,
apiFindBitbucketBranches,
} from "@/server/db/schema";
import type { InferResultType } from "@/server/types/with";
import { TRPCError } from "@trpc/server";
import { recreateDirectory } from "../filesystem/directory";
import { execAsyncRemote } from "../process/execAsync";
import { spawnAsync } from "../process/spawnAsync";
// Application row joined with its Bitbucket provider relation.
export type ApplicationWithBitbucket = InferResultType<
	"applications",
	{ bitbucket: true }
>;
// Compose row joined with its Bitbucket provider relation.
export type ComposeWithBitbucket = InferResultType<
	"compose",
	{ bitbucket: true }
>;
/**
 * Clones a Bitbucket repository for an application or compose service into
 * `<base>/<appName>/code`, streaming git progress into the deployment log.
 *
 * @param entity application or compose row joined with its bitbucket provider
 * @param logPath deployment log file (opened in append mode)
 * @param isCompose selects COMPOSE_PATH over APPLICATIONS_PATH as the base dir
 * @throws TRPCError when the entity has no Bitbucket provider configured
 */
export const cloneBitbucketRepository = async (
	entity: ApplicationWithBitbucket | ComposeWithBitbucket,
	logPath: string,
	isCompose = false,
) => {
	const { COMPOSE_PATH, APPLICATIONS_PATH } = paths();
	const writeStream = createWriteStream(logPath, { flags: "a" });
	const {
		appName,
		bitbucketRepository,
		bitbucketOwner,
		bitbucketBranch,
		bitbucketId,
		bitbucket,
	} = entity;
	if (!bitbucketId) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Bitbucket Provider not found",
		});
	}
	const basePath = isCompose ? COMPOSE_PATH : APPLICATIONS_PATH;
	const outputPath = join(basePath, appName, "code");
	// Wipe any previous checkout so the clone starts from a clean directory.
	await recreateDirectory(outputPath);
	const repoclone = `bitbucket.org/${bitbucketOwner}/${bitbucketRepository}.git`;
	// App-password basic auth embedded in the clone URL (never written to the log).
	const cloneUrl = `https://${bitbucket?.bitbucketUsername}:${bitbucket?.appPassword}@${repoclone}`;
	try {
		writeStream.write(`\nCloning Repo ${repoclone} to ${outputPath}: ✅\n`);
		await spawnAsync(
			"git",
			[
				"clone",
				"--branch",
				bitbucketBranch!,
				"--depth",
				"1",
				cloneUrl,
				outputPath,
				"--progress",
			],
			(data) => {
				// Forward git's progress output to the deployment log.
				if (writeStream.writable) {
					writeStream.write(data);
				}
			},
		);
		writeStream.write(`\nCloned ${repoclone} to ${outputPath}: ✅\n`);
	} catch (error) {
		writeStream.write(`ERROR Clonning: ${error}: ❌`);
		throw error;
	} finally {
		writeStream.end();
	}
};
/**
 * Clones a compose's Bitbucket repository into `COMPOSE_PATH/<appName>/code`
 * on the local machine (no log streaming).
 *
 * @throws TRPCError when the compose has no Bitbucket provider configured
 */
export const cloneRawBitbucketRepository = async (entity: Compose) => {
	const { COMPOSE_PATH } = paths();
	const {
		appName,
		bitbucketRepository,
		bitbucketOwner,
		bitbucketBranch,
		bitbucketId,
	} = entity;
	if (!bitbucketId) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Bitbucket Provider not found",
		});
	}
	const bitbucketProvider = await findBitbucketById(bitbucketId);
	const basePath = COMPOSE_PATH;
	const outputPath = join(basePath, appName, "code");
	// Start from a clean directory before cloning.
	await recreateDirectory(outputPath);
	const repoclone = `bitbucket.org/${bitbucketOwner}/${bitbucketRepository}.git`;
	// App-password basic auth embedded in the clone URL.
	const cloneUrl = `https://${bitbucketProvider?.bitbucketUsername}:${bitbucketProvider?.appPassword}@${repoclone}`;
	// Errors propagate to the caller; the previous `catch { throw }` wrapper
	// was a no-op and has been removed.
	await spawnAsync("git", [
		"clone",
		"--branch",
		bitbucketBranch!,
		"--depth",
		"1",
		cloneUrl,
		outputPath,
		"--progress",
	]);
};
/**
 * Clones a compose's Bitbucket repository on a remote server by running
 * `git clone` over SSH (execAsyncRemote).
 *
 * @throws TRPCError when the compose has no server or no Bitbucket provider
 */
export const cloneRawBitbucketRepositoryRemote = async (compose: Compose) => {
	const { COMPOSE_PATH } = paths(true);
	const {
		appName,
		bitbucketRepository,
		bitbucketOwner,
		bitbucketBranch,
		bitbucketId,
		serverId,
	} = compose;
	if (!serverId) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Server not found",
		});
	}
	if (!bitbucketId) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Bitbucket Provider not found",
		});
	}
	const bitbucketProvider = await findBitbucketById(bitbucketId);
	const basePath = COMPOSE_PATH;
	const outputPath = join(basePath, appName, "code");
	// NOTE(review): this recreates the directory on the LOCAL filesystem
	// while the clone below runs on the remote server — confirm intended.
	await recreateDirectory(outputPath);
	const repoclone = `bitbucket.org/${bitbucketOwner}/${bitbucketRepository}.git`;
	const cloneUrl = `https://${bitbucketProvider?.bitbucketUsername}:${bitbucketProvider?.appPassword}@${repoclone}`;
	// Errors propagate to the caller; the former `catch { throw }` was a no-op.
	await execAsyncRemote(
		serverId,
		`git clone --branch ${bitbucketBranch} --depth 1 ${cloneUrl} ${outputPath}`,
	);
};
/**
 * Builds the shell script that clones a Bitbucket repository on a remote
 * server, appending all output to the deployment log.
 *
 * @returns the script to run via execAsyncRemote
 * @throws TRPCError when the server or the Bitbucket provider is missing
 */
export const getBitbucketCloneCommand = async (
	entity: ApplicationWithBitbucket | ComposeWithBitbucket,
	logPath: string,
	isCompose = false,
) => {
	const { COMPOSE_PATH, APPLICATIONS_PATH } = paths(true);
	const {
		appName,
		bitbucketRepository,
		bitbucketOwner,
		bitbucketBranch,
		bitbucketId,
		serverId,
		bitbucket,
	} = entity;
	if (!serverId) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Server not found",
		});
	}
	if (!bitbucketId) {
		// Surface the configuration error in the remote deployment log too.
		const command = `
		echo "Error: ❌ Bitbucket Provider not found" >> ${logPath};
		exit 1;
	`;
		await execAsyncRemote(serverId, command);
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Bitbucket Provider not found",
		});
	}
	const bitbucketProvider = await findBitbucketById(bitbucketId);
	const basePath = isCompose ? COMPOSE_PATH : APPLICATIONS_PATH;
	const outputPath = join(basePath, appName, "code");
	// NOTE(review): recreateDirectory runs on the LOCAL filesystem while the
	// generated script recreates the remote dir itself — confirm intended.
	await recreateDirectory(outputPath);
	const repoclone = `bitbucket.org/${bitbucketOwner}/${bitbucketRepository}.git`;
	// App-password basic auth embedded in the clone URL.
	const cloneUrl = `https://${bitbucketProvider?.bitbucketUsername}:${bitbucketProvider?.appPassword}@${repoclone}`;
	const cloneCommand = `
rm -rf ${outputPath};
mkdir -p ${outputPath};
if ! git clone --branch ${bitbucketBranch} --depth 1 --progress ${cloneUrl} ${outputPath} >> ${logPath} 2>&1; then
	echo "❌ [ERROR] Fail to clone the repository ${repoclone}" >> ${logPath};
	exit 1;
fi
echo "Cloned ${repoclone} to ${outputPath}: ✅" >> ${logPath};
	`;
	return cloneCommand;
};
/**
 * Lists the repositories visible to a Bitbucket provider, using the
 * workspace name when configured (falling back to the username) and basic
 * auth (username + app password).
 *
 * @param bitbucketId provider id; when missing an empty list is returned
 * @returns simplified repository descriptors (name, html url, owner slug)
 * @throws TRPCError when the Bitbucket API responds with a non-2xx status
 */
export const getBitbucketRepositories = async (bitbucketId?: string) => {
	if (!bitbucketId) {
		return [];
	}
	const bitbucketProvider = await findBitbucketById(bitbucketId);
	const username =
		bitbucketProvider.bitbucketWorkspaceName ||
		bitbucketProvider.bitbucketUsername;
	const url = `https://api.bitbucket.org/2.0/repositories/${username}`;
	// Errors (network or thrown TRPCError) propagate to the caller; the
	// former `catch { throw }` wrapper was a no-op.
	const response = await fetch(url, {
		method: "GET",
		headers: {
			Authorization: `Basic ${Buffer.from(`${bitbucketProvider.bitbucketUsername}:${bitbucketProvider.appPassword}`).toString("base64")}`,
		},
	});
	if (!response.ok) {
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: `Failed to fetch repositories: ${response.statusText}`,
		});
	}
	const data = await response.json();
	const mappedData = data.values.map((repo: any) => {
		return {
			name: repo.name,
			url: repo.links.html.href,
			owner: {
				username: repo.workspace.slug,
			},
		};
	});
	return mappedData as {
		name: string;
		url: string;
		owner: {
			username: string;
		};
	}[];
};
/**
 * Lists the branches of a Bitbucket repository.
 *
 * @param input provider id plus repository owner/name
 * @returns branch descriptors (name + head commit sha); empty when no provider
 * @throws TRPCError when the Bitbucket API responds with a non-2xx status
 */
export const getBitbucketBranches = async (
	input: typeof apiFindBitbucketBranches._type,
) => {
	if (!input.bitbucketId) {
		return [];
	}
	const bitbucketProvider = await findBitbucketById(input.bitbucketId);
	const { owner, repo } = input;
	const url = `https://api.bitbucket.org/2.0/repositories/${owner}/${repo}/refs/branches`;
	// Errors propagate to the caller; the former `catch { throw }` was a no-op.
	const response = await fetch(url, {
		method: "GET",
		headers: {
			Authorization: `Basic ${Buffer.from(`${bitbucketProvider.bitbucketUsername}:${bitbucketProvider.appPassword}`).toString("base64")}`,
		},
	});
	if (!response.ok) {
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: `HTTP error! status: ${response.status}`,
		});
	}
	const data = await response.json();
	const mappedData = data.values.map((branch: any) => {
		return {
			name: branch.name,
			commit: {
				sha: branch.target.hash,
			},
		};
	});
	return mappedData as {
		name: string;
		commit: {
			sha: string;
		};
	}[];
};
/**
 * Verifies a Bitbucket provider's credentials by listing its repositories.
 *
 * @returns the number of repositories visible to the credentials
 * @throws Error when the provider does not exist
 * @throws TRPCError when the Bitbucket API rejects the request
 */
export const testBitbucketConnection = async (
	input: typeof apiBitbucketTestConnection._type,
) => {
	const bitbucketProvider = await findBitbucketById(input.bitbucketId);

	if (!bitbucketProvider) {
		throw new Error("Bitbucket provider not found");
	}
	const { bitbucketUsername, workspaceName } = input;
	// Prefer the workspace when given; otherwise list the user's own repos.
	const username = workspaceName || bitbucketUsername;
	const url = `https://api.bitbucket.org/2.0/repositories/${username}`;
	const response = await fetch(url, {
		method: "GET",
		headers: {
			Authorization: `Basic ${Buffer.from(`${bitbucketProvider.bitbucketUsername}:${bitbucketProvider.appPassword}`).toString("base64")}`,
		},
	});
	if (!response.ok) {
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: `Failed to fetch repositories: ${response.statusText}`,
		});
	}
	const data = await response.json();
	// Only the count is needed; the previous code mapped every repo into a
	// throwaway object (and cast it `as []`) just to read `.length`.
	return data.values.length;
};

View File

@@ -1,86 +0,0 @@
import { createWriteStream } from "node:fs";
import { type ApplicationNested, mechanizeDockerContainer } from "../builders";
import { pullImage } from "../docker/utils";
// Credentials for a private Docker registry, forwarded to the pull call.
interface RegistryAuth {
	username: string;
	password: string;
	serveraddress: string;
}
/**
 * Deploys an application from a prebuilt Docker image on the local engine:
 * pulls the image (with optional registry credentials) and then recreates
 * the container/service, streaming progress into the deployment log.
 *
 * @throws Error when the application has no docker image configured
 */
export const buildDocker = async (
	application: ApplicationNested,
	logPath: string,
): Promise<void> => {
	const { buildType, dockerImage, username, password } = application;
	const authConfig: Partial<RegistryAuth> = {
		username: username || "",
		password: password || "",
	};
	const writeStream = createWriteStream(logPath, { flags: "a" });
	writeStream.write(`\nBuild ${buildType}\n`);
	writeStream.write(`Pulling ${dockerImage}: ✅\n`);
	try {
		if (!dockerImage) {
			throw new Error("Docker image not found");
		}
		await pullImage(
			dockerImage,
			(data) => {
				// Forward per-layer pull status lines to the deployment log.
				if (writeStream.writable) {
					writeStream.write(`${data.status}\n`);
				}
			},
			authConfig,
		);
		await mechanizeDockerContainer(application);
		writeStream.write("\nDocker Deployed: ✅\n");
	} catch (error) {
		writeStream.write(`ERROR: ${error}: ❌`);
		throw error;
	} finally {
		writeStream.end();
	}
};
/**
 * Builds the shell script that pulls an application's Docker image on a
 * remote server, appending all output to the deployment log.
 *
 * @returns the script to run remotely
 * @throws Error when the application has no docker image configured
 */
export const buildRemoteDocker = async (
	application: ApplicationNested,
	logPath: string,
) => {
	// `sourceType` was previously destructured here but never used.
	const { dockerImage, username, password } = application;
	if (!dockerImage) {
		throw new Error("Docker image not found");
	}
	let command = `
echo "Pulling ${dockerImage}" >> ${logPath};
`;
	if (username && password) {
		// SECURITY(review): the password is interpolated into the shell
		// command and passed via --password, so it can leak into shell
		// history/process lists; consider --password-stdin. Kept as-is to
		// preserve behavior.
		command += `
if ! docker login --username ${username} --password ${password} https://index.docker.io/v1/ >> ${logPath} 2>&1; then
	echo "❌ Login failed" >> ${logPath};
	exit 1;
fi
`;
	}
	command += `
docker pull ${dockerImage} >> ${logPath} 2>> ${logPath} || {
	echo "❌ Pulling image failed" >> ${logPath};
	exit 1;
}
echo "✅ Pulling image completed." >> ${logPath};
`;
	return command;
};

View File

@@ -1,347 +0,0 @@
import { createWriteStream } from "node:fs";
import path, { join } from "node:path";
import type { Compose } from "@/server/api/services/compose";
import { updateSSHKeyById } from "@/server/api/services/ssh-key";
import { paths } from "@/server/constants";
import { TRPCError } from "@trpc/server";
import { recreateDirectory } from "../filesystem/directory";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { spawnAsync } from "../process/spawnAsync";
/**
 * Clones a custom Git repository (any host, optionally over SSH with a
 * stored deploy key) into `<base>/<appName>/code`, streaming git progress
 * into the deployment log.
 *
 * @param entity app name plus custom git url/branch and optional SSH key id
 * @param logPath deployment log file (opened in append mode)
 * @param isCompose selects COMPOSE_PATH over APPLICATIONS_PATH as base dir
 * @throws TRPCError when url or branch is missing
 */
export const cloneGitRepository = async (
	entity: {
		appName: string;
		customGitUrl?: string | null;
		customGitBranch?: string | null;
		customGitSSHKeyId?: string | null;
	},
	logPath: string,
	isCompose = false,
) => {
	const { SSH_PATH, COMPOSE_PATH, APPLICATIONS_PATH } = paths();
	const { appName, customGitUrl, customGitBranch, customGitSSHKeyId } = entity;
	if (!customGitUrl || !customGitBranch) {
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: "Error: Repository not found",
		});
	}
	const writeStream = createWriteStream(logPath, { flags: "a" });
	// Private key is stored as `<keyId>_rsa` under the SSH directory.
	const keyPath = path.join(SSH_PATH, `${customGitSSHKeyId}_rsa`);
	const basePath = isCompose ? COMPOSE_PATH : APPLICATIONS_PATH;
	const outputPath = join(basePath, appName, "code");
	const knownHostsPath = path.join(SSH_PATH, "known_hosts");
	try {
		// SSH-style URLs need the host key registered in known_hosts first.
		if (!isHttpOrHttps(customGitUrl)) {
			await addHostToKnownHosts(customGitUrl);
		}
		await recreateDirectory(outputPath);
		// const command = `GIT_SSH_COMMAND="ssh -i ${keyPath} -o UserKnownHostsFile=${knownHostsPath}" git clone --branch ${customGitBranch} --depth 1 ${customGitUrl} ${gitCopyPath} --progress`;
		// const { stdout, stderr } = await execAsync(command);
		writeStream.write(
			`\nCloning Repo Custom ${customGitUrl} to ${outputPath}: ✅\n`,
		);
		// Record when the deploy key was last used.
		if (customGitSSHKeyId) {
			await updateSSHKeyById({
				sshKeyId: customGitSSHKeyId,
				lastUsedAt: new Date().toISOString(),
			});
		}
		await spawnAsync(
			"git",
			[
				"clone",
				"--branch",
				customGitBranch,
				"--depth",
				"1",
				"--recurse-submodules",
				customGitUrl,
				outputPath,
				"--progress",
			],
			(data) => {
				// Forward git progress output to the deployment log.
				if (writeStream.writable) {
					writeStream.write(data);
				}
			},
			{
				env: {
					...process.env,
					// Point git at the stored deploy key when one is configured.
					...(customGitSSHKeyId && {
						GIT_SSH_COMMAND: `ssh -i ${keyPath} -o UserKnownHostsFile=${knownHostsPath}`,
					}),
				},
			},
		);
		writeStream.write(`\nCloned Custom Git ${customGitUrl}: ✅\n`);
	} catch (error) {
		writeStream.write(`\nERROR Cloning Custom Git: ${error}: ❌\n`);
		throw error;
	} finally {
		writeStream.end();
	}
};
/**
 * Builds the shell script that clones a custom Git repository on a remote
 * server, appending output to the deployment log. Adds the host key to
 * known_hosts and a GIT_SSH_COMMAND prefix when an SSH deploy key is used.
 *
 * @returns the script to run via execAsyncRemote
 * @throws TRPCError when url or branch is missing (after logging remotely)
 */
export const getCustomGitCloneCommand = async (
	entity: {
		appName: string;
		customGitUrl?: string | null;
		customGitBranch?: string | null;
		customGitSSHKeyId?: string | null;
		serverId: string | null;
	},
	logPath: string,
	isCompose = false,
) => {
	const { SSH_PATH, COMPOSE_PATH, APPLICATIONS_PATH } = paths(true);
	const {
		appName,
		customGitUrl,
		customGitBranch,
		customGitSSHKeyId,
		serverId,
	} = entity;
	if (!customGitUrl || !customGitBranch) {
		// Surface the configuration error in the remote deployment log too.
		const command = `
		echo "Error: ❌ Repository not found" >> ${logPath};
		exit 1;
	`;
		await execAsyncRemote(serverId, command);
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: "Error: Repository not found",
		});
	}
	// Paths are computed for the remote layout (paths(true)).
	const keyPath = path.join(SSH_PATH, `${customGitSSHKeyId}_rsa`);
	const basePath = isCompose ? COMPOSE_PATH : APPLICATIONS_PATH;
	const outputPath = join(basePath, appName, "code");
	const knownHostsPath = path.join(SSH_PATH, "known_hosts");
	// Record when the deploy key was last used.
	if (customGitSSHKeyId) {
		await updateSSHKeyById({
			sshKeyId: customGitSSHKeyId,
			lastUsedAt: new Date().toISOString(),
		});
	}
	try {
		const command = [];
		// SSH-style URLs need the host key registered in known_hosts first.
		if (!isHttpOrHttps(customGitUrl)) {
			command.push(addHostToKnownHostsCommand(customGitUrl));
		}
		command.push(`rm -rf ${outputPath};`);
		command.push(`mkdir -p ${outputPath};`);
		command.push(
			`echo "Cloning Custom Git ${customGitUrl}" to ${outputPath}: ✅ >> ${logPath};`,
		);
		// Prefix the clone with GIT_SSH_COMMAND so git uses the deploy key.
		if (customGitSSHKeyId) {
			command.push(
				`GIT_SSH_COMMAND="ssh -i ${keyPath} -o UserKnownHostsFile=${knownHostsPath}"`,
			);
		}
		command.push(
			`if ! git clone --branch ${customGitBranch} --depth 1 --progress ${customGitUrl} ${outputPath} >> ${logPath} 2>&1; then
		echo "❌ [ERROR] Fail to clone the repository ${customGitUrl}" >> ${logPath};
		exit 1;
	fi
	`,
		);
		command.push(`echo "Cloned Custom Git ${customGitUrl}: ✅" >> ${logPath};`);
		return command.join("\n");
	} catch (error) {
		// NOTE(review): this catch only logs and re-throws; the body above is
		// unlikely to throw — confirm whether the wrapper is still needed.
		console.log(error);
		throw error;
	}
};
/** True when the URL uses the http(s) scheme (as opposed to SSH). */
const isHttpOrHttps = (url: string): boolean => /^https?:\/\//.test(url);
/**
 * Registers an SSH repository host's key in the local known_hosts file via
 * `ssh-keyscan`, so later non-interactive clones do not fail host
 * verification. Logs and re-throws on failure.
 */
const addHostToKnownHosts = async (repositoryURL: string) => {
	const { SSH_PATH } = paths();
	const { domain, port } = sanitizeRepoPathSSH(repositoryURL);
	const knownHostsPath = path.join(SSH_PATH, "known_hosts");
	const command = `ssh-keyscan -p ${port} ${domain} >> ${knownHostsPath}`;
	try {
		await execAsync(command);
	} catch (error) {
		console.error(`Error adding host to known_hosts: ${error}`);
		throw error;
	}
};
/**
 * Shell-command variant of addHostToKnownHosts, for inclusion in scripts
 * executed on remote servers.
 */
const addHostToKnownHostsCommand = (repositoryURL: string) => {
	const { SSH_PATH } = paths();
	const { domain, port } = sanitizeRepoPathSSH(repositoryURL);
	const knownHostsPath = path.join(SSH_PATH, "known_hosts");
	return `ssh-keyscan -p ${port} ${domain} >> ${knownHostsPath};`;
};
/**
 * Parses an SSH-style repository URL (e.g. `git@host:owner/repo.git` or
 * `ssh://user@host:port/owner/repo`) into its components. The user defaults
 * to "git" and the port to 22; `repoPath` rebuilds a normalized ssh:// URL.
 *
 * @throws Error when the input does not match the expected shape
 */
const sanitizeRepoPathSSH = (input: string) => {
	const SSH_PATH_RE = new RegExp(
		[
			/^\s*/,
			/(?:(?<proto>[a-z]+):\/\/)?/,
			/(?:(?<user>[a-z_][a-z0-9_-]+)@)?/,
			/(?<domain>[^\s\/\?#:]+)/,
			/(?::(?<port>[0-9]{1,5}))?/,
			/(?:[\/:](?<owner>[^\s\/\?#:]+))?/,
			/(?:[\/:](?<repo>(?:[^\s\?#:.]|\.(?!git\/?\s*$))+))/,
			/(?:.git)?\/?\s*$/,
		]
			.map((r) => r.source)
			.join(""),
		"i",
	);
	const match = input.match(SSH_PATH_RE);
	if (!match) {
		throw new Error(`Malformatted SSH path: ${input}`);
	}
	const groups = match.groups ?? {};
	return {
		user: groups.user ?? "git",
		domain: groups.domain,
		port: Number(groups.port ?? 22),
		owner: groups.owner ?? "",
		repo: groups.repo,
		// Normalized ssh:// form of the parsed URL.
		get repoPath() {
			return `ssh://${this.user}@${this.domain}:${this.port}/${this.owner}${
				this.owner && "/"
			}${this.repo}.git`;
		},
	};
};
/**
 * Clones a custom Git repository into `COMPOSE_PATH/<appName>/code` on the
 * local machine, without streaming output anywhere.
 *
 * @throws TRPCError when url or branch is missing
 */
export const cloneGitRawRepository = async (entity: {
	appName: string;
	customGitUrl?: string | null;
	customGitBranch?: string | null;
	customGitSSHKeyId?: string | null;
}) => {
	const { appName, customGitUrl, customGitBranch, customGitSSHKeyId } = entity;
	if (!customGitUrl || !customGitBranch) {
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: "Error: Repository not found",
		});
	}
	const { SSH_PATH, COMPOSE_PATH } = paths();
	// Private key is stored as `<keyId>_rsa` under the SSH directory.
	const keyPath = path.join(SSH_PATH, `${customGitSSHKeyId}_rsa`);
	const basePath = COMPOSE_PATH;
	const outputPath = join(basePath, appName, "code");
	const knownHostsPath = path.join(SSH_PATH, "known_hosts");
	try {
		// NOTE(review): unlike cloneGitRepository, this runs ssh-keyscan even
		// for http(s) URLs — confirm intended.
		await addHostToKnownHosts(customGitUrl);
		await recreateDirectory(outputPath);
		// Record when the deploy key was last used.
		if (customGitSSHKeyId) {
			await updateSSHKeyById({
				sshKeyId: customGitSSHKeyId,
				lastUsedAt: new Date().toISOString(),
			});
		}
		await spawnAsync(
			"git",
			[
				"clone",
				"--branch",
				customGitBranch,
				"--depth",
				"1",
				customGitUrl,
				outputPath,
				"--progress",
			],
			// Progress output is intentionally discarded here.
			(data) => {},
			{
				env: {
					...process.env,
					// Point git at the stored deploy key when one is configured.
					...(customGitSSHKeyId && {
						GIT_SSH_COMMAND: `ssh -i ${keyPath} -o UserKnownHostsFile=${knownHostsPath}`,
					}),
				},
			},
		);
	} catch (error) {
		throw error;
	}
};
/**
 * Clones a compose's custom Git repository on a remote server by running a
 * generated shell script over SSH (execAsyncRemote).
 *
 * @throws TRPCError when the compose has no server or no git URL configured
 */
export const cloneRawGitRepositoryRemote = async (compose: Compose) => {
	const {
		appName,
		customGitBranch,
		customGitUrl,
		customGitSSHKeyId,
		serverId,
	} = compose;
	if (!serverId) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Server not found",
		});
	}
	if (!customGitUrl) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Git Provider not found",
		});
	}
	// Paths are computed for the remote layout (paths(true)).
	const { SSH_PATH, COMPOSE_PATH } = paths(true);
	const keyPath = path.join(SSH_PATH, `${customGitSSHKeyId}_rsa`);
	const basePath = COMPOSE_PATH;
	const outputPath = join(basePath, appName, "code");
	const knownHostsPath = path.join(SSH_PATH, "known_hosts");
	// Record when the deploy key was last used.
	if (customGitSSHKeyId) {
		await updateSSHKeyById({
			sshKeyId: customGitSSHKeyId,
			lastUsedAt: new Date().toISOString(),
		});
	}
	try {
		const command = [];
		// SSH-style URLs need the host key registered in known_hosts first.
		if (!isHttpOrHttps(customGitUrl)) {
			command.push(addHostToKnownHostsCommand(customGitUrl));
		}
		command.push(`rm -rf ${outputPath};`);
		command.push(`mkdir -p ${outputPath};`);
		// Prefix the clone with GIT_SSH_COMMAND so git uses the deploy key.
		if (customGitSSHKeyId) {
			command.push(
				`GIT_SSH_COMMAND="ssh -i ${keyPath} -o UserKnownHostsFile=${knownHostsPath}"`,
			);
		}
		command.push(
			`if ! git clone --branch ${customGitBranch} --depth 1 --progress ${customGitUrl} ${outputPath} ; then
		echo "[ERROR] Fail to clone the repository ";
		exit 1;
	fi
	`,
		);
		await execAsyncRemote(serverId, command.join("\n"));
	} catch (error) {
		throw error;
	}
};

View File

@@ -1,336 +0,0 @@
import { createWriteStream } from "node:fs";
import { join } from "node:path";
import { paths } from "@/server/constants";
import type { InferResultType } from "@/server/types/with";
import { createAppAuth } from "@octokit/auth-app";
import { TRPCError } from "@trpc/server";
import { Octokit } from "octokit";
import { recreateDirectory } from "../filesystem/directory";
import { spawnAsync } from "../process/spawnAsync";
import type { Compose } from "@/server/api/services/compose";
import { type Github, findGithubById } from "@/server/api/services/github";
import type { apiFindGithubBranches } from "@/server/db/schema";
import { execAsyncRemote } from "../process/execAsync";
/**
 * Builds an Octokit client authenticated as the configured GitHub App
 * installation.
 *
 * @throws TRPCError when app id, private key or installation id is missing
 */
export const authGithub = (githubProvider: Github) => {
	if (!haveGithubRequirements(githubProvider)) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Github Account not configured correctly",
		});
	}
	const octokit = new Octokit({
		authStrategy: createAppAuth,
		auth: {
			appId: githubProvider?.githubAppId || 0,
			privateKey: githubProvider?.githubPrivateKey || "",
			installationId: githubProvider?.githubInstallationId,
		},
	});
	return octokit;
};
/**
 * Exchanges the app credentials for a short-lived installation access
 * token, used as basic-auth credentials in clone URLs.
 */
export const getGithubToken = async (
	octokit: ReturnType<typeof authGithub>,
) => {
	const { token } = (await octokit.auth({ type: "installation" })) as {
		token: string;
	};
	return token;
};
/**
 * A GitHub provider is usable only when app id, private key and
 * installation id are all present.
 */
export const haveGithubRequirements = (githubProvider: Github) => {
	const appId = githubProvider?.githubAppId;
	const privateKey = githubProvider?.githubPrivateKey;
	const installationId = githubProvider?.githubInstallationId;
	return Boolean(appId && privateKey && installationId);
};
/**
 * Lists the human-readable reasons a repository cannot be cloned (missing
 * repository, owner or branch); empty when everything is configured.
 */
const getErrorCloneRequirements = (entity: {
	repository?: string | null;
	owner?: string | null;
	branch?: string | null;
}) => {
	const checks: Array<[unknown, string]> = [
		[entity.repository, "1. Repository not assigned."],
		[entity.owner, "2. Owner not specified."],
		[entity.branch, "3. Branch not defined."],
	];
	return checks.filter(([value]) => !value).map(([, reason]) => reason);
};
// Application row joined with its GitHub provider relation.
export type ApplicationWithGithub = InferResultType<
	"applications",
	{ github: true }
>;
// Compose row joined with its GitHub provider relation.
export type ComposeWithGithub = InferResultType<"compose", { github: true }>;
/**
 * Clones a GitHub repository for an application or compose service into
 * `<base>/<appName>/code` using a short-lived installation token, streaming
 * git progress into the deployment log.
 *
 * @throws TRPCError when the provider is missing or repo info is incomplete
 */
export const cloneGithubRepository = async (
	entity: ApplicationWithGithub | ComposeWithGithub,
	logPath: string,
	isCompose = false,
) => {
	const { APPLICATIONS_PATH, COMPOSE_PATH } = paths();
	const writeStream = createWriteStream(logPath, { flags: "a" });
	const { appName, repository, owner, branch, githubId } = entity;
	if (!githubId) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "GitHub Provider not found",
		});
	}
	const requirements = getErrorCloneRequirements(entity);
	// Check if requirements are met
	if (requirements.length > 0) {
		writeStream.write(
			`\nGitHub Repository configuration failed for application: ${appName}\n`,
		);
		writeStream.write("Reasons:\n");
		writeStream.write(requirements.join("\n"));
		writeStream.end();
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: "Error: GitHub repository information is incomplete.",
		});
	}
	const githubProvider = await findGithubById(githubId);
	const basePath = isCompose ? COMPOSE_PATH : APPLICATIONS_PATH;
	const outputPath = join(basePath, appName, "code");
	const octokit = authGithub(githubProvider);
	// Installation token used as basic-auth credentials in the clone URL.
	const token = await getGithubToken(octokit);
	const repoclone = `github.com/${owner}/${repository}.git`;
	// Start from a clean checkout directory.
	await recreateDirectory(outputPath);
	const cloneUrl = `https://oauth2:${token}@${repoclone}`;
	try {
		writeStream.write(`\nClonning Repo ${repoclone} to ${outputPath}: ✅\n`);
		await spawnAsync(
			"git",
			[
				"clone",
				"--branch",
				branch!,
				"--depth",
				"1",
				cloneUrl,
				outputPath,
				"--progress",
			],
			(data) => {
				// Forward git progress output to the deployment log.
				if (writeStream.writable) {
					writeStream.write(data);
				}
			},
		);
		writeStream.write(`\nCloned ${repoclone}: ✅\n`);
	} catch (error) {
		writeStream.write(`ERROR Clonning: ${error}: ❌`);
		throw error;
	} finally {
		writeStream.end();
	}
};
/**
 * Builds the shell script that clones a GitHub repository on a remote
 * server, appending all output to the deployment log.
 *
 * @returns the script to run via execAsyncRemote, or undefined when the
 *          repository configuration is incomplete (the error is logged
 *          remotely instead)
 * @throws TRPCError when the server or the GitHub provider is missing
 */
export const getGithubCloneCommand = async (
	entity: ApplicationWithGithub | ComposeWithGithub,
	logPath: string,
	isCompose = false,
) => {
	const { appName, repository, owner, branch, githubId, serverId } = entity;
	if (!serverId) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Server not found",
		});
	}
	if (!githubId) {
		// Surface the configuration error in the remote deployment log too.
		const command = `
		echo "Error: ❌ Github Provider not found" >> ${logPath};
		exit 1;
	`;
		await execAsyncRemote(serverId, command);
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "GitHub Provider not found",
		});
	}
	const requirements = getErrorCloneRequirements(entity);
	// Build log messages
	if (requirements.length > 0) {
		let logMessages = "";
		logMessages += `\nGitHub Repository configuration failed for application: ${appName}\n`;
		logMessages += "Reasons:\n";
		logMessages += requirements.join("\n");
		// Escape the message so it survives being embedded in a double-quoted
		// shell string.
		const escapedLogMessages = logMessages
			.replace(/\\/g, "\\\\")
			.replace(/"/g, '\\"')
			.replace(/\n/g, "\\n");
		const bashCommand = `
		echo "${escapedLogMessages}" >> ${logPath};
		exit 1; # Exit with error code
	`;
		await execAsyncRemote(serverId, bashCommand);
		return;
	}
	const { COMPOSE_PATH, APPLICATIONS_PATH } = paths(true);
	const githubProvider = await findGithubById(githubId);
	const basePath = isCompose ? COMPOSE_PATH : APPLICATIONS_PATH;
	const outputPath = join(basePath, appName, "code");
	const octokit = authGithub(githubProvider);
	// Installation token used as basic-auth credentials in the clone URL.
	const token = await getGithubToken(octokit);
	const repoclone = `github.com/${owner}/${repository}.git`;
	const cloneUrl = `https://oauth2:${token}@${repoclone}`;
	const cloneCommand = `
rm -rf ${outputPath};
mkdir -p ${outputPath};
if ! git clone --branch ${branch} --depth 1 --progress ${cloneUrl} ${outputPath} >> ${logPath} 2>&1; then
	echo "❌ [ERROR] Fallo al clonar el repositorio ${repoclone}" >> ${logPath};
	exit 1;
fi
echo "Cloned ${repoclone} to ${outputPath}: ✅" >> ${logPath};
	`;
	return cloneCommand;
};
/**
 * Clones a compose's GitHub repository into `COMPOSE_PATH/<appName>/code`
 * on the local machine using a short-lived installation token.
 *
 * @throws TRPCError when the compose has no GitHub provider configured
 */
export const cloneRawGithubRepository = async (entity: Compose) => {
	const { appName, repository, owner, branch, githubId } = entity;
	if (!githubId) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "GitHub Provider not found",
		});
	}
	const { COMPOSE_PATH } = paths();
	const githubProvider = await findGithubById(githubId);
	const basePath = COMPOSE_PATH;
	const outputPath = join(basePath, appName, "code");
	const octokit = authGithub(githubProvider);
	// Installation token used as basic-auth credentials in the clone URL.
	const token = await getGithubToken(octokit);
	const repoclone = `github.com/${owner}/${repository}.git`;
	// Start from a clean checkout directory.
	await recreateDirectory(outputPath);
	const cloneUrl = `https://oauth2:${token}@${repoclone}`;
	// Errors propagate to the caller; the former `catch { throw }` was a no-op.
	await spawnAsync("git", [
		"clone",
		"--branch",
		branch!,
		"--depth",
		"1",
		cloneUrl,
		outputPath,
		"--progress",
	]);
};
/**
 * Clones a compose's GitHub repository on a remote server by running
 * `git clone` over SSH (execAsyncRemote) with an installation token.
 *
 * @throws TRPCError when the compose has no server or no GitHub provider
 */
export const cloneRawGithubRepositoryRemote = async (compose: Compose) => {
	const { appName, repository, owner, branch, githubId, serverId } = compose;
	if (!serverId) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Server not found",
		});
	}
	if (!githubId) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "GitHub Provider not found",
		});
	}
	const { COMPOSE_PATH } = paths(true);
	const githubProvider = await findGithubById(githubId);
	const basePath = COMPOSE_PATH;
	const outputPath = join(basePath, appName, "code");
	const octokit = authGithub(githubProvider);
	// Installation token used as basic-auth credentials in the clone URL.
	const token = await getGithubToken(octokit);
	const repoclone = `github.com/${owner}/${repository}.git`;
	// NOTE(review): this recreates the directory on the LOCAL filesystem
	// while the clone below runs on the remote server — confirm intended.
	await recreateDirectory(outputPath);
	const cloneUrl = `https://oauth2:${token}@${repoclone}`;
	// Errors propagate to the caller; the former `catch { throw }` was a no-op.
	await execAsyncRemote(
		serverId,
		`git clone --branch ${branch} --depth 1 ${cloneUrl} ${outputPath}`,
	);
};
/**
 * Lists every repository accessible to the provider's GitHub App
 * installation, paginating through all result pages.
 *
 * @param githubId provider id; when missing an empty list is returned
 */
export const getGithubRepositories = async (githubId?: string) => {
	if (!githubId) {
		return [];
	}
	const githubProvider = await findGithubById(githubId);
	const octokit = new Octokit({
		authStrategy: createAppAuth,
		auth: {
			appId: githubProvider.githubAppId,
			privateKey: githubProvider.githubPrivateKey,
			installationId: githubProvider.githubInstallationId,
		},
	});
	// paginate() flattens all pages; the cast recovers the repository array
	// element type from the underlying endpoint's response shape.
	const repositories = (await octokit.paginate(
		octokit.rest.apps.listReposAccessibleToInstallation,
	)) as unknown as Awaited<
		ReturnType<typeof octokit.rest.apps.listReposAccessibleToInstallation>
	>["data"]["repositories"];
	return repositories;
};
/**
 * Lists the branches of one repository via the GitHub App installation.
 * Returns an empty list when no provider id is supplied.
 */
export const getGithubBranches = async (
	input: typeof apiFindGithubBranches._type,
) => {
	if (!input.githubId) {
		return [];
	}

	const provider = await findGithubById(input.githubId);
	const octokit = new Octokit({
		authStrategy: createAppAuth,
		auth: {
			appId: provider.githubAppId,
			privateKey: provider.githubPrivateKey,
			installationId: provider.githubInstallationId,
		},
	});

	// paginate() follows every page so repos with many branches are complete.
	const branches = (await octokit.paginate(octokit.rest.repos.listBranches, {
		owner: input.owner,
		repo: input.repo,
	})) as unknown as Awaited<
		ReturnType<typeof octokit.rest.repos.listBranches>
	>["data"];

	return branches;
};

View File

@@ -1,447 +0,0 @@
import { createWriteStream } from "node:fs";
import { join } from "node:path";
import type { Compose } from "@/server/api/services/compose";
import {
type Gitlab,
findGitlabById,
updateGitlab,
} from "@/server/api/services/gitlab";
import { paths } from "@/server/constants";
import type { apiGitlabTestConnection } from "@/server/db/schema";
import type { InferResultType } from "@/server/types/with";
import { TRPCError } from "@trpc/server";
import { recreateDirectory } from "../filesystem/directory";
import { execAsyncRemote } from "../process/execAsync";
import { spawnAsync } from "../process/spawnAsync";
/**
 * Refreshes the GitLab OAuth access token when it has expired or is about to
 * (within a 60-second safety margin). No-ops while the stored token is still
 * valid. Persists the rotated token pair via updateGitlab.
 *
 * @returns the raw token response when a refresh happened, undefined otherwise.
 */
export const refreshGitlabToken = async (gitlabProviderId: string) => {
	const provider = await findGitlabById(gitlabProviderId);
	const nowSeconds = Math.floor(Date.now() / 1000);
	const safetyMargin = 60;

	const stillValid =
		provider.expiresAt && nowSeconds + safetyMargin < provider.expiresAt;
	if (stillValid) {
		return;
	}

	const response = await fetch("https://gitlab.com/oauth/token", {
		method: "POST",
		headers: {
			"Content-Type": "application/x-www-form-urlencoded",
		},
		body: new URLSearchParams({
			grant_type: "refresh_token",
			refresh_token: provider.refreshToken as string,
			client_id: provider.applicationId as string,
			client_secret: provider.secret as string,
		}),
	});
	if (!response.ok) {
		throw new Error(`Failed to refresh token: ${response.statusText}`);
	}

	const data = await response.json();
	const expiresAt = Math.floor(Date.now() / 1000) + data.expires_in;
	console.log("Refreshed token");

	await updateGitlab(gitlabProviderId, {
		accessToken: data.access_token,
		refreshToken: data.refresh_token,
		expiresAt,
	});
	return data;
};
/** True when the provider holds both OAuth tokens required for cloning. */
export const haveGitlabRequirements = (gitlabProvider: Gitlab) => {
	const hasAccessToken = Boolean(gitlabProvider?.accessToken);
	const hasRefreshToken = Boolean(gitlabProvider?.refreshToken);
	return hasAccessToken && hasRefreshToken;
};
/**
 * Validates that an entity has every field needed to clone from GitLab.
 * Returns a human-readable reason per missing field (empty array = OK);
 * the numbered messages are appended verbatim to deployment logs.
 */
const getErrorCloneRequirements = (entity: {
	gitlabRepository?: string | null;
	gitlabOwner?: string | null;
	gitlabBranch?: string | null;
	gitlabPathNamespace?: string | null;
}) => {
	const reasons: string[] = [];
	if (!entity.gitlabRepository) {
		reasons.push("1. Repository not assigned.");
	}
	if (!entity.gitlabOwner) {
		reasons.push("2. Owner not specified.");
	}
	if (!entity.gitlabBranch) {
		reasons.push("3. Branch not defined.");
	}
	if (!entity.gitlabPathNamespace) {
		reasons.push("4. Path namespace not defined.");
	}
	return reasons;
};
// Application row joined with its related GitLab provider record.
export type ApplicationWithGitlab = InferResultType<
	"applications",
	{ gitlab: true }
>;
// Compose row joined with its related GitLab provider record.
export type ComposeWithGitlab = InferResultType<"compose", { gitlab: true }>;
/**
 * Clones a GitLab repository locally for an application or compose service,
 * appending progress output to the deployment log at `logPath`.
 *
 * @param entity application/compose row joined with its GitLab provider
 * @param logPath file that receives the streamed git output
 * @param isCompose selects COMPOSE_PATH vs APPLICATIONS_PATH as the base dir
 * @throws TRPCError(NOT_FOUND) when no GitLab provider is linked
 * @throws TRPCError(BAD_REQUEST) when repo/owner/branch/namespace are incomplete
 */
export const cloneGitlabRepository = async (
	entity: ApplicationWithGitlab | ComposeWithGitlab,
	logPath: string,
	isCompose = false,
) => {
	const writeStream = createWriteStream(logPath, { flags: "a" });
	const { appName, gitlabBranch, gitlabId, gitlab, gitlabPathNamespace } =
		entity;

	if (!gitlabId) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Gitlab Provider not found",
		});
	}

	// Ensure the provider's OAuth access token is fresh before cloning.
	await refreshGitlabToken(gitlabId);

	const requirements = getErrorCloneRequirements(entity);

	// Check if requirements are met
	if (requirements.length > 0) {
		writeStream.write(
			`\nGitLab Repository configuration failed for application: ${appName}\n`,
		);
		writeStream.write("Reasons:\n");
		writeStream.write(requirements.join("\n"));
		writeStream.end();
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: "Error: GitLab repository information is incomplete.",
		});
	}

	const { COMPOSE_PATH, APPLICATIONS_PATH } = paths();
	const basePath = isCompose ? COMPOSE_PATH : APPLICATIONS_PATH;
	const outputPath = join(basePath, appName, "code");
	await recreateDirectory(outputPath);
	const repoclone = `gitlab.com/${gitlabPathNamespace}.git`;
	// The OAuth token is embedded in the URL — never write cloneUrl to the log.
	const cloneUrl = `https://oauth2:${gitlab?.accessToken}@${repoclone}`;

	try {
		// Fixed log-message typo: "Clonning" -> "Cloning".
		writeStream.write(`\nCloning Repo ${repoclone} to ${outputPath}: ✅\n`);
		await spawnAsync(
			"git",
			[
				"clone",
				"--branch",
				gitlabBranch!,
				"--depth",
				"1",
				cloneUrl,
				outputPath,
				"--progress",
			],
			// Forward git's progress output to the log while it is still open.
			(data) => {
				if (writeStream.writable) {
					writeStream.write(data);
				}
			},
		);
		writeStream.write(`\nCloned ${repoclone}: ✅\n`);
	} catch (error) {
		writeStream.write(`ERROR Cloning: ${error}: ❌`);
		throw error;
	} finally {
		writeStream.end();
	}
};
/**
 * Builds the bash command that clones a GitLab repository on a remote server.
 * Validation failures are reported by writing to the remote log and exiting
 * non-zero (via execAsyncRemote) instead of cloning.
 *
 * @param entity application/compose row joined with its GitLab provider
 * @param logPath remote log file that receives progress/error output
 * @param isCompose selects COMPOSE_PATH vs APPLICATIONS_PATH as the base dir
 * @returns the clone command string, or undefined when requirements failed
 * @throws TRPCError(NOT_FOUND) when the server or GitLab provider is missing
 */
export const getGitlabCloneCommand = async (
	entity: ApplicationWithGitlab | ComposeWithGitlab,
	logPath: string,
	isCompose = false,
) => {
	const {
		appName,
		gitlabRepository,
		gitlabOwner,
		gitlabPathNamespace,
		gitlabBranch,
		gitlabId,
		serverId,
		gitlab,
	} = entity;
	if (!serverId) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Server not found",
		});
	}
	if (!gitlabId) {
		// Surface the failure in the remote log before throwing locally.
		const command = `
			echo "Error: ❌ Gitlab Provider not found" >> ${logPath};
			exit 1;
		`;
		await execAsyncRemote(serverId, command);
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Gitlab Provider not found",
		});
	}
	const requirements = getErrorCloneRequirements(entity);
	// Build log messages
	let logMessages = "";
	if (requirements.length > 0) {
		logMessages += `\nGitLab Repository configuration failed for application: ${appName}\n`;
		logMessages += "Reasons:\n";
		logMessages += requirements.join("\n");
		// Escape backslashes, double quotes, and newlines so the message
		// survives being wrapped in a double-quoted echo argument.
		const escapedLogMessages = logMessages
			.replace(/\\/g, "\\\\")
			.replace(/"/g, '\\"')
			.replace(/\n/g, "\\n");
		const bashCommand = `
			echo "${escapedLogMessages}" >> ${logPath};
			exit 1; # Exit with error code
		`;
		await execAsyncRemote(serverId, bashCommand);
		return;
	}
	const { COMPOSE_PATH, APPLICATIONS_PATH } = paths(true);
	await refreshGitlabToken(gitlabId);
	const basePath = isCompose ? COMPOSE_PATH : APPLICATIONS_PATH;
	const outputPath = join(basePath, appName, "code");
	await recreateDirectory(outputPath);
	const repoclone = `gitlab.com/${gitlabPathNamespace}.git`;
	// OAuth token is embedded in the URL; the command redirects git output to
	// the log, so the token may appear there — NOTE(review): confirm whether
	// that is acceptable for this deployment log.
	const cloneUrl = `https://oauth2:${gitlab?.accessToken}@${repoclone}`;
	const cloneCommand = `
rm -rf ${outputPath};
mkdir -p ${outputPath};
if ! git clone --branch ${gitlabBranch} --depth 1 --progress ${cloneUrl} ${outputPath} >> ${logPath} 2>&1; then
	echo "❌ [ERROR] Fail to clone the repository ${repoclone}" >> ${logPath};
	exit 1;
fi
echo "Cloned ${repoclone} to ${outputPath}: ✅" >> ${logPath};
`;
	return cloneCommand;
};
/**
 * Fetches the GitLab projects the provider's user owns or belongs to, then
 * filters to the configured group (when set) or to personal projects.
 * NOTE(review): only the first page (100 projects) is requested — pagination
 * is not followed; confirm that limit is acceptable.
 */
export const getGitlabRepositories = async (gitlabId?: string) => {
	if (!gitlabId) {
		return [];
	}

	await refreshGitlabToken(gitlabId);
	const provider = await findGitlabById(gitlabId);

	const response = await fetch(
		`https://gitlab.com/api/v4/projects?membership=true&owned=true&page=${0}&per_page=${100}`,
		{
			headers: {
				Authorization: `Bearer ${provider.accessToken}`,
			},
		},
	);
	if (!response.ok) {
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: `Failed to fetch repositories: ${response.statusText}`,
		});
	}

	const allProjects = await response.json();
	// Group mode: keep group projects whose namespace path contains the
	// configured group name; otherwise keep only personal-namespace projects.
	const visibleProjects = allProjects.filter((project: any) => {
		const { full_path, kind } = project.namespace;
		const groupName = provider.groupName?.toLowerCase();
		if (groupName) {
			return full_path.toLowerCase().includes(groupName) && kind === "group";
		}
		return kind === "user";
	});

	return visibleProjects.map((project: any) => ({
		id: project.id,
		name: project.name,
		url: project.path_with_namespace,
		owner: {
			username: project.namespace.path,
		},
	})) as {
		id: number;
		name: string;
		url: string;
		owner: {
			username: string;
		};
	}[];
};
/**
 * Lists the branches of one GitLab project by numeric project id.
 * Returns an empty list when the provider id or project id is absent.
 */
export const getGitlabBranches = async (input: {
	id?: number;
	gitlabId?: string;
	owner: string;
	repo: string;
}) => {
	const { gitlabId, id } = input;
	if (!gitlabId || !id || id === 0) {
		return [];
	}

	const provider = await findGitlabById(gitlabId);
	const branchesResponse = await fetch(
		`https://gitlab.com/api/v4/projects/${id}/repository/branches`,
		{
			headers: {
				Authorization: `Bearer ${provider.accessToken}`,
			},
		},
	);
	if (!branchesResponse.ok) {
		throw new Error(`Failed to fetch branches: ${branchesResponse.statusText}`);
	}

	const branches = await branchesResponse.json();
	return branches as {
		id: string;
		name: string;
		commit: {
			id: string;
		};
	}[];
};
/**
 * Clones a GitLab-backed compose repository into the local compose directory
 * (shallow, single-branch). Refreshes the provider token before cloning.
 *
 * @throws TRPCError(NOT_FOUND) when the compose has no GitLab provider.
 */
export const cloneRawGitlabRepository = async (entity: Compose) => {
	// Dropped unused destructured fields (gitlabRepository, gitlabOwner) and
	// a no-op try/catch that only rethrew the error.
	const { appName, gitlabBranch, gitlabId, gitlabPathNamespace } = entity;

	if (!gitlabId) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Gitlab Provider not found",
		});
	}

	const gitlabProvider = await findGitlabById(gitlabId);
	const { COMPOSE_PATH } = paths();
	await refreshGitlabToken(gitlabId);
	const outputPath = join(COMPOSE_PATH, appName, "code");
	await recreateDirectory(outputPath);

	const repoclone = `gitlab.com/${gitlabPathNamespace}.git`;
	// Access token is embedded in the URL; avoid logging cloneUrl.
	const cloneUrl = `https://oauth2:${gitlabProvider?.accessToken}@${repoclone}`;
	await spawnAsync("git", [
		"clone",
		"--branch",
		gitlabBranch!,
		"--depth",
		"1",
		cloneUrl,
		outputPath,
		"--progress",
	]);
};
/**
 * Clones a GitLab-backed compose repository on a remote server via
 * execAsyncRemote (shallow, single-branch).
 *
 * @throws TRPCError(NOT_FOUND) when the compose has no server or GitLab provider.
 */
export const cloneRawGitlabRepositoryRemote = async (compose: Compose) => {
	// NOTE(review): this uses `branch` while the local variant uses
	// `gitlabBranch` — confirm which field is intended for GitLab composes.
	const { appName, gitlabPathNamespace, branch, gitlabId, serverId } = compose;

	if (!serverId) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Server not found",
		});
	}
	if (!gitlabId) {
		throw new TRPCError({
			code: "NOT_FOUND",
			message: "Gitlab Provider not found",
		});
	}

	const gitlabProvider = await findGitlabById(gitlabId);
	const { COMPOSE_PATH } = paths(true);
	await refreshGitlabToken(gitlabId);
	const outputPath = join(COMPOSE_PATH, appName, "code");
	await recreateDirectory(outputPath);

	const repoclone = `gitlab.com/${gitlabPathNamespace}.git`;
	// Access token is embedded in the URL; avoid logging cloneUrl.
	const cloneUrl = `https://oauth2:${gitlabProvider?.accessToken}@${repoclone}`;
	// Removed a no-op try/catch that only rethrew the error.
	await execAsyncRemote(
		serverId,
		`git clone --branch ${branch} --depth 1 ${cloneUrl} ${outputPath}`,
	);
};
/**
 * Verifies a GitLab provider by listing its projects and returning how many
 * match the optional group filter (a usable count implies valid credentials).
 *
 * @throws Error when no provider id is supplied
 * @throws TRPCError(BAD_REQUEST) when the GitLab API call fails
 */
export const testGitlabConnection = async (
	input: typeof apiGitlabTestConnection._type,
) => {
	const { gitlabId, groupName } = input;

	if (!gitlabId) {
		throw new Error("Gitlab provider not found");
	}

	await refreshGitlabToken(gitlabId);
	const gitlabProvider = await findGitlabById(gitlabId);
	const response = await fetch(
		`https://gitlab.com/api/v4/projects?membership=true&owned=true&page=${0}&per_page=${100}`,
		{
			headers: {
				Authorization: `Bearer ${gitlabProvider.accessToken}`,
			},
		},
	);
	if (!response.ok) {
		throw new TRPCError({
			code: "BAD_REQUEST",
			message: `Failed to fetch repositories: ${response.statusText}`,
		});
	}

	const repositories = await response.json();
	// Fix: lowercase the group name before matching against the lowercased
	// namespace path, mirroring getGitlabRepositories; the previous code
	// compared a mixed-case group name and never matched.
	const normalizedGroup = groupName?.toLowerCase();
	const filteredRepos = repositories.filter((repo: any) => {
		const { full_path, kind } = repo.namespace;
		if (normalizedGroup) {
			return (
				full_path.toLowerCase().includes(normalizedGroup) && kind === "group"
			);
		}
		return kind === "user";
	});
	return filteredRepos.length;
};

View File

@@ -1,80 +0,0 @@
import { createWriteStream } from "node:fs";
import { writeFile } from "node:fs/promises";
import { join } from "node:path";
import type { Compose } from "@/server/api/services/compose";
import { paths } from "@/server/constants";
import { encodeBase64 } from "../docker/utils";
import { recreateDirectory } from "../filesystem/directory";
import { execAsyncRemote } from "../process/execAsync";
/**
 * Writes the compose's docker-compose.yml into a freshly recreated local
 * code directory, appending progress/errors to the deployment log.
 */
export const createComposeFile = async (compose: Compose, logPath: string) => {
	const { appName, composeFile } = compose;
	const { COMPOSE_PATH } = paths();
	const targetDir = join(COMPOSE_PATH, appName, "code");
	const logStream = createWriteStream(logPath, { flags: "a" });

	try {
		await recreateDirectory(targetDir);
		logStream.write(
			`\nCreating File 'docker-compose.yml' to ${targetDir}: ✅\n`,
		);
		await writeFile(join(targetDir, "docker-compose.yml"), composeFile);
		logStream.write(`\nFile 'docker-compose.yml' created: ✅\n`);
	} catch (error) {
		logStream.write(`\nERROR Creating Compose File: ${error}: ❌\n`);
		throw error;
	} finally {
		logStream.end();
	}
};
/**
 * Builds the bash command that (re)creates the remote code directory and
 * writes docker-compose.yml there. The YAML is base64-encoded so its content
 * survives shell quoting, then decoded on the remote host.
 */
export const getCreateComposeFileCommand = (
	compose: Compose,
	logPath: string,
) => {
	const { appName, composeFile } = compose;
	const { COMPOSE_PATH } = paths(true);
	const targetDir = join(COMPOSE_PATH, appName, "code");
	const composePath = join(targetDir, "docker-compose.yml");
	const encodedContent = encodeBase64(composeFile);

	return `
		rm -rf ${targetDir};
		mkdir -p ${targetDir};
		echo "${encodedContent}" | base64 -d > "${composePath}";
		echo "File 'docker-compose.yml' created: ✅" >> ${logPath};
	`;
};
/**
 * Writes the compose's docker-compose.yml into a freshly recreated local
 * code directory, without any deployment logging.
 */
export const createComposeFileRaw = async (compose: Compose) => {
	const { COMPOSE_PATH } = paths();
	const { appName, composeFile } = compose;
	const outputPath = join(COMPOSE_PATH, appName, "code");
	const filePath = join(outputPath, "docker-compose.yml");
	// Removed a no-op try/catch that only rethrew the error.
	await recreateDirectory(outputPath);
	await writeFile(filePath, composeFile);
};
/**
 * Writes the compose's docker-compose.yml on a remote server: the YAML is
 * base64-encoded to survive shell quoting, then decoded into place over SSH.
 */
export const createComposeFileRawRemote = async (compose: Compose) => {
	const { COMPOSE_PATH } = paths(true);
	const { appName, composeFile, serverId } = compose;
	const outputPath = join(COMPOSE_PATH, appName, "code");
	const filePath = join(outputPath, "docker-compose.yml");
	// Removed a no-op try/catch that only rethrew the error.
	const encodedContent = encodeBase64(composeFile);
	const command = `
		mkdir -p ${outputPath};
		echo "${encodedContent}" | base64 -d > "${filePath}";
	`;
	await execAsyncRemote(serverId, command);
};

View File

@@ -1,4 +1,4 @@
import { findServerById } from "@/server/api/services/server";
import { findServerById } from "@dokploy/builders";
import { docker } from "@/server/constants";
import Dockerode from "dockerode";
import { readSSHKey } from "../filesystem/ssh";

View File

@@ -2,7 +2,7 @@ import type http from "node:http";
import { spawn } from "node-pty";
import { Client } from "ssh2";
import { WebSocketServer } from "ws";
import { findServerById } from "../api/services/server";
import { findServerById } from "@dokploy/builders";
import { validateWebSocketRequest } from "../auth/auth";
import { readSSHKey } from "../utils/filesystem/ssh";
import { getShell } from "./utils";

View File

@@ -2,7 +2,7 @@ import type http from "node:http";
import { spawn } from "node-pty";
import { Client } from "ssh2";
import { WebSocketServer } from "ws";
import { findServerById } from "../api/services/server";
import { findServerById } from "@dokploy/builders";
import { validateWebSocketRequest } from "../auth/auth";
import { readSSHKey } from "../utils/filesystem/ssh";
import { getShell } from "./utils";

View File

@@ -2,7 +2,7 @@ import { spawn } from "node:child_process";
import type http from "node:http";
import { Client } from "ssh2";
import { WebSocketServer } from "ws";
import { findServerById } from "../api/services/server";
import { findServerById } from "@dokploy/builders";
import { validateWebSocketRequest } from "../auth/auth";
import { readSSHKey } from "../utils/filesystem/ssh";

View File

@@ -3,7 +3,7 @@ import path from "node:path";
import { spawn } from "node-pty";
import { publicIpv4, publicIpv6 } from "public-ip";
import { WebSocketServer } from "ws";
import { findServerById } from "../api/services/server";
import { findServerById } from "@dokploy/builders";
import { validateWebSocketRequest } from "../auth/auth";
import { paths } from "../constants";