refactor(multi-server): update path imports

This commit is contained in:
Mauricio Siu
2024-09-29 18:04:45 -06:00
parent 2ae7e562bb
commit 7bd6b66551
115 changed files with 186 additions and 7124 deletions

View File

@@ -1,5 +1,5 @@
import { addSuffixToAllProperties } from "@/server/utils/docker/compose";
import type { ComposeSpecification } from "@/server/utils/docker/types";
import { addSuffixToAllProperties } from "@dokploy/builders";
import type { ComposeSpecification } from "@dokploy/builders";
import { load } from "js-yaml";
import { expect, test } from "vitest";

View File

@@ -1,6 +1,6 @@
import { generateRandomHash } from "@/server/utils/docker/compose";
import { addSuffixToConfigsRoot } from "@/server/utils/docker/compose/configs";
import type { ComposeSpecification } from "@/server/utils/docker/types";
import { generateRandomHash } from "@dokploy/builders";
import { addSuffixToConfigsRoot } from "@dokploy/builders";
import type { ComposeSpecification } from "@dokploy/builders";
import { load } from "js-yaml";
import { expect, test } from "vitest";

View File

@@ -1,6 +1,6 @@
import { generateRandomHash } from "@/server/utils/docker/compose";
import { addSuffixToConfigsInServices } from "@/server/utils/docker/compose/configs";
import type { ComposeSpecification } from "@/server/utils/docker/types";
import { generateRandomHash } from "@dokploy/builders";
import { addSuffixToConfigsInServices } from "@dokploy/builders";
import type { ComposeSpecification } from "@dokploy/builders";
import { load } from "js-yaml";
import { expect, test } from "vitest";

View File

@@ -1,9 +1,9 @@
import { generateRandomHash } from "@/server/utils/docker/compose";
import { generateRandomHash } from "@dokploy/builders";
import {
addSuffixToAllConfigs,
addSuffixToConfigsRoot,
} from "@/server/utils/docker/compose/configs";
import type { ComposeSpecification } from "@/server/utils/docker/types";
} from "@dokploy/builders";
import type { ComposeSpecification } from "@dokploy/builders";
import { load } from "js-yaml";
import { expect, test } from "vitest";

View File

@@ -1,5 +1,5 @@
import type { Domain } from "@/server/api/services/domain";
import { createDomainLabels } from "@/server/utils/docker/domain";
import type { Domain } from "@dokploy/builders";
import { createDomainLabels } from "@dokploy/builders";
import { describe, expect, it } from "vitest";
describe("createDomainLabels", () => {

View File

@@ -1,4 +1,4 @@
import { addDokployNetworkToRoot } from "@/server/utils/docker/domain";
import { addDokployNetworkToRoot } from "@dokploy/builders";
import { describe, expect, it } from "vitest";
describe("addDokployNetworkToRoot", () => {

View File

@@ -1,4 +1,4 @@
import { addDokployNetworkToService } from "@/server/utils/docker/domain";
import { addDokployNetworkToService } from "@dokploy/builders";
import { describe, expect, it } from "vitest";
describe("addDokployNetworkToService", () => {

View File

@@ -1,6 +1,6 @@
import { generateRandomHash } from "@/server/utils/docker/compose";
import { addSuffixToNetworksRoot } from "@/server/utils/docker/compose/network";
import type { ComposeSpecification } from "@/server/utils/docker/types";
import { generateRandomHash } from "@dokploy/builders";
import { addSuffixToNetworksRoot } from "@dokploy/builders";
import type { ComposeSpecification } from "@dokploy/builders";
import { load } from "js-yaml";
import { expect, test } from "vitest";

View File

@@ -1,6 +1,6 @@
import { generateRandomHash } from "@/server/utils/docker/compose";
import { addSuffixToServiceNetworks } from "@/server/utils/docker/compose/network";
import type { ComposeSpecification } from "@/server/utils/docker/types";
import { generateRandomHash } from "@dokploy/builders";
import { addSuffixToServiceNetworks } from "@dokploy/builders";
import type { ComposeSpecification } from "@dokploy/builders";
import { load } from "js-yaml";
import { expect, test } from "vitest";

View File

@@ -1,10 +1,10 @@
import { generateRandomHash } from "@/server/utils/docker/compose";
import { generateRandomHash } from "@dokploy/builders";
import {
addSuffixToAllNetworks,
addSuffixToServiceNetworks,
} from "@/server/utils/docker/compose/network";
import { addSuffixToNetworksRoot } from "@/server/utils/docker/compose/network";
import type { ComposeSpecification } from "@/server/utils/docker/types";
} from "@dokploy/builders";
import { addSuffixToNetworksRoot } from "@dokploy/builders";
import type { ComposeSpecification } from "@dokploy/builders";
import { load } from "js-yaml";
import { expect, test } from "vitest";

View File

@@ -1,6 +1,6 @@
import { generateRandomHash } from "@/server/utils/docker/compose";
import { addSuffixToSecretsRoot } from "@/server/utils/docker/compose/secrets";
import type { ComposeSpecification } from "@/server/utils/docker/types";
import { generateRandomHash } from "@dokploy/builders";
import { addSuffixToSecretsRoot } from "@dokploy/builders";
import type { ComposeSpecification } from "@dokploy/builders";
import { dump, load } from "js-yaml";
import { expect, test } from "vitest";

View File

@@ -1,6 +1,6 @@
import { generateRandomHash } from "@/server/utils/docker/compose";
import { addSuffixToSecretsInServices } from "@/server/utils/docker/compose/secrets";
import type { ComposeSpecification } from "@/server/utils/docker/types";
import { generateRandomHash } from "@dokploy/builders";
import { addSuffixToSecretsInServices } from "@dokploy/builders";
import type { ComposeSpecification } from "@dokploy/builders";
import { load } from "js-yaml";
import { expect, test } from "vitest";

View File

@@ -1,5 +1,5 @@
import { addSuffixToAllSecrets } from "@/server/utils/docker/compose/secrets";
import type { ComposeSpecification } from "@/server/utils/docker/types";
import { addSuffixToAllSecrets } from "@dokploy/builders";
import type { ComposeSpecification } from "@dokploy/builders";
import { load } from "js-yaml";
import { expect, test } from "vitest";

View File

@@ -1,6 +1,6 @@
import { generateRandomHash } from "@/server/utils/docker/compose";
import { addSuffixToServiceNames } from "@/server/utils/docker/compose/service";
import type { ComposeSpecification } from "@/server/utils/docker/types";
import { generateRandomHash } from "@dokploy/builders";
import { addSuffixToServiceNames } from "@dokploy/builders";
import type { ComposeSpecification } from "@dokploy/builders";
import { load } from "js-yaml";
import { expect, test } from "vitest";

View File

@@ -1,6 +1,6 @@
import { generateRandomHash } from "@/server/utils/docker/compose";
import { addSuffixToServiceNames } from "@/server/utils/docker/compose/service";
import type { ComposeSpecification } from "@/server/utils/docker/types";
import { generateRandomHash } from "@dokploy/builders";
import { addSuffixToServiceNames } from "@dokploy/builders";
import type { ComposeSpecification } from "@dokploy/builders";
import { load } from "js-yaml";
import { expect, test } from "vitest";

View File

@@ -1,6 +1,6 @@
import { generateRandomHash } from "@/server/utils/docker/compose";
import { addSuffixToServiceNames } from "@/server/utils/docker/compose/service";
import type { ComposeSpecification } from "@/server/utils/docker/types";
import { generateRandomHash } from "@dokploy/builders";
import { addSuffixToServiceNames } from "@dokploy/builders";
import type { ComposeSpecification } from "@dokploy/builders";
import { load } from "js-yaml";
import { expect, test } from "vitest";

View File

@@ -1,6 +1,6 @@
import { generateRandomHash } from "@/server/utils/docker/compose";
import { addSuffixToServiceNames } from "@/server/utils/docker/compose/service";
import type { ComposeSpecification } from "@/server/utils/docker/types";
import { generateRandomHash } from "@dokploy/builders";
import { addSuffixToServiceNames } from "@dokploy/builders";
import type { ComposeSpecification } from "@dokploy/builders";
import { load } from "js-yaml";
import { expect, test } from "vitest";

View File

@@ -1,6 +1,6 @@
import { generateRandomHash } from "@/server/utils/docker/compose";
import { addSuffixToServiceNames } from "@/server/utils/docker/compose/service";
import type { ComposeSpecification } from "@/server/utils/docker/types";
import { generateRandomHash } from "@dokploy/builders";
import { addSuffixToServiceNames } from "@dokploy/builders";
import type { ComposeSpecification } from "@dokploy/builders";
import { load } from "js-yaml";
import { expect, test } from "vitest";

View File

@@ -1,8 +1,8 @@
import {
addSuffixToAllServiceNames,
addSuffixToServiceNames,
} from "@/server/utils/docker/compose/service";
import type { ComposeSpecification } from "@/server/utils/docker/types";
} from "@dokploy/builders";
import type { ComposeSpecification } from "@dokploy/builders";
import { load } from "js-yaml";
import { expect, test } from "vitest";

View File

@@ -1,6 +1,6 @@
import { generateRandomHash } from "@/server/utils/docker/compose";
import { addSuffixToServiceNames } from "@/server/utils/docker/compose/service";
import type { ComposeSpecification } from "@/server/utils/docker/types";
import { generateRandomHash } from "@dokploy/builders";
import { addSuffixToServiceNames } from "@dokploy/builders";
import type { ComposeSpecification } from "@dokploy/builders";
import { load } from "js-yaml";
import { expect, test } from "vitest";

View File

@@ -1,9 +1,9 @@
import { generateRandomHash } from "@/server/utils/docker/compose";
import { generateRandomHash } from "@dokploy/builders";
import {
addSuffixToAllVolumes,
addSuffixToVolumesRoot,
} from "@/server/utils/docker/compose/volume";
import type { ComposeSpecification } from "@/server/utils/docker/types";
} from "@dokploy/builders";
import type { ComposeSpecification } from "@dokploy/builders";
import { load } from "js-yaml";
import { expect, test } from "vitest";

View File

@@ -1,6 +1,6 @@
import { generateRandomHash } from "@/server/utils/docker/compose";
import { addSuffixToVolumesRoot } from "@/server/utils/docker/compose/volume";
import type { ComposeSpecification } from "@/server/utils/docker/types";
import { generateRandomHash } from "@dokploy/builders";
import { addSuffixToVolumesRoot } from "@dokploy/builders";
import type { ComposeSpecification } from "@dokploy/builders";
import { load } from "js-yaml";
import { expect, test } from "vitest";

View File

@@ -1,6 +1,6 @@
import { generateRandomHash } from "@/server/utils/docker/compose";
import { addSuffixToVolumesInServices } from "@/server/utils/docker/compose/volume";
import type { ComposeSpecification } from "@/server/utils/docker/types";
import { generateRandomHash } from "@dokploy/builders";
import { addSuffixToVolumesInServices } from "@dokploy/builders";
import type { ComposeSpecification } from "@dokploy/builders";
import { load } from "js-yaml";
import { expect, test } from "vitest";

View File

@@ -1,9 +1,9 @@
import { generateRandomHash } from "@/server/utils/docker/compose";
import { generateRandomHash } from "@dokploy/builders";
import {
addSuffixToAllVolumes,
addSuffixToVolumesInServices,
} from "@/server/utils/docker/compose/volume";
import type { ComposeSpecification } from "@/server/utils/docker/types";
} from "@dokploy/builders";
import type { ComposeSpecification } from "@dokploy/builders";
import { load } from "js-yaml";
import { expect, test } from "vitest";

View File

@@ -2,8 +2,8 @@ import fs from "node:fs/promises";
import path from "node:path";
import { paths } from "@/server/constants";
const { APPLICATIONS_PATH } = paths();
import type { ApplicationNested } from "@/server/utils/builders";
import { unzipDrop } from "@/server/utils/builders/drop";
import type { ApplicationNested } from "@dokploy/builders";
import { unzipDrop } from "@dokploy/builders";
import AdmZip from "adm-zip";
import { afterAll, beforeAll, describe, expect, it, vi } from "vitest";

View File

@@ -1,4 +1,4 @@
import { parseRawConfig, processLogs } from "@/server/utils/access-log/utils";
import { parseRawConfig, processLogs } from "@dokploy/builders";
import { describe, expect, it } from "vitest";
const sampleLogEntry = `{"ClientAddr":"172.19.0.1:56732","ClientHost":"172.19.0.1","ClientPort":"56732","ClientUsername":"-","DownstreamContentSize":0,"DownstreamStatus":304,"Duration":14729375,"OriginContentSize":0,"OriginDuration":14051833,"OriginStatus":304,"Overhead":677542,"RequestAddr":"s222-umami-c381af.traefik.me","RequestContentSize":0,"RequestCount":122,"RequestHost":"s222-umami-c381af.traefik.me","RequestMethod":"GET","RequestPath":"/dashboard?_rsc=1rugv","RequestPort":"-","RequestProtocol":"HTTP/1.1","RequestScheme":"http","RetryAttempts":0,"RouterName":"s222-umami-60e104-47-web@docker","ServiceAddr":"10.0.1.15:3000","ServiceName":"s222-umami-60e104-47-web@docker","ServiceURL":{"Scheme":"http","Opaque":"","User":null,"Host":"10.0.1.15:3000","Path":"","RawPath":"","ForceQuery":false,"RawQuery":"","Fragment":"","RawFragment":""},"StartLocal":"2024-08-25T04:34:37.306691884Z","StartUTC":"2024-08-25T04:34:37.306691884Z","entryPointName":"web","level":"info","msg":"","time":"2024-08-25T04:34:37Z"}`;

View File

@@ -5,11 +5,12 @@ vi.mock("node:fs", () => ({
default: fs,
}));
import type { Admin } from "@/server/api/services/admin";
import { createDefaultServerTraefikConfig } from "@/server/setup/traefik-setup";
import { loadOrCreateConfig } from "@/server/utils/traefik/application";
import type { FileConfig } from "@/server/utils/traefik/file-types";
import { updateServerTraefik } from "@/server/utils/traefik/web-server";
import type { Admin, FileConfig } from "@dokploy/builders";
import {
createDefaultServerTraefikConfig,
loadOrCreateConfig,
updateServerTraefik,
} from "@dokploy/builders";
import { beforeEach, expect, test, vi } from "vitest";
const baseAdmin: Admin = {

View File

@@ -1,7 +1,7 @@
import type { Domain } from "@/server/api/services/domain";
import type { Redirect } from "@/server/api/services/redirect";
import type { ApplicationNested } from "@/server/utils/builders";
import { createRouterConfig } from "@/server/utils/traefik/domain";
import type { Domain } from "@dokploy/builders";
import type { Redirect } from "@dokploy/builders";
import type { ApplicationNested } from "@dokploy/builders";
import { createRouterConfig } from "@dokploy/builders";
import { expect, test } from "vitest";
const baseApp: ApplicationNested = {

View File

@@ -1,6 +1,5 @@
import { AddProject } from "@/components/dashboard/projects/add";
import type { Auth } from "@/server/api/services/auth";
import type { User } from "@/server/api/services/user";
import type { Auth, User } from "@dokploy/builders";
import { api } from "@/utils/api";
import { useRouter } from "next/router";
import { useEffect, useMemo, useState } from "react";

View File

@@ -1,7 +1,6 @@
import { appRouter } from "@/server/api/root";
import { createTRPCContext } from "@/server/api/trpc";
import { validateRequest } from "@/server/auth/auth";
import { validateBearerToken } from "@/server/auth/token";
import { validateRequest, validateBearerToken } from "@dokploy/builders";
import { createOpenApiNextHandler } from "@dokploy/trpc-openapi";
import type { NextApiRequest, NextApiResponse } from "next";
@@ -19,7 +18,6 @@ const handler = async (req: NextApiRequest, res: NextApiResponse) => {
return;
}
console.log(user);
// @ts-ignore
return createOpenApiNextHandler({
router: appRouter,

View File

@@ -1,4 +1,4 @@
import { findAdmin } from "@/server/api/services/admin";
import { findAdmin } from "@dokploy/builders";
import { db } from "@/server/db";
import { applications, compose, github } from "@/server/db/schema";
import type { DeploymentJob } from "@/server/queues/deployments-queue";

View File

@@ -1,4 +1,4 @@
import { createGithub } from "@/server/api/services/github";
import { createGithub } from "@dokploy/builders";
import { db } from "@/server/db";
import { github } from "@/server/db/schema";
import { eq } from "drizzle-orm";

View File

@@ -1,4 +1,4 @@
import { findGitlabById, updateGitlab } from "@/server/api/services/gitlab";
import { findGitlabById, updateGitlab } from "@dokploy/builders";
import type { NextApiRequest, NextApiResponse } from "next";
export default async function handler(

View File

@@ -1,18 +0,0 @@
import type { NextRequest } from "next/server";
import { renderToString } from "react-dom/server";
import Page418 from "../hola"; // Importa la página 418
// Easter-egg route handler: server-renders the 418 page component and
// returns it as HTML with HTTP status 418 ("I'm a teapot").
export const GET = async (req: NextRequest) => {
// Render the 418 page component to an HTML string
const htmlContent = renderToString(Page418());
// Return the response with HTTP status code 418
return new Response(htmlContent, {
headers: {
"Content-Type": "text/html",
},
status: 418,
});
};
export default GET;

View File

@@ -1,7 +1,7 @@
import { ShowContainers } from "@/components/dashboard/docker/show/show-containers";
import { DashboardLayout } from "@/components/layouts/dashboard-layout";
import { appRouter } from "@/server/api/root";
import { validateRequest } from "@/server/auth/auth";
import { validateRequest } from "@dokploy/builders";
import { createServerSideHelpers } from "@trpc/react-query/server";
import type { GetServerSidePropsContext } from "next";
import React, { type ReactElement } from "react";

View File

@@ -1,6 +1,6 @@
import { ShowMonitoring } from "@/components/dashboard/monitoring/web-server/show";
import { DashboardLayout } from "@/components/layouts/dashboard-layout";
import { validateRequest } from "@/server/auth/auth";
import { validateRequest } from "@dokploy/builders";
import type { GetServerSidePropsContext } from "next";
import React, { type ReactElement } from "react";

View File

@@ -29,8 +29,8 @@ import {
DropdownMenuTrigger,
} from "@/components/ui/dropdown-menu";
import { appRouter } from "@/server/api/root";
import type { findProjectById } from "@/server/api/services/project";
import { validateRequest } from "@/server/auth/auth";
import type { findProjectById } from "@dokploy/builders";
import { validateRequest } from "@dokploy/builders";
import { api } from "@/utils/api";
import { createServerSideHelpers } from "@trpc/react-query/server";
import { CircuitBoard, FolderInput, GlobeIcon, PlusIcon } from "lucide-react";

View File

@@ -25,7 +25,7 @@ import {
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
import { cn } from "@/lib/utils";
import { appRouter } from "@/server/api/root";
import { validateRequest } from "@/server/auth/auth";
import { validateRequest } from "@dokploy/builders";
import { api } from "@/utils/api";
import { createServerSideHelpers } from "@trpc/react-query/server";
import { GlobeIcon } from "lucide-react";

View File

@@ -19,7 +19,7 @@ import {
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
import { cn } from "@/lib/utils";
import { appRouter } from "@/server/api/root";
import { validateRequest } from "@/server/auth/auth";
import { validateRequest } from "@dokploy/builders";
import { api } from "@/utils/api";
import { createServerSideHelpers } from "@trpc/react-query/server";
import { CircuitBoard } from "lucide-react";

View File

@@ -20,7 +20,7 @@ import {
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
import { cn } from "@/lib/utils";
import { appRouter } from "@/server/api/root";
import { validateRequest } from "@/server/auth/auth";
import { validateRequest } from "@dokploy/builders";
import { api } from "@/utils/api";
import { createServerSideHelpers } from "@trpc/react-query/server";
import type {

View File

@@ -20,7 +20,7 @@ import {
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
import { cn } from "@/lib/utils";
import { appRouter } from "@/server/api/root";
import { validateRequest } from "@/server/auth/auth";
import { validateRequest } from "@dokploy/builders";
import { api } from "@/utils/api";
import { createServerSideHelpers } from "@trpc/react-query/server";
import type {

View File

@@ -20,7 +20,7 @@ import {
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
import { cn } from "@/lib/utils";
import { appRouter } from "@/server/api/root";
import { validateRequest } from "@/server/auth/auth";
import { validateRequest } from "@dokploy/builders";
import { api } from "@/utils/api";
import { createServerSideHelpers } from "@trpc/react-query/server";
import type {

View File

@@ -20,7 +20,7 @@ import {
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
import { cn } from "@/lib/utils";
import { appRouter } from "@/server/api/root";
import { validateRequest } from "@/server/auth/auth";
import { validateRequest } from "@dokploy/builders";
import { api } from "@/utils/api";
import { createServerSideHelpers } from "@trpc/react-query/server";
import type {

View File

@@ -19,7 +19,7 @@ import {
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
import { cn } from "@/lib/utils";
import { appRouter } from "@/server/api/root";
import { validateRequest } from "@/server/auth/auth";
import { validateRequest } from "@dokploy/builders";
import { api } from "@/utils/api";
import { createServerSideHelpers } from "@trpc/react-query/server";
import type {

View File

@@ -1,6 +1,6 @@
import { ShowProjects } from "@/components/dashboard/projects/show";
import { DashboardLayout } from "@/components/layouts/dashboard-layout";
import { validateRequest } from "@/server/auth/auth";
import { validateRequest } from "@dokploy/builders";
import type { GetServerSidePropsContext } from "next";
import React, { type ReactElement } from "react";

View File

@@ -1,6 +1,6 @@
import { ShowRequests } from "@/components/dashboard/requests/show-requests";
import { DashboardLayout } from "@/components/layouts/dashboard-layout";
import { validateRequest } from "@/server/auth/auth";
import { validateRequest } from "@dokploy/builders";
import type { GetServerSidePropsContext } from "next";
import type { ReactElement } from "react";
import * as React from "react";

View File

@@ -1,7 +1,7 @@
import { AppearanceForm } from "@/components/dashboard/settings/appearance-form";
import { DashboardLayout } from "@/components/layouts/dashboard-layout";
import { SettingsLayout } from "@/components/layouts/settings-layout";
import { validateRequest } from "@/server/auth/auth";
import { validateRequest } from "@dokploy/builders";
import type { GetServerSidePropsContext } from "next";
import React, { type ReactElement } from "react";

View File

@@ -1,7 +1,7 @@
import { ShowCertificates } from "@/components/dashboard/settings/certificates/show-certificates";
import { DashboardLayout } from "@/components/layouts/dashboard-layout";
import { SettingsLayout } from "@/components/layouts/settings-layout";
import { validateRequest } from "@/server/auth/auth";
import { validateRequest } from "@dokploy/builders";
import type { GetServerSidePropsContext } from "next";
import React, { type ReactElement } from "react";

View File

@@ -2,7 +2,7 @@ import { ShowNodes } from "@/components/dashboard/settings/cluster/nodes/show-no
import { ShowRegistry } from "@/components/dashboard/settings/cluster/registry/show-registry";
import { DashboardLayout } from "@/components/layouts/dashboard-layout";
import { SettingsLayout } from "@/components/layouts/settings-layout";
import { validateRequest } from "@/server/auth/auth";
import { validateRequest } from "@dokploy/builders";
import type { GetServerSidePropsContext } from "next";
import React, { type ReactElement } from "react";

View File

@@ -1,7 +1,7 @@
import { ShowDestinations } from "@/components/dashboard/settings/destination/show-destinations";
import { DashboardLayout } from "@/components/layouts/dashboard-layout";
import { SettingsLayout } from "@/components/layouts/settings-layout";
import { validateRequest } from "@/server/auth/auth";
import { validateRequest } from "@dokploy/builders";
import type { GetServerSidePropsContext } from "next";
import React, { type ReactElement } from "react";

View File

@@ -2,7 +2,7 @@ import { ShowGitProviders } from "@/components/dashboard/settings/git/show-git-p
import { DashboardLayout } from "@/components/layouts/dashboard-layout";
import { SettingsLayout } from "@/components/layouts/settings-layout";
import { appRouter } from "@/server/api/root";
import { validateRequest } from "@/server/auth/auth";
import { validateRequest } from "@dokploy/builders";
import { createServerSideHelpers } from "@trpc/react-query/server";
import type { GetServerSidePropsContext } from "next";
import React, { type ReactElement } from "react";

View File

@@ -2,7 +2,7 @@ import { ShowDestinations } from "@/components/dashboard/settings/destination/sh
import { ShowNotifications } from "@/components/dashboard/settings/notifications/show-notifications";
import { DashboardLayout } from "@/components/layouts/dashboard-layout";
import { SettingsLayout } from "@/components/layouts/settings-layout";
import { validateRequest } from "@/server/auth/auth";
import { validateRequest } from "@dokploy/builders";
import type { GetServerSidePropsContext } from "next";
import React, { type ReactElement } from "react";

View File

@@ -2,7 +2,7 @@ import { GenerateToken } from "@/components/dashboard/settings/profile/generate-
import { ProfileForm } from "@/components/dashboard/settings/profile/profile-form";
import { DashboardLayout } from "@/components/layouts/dashboard-layout";
import { SettingsLayout } from "@/components/layouts/settings-layout";
import { validateRequest } from "@/server/auth/auth";
import { validateRequest } from "@dokploy/builders";
import { api } from "@/utils/api";
import type { GetServerSidePropsContext } from "next";
import React, { type ReactElement } from "react";

View File

@@ -2,7 +2,7 @@ import { WebDomain } from "@/components/dashboard/settings/web-domain";
import { WebServer } from "@/components/dashboard/settings/web-server";
import { DashboardLayout } from "@/components/layouts/dashboard-layout";
import { SettingsLayout } from "@/components/layouts/settings-layout";
import { validateRequest } from "@/server/auth/auth";
import { validateRequest } from "@dokploy/builders";
import type { GetServerSidePropsContext } from "next";
import React, { type ReactElement } from "react";

View File

@@ -1,7 +1,7 @@
import { ShowServers } from "@/components/dashboard/settings/servers/show-servers";
import { DashboardLayout } from "@/components/layouts/dashboard-layout";
import { SettingsLayout } from "@/components/layouts/settings-layout";
import { validateRequest } from "@/server/auth/auth";
import { validateRequest } from "@dokploy/builders";
import type { GetServerSidePropsContext } from "next";
import React, { type ReactElement } from "react";

View File

@@ -2,7 +2,7 @@ import { ShowDestinations } from "@/components/dashboard/settings/ssh-keys/show-
import { DashboardLayout } from "@/components/layouts/dashboard-layout";
import { SettingsLayout } from "@/components/layouts/settings-layout";
import { appRouter } from "@/server/api/root";
import { validateRequest } from "@/server/auth/auth";
import { validateRequest } from "@dokploy/builders";
import { createServerSideHelpers } from "@trpc/react-query/server";
import type { GetServerSidePropsContext } from "next";
import React, { type ReactElement } from "react";

View File

@@ -1,7 +1,7 @@
import { ShowUsers } from "@/components/dashboard/settings/users/show-users";
import { DashboardLayout } from "@/components/layouts/dashboard-layout";
import { SettingsLayout } from "@/components/layouts/settings-layout";
import { validateRequest } from "@/server/auth/auth";
import { validateRequest } from "@dokploy/builders";
import type { GetServerSidePropsContext } from "next";
import React, { type ReactElement } from "react";

View File

@@ -1,7 +1,7 @@
import { ShowTraefikSystem } from "@/components/dashboard/file-system/show-traefik-system";
import { DashboardLayout } from "@/components/layouts/dashboard-layout";
import { appRouter } from "@/server/api/root";
import { validateRequest } from "@/server/auth/auth";
import { validateRequest } from "@dokploy/builders";
import { createServerSideHelpers } from "@trpc/react-query/server";
import type { GetServerSidePropsContext } from "next";
import React, { type ReactElement } from "react";

View File

@@ -1,3 +0,0 @@
// Placeholder page component that renders a simple "hola" greeting;
// used as the body of the 418 easter-egg route.
export default function hola() {
return <div>hola</div>;
}

View File

@@ -17,8 +17,7 @@ import {
FormMessage,
} from "@/components/ui/form";
import { Input } from "@/components/ui/input";
import { isAdminPresent } from "@/server/api/services/admin";
import { validateRequest } from "@/server/auth/auth";
import { validateRequest, isAdminPresent } from "@dokploy/builders";
import { api } from "@/utils/api";
import { zodResolver } from "@hookform/resolvers/zod";
import type { GetServerSidePropsContext } from "next";

View File

@@ -15,7 +15,7 @@ import {
FormMessage,
} from "@/components/ui/form";
import { Input } from "@/components/ui/input";
import { getUserByToken } from "@/server/api/services/admin";
import { getUserByToken } from "@dokploy/builders";
import { api } from "@/utils/api";
import { zodResolver } from "@hookform/resolvers/zod";
import { AlertTriangle } from "lucide-react";

View File

@@ -15,7 +15,7 @@ import {
FormMessage,
} from "@/components/ui/form";
import { Input } from "@/components/ui/input";
import { isAdminPresent } from "@/server/api/services/admin";
import { isAdminPresent } from "@dokploy/builders";
// import { IS_CLOUD } from "@/server/constants";
import { api } from "@/utils/api";
import { zodResolver } from "@hookform/resolvers/zod";

View File

@@ -1,5 +1,5 @@
import { appRouter } from "@/server/api/root";
import { validateRequest } from "@/server/auth/auth";
import { validateRequest } from "@dokploy/builders";
import { api } from "@/utils/api";
import { createServerSideHelpers } from "@trpc/react-query/server";
import type { GetServerSidePropsContext, NextPage } from "next";

View File

@@ -1,6 +1,3 @@
import { lucia, validateRequest } from "@/server/auth/auth";
import { luciaToken } from "@/server/auth/token";
// import { IS_CLOUD } from "@/server/constants";
import {
apiCreateAdmin,
apiCreateUser,
@@ -23,6 +20,9 @@ import {
generate2FASecret,
updateAuthById,
verify2FA,
lucia,
validateRequest,
luciaToken,
} from "@dokploy/builders";
import {
adminProcedure,

View File

@@ -8,6 +8,11 @@ import {
mysql,
postgres,
redis,
apiCreateProject,
apiFindOneProject,
apiRemoveProject,
apiUpdateProject,
projects,
} from "@/server/db/schema";
import { TRPCError } from "@trpc/server";
@@ -15,11 +20,6 @@ import { desc, eq, sql } from "drizzle-orm";
import type { AnyPgColumn } from "drizzle-orm/pg-core";
import {
apiCreateProject,
apiFindOneProject,
apiRemoveProject,
apiUpdateProject,
projects,
createProject,
deleteProject,
findProjectById,

View File

@@ -51,10 +51,10 @@ import {
updateServerById,
canAccessToTraefikFiles,
getDokployImage,
getDokployVersion,
pullLatestRelease,
readDirectory,
} from "@dokploy/builders";
import packageInfo from "../../../package.json";
import { adminProcedure, createTRPCRouter, protectedProcedure } from "../trpc";
export const settingsRouter = createTRPCRouter({
@@ -269,7 +269,7 @@ export const settingsRouter = createTRPCRouter({
}),
getDokployVersion: adminProcedure.query(() => {
return getDokployVersion();
return packageInfo.version;
}),
readDirectories: protectedProcedure
.input(apiServerSchema)

View File

@@ -20,8 +20,7 @@ import {
import type { Session, User } from "lucia";
import superjson from "superjson";
import { ZodError } from "zod";
import { validateRequest } from "../auth/auth";
import { validateBearerToken } from "../auth/token";
import { validateRequest, validateBearerToken } from "@dokploy/builders";
/**
* 1. CONTEXT

View File

@@ -1,112 +0,0 @@
import { webcrypto } from "node:crypto";
import type { IncomingMessage, ServerResponse } from "node:http";
import { DrizzlePostgreSQLAdapter } from "@lucia-auth/adapter-drizzle";
import { TimeSpan } from "lucia";
import { Lucia } from "lucia/dist/core.js";
import type { Session, User } from "lucia/dist/core.js";
import { findAdminByAuthId, findUserByAuthId } from "@dokploy/builders";
import { db } from "../db";
import { type DatabaseUser, auth, sessionTable } from "../db/schema";
globalThis.crypto = webcrypto as Crypto;
export const adapter = new DrizzlePostgreSQLAdapter(db, sessionTable, auth);
export const lucia = new Lucia(adapter, {
sessionCookie: {
attributes: {
secure: false,
},
},
sessionExpiresIn: new TimeSpan(1, "d"),
getUserAttributes: (attributes) => {
return {
email: attributes.email,
rol: attributes.rol,
secret: attributes.secret !== null,
adminId: attributes.adminId,
};
},
});
declare module "lucia" {
interface Register {
Lucia: typeof lucia;
DatabaseUserAttributes: Omit<DatabaseUser, "id"> & {
authId: string;
adminId: string;
};
}
}
export type ReturnValidateToken = Promise<{
user: (User & { authId: string; adminId: string }) | null;
session: Session | null;
}>;
export async function validateRequest(
req: IncomingMessage,
res: ServerResponse,
): ReturnValidateToken {
const sessionId = lucia.readSessionCookie(req.headers.cookie ?? "");
if (!sessionId) {
return {
user: null,
session: null,
};
}
const result = await lucia.validateSession(sessionId);
if (result?.session?.fresh) {
res.appendHeader(
"Set-Cookie",
lucia.createSessionCookie(result.session.id).serialize(),
);
}
if (!result.session) {
res.appendHeader(
"Set-Cookie",
lucia.createBlankSessionCookie().serialize(),
);
}
if (result.user) {
if (result.user?.rol === "admin") {
const admin = await findAdminByAuthId(result.user.id);
result.user.adminId = admin.adminId;
} else if (result.user?.rol === "user") {
const userResult = await findUserByAuthId(result.user.id);
result.user.adminId = userResult.adminId;
}
}
return {
session: result.session,
...((result.user && {
user: {
authId: result.user.id,
email: result.user.email,
rol: result.user.rol,
id: result.user.id,
secret: result.user.secret,
adminId: result.user.adminId,
},
}) || {
user: null,
}),
};
}
export async function validateWebSocketRequest(
req: IncomingMessage,
): Promise<{ user: User; session: Session } | { user: null; session: null }> {
const sessionId = lucia.readSessionCookie(req.headers.cookie ?? "");
if (!sessionId) {
return {
user: null,
session: null,
};
}
const result = await lucia.validateSession(sessionId);
return result;
}

View File

@@ -1,20 +0,0 @@
import bcrypt from "bcrypt";
export const generateRandomPassword = async () => {
const passwordLength = 16;
const characters =
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
let randomPassword = "";
for (let i = 0; i < passwordLength; i++) {
randomPassword += characters.charAt(
Math.floor(Math.random() * characters.length),
);
}
const saltRounds = 10;
const hashedPassword = await bcrypt.hash(randomPassword, saltRounds);
return { randomPassword, hashedPassword };
};

View File

@@ -1,49 +0,0 @@
import type { IncomingMessage } from "node:http";
import { TimeSpan } from "lucia";
import { Lucia } from "lucia/dist/core.js";
import { type ReturnValidateToken, adapter } from "./auth";
export const luciaToken = new Lucia(adapter, {
sessionCookie: {
attributes: {
secure: false,
},
},
sessionExpiresIn: new TimeSpan(365, "d"),
getUserAttributes: (attributes) => {
return {
email: attributes.email,
rol: attributes.rol,
secret: attributes.secret !== null,
};
},
});
export const validateBearerToken = async (
req: IncomingMessage,
): ReturnValidateToken => {
const authorizationHeader = req.headers.authorization;
const sessionId = luciaToken.readBearerToken(authorizationHeader ?? "");
if (!sessionId) {
return {
user: null,
session: null,
};
}
const result = await luciaToken.validateSession(sessionId);
return {
session: result.session,
...((result.user && {
user: {
adminId: result.user.adminId,
authId: result.user.id,
email: result.user.email,
rol: result.user.rol,
id: result.user.id,
secret: result.user.secret,
},
}) || {
user: null,
}),
};
};

View File

@@ -1 +1 @@
export * from "@dokploy/builders";
export * from "@dokploy/builders/dist/db/schema";

View File

@@ -1,198 +0,0 @@
import { promises } from "node:fs";
import type Dockerode from "dockerode";
import osUtils from "node-os-utils";
import { paths } from "../constants";
export const recordAdvancedStats = async (
stats: Dockerode.ContainerStats,
appName: string,
) => {
const { MONITORING_PATH } = paths();
const path = `${MONITORING_PATH}/${appName}`;
await promises.mkdir(path, { recursive: true });
const cpuPercent = calculateCpuUsagePercent(
stats.cpu_stats,
stats.precpu_stats,
);
const memoryStats = calculateMemoryStats(stats.memory_stats);
const blockIO = calculateBlockIO(stats.blkio_stats);
const networkUsage = calculateNetworkUsage(stats.networks);
await updateStatsFile(appName, "cpu", cpuPercent);
await updateStatsFile(appName, "memory", {
used: memoryStats.used,
free: memoryStats.free,
usedPercentage: memoryStats.usedPercentage,
total: memoryStats.total,
});
await updateStatsFile(appName, "block", {
readMb: blockIO.readMb,
writeMb: blockIO.writeMb,
});
await updateStatsFile(appName, "network", {
inputMb: networkUsage.inputMb,
outputMb: networkUsage.outputMb,
});
if (appName === "dokploy") {
const disk = await osUtils.drive.info("/");
const diskUsage = disk.usedGb;
const diskTotal = disk.totalGb;
const diskUsedPercentage = disk.usedPercentage;
const diskFree = disk.freeGb;
await updateStatsFile(appName, "disk", {
diskTotal: +diskTotal,
diskUsedPercentage: +diskUsedPercentage,
diskUsage: +diskUsage,
diskFree: +diskFree,
});
}
};
export const getAdvancedStats = async (appName: string) => {
return {
cpu: await readStatsFile(appName, "cpu"),
memory: await readStatsFile(appName, "memory"),
disk: await readStatsFile(appName, "disk"),
network: await readStatsFile(appName, "network"),
block: await readStatsFile(appName, "block"),
};
};
export const readStatsFile = async (
appName: string,
statType: "cpu" | "memory" | "disk" | "network" | "block",
) => {
try {
const { MONITORING_PATH } = paths();
const filePath = `${MONITORING_PATH}/${appName}/${statType}.json`;
const data = await promises.readFile(filePath, "utf-8");
return JSON.parse(data);
} catch (error) {
return [];
}
};
export const updateStatsFile = async (
appName: string,
statType: "cpu" | "memory" | "disk" | "network" | "block",
value: number | string | unknown,
) => {
const { MONITORING_PATH } = paths();
const stats = await readStatsFile(appName, statType);
stats.push({ value, time: new Date() });
if (stats.length > 288) {
stats.shift();
}
const content = JSON.stringify(stats);
await promises.writeFile(
`${MONITORING_PATH}/${appName}/${statType}.json`,
content,
);
};
export const readLastValueStatsFile = async (
appName: string,
statType: "cpu" | "memory" | "disk" | "network" | "block",
) => {
try {
const { MONITORING_PATH } = paths();
const filePath = `${MONITORING_PATH}/${appName}/${statType}.json`;
const data = await promises.readFile(filePath, "utf-8");
const stats = JSON.parse(data);
return stats[stats.length - 1] || null;
} catch (error) {
return null;
}
};
export const getLastAdvancedStatsFile = async (appName: string) => {
return {
cpu: await readLastValueStatsFile(appName, "cpu"),
memory: await readLastValueStatsFile(appName, "memory"),
disk: await readLastValueStatsFile(appName, "disk"),
network: await readLastValueStatsFile(appName, "network"),
block: await readLastValueStatsFile(appName, "block"),
};
};
const calculateCpuUsagePercent = (
cpu_stats: Dockerode.ContainerStats["cpu_stats"],
precpu_stats: Dockerode.ContainerStats["precpu_stats"],
) => {
const cpuDelta =
cpu_stats.cpu_usage.total_usage - precpu_stats.cpu_usage.total_usage;
const systemDelta =
cpu_stats.system_cpu_usage - precpu_stats.system_cpu_usage;
const numberCpus =
cpu_stats.online_cpus ||
(cpu_stats.cpu_usage.percpu_usage
? cpu_stats.cpu_usage.percpu_usage.length
: 1);
if (systemDelta > 0 && cpuDelta > 0) {
return (cpuDelta / systemDelta) * numberCpus * 100.0;
}
return 0;
};
const calculateMemoryStats = (
memory_stats: Dockerode.ContainerStats["memory_stats"],
) => {
const usedMemory = memory_stats.usage - (memory_stats.stats.cache || 0);
const availableMemory = memory_stats.limit;
const memoryUsedPercentage = (usedMemory / availableMemory) * 100.0;
return {
used: usedMemory,
free: availableMemory - usedMemory,
usedPercentage: memoryUsedPercentage,
total: availableMemory,
};
};
const calculateBlockIO = (
blkio_stats: Dockerode.ContainerStats["blkio_stats"],
) => {
let readIO = 0;
let writeIO = 0;
if (blkio_stats?.io_service_bytes_recursive) {
for (const io of blkio_stats.io_service_bytes_recursive) {
if (io.op === "read") {
readIO += io.value;
} else if (io.op === "write") {
writeIO += io.value;
}
}
}
return {
readMb: readIO / (1024 * 1024),
writeMb: writeIO / (1024 * 1024),
};
};
const calculateNetworkUsage = (
networks: Dockerode.ContainerStats["networks"],
) => {
let totalRx = 0;
let totalTx = 0;
const stats = Object.keys(networks);
for (const interfaceName of stats) {
const net = networks[interfaceName];
totalRx += net?.rx_bytes || 0;
totalTx += net?.tx_bytes || 0;
}
return {
inputMb: totalRx / (1024 * 1024),
outputMb: totalTx / (1024 * 1024),
};
};

View File

@@ -1,11 +1,4 @@
import { type Job, Worker } from "bullmq";
// import {
// deployApplication,
// deployRemoteApplication,
// rebuildApplication,
// rebuildRemoteApplication,
// updateApplicationStatus,
// } from "../api/services/application";
import {
deployApplication,
deployRemoteApplication,

View File

@@ -2,7 +2,6 @@ import http from "node:http";
import { migration } from "@/server/db/migration";
import { config } from "dotenv";
import next from "next";
// import { IS_CLOUD } from "./constants";
import { deploymentWorker } from "./queues/deployments-queue";
import {
setupDirectories,
@@ -14,8 +13,8 @@ import {
createDefaultTraefikConfig,
initializeTraefik,
initCronJobs,
sendDokployRestartNotifications,
} from "@dokploy/builders";
import { sendDokployRestartNotifications } from "./utils/notifications/dokploy-restart";
import { setupDockerContainerLogsWebSocketServer } from "./wss/docker-container-logs";
import { setupDockerContainerTerminalWebSocketServer } from "./wss/docker-container-terminal";
import { setupDockerStatsMonitoringSocketServer } from "./wss/docker-stats";

View File

@@ -1,43 +0,0 @@
import { chmodSync, existsSync, mkdirSync } from "node:fs";
import { paths } from "../constants";
const createDirectoryIfNotExist = (dirPath: string) => {
if (!existsSync(dirPath)) {
mkdirSync(dirPath, { recursive: true });
console.log(`Directory created: ${dirPath}`);
}
};
export const setupDirectories = () => {
const {
APPLICATIONS_PATH,
BASE_PATH,
CERTIFICATES_PATH,
DYNAMIC_TRAEFIK_PATH,
LOGS_PATH,
MAIN_TRAEFIK_PATH,
MONITORING_PATH,
SSH_PATH,
} = paths();
const directories = [
BASE_PATH,
MAIN_TRAEFIK_PATH,
DYNAMIC_TRAEFIK_PATH,
LOGS_PATH,
APPLICATIONS_PATH,
SSH_PATH,
CERTIFICATES_PATH,
MONITORING_PATH,
];
for (const dir of directories) {
try {
createDirectoryIfNotExist(dir);
if (dir === SSH_PATH) {
chmodSync(SSH_PATH, "700");
}
} catch (error) {
console.log(error, " On path: ", dir);
}
}
};

View File

@@ -1,61 +0,0 @@
import type { CreateServiceOptions } from "dockerode";
import { docker } from "../constants";
import { pullImage } from "../utils/docker/utils";
export const initializePostgres = async () => {
const imageName = "postgres:16";
const containerName = "dokploy-postgres";
const settings: CreateServiceOptions = {
Name: containerName,
TaskTemplate: {
ContainerSpec: {
Image: imageName,
Env: [
"POSTGRES_USER=dokploy",
"POSTGRES_DB=dokploy",
"POSTGRES_PASSWORD=amukds4wi9001583845717ad2",
],
Mounts: [
{
Type: "volume",
Source: "dokploy-postgres-database",
Target: "/var/lib/postgresql/data",
},
],
},
Networks: [{ Target: "dokploy-network" }],
Placement: {
Constraints: ["node.role==manager"],
},
},
Mode: {
Replicated: {
Replicas: 1,
},
},
EndpointSpec: {
Ports: [
{
TargetPort: 5432,
PublishedPort: process.env.NODE_ENV === "development" ? 5432 : 0,
Protocol: "tcp",
PublishMode: "host",
},
],
},
};
try {
await pullImage(imageName);
const service = docker.getService(containerName);
const inspect = await service.inspect();
await service.update({
version: Number.parseInt(inspect.Version.Index),
...settings,
});
console.log("Postgres Started ✅");
} catch (error) {
await docker.createService(settings);
console.log("Postgres Not Found: Starting ✅");
}
};

View File

@@ -1,57 +0,0 @@
import type { CreateServiceOptions } from "dockerode";
import { docker } from "../constants";
import { pullImage } from "../utils/docker/utils";
export const initializeRedis = async () => {
const imageName = "redis:7";
const containerName = "dokploy-redis";
const settings: CreateServiceOptions = {
Name: containerName,
TaskTemplate: {
ContainerSpec: {
Image: imageName,
Mounts: [
{
Type: "volume",
Source: "redis-data-volume",
Target: "/data",
},
],
},
Networks: [{ Target: "dokploy-network" }],
Placement: {
Constraints: ["node.role==manager"],
},
},
Mode: {
Replicated: {
Replicas: 1,
},
},
EndpointSpec: {
Ports: [
{
TargetPort: 6379,
PublishedPort: process.env.NODE_ENV === "development" ? 6379 : 0,
Protocol: "tcp",
PublishMode: "host",
},
],
},
};
try {
await pullImage(imageName);
const service = docker.getService(containerName);
const inspect = await service.inspect();
await service.update({
version: Number.parseInt(inspect.Version.Index),
...settings,
});
console.log("Redis Started ✅");
} catch (error) {
await docker.createService(settings);
console.log("Redis Not Found: Starting ✅");
}
};

View File

@@ -1,91 +0,0 @@
import type { CreateServiceOptions } from "dockerode";
import { generateRandomPassword } from "../auth/random-password";
import { docker, paths } from "../constants";
import { pullImage } from "../utils/docker/utils";
import { execAsync } from "../utils/process/execAsync";
export const initializeRegistry = async (
username: string,
password: string,
) => {
const { REGISTRY_PATH } = paths();
const imageName = "registry:2.8.3";
const containerName = "dokploy-registry";
await generateRegistryPassword(username, password);
const randomPass = await generateRandomPassword();
const settings: CreateServiceOptions = {
Name: containerName,
TaskTemplate: {
ContainerSpec: {
Image: imageName,
Env: [
"REGISTRY_STORAGE_DELETE_ENABLED=true",
"REGISTRY_AUTH=htpasswd",
"REGISTRY_AUTH_HTPASSWD_REALM=Registry Realm",
"REGISTRY_AUTH_HTPASSWD_PATH=/auth/htpasswd",
`REGISTRY_HTTP_SECRET=${randomPass.hashedPassword}`,
],
Mounts: [
{
Type: "bind",
Source: `${REGISTRY_PATH}/htpasswd`,
Target: "/auth/htpasswd",
ReadOnly: true,
},
{
Type: "volume",
Source: "registry-data",
Target: "/var/lib/registry",
ReadOnly: false,
},
],
},
Networks: [{ Target: "dokploy-network" }],
Placement: {
Constraints: ["node.role==manager"],
},
},
Mode: {
Replicated: {
Replicas: 1,
},
},
EndpointSpec: {
Ports: [
{
TargetPort: 5000,
PublishedPort: 5000,
Protocol: "tcp",
PublishMode: "host",
},
],
},
};
try {
await pullImage(imageName);
const service = docker.getService(containerName);
const inspect = await service.inspect();
await service.update({
version: Number.parseInt(inspect.Version.Index),
...settings,
});
console.log("Registry Started ✅");
} catch (error) {
await docker.createService(settings);
console.log("Registry Not Found: Starting ✅");
}
};
const generateRegistryPassword = async (username: string, password: string) => {
try {
const { REGISTRY_PATH } = paths();
const command = `htpasswd -nbB ${username} "${password}" > ${REGISTRY_PATH}/htpasswd`;
const result = await execAsync(command);
console.log("Password generated ✅");
return result.stdout.trim();
} catch (error) {
console.error("Error generating password:", error);
return null;
}
};

View File

@@ -1,300 +0,0 @@
import { createWriteStream } from "node:fs";
import path from "node:path";
import { slugify } from "@/lib/slug";
import {
createServerDeployment,
updateDeploymentStatus,
} from "@/server/api/services/deployment";
import { findServerById } from "@/server/api/services/server";
import { paths } from "@/server/constants";
import {
getDefaultMiddlewares,
getDefaultServerTraefikConfig,
} from "@/server/setup/traefik-setup";
import { Client } from "ssh2";
import { recreateDirectory } from "../utils/filesystem/directory";
import { readSSHKey } from "../utils/filesystem/ssh";
export const serverSetup = async (serverId: string) => {
const server = await findServerById(serverId);
const { LOGS_PATH } = paths();
const slugifyName = slugify(`server ${server.name}`);
const fullPath = path.join(LOGS_PATH, slugifyName);
await recreateDirectory(fullPath);
const deployment = await createServerDeployment({
serverId: server.serverId,
title: "Setup Server",
description: "Setup Server",
});
const writeStream = createWriteStream(deployment.logPath, { flags: "a" });
try {
writeStream.write("\nInstalling Server Dependencies: ✅\n");
await installRequirements(serverId, deployment.logPath);
writeStream.close();
await updateDeploymentStatus(deployment.deploymentId, "done");
} catch (err) {
console.log(err);
await updateDeploymentStatus(deployment.deploymentId, "error");
writeStream.write(err);
writeStream.close();
}
};
const installRequirements = async (serverId: string, logPath: string) => {
const writeStream = createWriteStream(logPath, { flags: "a" });
const client = new Client();
const server = await findServerById(serverId);
if (!server.sshKeyId) {
writeStream.write("❌ No SSH Key found");
writeStream.close();
throw new Error("No SSH Key found");
}
const keys = await readSSHKey(server.sshKeyId);
if (!keys.privateKey) {
writeStream.write("❌ No SSH Key found");
writeStream.close();
throw new Error("No SSH Key found");
}
return new Promise<void>((resolve, reject) => {
client
.once("ready", () => {
const bashCommand = `
${validatePorts()}
command_exists() {
command -v "$@" > /dev/null 2>&1
}
${installRClone()}
${installDocker()}
${setupSwarm()}
${setupNetwork()}
${setupMainDirectory()}
${setupDirectories()}
${createTraefikConfig()}
${createDefaultMiddlewares()}
${createTraefikInstance()}
${installNixpacks()}
${installBuildpacks()}
`;
client.exec(bashCommand, (err, stream) => {
if (err) {
writeStream.write(err);
reject(err);
return;
}
stream
.on("close", () => {
writeStream.write("Connection closed ✅");
client.end();
resolve();
})
.on("data", (data: string) => {
writeStream.write(data.toString());
})
.stderr.on("data", (data) => {
writeStream.write(data.toString());
});
});
})
.on("error", (err) => {
client.end();
if (err.level === "client-authentication") {
writeStream.write(
`Authentication failed: Invalid SSH private key. ❌ Error: ${err.message} ${err.level}`,
);
reject(
new Error(
`Authentication failed: Invalid SSH private key. ❌ Error: ${err.message} ${err.level}`,
),
);
} else {
writeStream.write(
`SSH connection error: ${err.message} ${err.level}`,
);
reject(new Error(`SSH connection error: ${err.message}`));
}
})
.connect({
host: server.ipAddress,
port: server.port,
username: server.username,
privateKey: keys.privateKey,
timeout: 99999,
});
});
};
const setupDirectories = () => {
const { SSH_PATH } = paths(true);
const directories = Object.values(paths(true));
const createDirsCommand = directories
.map((dir) => `mkdir -p "${dir}"`)
.join(" && ");
const chmodCommand = `chmod 700 "${SSH_PATH}"`;
const command = `
${createDirsCommand}
${chmodCommand}
`;
return command;
};
const setupMainDirectory = () => `
# Check if the /etc/dokploy directory exists
if [ -d /etc/dokploy ]; then
echo "/etc/dokploy already exists ✅"
else
# Create the /etc/dokploy directory
mkdir -p /etc/dokploy
chmod 777 /etc/dokploy
echo "Directory /etc/dokploy created ✅"
fi
`;
export const setupSwarm = () => `
# Check if the node is already part of a Docker Swarm
if docker info | grep -q 'Swarm: active'; then
echo "Already part of a Docker Swarm ✅"
else
# Get IP address
get_ip() {
# Try to get IPv4
local ipv4=\$(curl -4s https://ifconfig.io 2>/dev/null)
if [ -n "\$ipv4" ]; then
echo "\$ipv4"
else
# Try to get IPv6
local ipv6=\$(curl -6s https://ifconfig.io 2>/dev/null)
if [ -n "\$ipv6" ]; then
echo "\$ipv6"
fi
fi
}
advertise_addr=\$(get_ip)
# Initialize Docker Swarm
docker swarm init --advertise-addr \$advertise_addr
echo "Swarm initialized ✅"
fi
`;
const setupNetwork = () => `
# Check if the dokploy-network already exists
if docker network ls | grep -q 'dokploy-network'; then
echo "Network dokploy-network already exists ✅"
else
# Create the dokploy-network if it doesn't exist
docker network create --driver overlay --attachable dokploy-network
echo "Network created ✅"
fi
`;
const installDocker = () => `
if command_exists docker; then
echo "Docker already installed ✅"
else
echo "Installing Docker ✅"
curl -sSL https://get.docker.com | sh -s -- --version 27.2.0
fi
`;
const validatePorts = () => `
# check if something is running on port 80
if ss -tulnp | grep ':80 ' >/dev/null; then
echo "Something is already running on port 80" >&2
fi
# check if something is running on port 443
if ss -tulnp | grep ':443 ' >/dev/null; then
echo "Something is already running on port 443" >&2
fi
`;
const createTraefikConfig = () => {
const config = getDefaultServerTraefikConfig();
const command = `
if [ -f "/etc/dokploy/traefik/dynamic/acme.json" ]; then
chmod 600 "/etc/dokploy/traefik/dynamic/acme.json"
fi
if [ -f "/etc/dokploy/traefik/traefik.yml" ]; then
echo "Traefik config already exists ✅"
else
echo "${config}" > /etc/dokploy/traefik/traefik.yml
fi
`;
return command;
};
export const createDefaultMiddlewares = () => {
const config = getDefaultMiddlewares();
const command = `
if [ -f "/etc/dokploy/traefik/dynamic/middlewares.yml" ]; then
echo "Middlewares config already exists ✅"
else
echo "${config}" > /etc/dokploy/traefik/dynamic/middlewares.yml
fi
`;
return command;
};
export const installRClone = () => `
curl https://rclone.org/install.sh | sudo bash
`;
export const createTraefikInstance = () => {
const command = `
# Check if dokpyloy-traefik exists
if docker service ls | grep -q 'dokploy-traefik'; then
echo "Traefik already exists ✅"
else
# Create the dokploy-traefik service
docker service create \
--name dokploy-traefik \
--replicas 1 \
--constraint 'node.role==manager' \
--network dokploy-network \
--mount type=bind,src=/etc/dokploy/traefik/traefik.yml,dst=/etc/traefik/traefik.yml \
--mount type=bind,src=/etc/dokploy/traefik/dynamic,dst=/etc/dokploy/traefik/dynamic \
--mount type=bind,src=/var/run/docker.sock,dst=/var/run/docker.sock \
--label traefik.enable=true \
--publish mode=host,target=443,published=443 \
--publish mode=host,target=80,published=80 \
traefik:v3.1.2
fi
`;
return command;
};
const installNixpacks = () => `
if command_exists nixpacks; then
echo "Nixpacks already installed ✅"
else
VERSION=1.28.1 bash -c "$(curl -fsSL https://nixpacks.com/install.sh)"
echo "Nixpacks version 1.28.1 installed ✅"
fi
`;
const installBuildpacks = () => `
if command_exists pack; then
echo "Buildpacks already installed ✅"
else
curl -sSL "https://github.com/buildpacks/pack/releases/download/v0.35.0/pack-v0.35.0-linux.tgz" | tar -C /usr/local/bin/ --no-same-owner -xzv pack
echo "Buildpacks version 0.35.0 installed ✅"
fi
`;

View File

@@ -1,47 +0,0 @@
import { docker } from "../constants";
export const initializeSwarm = async () => {
const swarmInitialized = await dockerSwarmInitialized();
if (swarmInitialized) {
console.log("Swarm is already initilized");
} else {
await docker.swarmInit({
AdvertiseAddr: "127.0.0.1",
ListenAddr: "0.0.0.0",
});
console.log("Swarm was initilized");
}
};
export const dockerSwarmInitialized = async () => {
try {
await docker.swarmInspect();
return true;
} catch (e) {
return false;
}
};
export const initializeNetwork = async () => {
const networkInitialized = await dockerNetworkInitialized();
if (networkInitialized) {
console.log("Network is already initilized");
} else {
docker.createNetwork({
Attachable: true,
Name: "dokploy-network",
Driver: "overlay",
});
console.log("Network was initilized");
}
};
export const dockerNetworkInitialized = async () => {
try {
await docker.getNetwork("dokploy-network").inspect();
return true;
} catch (e) {
return false;
}
};

View File

@@ -1,320 +0,0 @@
import { chmodSync, existsSync, mkdirSync, writeFileSync } from "node:fs";
import path from "node:path";
import type { ContainerTaskSpec, CreateServiceOptions } from "dockerode";
import { dump } from "js-yaml";
import { paths } from "../constants";
import { pullImage, pullRemoteImage } from "../utils/docker/utils";
import { getRemoteDocker } from "../utils/servers/remote-docker";
import type { FileConfig } from "../utils/traefik/file-types";
import type { MainTraefikConfig } from "../utils/traefik/types";
const TRAEFIK_SSL_PORT =
Number.parseInt(process.env.TRAEFIK_SSL_PORT ?? "", 10) || 443;
const TRAEFIK_PORT = Number.parseInt(process.env.TRAEFIK_PORT ?? "", 10) || 80;
interface TraefikOptions {
enableDashboard?: boolean;
env?: string[];
serverId?: string;
}
export const initializeTraefik = async ({
enableDashboard = false,
env,
serverId,
}: TraefikOptions = {}) => {
const { MAIN_TRAEFIK_PATH, DYNAMIC_TRAEFIK_PATH } = paths(!!serverId);
const imageName = "traefik:v3.1.2";
const containerName = "dokploy-traefik";
const settings: CreateServiceOptions = {
Name: containerName,
TaskTemplate: {
ContainerSpec: {
Image: imageName,
Env: env,
Mounts: [
{
Type: "bind",
Source: `${MAIN_TRAEFIK_PATH}/traefik.yml`,
Target: "/etc/traefik/traefik.yml",
},
{
Type: "bind",
Source: DYNAMIC_TRAEFIK_PATH,
Target: "/etc/dokploy/traefik/dynamic",
},
{
Type: "bind",
Source: "/var/run/docker.sock",
Target: "/var/run/docker.sock",
},
],
},
Networks: [{ Target: "dokploy-network" }],
Placement: {
Constraints: ["node.role==manager"],
},
},
Mode: {
Replicated: {
Replicas: 1,
},
},
Labels: {
"traefik.enable": "true",
},
EndpointSpec: {
Ports: [
{
TargetPort: 443,
PublishedPort: TRAEFIK_SSL_PORT,
PublishMode: "host",
},
{
TargetPort: 80,
PublishedPort: TRAEFIK_PORT,
PublishMode: "host",
},
...(enableDashboard
? [
{
TargetPort: 8080,
PublishedPort: 8080,
PublishMode: "host" as const,
},
]
: []),
],
},
};
const docker = await getRemoteDocker(serverId);
try {
if (serverId) {
await pullRemoteImage(imageName, serverId);
} else {
await pullImage(imageName);
}
const service = docker.getService(containerName);
const inspect = await service.inspect();
const existingEnv = inspect.Spec.TaskTemplate.ContainerSpec.Env || [];
const updatedEnv = !env ? existingEnv : env;
const updatedSettings = {
...settings,
TaskTemplate: {
...settings.TaskTemplate,
ContainerSpec: {
...(settings?.TaskTemplate as ContainerTaskSpec).ContainerSpec,
Env: updatedEnv,
},
},
};
await service.update({
version: Number.parseInt(inspect.Version.Index),
...updatedSettings,
});
console.log("Traefik Started ✅");
} catch (error) {
await docker.createService(settings);
console.log("Traefik Not Found: Starting ✅");
}
};
export const createDefaultServerTraefikConfig = () => {
const { DYNAMIC_TRAEFIK_PATH } = paths();
const configFilePath = path.join(DYNAMIC_TRAEFIK_PATH, "dokploy.yml");
if (existsSync(configFilePath)) {
console.log("Default traefik config already exists");
return;
}
const appName = "dokploy";
const serviceURLDefault = `http://${appName}:${process.env.PORT || 3000}`;
const config: FileConfig = {
http: {
routers: {
[`${appName}-router-app`]: {
rule: `Host(\`${appName}.docker.localhost\`) && PathPrefix(\`/\`)`,
service: `${appName}-service-app`,
entryPoints: ["web"],
},
},
services: {
[`${appName}-service-app`]: {
loadBalancer: {
servers: [{ url: serviceURLDefault }],
passHostHeader: true,
},
},
},
},
};
const yamlStr = dump(config);
mkdirSync(DYNAMIC_TRAEFIK_PATH, { recursive: true });
writeFileSync(
path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`),
yamlStr,
"utf8",
);
};
export const getDefaultTraefikConfig = () => {
const configObject: MainTraefikConfig = {
providers: {
...(process.env.NODE_ENV === "development"
? {
docker: {
defaultRule:
"Host(`{{ trimPrefix `/` .Name }}.docker.localhost`)",
},
}
: {
swarm: {
exposedByDefault: false,
watch: false,
},
docker: {
exposedByDefault: false,
},
}),
file: {
directory: "/etc/dokploy/traefik/dynamic",
watch: true,
},
},
entryPoints: {
web: {
address: `:${TRAEFIK_PORT}`,
},
websecure: {
address: `:${TRAEFIK_SSL_PORT}`,
...(process.env.NODE_ENV === "production" && {
http: {
tls: {
certResolver: "letsencrypt",
},
},
}),
},
},
api: {
insecure: true,
},
...(process.env.NODE_ENV === "production" && {
certificatesResolvers: {
letsencrypt: {
acme: {
email: "test@localhost.com",
storage: "/etc/dokploy/traefik/dynamic/acme.json",
httpChallenge: {
entryPoint: "web",
},
},
},
},
}),
};
const yamlStr = dump(configObject);
return yamlStr;
};
export const getDefaultServerTraefikConfig = () => {
const configObject: MainTraefikConfig = {
providers: {
swarm: {
exposedByDefault: false,
watch: false,
},
docker: {
exposedByDefault: false,
},
file: {
directory: "/etc/dokploy/traefik/dynamic",
watch: true,
},
},
entryPoints: {
web: {
address: `:${TRAEFIK_PORT}`,
},
websecure: {
address: `:${TRAEFIK_SSL_PORT}`,
http: {
tls: {
certResolver: "letsencrypt",
},
},
},
},
api: {
insecure: true,
},
certificatesResolvers: {
letsencrypt: {
acme: {
email: "test@localhost.com",
storage: "/etc/dokploy/traefik/dynamic/acme.json",
httpChallenge: {
entryPoint: "web",
},
},
},
},
};
const yamlStr = dump(configObject);
return yamlStr;
};
export const createDefaultTraefikConfig = () => {
const { MAIN_TRAEFIK_PATH, DYNAMIC_TRAEFIK_PATH } = paths();
const mainConfig = path.join(MAIN_TRAEFIK_PATH, "traefik.yml");
const acmeJsonPath = path.join(DYNAMIC_TRAEFIK_PATH, "acme.json");
if (existsSync(acmeJsonPath)) {
chmodSync(acmeJsonPath, "600");
}
if (existsSync(mainConfig)) {
console.log("Main config already exists");
return;
}
const yamlStr = getDefaultTraefikConfig();
mkdirSync(MAIN_TRAEFIK_PATH, { recursive: true });
writeFileSync(mainConfig, yamlStr, "utf8");
};
export const getDefaultMiddlewares = () => {
const defaultMiddlewares = {
http: {
middlewares: {
"redirect-to-https": {
redirectScheme: {
scheme: "https",
permanent: true,
},
},
},
},
};
const yamlStr = dump(defaultMiddlewares);
return yamlStr;
};
export const createDefaultMiddlewares = () => {
const { DYNAMIC_TRAEFIK_PATH } = paths();
const middlewaresPath = path.join(DYNAMIC_TRAEFIK_PATH, "middlewares.yml");
if (existsSync(middlewaresPath)) {
console.log("Default middlewares already exists");
return;
}
const yamlStr = getDefaultMiddlewares();
mkdirSync(DYNAMIC_TRAEFIK_PATH, { recursive: true });
writeFileSync(middlewaresPath, yamlStr, "utf8");
};

View File

@@ -1,114 +0,0 @@
import { findAdmin, updateAdmin } from "@/server/api/services/admin";
import { paths } from "@/server/constants";
import { type RotatingFileStream, createStream } from "rotating-file-stream";
import { execAsync } from "../process/execAsync";
/**
 * Singleton that manages rotation of the Traefik access log via
 * rotating-file-stream. Whether rotation is enabled is persisted on the
 * admin record (`enableLogRotation`), so the DB is the source of truth and
 * the stream is (de)activated to match it.
 */
class LogRotationManager {
	private static instance: LogRotationManager;
	// Active rotating stream, or null while rotation is disabled.
	private stream: RotatingFileStream | null = null;

	private constructor() {
		// Fire-and-forget: restore the persisted state on first construction.
		this.initialize().catch(console.error);
	}

	/** Lazily create and return the process-wide instance. */
	public static getInstance(): LogRotationManager {
		if (!LogRotationManager.instance) {
			LogRotationManager.instance = new LogRotationManager();
		}
		return LogRotationManager.instance;
	}

	// Open the stream when the DB says rotation should be on.
	private async initialize(): Promise<void> {
		const isActive = await this.getStateFromDB();
		if (isActive) {
			await this.activateStream();
		}
	}

	// Read the persisted flag; defaults to false when unset.
	private async getStateFromDB(): Promise<boolean> {
		const setting = await findAdmin();
		return setting?.enableLogRotation ?? false;
	}

	// Persist the flag on the admin record.
	private async setStateInDB(active: boolean): Promise<void> {
		const admin = await findAdmin();
		await updateAdmin(admin.authId, {
			enableLogRotation: active,
		});
	}

	// Open the rotating stream (closing any previous one first).
	private async activateStream(): Promise<void> {
		const { DYNAMIC_TRAEFIK_PATH } = paths();
		if (this.stream) {
			await this.deactivateStream();
		}
		// Rotate at 100M or daily, keeping 6 gzip-compressed generations.
		this.stream = createStream("access.log", {
			size: "100M",
			interval: "1d",
			path: DYNAMIC_TRAEFIK_PATH,
			rotate: 6,
			compress: "gzip",
		});
		this.stream.on("rotation", this.handleRotation.bind(this));
	}

	// Close the stream and clear it; resolves once the stream has ended.
	private async deactivateStream(): Promise<void> {
		return new Promise<void>((resolve) => {
			if (this.stream) {
				this.stream.end(() => {
					this.stream = null;
					resolve();
				});
			} else {
				resolve();
			}
		});
	}

	/** Enable rotation: persist the flag and open the stream. Idempotent. */
	public async activate(): Promise<boolean> {
		const currentState = await this.getStateFromDB();
		if (currentState) {
			return true;
		}
		await this.setStateInDB(true);
		await this.activateStream();
		return true;
	}

	/** Disable rotation: persist the flag and close the stream. Idempotent. */
	public async deactivate(): Promise<boolean> {
		console.log("Deactivating log rotation...");
		const currentState = await this.getStateFromDB();
		if (!currentState) {
			console.log("Log rotation is already inactive in DB");
			return true;
		}
		await this.setStateInDB(false);
		await this.deactivateStream();
		console.log("Log rotation deactivated successfully");
		return true;
	}

	// On each rotation: re-check the DB flag (shutting the stream down if
	// rotation was disabled meanwhile) and signal Traefik with USR1 so it
	// reopens its log file. Errors are logged, never thrown.
	private async handleRotation() {
		try {
			const status = await this.getStatus();
			if (!status) {
				await this.deactivateStream();
			}
			await execAsync(
				"docker kill -s USR1 $(docker ps -q --filter name=dokploy-traefik)",
			);
			console.log("USR1 Signal send to Traefik");
		} catch (error) {
			console.error("Error to send USR1 Signal to Traefik:", error);
		}
	}

	/** Current persisted state of log rotation. */
	public async getStatus(): Promise<boolean> {
		const dbState = await this.getStateFromDB();
		return dbState;
	}
}
// Shared process-wide instance.
export const logRotationManager = LogRotationManager.getInstance();

View File

@@ -1,48 +0,0 @@
/**
 * One parsed JSON access-log line. Field names appear to mirror Traefik's
 * JSON access-log format (Client*/Request*/Origin*/Downstream* groups) —
 * TODO confirm against the Traefik version in use; nothing here validates
 * the shape at runtime.
 */
export interface LogEntry {
	// Client connection details.
	ClientAddr: string;
	ClientHost: string;
	ClientPort: string;
	ClientUsername: string;
	// Response as delivered downstream (to the client).
	DownstreamContentSize: number;
	DownstreamStatus: number;
	Duration: number;
	// Response as produced by the origin (backend) service.
	OriginContentSize: number;
	OriginDuration: number;
	OriginStatus: number;
	Overhead: number;
	// Incoming request details.
	RequestAddr: string;
	RequestContentSize: number;
	RequestCount: number;
	RequestHost: string;
	RequestMethod: string;
	RequestPath: string;
	RequestPort: string;
	RequestProtocol: string;
	RequestScheme: string;
	RetryAttempts: number;
	// Routing info: which router/service handled the request.
	RouterName: string;
	ServiceAddr: string;
	ServiceName: string;
	ServiceURL: {
		Scheme: string;
		Opaque: string;
		User: null;
		Host: string;
		Path: string;
		RawPath: string;
		ForceQuery: boolean;
		RawQuery: string;
		Fragment: string;
		RawFragment: string;
	};
	// Timestamps (local and UTC) of the request start.
	StartLocal: string;
	StartUTC: string;
	downstream_Content_Type: string;
	entryPointName: string;
	level: string;
	msg: string;
	origin_Content_Type: string;
	request_Content_Type: string;
	request_User_Agent: string;
	time: string;
}

View File

@@ -1,119 +0,0 @@
import _ from "lodash";
import type { LogEntry } from "./types";
/** One bucket of the hourly request histogram. */
interface HourlyData {
	hour: string;
	count: number;
}

/**
 * Build an hourly request histogram from newline-delimited JSON access logs.
 *
 * Empty lines, lines that fail to parse (logged and skipped), and entries
 * for the internal "dokploy-service-app@file" service are ignored.
 * Buckets are returned in ascending chronological order.
 */
export function processLogs(logString: string): HourlyData[] {
	if (!logString) {
		return [];
	}
	const countsByHour = new Map<string, number>();
	for (const line of logString.split("\n")) {
		if (!line) {
			continue;
		}
		try {
			const log = JSON.parse(line);
			if (log.ServiceName === "dokploy-service-app@file") {
				continue;
			}
			// Truncate the timestamp to the hour, e.g. "2024-01-01T05:00:00Z".
			const hour = `${new Date(log.StartUTC).toISOString().slice(0, 13)}:00:00Z`;
			countsByHour.set(hour, (countsByHour.get(hour) ?? 0) + 1);
		} catch (error) {
			console.error("Error parsing log entry:", error);
		}
	}
	return [...countsByHour.entries()]
		.map(([hour, count]) => ({ hour, count }))
		.sort((a, b) => new Date(a.hour).getTime() - new Date(b.hour).getTime());
}
/** Pagination request: zero-based page index plus page size. */
interface PageInfo {
	pageIndex: number;
	pageSize: number;
}

/** Sort request: LogEntry field name and direction. */
interface SortInfo {
	id: string;
	desc: boolean;
}

/** Inclusive status bounds for each UI status-class bucket. */
const STATUS_BOUNDS: Record<string, [number, number]> = {
	info: [100, 199],
	success: [200, 299],
	redirect: [300, 399],
	client: [400, 499],
	server: [500, 599],
};

/**
 * Parse newline-delimited JSON access logs and apply search, status-class
 * filtering, sorting and pagination.
 *
 * @param rawConfig - raw log file contents, one JSON object per line
 * @param page - optional pagination, applied after filtering and sorting
 * @param sort - optional sort field/direction; default is `time` descending
 * @param search - optional case-insensitive substring match on RequestPath
 * @param status - optional status classes ("info" | "success" | ...)
 * @returns the requested page of entries plus the pre-pagination total
 * @throws Error when any non-empty line is not valid JSON
 */
export function parseRawConfig(
	rawConfig: string,
	page?: PageInfo,
	sort?: SortInfo,
	search?: string,
	status?: string[],
): { data: LogEntry[]; totalCount: number } {
	try {
		if (!rawConfig) {
			return { data: [], totalCount: 0 };
		}
		let logs = rawConfig
			.split("\n")
			.filter((line) => line)
			.map((line) => JSON.parse(line) as LogEntry)
			.filter((log) => log.ServiceName !== "dokploy-service-app@file");
		if (search) {
			const needle = search.toLowerCase();
			logs = logs.filter((log) =>
				log.RequestPath.toLowerCase().includes(needle),
			);
		}
		if (status && status.length > 0) {
			logs = logs.filter((log) =>
				status.some((range) => isStatusInRange(log.DownstreamStatus, range)),
			);
		}
		// Count before pagination so callers can render total pages.
		const totalCount = logs.length;
		const field = sort ? sort.id : "time";
		const descending = sort ? sort.desc : true;
		const direction = descending ? -1 : 1;
		logs.sort((a, b) => {
			const left = (a as unknown as Record<string, string | number>)[field];
			const right = (b as unknown as Record<string, string | number>)[field];
			if (left < right) return -1 * direction;
			if (left > right) return 1 * direction;
			return 0;
		});
		if (page) {
			const start = page.pageIndex * page.pageSize;
			logs = logs.slice(start, start + page.pageSize);
		}
		return { data: logs, totalCount };
	} catch (error) {
		console.error("Error parsing rawConfig:", error);
		throw new Error("Failed to parse rawConfig");
	}
}

/** True when `status` falls into the named status-class bucket. */
const isStatusInRange = (status: number, range: string) => {
	const bounds = STATUS_BOUNDS[range];
	return bounds !== undefined && status >= bounds[0] && status <= bounds[1];
};

View File

@@ -1,56 +0,0 @@
import crypto from "node:crypto";
import { findComposeById } from "@/server/api/services/compose";
import { dump, load } from "js-yaml";
import { addSuffixToAllConfigs } from "./compose/configs";
import { addSuffixToAllNetworks } from "./compose/network";
import { addSuffixToAllSecrets } from "./compose/secrets";
import { addSuffixToAllServiceNames } from "./compose/service";
import { addSuffixToAllVolumes } from "./compose/volume";
import type { ComposeSpecification } from "./types";
/**
 * Produce an 8-character lowercase hex string from 4 random bytes, used to
 * suffix compose resource names so concurrent deployments do not collide.
 */
export const generateRandomHash = (): string =>
	crypto.randomBytes(4).toString("hex");
/**
 * Load the stored compose file for `composeId`, append a suffix (random
 * when not provided) to every named resource, and return the YAML string.
 */
export const randomizeComposeFile = async (
	composeId: string,
	suffix?: string,
) => {
	const compose = await findComposeById(composeId);
	const spec = load(compose.composeFile) as ComposeSpecification;
	const appliedSuffix = suffix || generateRandomHash();
	return dump(addSuffixToAllProperties(spec, appliedSuffix));
};
/**
 * Append `suffix` to every named resource in the spec. With a falsy suffix
 * the spec is returned untouched (same object reference).
 */
export const randomizeSpecificationFile = (
	composeSpec: ComposeSpecification,
	suffix?: string,
) => (suffix ? addSuffixToAllProperties(composeSpec, suffix) : composeSpec);
/**
 * Append `suffix` to every service, volume, network, config and secret
 * name in the compose document, returning a new specification.
 */
export const addSuffixToAllProperties = (
	composeData: ComposeSpecification,
	suffix: string,
): ComposeSpecification => {
	const transforms = [
		addSuffixToAllServiceNames,
		addSuffixToAllVolumes,
		addSuffixToAllNetworks,
		addSuffixToAllConfigs,
		addSuffixToAllSecrets,
	];
	return transforms.reduce((spec, apply) => apply(spec, suffix), {
		...composeData,
	});
};

View File

@@ -1,73 +0,0 @@
import _ from "lodash";
import type {
ComposeSpecification,
DefinitionsConfig,
DefinitionsService,
} from "../types";
/**
 * Return a copy of the top-level `configs` map with `-suffix` appended to
 * every config name. Config bodies are deep-cloned, not shared.
 */
export const addSuffixToConfigsRoot = (
	configs: { [key: string]: DefinitionsConfig },
	suffix: string,
): { [key: string]: DefinitionsConfig } =>
	Object.fromEntries(
		Object.entries(configs).map(([configName, config]) => [
			`${configName}-${suffix}`,
			structuredClone(config),
		]),
	);
/**
 * Return a copy of `services` in which every config reference — short
 * string form or long `source` form — has `-suffix` appended. Service
 * objects are deep-cloned; the input map is not mutated.
 */
export const addSuffixToConfigsInServices = (
	services: { [key: string]: DefinitionsService },
	suffix: string,
): { [key: string]: DefinitionsService } => {
	const renamed: { [key: string]: DefinitionsService } = {};
	for (const [serviceName, serviceConfig] of Object.entries(services)) {
		const clone = structuredClone(serviceConfig);
		if ("configs" in clone) {
			clone.configs = (clone.configs ?? []).map((config) => {
				if (typeof config === "string") {
					return `${config}-${suffix}`;
				}
				if (config && typeof config === "object" && config.source) {
					return { ...config, source: `${config.source}-${suffix}` };
				}
				return config;
			});
		}
		renamed[serviceName] = clone;
	}
	return renamed;
};
/**
 * Apply the config-name suffix to both the top-level `configs` map and
 * every service's config references. Returns a shallow-copied spec.
 */
export const addSuffixToAllConfigs = (
	composeData: ComposeSpecification,
	suffix: string,
): ComposeSpecification => {
	const updated = { ...composeData };
	if (updated.configs) {
		updated.configs = addSuffixToConfigsRoot(updated.configs, suffix);
	}
	if (updated.services) {
		updated.services = addSuffixToConfigsInServices(updated.services, suffix);
	}
	return updated;
};

View File

@@ -1,83 +0,0 @@
import _ from "lodash";
import type {
ComposeSpecification,
DefinitionsNetwork,
DefinitionsService,
} from "../types";
/**
 * Append `-suffix` to every top-level network name, except the shared
 * "dokploy-network" which must keep its well-known name. Network config
 * values keep their original object references (only keys change).
 */
export const addSuffixToNetworksRoot = (
	networks: { [key: string]: DefinitionsNetwork },
	suffix: string,
): { [key: string]: DefinitionsNetwork } =>
	Object.fromEntries(
		Object.entries(networks).map(([name, network]) => [
			name === "dokploy-network" ? name : `${name}-${suffix}`,
			network,
		]),
	);
/**
 * Rewrite network references inside each service to `<name>-<suffix>`,
 * keeping the shared "dokploy-network" name intact. Handles both the array
 * form and the object form of `service.networks`.
 *
 * NOTE(review): the service objects are mutated in place — `_.mapValues`
 * copies the map, not the service values.
 */
export const addSuffixToServiceNetworks = (
	services: { [key: string]: DefinitionsService },
	suffix: string,
): { [key: string]: DefinitionsService } => {
	return _.mapValues(services, (service) => {
		if (service.networks) {
			// Case 1 (most common): list form, e.g. `networks: [web, db]`.
			if (Array.isArray(service.networks)) {
				service.networks = service.networks.map((network: string) => {
					if (network === "dokploy-network") {
						return "dokploy-network";
					}
					return `${network}-${suffix}`;
				});
			} else {
				// Case 2: object form — suffix each network key.
				service.networks = _.mapKeys(service.networks, (_value, key) => {
					if (key === "dokploy-network") {
						return "dokploy-network";
					}
					return `${key}-${suffix}`;
				});
				// Case 3: suffix the keys inside each network's config object,
				// leaving "aliases" alone.
				// NOTE(review): this also suffixes option keys such as
				// "ipv4_address", which looks unintended — confirm before
				// changing, since callers may rely on current output.
				service.networks = _.mapValues(service.networks, (value) => {
					if (value && typeof value === "object") {
						return _.mapKeys(value, (_val, innerKey) => {
							if (innerKey === "aliases") {
								return "aliases";
							}
							return `${innerKey}-${suffix}`;
						});
					}
					return value;
				});
			}
		}
		return service;
	});
};
/**
 * Apply the network-name suffix to both the top-level `networks` map and
 * every service's network references. Returns a shallow-copied spec.
 */
export const addSuffixToAllNetworks = (
	composeData: ComposeSpecification,
	suffix: string,
): ComposeSpecification => {
	const updated = { ...composeData };
	if (updated.networks) {
		updated.networks = addSuffixToNetworksRoot(updated.networks, suffix);
	}
	if (updated.services) {
		updated.services = addSuffixToServiceNetworks(updated.services, suffix);
	}
	return updated;
};

View File

@@ -1,68 +0,0 @@
import _ from "lodash";
import type { ComposeSpecification, DefinitionsService } from "../types";
/**
 * Return a copy of the top-level `secrets` map with `-suffix` appended to
 * every secret name. Secret bodies are deep-cloned, not shared; a missing
 * input yields an empty map.
 */
export const addSuffixToSecretsRoot = (
	secrets: ComposeSpecification["secrets"],
	suffix: string,
): ComposeSpecification["secrets"] =>
	Object.fromEntries(
		Object.entries(secrets ?? {}).map(([secretName, secretConfig]) => [
			`${secretName}-${suffix}`,
			structuredClone(secretConfig),
		]),
	);
/**
 * Return a copy of `services` in which every secret reference — short
 * string form or long `source` form — has `-suffix` appended. Service
 * objects are deep-cloned; the input map is not mutated.
 */
export const addSuffixToSecretsInServices = (
	services: { [key: string]: DefinitionsService },
	suffix: string,
): { [key: string]: DefinitionsService } => {
	const renamed: { [key: string]: DefinitionsService } = {};
	for (const [serviceName, serviceConfig] of Object.entries(services)) {
		const clone = structuredClone(serviceConfig);
		if ("secrets" in clone) {
			clone.secrets = (clone.secrets ?? []).map((secret) => {
				if (typeof secret === "string") {
					return `${secret}-${suffix}`;
				}
				if (secret && typeof secret === "object" && secret.source) {
					return { ...secret, source: `${secret.source}-${suffix}` };
				}
				return secret;
			});
		}
		renamed[serviceName] = clone;
	}
	return renamed;
};
/**
 * Apply the secret-name suffix to both the top-level `secrets` map and
 * every service's secret references. Returns a shallow-copied spec.
 */
export const addSuffixToAllSecrets = (
	composeData: ComposeSpecification,
	suffix: string,
): ComposeSpecification => {
	const updated = { ...composeData };
	if (updated.secrets) {
		updated.secrets = addSuffixToSecretsRoot(updated.secrets, suffix);
	}
	if (updated.services) {
		updated.services = addSuffixToSecretsInServices(updated.services, suffix);
	}
	return updated;
};

View File

@@ -1,90 +0,0 @@
// Places where another service's name can be referenced and must therefore
// be rewritten when services are renamed:
// - depends_on: dependencies between services.
// - networks: uncommon, but services can be referenced on custom networks.
// - volumes_from: reusing volumes defined by another service.
// - links: links between services.
// - extends: extending another service's configuration.
import _ from "lodash";
import type { ComposeSpecification, DefinitionsService } from "../types";
// Object form of `depends_on` (service name -> dependency config), i.e. the
// non-array variant of DefinitionsService["depends_on"] with null removed.
type DependsOnObject = NonNullable<
	Exclude<DefinitionsService["depends_on"], string[]> extends infer T
		? { [K in keyof T]: T[K] }
		: never
>;
/**
 * Rename every service to `<name>-<suffix>` and rewrite every reference to
 * a service name inside each service definition: depends_on (array and
 * object forms), container_name, links, extends and volumes_from.
 * Service objects are deep-cloned; the input map is not mutated.
 */
export const addSuffixToServiceNames = (
	services: { [key: string]: DefinitionsService },
	suffix: string,
): { [key: string]: DefinitionsService } => {
	const withSuffix = (name: string) => `${name}-${suffix}`;
	const renamed: { [key: string]: DefinitionsService } = {};
	for (const [serviceName, serviceConfig] of Object.entries(services)) {
		const clone = structuredClone(serviceConfig);
		if (clone.depends_on) {
			clone.depends_on = Array.isArray(clone.depends_on)
				? clone.depends_on.map((dep) => withSuffix(dep))
				: Object.fromEntries(
						Object.entries(clone.depends_on).map(([dep, depConfig]) => [
							withSuffix(dep),
							depConfig,
						]),
					);
		}
		if (clone.container_name) {
			clone.container_name = withSuffix(clone.container_name);
		}
		if (clone.links) {
			clone.links = clone.links.map((link) => withSuffix(link));
		}
		if (clone.extends) {
			if (typeof clone.extends === "string") {
				clone.extends = withSuffix(clone.extends);
			} else {
				clone.extends.service = withSuffix(clone.extends.service);
			}
		}
		if (clone.volumes_from) {
			clone.volumes_from = clone.volumes_from.map((vol) => withSuffix(vol));
		}
		renamed[withSuffix(serviceName)] = clone;
	}
	return renamed;
};
/**
 * Apply the service-name suffix to the `services` map of the spec.
 * Returns a shallow-copied spec; other sections are left untouched.
 */
export const addSuffixToAllServiceNames = (
	composeData: ComposeSpecification,
	suffix: string,
): ComposeSpecification => {
	if (!composeData.services) {
		return { ...composeData };
	}
	return {
		...composeData,
		services: addSuffixToServiceNames(composeData.services, suffix),
	};
};

View File

@@ -1,78 +0,0 @@
import _ from "lodash";
import type {
ComposeSpecification,
DefinitionsService,
DefinitionsVolume,
} from "../types";
/**
 * Append `-suffix` to every top-level named volume. Volume config values
 * keep their original object references (only the keys change).
 */
export const addSuffixToVolumesRoot = (
	volumes: { [key: string]: DefinitionsVolume },
	suffix: string,
): { [key: string]: DefinitionsVolume } =>
	Object.fromEntries(
		Object.entries(volumes).map(([name, volume]) => [
			`${name}-${suffix}`,
			volume,
		]),
	);
/**
 * Append `-suffix` to every named-volume reference inside services.
 *
 * Short syntax ("name:/target[:mode]") gets the suffix on the volume name;
 * bind mounts ("./x", "/x") and variable-based sources (e.g. "$PWD/x") are
 * left untouched. Long-syntax objects with `type: "volume"` get the suffix
 * on `source`. Service objects are deep-cloned; the input is not mutated.
 *
 * Fixes over the previous version: short-syntax entries with an access
 * mode ("data:/var/lib:ro") no longer lose the trailing ":ro" (the old
 * `[name, path] = volume.split(":")` dropped every segment after the
 * second), and a bare volume name no longer gains a ":undefined" target.
 */
export const addSuffixToVolumesInServices = (
	services: { [key: string]: DefinitionsService },
	suffix: string,
): { [key: string]: DefinitionsService } => {
	const renamed: { [key: string]: DefinitionsService } = {};
	for (const [serviceName, serviceConfig] of Object.entries(services)) {
		const clone = structuredClone(serviceConfig);
		if ("volumes" in clone) {
			clone.volumes = (clone.volumes ?? []).map((volume) => {
				if (typeof volume === "string") {
					const [volumeName, ...rest] = volume.split(":");
					// Skip bind mounts and variable-based sources (e.g. $PWD).
					if (
						volumeName?.startsWith(".") ||
						volumeName?.startsWith("/") ||
						volumeName?.startsWith("$")
					) {
						return volume;
					}
					// Keep target and any mode flags (":ro", ":z", ...) intact.
					return rest.length > 0
						? `${volumeName}-${suffix}:${rest.join(":")}`
						: `${volumeName}-${suffix}`;
				}
				if (
					volume &&
					typeof volume === "object" &&
					volume.type === "volume" &&
					volume.source
				) {
					return { ...volume, source: `${volume.source}-${suffix}` };
				}
				return volume;
			});
		}
		renamed[serviceName] = clone;
	}
	return renamed;
};
/**
 * Apply the volume-name suffix to both the top-level `volumes` map and
 * every service's volume references. Returns a shallow-copied spec.
 */
export const addSuffixToAllVolumes = (
	composeData: ComposeSpecification,
	suffix: string,
): ComposeSpecification => {
	const updated = { ...composeData };
	if (updated.volumes) {
		updated.volumes = addSuffixToVolumesRoot(updated.volumes, suffix);
	}
	if (updated.services) {
		updated.services = addSuffixToVolumesInServices(updated.services, suffix);
	}
	return updated;
};

View File

@@ -1,327 +0,0 @@
import fs, { existsSync, readFileSync } from "node:fs";
import { writeFile } from "node:fs/promises";
import { join } from "node:path";
import type { Compose } from "@/server/api/services/compose";
import type { Domain } from "@/server/api/services/domain";
import { paths } from "@/server/constants";
import { dump, load } from "js-yaml";
import { execAsyncRemote } from "../process/execAsync";
import {
cloneRawBitbucketRepository,
cloneRawBitbucketRepositoryRemote,
} from "../providers/bitbucket";
import {
cloneGitRawRepository,
cloneRawGitRepositoryRemote,
} from "../providers/git";
import {
cloneRawGithubRepository,
cloneRawGithubRepositoryRemote,
} from "../providers/github";
import {
cloneRawGitlabRepository,
cloneRawGitlabRepositoryRemote,
} from "../providers/gitlab";
import {
createComposeFileRaw,
createComposeFileRawRemote,
} from "../providers/raw";
import { randomizeSpecificationFile } from "./compose";
import type {
ComposeSpecification,
DefinitionsService,
PropertiesNetworks,
} from "./types";
import { encodeBase64 } from "./utils";
/**
 * Materialize the compose file for a local deployment from its configured
 * source (git provider, generic git, or inline raw content).
 * Unknown source types are silently ignored.
 */
export const cloneCompose = async (compose: Compose) => {
	switch (compose.sourceType) {
		case "github":
			await cloneRawGithubRepository(compose);
			break;
		case "gitlab":
			await cloneRawGitlabRepository(compose);
			break;
		case "bitbucket":
			await cloneRawBitbucketRepository(compose);
			break;
		case "git":
			await cloneGitRawRepository(compose);
			break;
		case "raw":
			await createComposeFileRaw(compose);
			break;
	}
};
/**
 * Materialize the compose file on a remote server from its configured
 * source. Unknown source types are silently ignored.
 */
export const cloneComposeRemote = async (compose: Compose) => {
	switch (compose.sourceType) {
		case "github":
			await cloneRawGithubRepositoryRemote(compose);
			break;
		case "gitlab":
			await cloneRawGitlabRepositoryRemote(compose);
			break;
		case "bitbucket":
			await cloneRawBitbucketRepositoryRemote(compose);
			break;
		case "git":
			await cloneRawGitRepositoryRemote(compose);
			break;
		case "raw":
			await createComposeFileRawRemote(compose);
			break;
	}
};
export const getComposePath = (compose: Compose) => {
const { COMPOSE_PATH } = paths(!!compose.serverId);
const { appName, sourceType, composePath } = compose;
let path = "";
if (sourceType === "raw") {
path = "docker-compose.yml";
} else {
path = composePath;
}
return join(COMPOSE_PATH, appName, "code", path);
};
/**
 * Read and parse the local compose file; null when it does not exist.
 */
export const loadDockerCompose = async (
	compose: Compose,
): Promise<ComposeSpecification | null> => {
	const composeFilePath = getComposePath(compose);
	if (!existsSync(composeFilePath)) {
		return null;
	}
	const yamlStr = readFileSync(composeFilePath, "utf8");
	return load(yamlStr) as ComposeSpecification;
};
/**
 * Read and parse the compose file from the remote server over SSH.
 * Returns null when the compose has no server, the remote read fails or
 * writes to stderr, or produces no output.
 */
export const loadDockerComposeRemote = async (
	compose: Compose,
): Promise<ComposeSpecification | null> => {
	const composeFilePath = getComposePath(compose);
	try {
		if (!compose.serverId) {
			return null;
		}
		const { stdout, stderr } = await execAsyncRemote(
			compose.serverId,
			`cat ${composeFilePath}`,
		);
		if (stderr || !stdout) {
			return null;
		}
		return load(stdout) as ComposeSpecification;
	} catch {
		return null;
	}
};
/**
 * Raw text of the local compose file, or null when it does not exist.
 */
export const readComposeFile = async (compose: Compose) => {
	const composeFilePath = getComposePath(compose);
	return existsSync(composeFilePath)
		? readFileSync(composeFilePath, "utf8")
		: null;
};
/**
 * Inject Traefik routing labels for `domains` into the compose file on
 * disk. No-op when there are no domains.
 *
 * Fix: the previous `try { await writeFile(...) } catch (error) { throw
 * error; }` wrapper was a no-op (catch-and-rethrow) and has been removed;
 * errors from label generation or the write still propagate unchanged.
 *
 * @throws whatever addDomainToCompose or the file write throws
 */
export const writeDomainsToCompose = async (
	compose: Compose,
	domains: Domain[],
) => {
	if (!domains.length) {
		return;
	}
	const composeConverted = await addDomainToCompose(compose, domains);
	const composeString = dump(composeConverted, { lineWidth: 1000 });
	await writeFile(getComposePath(compose), composeString, "utf8");
};
/**
 * Build the shell snippet that writes the domain-augmented compose file on
 * a remote server. Returns "" when there are no domains, an error snippet
 * (echo + exit 1) when the compose file cannot be produced or an error is
 * thrown, and otherwise a base64-decode write command.
 *
 * NOTE(review): when `compose.serverId` is unset the function falls
 * through and implicitly returns undefined rather than a string — confirm
 * callers treat that the same as "".
 */
export const writeDomainsToComposeRemote = async (
	compose: Compose,
	domains: Domain[],
	logPath: string,
) => {
	if (!domains.length) {
		return "";
	}
	try {
		const composeConverted = await addDomainToCompose(compose, domains);
		const path = getComposePath(compose);
		if (!composeConverted) {
			return `
echo "❌ Error: Compose file not found" >> ${logPath};
exit 1;
`;
		}
		if (compose.serverId) {
			const composeString = dump(composeConverted, { lineWidth: 1000 });
			// Base64-encode so arbitrary YAML survives shell quoting.
			const encodedContent = encodeBase64(composeString);
			return `echo "${encodedContent}" | base64 -d > "${path}";`;
		}
	} catch (error) {
		// @ts-ignore
		return `echo "❌ Has occured an error: ${error?.message || error}" >> ${logPath};
exit 1;
`;
	}
};
// NOTE(review): a MaxListenersExceededWarning ("11 SIGTERM listeners added
// to [process]") has been observed around this code path — presumably from
// repeated process/exec usage; TODO investigate.
/**
 * Load the compose spec (locally or from the remote server), optionally
 * randomize resource names, and attach Traefik routing labels plus the
 * shared dokploy-network for each domain.
 *
 * @returns the modified spec, or null when the compose file cannot be
 *   loaded or no domains were given
 * @throws Error when a domain has no service name or references a service
 *   missing from the compose file
 */
export const addDomainToCompose = async (
	compose: Compose,
	domains: Domain[],
) => {
	const { appName } = compose;
	let result: ComposeSpecification | null;
	if (compose.serverId) {
		result = await loadDockerComposeRemote(compose); // fetch the compose file from the remote server
	} else {
		result = await loadDockerCompose(compose);
	}
	if (!result || domains.length === 0) {
		return null;
	}
	// Apply the per-deployment suffix so resource names do not collide.
	if (compose.randomize) {
		const randomized = randomizeSpecificationFile(result, compose.suffix);
		result = randomized;
	}
	for (const domain of domains) {
		const { serviceName, https } = domain;
		if (!serviceName) {
			throw new Error("Service name not found");
		}
		if (!result?.services?.[serviceName]) {
			throw new Error(`The service ${serviceName} not found in the compose`);
		}
		if (!result.services[serviceName].labels) {
			result.services[serviceName].labels = [];
		}
		// Always route the plain-HTTP entrypoint; add HTTPS labels on top.
		const httpLabels = await createDomainLabels(appName, domain, "web");
		if (https) {
			const httpsLabels = await createDomainLabels(
				appName,
				domain,
				"websecure",
			);
			httpLabels.push(...httpsLabels);
		}
		const labels = result.services[serviceName].labels;
		if (Array.isArray(labels)) {
			if (!labels.includes("traefik.enable=true")) {
				labels.push("traefik.enable=true");
			}
			labels.push(...httpLabels);
		}
		// Add the dokploy-network to the service
		result.services[serviceName].networks = addDokployNetworkToService(
			result.services[serviceName].networks,
		);
	}
	// Add dokploy-network to the root of the compose file
	result.networks = addDokployNetworkToRoot(result.networks);
	return result;
};
/**
 * Serialize `composeSpec` to YAML and write it over the compose file.
 * Errors are logged and swallowed; on failure the file may be unchanged.
 */
export const writeComposeFile = async (
	compose: Compose,
	composeSpec: ComposeSpecification,
) => {
	const composeFilePath = getComposePath(compose);
	try {
		const yamlContent = dump(composeSpec, { lineWidth: 1000 });
		fs.writeFileSync(composeFilePath, yamlContent, "utf8");
	} catch (e) {
		console.error("Error saving the YAML config file:", e);
	}
};
/**
 * Build the Traefik labels that route `domain` to this app on the given
 * entrypoint. For "web" with https enabled a redirect middleware is added;
 * for "websecure" with Let's Encrypt, the cert resolver is added.
 */
export const createDomainLabels = async (
	appName: string,
	domain: Domain,
	entrypoint: "web" | "websecure",
) => {
	const { host, port, https, uniqueConfigKey, certificateType } = domain;
	const routerName = `${appName}-${uniqueConfigKey}-${entrypoint}`;
	const routerPrefix = `traefik.http.routers.${routerName}`;
	const labels = [
		`${routerPrefix}.rule=Host(\`${host}\`)`,
		`${routerPrefix}.entrypoints=${entrypoint}`,
		`traefik.http.services.${routerName}.loadbalancer.server.port=${port}`,
		`${routerPrefix}.service=${routerName}`,
	];
	if (https && entrypoint === "web") {
		labels.push(`${routerPrefix}.middlewares=redirect-to-https@file`);
	}
	if (entrypoint === "websecure" && certificateType === "letsencrypt") {
		labels.push(`${routerPrefix}.tls.certresolver=letsencrypt`);
	}
	return labels;
};
/**
 * Ensure "dokploy-network" is present in a service's network list, in
 * either the array or the object form. Mutates and returns the given
 * container; a missing input yields a fresh single-entry array.
 */
export const addDokployNetworkToService = (
	networkService: DefinitionsService["networks"],
) => {
	const network = "dokploy-network";
	const networks = networkService ?? [];
	if (Array.isArray(networks)) {
		if (!networks.includes(network)) {
			networks.push(network);
		}
	} else if (typeof networks === "object" && !(network in networks)) {
		networks[network] = {};
	}
	return networks;
};
/**
 * Ensure the root `networks` section declares "dokploy-network" as an
 * external network, overwriting any previous definition of that key.
 * Mutates and returns the given map; a missing input yields a fresh map.
 *
 * Fix: the previous guard `networks[network] || !networks[network]` was a
 * tautology (always true); the assignment is now unconditional, which is
 * exactly what the old code always did at runtime.
 */
export const addDokployNetworkToRoot = (
	networkRoot: PropertiesNetworks | undefined,
) => {
	const networks = networkRoot ?? {};
	networks["dokploy-network"] = {
		external: true,
	};
	return networks;
};

View File

@@ -1,879 +0,0 @@
// The compose-spec types in this file appear to be generated from the
// Compose Specification JSON schema (note the `patternProperty` JSDoc
// markers) — presumably via json-schema-to-typescript. Prefer regenerating
// over hand-editing. TODO confirm the generator.

/** `include` entry: a path, or an object pointing at paths/env files. */
export type DefinitionsInclude =
	| string
	| {
			path?: StringOrList;
			env_file?: StringOrList;
			project_directory?: string;
	  };
export type StringOrList = string | ListOfStrings;
export type ListOfStrings = string[];
/** `develop` section: file-watch rules (ignore/path/action/target). */
export type DefinitionsDevelopment = {
	watch?: {
		ignore?: string[];
		path: string;
		action: "rebuild" | "sync" | "sync+restart";
		target?: string;
		[k: string]: unknown;
	}[];
	[k: string]: unknown;
} & Development;
// Nullable variant of the `develop` section.
export type Development = {
	watch?: {
		ignore?: string[];
		path: string;
		action: "rebuild" | "sync" | "sync+restart";
		target?: string;
		[k: string]: unknown;
	}[];
	[k: string]: unknown;
} | null;
// Generated-looking compose-spec schema types (see the `patternProperty`
// JSDoc markers); prefer regenerating over hand-editing.

/** Swarm `deploy` section: replicas, update/rollback policy, resources, placement. */
export type DefinitionsDeployment = {
	mode?: string;
	endpoint_mode?: string;
	replicas?: number;
	labels?: ListOrDict;
	rollback_config?: {
		parallelism?: number;
		delay?: string;
		failure_action?: string;
		monitor?: string;
		max_failure_ratio?: number;
		order?: "start-first" | "stop-first";
		/**
		 * This interface was referenced by `undefined`'s JSON-Schema definition
		 * via the `patternProperty` "^x-".
		 */
		[k: string]: unknown;
	};
	update_config?: {
		parallelism?: number;
		delay?: string;
		failure_action?: string;
		monitor?: string;
		max_failure_ratio?: number;
		order?: "start-first" | "stop-first";
		/**
		 * This interface was referenced by `undefined`'s JSON-Schema definition
		 * via the `patternProperty` "^x-".
		 */
		[k: string]: unknown;
	};
	resources?: {
		limits?: {
			cpus?: number | string;
			memory?: string;
			pids?: number;
			/**
			 * This interface was referenced by `undefined`'s JSON-Schema definition
			 * via the `patternProperty` "^x-".
			 */
			[k: string]: unknown;
		};
		reservations?: {
			cpus?: number | string;
			memory?: string;
			generic_resources?: DefinitionsGenericResources;
			devices?: DefinitionsDevices;
			/**
			 * This interface was referenced by `undefined`'s JSON-Schema definition
			 * via the `patternProperty` "^x-".
			 */
			[k: string]: unknown;
		};
		/**
		 * This interface was referenced by `undefined`'s JSON-Schema definition
		 * via the `patternProperty` "^x-".
		 */
		[k: string]: unknown;
	};
	restart_policy?: {
		condition?: string;
		delay?: string;
		max_attempts?: number;
		window?: string;
		/**
		 * This interface was referenced by `undefined`'s JSON-Schema definition
		 * via the `patternProperty` "^x-".
		 */
		[k: string]: unknown;
	};
	placement?: {
		constraints?: string[];
		preferences?: {
			spread?: string;
			/**
			 * This interface was referenced by `undefined`'s JSON-Schema definition
			 * via the `patternProperty` "^x-".
			 */
			[k: string]: unknown;
		}[];
		max_replicas_per_node?: number;
		/**
		 * This interface was referenced by `undefined`'s JSON-Schema definition
		 * via the `patternProperty` "^x-".
		 */
		[k: string]: unknown;
	};
	/**
	 * This interface was referenced by `undefined`'s JSON-Schema definition
	 * via the `patternProperty` "^x-".
	 *
	 * This interface was referenced by `undefined`'s JSON-Schema definition
	 * via the `patternProperty` "^x-".
	 */
	[k: string]: unknown;
} & Deployment;
/** Key/value map or list of "KEY=value" strings (labels, env, ...). */
export type ListOrDict =
	| {
			/**
			 * This interface was referenced by `undefined`'s JSON-Schema definition
			 * via the `patternProperty` ".+".
			 */
			[k: string]: string | number | boolean | null;
	  }
	| string[];
/** Generic resource reservations (kind/value pairs). */
export type DefinitionsGenericResources = {
	discrete_resource_spec?: {
		kind?: string;
		value?: number;
		/**
		 * This interface was referenced by `undefined`'s JSON-Schema definition
		 * via the `patternProperty` "^x-".
		 */
		[k: string]: unknown;
	};
	/**
	 * This interface was referenced by `undefined`'s JSON-Schema definition
	 * via the `patternProperty` "^x-".
	 */
	[k: string]: unknown;
}[];
/** Device reservations (capabilities, ids, driver, options). */
export type DefinitionsDevices = {
	capabilities?: ListOfStrings;
	count?: string | number;
	device_ids?: ListOfStrings;
	driver?: string;
	options?: ListOrDict;
	/**
	 * This interface was referenced by `undefined`'s JSON-Schema definition
	 * via the `patternProperty` "^x-".
	 */
	[k: string]: unknown;
}[];
// Nullable variant of the Swarm `deploy` section; body mirrors
// DefinitionsDeployment (generated-looking schema types — prefer
// regenerating over hand-editing).
export type Deployment = {
	mode?: string;
	endpoint_mode?: string;
	replicas?: number;
	labels?: ListOrDict;
	rollback_config?: {
		parallelism?: number;
		delay?: string;
		failure_action?: string;
		monitor?: string;
		max_failure_ratio?: number;
		order?: "start-first" | "stop-first";
		/**
		 * This interface was referenced by `undefined`'s JSON-Schema definition
		 * via the `patternProperty` "^x-".
		 */
		[k: string]: unknown;
	};
	update_config?: {
		parallelism?: number;
		delay?: string;
		failure_action?: string;
		monitor?: string;
		max_failure_ratio?: number;
		order?: "start-first" | "stop-first";
		/**
		 * This interface was referenced by `undefined`'s JSON-Schema definition
		 * via the `patternProperty` "^x-".
		 */
		[k: string]: unknown;
	};
	resources?: {
		limits?: {
			cpus?: number | string;
			memory?: string;
			pids?: number;
			/**
			 * This interface was referenced by `undefined`'s JSON-Schema definition
			 * via the `patternProperty` "^x-".
			 */
			[k: string]: unknown;
		};
		reservations?: {
			cpus?: number | string;
			memory?: string;
			generic_resources?: DefinitionsGenericResources;
			devices?: DefinitionsDevices;
			/**
			 * This interface was referenced by `undefined`'s JSON-Schema definition
			 * via the `patternProperty` "^x-".
			 */
			[k: string]: unknown;
		};
		/**
		 * This interface was referenced by `undefined`'s JSON-Schema definition
		 * via the `patternProperty` "^x-".
		 */
		[k: string]: unknown;
	};
	restart_policy?: {
		condition?: string;
		delay?: string;
		max_attempts?: number;
		window?: string;
		/**
		 * This interface was referenced by `undefined`'s JSON-Schema definition
		 * via the `patternProperty` "^x-".
		 */
		[k: string]: unknown;
	};
	placement?: {
		constraints?: string[];
		preferences?: {
			spread?: string;
			/**
			 * This interface was referenced by `undefined`'s JSON-Schema definition
			 * via the `patternProperty` "^x-".
			 */
			[k: string]: unknown;
		}[];
		max_replicas_per_node?: number;
		/**
		 * This interface was referenced by `undefined`'s JSON-Schema definition
		 * via the `patternProperty` "^x-".
		 */
		[k: string]: unknown;
	};
	/**
	 * This interface was referenced by `undefined`'s JSON-Schema definition
	 * via the `patternProperty` "^x-".
	 *
	 * This interface was referenced by `undefined`'s JSON-Schema definition
	 * via the `patternProperty` "^x-".
	 */
	[k: string]: unknown;
} | null;
/** Service-level config/secret reference: short string or long `source` form. */
export type ServiceConfigOrSecret = (
	| string
	| {
			source?: string;
			target?: string;
			uid?: string;
			gid?: string;
			mode?: number;
			/**
			 * This interface was referenced by `undefined`'s JSON-Schema definition
			 * via the `patternProperty` "^x-".
			 */
			[k: string]: unknown;
	  }
)[];
/** `command`/`entrypoint`: shell string, argv array, or null to unset. */
export type Command = null | string | string[];
/** `env_file`: a path, or a list of paths / { path, required } objects. */
export type EnvFile =
	| string
	| (
			| string
			| {
					path: string;
					required?: boolean;
			  }
	  )[];
/**
* This interface was referenced by `PropertiesNetworks`'s JSON-Schema definition
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
*/
export type DefinitionsNetwork = {
name?: string;
driver?: string;
driver_opts?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^.+$".
*/
[k: string]: string | number;
};
ipam?: {
driver?: string;
config?: {
subnet?: string;
ip_range?: string;
gateway?: string;
aux_addresses?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^.+$".
*/
[k: string]: string;
};
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}[];
options?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^.+$".
*/
[k: string]: string;
};
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
external?:
| boolean
| {
name?: string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
internal?: boolean;
enable_ipv6?: boolean;
attachable?: boolean;
labels?: ListOrDict;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
} & Network;
/** Base network shape; `null` permits bare `netname:` entries in YAML. */
export type Network = {
  name?: string;
  driver?: string;
  driver_opts?: {
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^.+$".
     */
    [k: string]: string | number;
  };
  ipam?: {
    driver?: string;
    config?: {
      subnet?: string;
      ip_range?: string;
      gateway?: string;
      aux_addresses?: {
        /**
         * This interface was referenced by `undefined`'s JSON-Schema definition
         * via the `patternProperty` "^.+$".
         */
        [k: string]: string;
      };
      /**
       * This interface was referenced by `undefined`'s JSON-Schema definition
       * via the `patternProperty` "^x-".
       */
      [k: string]: unknown;
    }[];
    options?: {
      /**
       * This interface was referenced by `undefined`'s JSON-Schema definition
       * via the `patternProperty` "^.+$".
       */
      [k: string]: string;
    };
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^x-".
     */
    [k: string]: unknown;
  };
  external?:
    | boolean
    | {
        name?: string;
        /**
         * This interface was referenced by `undefined`'s JSON-Schema definition
         * via the `patternProperty` "^x-".
         */
        [k: string]: unknown;
      };
  internal?: boolean;
  enable_ipv6?: boolean;
  attachable?: boolean;
  labels?: ListOrDict;
  /**
   * This interface was referenced by `undefined`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   *
   * This interface was referenced by `undefined`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   */
  [k: string]: unknown;
} | null;
/**
 * A top-level named volume definition (an entry under `volumes`);
 * intersected with `Volume` below.
 *
 * This interface was referenced by `PropertiesVolumes`'s JSON-Schema definition
 * via the `patternProperty` "^[a-zA-Z0-9._-]+$".
 */
export type DefinitionsVolume = {
  name?: string;
  driver?: string;
  driver_opts?: {
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^.+$".
     */
    [k: string]: string | number;
  };
  external?:
    | boolean
    | {
        name?: string;
        /**
         * This interface was referenced by `undefined`'s JSON-Schema definition
         * via the `patternProperty` "^x-".
         */
        [k: string]: unknown;
      };
  labels?: ListOrDict;
  /**
   * This interface was referenced by `undefined`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   *
   * This interface was referenced by `undefined`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   */
  [k: string]: unknown;
} & Volume;
/** Base volume shape; `null` permits bare `volname:` entries in YAML. */
export type Volume = {
  name?: string;
  driver?: string;
  driver_opts?: {
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^.+$".
     */
    [k: string]: string | number;
  };
  external?:
    | boolean
    | {
        name?: string;
        /**
         * This interface was referenced by `undefined`'s JSON-Schema definition
         * via the `patternProperty` "^x-".
         */
        [k: string]: unknown;
      };
  labels?: ListOrDict;
  /**
   * This interface was referenced by `undefined`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   *
   * This interface was referenced by `undefined`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   */
  [k: string]: unknown;
} | null;
/**
 * The Compose file is a YAML file defining a multi-container application.
 * Root document shape for `docker-compose.yml` / `compose.yml`.
 */
export interface ComposeSpecification {
  /**
   * declared for backward compatibility, ignored.
   */
  version?: string;
  /**
   * define the Compose project name, until user defines one explicitly.
   */
  name?: string;
  /**
   * compose sub-projects to be included.
   */
  include?: DefinitionsInclude[];
  services?: PropertiesServices;
  networks?: PropertiesNetworks;
  volumes?: PropertiesVolumes;
  secrets?: PropertiesSecrets;
  configs?: PropertiesConfigs;
  /**
   * This interface was referenced by `ComposeSpecification`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   */
  [k: string]: unknown;
}
/** Map of service name → service definition (top-level `services`). */
export interface PropertiesServices {
  [k: string]: DefinitionsService;
}
/**
 * A single service under the top-level `services` key.
 *
 * This interface was referenced by `PropertiesServices`'s JSON-Schema definition
 * via the `patternProperty` "^[a-zA-Z0-9._-]+$".
 */
export interface DefinitionsService {
  develop?: DefinitionsDevelopment;
  deploy?: DefinitionsDeployment;
  annotations?: ListOrDict;
  attach?: boolean;
  build?:
    | string
    | {
        context?: string;
        dockerfile?: string;
        dockerfile_inline?: string;
        entitlements?: string[];
        args?: ListOrDict;
        ssh?: ListOrDict;
        labels?: ListOrDict;
        cache_from?: string[];
        cache_to?: string[];
        no_cache?: boolean;
        additional_contexts?: ListOrDict;
        network?: string;
        pull?: boolean;
        target?: string;
        shm_size?: number | string;
        extra_hosts?: ListOrDict;
        isolation?: string;
        privileged?: boolean;
        secrets?: ServiceConfigOrSecret;
        tags?: string[];
        ulimits?: Ulimits;
        platforms?: string[];
        /**
         * This interface was referenced by `undefined`'s JSON-Schema definition
         * via the `patternProperty` "^x-".
         */
        [k: string]: unknown;
      };
  blkio_config?: {
    device_read_bps?: BlkioLimit[];
    device_read_iops?: BlkioLimit[];
    device_write_bps?: BlkioLimit[];
    device_write_iops?: BlkioLimit[];
    weight?: number;
    weight_device?: BlkioWeight[];
  };
  cap_add?: string[];
  cap_drop?: string[];
  cgroup?: "host" | "private";
  cgroup_parent?: string;
  command?: Command;
  configs?: ServiceConfigOrSecret;
  container_name?: string;
  cpu_count?: number;
  cpu_percent?: number;
  cpu_shares?: number | string;
  cpu_quota?: number | string;
  cpu_period?: number | string;
  cpu_rt_period?: number | string;
  cpu_rt_runtime?: number | string;
  cpus?: number | string;
  cpuset?: string;
  credential_spec?: {
    config?: string;
    file?: string;
    registry?: string;
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^x-".
     */
    [k: string]: unknown;
  };
  depends_on?:
    | ListOfStrings
    | {
        /**
         * This interface was referenced by `undefined`'s JSON-Schema definition
         * via the `patternProperty` "^[a-zA-Z0-9._-]+$".
         */
        [k: string]: {
          restart?: boolean;
          required?: boolean;
          condition:
            | "service_started"
            | "service_healthy"
            | "service_completed_successfully";
        };
      };
  device_cgroup_rules?: ListOfStrings;
  devices?: string[];
  dns?: StringOrList;
  dns_opt?: string[];
  dns_search?: StringOrList;
  domainname?: string;
  entrypoint?: Command;
  env_file?: EnvFile;
  environment?: ListOrDict;
  expose?: (string | number)[];
  extends?:
    | string
    | {
        service: string;
        file?: string;
      };
  external_links?: string[];
  extra_hosts?: ListOrDict;
  group_add?: (string | number)[];
  healthcheck?: DefinitionsHealthcheck;
  hostname?: string;
  image?: string;
  init?: boolean;
  ipc?: string;
  isolation?: string;
  labels?: ListOrDict;
  links?: string[];
  logging?: {
    driver?: string;
    options?: {
      /**
       * This interface was referenced by `undefined`'s JSON-Schema definition
       * via the `patternProperty` "^.+$".
       */
      [k: string]: string | number | null;
    };
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^x-".
     */
    [k: string]: unknown;
  };
  mac_address?: string;
  mem_limit?: number | string;
  mem_reservation?: string | number;
  mem_swappiness?: number;
  memswap_limit?: number | string;
  network_mode?: string;
  networks?:
    | ListOfStrings
    | {
        /**
         * This interface was referenced by `undefined`'s JSON-Schema definition
         * via the `patternProperty` "^[a-zA-Z0-9._-]+$".
         */
        [k: string]: {
          aliases?: ListOfStrings;
          ipv4_address?: string;
          ipv6_address?: string;
          link_local_ips?: ListOfStrings;
          mac_address?: string;
          driver_opts?: {
            /**
             * This interface was referenced by `undefined`'s JSON-Schema definition
             * via the `patternProperty` "^.+$".
             */
            [k: string]: string | number;
          };
          priority?: number;
          /**
           * This interface was referenced by `undefined`'s JSON-Schema definition
           * via the `patternProperty` "^x-".
           */
          [k: string]: unknown;
        } | null;
      };
  oom_kill_disable?: boolean;
  oom_score_adj?: number;
  pid?: string | null;
  pids_limit?: number | string;
  platform?: string;
  ports?: (
    | number
    | string
    | {
        name?: string;
        mode?: string;
        host_ip?: string;
        target?: number;
        published?: string | number;
        protocol?: string;
        app_protocol?: string;
        /**
         * This interface was referenced by `undefined`'s JSON-Schema definition
         * via the `patternProperty` "^x-".
         */
        [k: string]: unknown;
      }
  )[];
  privileged?: boolean;
  profiles?: ListOfStrings;
  pull_policy?: "always" | "never" | "if_not_present" | "build" | "missing";
  read_only?: boolean;
  restart?: string;
  runtime?: string;
  scale?: number;
  security_opt?: string[];
  shm_size?: number | string;
  secrets?: ServiceConfigOrSecret;
  sysctls?: ListOrDict;
  stdin_open?: boolean;
  stop_grace_period?: string;
  stop_signal?: string;
  storage_opt?: {
    [k: string]: unknown;
  };
  tmpfs?: StringOrList;
  tty?: boolean;
  ulimits?: Ulimits;
  user?: string;
  uts?: string;
  userns_mode?: string;
  volumes?: (
    | string
    | {
        type: string;
        source?: string;
        target?: string;
        read_only?: boolean;
        consistency?: string;
        bind?: {
          propagation?: string;
          create_host_path?: boolean;
          selinux?: "z" | "Z";
          /**
           * This interface was referenced by `undefined`'s JSON-Schema definition
           * via the `patternProperty` "^x-".
           */
          [k: string]: unknown;
        };
        volume?: {
          nocopy?: boolean;
          subpath?: string;
          /**
           * This interface was referenced by `undefined`'s JSON-Schema definition
           * via the `patternProperty` "^x-".
           */
          [k: string]: unknown;
        };
        tmpfs?: {
          size?: number | string;
          mode?: number;
          /**
           * This interface was referenced by `undefined`'s JSON-Schema definition
           * via the `patternProperty` "^x-".
           */
          [k: string]: unknown;
        };
        /**
         * This interface was referenced by `undefined`'s JSON-Schema definition
         * via the `patternProperty` "^x-".
         */
        [k: string]: unknown;
      }
  )[];
  volumes_from?: string[];
  working_dir?: string;
  /**
   * This interface was referenced by `DefinitionsService`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   */
  [k: string]: unknown;
}
/** `ulimits` map: limit name → flat value or `{ hard, soft }` pair. */
export interface Ulimits {
  /**
   * This interface was referenced by `Ulimits`'s JSON-Schema definition
   * via the `patternProperty` "^[a-z]+$".
   */
  [k: string]:
    | number
    | {
        hard: number;
        soft: number;
        /**
         * This interface was referenced by `undefined`'s JSON-Schema definition
         * via the `patternProperty` "^x-".
         */
        [k: string]: unknown;
      };
}
/** One `blkio_config` device rate entry (device path plus rate). */
export interface BlkioLimit {
  path?: string;
  rate?: number | string;
}
/** One `blkio_config.weight_device` entry. */
export interface BlkioWeight {
  path?: string;
  weight?: number;
}
/** Service `healthcheck` configuration. */
export interface DefinitionsHealthcheck {
  disable?: boolean;
  interval?: string;
  retries?: number;
  test?: string | string[];
  timeout?: string;
  start_period?: string;
  start_interval?: string;
  /**
   * This interface was referenced by `DefinitionsHealthcheck`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   */
  [k: string]: unknown;
}
/** Map of network name → network definition (top-level `networks`). */
export interface PropertiesNetworks {
  [k: string]: DefinitionsNetwork;
}
/** Map of volume name → volume definition (top-level `volumes`). */
export interface PropertiesVolumes {
  [k: string]: DefinitionsVolume;
}
/** Map of secret name → secret definition (top-level `secrets`). */
export interface PropertiesSecrets {
  [k: string]: DefinitionsSecret;
}
/**
 * A top-level secret definition (an entry under `secrets`).
 *
 * This interface was referenced by `PropertiesSecrets`'s JSON-Schema definition
 * via the `patternProperty` "^[a-zA-Z0-9._-]+$".
 */
export interface DefinitionsSecret {
  name?: string;
  environment?: string;
  file?: string;
  external?:
    | boolean
    | {
        name?: string;
        [k: string]: unknown;
      };
  labels?: ListOrDict;
  driver?: string;
  driver_opts?: {
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^.+$".
     */
    [k: string]: string | number;
  };
  template_driver?: string;
  /**
   * This interface was referenced by `DefinitionsSecret`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   */
  [k: string]: unknown;
}
/** Map of config name → config definition (top-level `configs`). */
export interface PropertiesConfigs {
  [k: string]: DefinitionsConfig;
}
/**
 * A top-level config definition (an entry under `configs`).
 *
 * This interface was referenced by `PropertiesConfigs`'s JSON-Schema definition
 * via the `patternProperty` "^[a-zA-Z0-9._-]+$".
 */
export interface DefinitionsConfig {
  name?: string;
  content?: string;
  environment?: string;
  file?: string;
  external?:
    | boolean
    | {
        name?: string;
        [k: string]: unknown;
      };
  labels?: ListOrDict;
  template_driver?: string;
  /**
   * This interface was referenced by `DefinitionsConfig`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   */
  [k: string]: unknown;
}

View File

@@ -1,525 +0,0 @@
import fs from "node:fs";
import path from "node:path";
import type { Readable } from "node:stream";
import { docker, paths } from "@/server/constants";
import type { ContainerInfo, ResourceRequirements } from "dockerode";
import { parse } from "dotenv";
import type { ApplicationNested } from "../builders";
import type { MariadbNested } from "../databases/mariadb";
import type { MongoNested } from "../databases/mongo";
import type { MysqlNested } from "../databases/mysql";
import type { PostgresNested } from "../databases/postgres";
import type { RedisNested } from "../databases/redis";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getRemoteDocker } from "../servers/remote-docker";
/** Credentials passed to dockerode's `authconfig` when pulling from a private registry. */
interface RegistryAuth {
	username: string;
	password: string;
	serveraddress: string;
}
/**
 * Pull a Docker image on the local daemon, streaming progress events.
 *
 * @param dockerImage - image reference (e.g. "nginx:latest"); must be non-empty
 * @param onData - optional callback invoked with each progress event
 * @param authConfig - optional registry credentials for private images
 * @throws if the image name is empty or the pull fails
 */
export const pullImage = async (
	dockerImage: string,
	onData?: (data: any) => void,
	authConfig?: Partial<RegistryAuth>,
): Promise<void> => {
	// The previous try/catch only rethrew the error, so it was removed.
	if (!dockerImage) {
		throw new Error("Docker image not found");
	}
	// Wrap dockerode's callback API in a promise and forward progress events.
	await new Promise((resolve, reject) => {
		docker.pull(dockerImage, { authconfig: authConfig }, (err, stream) => {
			if (err) {
				reject(err);
				return;
			}
			docker.modem.followProgress(
				stream as Readable,
				(err: Error | null, res) => {
					if (err) {
						reject(err);
					} else {
						resolve(res);
					}
				},
				(event) => {
					onData?.(event);
				},
			);
		});
	});
};
/**
 * Pull a Docker image on a remote server's daemon (over SSH), streaming
 * progress events.
 *
 * @param dockerImage - image reference; must be non-empty
 * @param serverId - target server whose docker daemon performs the pull
 * @param onData - optional callback invoked with each progress event
 * @param authConfig - optional registry credentials for private images
 * @throws if the image name is empty or the pull fails
 */
export const pullRemoteImage = async (
	dockerImage: string,
	serverId: string,
	onData?: (data: any) => void,
	authConfig?: Partial<RegistryAuth>,
): Promise<void> => {
	// The previous try/catch only rethrew the error, so it was removed.
	if (!dockerImage) {
		throw new Error("Docker image not found");
	}
	const remoteDocker = await getRemoteDocker(serverId);
	await new Promise((resolve, reject) => {
		remoteDocker.pull(
			dockerImage,
			{ authconfig: authConfig },
			(err, stream) => {
				if (err) {
					reject(err);
					return;
				}
				remoteDocker.modem.followProgress(
					stream as Readable,
					(err: Error | null, res) => {
						if (err) {
							reject(err);
						} else {
							resolve(res);
						}
					},
					(event) => {
						onData?.(event);
					},
				);
			},
		);
	});
};
/** Check whether a container with the given name exists on the local daemon. */
export const containerExists = async (containerName: string) => {
	try {
		await docker.getContainer(containerName).inspect();
		return true;
	} catch {
		// inspect() rejects when the container is unknown
		return false;
	}
};
/**
 * Scale a swarm service down to zero replicas.
 * NOTE: on failure the error is logged and *returned*, not thrown.
 */
export const stopService = async (appName: string) => {
	const command = `docker service scale ${appName}=0 `;
	try {
		await execAsync(command);
	} catch (error) {
		console.error(error);
		return error;
	}
};
/**
 * Scale a swarm service down to zero replicas on a remote server.
 * NOTE: on failure the error is logged and *returned*, not thrown.
 */
export const stopServiceRemote = async (serverId: string, appName: string) => {
	const command = `docker service scale ${appName}=0 `;
	try {
		await execAsyncRemote(serverId, command);
	} catch (error) {
		console.error(error);
		return error;
	}
};
/**
 * Look up the first container matching `name` on the local daemon.
 *
 * @returns the matching container info
 * @throws if listing fails or no container matches
 */
export const getContainerByName = (name: string): Promise<ContainerInfo> => {
	const opts = {
		limit: 1,
		filters: {
			name: [name],
		},
	};
	return new Promise((resolve, reject) => {
		docker.listContainers(opts, (err, containers) => {
			if (err) {
				reject(err);
				return;
			}
			const container = containers?.[0];
			if (container) {
				resolve(container);
			} else {
				// Previously an undefined result (no err, no containers) left the
				// promise pending forever; always settle it.
				reject(new Error(`No container found with name: ${name}`));
			}
		});
	});
};
/** Prune all unused docker images, locally or on the given remote server. */
export const cleanUpUnusedImages = async (serverId?: string) => {
	const command = "docker image prune --all --force";
	try {
		if (serverId) {
			await execAsyncRemote(serverId, command);
		} else {
			await execAsync(command);
		}
	} catch (error) {
		console.error(error);
		throw error;
	}
};
/** Remove all stopped containers, locally or on the given remote server. */
export const cleanStoppedContainers = async (serverId?: string) => {
	const command = "docker container prune --force";
	try {
		if (serverId) {
			await execAsyncRemote(serverId, command);
		} else {
			await execAsync(command);
		}
	} catch (error) {
		console.error(error);
		throw error;
	}
};
/** Prune all unused docker volumes, locally or on the given remote server. */
export const cleanUpUnusedVolumes = async (serverId?: string) => {
	const command = "docker volume prune --all --force";
	try {
		if (serverId) {
			await execAsyncRemote(serverId, command);
		} else {
			await execAsync(command);
		}
	} catch (error) {
		console.error(error);
		throw error;
	}
};
/** Force-remove every local container that is not currently running. */
export const cleanUpInactiveContainers = async () => {
	try {
		const containers = await docker.listContainers({ all: true });
		for (const container of containers) {
			if (container.State === "running") {
				continue;
			}
			await docker.getContainer(container.Id).remove({ force: true });
			console.log(`Cleaning up inactive container: ${container.Id}`);
		}
	} catch (error) {
		console.error("Error cleaning up inactive containers:", error);
		throw error;
	}
};
/** Prune the docker build cache, locally or on the given remote server. */
export const cleanUpDockerBuilder = async (serverId?: string) => {
	const command = "docker builder prune --all --force";
	await (serverId ? execAsyncRemote(serverId, command) : execAsync(command));
};
/** Run a full `docker system prune` (including volumes), locally or remotely. */
export const cleanUpSystemPrune = async (serverId?: string) => {
	const command = "docker system prune --all --force --volumes";
	await (serverId ? execAsyncRemote(serverId, command) : execAsync(command));
};
/** Scale a swarm service back up to one replica; rethrows on failure. */
export const startService = async (appName: string) => {
	const command = `docker service scale ${appName}=1 `;
	try {
		await execAsync(command);
	} catch (error) {
		console.error(error);
		throw error;
	}
};
/** Scale a swarm service back up to one replica on a remote server; rethrows on failure. */
export const startServiceRemote = async (serverId: string, appName: string) => {
	const command = `docker service scale ${appName}=1 `;
	try {
		await execAsyncRemote(serverId, command);
	} catch (error) {
		console.error(error);
		throw error;
	}
};
/**
 * Remove a swarm service by name, locally or on a remote server.
 * NOTE: failures are swallowed and *returned* as the error value, not thrown.
 */
export const removeService = async (
	appName: string,
	serverId?: string | null,
) => {
	const command = `docker service rm ${appName}`;
	try {
		await (serverId ? execAsyncRemote(serverId, command) : execAsync(command));
	} catch (error) {
		return error;
	}
};
export const prepareEnvironmentVariables = (env: string | null) =>
Object.entries(parse(env ?? "")).map(([key, value]) => `${key}=${value}`);
/**
 * Parse newline-separated `KEY=value` build args into an object.
 *
 * Splits each line on the FIRST "=" only, so values that themselves contain
 * "=" (base64 blobs, URLs with query strings, …) are preserved intact; the
 * previous `split("=")` truncated them at the second "=". Lines without an
 * "=", or with an empty key or empty value, are skipped (as before).
 */
export const prepareBuildArgs = (input: string | null) => {
	const jsonObject: Record<string, string> = {};
	for (const pair of (input ?? "").split("\n")) {
		const separatorIndex = pair.indexOf("=");
		if (separatorIndex === -1) {
			continue;
		}
		const key = pair.slice(0, separatorIndex);
		const value = pair.slice(separatorIndex + 1);
		if (key && value) {
			jsonObject[key] = value;
		}
	}
	return jsonObject;
};
/** Build swarm mount specs for the application's named-volume mounts only. */
export const generateVolumeMounts = (mounts: ApplicationNested["mounts"]) => {
	if (!mounts || mounts.length === 0) {
		return [];
	}
	return mounts.flatMap((mount) =>
		mount.type === "volume"
			? [
					{
						Type: "volume" as const,
						Source: mount.volumeName || "",
						Target: mount.mountPath,
					},
				]
			: [],
	);
};
// Flat resource settings as stored on the service row (MB for memory, CPUs for cpu).
type Resources = {
	memoryLimit: number | null;
	memoryReservation: number | null;
	cpuLimit: number | null;
	cpuReservation: number | null;
};

/**
 * Map flat resource settings to docker swarm `ResourceRequirements`
 * (memory in bytes, CPU in NanoCPUs). Unset values stay `undefined` so
 * docker applies no constraint.
 *
 * Fix: each field is now gated on its OWN setting. Previously every field
 * was gated on `memoryLimit`, so CPU limits/reservations were silently
 * ignored unless a memory limit was also configured (and defaulted to 1 CPU
 * when it was).
 */
export const calculateResources = ({
	memoryLimit,
	memoryReservation,
	cpuLimit,
	cpuReservation,
}: Resources): ResourceRequirements => {
	return {
		Limits: {
			MemoryBytes: memoryLimit ? memoryLimit * 1024 * 1024 : undefined,
			NanoCPUs: cpuLimit ? cpuLimit * 1000 * 1000 * 1000 : undefined,
		},
		Reservations: {
			MemoryBytes: memoryReservation
				? memoryReservation * 1024 * 1024
				: undefined,
			NanoCPUs: cpuReservation
				? cpuReservation * 1000 * 1000 * 1000
				: undefined,
		},
	};
};
/**
 * Translate an application's swarm settings into the service-config fragment
 * consumed by dockerode, filling in a default for each unset option:
 * - Placement: constrain to manager nodes when the app has mounts
 * - Mode: replicated with the app's `replicas` count
 * - UpdateConfig: one task at a time, start-first
 * - Networks: attach to `dokploy-network`
 * HealthCheck / RestartPolicy / Labels / RollbackConfig are emitted only
 * when configured.
 */
export const generateConfigContainer = (application: ApplicationNested) => {
	const {
		healthCheckSwarm,
		restartPolicySwarm,
		placementSwarm,
		updateConfigSwarm,
		rollbackConfigSwarm,
		modeSwarm,
		labelsSwarm,
		replicas,
		mounts,
		networkSwarm,
	} = application;
	const haveMounts = mounts.length > 0;

	// Defaults applied when the corresponding swarm option is unset.
	const defaultPlacement = {
		// if app have mounts keep manager as constraint
		Constraints: haveMounts ? ["node.role==manager"] : [],
	};
	const defaultMode = {
		Replicated: {
			Replicas: replicas,
		},
	};
	const defaultUpdateConfig = {
		Parallelism: 1,
		Order: "start-first",
	};
	const defaultNetworks = [{ Target: "dokploy-network" }];

	return {
		...(healthCheckSwarm && { HealthCheck: healthCheckSwarm }),
		...(restartPolicySwarm ? { RestartPolicy: restartPolicySwarm } : {}),
		Placement: placementSwarm || defaultPlacement,
		...(labelsSwarm && { Labels: labelsSwarm }),
		Mode: modeSwarm || defaultMode,
		...(rollbackConfigSwarm && { RollbackConfig: rollbackConfigSwarm }),
		UpdateConfig: updateConfigSwarm || defaultUpdateConfig,
		Networks: networkSwarm || defaultNetworks,
	};
};
/** Build swarm mount specs for the application's bind mounts only. */
export const generateBindMounts = (mounts: ApplicationNested["mounts"]) => {
	if (!mounts || mounts.length === 0) {
		return [];
	}
	return mounts.flatMap((mount) =>
		mount.type === "bind"
			? [
					{
						Type: "bind" as const,
						Source: mount.hostPath || "",
						Target: mount.mountPath,
					},
				]
			: [],
	);
};
/**
 * Build bind-mount specs for "file" mounts: each mounted file lives under
 * `<APPLICATIONS_PATH>/<appName>/files/` on the host.
 */
export const generateFileMounts = (
	appName: string,
	service:
		| ApplicationNested
		| MongoNested
		| MariadbNested
		| MysqlNested
		| PostgresNested
		| RedisNested,
) => {
	const { mounts } = service;
	const { APPLICATIONS_PATH } = paths(!!service.serverId);
	if (!mounts || mounts.length === 0) {
		return [];
	}
	// Hoisted: the files directory is the same for every mount.
	const filesDirectory = path.join(
		path.resolve(APPLICATIONS_PATH),
		appName,
		"files",
	);
	return mounts
		.filter((mount) => mount.type === "file")
		.map((mount) => ({
			Type: "bind" as const,
			Source: path.join(filesDirectory, mount.filePath || ""),
			Target: mount.mountPath,
		}));
};
/**
 * Create a file (or directory) under `outputPath`.
 *
 * If `filePath` ends with a path separator it is treated as a directory and
 * only the directory tree is created; otherwise parent directories are
 * created as needed and `content` (or "") is written to the file.
 * (The previous try/catch only rethrew the error, so it was removed.)
 */
export const createFile = async (
	outputPath: string,
	filePath: string,
	content: string,
) => {
	const fullPath = path.join(outputPath, filePath);
	if (fullPath.endsWith(path.sep) || filePath.endsWith("/")) {
		fs.mkdirSync(fullPath, { recursive: true });
		return;
	}
	fs.mkdirSync(path.dirname(fullPath), { recursive: true });
	fs.writeFileSync(fullPath, content || "");
};
/** Encode a UTF-8 string as base64. */
export const encodeBase64 = (content: string) => {
	const buffer = Buffer.from(content, "utf-8");
	return buffer.toString("base64");
};
/**
 * Build a shell command that recreates `filePath` under `outputPath` on a
 * remote host: a `mkdir -p` for directory paths (trailing slash), otherwise
 * `mkdir -p` of the parent plus a base64-decoded write of `content` (encoding
 * keeps arbitrary file contents safe from shell quoting).
 */
export const getCreateFileCommand = (
	outputPath: string,
	filePath: string,
	content: string,
) => {
	const fullPath = path.join(outputPath, filePath);
	const isDirectory = fullPath.endsWith(path.sep) || filePath.endsWith("/");
	if (isDirectory) {
		return `mkdir -p ${fullPath};`;
	}
	const directory = path.dirname(fullPath);
	const encodedContent = encodeBase64(content);
	return `
mkdir -p ${directory};
echo "${encodedContent}" | base64 -d > "${fullPath}";
`;
};
/**
 * Find the running container backing the swarm service `appName` on the
 * local daemon.
 *
 * @throws if no running container carries the service label
 */
export const getServiceContainer = async (appName: string) => {
	// The previous try/catch only rethrew the error, so it was removed.
	const filter = {
		status: ["running"],
		label: [`com.docker.swarm.service.name=${appName}`],
	};
	const containers = await docker.listContainers({
		filters: JSON.stringify(filter),
	});
	const container = containers[0];
	if (!container) {
		throw new Error(`No container found with name: ${appName}`);
	}
	return container;
};
/**
 * Find the running container backing the swarm service `appName` on a
 * remote server's daemon.
 *
 * @throws if no running container carries the service label
 */
export const getRemoteServiceContainer = async (
	serverId: string,
	appName: string,
) => {
	// The previous try/catch only rethrew the error, so it was removed.
	const filter = {
		status: ["running"],
		label: [`com.docker.swarm.service.name=${appName}`],
	};
	const remoteDocker = await getRemoteDocker(serverId);
	const containers = await remoteDocker.listContainers({
		filters: JSON.stringify(filter),
	});
	const container = containers[0];
	if (!container) {
		throw new Error(`No container found with name: ${appName}`);
	}
	return container;
};

View File

@@ -1,142 +0,0 @@
import fs, { promises as fsPromises } from "node:fs";
import path from "node:path";
import type { Application } from "@/server/api/services/application";
import { paths } from "@/server/constants";
import { execAsync, execAsyncRemote } from "../process/execAsync";
/**
 * Ensure `pathFolder` exists and is empty: wipe existing content, then
 * recreate the directory. Errors are logged, not thrown (best-effort).
 */
export const recreateDirectory = async (pathFolder: string): Promise<void> => {
	try {
		await removeDirectoryIfExistsContent(pathFolder);
		await fsPromises.mkdir(pathFolder, { recursive: true });
	} catch (error) {
		console.error(`Error recreating directory '${pathFolder}':`, error);
	}
};
/**
 * Remote counterpart of `recreateDirectory`: wipe and recreate a directory
 * on the server over SSH. Errors are logged, not thrown (best-effort).
 */
export const recreateDirectoryRemote = async (
	pathFolder: string,
	serverId: string | null,
): Promise<void> => {
	const command = `rm -rf ${pathFolder}; mkdir -p ${pathFolder}`;
	try {
		await execAsyncRemote(serverId, command);
	} catch (error) {
		console.error(`Error recreating directory '${pathFolder}':`, error);
	}
};
/** Delete `path` recursively, but only when it exists and is non-empty. */
export const removeDirectoryIfExistsContent = async (
	path: string,
): Promise<void> => {
	const hasContent = fs.existsSync(path) && fs.readdirSync(path).length !== 0;
	if (hasContent) {
		await execAsync(`rm -rf ${path}`);
	}
};
/** Recursively delete a file or directory; logs and rethrows on failure. */
export const removeFileOrDirectory = async (path: string) => {
	try {
		await execAsync(`rm -rf ${path}`);
	} catch (error) {
		console.error(`Error to remove ${path}: ${error}`);
		throw error;
	}
};
/** Delete an application's code directory, locally or on a remote server. */
export const removeDirectoryCode = async (
	appName: string,
	serverId?: string | null,
) => {
	const { APPLICATIONS_PATH } = paths(!!serverId);
	const directoryPath = path.join(APPLICATIONS_PATH, appName);
	try {
		const command = `rm -rf ${directoryPath}`;
		await (serverId ? execAsyncRemote(serverId, command) : execAsync(command));
	} catch (error) {
		console.error(`Error to remove ${directoryPath}: ${error}`);
		throw error;
	}
};
/** Delete a compose project's directory, locally or on a remote server. */
export const removeComposeDirectory = async (
	appName: string,
	serverId?: string | null,
) => {
	const { COMPOSE_PATH } = paths(!!serverId);
	const directoryPath = path.join(COMPOSE_PATH, appName);
	try {
		const command = `rm -rf ${directoryPath}`;
		await (serverId ? execAsyncRemote(serverId, command) : execAsync(command));
	} catch (error) {
		console.error(`Error to remove ${directoryPath}: ${error}`);
		throw error;
	}
};
/** Delete an app's monitoring directory, locally or on a remote server. */
export const removeMonitoringDirectory = async (
	appName: string,
	serverId?: string | null,
) => {
	const { MONITORING_PATH } = paths(!!serverId);
	const directoryPath = path.join(MONITORING_PATH, appName);
	try {
		const command = `rm -rf ${directoryPath}`;
		await (serverId ? execAsyncRemote(serverId, command) : execAsync(command));
	} catch (error) {
		console.error(`Error to remove ${directoryPath}: ${error}`);
		throw error;
	}
};
/**
 * Absolute path of the directory (or Dockerfile, for dockerfile builds)
 * where an application's build should run.
 */
export const getBuildAppDirectory = (application: Application) => {
	const { APPLICATIONS_PATH } = paths(!!application.serverId);
	const { appName, buildType, sourceType, customGitBuildPath, dockerfile } =
		application;
	// Each source type stores its build path in a different column.
	const buildPathBySource: Record<string, string | null | undefined> = {
		github: application?.buildPath,
		gitlab: application?.gitlabBuildPath,
		bitbucket: application?.bitbucketBuildPath,
		drop: application?.dropBuildPath,
		git: customGitBuildPath,
	};
	const buildPath = buildPathBySource[sourceType] || "";
	if (buildType === "dockerfile") {
		return path.join(
			APPLICATIONS_PATH,
			appName,
			"code",
			buildPath,
			dockerfile || "",
		);
	}
	return path.join(APPLICATIONS_PATH, appName, "code", buildPath);
};
/**
 * Absolute docker build-context path for an application, or null when no
 * custom context is configured.
 */
export const getDockerContextPath = (application: Application) => {
	const { APPLICATIONS_PATH } = paths(!!application.serverId);
	const { appName, dockerContextPath } = application;
	return dockerContextPath
		? path.join(APPLICATIONS_PATH, appName, "code", dockerContextPath)
		: null;
};

View File

@@ -1,99 +0,0 @@
import * as fs from "node:fs";
import * as path from "node:path";
import { paths } from "@/server/constants";
import { spawnAsync } from "../process/spawnAsync";
/**
 * Read the key pair `<id>_rsa` / `<id>_rsa.pub` from the SSH directory.
 *
 * Ensures the directory itself exists first; `readFileSync` still throws if
 * the key pair is missing. (The previous try/catch only rethrew the error,
 * so it was removed.)
 */
export const readSSHKey = async (id: string) => {
	const { SSH_PATH } = paths();
	if (!fs.existsSync(SSH_PATH)) {
		fs.mkdirSync(SSH_PATH, { recursive: true });
	}
	return {
		privateKey: fs.readFileSync(path.join(SSH_PATH, `${id}_rsa`), {
			encoding: "utf-8",
		}),
		publicKey: fs.readFileSync(path.join(SSH_PATH, `${id}_rsa.pub`), {
			encoding: "utf-8",
		}),
	};
};
/**
 * Persist a key pair as `<id>_rsa` (mode 0600) and `<id>_rsa.pub` in the
 * SSH directory.
 *
 * Fix: the private key was previously written through an un-awaited write
 * stream, so this function could resolve before the key reached disk;
 * `writeFileSync` guarantees the write completes first.
 */
export const saveSSHKey = async (
	id: string,
	publicKey: string,
	privateKey: string,
) => {
	const { SSH_PATH } = paths();
	const privateKeyPath = path.join(SSH_PATH, `${id}_rsa`);
	const publicKeyPath = path.join(SSH_PATH, `${id}_rsa.pub`);
	// 0600: private keys must not be group/world readable (ssh refuses them).
	fs.writeFileSync(privateKeyPath, privateKey, { mode: 0o600 });
	fs.writeFileSync(publicKeyPath, publicKey);
};
/**
 * Generate a fresh SSH key pair with `ssh-keygen` and return its contents.
 *
 * Keys are generated as a temporary "temp" pair in the SSH directory, read
 * back, then deleted. (The previous try/catch only rethrew the error, so it
 * was removed.)
 *
 * @param type - key algorithm; note "-b 4096" below only applies to RSA
 *   (ssh-keygen ignores the bit size for ed25519).
 */
export const generateSSHKey = async (type: "rsa" | "ed25519" = "rsa") => {
	const { SSH_PATH } = paths();
	const applicationDirectory = SSH_PATH;
	if (!fs.existsSync(applicationDirectory)) {
		fs.mkdirSync(applicationDirectory, { recursive: true });
	}
	const keyPath = path.join(applicationDirectory, "temp_rsa");
	// Remove any stale temp key pair left over from a previous run,
	// otherwise ssh-keygen would prompt before overwriting.
	for (const staleFile of [keyPath, `${keyPath}.pub`]) {
		if (fs.existsSync(staleFile)) {
			fs.unlinkSync(staleFile);
		}
	}
	const args = [
		"-t",
		type,
		"-b",
		"4096",
		"-C",
		"dokploy",
		"-m",
		"PEM",
		"-f",
		keyPath,
		"-N",
		"",
	];
	await spawnAsync("ssh-keygen", args);
	const data = await readSSHKey("temp");
	await removeSSHKey("temp");
	return data;
};
/**
 * Delete the key pair `<id>_rsa` / `<id>_rsa.pub` from the SSH directory.
 * Throws if either file cannot be unlinked. (The previous try/catch only
 * rethrew the error, so it was removed.)
 */
export const removeSSHKey = async (id: string) => {
	const { SSH_PATH } = paths();
	await fs.promises.unlink(path.join(SSH_PATH, `${id}_rsa.pub`));
	await fs.promises.unlink(path.join(SSH_PATH, `${id}_rsa`));
};

View File

@@ -1,73 +0,0 @@
import { exec } from "node:child_process";
import util from "node:util";
import { findServerById } from "@dokploy/builders";
import { Client } from "ssh2";
import { readSSHKey } from "../filesystem/ssh";
// Promisified `child_process.exec`; resolves with { stdout, stderr } for local shell commands.
export const execAsync = util.promisify(exec);
/**
 * Run a shell command on a remote server over SSH and capture its output.
 *
 * Resolves with the accumulated { stdout, stderr } on exit code 0, rejects
 * with a descriptive Error otherwise. A null `serverId` is a no-op that
 * resolves with empty output.
 *
 * Fixes: the old un-awaited `sleep(1000)` call (a no-op floating promise)
 * was removed, and `conn.exec`'s error is now `reject`ed instead of thrown —
 * throwing inside the callback escaped the promise entirely.
 *
 * @throws if the server has no SSH key configured, authentication fails,
 *   the connection errors, or the command exits non-zero
 */
export const execAsyncRemote = async (
	serverId: string | null,
	command: string,
): Promise<{ stdout: string; stderr: string }> => {
	if (!serverId) return { stdout: "", stderr: "" };
	const server = await findServerById(serverId);
	if (!server.sshKeyId) throw new Error("No SSH key available for this server");
	const keys = await readSSHKey(server.sshKeyId);
	let stdout = "";
	let stderr = "";
	return new Promise((resolve, reject) => {
		const conn = new Client();
		conn
			.once("ready", () => {
				conn.exec(command, (err, stream) => {
					if (err) {
						conn.end();
						reject(err);
						return;
					}
					stream
						.on("close", (code: number, signal: string) => {
							conn.end();
							if (code === 0) {
								resolve({ stdout, stderr });
							} else {
								reject(
									new Error(
										`Command exited with code ${code}. Stderr: ${stderr}, command: ${command}`,
									),
								);
							}
						})
						.on("data", (data: string) => {
							stdout += data.toString();
						})
						.stderr.on("data", (data) => {
							stderr += data.toString();
						});
				});
			})
			.on("error", (err) => {
				conn.end();
				if (err.level === "client-authentication") {
					reject(
						new Error(
							`Authentication failed: Invalid SSH private key. ❌ Error: ${err.message} ${err.level}`,
						),
					);
				} else {
					reject(new Error(`SSH connection error: ${err.message}`));
				}
			})
			.connect({
				host: server.ipAddress,
				port: server.port,
				username: server.username,
				privateKey: keys.privateKey,
				timeout: 99999,
			});
	});
};
/** Resolve after `ms` milliseconds. */
export const sleep = (ms: number) => {
	return new Promise((resolve) => {
		setTimeout(resolve, ms);
	});
};

View File

@@ -1,58 +0,0 @@
import {
type ChildProcess,
type SpawnOptions,
spawn,
} from "node:child_process";
import BufferList from "bl";
/**
 * Spawn a child process and collect its output.
 *
 * Resolves with the buffered stdout on exit code 0; rejects with an Error
 * carrying `code`, `stdout`, and `stderr` otherwise. The spawned
 * `ChildProcess` is exposed as `promise.child` so callers can interact with
 * it while the command runs.
 *
 * @param command - executable to run
 * @param args - argv passed to the executable
 * @param onData - optional callback receiving stdout/stderr chunks in real time
 * @param options - options forwarded to `child_process.spawn`
 */
export const spawnAsync = (
	command: string,
	args?: string[] | undefined,
	onData?: (data: string) => void,
	options?: SpawnOptions,
): Promise<BufferList> & { child: ChildProcess } => {
	const child = spawn(command, args ?? [], options ?? {});
	// Always allocate fresh buffers; the previous
	// `child.stdout ? new BufferList() : new BufferList()` ternary selected
	// the same value on both branches.
	const stdout = new BufferList();
	const stderr = new BufferList();
	if (child.stdout) {
		child.stdout.on("data", (data) => {
			stdout.append(data);
			if (onData) {
				onData(data.toString());
			}
		});
	}
	if (child.stderr) {
		child.stderr.on("data", (data) => {
			stderr.append(data);
			if (onData) {
				onData(data.toString());
			}
		});
	}
	const promise = new Promise<BufferList>((resolve, reject) => {
		child.on("error", reject);
		child.on("close", (code) => {
			if (code === 0) {
				resolve(stdout);
			} else {
				const err = new Error(`${stderr.toString()}`) as Error & {
					code: number;
					stderr: BufferList;
					stdout: BufferList;
				};
				err.code = code || -1;
				err.stderr = stderr;
				err.stdout = stdout;
				reject(err);
			}
		});
	}) as Promise<BufferList> & { child: ChildProcess };
	promise.child = child;
	return promise;
};

View File

@@ -1,23 +0,0 @@
import { findServerById } from "@dokploy/builders";
import { docker } from "@/server/constants";
import Dockerode from "dockerode";
import { readSSHKey } from "../filesystem/ssh";
export const getRemoteDocker = async (serverId?: string | null) => {
if (!serverId) return docker;
const server = await findServerById(serverId);
if (!server.sshKeyId) return docker;
const keys = await readSSHKey(server.sshKeyId);
const dockerode = new Dockerode({
host: server.ipAddress,
port: server.port,
username: server.username,
protocol: "ssh",
// @ts-ignore
sshOptions: {
privateKey: keys.privateKey,
},
});
return dockerode;
};

View File

@@ -1,250 +0,0 @@
import fs, { writeFileSync } from "node:fs";
import path from "node:path";
import type { Domain } from "@/server/api/services/domain";
import { paths } from "@/server/constants";
import { dump, load } from "js-yaml";
import { encodeBase64 } from "../docker/utils";
import { execAsyncRemote } from "../process/execAsync";
import type { FileConfig, HttpLoadBalancerService } from "./file-types";
/**
 * Write the initial Traefik dynamic-config file for an application.
 *
 * Outside production a default router/service pair is generated mapping
 * `<appName>.docker.localhost` to `http://<appName>:3000`; in production
 * the file is created with empty router/service maps.
 */
export const createTraefikConfig = (appName: string) => {
  const isProduction = process.env.NODE_ENV === "production";
  const defaultPort = 3000;
  const fallbackServiceUrl = `http://${appName}:${defaultPort}`;
  const fallbackRule = `Host(\`${appName}.docker.localhost\`)`;

  const config: FileConfig = {
    http: {
      routers: isProduction
        ? {}
        : {
            [`${appName}-router-1`]: {
              rule: fallbackRule,
              service: `${appName}-service-1`,
              entryPoints: ["web"],
            },
          },
      services: isProduction
        ? {}
        : {
            [`${appName}-service-1`]: {
              loadBalancer: {
                servers: [{ url: fallbackServiceUrl }],
                passHostHeader: true,
              },
            },
          },
    },
  };

  const { DYNAMIC_TRAEFIK_PATH } = paths();
  // Ensure the dynamic-config directory exists before writing into it.
  fs.mkdirSync(DYNAMIC_TRAEFIK_PATH, { recursive: true });
  writeFileSync(
    path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`),
    dump(config),
    "utf8",
  );
};
/**
 * Delete an application's Traefik dynamic-config file.
 *
 * When `serverId` is provided the file is removed on that remote server via
 * SSH; otherwise it is removed from the local filesystem. Errors (missing
 * file, transient SSH failure) are deliberately swallowed — removal is
 * best-effort.
 *
 * Fix: the original repeated the local existsSync/unlink block a second time
 * after the if/else, so it also attempted a *local* delete even when a
 * remote server was targeted. The duplicate has been removed.
 */
export const removeTraefikConfig = async (
  appName: string,
  serverId?: string | null,
) => {
  try {
    const { DYNAMIC_TRAEFIK_PATH } = paths(!!serverId);
    const configPath = path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`);
    if (serverId) {
      await execAsyncRemote(serverId, `rm ${configPath}`);
    } else if (fs.existsSync(configPath)) {
      await fs.promises.unlink(configPath);
    }
  } catch (error) {
    // best-effort removal: ignore failures
  }
};
/**
 * Best-effort removal of an application's Traefik config file on a remote
 * server via SSH; failures (e.g. file already gone) are silently ignored.
 */
export const removeTraefikConfigRemote = async (
  appName: string,
  serverId: string,
) => {
  try {
    const { DYNAMIC_TRAEFIK_PATH } = paths(true);
    const target = path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`);
    await execAsyncRemote(serverId, `rm ${target}`);
  } catch (error) {
    // ignore — removal is best-effort
  }
};
/**
 * Read an application's Traefik config from disk, falling back to an empty
 * config skeleton when the file does not exist or parses to nothing.
 */
export const loadOrCreateConfig = (appName: string): FileConfig => {
  const emptyConfig: FileConfig = { http: { routers: {}, services: {} } };
  const { DYNAMIC_TRAEFIK_PATH } = paths();
  const configPath = path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`);
  if (!fs.existsSync(configPath)) {
    return emptyConfig;
  }
  const raw = fs.readFileSync(configPath, "utf8");
  return (load(raw) as FileConfig) || emptyConfig;
};
/**
 * Fetch an application's Traefik config from a remote server via SSH,
 * falling back to an empty config skeleton when the file is missing,
 * empty, or unreadable.
 */
export const loadOrCreateConfigRemote = async (
  serverId: string,
  appName: string,
) => {
  const emptyConfig: FileConfig = { http: { routers: {}, services: {} } };
  const { DYNAMIC_TRAEFIK_PATH } = paths(true);
  const configPath = path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`);
  try {
    const { stdout } = await execAsyncRemote(serverId, `cat ${configPath}`);
    if (!stdout) {
      return emptyConfig;
    }
    return (load(stdout) as FileConfig) || emptyConfig;
  } catch (err) {
    // cat failed (file missing, SSH error) — treat as "no config yet"
    return emptyConfig;
  }
};
/**
 * Return the raw YAML of an application's local Traefik config,
 * or null when no config file exists.
 */
export const readConfig = (appName: string) => {
  const { DYNAMIC_TRAEFIK_PATH } = paths();
  const configPath = path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`);
  return fs.existsSync(configPath)
    ? fs.readFileSync(configPath, "utf8")
    : null;
};
/**
 * Return the raw YAML of an application's Traefik config on a remote
 * server, or null when the file is missing/empty or the read fails.
 */
export const readRemoteConfig = async (serverId: string, appName: string) => {
  const { DYNAMIC_TRAEFIK_PATH } = paths(true);
  const configPath = path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`);
  try {
    const { stdout } = await execAsyncRemote(serverId, `cat ${configPath}`);
    return stdout || null;
  } catch (err) {
    return null;
  }
};
/**
 * Read Traefik's access log from the dynamic-config directory,
 * or return null when it has not been created yet.
 */
export const readMonitoringConfig = () => {
  const { DYNAMIC_TRAEFIK_PATH } = paths();
  const logPath = path.join(DYNAMIC_TRAEFIK_PATH, "access.log");
  if (!fs.existsSync(logPath)) {
    return null;
  }
  return fs.readFileSync(logPath, "utf8");
};
/**
 * Read an arbitrary config file, locally or — when `serverId` is given —
 * from a remote server via SSH. Returns null when the file is missing or
 * the remote read produces no output.
 */
export const readConfigInPath = async (pathFile: string, serverId?: string) => {
  const configPath = path.join(pathFile);
  if (serverId) {
    const { stdout } = await execAsyncRemote(serverId, `cat ${configPath}`);
    return stdout || null;
  }
  return fs.existsSync(configPath)
    ? fs.readFileSync(configPath, "utf8")
    : null;
};
/**
 * Persist a raw YAML string as an application's local Traefik config.
 * Errors are logged rather than thrown.
 */
export const writeConfig = (appName: string, traefikConfig: string) => {
  try {
    const { DYNAMIC_TRAEFIK_PATH } = paths();
    const target = path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`);
    fs.writeFileSync(target, traefikConfig, "utf8");
  } catch (e) {
    console.error("Error saving the YAML config file:", e);
  }
};
export const writeConfigRemote = async (
serverId: string,
appName: string,
traefikConfig: string,
) => {
try {
const { DYNAMIC_TRAEFIK_PATH } = paths(true);
const configPath = path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`);
await execAsyncRemote(serverId, `echo '${traefikConfig}' > ${configPath}`);
} catch (e) {
console.error("Error saving the YAML config file:", e);
}
};
/**
 * Write a raw Traefik config string to an explicit path, either locally or
 * — when `serverId` is given — on a remote server via SSH. The remote
 * payload is base64-encoded so quotes/newlines in the config cannot break
 * the shell command. Errors are logged rather than thrown.
 *
 * Fix: the original unconditionally repeated the local `writeFileSync`
 * after the if/else, so it also wrote the file *locally* even when a remote
 * server was targeted. The duplicate write has been removed.
 */
export const writeTraefikConfigInPath = async (
  pathFile: string,
  traefikConfig: string,
  serverId?: string,
) => {
  try {
    const configPath = path.join(pathFile);
    if (serverId) {
      const encoded = encodeBase64(traefikConfig);
      await execAsyncRemote(
        serverId,
        `echo "${encoded}" | base64 -d > "${configPath}"`,
      );
    } else {
      fs.writeFileSync(configPath, traefikConfig, "utf8");
    }
  } catch (e) {
    console.error("Error saving the YAML config file:", e);
  }
};
/**
 * Serialize a FileConfig to YAML and persist it as an application's local
 * Traefik config. Errors are logged rather than thrown.
 */
export const writeTraefikConfig = (
  traefikConfig: FileConfig,
  appName: string,
) => {
  try {
    const { DYNAMIC_TRAEFIK_PATH } = paths();
    const target = path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`);
    fs.writeFileSync(target, dump(traefikConfig), "utf8");
  } catch (e) {
    console.error("Error saving the YAML config file:", e);
  }
};
export const writeTraefikConfigRemote = async (
traefikConfig: FileConfig,
appName: string,
serverId: string,
) => {
try {
const { DYNAMIC_TRAEFIK_PATH } = paths(true);
const configPath = path.join(DYNAMIC_TRAEFIK_PATH, `${appName}.yml`);
const yamlStr = dump(traefikConfig);
await execAsyncRemote(serverId, `echo '${yamlStr}' > ${configPath}`);
} catch (e) {
console.error("Error saving the YAML config file:", e);
}
};
/**
 * Build the Traefik load-balancer service entry for a domain, proxying to
 * the container named after the app on the domain's port (default 80).
 */
export const createServiceConfig = (
  appName: string,
  domain: Domain,
): {
  loadBalancer: HttpLoadBalancerService;
} => {
  const targetUrl = `http://${appName}:${domain.port || 80}`;
  return {
    loadBalancer: {
      servers: [{ url: targetUrl }],
      passHostHeader: true,
    },
  };
};

View File

@@ -1,145 +0,0 @@
import type { Domain } from "@/server/api/services/domain";
import type { ApplicationNested } from "../builders";
import {
createServiceConfig,
loadOrCreateConfig,
loadOrCreateConfigRemote,
removeTraefikConfig,
removeTraefikConfigRemote,
writeTraefikConfig,
writeTraefikConfigRemote,
} from "./application";
import type { FileConfig, HttpRouter } from "./file-types";
/**
 * Create or refresh the Traefik router/service entries for one domain of an
 * application and persist the resulting config (locally, or on the app's
 * remote server when `app.serverId` is set).
 */
export const manageDomain = async (app: ApplicationNested, domain: Domain) => {
  const { appName, serverId } = app;

  const config: FileConfig = serverId
    ? await loadOrCreateConfigRemote(serverId, appName)
    : loadOrCreateConfig(appName);

  const serviceName = `${appName}-service-${domain.uniqueConfigKey}`;
  const routerName = `${appName}-router-${domain.uniqueConfigKey}`;
  const secureRouterName = `${appName}-router-websecure-${domain.uniqueConfigKey}`;

  // Make sure the http sections exist before mutating them.
  config.http = config.http || { routers: {}, services: {} };
  config.http.routers = config.http.routers || {};
  config.http.services = config.http.services || {};

  config.http.routers[routerName] = await createRouterConfig(
    app,
    domain,
    "web",
  );

  if (domain.https) {
    config.http.routers[secureRouterName] = await createRouterConfig(
      app,
      domain,
      "websecure",
    );
  } else {
    // Drop any stale websecure router left over from a previous HTTPS setup.
    delete config.http.routers[secureRouterName];
  }

  config.http.services[serviceName] = createServiceConfig(appName, domain);

  if (serverId) {
    await writeTraefikConfigRemote(config, appName, serverId);
  } else {
    writeTraefikConfig(config, appName);
  }
};
/**
 * Remove the router/service entries for one domain (identified by its
 * uniqueConfigKey) from an application's Traefik config. When the last
 * router disappears the whole per-app config file is deleted; otherwise
 * the trimmed config is written back (locally or remotely).
 */
export const removeDomain = async (
  application: ApplicationNested,
  uniqueKey: number,
) => {
  const { appName, serverId } = application;

  const config: FileConfig = serverId
    ? await loadOrCreateConfigRemote(serverId, appName)
    : loadOrCreateConfig(appName);

  const routerKey = `${appName}-router-${uniqueKey}`;
  const secureRouterKey = `${appName}-router-websecure-${uniqueKey}`;
  const serviceKey = `${appName}-service-${uniqueKey}`;

  if (config.http?.routers?.[routerKey]) {
    delete config.http.routers[routerKey];
  }
  if (config.http?.routers?.[secureRouterKey]) {
    delete config.http.routers[secureRouterKey];
  }
  if (config.http?.services?.[serviceKey]) {
    delete config.http.services[serviceKey];
  }

  // If that was the last router, delete the config file entirely;
  // otherwise persist the trimmed config.
  const noRoutersLeft =
    config?.http?.routers && Object.keys(config?.http?.routers).length === 0;

  if (noRoutersLeft) {
    if (serverId) {
      await removeTraefikConfigRemote(appName, serverId);
    } else {
      await removeTraefikConfig(appName);
    }
  } else if (serverId) {
    await writeTraefikConfigRemote(config, appName, serverId);
  } else {
    writeTraefikConfig(config, appName);
  }
};
/**
 * Build one Traefik HTTP router entry for a domain on the given entrypoint.
 *
 * - A `web` router for an HTTPS domain only carries the redirect-to-https
 *   middleware.
 * - Redirect and security middlewares are attached to the entrypoint that
 *   actually serves traffic (websecure when HTTPS, web otherwise).
 * - A `websecure` router gets the letsencrypt cert resolver when requested,
 *   or no tls section at all for certificateType "none".
 */
export const createRouterConfig = async (
  app: ApplicationNested,
  domain: Domain,
  entryPoint: "web" | "websecure",
) => {
  const { appName, redirects, security } = app;
  const { host, path, https, uniqueConfigKey, certificateType } = domain;

  const pathPrefix =
    path !== null && path !== "/" ? ` && PathPrefix(\`${path}\`)` : "";

  const routerConfig: HttpRouter = {
    rule: `Host(\`${host}\`)${pathPrefix}`,
    service: `${appName}-service-${uniqueConfigKey}`,
    middlewares: [],
    entryPoints: [entryPoint],
  };

  if (entryPoint === "web" && https) {
    routerConfig.middlewares = ["redirect-to-https"];
  }

  const servesTraffic = (entryPoint === "websecure" && https) || !https;
  if (servesTraffic) {
    // per-domain redirect middlewares
    for (const redirect of redirects) {
      routerConfig.middlewares?.push(
        `redirect-${appName}-${redirect.uniqueConfigKey}`,
      );
    }
    // app-level security middleware (single entry per app)
    if (security.length > 0) {
      routerConfig.middlewares?.push(`auth-${appName}`);
    }
  }

  if (entryPoint === "websecure") {
    if (certificateType === "letsencrypt") {
      routerConfig.tls = { certResolver: "letsencrypt" };
    } else if (certificateType === "none") {
      routerConfig.tls = undefined;
    }
  }

  return routerConfig;
};

Some files were not shown because too many files have changed in this diff Show More