refactor(multi-server): replace executeCommand with execAsyncRemote

This commit is contained in:
Mauricio Siu
2024-09-16 00:40:11 -06:00
parent 19295ba746
commit d8d0b60cb3
76 changed files with 622 additions and 19123 deletions

View File

@@ -17,10 +17,10 @@ import {
SelectValue,
} from "@/components/ui/select";
import { api } from "@/utils/api";
import { Loader2 } from "lucide-react";
import { useEffect, useState } from "react";
import { toast } from "sonner";
import { DockerMonitoring } from "../../monitoring/docker/show";
import { Loader2 } from "lucide-react";
interface Props {
appName: string;
@@ -119,7 +119,6 @@ export const ShowMonitoringCompose = ({
</Button>
</div>
<DockerMonitoring
serverId={serverId || ""}
appName={containerAppName || ""}
appType={appType}
/>

View File

@@ -23,7 +23,7 @@ export const DockerMemoryChart = ({
return {
time: item.time,
name: `Point ${index + 1}`,
usage: (item.value.used / 1024).toFixed(2),
usage: (item.value.used / 1024 ** 3).toFixed(2),
};
});
return (

View File

@@ -150,6 +150,8 @@ export const DockerMonitoring = ({
});
}, [data]);
console.log(currentData);
useEffect(() => {
const protocol = window.location.protocol === "https:" ? "wss:" : "ws:";
const wsUrl = `${protocol}//${window.location.host}/listen-docker-stats-monitoring?appName=${appName}&appType=${appType}`;
@@ -208,9 +210,7 @@ export const DockerMonitoring = ({
<div className="flex flex-col gap-2 w-full ">
<span className="text-base font-medium">Memory</span>
<span className="text-sm text-muted-foreground">
{`Used: ${(currentData.memory.value.used / 1024).toFixed(
2,
)} GB / Limit: ${(currentData.memory.value.total / 1024).toFixed(2)} GB`}
{`Used: ${(currentData.memory.value.used / 1024 ** 3).toFixed(2)} GB / Limit: ${(currentData.memory.value.total / 1024 ** 3).toFixed(2)} GB`}
</span>
<Progress
value={currentData.memory.value.usedPercentage}
@@ -218,7 +218,7 @@ export const DockerMonitoring = ({
/>
<DockerMemoryChart
acummulativeData={acummulativeData.memory}
memoryLimitGB={currentData.memory.value.total / 1024}
memoryLimitGB={currentData.memory.value.total / 1024 ** 3}
/>
</div>
{appName === "dokploy" && (
@@ -240,9 +240,9 @@ export const DockerMonitoring = ({
<div className="flex flex-col gap-2 w-full ">
<span className="text-base font-medium">Block I/O</span>
<span className="text-sm text-muted-foreground">
{`Used: ${currentData.block.value.readMb.toFixed(
{`Read: ${currentData.block.value.readMb.toFixed(
2,
)} MB / Limit: ${currentData.block.value.writeMb.toFixed(
)} MB / Write: ${currentData.block.value.writeMb.toFixed(
3,
)} MB`}
</span>

View File

@@ -19,15 +19,6 @@ import {
FormMessage,
} from "@/components/ui/form";
import { Input } from "@/components/ui/input";
import { Textarea } from "@/components/ui/textarea";
import { slugify } from "@/lib/slug";
import { api } from "@/utils/api";
import { zodResolver } from "@hookform/resolvers/zod";
import { Folder } from "lucide-react";
import { useState } from "react";
import { useForm } from "react-hook-form";
import { toast } from "sonner";
import { z } from "zod";
import {
Select,
SelectContent,
@@ -37,6 +28,21 @@ import {
SelectTrigger,
SelectValue,
} from "@/components/ui/select";
import { Textarea } from "@/components/ui/textarea";
import {
Tooltip,
TooltipContent,
TooltipProvider,
TooltipTrigger,
} from "@/components/ui/tooltip";
import { slugify } from "@/lib/slug";
import { api } from "@/utils/api";
import { zodResolver } from "@hookform/resolvers/zod";
import { Folder, HelpCircle } from "lucide-react";
import { useState } from "react";
import { useForm } from "react-hook-form";
import { toast } from "sonner";
import { z } from "zod";
const AddTemplateSchema = z.object({
name: z.string().min(1, {
@@ -52,9 +58,7 @@ const AddTemplateSchema = z.object({
"App name supports lowercase letters, numbers, '-' and can only start and end letters, and does not support continuous '-'",
}),
description: z.string().optional(),
serverId: z.string().min(1, {
message: "Server is required",
}),
serverId: z.string().optional(),
});
type AddTemplate = z.infer<typeof AddTemplateSchema>;
@@ -155,7 +159,27 @@ export const AddApplication = ({ projectId, projectName }: Props) => {
name="serverId"
render={({ field }) => (
<FormItem>
<FormLabel>Select a Server</FormLabel>
<TooltipProvider delayDuration={0}>
<Tooltip>
<TooltipTrigger asChild>
<FormLabel className="break-all w-fit flex flex-row gap-1 items-center">
Select a Server (Optional)
<HelpCircle className="size-4 text-muted-foreground" />
</FormLabel>
</TooltipTrigger>
<TooltipContent
className="z-[999] w-[300px]"
align="start"
side="top"
>
<span>
If no server is selected, the application will be
deployed on the server where the user is logged in.
</span>
</TooltipContent>
</Tooltip>
</TooltipProvider>
<Select
onValueChange={field.onChange}
defaultValue={field.value}

View File

@@ -32,11 +32,17 @@ import { Textarea } from "@/components/ui/textarea";
import { slugify } from "@/lib/slug";
import { api } from "@/utils/api";
import { zodResolver } from "@hookform/resolvers/zod";
import { CircuitBoard, Folder } from "lucide-react";
import { CircuitBoard, HelpCircle } from "lucide-react";
import { useEffect } from "react";
import { useForm } from "react-hook-form";
import { toast } from "sonner";
import { z } from "zod";
import {
Tooltip,
TooltipContent,
TooltipProvider,
TooltipTrigger,
} from "@/components/ui/tooltip";
const AddComposeSchema = z.object({
composeType: z.enum(["docker-compose", "stack"]).optional(),
@@ -53,9 +59,7 @@ const AddComposeSchema = z.object({
"App name supports lowercase letters, numbers, '-' and can only start and end letters, and does not support continuous '-'",
}),
description: z.string().optional(),
serverId: z.string().min(1, {
message: "Server is required",
}),
serverId: z.string().optional(),
});
type AddCompose = z.infer<typeof AddComposeSchema>;
@@ -78,7 +82,6 @@ export const AddCompose = ({ projectId, projectName }: Props) => {
description: "",
composeType: "docker-compose",
appName: `${slug}-`,
serverId: "",
},
resolver: zodResolver(AddComposeSchema),
});
@@ -161,7 +164,27 @@ export const AddCompose = ({ projectId, projectName }: Props) => {
name="serverId"
render={({ field }) => (
<FormItem>
<FormLabel>Select a Server</FormLabel>
<TooltipProvider delayDuration={0}>
<Tooltip>
<TooltipTrigger asChild>
<FormLabel className="break-all w-fit flex flex-row gap-1 items-center">
Select a Server (Optional)
<HelpCircle className="size-4 text-muted-foreground" />
</FormLabel>
</TooltipTrigger>
<TooltipContent
className="z-[999] w-[300px]"
align="start"
side="top"
>
<span>
If no server is selected, the application will be
deployed on the server where the user is logged in.
</span>
</TooltipContent>
</Tooltip>
</TooltipProvider>
<Select
onValueChange={field.onChange}
defaultValue={field.value}

View File

@@ -26,15 +26,6 @@ import {
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import { RadioGroup, RadioGroupItem } from "@/components/ui/radio-group";
import { Textarea } from "@/components/ui/textarea";
import { slugify } from "@/lib/slug";
import { api } from "@/utils/api";
import { zodResolver } from "@hookform/resolvers/zod";
import { AlertTriangle, Database } from "lucide-react";
import { useState } from "react";
import { useForm } from "react-hook-form";
import { toast } from "sonner";
import { z } from "zod";
import {
Select,
SelectContent,
@@ -44,6 +35,15 @@ import {
SelectTrigger,
SelectValue,
} from "@/components/ui/select";
import { Textarea } from "@/components/ui/textarea";
import { slugify } from "@/lib/slug";
import { api } from "@/utils/api";
import { zodResolver } from "@hookform/resolvers/zod";
import { AlertTriangle, Database } from "lucide-react";
import { useState } from "react";
import { useForm } from "react-hook-form";
import { toast } from "sonner";
import { z } from "zod";
type DbType = typeof mySchema._type.type;

View File

@@ -34,16 +34,12 @@ const AddProjectSchema = z.object({
message: "Name is required",
}),
description: z.string().optional(),
serverId: z.string().min(1, {
message: "Server is required",
}),
});
type AddProject = z.infer<typeof AddProjectSchema>;
export const AddProject = () => {
const utils = api.useUtils();
const { data: servers } = api.server.all.useQuery();
const [isOpen, setIsOpen] = useState(false);
const { mutateAsync, error, isError } = api.project.create.useMutation();
const router = useRouter();
@@ -51,7 +47,6 @@ export const AddProject = () => {
defaultValues: {
description: "",
name: "",
serverId: "",
},
resolver: zodResolver(AddProjectSchema),
});
@@ -60,7 +55,6 @@ export const AddProject = () => {
form.reset({
description: "",
name: "",
serverId: "",
});
}, [form, form.reset, form.formState.isSubmitSuccessful]);

View File

@@ -18,15 +18,6 @@ import {
FormMessage,
} from "@/components/ui/form";
import { Input } from "@/components/ui/input";
import { Textarea } from "@/components/ui/textarea";
import { api } from "@/utils/api";
import { zodResolver } from "@hookform/resolvers/zod";
import { PlusIcon } from "lucide-react";
import { useRouter } from "next/router";
import { useEffect, useState } from "react";
import { useForm } from "react-hook-form";
import { toast } from "sonner";
import { z } from "zod";
import {
Select,
SelectContent,
@@ -36,6 +27,15 @@ import {
SelectTrigger,
SelectValue,
} from "@/components/ui/select";
import { Textarea } from "@/components/ui/textarea";
import { api } from "@/utils/api";
import { zodResolver } from "@hookform/resolvers/zod";
import { PlusIcon } from "lucide-react";
import { useRouter } from "next/router";
import { useEffect, useState } from "react";
import { useForm } from "react-hook-form";
import { toast } from "sonner";
import { z } from "zod";
const Schema = z.object({
name: z.string().min(1, {

View File

@@ -1,4 +1,14 @@
import { DateTooltip } from "@/components/shared/date-tooltip";
import { DialogAction } from "@/components/shared/dialog-action";
import { StatusTooltip } from "@/components/shared/status-tooltip";
import { Button } from "@/components/ui/button";
import {
Card,
CardContent,
CardDescription,
CardHeader,
CardTitle,
} from "@/components/ui/card";
import {
Dialog,
DialogContent,
@@ -12,17 +22,7 @@ import { useUrl } from "@/utils/hooks/use-url";
import { RocketIcon, ServerIcon } from "lucide-react";
import { useState } from "react";
import { toast } from "sonner";
import { DateTooltip } from "@/components/shared/date-tooltip";
import { StatusTooltip } from "@/components/shared/status-tooltip";
import {
Card,
CardContent,
CardDescription,
CardHeader,
CardTitle,
} from "@/components/ui/card";
import { ShowDeployment } from "../../application/deployments/show-deployment";
import { DialogAction } from "@/components/shared/dialog-action";
interface Props {
serverId: string;

View File

@@ -1,12 +1,6 @@
import { api } from "@/utils/api";
import { format } from "date-fns";
import { AddServer } from "./add-server";
import { KeyIcon, MoreHorizontal, ServerIcon } from "lucide-react";
import Link from "next/link";
import { DialogAction } from "@/components/shared/dialog-action";
import { Button } from "@/components/ui/button";
import { toast } from "sonner";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
import {
DropdownMenu,
DropdownMenuContent,
@@ -23,8 +17,14 @@ import {
TableHeader,
TableRow,
} from "@/components/ui/table";
import { SetupServer } from "./setup-server";
import { api } from "@/utils/api";
import { format } from "date-fns";
import { KeyIcon, MoreHorizontal, ServerIcon } from "lucide-react";
import Link from "next/link";
import { toast } from "sonner";
import { TerminalModal } from "../web-server/terminal-modal";
import { AddServer } from "./add-server";
import { SetupServer } from "./setup-server";
export const ShowServers = () => {
const { data, refetch } = api.server.all.useQuery();
const { mutateAsync } = api.server.remove.useMutation();

View File

@@ -124,7 +124,7 @@ export const WebServer = () => {
</DropdownMenuItem>
</ShowServerMiddlewareConfig>
<TerminalModal>
<TerminalModal serverId={""}>
<span>Enter the terminal</span>
</TerminalModal>
</DropdownMenuGroup>

View File

@@ -0,0 +1,81 @@
CREATE TABLE IF NOT EXISTS "server" (
"serverId" text PRIMARY KEY NOT NULL,
"name" text NOT NULL,
"description" text,
"ipAddress" text NOT NULL,
"port" integer NOT NULL,
"username" text DEFAULT 'root' NOT NULL,
"appName" text NOT NULL,
"redisPassword" text DEFAULT 'xYBugfHkULig1iLN' NOT NULL,
"createdAt" text NOT NULL,
"adminId" text NOT NULL,
"sshKeyId" text
);
--> statement-breakpoint
ALTER TABLE "application" ADD COLUMN "serverId" text;--> statement-breakpoint
ALTER TABLE "postgres" ADD COLUMN "serverId" text;--> statement-breakpoint
ALTER TABLE "mariadb" ADD COLUMN "serverId" text;--> statement-breakpoint
ALTER TABLE "mongo" ADD COLUMN "serverId" text;--> statement-breakpoint
ALTER TABLE "mysql" ADD COLUMN "serverId" text;--> statement-breakpoint
ALTER TABLE "deployment" ADD COLUMN "serverId" text;--> statement-breakpoint
ALTER TABLE "redis" ADD COLUMN "serverId" text;--> statement-breakpoint
ALTER TABLE "compose" ADD COLUMN "serverId" text;--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "server" ADD CONSTRAINT "server_adminId_admin_adminId_fk" FOREIGN KEY ("adminId") REFERENCES "public"."admin"("adminId") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "server" ADD CONSTRAINT "server_sshKeyId_ssh-key_sshKeyId_fk" FOREIGN KEY ("sshKeyId") REFERENCES "public"."ssh-key"("sshKeyId") ON DELETE set null ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "application" ADD CONSTRAINT "application_serverId_server_serverId_fk" FOREIGN KEY ("serverId") REFERENCES "public"."server"("serverId") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "postgres" ADD CONSTRAINT "postgres_serverId_server_serverId_fk" FOREIGN KEY ("serverId") REFERENCES "public"."server"("serverId") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "mariadb" ADD CONSTRAINT "mariadb_serverId_server_serverId_fk" FOREIGN KEY ("serverId") REFERENCES "public"."server"("serverId") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "mongo" ADD CONSTRAINT "mongo_serverId_server_serverId_fk" FOREIGN KEY ("serverId") REFERENCES "public"."server"("serverId") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "mysql" ADD CONSTRAINT "mysql_serverId_server_serverId_fk" FOREIGN KEY ("serverId") REFERENCES "public"."server"("serverId") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "deployment" ADD CONSTRAINT "deployment_serverId_server_serverId_fk" FOREIGN KEY ("serverId") REFERENCES "public"."server"("serverId") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "redis" ADD CONSTRAINT "redis_serverId_server_serverId_fk" FOREIGN KEY ("serverId") REFERENCES "public"."server"("serverId") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "compose" ADD CONSTRAINT "compose_serverId_server_serverId_fk" FOREIGN KEY ("serverId") REFERENCES "public"."server"("serverId") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;

View File

@@ -1,18 +0,0 @@
CREATE TABLE IF NOT EXISTS "server" (
"serverId" text PRIMARY KEY NOT NULL,
"name" text NOT NULL,
"description" text,
"ipAddress" text NOT NULL,
"port" integer NOT NULL,
"username" text DEFAULT 'root' NOT NULL,
"appName" text,
"createdAt" text NOT NULL,
"adminId" text NOT NULL,
CONSTRAINT "server_appName_unique" UNIQUE("appName")
);
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "server" ADD CONSTRAINT "server_adminId_admin_adminId_fk" FOREIGN KEY ("adminId") REFERENCES "public"."admin"("adminId") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;

View File

@@ -1,6 +0,0 @@
ALTER TABLE "deployment" ADD COLUMN "serverId" text;--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "deployment" ADD CONSTRAINT "deployment_serverId_server_serverId_fk" FOREIGN KEY ("serverId") REFERENCES "public"."server"("serverId") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;

View File

@@ -1,8 +0,0 @@
ALTER TABLE "server" DROP CONSTRAINT "server_appName_unique";--> statement-breakpoint
ALTER TABLE "server" ALTER COLUMN "appName" SET NOT NULL;--> statement-breakpoint
ALTER TABLE "server" ADD COLUMN "sshKeyId" text;--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "server" ADD CONSTRAINT "server_sshKeyId_ssh-key_sshKeyId_fk" FOREIGN KEY ("sshKeyId") REFERENCES "public"."ssh-key"("sshKeyId") ON DELETE set null ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;

View File

@@ -1,13 +0,0 @@
ALTER TABLE "application" ADD COLUMN "serverId" text;--> statement-breakpoint
ALTER TABLE "compose" ADD COLUMN "serverId" text;--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "application" ADD CONSTRAINT "application_serverId_server_serverId_fk" FOREIGN KEY ("serverId") REFERENCES "public"."server"("serverId") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "compose" ADD CONSTRAINT "compose_serverId_server_serverId_fk" FOREIGN KEY ("serverId") REFERENCES "public"."server"("serverId") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;

View File

@@ -1 +0,0 @@
ALTER TABLE "server" ADD COLUMN "redisPassword" text DEFAULT 'xYBugfHkULig1iLN' NOT NULL;

View File

@@ -1,34 +0,0 @@
ALTER TABLE "postgres" ADD COLUMN "serverId" text;--> statement-breakpoint
ALTER TABLE "mariadb" ADD COLUMN "serverId" text;--> statement-breakpoint
ALTER TABLE "mongo" ADD COLUMN "serverId" text;--> statement-breakpoint
ALTER TABLE "mysql" ADD COLUMN "serverId" text;--> statement-breakpoint
ALTER TABLE "redis" ADD COLUMN "serverId" text;--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "postgres" ADD CONSTRAINT "postgres_serverId_server_serverId_fk" FOREIGN KEY ("serverId") REFERENCES "public"."server"("serverId") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "mariadb" ADD CONSTRAINT "mariadb_serverId_server_serverId_fk" FOREIGN KEY ("serverId") REFERENCES "public"."server"("serverId") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "mongo" ADD CONSTRAINT "mongo_serverId_server_serverId_fk" FOREIGN KEY ("serverId") REFERENCES "public"."server"("serverId") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "mysql" ADD CONSTRAINT "mysql_serverId_server_serverId_fk" FOREIGN KEY ("serverId") REFERENCES "public"."server"("serverId") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "redis" ADD CONSTRAINT "redis_serverId_server_serverId_fk" FOREIGN KEY ("serverId") REFERENCES "public"."server"("serverId") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;

View File

@@ -1,5 +1,5 @@
{
"id": "ae3c47b8-c680-4409-a6c6-8570862f1018",
"id": "8a72c5cc-bc93-4cd4-a129-6e86a7889b89",
"prevId": "74cd1475-b79c-4226-b4e6-e5ddb9576025",
"version": "6",
"dialect": "postgresql",
@@ -377,6 +377,12 @@
"type": "text",
"primaryKey": false,
"notNull": false
},
"serverId": {
"name": "serverId",
"type": "text",
"primaryKey": false,
"notNull": false
}
},
"indexes": {},
@@ -458,6 +464,19 @@
],
"onDelete": "set null",
"onUpdate": "no action"
},
"application_serverId_server_serverId_fk": {
"name": "application_serverId_server_serverId_fk",
"tableFrom": "application",
"tableTo": "server",
"columnsFrom": [
"serverId"
],
"columnsTo": [
"serverId"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
@@ -584,6 +603,12 @@
"type": "text",
"primaryKey": false,
"notNull": true
},
"serverId": {
"name": "serverId",
"type": "text",
"primaryKey": false,
"notNull": false
}
},
"indexes": {},
@@ -600,6 +625,19 @@
],
"onDelete": "cascade",
"onUpdate": "no action"
},
"postgres_serverId_server_serverId_fk": {
"name": "postgres_serverId_server_serverId_fk",
"tableFrom": "postgres",
"tableTo": "server",
"columnsFrom": [
"serverId"
],
"columnsTo": [
"serverId"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
@@ -1221,6 +1259,12 @@
"type": "text",
"primaryKey": false,
"notNull": true
},
"serverId": {
"name": "serverId",
"type": "text",
"primaryKey": false,
"notNull": false
}
},
"indexes": {},
@@ -1237,6 +1281,19 @@
],
"onDelete": "cascade",
"onUpdate": "no action"
},
"mariadb_serverId_server_serverId_fk": {
"name": "mariadb_serverId_server_serverId_fk",
"tableFrom": "mariadb",
"tableTo": "server",
"columnsFrom": [
"serverId"
],
"columnsTo": [
"serverId"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
@@ -1357,6 +1414,12 @@
"type": "text",
"primaryKey": false,
"notNull": true
},
"serverId": {
"name": "serverId",
"type": "text",
"primaryKey": false,
"notNull": false
}
},
"indexes": {},
@@ -1373,6 +1436,19 @@
],
"onDelete": "cascade",
"onUpdate": "no action"
},
"mongo_serverId_server_serverId_fk": {
"name": "mongo_serverId_server_serverId_fk",
"tableFrom": "mongo",
"tableTo": "server",
"columnsFrom": [
"serverId"
],
"columnsTo": [
"serverId"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
@@ -1505,6 +1581,12 @@
"type": "text",
"primaryKey": false,
"notNull": true
},
"serverId": {
"name": "serverId",
"type": "text",
"primaryKey": false,
"notNull": false
}
},
"indexes": {},
@@ -1521,6 +1603,19 @@
],
"onDelete": "cascade",
"onUpdate": "no action"
},
"mysql_serverId_server_serverId_fk": {
"name": "mysql_serverId_server_serverId_fk",
"tableFrom": "mysql",
"tableTo": "server",
"columnsFrom": [
"serverId"
],
"columnsTo": [
"serverId"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
@@ -1797,6 +1892,12 @@
"primaryKey": false,
"notNull": false
},
"serverId": {
"name": "serverId",
"type": "text",
"primaryKey": false,
"notNull": false
},
"createdAt": {
"name": "createdAt",
"type": "text",
@@ -1831,6 +1932,19 @@
],
"onDelete": "cascade",
"onUpdate": "no action"
},
"deployment_serverId_server_serverId_fk": {
"name": "deployment_serverId_server_serverId_fk",
"tableFrom": "deployment",
"tableTo": "server",
"columnsFrom": [
"serverId"
],
"columnsTo": [
"serverId"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
@@ -2413,6 +2527,12 @@
"type": "text",
"primaryKey": false,
"notNull": true
},
"serverId": {
"name": "serverId",
"type": "text",
"primaryKey": false,
"notNull": false
}
},
"indexes": {},
@@ -2429,6 +2549,19 @@
],
"onDelete": "cascade",
"onUpdate": "no action"
},
"redis_serverId_server_serverId_fk": {
"name": "redis_serverId_server_serverId_fk",
"tableFrom": "redis",
"tableTo": "server",
"columnsFrom": [
"serverId"
],
"columnsTo": [
"serverId"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
@@ -2660,6 +2793,12 @@
"type": "text",
"primaryKey": false,
"notNull": false
},
"serverId": {
"name": "serverId",
"type": "text",
"primaryKey": false,
"notNull": false
}
},
"indexes": {},
@@ -2728,6 +2867,19 @@
],
"onDelete": "set null",
"onUpdate": "no action"
},
"compose_serverId_server_serverId_fk": {
"name": "compose_serverId_server_serverId_fk",
"tableFrom": "compose",
"tableTo": "server",
"columnsFrom": [
"serverId"
],
"columnsTo": [
"serverId"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
@@ -3447,7 +3599,14 @@
"name": "appName",
"type": "text",
"primaryKey": false,
"notNull": false
"notNull": true
},
"redisPassword": {
"name": "redisPassword",
"type": "text",
"primaryKey": false,
"notNull": true,
"default": "'xYBugfHkULig1iLN'"
},
"createdAt": {
"name": "createdAt",
@@ -3460,6 +3619,12 @@
"type": "text",
"primaryKey": false,
"notNull": true
},
"sshKeyId": {
"name": "sshKeyId",
"type": "text",
"primaryKey": false,
"notNull": false
}
},
"indexes": {},
@@ -3476,18 +3641,23 @@
],
"onDelete": "cascade",
"onUpdate": "no action"
},
"server_sshKeyId_ssh-key_sshKeyId_fk": {
"name": "server_sshKeyId_ssh-key_sshKeyId_fk",
"tableFrom": "server",
"tableTo": "ssh-key",
"columnsFrom": [
"sshKeyId"
],
"columnsTo": [
"sshKeyId"
],
"onDelete": "set null",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {
"server_appName_unique": {
"name": "server_appName_unique",
"nullsNotDistinct": false,
"columns": [
"appName"
]
}
}
"uniqueConstraints": {}
}
},
"enums": {

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -264,43 +264,8 @@
{
"idx": 37,
"version": "6",
"when": 1725773488051,
"tag": "0037_small_adam_warlock",
"breakpoints": true
},
{
"idx": 38,
"version": "6",
"when": 1725773967628,
"tag": "0038_thankful_magneto",
"breakpoints": true
},
{
"idx": 39,
"version": "6",
"when": 1725776089878,
"tag": "0039_military_doctor_faustus",
"breakpoints": true
},
{
"idx": 40,
"version": "6",
"when": 1725812869228,
"tag": "0040_bent_santa_claus",
"breakpoints": true
},
{
"idx": 41,
"version": "6",
"when": 1725830160928,
"tag": "0041_mute_polaris",
"breakpoints": true
},
{
"idx": 42,
"version": "6",
"when": 1725856996201,
"tag": "0042_wandering_inhumans",
"when": 1726462845274,
"tag": "0037_quick_callisto",
"breakpoints": true
}
]

View File

@@ -154,7 +154,10 @@ const Service = (
</TabsContent>
<TabsContent value="monitoring">
<div className="flex flex-col gap-4 pt-2.5">
<DockerMonitoring appName={data?.appName || ""} />
<DockerMonitoring
appName={data?.appName || ""}
serverId={data?.serverId || ""}
/>
</div>
</TabsContent>
<TabsContent value="logs">

View File

@@ -26,10 +26,10 @@ import { redirectsRouter } from "./routers/redirects";
import { redisRouter } from "./routers/redis";
import { registryRouter } from "./routers/registry";
import { securityRouter } from "./routers/security";
import { serverRouter } from "./routers/server";
import { settingsRouter } from "./routers/settings";
import { sshRouter } from "./routers/ssh-key";
import { userRouter } from "./routers/user";
import { serverRouter } from "./routers/server";
/**
* This is the primary router for your server.

View File

@@ -24,6 +24,7 @@ import {
cleanQueuesByApplication,
} from "@/server/queues/deployments-queue";
import { myQueue } from "@/server/queues/queueSetup";
import { unzipDrop } from "@/server/utils/builders/drop";
import {
removeService,
startService,
@@ -43,6 +44,7 @@ import {
writeConfigRemote,
} from "@/server/utils/traefik/application";
import { deleteAllMiddlewares } from "@/server/utils/traefik/middleware";
import { uploadFileSchema } from "@/utils/schema";
import { TRPCError } from "@trpc/server";
import { eq } from "drizzle-orm";
import { nanoid } from "nanoid";
@@ -56,8 +58,6 @@ import {
} from "../services/application";
import { removeDeployments } from "../services/deployment";
import { addNewService, checkServiceAccess } from "../services/user";
import { unzipDrop } from "@/server/utils/builders/drop";
import { uploadFileSchema } from "@/utils/schema";
export const applicationRouter = createTRPCRouter({
create: protectedProcedure

View File

@@ -1,5 +1,6 @@
import { lucia, validateRequest } from "@/server/auth/auth";
import { luciaToken } from "@/server/auth/token";
import { IS_CLOUD } from "@/server/constants";
import {
apiCreateAdmin,
apiCreateUser,
@@ -29,7 +30,6 @@ import {
protectedProcedure,
publicProcedure,
} from "../trpc";
import { IS_CLOUD } from "@/server/constants";
export const authRouter = createTRPCRouter({
createAdmin: publicProcedure

View File

@@ -53,7 +53,6 @@ import { createMount } from "../services/mount";
import { findProjectById } from "../services/project";
import { addNewService, checkServiceAccess } from "../services/user";
import { createTRPCRouter, protectedProcedure } from "../trpc";
import { findApplicationById } from "../services/application";
export const composeRouter = createTRPCRouter({
create: protectedProcedure

View File

@@ -1,16 +1,14 @@
import { createTRPCRouter, protectedProcedure } from "@/server/api/trpc";
import {
apiCreateDomain,
apiCreateTraefikMeDomain,
apiFindCompose,
apiFindDomain,
apiFindDomainByApplication,
apiFindDomainByCompose,
apiFindOneApplication,
apiUpdateDomain,
} from "@/server/db/schema";
import { manageDomain, removeDomain } from "@/server/utils/traefik/domain";
import { TRPCError } from "@trpc/server";
import { z } from "zod";
import { findApplicationById } from "../services/application";
import {
createDomain,
@@ -21,7 +19,6 @@ import {
removeDomainById,
updateDomainById,
} from "../services/domain";
import { z } from "zod";
export const domainRouter = createTRPCRouter({
create: protectedProcedure

View File

@@ -8,16 +8,16 @@ import {
apiUpdateServer,
server,
} from "@/server/db/schema";
import { setupServer } from "@/server/utils/servers/setup-server";
import { TRPCError } from "@trpc/server";
import { desc } from "drizzle-orm";
import { removeDeploymentsByServerId } from "../services/deployment";
import {
createServer,
deleteServer,
findServerById,
updateServerById,
createServer,
} from "../services/server";
import { setupServer } from "@/server/utils/servers/setup-server";
import { removeDeploymentsByServerId } from "../services/deployment";
export const serverRouter = createTRPCRouter({
create: protectedProcedure

View File

@@ -8,6 +8,13 @@ import {
getBuildCommand,
mechanizeDockerContainer,
} from "@/server/utils/builders";
import { sendBuildErrorNotifications } from "@/server/utils/notifications/build-error";
import { sendBuildSuccessNotifications } from "@/server/utils/notifications/build-success";
import { execAsyncRemote } from "@/server/utils/process/execAsync";
import {
cloneBitbucketRepository,
getBitbucketCloneCommand,
} from "@/server/utils/providers/bitbucket";
import {
buildDocker,
buildRemoteDocker,
@@ -20,25 +27,17 @@ import {
cloneGithubRepository,
getGithubCloneCommand,
} from "@/server/utils/providers/github";
import {
cloneGitlabRepository,
getGitlabCloneCommand,
} from "@/server/utils/providers/gitlab";
import { createTraefikConfig } from "@/server/utils/traefik/application";
import { generatePassword } from "@/templates/utils";
import { TRPCError } from "@trpc/server";
import { eq } from "drizzle-orm";
import { getDokployUrl } from "./admin";
import { createDeployment, updateDeploymentStatus } from "./deployment";
import { sendBuildErrorNotifications } from "@/server/utils/notifications/build-error";
import { sendBuildSuccessNotifications } from "@/server/utils/notifications/build-success";
import {
cloneBitbucketRepository,
getBitbucketCloneCommand,
} from "@/server/utils/providers/bitbucket";
import {
cloneGitlabRepository,
getGitlabCloneCommand,
} from "@/server/utils/providers/gitlab";
import { validUniqueServerAppName } from "./project";
import { executeCommand } from "@/server/utils/servers/command";
import { execAsyncRemote } from "@/server/utils/process/execAsync";
export type Application = typeof applications.$inferSelect;
export const createApplication = async (
@@ -164,49 +163,22 @@ export const deployApplication = async ({
});
try {
if (application.serverId) {
let command = "set -e;";
if (application.sourceType === "github") {
command += await getGithubCloneCommand(application, deployment.logPath);
} else if (application.sourceType === "gitlab") {
command += await getGitlabCloneCommand(application, deployment.logPath);
} else if (application.sourceType === "bitbucket") {
command += await getBitbucketCloneCommand(
application,
deployment.logPath,
);
} else if (application.sourceType === "git") {
command += await getCustomGitCloneCommand(
application,
deployment.logPath,
);
} else if (application.sourceType === "docker") {
command += await buildRemoteDocker(application, deployment.logPath);
}
if (application.sourceType !== "docker") {
command += getBuildCommand(application, deployment.logPath);
}
await execAsyncRemote(application.serverId, command);
await mechanizeDockerContainer(application);
} else {
if (application.sourceType === "github") {
await cloneGithubRepository(application, deployment.logPath);
await buildApplication(application, deployment.logPath);
} else if (application.sourceType === "gitlab") {
await cloneGitlabRepository(application, deployment.logPath);
await buildApplication(application, deployment.logPath);
} else if (application.sourceType === "bitbucket") {
await cloneBitbucketRepository(application, deployment.logPath);
await buildApplication(application, deployment.logPath);
} else if (application.sourceType === "docker") {
await buildDocker(application, deployment.logPath);
} else if (application.sourceType === "git") {
await cloneGitRepository(application, deployment.logPath);
await buildApplication(application, deployment.logPath);
} else if (application.sourceType === "drop") {
await buildApplication(application, deployment.logPath);
}
if (application.sourceType === "github") {
await cloneGithubRepository(application, deployment.logPath);
await buildApplication(application, deployment.logPath);
} else if (application.sourceType === "gitlab") {
await cloneGitlabRepository(application, deployment.logPath);
await buildApplication(application, deployment.logPath);
} else if (application.sourceType === "bitbucket") {
await cloneBitbucketRepository(application, deployment.logPath);
await buildApplication(application, deployment.logPath);
} else if (application.sourceType === "docker") {
await buildDocker(application, deployment.logPath);
} else if (application.sourceType === "git") {
await cloneGitRepository(application, deployment.logPath);
await buildApplication(application, deployment.logPath);
} else if (application.sourceType === "drop") {
await buildApplication(application, deployment.logPath);
}
await updateDeploymentStatus(deployment.deploymentId, "done");

View File

@@ -14,6 +14,7 @@ import {
loadDockerCompose,
loadDockerComposeRemote,
} from "@/server/utils/docker/domain";
import type { ComposeSpecification } from "@/server/utils/docker/types";
import { sendBuildErrorNotifications } from "@/server/utils/notifications/build-error";
import { sendBuildSuccessNotifications } from "@/server/utils/notifications/build-success";
import { execAsync, execAsyncRemote } from "@/server/utils/process/execAsync";
@@ -43,8 +44,6 @@ import { eq } from "drizzle-orm";
import { getDokployUrl } from "./admin";
import { createDeploymentCompose, updateDeploymentStatus } from "./deployment";
import { validUniqueServerAppName } from "./project";
import { executeCommand } from "@/server/utils/servers/command";
import type { ComposeSpecification } from "@/server/utils/docker/types";
export type Compose = typeof compose.$inferSelect;
@@ -61,6 +60,7 @@ export const createCompose = async (input: typeof apiCreateCompose._type) => {
});
}
}
const newDestination = await db
.insert(compose)
.values({

View File

@@ -14,8 +14,8 @@ import { format } from "date-fns";
import { desc, eq } from "drizzle-orm";
import { type Application, findApplicationById } from "./application";
import { type Compose, findComposeById } from "./compose";
import { findServerById, type Server } from "./server";
import { executeCommand } from "@/server/utils/servers/command";
import { type Server, findServerById } from "./server";
import { execAsyncRemote } from "@/server/utils/process/execAsync";
export type Deployment = typeof deployments.$inferSelect;
@@ -58,7 +58,7 @@ export const createDeployment = async (
echo "Initializing deployment" >> ${logFilePath};
`;
await executeCommand(server.serverId, command);
await execAsyncRemote(server.serverId, command);
} else {
await fsPromises.mkdir(path.join(LOGS_PATH, application.appName), {
recursive: true,
@@ -114,7 +114,7 @@ mkdir -p ${LOGS_PATH}/${compose.appName};
echo "Initializing deployment" >> ${logFilePath};
`;
await executeCommand(server.serverId, command);
await execAsyncRemote(server.serverId, command);
} else {
await fsPromises.mkdir(path.join(LOGS_PATH, compose.appName), {
recursive: true,

View File

@@ -8,7 +8,8 @@ import { generatePassword } from "@/templates/utils";
import { TRPCError } from "@trpc/server";
import { eq, getTableColumns } from "drizzle-orm";
import { validUniqueServerAppName } from "./project";
import { executeCommand } from "@/server/utils/servers/command";
import { execAsyncRemote } from "@/server/utils/process/execAsync";
export type Mariadb = typeof mariadb.$inferSelect;
@@ -121,7 +122,7 @@ export const deployMariadb = async (mariadbId: string) => {
const mariadb = await findMariadbById(mariadbId);
try {
if (mariadb.serverId) {
await executeCommand(
await execAsyncRemote(
mariadb.serverId,
`docker pull ${mariadb.dockerImage}`,
);

View File

@@ -8,7 +8,8 @@ import { generatePassword } from "@/templates/utils";
import { TRPCError } from "@trpc/server";
import { eq, getTableColumns } from "drizzle-orm";
import { validUniqueServerAppName } from "./project";
import { executeCommand } from "@/server/utils/servers/command";
import { execAsyncRemote } from "@/server/utils/process/execAsync";
export type Mongo = typeof mongo.$inferSelect;
@@ -117,7 +118,7 @@ export const deployMongo = async (mongoId: string) => {
const mongo = await findMongoById(mongoId);
try {
if (mongo.serverId) {
await executeCommand(mongo.serverId, `docker pull ${mongo.dockerImage}`);
await execAsyncRemote(mongo.serverId, `docker pull ${mongo.dockerImage}`);
} else {
await pullImage(mongo.dockerImage);
}

View File

@@ -8,9 +8,9 @@ import {
} from "@/server/db/schema";
import { createFile, getCreateFileCommand } from "@/server/utils/docker/utils";
import { removeFileOrDirectory } from "@/server/utils/filesystem/directory";
import { execAsyncRemote } from "@/server/utils/process/execAsync";
import { TRPCError } from "@trpc/server";
import { type SQL, eq, sql } from "drizzle-orm";
import { execAsyncRemote } from "@/server/utils/process/execAsync";
export type Mount = typeof mounts.$inferSelect;

View File

@@ -8,7 +8,8 @@ import { generatePassword } from "@/templates/utils";
import { TRPCError } from "@trpc/server";
import { eq, getTableColumns } from "drizzle-orm";
import { validUniqueServerAppName } from "./project";
import { executeCommand } from "@/server/utils/servers/command";
import { execAsyncRemote } from "@/server/utils/process/execAsync";
export type MySql = typeof mysql.$inferSelect;
@@ -122,7 +123,7 @@ export const deployMySql = async (mysqlId: string) => {
const mysql = await findMySqlById(mysqlId);
try {
if (mysql.serverId) {
await executeCommand(mysql.serverId, `docker pull ${mysql.dockerImage}`);
await execAsyncRemote(mysql.serverId, `docker pull ${mysql.dockerImage}`);
} else {
await pullImage(mysql.dockerImage);
}

View File

@@ -8,7 +8,8 @@ import { generatePassword } from "@/templates/utils";
import { TRPCError } from "@trpc/server";
import { eq, getTableColumns } from "drizzle-orm";
import { validUniqueServerAppName } from "./project";
import { executeCommand } from "@/server/utils/servers/command";
import { execAsyncRemote } from "@/server/utils/process/execAsync";
export type Postgres = typeof postgres.$inferSelect;
@@ -116,7 +117,7 @@ export const deployPostgres = async (postgresId: string) => {
const postgres = await findPostgresById(postgresId);
try {
if (postgres.serverId) {
await executeCommand(
await execAsyncRemote(
postgres.serverId,
`docker pull ${postgres.dockerImage}`,
);

View File

@@ -8,7 +8,8 @@ import { generatePassword } from "@/templates/utils";
import { TRPCError } from "@trpc/server";
import { eq } from "drizzle-orm";
import { validUniqueServerAppName } from "./project";
import { executeCommand } from "@/server/utils/servers/command";
import { execAsyncRemote } from "@/server/utils/process/execAsync";
export type Redis = typeof redis.$inferSelect;
@@ -94,7 +95,7 @@ export const deployRedis = async (redisId: string) => {
const redis = await findRedisById(redisId);
try {
if (redis.serverId) {
await executeCommand(redis.serverId, `docker pull ${redis.dockerImage}`);
await execAsyncRemote(redis.serverId, `docker pull ${redis.dockerImage}`);
} else {
await pullImage(redis.dockerImage);
}

View File

@@ -4,10 +4,10 @@ import { DrizzlePostgreSQLAdapter } from "@lucia-auth/adapter-drizzle";
import { TimeSpan } from "lucia";
import { Lucia } from "lucia/dist/core.js";
import type { Session, User } from "lucia/dist/core.js";
import { findAdminByAuthId } from "../api/services/admin";
import { findUserByAuthId } from "../api/services/user";
import { db } from "../db";
import { type DatabaseUser, auth, sessionTable } from "../db/schema";
import { findUserByAuthId } from "../api/services/user";
import { findAdminByAuthId } from "../api/services/admin";
globalThis.crypto = webcrypto as Crypto;
export const adapter = new DrizzlePostgreSQLAdapter(db, sessionTable, auth);

View File

@@ -35,6 +35,7 @@ export const validateBearerToken = async (
session: result.session,
...((result.user && {
user: {
adminId: result.user.adminId,
authId: result.user.id,
email: result.user.email,
rol: result.user.rol,

View File

@@ -1,16 +1,25 @@
import path from "node:path";
import Docker from "dockerode";
const IS_MULTI_SERVER = process.env.IS_MULTI_SERVER === "true";
export const IS_CLOUD = process.env.IS_CLOUD === "true";
export const BASE_PATH =
process.env.NODE_ENV === "production"
? "/etc/dokploy"
: path.join(process.cwd(), ".docker");
export const IS_CLOUD = process.env.IS_CLOUD === "true";
// IS_MULTI_SERVER
// ? "/etc/dokploy"
// : process.env.NODE_ENV === "production"
// ? "/etc/dokploy"
// : path.join(process.cwd(), ".docker");
export const MAIN_TRAEFIK_PATH = `${BASE_PATH}/traefik`;
export const DYNAMIC_TRAEFIK_PATH = `/etc/dokploy/traefik/dynamic`;
export const LOGS_PATH = `/etc/dokploy/logs`;
export const APPLICATIONS_PATH = `/etc/dokploy/applications`;
export const COMPOSE_PATH = `/etc/dokploy/compose`;
export const DYNAMIC_TRAEFIK_PATH = `${BASE_PATH}/traefik/dynamic`;
export const LOGS_PATH = `${BASE_PATH}/logs`;
export const APPLICATIONS_PATH = `${BASE_PATH}/applications`;
export const COMPOSE_PATH = `${BASE_PATH}/compose`;
export const SSH_PATH = `${BASE_PATH}/ssh`;
export const CERTIFICATES_PATH = `${DYNAMIC_TRAEFIK_PATH}/certificates`;
export const REGISTRY_PATH = `${DYNAMIC_TRAEFIK_PATH}/registry`;

View File

@@ -7,9 +7,9 @@ import { z } from "zod";
import { backups } from "./backups";
import { mounts } from "./mount";
import { projects } from "./project";
import { server } from "./server";
import { applicationStatus } from "./shared";
import { generateAppName } from "./utils";
import { server } from "./server";
export const mariadb = pgTable("mariadb", {
mariadbId: text("mariadbId")

View File

@@ -7,9 +7,9 @@ import { z } from "zod";
import { backups } from "./backups";
import { mounts } from "./mount";
import { projects } from "./project";
import { server } from "./server";
import { applicationStatus } from "./shared";
import { generateAppName } from "./utils";
import { server } from "./server";
export const mongo = pgTable("mongo", {
mongoId: text("mongoId")

View File

@@ -6,9 +6,9 @@ import { z } from "zod";
import { backups } from "./backups";
import { mounts } from "./mount";
import { projects } from "./project";
import { server } from "./server";
import { applicationStatus } from "./shared";
import { generateAppName } from "./utils";
import { server } from "./server";
export const mysql = pgTable("mysql", {
mysqlId: text("mysqlId")

View File

@@ -7,9 +7,9 @@ import { z } from "zod";
import { backups } from "./backups";
import { mounts } from "./mount";
import { projects } from "./project";
import { server } from "./server";
import { applicationStatus } from "./shared";
import { generateAppName } from "./utils";
import { server } from "./server";
export const postgres = pgTable("postgres", {
postgresId: text("postgresId")

View File

@@ -5,9 +5,9 @@ import { nanoid } from "nanoid";
import { z } from "zod";
import { mounts } from "./mount";
import { projects } from "./project";
import { server } from "./server";
import { applicationStatus } from "./shared";
import { generateAppName } from "./utils";
import { server } from "./server";
export const redis = pgTable("redis", {
redisId: text("redisId")

View File

@@ -3,10 +3,6 @@ import { integer, pgTable, text } from "drizzle-orm/pg-core";
import { createInsertSchema } from "drizzle-zod";
import { nanoid } from "nanoid";
import { z } from "zod";
import { admins } from "./admin";
import { generateAppName } from "./utils";
import { deployments } from "./deployment";
import { sshKeys } from "./ssh-key";
import {
applications,
compose,
@@ -16,6 +12,10 @@ import {
postgres,
redis,
} from ".";
import { admins } from "./admin";
import { deployments } from "./deployment";
import { sshKeys } from "./ssh-key";
import { generateAppName } from "./utils";
export const server = pgTable("server", {
serverId: text("serverId")

View File

@@ -1,74 +1,49 @@
import { promises } from "node:fs";
import dockerstats from "dockerstats";
import type Dockerode from "dockerode";
import osUtils from "node-os-utils";
import { MONITORING_PATH } from "../constants";
export const recordAdvancedStats = async (
stats: Dockerode.ContainerStats,
appName: string,
containerId: string,
) => {
await promises.mkdir(`${MONITORING_PATH}/${appName}`, { recursive: true });
const path = `${MONITORING_PATH}/${appName}`;
const result = await dockerstats.dockerContainerStats(containerId);
await promises.mkdir(path, { recursive: true });
if (!result || result.length === 0 || !result[0]) return;
const { memoryStats, cpuStats, precpuStats, netIO, blockIO } = result[0];
const memoryUsage = memoryStats.usage / 1024 / 1024;
const memoryTotal = memoryStats.limit / 1024 / 1024;
const memoryFree = memoryTotal - memoryUsage;
const memoryUsedPercentage = (memoryUsage / memoryTotal) * 100;
const cpuDelta =
cpuStats.cpu_usage.total_usage - precpuStats.cpu_usage.total_usage;
const systemDelta = cpuStats.system_cpu_usage - precpuStats.system_cpu_usage;
const onlineCpus = cpuStats.online_cpus;
// Calcular el porcentaje de uso del CPU
const cpuPercent = (cpuDelta / systemDelta) * onlineCpus * 100;
// Extraer los valores de entrada y salida del objeto netIO
const networkInBytes = netIO.rx;
const networkOutBytes = netIO.wx;
// Convertir bytes a Megabytes
const networkInMB = networkInBytes / 1024 / 1024;
const networkOutMB = networkOutBytes / 1024 / 1024;
// BlockIO
const blockRead = blockIO.r;
const blockWrite = blockIO.w;
const blockInMBBlocks = blockRead / 1024 / 1024;
const blockOutMBBlocks = blockWrite / 1024 / 1024;
// Disk
const disk = await osUtils.drive.info("/");
const diskUsage = disk.usedGb;
const diskTotal = disk.totalGb;
const diskUsedPercentage = disk.usedPercentage;
const diskFree = disk.freeGb;
const cpuPercent = calculateCpuUsagePercent(
stats.cpu_stats,
stats.precpu_stats,
);
const memoryStats = calculateMemoryStats(stats.memory_stats);
const blockIO = calculateBlockIO(stats.blkio_stats);
const networkUsage = calculateNetworkUsage(stats.networks);
await updateStatsFile(appName, "cpu", cpuPercent);
await updateStatsFile(appName, "memory", {
used: memoryUsage,
free: memoryFree,
usedPercentage: memoryUsedPercentage,
total: memoryTotal,
used: memoryStats.used,
free: memoryStats.free,
usedPercentage: memoryStats.usedPercentage,
total: memoryStats.total,
});
await updateStatsFile(appName, "block", {
readMb: blockInMBBlocks,
writeMb: blockOutMBBlocks,
readMb: blockIO.readMb,
writeMb: blockIO.writeMb,
});
await updateStatsFile(appName, "network", {
inputMb: networkInMB,
outputMb: networkOutMB,
inputMb: networkUsage.inputMb,
outputMb: networkUsage.outputMb,
});
if (appName === "dokploy") {
const disk = await osUtils.drive.info("/");
const diskUsage = disk.usedGb;
const diskTotal = disk.totalGb;
const diskUsedPercentage = disk.usedPercentage;
const diskFree = disk.freeGb;
await updateStatsFile(appName, "disk", {
diskTotal: +diskTotal,
diskUsedPercentage: +diskUsedPercentage,
@@ -143,3 +118,77 @@ export const getLastAdvancedStatsFile = async (appName: string) => {
block: await readLastValueStatsFile(appName, "block"),
};
};
/**
 * Computes the container CPU usage percentage from two consecutive
 * Docker stats samples, the same way `docker stats` does: the ratio of
 * container CPU delta to system CPU delta, scaled by core count.
 */
const calculateCpuUsagePercent = (
	cpu_stats: Dockerode.ContainerStats["cpu_stats"],
	precpu_stats: Dockerode.ContainerStats["precpu_stats"],
) => {
	const usageDelta =
		cpu_stats.cpu_usage.total_usage - precpu_stats.cpu_usage.total_usage;
	const sysDelta = cpu_stats.system_cpu_usage - precpu_stats.system_cpu_usage;
	// Guard against the first sample (both deltas zero) and bogus data.
	if (!(sysDelta > 0 && usageDelta > 0)) {
		return 0;
	}
	// Prefer the daemon-reported core count; fall back to the per-CPU array.
	const coreCount =
		cpu_stats.online_cpus ||
		(cpu_stats.cpu_usage.percpu_usage
			? cpu_stats.cpu_usage.percpu_usage.length
			: 1);
	return (usageDelta / sysDelta) * coreCount * 100.0;
};
/**
 * Derives memory figures (bytes) from raw Docker memory stats, subtracting
 * the page cache from `usage` the way `docker stats` reports it.
 * `memory_stats.stats` (and its `cache` field) are not always present —
 * cgroup v2 hosts omit `cache` — so both are accessed defensively instead
 * of throwing on `undefined.cache`.
 */
const calculateMemoryStats = (
	memory_stats: Dockerode.ContainerStats["memory_stats"],
) => {
	const cache = memory_stats.stats?.cache ?? 0;
	const usedMemory = memory_stats.usage - cache;
	const availableMemory = memory_stats.limit;
	// Avoid NaN when the daemon reports a zero or missing limit.
	const memoryUsedPercentage =
		availableMemory > 0 ? (usedMemory / availableMemory) * 100.0 : 0;
	return {
		used: usedMemory,
		free: availableMemory - usedMemory,
		usedPercentage: memoryUsedPercentage,
		total: availableMemory,
	};
};
/**
 * Totals block-device read and write bytes from Docker blkio stats and
 * converts them to MiB. The `op` field is capitalized ("Read"/"Write") on
 * cgroup v1 hosts and lower-case on cgroup v2, so the comparison is done
 * case-insensitively — the previous exact match silently dropped all
 * cgroup v1 samples.
 */
const calculateBlockIO = (
	blkio_stats: Dockerode.ContainerStats["blkio_stats"],
) => {
	let readIO = 0;
	let writeIO = 0;
	if (blkio_stats?.io_service_bytes_recursive) {
		for (const io of blkio_stats.io_service_bytes_recursive) {
			const op = io.op.toLowerCase();
			if (op === "read") {
				readIO += io.value;
			} else if (op === "write") {
				writeIO += io.value;
			}
		}
	}
	return {
		readMb: readIO / (1024 * 1024),
		writeMb: writeIO / (1024 * 1024),
	};
};
const calculateNetworkUsage = (
networks: Dockerode.ContainerStats["networks"],
) => {
let totalRx = 0;
let totalTx = 0;
const stats = Object.keys(networks);
for (const interfaceName of stats) {
const net = networks[interfaceName];
totalRx += net?.rx_bytes || 0;
totalTx += net?.tx_bytes || 0;
}
return {
inputMb: totalRx / (1024 * 1024),
outputMb: totalTx / (1024 * 1024),
};
};

View File

@@ -1,24 +1,8 @@
import { type ConnectionOptions, type Job, Queue, Worker } from "bullmq";
import { findServerById, type Server } from "../api/services/server";
import type { DeploymentJob } from "./deployments-queue";
import {
deployApplication,
rebuildApplication,
updateApplicationStatus,
} from "../api/services/application";
import {
updateCompose,
deployCompose,
rebuildCompose,
} from "../api/services/compose";
import { type ConnectionOptions, Queue } from "bullmq";
// Connection settings for the BullMQ deployments queue.
// In production Redis runs as the `dokploy-redis` service; in development
// it is expected on localhost's default port.
// NOTE(review): the previous commented-out host/password here leaked real
// credentials into source control — removed; rotate that password.
export const redisConfig: ConnectionOptions = {
	host: process.env.NODE_ENV === "production" ? "dokploy-redis" : "127.0.0.1",
};
// TODO: maybe add a options to clean the queue to the times
const myQueue = new Queue("deployments", {
connection: redisConfig,
});
@@ -38,13 +22,3 @@ myQueue.on("error", (error) => {
});
export { myQueue };
const workersMap = new Map<string, Worker>();
const queuesMap = new Map<string, Queue>();
/**
 * Builds a BullMQ/ioredis connection descriptor pointing at a remote
 * server's Redis instance on the default port.
 *
 * The port must be numeric — the previous `port: "6379"` string only
 * compiled because the `as ConnectionOptions` assertion hid the type
 * error; a declared return type keeps the object checked.
 */
function createRedisConnection(server: Server): ConnectionOptions {
	return {
		host: server.ipAddress,
		port: 6379,
	};
}

View File

@@ -2,6 +2,8 @@ import http from "node:http";
import { migration } from "@/server/db/migration";
import { config } from "dotenv";
import next from "next";
// import { IS_CLOUD } from "./constants";
import { deploymentWorker } from "./queues/deployments-queue";
// import { deploymentWorker } from "./queues/deployments-queue";
import { setupDirectories } from "./setup/config-paths";
import { initializePostgres } from "./setup/postgres-setup";
@@ -23,8 +25,6 @@ import {
getPublicIpWithFallback,
setupTerminalWebSocketServer,
} from "./wss/terminal";
import { IS_CLOUD } from "./constants";
import { deploymentWorker } from "./queues/deployments-queue";
config({ path: ".env" });
const PORT = Number.parseInt(process.env.PORT || "3000", 10);
@@ -45,18 +45,16 @@ void app.prepare().then(async () => {
setupDockerStatsMonitoringSocketServer(server);
if (process.env.NODE_ENV === "production") {
if (!IS_CLOUD) {
setupDirectories();
createDefaultMiddlewares();
setupDirectories();
createDefaultMiddlewares();
await initializeNetwork();
createDefaultTraefikConfig();
createDefaultServerTraefikConfig();
await initializePostgres();
await initializeTraefik();
await initializeRedis();
}
setupDirectories();
createDefaultMiddlewares();
setupDirectories();
createDefaultMiddlewares();
await initializeNetwork();
createDefaultTraefikConfig();
createDefaultServerTraefikConfig();
await initializePostgres();
await initializeTraefik();
await initializeRedis();
initCronJobs();
welcomeServer();

View File

@@ -1,4 +1,3 @@
import { spawnSync } from "node:child_process";
import { chmodSync, existsSync, mkdirSync } from "node:fs";
import {
APPLICATIONS_PATH,

View File

@@ -13,8 +13,8 @@ import {
writeDomainsToComposeRemote,
} from "../docker/domain";
import { encodeBase64, prepareEnvironmentVariables } from "../docker/utils";
import { spawnAsync } from "../process/spawnAsync";
import { execAsyncRemote } from "../process/execAsync";
import { spawnAsync } from "../process/spawnAsync";
export type ComposeNested = InferResultType<
"compose",

View File

@@ -1,16 +1,16 @@
import fs from "node:fs/promises";
import path, { join } from "node:path";
import type { Application } from "@/server/api/services/application";
import { findServerById } from "@/server/api/services/server";
import { APPLICATIONS_PATH } from "@/server/constants";
import AdmZip from "adm-zip";
import { Client, type SFTPWrapper } from "ssh2";
import {
recreateDirectory,
recreateDirectoryRemote,
} from "../filesystem/directory";
import type { Application } from "@/server/api/services/application";
import { execAsyncRemote } from "../process/execAsync";
import { Client, type SFTPWrapper } from "ssh2";
import { findServerById } from "@/server/api/services/server";
import { readSSHKey } from "../filesystem/ssh";
import { execAsyncRemote } from "../process/execAsync";
export const unzipDrop = async (zipFile: File, application: Application) => {
let sftp: SFTPWrapper | null = null;

View File

@@ -1,9 +1,7 @@
import { createWriteStream } from "node:fs";
// import { docker } from "@/server/constants";
import type { InferResultType } from "@/server/types/with";
import type { CreateServiceOptions } from "dockerode";
import { uploadImage } from "../cluster/upload";
import Dockerode from "dockerode";
import {
calculateResources,
generateBindMounts,
@@ -12,14 +10,12 @@ import {
generateVolumeMounts,
prepareEnvironmentVariables,
} from "../docker/utils";
import { getRemoteDocker } from "../servers/remote-docker";
import { buildCustomDocker, getDockerCommand } from "./docker-file";
import { buildHeroku, getHerokuCommand } from "./heroku";
import { buildNixpacks, getNixpacksCommand } from "./nixpacks";
import { buildPaketo, getPaketoCommand } from "./paketo";
import { buildStatic, getStaticCommand } from "./static";
import { findServerById } from "@/server/api/services/server";
import { readSSHKey } from "../filesystem/ssh";
import { getRemoteDocker } from "../servers/remote-docker";
// NIXPACKS codeDirectory = where is the path of the code directory
// HEROKU codeDirectory = where is the path of the code directory

View File

@@ -5,6 +5,7 @@ import type { Compose } from "@/server/api/services/compose";
import type { Domain } from "@/server/api/services/domain";
import { COMPOSE_PATH } from "@/server/constants";
import { dump, load } from "js-yaml";
import { execAsyncRemote } from "../process/execAsync";
import {
cloneRawBitbucketRepository,
cloneRawBitbucketRepositoryRemote,
@@ -31,7 +32,6 @@ import type {
DefinitionsService,
PropertiesNetworks,
} from "./types";
import { execAsyncRemote } from "../process/execAsync";
import { encodeBase64 } from "./utils";
export const cloneCompose = async (compose: Compose) => {

View File

@@ -1,8 +1,8 @@
import { exec } from "node:child_process";
import util from "node:util";
import { findServerById } from "@/server/api/services/server";
import { readSSHKey } from "../filesystem/ssh";
import { Client } from "ssh2";
import { readSSHKey } from "../filesystem/ssh";
export const execAsync = util.promisify(exec);
export const execAsyncRemote = async (
@@ -25,7 +25,7 @@ export const execAsyncRemote = async (
conn.exec(command, (err, stream) => {
if (err) throw err;
stream
.on("close", (code, signal) => {
.on("close", (code: number, signal: string) => {
console.log(
`Stream :: close :: code: ${code}, signal: ${signal}`,
);
@@ -55,31 +55,5 @@ export const execAsyncRemote = async (
privateKey: keys.privateKey,
timeout: 99999,
});
// client.exec(command, (err, stream) => {
// if (err) {
// client.end();
// return reject(err);
// }
// let stdout = "";
// let stderr = "";
// stream
// .on("data", (data: string) => {
// stdout += data.toString();
// })
// .on("close", (code, signal) => {
// client.end();
// if (code === 0) {
// resolve({ stdout, stderr });
// } else {
// reject(new Error(`Command exited with code ${code}`));
// }
// })
// .stderr.on("data", (data) => {
// stderr += data.toString();
// });
// });
});
};

View File

@@ -10,8 +10,8 @@ import type {
import type { InferResultType } from "@/server/types/with";
import { TRPCError } from "@trpc/server";
import { recreateDirectory } from "../filesystem/directory";
import { spawnAsync } from "../process/spawnAsync";
import { execAsyncRemote } from "../process/execAsync";
import { spawnAsync } from "../process/spawnAsync";
export type ApplicationWithBitbucket = InferResultType<
"applications",

View File

@@ -1,12 +1,12 @@
import { createWriteStream } from "node:fs";
import path, { join } from "node:path";
import type { Compose } from "@/server/api/services/compose";
import { updateSSHKeyById } from "@/server/api/services/ssh-key";
import { APPLICATIONS_PATH, COMPOSE_PATH, SSH_PATH } from "@/server/constants";
import { TRPCError } from "@trpc/server";
import { recreateDirectory } from "../filesystem/directory";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { spawnAsync } from "../process/spawnAsync";
import type { Compose } from "@/server/api/services/compose";
export const cloneGitRepository = async (
entity: {

View File

@@ -11,7 +11,6 @@ import { spawnAsync } from "../process/spawnAsync";
import type { Compose } from "@/server/api/services/compose";
import { type Github, findGithubById } from "@/server/api/services/github";
import type { apiFindGithubBranches } from "@/server/db/schema";
import { executeCommand } from "../servers/command";
import { execAsyncRemote } from "../process/execAsync";
export const authGithub = (githubProvider: Github) => {
@@ -164,7 +163,7 @@ export const getGithubCloneCommand = async (
exit 1;
`;
await executeCommand(serverId, command);
await execAsyncRemote(serverId, command);
throw new TRPCError({
code: "NOT_FOUND",
message: "GitHub Provider not found",
@@ -189,7 +188,7 @@ export const getGithubCloneCommand = async (
exit 1; # Exit with error code
`;
await executeCommand(serverId, bashCommand);
await execAsyncRemote(serverId, bashCommand);
return;
}

View File

@@ -11,9 +11,8 @@ import type { apiGitlabTestConnection } from "@/server/db/schema";
import type { InferResultType } from "@/server/types/with";
import { TRPCError } from "@trpc/server";
import { recreateDirectory } from "../filesystem/directory";
import { spawnAsync } from "../process/spawnAsync";
import { executeCommand } from "../servers/command";
import { execAsyncRemote } from "../process/execAsync";
import { spawnAsync } from "../process/spawnAsync";
export const refreshGitlabToken = async (gitlabProviderId: string) => {
const gitlabProvider = await findGitlabById(gitlabProviderId);

View File

@@ -3,9 +3,9 @@ import { writeFile } from "node:fs/promises";
import { join } from "node:path";
import type { Compose } from "@/server/api/services/compose";
import { COMPOSE_PATH } from "@/server/constants";
import { encodeBase64 } from "../docker/utils";
import { recreateDirectory } from "../filesystem/directory";
import { execAsyncRemote } from "../process/execAsync";
import { encodeBase64 } from "../docker/utils";
export const createComposeFile = async (compose: Compose, logPath: string) => {
const { appName, composeFile } = compose;

View File

@@ -1,10 +0,0 @@
import { execAsyncRemote } from "../process/execAsync";
// Thin wrapper around execAsyncRemote that logs a failure before
// rethrowing it, so callers still see the original error.
export const executeCommand = async (serverId: string, command: string) => {
	await execAsyncRemote(serverId, command).catch((err) => {
		console.error("Execution error:", err);
		throw err;
	});
};

View File

@@ -1,7 +1,7 @@
import { findServerById } from "@/server/api/services/server";
import { readSSHKey } from "../filesystem/ssh";
import Dockerode from "dockerode";
import { docker } from "@/server/constants";
import Dockerode from "dockerode";
import { readSSHKey } from "../filesystem/ssh";
export const getRemoteDocker = async (serverId: string | null) => {
if (!serverId) return docker;

View File

@@ -1,30 +1,19 @@
import { findServerById } from "@/server/api/services/server";
import { recreateDirectory } from "../filesystem/directory";
import { slugify } from "@/lib/slug";
import { createWriteStream } from "node:fs";
import path from "node:path";
import {
APPLICATIONS_PATH,
BASE_PATH,
CERTIFICATES_PATH,
DYNAMIC_TRAEFIK_PATH,
getPaths,
LOGS_PATH,
MAIN_TRAEFIK_PATH,
MONITORING_PATH,
SSH_PATH,
} from "@/server/constants";
import { slugify } from "@/lib/slug";
import {
createServerDeployment,
updateDeploymentStatus,
} from "@/server/api/services/deployment";
import { createWriteStream } from "node:fs";
import { Client } from "ssh2";
import { readSSHKey } from "../filesystem/ssh";
import { findServerById } from "@/server/api/services/server";
import { LOGS_PATH, SSH_PATH, getPaths } from "@/server/constants";
import {
getDefaultMiddlewares,
getDefaultServerTraefikConfig,
getDefaultTraefikConfig,
} from "@/server/setup/traefik-setup";
import { Client } from "ssh2";
import { recreateDirectory } from "../filesystem/directory";
import { readSSHKey } from "../filesystem/ssh";
export const setupServer = async (serverId: string) => {
const server = await findServerById(serverId);
@@ -57,8 +46,6 @@ export const setupServer = async (serverId: string) => {
}
};
// Placeholder: per-server Traefik provisioning is intentionally a no-op for
// now — presumably to be implemented later; confirm before relying on it.
const setupTraefikInstance = async (serverId: string) => {};
const connectToServer = async (serverId: string, logPath: string) => {
const writeStream = createWriteStream(logPath, { flags: "a" });
const client = new Client();

View File

@@ -3,8 +3,8 @@ import path from "node:path";
import type { Domain } from "@/server/api/services/domain";
import { DYNAMIC_TRAEFIK_PATH } from "@/server/constants";
import { dump, load } from "js-yaml";
import type { FileConfig, HttpLoadBalancerService } from "./file-types";
import { execAsyncRemote } from "../process/execAsync";
import type { FileConfig, HttpLoadBalancerService } from "./file-types";
export const createTraefikConfig = (appName: string) => {
const defaultPort = 3000;

View File

@@ -3,9 +3,9 @@ import { join } from "node:path";
import { DYNAMIC_TRAEFIK_PATH } from "@/server/constants";
import { dump, load } from "js-yaml";
import type { ApplicationNested } from "../builders";
import type { FileConfig } from "./file-types";
import { execAsyncRemote } from "../process/execAsync";
import { writeTraefikConfigRemote } from "./application";
import type { FileConfig } from "./file-types";
export const addMiddleware = (config: FileConfig, middlewareName: string) => {
if (config.http?.routers) {

View File

@@ -1,4 +1,5 @@
import type { Redirect } from "@/server/api/services/redirect";
import type { ApplicationNested } from "../builders";
import {
loadOrCreateConfig,
loadOrCreateConfigRemote,
@@ -13,7 +14,6 @@ import {
loadRemoteMiddlewares,
writeMiddleware,
} from "./middleware";
import type { ApplicationNested } from "../builders";
export const updateRedirectMiddleware = async (
application: ApplicationNested,

View File

@@ -1,5 +1,6 @@
import type { Security } from "@/server/api/services/security";
import * as bcrypt from "bcrypt";
import type { ApplicationNested } from "../builders";
import {
loadOrCreateConfig,
loadOrCreateConfigRemote,
@@ -18,7 +19,6 @@ import {
loadRemoteMiddlewares,
writeMiddleware,
} from "./middleware";
import type { ApplicationNested } from "../builders";
export const createSecurityMiddleware = async (
application: ApplicationNested,

View File

@@ -1,11 +1,11 @@
import type http from "node:http";
import { spawn } from "node-pty";
import { WebSocketServer } from "ws";
import { validateWebSocketRequest } from "../auth/auth";
import { getShell } from "./utils";
import { Client } from "ssh2";
import { WebSocketServer } from "ws";
import { findServerById } from "../api/services/server";
import { validateWebSocketRequest } from "../auth/auth";
import { readSSHKey } from "../utils/filesystem/ssh";
import { getShell } from "./utils";
export const setupDockerContainerLogsWebSocketServer = (
server: http.Server<typeof http.IncomingMessage, typeof http.ServerResponse>,

View File

@@ -46,7 +46,6 @@ export const setupDockerStatsMonitoringSocketServer = (
ws.close();
return;
}
const intervalId = setInterval(async () => {
try {
const filter = {
@@ -72,7 +71,11 @@ export const setupDockerStatsMonitoringSocketServer = (
return;
}
await recordAdvancedStats(appName, container?.Id);
const stats = await docker.getContainer(container.Id).stats({
stream: false,
});
await recordAdvancedStats(stats, appName);
const data = await getLastAdvancedStatsFile(appName);
ws.send(

View File

@@ -1,10 +1,10 @@
import { spawn } from "node:child_process";
import type http from "node:http";
import { WebSocketServer } from "ws";
import { validateWebSocketRequest } from "../auth/auth";
import { findServerById } from "../api/services/server";
import { readSSHKey } from "../utils/filesystem/ssh";
import { Client } from "ssh2";
import { WebSocketServer } from "ws";
import { findServerById } from "../api/services/server";
import { validateWebSocketRequest } from "../auth/auth";
import { readSSHKey } from "../utils/filesystem/ssh";
export const setupDeploymentLogsWebSocketServer = (
server: http.Server<typeof http.IncomingMessage, typeof http.ServerResponse>,
@@ -39,12 +39,6 @@ export const setupDeploymentLogsWebSocketServer = (
return;
}
// if (!serverId) {
// console.log("serverId no provided");
// ws.close(4000, "serverId no provided");
// return;
// }
if (!user || !session) {
ws.close();
return;

View File

@@ -1,11 +1,11 @@
import type http from "node:http";
import path from "node:path";
import { spawn } from "node-pty";
import { publicIpv4, publicIpv6 } from "public-ip";
import { WebSocketServer } from "ws";
import { validateWebSocketRequest } from "../auth/auth";
import { findServerById } from "../api/services/server";
import { validateWebSocketRequest } from "../auth/auth";
import { SSH_PATH } from "../constants";
import path from "node:path";
export const getPublicIpWithFallback = async () => {
// @ts-ignore

View File

@@ -2,10 +2,10 @@ import { randomBytes } from "node:crypto";
import { readFile } from "node:fs/promises";
import { join } from "node:path";
import type { Domain } from "@/server/api/services/domain";
// import { IS_CLOUD } from "@/server/constants";
import { TRPCError } from "@trpc/server";
import { templates } from "../templates";
import type { TemplatesKeys } from "../types/templates-data.type";
import { IS_CLOUD } from "@/server/constants";
export interface Schema {
serverIp: string;
@@ -29,7 +29,9 @@ export const generateRandomDomain = ({
}: Schema): string => {
const hash = randomBytes(3).toString("hex");
const slugIp = serverIp.replaceAll(".", "-");
return `${projectName}-${hash}${process.env.NODE_ENV === "production" || IS_CLOUD ? `-${slugIp}` : ""}.traefik.me`;
return "";
// return `${projectName}-${hash}${process.env.NODE_ENV === "production" || IS_CLOUD ? `-${slugIp}` : ""}.traefik.me`;
};
export const generateHash = (projectName: string, quantity = 3): string => {