mirror of
https://github.com/Dokploy/dokploy
synced 2025-06-26 18:27:59 +00:00
feat: add redis installation on server and deploy applications on servers
This commit is contained in:
@@ -11,8 +11,9 @@ interface Props {
|
||||
logPath: string | null;
|
||||
open: boolean;
|
||||
onClose: () => void;
|
||||
serverId: string;
|
||||
}
|
||||
export const ShowDeployment = ({ logPath, open, onClose }: Props) => {
|
||||
export const ShowDeployment = ({ logPath, open, onClose, serverId }: Props) => {
|
||||
const [data, setData] = useState("");
|
||||
const endOfLogsRef = useRef<HTMLDivElement>(null);
|
||||
|
||||
@@ -21,7 +22,7 @@ export const ShowDeployment = ({ logPath, open, onClose }: Props) => {
|
||||
|
||||
const protocol = window.location.protocol === "https:" ? "wss:" : "ws:";
|
||||
|
||||
const wsUrl = `${protocol}//${window.location.host}/listen-deployment?logPath=${logPath}`;
|
||||
const wsUrl = `${protocol}//${window.location.host}/listen-deployment?logPath=${logPath}&serverId=${serverId}`;
|
||||
const ws = new WebSocket(wsUrl);
|
||||
|
||||
ws.onmessage = (e) => {
|
||||
|
||||
@@ -25,7 +25,7 @@ export const ShowDeployments = ({ applicationId }: Props) => {
|
||||
{ applicationId },
|
||||
{
|
||||
enabled: !!applicationId,
|
||||
refetchInterval: 5000,
|
||||
refetchInterval: 1000,
|
||||
},
|
||||
);
|
||||
const [url, setUrl] = React.useState("");
|
||||
@@ -110,6 +110,7 @@ export const ShowDeployments = ({ applicationId }: Props) => {
|
||||
</div>
|
||||
)}
|
||||
<ShowDeployment
|
||||
serverId={data?.serverId || ""}
|
||||
open={activeLog !== null}
|
||||
onClose={() => setActiveLog(null)}
|
||||
logPath={activeLog}
|
||||
|
||||
@@ -72,6 +72,7 @@ export const ShowGeneralApplication = ({ applicationId }: Props) => {
|
||||
Open Terminal
|
||||
</Button>
|
||||
</DockerTerminalModal>
|
||||
{data?.server?.name || "No Server"}
|
||||
</CardContent>
|
||||
</Card>
|
||||
<ShowProviderForm applicationId={applicationId} />
|
||||
|
||||
@@ -28,6 +28,15 @@ import { useState } from "react";
|
||||
import { useForm } from "react-hook-form";
|
||||
import { toast } from "sonner";
|
||||
import { z } from "zod";
|
||||
import {
|
||||
Select,
|
||||
SelectContent,
|
||||
SelectGroup,
|
||||
SelectItem,
|
||||
SelectLabel,
|
||||
SelectTrigger,
|
||||
SelectValue,
|
||||
} from "@/components/ui/select";
|
||||
|
||||
const AddTemplateSchema = z.object({
|
||||
name: z.string().min(1, {
|
||||
@@ -43,6 +52,9 @@ const AddTemplateSchema = z.object({
|
||||
"App name supports lowercase letters, numbers, '-' and can only start and end letters, and does not support continuous '-'",
|
||||
}),
|
||||
description: z.string().optional(),
|
||||
serverId: z.string().min(1, {
|
||||
message: "Server is required",
|
||||
}),
|
||||
});
|
||||
|
||||
type AddTemplate = z.infer<typeof AddTemplateSchema>;
|
||||
@@ -54,8 +66,10 @@ interface Props {
|
||||
|
||||
export const AddApplication = ({ projectId, projectName }: Props) => {
|
||||
const utils = api.useUtils();
|
||||
|
||||
const [visible, setVisible] = useState(false);
|
||||
const slug = slugify(projectName);
|
||||
const { data: servers } = api.server.all.useQuery();
|
||||
|
||||
const { mutateAsync, isLoading, error, isError } =
|
||||
api.application.create.useMutation();
|
||||
@@ -75,6 +89,7 @@ export const AddApplication = ({ projectId, projectName }: Props) => {
|
||||
appName: data.appName,
|
||||
description: data.description,
|
||||
projectId,
|
||||
serverId: data.serverId,
|
||||
})
|
||||
.then(async () => {
|
||||
toast.success("Service Created");
|
||||
@@ -135,6 +150,39 @@ export const AddApplication = ({ projectId, projectName }: Props) => {
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="serverId"
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel>Select a Server</FormLabel>
|
||||
<Select
|
||||
onValueChange={field.onChange}
|
||||
defaultValue={field.value}
|
||||
>
|
||||
<SelectTrigger>
|
||||
<SelectValue placeholder="Select a Server" />
|
||||
</SelectTrigger>
|
||||
<SelectContent>
|
||||
<SelectGroup>
|
||||
{servers?.map((server) => (
|
||||
<SelectItem
|
||||
key={server.serverId}
|
||||
value={server.serverId}
|
||||
>
|
||||
{server.name}
|
||||
</SelectItem>
|
||||
))}
|
||||
<SelectLabel>
|
||||
Registries ({servers?.length})
|
||||
</SelectLabel>
|
||||
</SelectGroup>
|
||||
</SelectContent>
|
||||
</Select>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="appName"
|
||||
|
||||
@@ -17,6 +17,7 @@ import {
|
||||
FormLabel,
|
||||
FormMessage,
|
||||
} from "@/components/ui/form";
|
||||
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { Textarea } from "@/components/ui/textarea";
|
||||
import { api } from "@/utils/api";
|
||||
@@ -33,12 +34,16 @@ const AddProjectSchema = z.object({
|
||||
message: "Name is required",
|
||||
}),
|
||||
description: z.string().optional(),
|
||||
serverId: z.string().min(1, {
|
||||
message: "Server is required",
|
||||
}),
|
||||
});
|
||||
|
||||
type AddProject = z.infer<typeof AddProjectSchema>;
|
||||
|
||||
export const AddProject = () => {
|
||||
const utils = api.useUtils();
|
||||
const { data: servers } = api.server.all.useQuery();
|
||||
const [isOpen, setIsOpen] = useState(false);
|
||||
const { mutateAsync, error, isError } = api.project.create.useMutation();
|
||||
const router = useRouter();
|
||||
@@ -46,6 +51,7 @@ export const AddProject = () => {
|
||||
defaultValues: {
|
||||
description: "",
|
||||
name: "",
|
||||
serverId: "",
|
||||
},
|
||||
resolver: zodResolver(AddProjectSchema),
|
||||
});
|
||||
@@ -54,6 +60,7 @@ export const AddProject = () => {
|
||||
form.reset({
|
||||
description: "",
|
||||
name: "",
|
||||
serverId: "",
|
||||
});
|
||||
}, [form, form.reset, form.formState.isSubmitSuccessful]);
|
||||
|
||||
@@ -109,6 +116,7 @@ export const AddProject = () => {
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="description"
|
||||
|
||||
@@ -41,8 +41,6 @@ export const SetupServer = ({ serverId }: Props) => {
|
||||
},
|
||||
);
|
||||
|
||||
const { mutateAsync } = api.server.setup.useMutation();
|
||||
|
||||
const [activeLog, setActiveLog] = useState<string | null>(null);
|
||||
const { data: deployments, refetch } = api.deployment.allByServer.useQuery(
|
||||
{ serverId },
|
||||
@@ -52,6 +50,14 @@ export const SetupServer = ({ serverId }: Props) => {
|
||||
},
|
||||
);
|
||||
|
||||
const { mutateAsync } = api.server.setup.useMutation({
|
||||
// onMutate: async (variables) => {
|
||||
// console.log("Running....");
|
||||
// utils.deployment.allByServer.invalidate({ serverId: variables.serverId });
|
||||
// // refetch();
|
||||
// },
|
||||
});
|
||||
|
||||
return (
|
||||
<Dialog open={isOpen} onOpenChange={setIsOpen}>
|
||||
<DialogTrigger asChild>
|
||||
@@ -94,7 +100,7 @@ export const SetupServer = ({ serverId }: Props) => {
|
||||
serverId: server?.serverId || "",
|
||||
})
|
||||
.then(async () => {
|
||||
refetch();
|
||||
// refetch();
|
||||
toast.success("Server setup successfully");
|
||||
})
|
||||
.catch(() => {
|
||||
|
||||
13
apps/dokploy/drizzle/0040_bent_santa_claus.sql
Normal file
13
apps/dokploy/drizzle/0040_bent_santa_claus.sql
Normal file
@@ -0,0 +1,13 @@
|
||||
ALTER TABLE "application" ADD COLUMN "serverId" text;--> statement-breakpoint
|
||||
ALTER TABLE "compose" ADD COLUMN "serverId" text;--> statement-breakpoint
|
||||
DO $$ BEGIN
|
||||
ALTER TABLE "application" ADD CONSTRAINT "application_serverId_server_serverId_fk" FOREIGN KEY ("serverId") REFERENCES "public"."server"("serverId") ON DELETE cascade ON UPDATE no action;
|
||||
EXCEPTION
|
||||
WHEN duplicate_object THEN null;
|
||||
END $$;
|
||||
--> statement-breakpoint
|
||||
DO $$ BEGIN
|
||||
ALTER TABLE "compose" ADD CONSTRAINT "compose_serverId_server_serverId_fk" FOREIGN KEY ("serverId") REFERENCES "public"."server"("serverId") ON DELETE cascade ON UPDATE no action;
|
||||
EXCEPTION
|
||||
WHEN duplicate_object THEN null;
|
||||
END $$;
|
||||
1
apps/dokploy/drizzle/0041_mute_polaris.sql
Normal file
1
apps/dokploy/drizzle/0041_mute_polaris.sql
Normal file
@@ -0,0 +1 @@
|
||||
ALTER TABLE "server" ADD COLUMN "redisPassword" text DEFAULT 'xYBugfHkULig1iLN' NOT NULL;
|
||||
3721
apps/dokploy/drizzle/meta/0040_snapshot.json
Normal file
3721
apps/dokploy/drizzle/meta/0040_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
3728
apps/dokploy/drizzle/meta/0041_snapshot.json
Normal file
3728
apps/dokploy/drizzle/meta/0041_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
@@ -281,6 +281,20 @@
|
||||
"when": 1725776089878,
|
||||
"tag": "0039_military_doctor_faustus",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 40,
|
||||
"version": "6",
|
||||
"when": 1725812869228,
|
||||
"tag": "0040_bent_santa_claus",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 41,
|
||||
"version": "6",
|
||||
"when": 1725830160928,
|
||||
"tag": "0041_mute_polaris",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -23,7 +23,11 @@ import {
|
||||
type DeploymentJob,
|
||||
cleanQueuesByApplication,
|
||||
} from "@/server/queues/deployments-queue";
|
||||
import { myQueue } from "@/server/queues/queueSetup";
|
||||
import {
|
||||
enqueueDeploymentJob,
|
||||
myQueue,
|
||||
redisConfig,
|
||||
} from "@/server/queues/queueSetup";
|
||||
import {
|
||||
removeService,
|
||||
startService,
|
||||
@@ -55,6 +59,7 @@ import { addNewService, checkServiceAccess } from "../services/user";
|
||||
|
||||
import { unzipDrop } from "@/server/utils/builders/drop";
|
||||
import { uploadFileSchema } from "@/utils/schema";
|
||||
import { Queue } from "bullmq";
|
||||
|
||||
export const applicationRouter = createTRPCRouter({
|
||||
create: protectedProcedure
|
||||
@@ -306,7 +311,8 @@ export const applicationRouter = createTRPCRouter({
|
||||
}),
|
||||
deploy: protectedProcedure
|
||||
.input(apiFindOneApplication)
|
||||
.mutation(async ({ input }) => {
|
||||
.mutation(async ({ input, ctx }) => {
|
||||
const application = await findApplicationById(input.applicationId);
|
||||
const jobData: DeploymentJob = {
|
||||
applicationId: input.applicationId,
|
||||
titleLog: "Manual deployment",
|
||||
@@ -314,14 +320,10 @@ export const applicationRouter = createTRPCRouter({
|
||||
type: "deploy",
|
||||
applicationType: "application",
|
||||
};
|
||||
await myQueue.add(
|
||||
"deployments",
|
||||
{ ...jobData },
|
||||
{
|
||||
removeOnComplete: true,
|
||||
removeOnFail: true,
|
||||
},
|
||||
);
|
||||
if (!application.serverId) {
|
||||
} else {
|
||||
await enqueueDeploymentJob(application.serverId, jobData);
|
||||
}
|
||||
}),
|
||||
|
||||
cleanQueues: protectedProcedure
|
||||
|
||||
@@ -1,28 +1,43 @@
|
||||
import { docker } from "@/server/constants";
|
||||
import { db } from "@/server/db";
|
||||
import {
|
||||
type apiCreateApplication,
|
||||
applications,
|
||||
domains,
|
||||
} from "@/server/db/schema";
|
||||
import { type apiCreateApplication, applications } from "@/server/db/schema";
|
||||
import { generateAppName } from "@/server/db/schema/utils";
|
||||
import { getAdvancedStats } from "@/server/monitoring/utilts";
|
||||
import { buildApplication } from "@/server/utils/builders";
|
||||
import { buildDocker } from "@/server/utils/providers/docker";
|
||||
import { cloneGitRepository } from "@/server/utils/providers/git";
|
||||
import { cloneGithubRepository } from "@/server/utils/providers/github";
|
||||
import {
|
||||
buildApplication,
|
||||
getBuildCommand,
|
||||
mechanizeDockerContainer,
|
||||
} from "@/server/utils/builders";
|
||||
import {
|
||||
buildDocker,
|
||||
buildRemoteDocker,
|
||||
} from "@/server/utils/providers/docker";
|
||||
import {
|
||||
cloneGitRepository,
|
||||
getCustomGitCloneCommand,
|
||||
} from "@/server/utils/providers/git";
|
||||
import {
|
||||
cloneGithubRepository,
|
||||
getGithubCloneCommand,
|
||||
} from "@/server/utils/providers/github";
|
||||
import { createTraefikConfig } from "@/server/utils/traefik/application";
|
||||
import { generatePassword } from "@/templates/utils";
|
||||
import { TRPCError } from "@trpc/server";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { findAdmin, getDokployUrl } from "./admin";
|
||||
import { getDokployUrl } from "./admin";
|
||||
import { createDeployment, updateDeploymentStatus } from "./deployment";
|
||||
|
||||
import { sendBuildErrorNotifications } from "@/server/utils/notifications/build-error";
|
||||
import { sendBuildSuccessNotifications } from "@/server/utils/notifications/build-success";
|
||||
import { cloneBitbucketRepository } from "@/server/utils/providers/bitbucket";
|
||||
import { cloneGitlabRepository } from "@/server/utils/providers/gitlab";
|
||||
import {
|
||||
cloneBitbucketRepository,
|
||||
getBitbucketCloneCommand,
|
||||
} from "@/server/utils/providers/bitbucket";
|
||||
import {
|
||||
cloneGitlabRepository,
|
||||
getGitlabCloneCommand,
|
||||
} from "@/server/utils/providers/gitlab";
|
||||
import { validUniqueServerAppName } from "./project";
|
||||
import { executeCommand } from "@/server/utils/servers/command";
|
||||
export type Application = typeof applications.$inferSelect;
|
||||
|
||||
export const createApplication = async (
|
||||
@@ -80,6 +95,7 @@ export const findApplicationById = async (applicationId: string) => {
|
||||
gitlab: true,
|
||||
github: true,
|
||||
bitbucket: true,
|
||||
server: true,
|
||||
},
|
||||
});
|
||||
if (!application) {
|
||||
@@ -147,23 +163,54 @@ export const deployApplication = async ({
|
||||
});
|
||||
|
||||
try {
|
||||
if (application.sourceType === "github") {
|
||||
await cloneGithubRepository(application, deployment.logPath);
|
||||
await buildApplication(application, deployment.logPath);
|
||||
} else if (application.sourceType === "gitlab") {
|
||||
await cloneGitlabRepository(application, deployment.logPath);
|
||||
await buildApplication(application, deployment.logPath);
|
||||
} else if (application.sourceType === "bitbucket") {
|
||||
await cloneBitbucketRepository(application, deployment.logPath);
|
||||
await buildApplication(application, deployment.logPath);
|
||||
} else if (application.sourceType === "docker") {
|
||||
await buildDocker(application, deployment.logPath);
|
||||
} else if (application.sourceType === "git") {
|
||||
await cloneGitRepository(application, deployment.logPath);
|
||||
await buildApplication(application, deployment.logPath);
|
||||
} else if (application.sourceType === "drop") {
|
||||
await buildApplication(application, deployment.logPath);
|
||||
if (application.serverId) {
|
||||
let command = `
|
||||
set -e
|
||||
`;
|
||||
if (application.sourceType === "github") {
|
||||
command += await getGithubCloneCommand(application, deployment.logPath);
|
||||
console.log(application);
|
||||
command += getBuildCommand(application, deployment.logPath);
|
||||
} else if (application.sourceType === "gitlab") {
|
||||
command += await getGitlabCloneCommand(application, deployment.logPath);
|
||||
command += getBuildCommand(application, deployment.logPath);
|
||||
} else if (application.sourceType === "bitbucket") {
|
||||
command += await getBitbucketCloneCommand(
|
||||
application,
|
||||
deployment.logPath,
|
||||
);
|
||||
command += getBuildCommand(application, deployment.logPath);
|
||||
} else if (application.sourceType === "git") {
|
||||
command += await getCustomGitCloneCommand(
|
||||
application,
|
||||
deployment.logPath,
|
||||
);
|
||||
command += getBuildCommand(application, deployment.logPath);
|
||||
} else if (application.sourceType === "docker") {
|
||||
command += await buildRemoteDocker(application, deployment.logPath);
|
||||
}
|
||||
await executeCommand(application.serverId, command);
|
||||
await mechanizeDockerContainer(application);
|
||||
} else {
|
||||
if (application.sourceType === "github") {
|
||||
await cloneGithubRepository(application, deployment.logPath);
|
||||
await buildApplication(application, deployment.logPath);
|
||||
} else if (application.sourceType === "gitlab") {
|
||||
await cloneGitlabRepository(application, deployment.logPath);
|
||||
await buildApplication(application, deployment.logPath);
|
||||
} else if (application.sourceType === "bitbucket") {
|
||||
await cloneBitbucketRepository(application, deployment.logPath);
|
||||
await buildApplication(application, deployment.logPath);
|
||||
} else if (application.sourceType === "docker") {
|
||||
await buildDocker(application, deployment.logPath);
|
||||
} else if (application.sourceType === "git") {
|
||||
await cloneGitRepository(application, deployment.logPath);
|
||||
await buildApplication(application, deployment.logPath);
|
||||
} else if (application.sourceType === "drop") {
|
||||
await buildApplication(application, deployment.logPath);
|
||||
}
|
||||
}
|
||||
|
||||
await updateDeploymentStatus(deployment.deploymentId, "done");
|
||||
await updateApplicationStatus(applicationId, "done");
|
||||
|
||||
|
||||
@@ -15,6 +15,7 @@ import { desc, eq } from "drizzle-orm";
|
||||
import { type Application, findApplicationById } from "./application";
|
||||
import { type Compose, findComposeById } from "./compose";
|
||||
import { findServerById, type Server } from "./server";
|
||||
import { executeCommand } from "@/server/utils/servers/command";
|
||||
|
||||
export type Deployment = typeof deployments.$inferSelect;
|
||||
|
||||
@@ -43,14 +44,27 @@ export const createDeployment = async (
|
||||
try {
|
||||
const application = await findApplicationById(deployment.applicationId);
|
||||
|
||||
await removeLastTenDeployments(deployment.applicationId);
|
||||
// await removeLastTenDeployments(deployment.applicationId);
|
||||
const formattedDateTime = format(new Date(), "yyyy-MM-dd:HH:mm:ss");
|
||||
const fileName = `${application.appName}-${formattedDateTime}.log`;
|
||||
const logFilePath = path.join(LOGS_PATH, application.appName, fileName);
|
||||
await fsPromises.mkdir(path.join(LOGS_PATH, application.appName), {
|
||||
recursive: true,
|
||||
});
|
||||
await fsPromises.writeFile(logFilePath, "Initializing deployment");
|
||||
|
||||
if (application.serverId) {
|
||||
const server = await findServerById(application.serverId);
|
||||
|
||||
const command = `
|
||||
mkdir -p ${LOGS_PATH}/${application.appName};
|
||||
echo "Initializing deployment" >> ${logFilePath};
|
||||
`;
|
||||
|
||||
await executeCommand(server.serverId, command);
|
||||
} else {
|
||||
await fsPromises.mkdir(path.join(LOGS_PATH, application.appName), {
|
||||
recursive: true,
|
||||
});
|
||||
await fsPromises.writeFile(logFilePath, "Initializing deployment");
|
||||
}
|
||||
|
||||
const deploymentCreate = await db
|
||||
.insert(deployments)
|
||||
.values({
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { db } from "@/server/db";
|
||||
import { type apiCreateServer, server } from "@/server/db/schema";
|
||||
import { generatePassword } from "@/templates/utils";
|
||||
import { TRPCError } from "@trpc/server";
|
||||
import { eq } from "drizzle-orm";
|
||||
|
||||
@@ -14,6 +15,7 @@ export const createServer = async (
|
||||
.values({
|
||||
...input,
|
||||
adminId: adminId,
|
||||
redisPassword: generatePassword(12),
|
||||
})
|
||||
.returning()
|
||||
.then((value) => value[0]);
|
||||
|
||||
@@ -8,9 +8,9 @@ export const BASE_PATH =
|
||||
export const IS_CLOUD = process.env.IS_CLOUD === "true";
|
||||
export const MAIN_TRAEFIK_PATH = `${BASE_PATH}/traefik`;
|
||||
export const DYNAMIC_TRAEFIK_PATH = `${BASE_PATH}/traefik/dynamic`;
|
||||
export const LOGS_PATH = `${BASE_PATH}/logs`;
|
||||
export const APPLICATIONS_PATH = `${BASE_PATH}/applications`;
|
||||
export const COMPOSE_PATH = `${BASE_PATH}/compose`;
|
||||
export const LOGS_PATH = `/etc/dokploy/logs`;
|
||||
export const APPLICATIONS_PATH = `/etc/dokploy/applications`;
|
||||
export const COMPOSE_PATH = `/etc/dokploy/compose`;
|
||||
export const SSH_PATH = `${BASE_PATH}/ssh`;
|
||||
export const CERTIFICATES_PATH = `${DYNAMIC_TRAEFIK_PATH}/certificates`;
|
||||
export const REGISTRY_PATH = `${DYNAMIC_TRAEFIK_PATH}/registry`;
|
||||
|
||||
@@ -10,7 +10,7 @@ import {
|
||||
import { createInsertSchema } from "drizzle-zod";
|
||||
import { nanoid } from "nanoid";
|
||||
import { z } from "zod";
|
||||
import { bitbucket, github, gitlab } from ".";
|
||||
import { bitbucket, github, gitlab, server } from ".";
|
||||
import { deployments } from "./deployment";
|
||||
import { domains } from "./domain";
|
||||
import { mounts } from "./mount";
|
||||
@@ -193,6 +193,9 @@ export const applications = pgTable("application", {
|
||||
bitbucketId: text("bitbucketId").references(() => bitbucket.bitbucketId, {
|
||||
onDelete: "set null",
|
||||
}),
|
||||
serverId: text("serverId").references(() => server.serverId, {
|
||||
onDelete: "cascade",
|
||||
}),
|
||||
});
|
||||
|
||||
export const applicationsRelations = relations(
|
||||
@@ -228,6 +231,10 @@ export const applicationsRelations = relations(
|
||||
fields: [applications.bitbucketId],
|
||||
references: [bitbucket.bitbucketId],
|
||||
}),
|
||||
server: one(server, {
|
||||
fields: [applications.serverId],
|
||||
references: [server.serverId],
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
@@ -373,6 +380,7 @@ export const apiCreateApplication = createSchema.pick({
|
||||
appName: true,
|
||||
description: true,
|
||||
projectId: true,
|
||||
serverId: true,
|
||||
});
|
||||
|
||||
export const apiFindOneApplication = createSchema
|
||||
|
||||
@@ -4,7 +4,7 @@ import { boolean, integer, pgEnum, pgTable, text } from "drizzle-orm/pg-core";
|
||||
import { createInsertSchema } from "drizzle-zod";
|
||||
import { nanoid } from "nanoid";
|
||||
import { z } from "zod";
|
||||
import { bitbucket, github, gitlab } from ".";
|
||||
import { bitbucket, github, gitlab, server } from ".";
|
||||
import { deployments } from "./deployment";
|
||||
import { domains } from "./domain";
|
||||
import { mounts } from "./mount";
|
||||
@@ -83,6 +83,9 @@ export const compose = pgTable("compose", {
|
||||
bitbucketId: text("bitbucketId").references(() => bitbucket.bitbucketId, {
|
||||
onDelete: "set null",
|
||||
}),
|
||||
serverId: text("serverId").references(() => server.serverId, {
|
||||
onDelete: "cascade",
|
||||
}),
|
||||
});
|
||||
|
||||
export const composeRelations = relations(compose, ({ one, many }) => ({
|
||||
@@ -109,6 +112,10 @@ export const composeRelations = relations(compose, ({ one, many }) => ({
|
||||
fields: [compose.bitbucketId],
|
||||
references: [bitbucket.bitbucketId],
|
||||
}),
|
||||
server: one(server, {
|
||||
fields: [compose.serverId],
|
||||
references: [server.serverId],
|
||||
}),
|
||||
}));
|
||||
|
||||
const createSchema = createInsertSchema(compose, {
|
||||
|
||||
@@ -7,6 +7,7 @@ import { admins } from "./admin";
|
||||
import { generateAppName } from "./utils";
|
||||
import { deployments } from "./deployment";
|
||||
import { sshKeys } from "./ssh-key";
|
||||
import { applications, compose } from ".";
|
||||
|
||||
export const server = pgTable("server", {
|
||||
serverId: text("serverId")
|
||||
@@ -21,6 +22,7 @@ export const server = pgTable("server", {
|
||||
appName: text("appName")
|
||||
.notNull()
|
||||
.$defaultFn(() => generateAppName("server")),
|
||||
redisPassword: text("redisPassword").notNull().default("xYBugfHkULig1iLN"),
|
||||
createdAt: text("createdAt")
|
||||
.notNull()
|
||||
.$defaultFn(() => new Date().toISOString()),
|
||||
@@ -42,6 +44,8 @@ export const serverRelations = relations(server, ({ one, many }) => ({
|
||||
fields: [server.sshKeyId],
|
||||
references: [sshKeys.sshKeyId],
|
||||
}),
|
||||
applications: many(applications),
|
||||
compose: many(compose),
|
||||
}));
|
||||
|
||||
const createSchema = createInsertSchema(server, {
|
||||
|
||||
@@ -29,52 +29,52 @@ type DeployJob =
|
||||
|
||||
export type DeploymentJob = DeployJob;
|
||||
|
||||
export const deploymentWorker = new Worker(
|
||||
"deployments",
|
||||
async (job: Job<DeploymentJob>) => {
|
||||
try {
|
||||
if (job.data.applicationType === "application") {
|
||||
await updateApplicationStatus(job.data.applicationId, "running");
|
||||
if (job.data.type === "redeploy") {
|
||||
await rebuildApplication({
|
||||
applicationId: job.data.applicationId,
|
||||
titleLog: job.data.titleLog,
|
||||
descriptionLog: job.data.descriptionLog,
|
||||
});
|
||||
} else if (job.data.type === "deploy") {
|
||||
await deployApplication({
|
||||
applicationId: job.data.applicationId,
|
||||
titleLog: job.data.titleLog,
|
||||
descriptionLog: job.data.descriptionLog,
|
||||
});
|
||||
}
|
||||
} else if (job.data.applicationType === "compose") {
|
||||
await updateCompose(job.data.composeId, {
|
||||
composeStatus: "running",
|
||||
});
|
||||
if (job.data.type === "deploy") {
|
||||
await deployCompose({
|
||||
composeId: job.data.composeId,
|
||||
titleLog: job.data.titleLog,
|
||||
descriptionLog: job.data.descriptionLog,
|
||||
});
|
||||
} else if (job.data.type === "redeploy") {
|
||||
await rebuildCompose({
|
||||
composeId: job.data.composeId,
|
||||
titleLog: job.data.titleLog,
|
||||
descriptionLog: job.data.descriptionLog,
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.log("Error", error);
|
||||
}
|
||||
},
|
||||
{
|
||||
autorun: false,
|
||||
connection: redisConfig,
|
||||
},
|
||||
);
|
||||
// export const deploymentWorker = new Worker(
|
||||
// "deployments",
|
||||
// async (job: Job<DeploymentJob>) => {
|
||||
// try {
|
||||
// if (job.data.applicationType === "application") {
|
||||
// await updateApplicationStatus(job.data.applicationId, "running");
|
||||
// if (job.data.type === "redeploy") {
|
||||
// await rebuildApplication({
|
||||
// applicationId: job.data.applicationId,
|
||||
// titleLog: job.data.titleLog,
|
||||
// descriptionLog: job.data.descriptionLog,
|
||||
// });
|
||||
// } else if (job.data.type === "deploy") {
|
||||
// await deployApplication({
|
||||
// applicationId: job.data.applicationId,
|
||||
// titleLog: job.data.titleLog,
|
||||
// descriptionLog: job.data.descriptionLog,
|
||||
// });
|
||||
// }
|
||||
// } else if (job.data.applicationType === "compose") {
|
||||
// await updateCompose(job.data.composeId, {
|
||||
// composeStatus: "running",
|
||||
// });
|
||||
// if (job.data.type === "deploy") {
|
||||
// await deployCompose({
|
||||
// composeId: job.data.composeId,
|
||||
// titleLog: job.data.titleLog,
|
||||
// descriptionLog: job.data.descriptionLog,
|
||||
// });
|
||||
// } else if (job.data.type === "redeploy") {
|
||||
// await rebuildCompose({
|
||||
// composeId: job.data.composeId,
|
||||
// titleLog: job.data.titleLog,
|
||||
// descriptionLog: job.data.descriptionLog,
|
||||
// });
|
||||
// }
|
||||
// }
|
||||
// } catch (error) {
|
||||
// console.log("Error", error);
|
||||
// }
|
||||
// },
|
||||
// {
|
||||
// autorun: false,
|
||||
// connection: redisConfig,
|
||||
// },
|
||||
// );
|
||||
|
||||
export const cleanQueuesByApplication = async (applicationId: string) => {
|
||||
const jobs = await myQueue.getJobs(["waiting", "delayed"]);
|
||||
|
||||
@@ -1,4 +1,10 @@
|
||||
import { type ConnectionOptions, Queue } from "bullmq";
|
||||
import { type ConnectionOptions, type Job, Queue, Worker } from "bullmq";
|
||||
import { findServerById, type Server } from "../api/services/server";
|
||||
import type { DeploymentJob } from "./deployments-queue";
|
||||
import {
|
||||
deployApplication,
|
||||
updateApplicationStatus,
|
||||
} from "../api/services/application";
|
||||
|
||||
export const redisConfig: ConnectionOptions = {
|
||||
host: "31.220.108.27",
|
||||
@@ -26,3 +32,66 @@ myQueue.on("error", (error) => {
|
||||
});
|
||||
|
||||
export { myQueue };
|
||||
|
||||
const workersMap = new Map<string, Worker>();
|
||||
const queuesMap = new Map<string, Queue>();
|
||||
|
||||
function createRedisConnection(server: Server) {
|
||||
return {
|
||||
host: server.ipAddress,
|
||||
port: "6379",
|
||||
} as ConnectionOptions;
|
||||
}
|
||||
|
||||
async function setupServerQueueAndWorker(server: Server) {
|
||||
const connection = createRedisConnection(server);
|
||||
|
||||
if (!workersMap.has(server.serverId)) {
|
||||
const queue = new Queue(`deployments-${server.serverId}`, {
|
||||
connection,
|
||||
});
|
||||
const worker = new Worker(
|
||||
`deployments-${server.serverId}`,
|
||||
async (job: Job<DeploymentJob>) => {
|
||||
// Ejecuta el trabajo de despliegue
|
||||
if (job.data.applicationType === "application") {
|
||||
await updateApplicationStatus(job.data.applicationId, "running");
|
||||
if (job.data.type === "deploy") {
|
||||
await deployApplication({
|
||||
applicationId: job.data.applicationId,
|
||||
titleLog: job.data.titleLog,
|
||||
descriptionLog: job.data.descriptionLog,
|
||||
});
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
limiter: {
|
||||
max: 1,
|
||||
duration: 1000,
|
||||
},
|
||||
connection,
|
||||
},
|
||||
);
|
||||
// Almacena worker y queue para reutilizar
|
||||
workersMap.set(server.serverId, worker);
|
||||
queuesMap.set(server.serverId, queue);
|
||||
}
|
||||
return {
|
||||
queue: queuesMap.get(server.serverId),
|
||||
worker: workersMap.get(server.serverId),
|
||||
};
|
||||
}
|
||||
|
||||
export async function enqueueDeploymentJob(
|
||||
serverId: string,
|
||||
jobData: DeploymentJob,
|
||||
) {
|
||||
const server = await findServerById(serverId);
|
||||
const { queue } = await setupServerQueueAndWorker(server);
|
||||
|
||||
await queue?.add(`deployments-${serverId}`, jobData, {
|
||||
removeOnComplete: true,
|
||||
removeOnFail: true,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@ import http from "node:http";
|
||||
import { migration } from "@/server/db/migration";
|
||||
import { config } from "dotenv";
|
||||
import next from "next";
|
||||
import { deploymentWorker } from "./queues/deployments-queue";
|
||||
// import { deploymentWorker } from "./queues/deployments-queue";
|
||||
import { setupDirectories } from "./setup/config-paths";
|
||||
import { initializePostgres } from "./setup/postgres-setup";
|
||||
import { initializeRedis } from "./setup/redis-setup";
|
||||
@@ -23,6 +23,7 @@ import {
|
||||
getPublicIpWithFallback,
|
||||
setupTerminalWebSocketServer,
|
||||
} from "./wss/terminal";
|
||||
import { IS_CLOUD } from "./constants";
|
||||
|
||||
config({ path: ".env" });
|
||||
const PORT = Number.parseInt(process.env.PORT || "3000", 10);
|
||||
@@ -43,14 +44,19 @@ void app.prepare().then(async () => {
|
||||
setupDockerStatsMonitoringSocketServer(server);
|
||||
|
||||
if (process.env.NODE_ENV === "production") {
|
||||
setupDirectories();
|
||||
createDefaultMiddlewares();
|
||||
await initializeNetwork();
|
||||
createDefaultTraefikConfig();
|
||||
createDefaultServerTraefikConfig();
|
||||
await initializePostgres();
|
||||
await initializeTraefik();
|
||||
await initializeRedis();
|
||||
if (!IS_CLOUD) {
|
||||
setupDirectories();
|
||||
createDefaultMiddlewares();
|
||||
setupDirectories();
|
||||
createDefaultMiddlewares();
|
||||
await initializeNetwork();
|
||||
createDefaultTraefikConfig();
|
||||
createDefaultServerTraefikConfig();
|
||||
await initializePostgres();
|
||||
await initializeTraefik();
|
||||
await initializeRedis();
|
||||
}
|
||||
|
||||
initCronJobs();
|
||||
welcomeServer();
|
||||
|
||||
@@ -62,7 +68,7 @@ void app.prepare().then(async () => {
|
||||
|
||||
server.listen(PORT);
|
||||
console.log("Server Started:", PORT);
|
||||
deploymentWorker.run();
|
||||
// deploymentWorker.run();
|
||||
} catch (e) {
|
||||
console.error("Main Server Error", e);
|
||||
}
|
||||
|
||||
@@ -153,17 +153,7 @@ export const createDefaultServerTraefikConfig = () => {
|
||||
);
|
||||
};
|
||||
|
||||
export const createDefaultTraefikConfig = () => {
|
||||
const mainConfig = path.join(MAIN_TRAEFIK_PATH, "traefik.yml");
|
||||
const acmeJsonPath = path.join(DYNAMIC_TRAEFIK_PATH, "acme.json");
|
||||
|
||||
if (existsSync(acmeJsonPath)) {
|
||||
chmodSync(acmeJsonPath, "600");
|
||||
}
|
||||
if (existsSync(mainConfig)) {
|
||||
console.log("Main config already exists");
|
||||
return;
|
||||
}
|
||||
export const getDefaultTraefikConfig = () => {
|
||||
const configObject: MainTraefikConfig = {
|
||||
providers: {
|
||||
...(process.env.NODE_ENV === "development"
|
||||
@@ -221,16 +211,27 @@ export const createDefaultTraefikConfig = () => {
|
||||
};
|
||||
|
||||
const yamlStr = dump(configObject);
|
||||
|
||||
return yamlStr;
|
||||
};
|
||||
|
||||
export const createDefaultTraefikConfig = () => {
|
||||
const mainConfig = path.join(MAIN_TRAEFIK_PATH, "traefik.yml");
|
||||
const acmeJsonPath = path.join(DYNAMIC_TRAEFIK_PATH, "acme.json");
|
||||
|
||||
if (existsSync(acmeJsonPath)) {
|
||||
chmodSync(acmeJsonPath, "600");
|
||||
}
|
||||
if (existsSync(mainConfig)) {
|
||||
console.log("Main config already exists");
|
||||
return;
|
||||
}
|
||||
const yamlStr = getDefaultTraefikConfig();
|
||||
mkdirSync(MAIN_TRAEFIK_PATH, { recursive: true });
|
||||
writeFileSync(mainConfig, yamlStr, "utf8");
|
||||
};
|
||||
|
||||
export const createDefaultMiddlewares = () => {
|
||||
const middlewaresPath = path.join(DYNAMIC_TRAEFIK_PATH, "middlewares.yml");
|
||||
if (existsSync(middlewaresPath)) {
|
||||
console.log("Default middlewares already exists");
|
||||
return;
|
||||
}
|
||||
export const getDefaultMiddlewares = () => {
|
||||
const defaultMiddlewares = {
|
||||
http: {
|
||||
middlewares: {
|
||||
@@ -244,6 +245,15 @@ export const createDefaultMiddlewares = () => {
|
||||
},
|
||||
};
|
||||
const yamlStr = dump(defaultMiddlewares);
|
||||
return yamlStr;
|
||||
};
|
||||
export const createDefaultMiddlewares = () => {
|
||||
const middlewaresPath = path.join(DYNAMIC_TRAEFIK_PATH, "middlewares.yml");
|
||||
if (existsSync(middlewaresPath)) {
|
||||
console.log("Default middlewares already exists");
|
||||
return;
|
||||
}
|
||||
const yamlStr = getDefaultMiddlewares();
|
||||
mkdirSync(DYNAMIC_TRAEFIK_PATH, { recursive: true });
|
||||
writeFileSync(middlewaresPath, yamlStr, "utf8");
|
||||
};
|
||||
|
||||
@@ -57,3 +57,44 @@ export const buildCustomDocker = async (
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
export const getDockerCommand = (
|
||||
application: ApplicationNested,
|
||||
logPath: string,
|
||||
) => {
|
||||
const { appName, env, publishDirectory, buildArgs, dockerBuildStage } =
|
||||
application;
|
||||
const dockerFilePath = getBuildAppDirectory(application);
|
||||
|
||||
try {
|
||||
const image = `${appName}`;
|
||||
|
||||
const defaultContextPath =
|
||||
dockerFilePath.substring(0, dockerFilePath.lastIndexOf("/") + 1) || ".";
|
||||
const args = prepareEnvironmentVariables(buildArgs);
|
||||
|
||||
const dockerContextPath =
|
||||
getDockerContextPath(application) || defaultContextPath;
|
||||
|
||||
const commandArgs = ["build", "-t", image, "-f", dockerFilePath, "."];
|
||||
|
||||
if (dockerBuildStage) {
|
||||
commandArgs.push("--target", dockerBuildStage);
|
||||
}
|
||||
|
||||
for (const arg of args) {
|
||||
commandArgs.push("--build-arg", arg);
|
||||
}
|
||||
|
||||
const command = `
|
||||
echo "Building ${appName}" >> ${logPath};
|
||||
cd ${dockerContextPath} || exit 1;
|
||||
docker ${commandArgs.join(" ")} >> ${logPath} 2>&1;
|
||||
echo "Docker build completed." >> ${logPath};
|
||||
`;
|
||||
|
||||
return command;
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
@@ -38,3 +38,35 @@ export const buildHeroku = async (
|
||||
throw e;
|
||||
}
|
||||
};
|
||||
|
||||
export const getHerokuCommand = (
|
||||
application: ApplicationNested,
|
||||
logPath: string,
|
||||
) => {
|
||||
const { env, appName } = application;
|
||||
|
||||
const buildAppDirectory = getBuildAppDirectory(application);
|
||||
const envVariables = prepareEnvironmentVariables(env);
|
||||
|
||||
const args = [
|
||||
"build",
|
||||
appName,
|
||||
"--path",
|
||||
buildAppDirectory,
|
||||
"--builder",
|
||||
"heroku/builder:24",
|
||||
];
|
||||
|
||||
for (const env of envVariables) {
|
||||
args.push("--env", env);
|
||||
}
|
||||
|
||||
const command = `pack ${args.join(" ")}`;
|
||||
const bashCommand = `
|
||||
echo "Starting heroku build..." >> ${logPath};
|
||||
${command} >> ${logPath} 2>&1;
|
||||
echo "Heroku build completed." >> ${logPath};
|
||||
`;
|
||||
|
||||
return bashCommand;
|
||||
};
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
import { createWriteStream } from "node:fs";
|
||||
import { docker } from "@/server/constants";
|
||||
// import { docker } from "@/server/constants";
|
||||
import type { InferResultType } from "@/server/types/with";
|
||||
import type { CreateServiceOptions } from "dockerode";
|
||||
import { uploadImage } from "../cluster/upload";
|
||||
import Dockerode from "dockerode";
|
||||
import {
|
||||
calculateResources,
|
||||
generateBindMounts,
|
||||
@@ -11,11 +12,14 @@ import {
|
||||
generateVolumeMounts,
|
||||
prepareEnvironmentVariables,
|
||||
} from "../docker/utils";
|
||||
import { buildCustomDocker } from "./docker-file";
|
||||
import { buildHeroku } from "./heroku";
|
||||
import { buildNixpacks } from "./nixpacks";
|
||||
import { buildPaketo } from "./paketo";
|
||||
import { buildCustomDocker, getDockerCommand } from "./docker-file";
|
||||
import { buildHeroku, getHerokuCommand } from "./heroku";
|
||||
import { buildNixpacks, getNixpacksCommand } from "./nixpacks";
|
||||
import { buildPaketo, getPaketoCommand } from "./paketo";
|
||||
import { buildStatic } from "./static";
|
||||
import { findServerById } from "@/server/api/services/server";
|
||||
import { readSSHKey } from "../filesystem/ssh";
|
||||
import { getRemoteDocker } from "../servers/remote-docker";
|
||||
|
||||
// NIXPACKS codeDirectory = where is the path of the code directory
|
||||
// HEROKU codeDirectory = where is the path of the code directory
|
||||
@@ -65,6 +69,25 @@ export const buildApplication = async (
|
||||
}
|
||||
};
|
||||
|
||||
export const getBuildCommand = (
|
||||
application: ApplicationNested,
|
||||
logPath: string,
|
||||
) => {
|
||||
const { buildType } = application;
|
||||
switch (buildType) {
|
||||
case "nixpacks":
|
||||
return getNixpacksCommand(application, logPath);
|
||||
case "heroku_buildpacks":
|
||||
return getHerokuCommand(application, logPath);
|
||||
case "paketo_buildpacks":
|
||||
return getPaketoCommand(application, logPath);
|
||||
// case "static":
|
||||
// return buildStatic(application, writeStream);
|
||||
case "dockerfile":
|
||||
return getDockerCommand(application, logPath);
|
||||
}
|
||||
};
|
||||
|
||||
export const mechanizeDockerContainer = async (
|
||||
application: ApplicationNested,
|
||||
) => {
|
||||
@@ -106,6 +129,21 @@ export const mechanizeDockerContainer = async (
|
||||
|
||||
const image = getImageName(application);
|
||||
const authConfig = getAuthConfig(application);
|
||||
const docker = await getRemoteDocker(application.serverId);
|
||||
// const server = await findServerById(application.serverId);
|
||||
// if (!server.sshKeyId) return;
|
||||
// const keys = await readSSHKey(server.sshKeyId);
|
||||
// const docker = new Dockerode({
|
||||
// host: server.ipAddress,
|
||||
// port: server.port,
|
||||
// username: server.username,
|
||||
// protocol: "ssh",
|
||||
// sshOptions: {
|
||||
// privateKey: keys.privateKey,
|
||||
// },
|
||||
// });
|
||||
// const results = await docker2.listContainers();
|
||||
// console.log(results);
|
||||
|
||||
const settings: CreateServiceOptions = {
|
||||
authconfig: authConfig,
|
||||
|
||||
@@ -6,12 +6,13 @@ import type { ApplicationNested } from ".";
|
||||
import { prepareEnvironmentVariables } from "../docker/utils";
|
||||
import { getBuildAppDirectory } from "../filesystem/directory";
|
||||
import { spawnAsync } from "../process/spawnAsync";
|
||||
import { executeCommand } from "../servers/command";
|
||||
|
||||
export const buildNixpacks = async (
|
||||
application: ApplicationNested,
|
||||
writeStream: WriteStream,
|
||||
) => {
|
||||
const { env, appName, publishDirectory } = application;
|
||||
const { env, appName, publishDirectory, serverId } = application;
|
||||
|
||||
const buildAppDirectory = getBuildAppDirectory(application);
|
||||
const buildContainerId = `${appName}-${nanoid(10)}`;
|
||||
@@ -71,3 +72,55 @@ export const buildNixpacks = async (
|
||||
throw e;
|
||||
}
|
||||
};
|
||||
|
||||
export const getNixpacksCommand = (
|
||||
application: ApplicationNested,
|
||||
logPath: string,
|
||||
) => {
|
||||
const { env, appName, publishDirectory, serverId } = application;
|
||||
|
||||
const buildAppDirectory = getBuildAppDirectory(application);
|
||||
const buildContainerId = `${appName}-${nanoid(10)}`;
|
||||
const envVariables = prepareEnvironmentVariables(env);
|
||||
|
||||
try {
|
||||
const args = ["build", buildAppDirectory, "--name", appName];
|
||||
|
||||
for (const env of envVariables) {
|
||||
args.push("--env", env);
|
||||
}
|
||||
|
||||
if (publishDirectory) {
|
||||
/* No need for any start command, since we'll use nginx later on */
|
||||
args.push("--no-error-without-start");
|
||||
}
|
||||
|
||||
const command = `nixpacks ${args.join(" ")}`;
|
||||
const bashCommand = `
|
||||
echo "Starting nixpacks build..." >> ${logPath};
|
||||
${command} >> ${logPath} 2>&1;
|
||||
echo "Nixpacks build completed." >> ${logPath};
|
||||
`;
|
||||
|
||||
/*
|
||||
Run the container with the image created by nixpacks,
|
||||
and copy the artifacts on the host filesystem.
|
||||
Then, remove the container and create a static build.
|
||||
*/
|
||||
|
||||
// if (publishDirectory) {
|
||||
// bashCommand += `
|
||||
// docker create --name ${buildContainerId} ${appName}
|
||||
// docker cp ${buildContainerId}:/app/${publishDirectory} ${path.join(buildAppDirectory, publishDirectory)}
|
||||
// docker rm ${buildContainerId}
|
||||
// buildStatic ${application} ${writeStream}
|
||||
// `;
|
||||
// }
|
||||
|
||||
return bashCommand;
|
||||
} catch (e) {
|
||||
// await spawnAsync("docker", ["rm", buildContainerId], writeToStream);
|
||||
|
||||
throw e;
|
||||
}
|
||||
};
|
||||
|
||||
@@ -4,7 +4,6 @@ import { prepareEnvironmentVariables } from "../docker/utils";
|
||||
import { getBuildAppDirectory } from "../filesystem/directory";
|
||||
import { spawnAsync } from "../process/spawnAsync";
|
||||
|
||||
// TODO: integrate in the vps sudo chown -R $(whoami) ~/.docker
|
||||
export const buildPaketo = async (
|
||||
application: ApplicationNested,
|
||||
writeStream: WriteStream,
|
||||
@@ -36,3 +35,35 @@ export const buildPaketo = async (
|
||||
throw e;
|
||||
}
|
||||
};
|
||||
|
||||
export const getPaketoCommand = (
|
||||
application: ApplicationNested,
|
||||
logPath: string,
|
||||
) => {
|
||||
const { env, appName } = application;
|
||||
|
||||
const buildAppDirectory = getBuildAppDirectory(application);
|
||||
const envVariables = prepareEnvironmentVariables(env);
|
||||
|
||||
const args = [
|
||||
"build",
|
||||
appName,
|
||||
"--path",
|
||||
buildAppDirectory,
|
||||
"--builder",
|
||||
"paketobuildpacks/builder-jammy-full",
|
||||
];
|
||||
|
||||
for (const env of envVariables) {
|
||||
args.push("--env", env);
|
||||
}
|
||||
|
||||
const command = `pack ${args.join(" ")}`;
|
||||
const bashCommand = `
|
||||
echo "Starting Paketo build..." >> ${logPath};
|
||||
${command} >> ${logPath} 2>&1;
|
||||
echo "Paketo build completed." >> ${logPath};
|
||||
`;
|
||||
|
||||
return bashCommand;
|
||||
};
|
||||
|
||||
@@ -10,3 +10,13 @@ export const createEnvFile = (directory: string, env: string | null) => {
|
||||
const envFileContent = prepareEnvironmentVariables(env).join("\n");
|
||||
writeFileSync(envFilePath, envFileContent);
|
||||
};
|
||||
|
||||
export const createEnvFileCommand = (directory: string, env: string | null) => {
|
||||
const envFilePath = join(dirname(directory), ".env");
|
||||
// let command = ``
|
||||
if (!existsSync(dirname(envFilePath))) {
|
||||
mkdirSync(dirname(envFilePath), { recursive: true });
|
||||
}
|
||||
const envFileContent = prepareEnvironmentVariables(env).join("\n");
|
||||
return `echo "${envFileContent}" > ${envFilePath}`;
|
||||
};
|
||||
|
||||
@@ -6,6 +6,7 @@ import type { ContainerInfo, ResourceRequirements } from "dockerode";
|
||||
import { parse } from "dotenv";
|
||||
import type { ApplicationNested } from "../builders";
|
||||
import { execAsync } from "../process/execAsync";
|
||||
import { getRemoteDocker } from "../servers/remote-docker";
|
||||
|
||||
interface RegistryAuth {
|
||||
username: string;
|
||||
@@ -51,6 +52,51 @@ export const pullImage = async (
|
||||
}
|
||||
};
|
||||
|
||||
export const pullRemoteImage = async (
|
||||
dockerImage: string,
|
||||
serverId: string,
|
||||
onData?: (data: any) => void,
|
||||
authConfig?: Partial<RegistryAuth>,
|
||||
): Promise<void> => {
|
||||
try {
|
||||
if (!dockerImage) {
|
||||
throw new Error("Docker image not found");
|
||||
}
|
||||
|
||||
const remoteDocker = await getRemoteDocker(serverId);
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
remoteDocker.pull(
|
||||
dockerImage,
|
||||
{ authconfig: authConfig },
|
||||
(err, stream) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
|
||||
remoteDocker.modem.followProgress(
|
||||
stream as Readable,
|
||||
(err: Error | null, res) => {
|
||||
if (!err) {
|
||||
resolve(res);
|
||||
}
|
||||
if (err) {
|
||||
reject(err);
|
||||
}
|
||||
},
|
||||
(event) => {
|
||||
onData?.(event);
|
||||
},
|
||||
);
|
||||
},
|
||||
);
|
||||
});
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
export const containerExists = async (containerName: string) => {
|
||||
const container = docker.getContainer(containerName);
|
||||
try {
|
||||
|
||||
@@ -117,6 +117,55 @@ export const cloneRawBitbucketRepository = async (entity: Compose) => {
|
||||
}
|
||||
};
|
||||
|
||||
export const getBitbucketCloneCommand = async (
|
||||
entity: ApplicationWithBitbucket | ComposeWithBitbucket,
|
||||
logPath: string,
|
||||
isCompose = false,
|
||||
) => {
|
||||
const {
|
||||
appName,
|
||||
bitbucketRepository,
|
||||
bitbucketOwner,
|
||||
bitbucketBranch,
|
||||
bitbucketId,
|
||||
serverId,
|
||||
bitbucket,
|
||||
} = entity;
|
||||
|
||||
if (!serverId) {
|
||||
throw new TRPCError({
|
||||
code: "NOT_FOUND",
|
||||
message: "Server not found",
|
||||
});
|
||||
}
|
||||
|
||||
if (!bitbucketId) {
|
||||
throw new TRPCError({
|
||||
code: "NOT_FOUND",
|
||||
message: "Bitbucket Provider not found",
|
||||
});
|
||||
}
|
||||
|
||||
const bitbucketProvider = await findBitbucketById(bitbucketId);
|
||||
const basePath = COMPOSE_PATH;
|
||||
const outputPath = join(basePath, appName, "code");
|
||||
await recreateDirectory(outputPath);
|
||||
const repoclone = `bitbucket.org/${bitbucketOwner}/${bitbucketRepository}.git`;
|
||||
const cloneUrl = `https://${bitbucketProvider?.bitbucketUsername}:${bitbucketProvider?.appPassword}@${repoclone}`;
|
||||
|
||||
const cloneCommand = `
|
||||
rm -rf ${outputPath};
|
||||
mkdir -p ${outputPath};
|
||||
if ! git clone --branch ${bitbucketBranch} --depth 1 --progress ${cloneUrl} ${outputPath} >> ${logPath} 2>&1; then
|
||||
echo "[ERROR] Fail to clone the repository ${repoclone}" >> ${logPath};
|
||||
exit 1;
|
||||
fi
|
||||
echo "Cloned ${repoclone} to ${outputPath}: ✅" >> ${logPath};
|
||||
`;
|
||||
|
||||
return cloneCommand;
|
||||
};
|
||||
|
||||
export const getBitbucketRepositories = async (bitbucketId?: string) => {
|
||||
if (!bitbucketId) {
|
||||
return [];
|
||||
|
||||
@@ -47,3 +47,38 @@ export const buildDocker = async (
|
||||
writeStream.end();
|
||||
}
|
||||
};
|
||||
|
||||
export const buildRemoteDocker = async (
|
||||
application: ApplicationNested,
|
||||
logPath: string,
|
||||
) => {
|
||||
const { sourceType, dockerImage, username, password } = application;
|
||||
|
||||
try {
|
||||
if (!dockerImage) {
|
||||
throw new Error("Docker image not found");
|
||||
}
|
||||
let command = `
|
||||
echo "Building ${sourceType}" >> ${logPath};
|
||||
echo "Pulling ${dockerImage}" >> ${logPath};
|
||||
`;
|
||||
|
||||
if (username && password) {
|
||||
command += `
|
||||
if ! docker login --username ${username} --password ${password} https://index.docker.io/v1/ >> ${logPath} 2>&1; then
|
||||
echo "Error logging in to Docker Hub" >> ${logPath};
|
||||
exit 1;
|
||||
fi
|
||||
`;
|
||||
}
|
||||
|
||||
command += `
|
||||
echo "Pulling ${dockerImage}" >> ${logPath};
|
||||
docker pull ${dockerImage} >> ${logPath} 2>&1;
|
||||
`;
|
||||
|
||||
return command;
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
@@ -87,6 +87,66 @@ export const cloneGitRepository = async (
|
||||
}
|
||||
};
|
||||
|
||||
export const getCustomGitCloneCommand = async (
|
||||
entity: {
|
||||
appName: string;
|
||||
customGitUrl?: string | null;
|
||||
customGitBranch?: string | null;
|
||||
customGitSSHKeyId?: string | null;
|
||||
},
|
||||
logPath: string,
|
||||
isCompose = false,
|
||||
) => {
|
||||
const { appName, customGitUrl, customGitBranch, customGitSSHKeyId } = entity;
|
||||
|
||||
if (!customGitUrl || !customGitBranch) {
|
||||
throw new TRPCError({
|
||||
code: "BAD_REQUEST",
|
||||
message: "Error: Repository not found",
|
||||
});
|
||||
}
|
||||
|
||||
const keyPath = path.join(SSH_PATH, `${customGitSSHKeyId}_rsa`);
|
||||
const basePath = isCompose ? COMPOSE_PATH : APPLICATIONS_PATH;
|
||||
const outputPath = join(basePath, appName, "code");
|
||||
const knownHostsPath = path.join(SSH_PATH, "known_hosts");
|
||||
|
||||
if (customGitSSHKeyId) {
|
||||
await updateSSHKeyById({
|
||||
sshKeyId: customGitSSHKeyId,
|
||||
lastUsedAt: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
try {
|
||||
const command = [];
|
||||
if (!isHttpOrHttps(customGitUrl)) {
|
||||
command.push(addHostToKnownHostsCommand(customGitUrl));
|
||||
}
|
||||
command.push(`rm -rf ${outputPath};`);
|
||||
command.push(`mkdir -p ${outputPath};`);
|
||||
command.push(
|
||||
`echo "Cloning Custom Git ${customGitUrl}" to ${outputPath}: ✅ >> ${logPath};`,
|
||||
);
|
||||
if (customGitSSHKeyId) {
|
||||
command.push(
|
||||
`GIT_SSH_COMMAND="ssh -i ${keyPath} -o UserKnownHostsFile=${knownHostsPath}"`,
|
||||
);
|
||||
}
|
||||
|
||||
command.push(
|
||||
`if ! git clone --branch ${customGitBranch} --depth 1 --progress ${customGitUrl} ${outputPath} >> ${logPath} 2>&1; then
|
||||
echo "[ERROR] Fail to clone the repository ${customGitUrl}" >> ${logPath};
|
||||
exit 1;
|
||||
fi
|
||||
`,
|
||||
);
|
||||
command.push(`echo "Cloned Custom Git ${customGitUrl}: ✅" >> ${logPath}`);
|
||||
return command.join("\n");
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const isHttpOrHttps = (url: string): boolean => {
|
||||
const regex = /^https?:\/\//;
|
||||
return regex.test(url);
|
||||
@@ -104,6 +164,13 @@ const addHostToKnownHosts = async (repositoryURL: string) => {
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const addHostToKnownHostsCommand = (repositoryURL: string) => {
|
||||
const { domain, port } = sanitizeRepoPathSSH(repositoryURL);
|
||||
const knownHostsPath = path.join(SSH_PATH, "known_hosts");
|
||||
|
||||
return `ssh-keyscan -p ${port} ${domain} >> ${knownHostsPath};`;
|
||||
};
|
||||
const sanitizeRepoPathSSH = (input: string) => {
|
||||
const SSH_PATH_RE = new RegExp(
|
||||
[
|
||||
|
||||
@@ -11,6 +11,7 @@ import { spawnAsync } from "../process/spawnAsync";
|
||||
import type { Compose } from "@/server/api/services/compose";
|
||||
import { type Github, findGithubById } from "@/server/api/services/github";
|
||||
import type { apiFindGithubBranches } from "@/server/db/schema";
|
||||
import { executeCommand } from "../servers/command";
|
||||
|
||||
export const authGithub = (githubProvider: Github) => {
|
||||
if (!haveGithubRequirements(githubProvider)) {
|
||||
@@ -142,6 +143,63 @@ export const cloneGithubRepository = async (
|
||||
}
|
||||
};
|
||||
|
||||
export const getGithubCloneCommand = async (
|
||||
entity: ApplicationWithGithub | ComposeWithGithub,
|
||||
logPath: string,
|
||||
isCompose = false,
|
||||
) => {
|
||||
const { appName, repository, owner, branch, githubId, serverId } = entity;
|
||||
|
||||
if (!githubId || !serverId) {
|
||||
throw new TRPCError({
|
||||
code: "NOT_FOUND",
|
||||
message: "GitHub Provider not found",
|
||||
});
|
||||
}
|
||||
|
||||
const requirements = getErrorCloneRequirements(entity);
|
||||
|
||||
// Build log messages
|
||||
let logMessages = "";
|
||||
if (requirements.length > 0) {
|
||||
logMessages += `\nGitHub Repository configuration failed for application: ${appName}\n`;
|
||||
logMessages += "Reasons:\n";
|
||||
logMessages += requirements.join("\n");
|
||||
const escapedLogMessages = logMessages
|
||||
.replace(/\\/g, "\\\\")
|
||||
.replace(/"/g, '\\"')
|
||||
.replace(/\n/g, "\\n");
|
||||
|
||||
const bashCommand = `
|
||||
echo "${escapedLogMessages}" >> ${logPath};
|
||||
exit 1; # Exit with error code
|
||||
`;
|
||||
|
||||
await executeCommand(serverId, bashCommand);
|
||||
return;
|
||||
}
|
||||
|
||||
const githubProvider = await findGithubById(githubId);
|
||||
const basePath = isCompose ? COMPOSE_PATH : APPLICATIONS_PATH;
|
||||
const outputPath = join(basePath, appName, "code");
|
||||
const octokit = authGithub(githubProvider);
|
||||
const token = await getGithubToken(octokit);
|
||||
const repoclone = `github.com/${owner}/${repository}.git`;
|
||||
const cloneUrl = `https://oauth2:${token}@${repoclone}`;
|
||||
|
||||
const cloneCommand = `
|
||||
rm -rf ${outputPath};
|
||||
mkdir -p ${outputPath};
|
||||
if ! git clone --branch ${branch} --depth 1 --progress ${cloneUrl} ${outputPath} >> ${logPath} 2>&1; then
|
||||
echo "[ERROR] Fallo al clonar el repositorio ${repoclone}" >> ${logPath};
|
||||
exit 1;
|
||||
fi
|
||||
echo "Cloned ${repoclone} to ${outputPath}: ✅" >> ${logPath};
|
||||
`;
|
||||
|
||||
return cloneCommand;
|
||||
};
|
||||
|
||||
export const cloneRawGithubRepository = async (entity: Compose) => {
|
||||
const { appName, repository, owner, branch, githubId } = entity;
|
||||
|
||||
|
||||
@@ -12,6 +12,7 @@ import type { InferResultType } from "@/server/types/with";
|
||||
import { TRPCError } from "@trpc/server";
|
||||
import { recreateDirectory } from "../filesystem/directory";
|
||||
import { spawnAsync } from "../process/spawnAsync";
|
||||
import { executeCommand } from "../servers/command";
|
||||
|
||||
export const refreshGitlabToken = async (gitlabProviderId: string) => {
|
||||
const gitlabProvider = await findGitlabById(gitlabProviderId);
|
||||
@@ -153,6 +154,78 @@ export const cloneGitlabRepository = async (
|
||||
}
|
||||
};
|
||||
|
||||
export const getGitlabCloneCommand = async (
|
||||
entity: ApplicationWithGitlab | ComposeWithGitlab,
|
||||
logPath: string,
|
||||
isCompose = false,
|
||||
) => {
|
||||
const {
|
||||
appName,
|
||||
gitlabRepository,
|
||||
gitlabOwner,
|
||||
gitlabPathNamespace,
|
||||
gitlabBranch,
|
||||
gitlabId,
|
||||
serverId,
|
||||
gitlab,
|
||||
} = entity;
|
||||
|
||||
if (!serverId) {
|
||||
throw new TRPCError({
|
||||
code: "NOT_FOUND",
|
||||
message: "Server not found",
|
||||
});
|
||||
}
|
||||
|
||||
if (!gitlabId) {
|
||||
throw new TRPCError({
|
||||
code: "NOT_FOUND",
|
||||
message: "Gitlab Provider not found",
|
||||
});
|
||||
}
|
||||
|
||||
const requirements = getErrorCloneRequirements(entity);
|
||||
|
||||
// Build log messages
|
||||
let logMessages = "";
|
||||
if (requirements.length > 0) {
|
||||
logMessages += `\nGitLab Repository configuration failed for application: ${appName}\n`;
|
||||
logMessages += "Reasons:\n";
|
||||
logMessages += requirements.join("\n");
|
||||
const escapedLogMessages = logMessages
|
||||
.replace(/\\/g, "\\\\")
|
||||
.replace(/"/g, '\\"')
|
||||
.replace(/\n/g, "\\n");
|
||||
|
||||
const bashCommand = `
|
||||
echo "${escapedLogMessages}" >> ${logPath};
|
||||
exit 1; # Exit with error code
|
||||
`;
|
||||
|
||||
await executeCommand(serverId, bashCommand);
|
||||
return;
|
||||
}
|
||||
|
||||
await refreshGitlabToken(gitlabId);
|
||||
const basePath = isCompose ? COMPOSE_PATH : APPLICATIONS_PATH;
|
||||
const outputPath = join(basePath, appName, "code");
|
||||
await recreateDirectory(outputPath);
|
||||
const repoclone = `gitlab.com/${gitlabPathNamespace}.git`;
|
||||
const cloneUrl = `https://oauth2:${gitlab?.accessToken}@${repoclone}`;
|
||||
|
||||
const cloneCommand = `
|
||||
rm -rf ${outputPath};
|
||||
mkdir -p ${outputPath};
|
||||
if ! git clone --branch ${gitlabBranch} --depth 1 --progress ${cloneUrl} ${outputPath} >> ${logPath} 2>&1; then
|
||||
echo "[ERROR] Fail to clone the repository ${repoclone}" >> ${logPath};
|
||||
exit 1;
|
||||
fi
|
||||
echo "Cloned ${repoclone} to ${outputPath}: ✅" >> ${logPath};
|
||||
`;
|
||||
|
||||
return cloneCommand;
|
||||
};
|
||||
|
||||
export const getGitlabRepositories = async (gitlabId?: string) => {
|
||||
if (!gitlabId) {
|
||||
return [];
|
||||
|
||||
47
apps/dokploy/server/utils/servers/command.ts
Normal file
47
apps/dokploy/server/utils/servers/command.ts
Normal file
@@ -0,0 +1,47 @@
|
||||
import { findServerById } from "@/server/api/services/server";
|
||||
import { readSSHKey } from "../filesystem/ssh";
|
||||
import { Client } from "ssh2";
|
||||
|
||||
export const executeCommand = async (serverId: string, command: string) => {
|
||||
const server = await findServerById(serverId);
|
||||
|
||||
if (!server.sshKeyId) return;
|
||||
const keys = await readSSHKey(server.sshKeyId);
|
||||
const client = new Client();
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
client
|
||||
.on("ready", () => {
|
||||
console.log("Client :: ready", command);
|
||||
client.exec(command, (err, stream) => {
|
||||
if (err) {
|
||||
console.error("Execution error:", err);
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
stream
|
||||
.on("close", (code, signal) => {
|
||||
console.log("Connection closed ✅");
|
||||
client.end();
|
||||
if (code === 0) {
|
||||
resolve();
|
||||
} else {
|
||||
reject(new Error(`Command exited with code ${code}`));
|
||||
}
|
||||
})
|
||||
.on("data", (data: string) => {
|
||||
console.log(`OUTPUT: ${data.toString()}`);
|
||||
})
|
||||
.stderr.on("data", (data) => {
|
||||
console.error(`STDERR: ${data.toString()}`);
|
||||
});
|
||||
});
|
||||
})
|
||||
.connect({
|
||||
host: server.ipAddress,
|
||||
port: server.port,
|
||||
username: server.username,
|
||||
privateKey: keys.privateKey,
|
||||
timeout: 99999,
|
||||
});
|
||||
});
|
||||
};
|
||||
23
apps/dokploy/server/utils/servers/remote-docker.ts
Normal file
23
apps/dokploy/server/utils/servers/remote-docker.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import { findServerById } from "@/server/api/services/server";
|
||||
import { readSSHKey } from "../filesystem/ssh";
|
||||
import Dockerode from "dockerode";
|
||||
import { docker } from "@/server/constants";
|
||||
|
||||
export const getRemoteDocker = async (serverId: string | null) => {
|
||||
if (!serverId) return docker;
|
||||
const server = await findServerById(serverId);
|
||||
if (!server.sshKeyId) return docker;
|
||||
const keys = await readSSHKey(server.sshKeyId);
|
||||
const dockerode = new Dockerode({
|
||||
host: server.ipAddress,
|
||||
port: server.port,
|
||||
username: server.username,
|
||||
protocol: "ssh",
|
||||
// @ts-ignore
|
||||
sshOptions: {
|
||||
privateKey: keys.privateKey,
|
||||
},
|
||||
});
|
||||
|
||||
return dockerode;
|
||||
};
|
||||
@@ -17,9 +17,13 @@ import {
|
||||
createServerDeployment,
|
||||
updateDeploymentStatus,
|
||||
} from "@/server/api/services/deployment";
|
||||
import { chmodSync, createWriteStream } from "node:fs";
|
||||
import { createWriteStream } from "node:fs";
|
||||
import { Client } from "ssh2";
|
||||
import { readSSHKey } from "../filesystem/ssh";
|
||||
import {
|
||||
getDefaultMiddlewares,
|
||||
getDefaultTraefikConfig,
|
||||
} from "@/server/setup/traefik-setup";
|
||||
|
||||
export const setupServer = async (serverId: string) => {
|
||||
const server = await findServerById(serverId);
|
||||
@@ -76,6 +80,12 @@ const connectToServer = async (serverId: string, logPath: string) => {
|
||||
${setupNetwork()}
|
||||
${setupMainDirectory()}
|
||||
${setupDirectories()}
|
||||
${createTraefikConfig()}
|
||||
${createDefaultMiddlewares()}
|
||||
${createTraefikInstance()}
|
||||
${installNixpacks()}
|
||||
${installBuildpacks()}
|
||||
${setupRedis()}
|
||||
`;
|
||||
|
||||
client.exec(bashCommand, (err, stream) => {
|
||||
@@ -90,13 +100,11 @@ const connectToServer = async (serverId: string, logPath: string) => {
|
||||
client.end();
|
||||
resolve();
|
||||
})
|
||||
.on("data", (data) => {
|
||||
.on("data", (data: string) => {
|
||||
writeStream.write(data.toString());
|
||||
console.log(`OUTPUT: ${data}`);
|
||||
})
|
||||
.stderr.on("data", (data) => {
|
||||
writeStream.write(data.toString());
|
||||
console.log(`STDERR: ${data}`);
|
||||
});
|
||||
});
|
||||
})
|
||||
@@ -105,7 +113,7 @@ const connectToServer = async (serverId: string, logPath: string) => {
|
||||
port: server.port,
|
||||
username: server.username,
|
||||
privateKey: keys.privateKey,
|
||||
timeout: 10000,
|
||||
timeout: 99999,
|
||||
});
|
||||
});
|
||||
};
|
||||
@@ -135,7 +143,6 @@ const setupDirectories = () => {
|
||||
${chmodCommand}
|
||||
`;
|
||||
|
||||
console.log(command);
|
||||
return command;
|
||||
};
|
||||
|
||||
@@ -203,16 +210,102 @@ const installDocker = () => `
|
||||
const validatePorts = () => `
|
||||
# check if something is running on port 80
|
||||
if ss -tulnp | grep ':80 ' >/dev/null; then
|
||||
echo "Error: something is already running on port 80" >&2
|
||||
exit 1
|
||||
echo "Something is already running on port 80" >&2
|
||||
fi
|
||||
|
||||
# check if something is running on port 443
|
||||
if ss -tulnp | grep ':443 ' >/dev/null; then
|
||||
echo "Error: something is already running on port 443" >&2
|
||||
exit 1
|
||||
echo "Something is already running on port 443" >&2
|
||||
fi
|
||||
`;
|
||||
|
||||
// mkdir -p "/Users/mauricio/Documents/Github/Personal/dokploy/apps/dokploy/.docker" && mkdir -p "/Users/mauricio/Documents/Github/Personal/dokploy/apps/dokploy/.docker/traefik" && mkdir -p "/Users/mauricio/Documents/Github/Personal/dokploy/apps/dokploy/.docker/traefik/dynamic" && mkdir -p "/Users/mauricio/Documents/Github/Personal/dokploy/apps/dokploy/.docker/logs" && mkdir -p "/Users/mauricio/Documents/Github/Personal/dokploy/apps/dokploy/.docker/applications" && mkdir -p "/Users/mauricio/Documents/Github/Personal/dokploy/apps/dokploy/.docker/ssh" && mkdir -p "/Users/mauricio/Documents/Github/Personal/dokploy/apps/dokploy/.docker/traefik/dynamic/certificates" && mkdir -p "/Users/mauricio/Documents/Github/Personal/dokploy/apps/dokploy/.docker/monitoring"
|
||||
// chmod 700 "/Users/mauricio/Documents/Github/Personal/dokploy/apps/dokploy/.docker/ssh"
|
||||
const createTraefikConfig = () => {
|
||||
const config = getDefaultTraefikConfig();
|
||||
|
||||
const command = `
|
||||
if [ -f "/etc/dokploy/traefik/dynamic/acme.json" ]; then
|
||||
chmod 600 "/etc/dokploy/traefik/dynamic/acme.json"
|
||||
fi
|
||||
if [ -f "/etc/dokploy/traefik/traefik.yml" ]; then
|
||||
echo "Traefik config already exists ✅"
|
||||
else
|
||||
echo "${config}" > /etc/dokploy/traefik/traefik.yml
|
||||
fi
|
||||
`;
|
||||
|
||||
return command;
|
||||
};
|
||||
|
||||
export const createDefaultMiddlewares = () => {
|
||||
const config = getDefaultMiddlewares();
|
||||
const command = `
|
||||
if [ -f "/etc/dokploy/traefik/dynamic/middlewares.yml" ]; then
|
||||
echo "Middlewares config already exists ✅"
|
||||
else
|
||||
echo "${config}" > /etc/dokploy/traefik/dynamic/middlewares.yml
|
||||
fi
|
||||
`;
|
||||
return command;
|
||||
};
|
||||
|
||||
export const createTraefikInstance = () => {
|
||||
const command = `
|
||||
# Check if dokpyloy-traefik exists
|
||||
if docker service ls | grep -q 'dokploy-traefik'; then
|
||||
echo "Traefik already exists ✅"
|
||||
else
|
||||
# Create the dokploy-traefik service
|
||||
docker service create \
|
||||
--name dokploy-traefik \
|
||||
--replicas 1 \
|
||||
--constraint 'node.role==manager' \
|
||||
--network dokploy-network \
|
||||
--mount type=bind,src=/etc/dokploy/traefik/traefik.yml,dst=/etc/traefik/traefik.yml \
|
||||
--mount type=bind,src=/etc/dokploy/traefik/dynamic,dst=/etc/dokploy/traefik/dynamic \
|
||||
--mount type=bind,src=/var/run/docker.sock,dst=/var/run/docker.sock \
|
||||
--label traefik.enable=true \
|
||||
--publish mode=host,target=443,published=443 \
|
||||
--publish mode=host,target=80,published=80 \
|
||||
--publish mode=host,target=8080,published=8080 \
|
||||
traefik:v3.1.2
|
||||
fi
|
||||
`;
|
||||
|
||||
return command;
|
||||
};
|
||||
|
||||
/**
 * Build the shell snippet that installs Nixpacks via its official install
 * script, skipping the download when the binary is already on PATH (relies
 * on a `command_exists` helper defined by the surrounding setup script).
 *
 * Generalized: the previously hard-coded version is now a parameter whose
 * default preserves the old behavior.
 *
 * @param version - Nixpacks version to pin (defaults to "1.28.1").
 * @returns A POSIX shell script as a single string.
 */
const installNixpacks = (version = "1.28.1") => `
	if command_exists nixpacks; then
		echo "Nixpacks already installed ✅"
	else
		VERSION=${version} bash -c "$(curl -fsSL https://nixpacks.com/install.sh)"
		echo "Nixpacks version ${version} installed ✅"
	fi
`;
|
||||
|
||||
/**
 * Build the shell snippet that installs the Buildpacks `pack` CLI from its
 * GitHub release tarball into /usr/local/bin, skipping the download when
 * the binary is already on PATH (relies on a `command_exists` helper
 * defined by the surrounding setup script).
 *
 * Generalized: the previously hard-coded version is now a parameter whose
 * default preserves the old behavior.
 *
 * @param version - pack CLI version to pin (defaults to "0.35.0").
 * @returns A POSIX shell script as a single string.
 */
const installBuildpacks = (version = "0.35.0") => `
	if command_exists pack; then
		echo "Buildpacks already installed ✅"
	else
		curl -sSL "https://github.com/buildpacks/pack/releases/download/v${version}/pack-v${version}-linux.tgz" | tar -C /usr/local/bin/ --no-same-owner -xzv pack
		echo "Buildpacks version ${version} installed ✅"
	fi
`;
|
||||
|
||||
/**
 * Build the shell snippet that provisions the "dokploy-redis" swarm
 * service: a single Redis 7 replica pinned to a manager node on the
 * dokploy-network, persisting /data in the "redis-data-volume" volume and
 * publishing port 6379 on the host in host mode. Idempotent: creation is
 * skipped when the service already exists.
 *
 * NOTE(review): publishing 6379 in host mode exposes Redis on the host's
 * public interfaces without auth — confirm this is firewalled upstream.
 *
 * @returns A POSIX shell script as a single string.
 */
const setupRedis = () => {
	const script = `
# Check if redis is already installed
if docker service ls | grep -q 'dokploy-redis'; then
	echo "Redis already installed ✅"
else
	# Install Redis
	docker service create \
	--name dokploy-redis \
	--replicas 1 \
	--constraint 'node.role==manager' \
	--mount type=volume,source=redis-data-volume,target=/data \
	--network dokploy-network \
	--publish target=6379,published=6379,protocol=tcp,mode=host \
	redis:7
fi

`;
	return script;
};
|
||||
|
||||
@@ -2,6 +2,9 @@ import { spawn } from "node:child_process";
|
||||
import type http from "node:http";
|
||||
import { WebSocketServer } from "ws";
|
||||
import { validateWebSocketRequest } from "../auth/auth";
|
||||
import { findServerById } from "../api/services/server";
|
||||
import { readSSHKey } from "../utils/filesystem/ssh";
|
||||
import { Client } from "ssh2";
|
||||
|
||||
export const setupDeploymentLogsWebSocketServer = (
|
||||
server: http.Server<typeof http.IncomingMessage, typeof http.ServerResponse>,
|
||||
@@ -27,6 +30,7 @@ export const setupDeploymentLogsWebSocketServer = (
|
||||
wssTerm.on("connection", async (ws, req) => {
|
||||
const url = new URL(req.url || "", `http://${req.headers.host}`);
|
||||
const logPath = url.searchParams.get("logPath");
|
||||
const serverId = url.searchParams.get("serverId");
|
||||
const { user, session } = await validateWebSocketRequest(req);
|
||||
|
||||
if (!logPath) {
|
||||
@@ -35,20 +39,70 @@ export const setupDeploymentLogsWebSocketServer = (
|
||||
return;
|
||||
}
|
||||
|
||||
// if (!serverId) {
|
||||
// console.log("serverId no provided");
|
||||
// ws.close(4000, "serverId no provided");
|
||||
// return;
|
||||
// }
|
||||
|
||||
if (!user || !session) {
|
||||
ws.close();
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const tail = spawn("tail", ["-n", "+1", "-f", logPath]);
|
||||
if (serverId) {
|
||||
const server = await findServerById(serverId);
|
||||
|
||||
tail.stdout.on("data", (data) => {
|
||||
ws.send(data.toString());
|
||||
});
|
||||
if (!server.sshKeyId) return;
|
||||
const keys = await readSSHKey(server.sshKeyId);
|
||||
const client = new Client();
|
||||
new Promise<void>((resolve, reject) => {
|
||||
client
|
||||
.on("ready", () => {
|
||||
const command = `
|
||||
tail -n +1 -f ${logPath};
|
||||
`;
|
||||
client.exec(command, (err, stream) => {
|
||||
if (err) {
|
||||
console.error("Execution error:", err);
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
stream
|
||||
.on("close", () => {
|
||||
console.log("Connection closed ✅");
|
||||
client.end();
|
||||
resolve();
|
||||
})
|
||||
.on("data", (data: string) => {
|
||||
ws.send(data.toString());
|
||||
// console.log(`OUTPUT: ${data.toString()}`);
|
||||
})
|
||||
.stderr.on("data", (data) => {
|
||||
ws.send(data.toString());
|
||||
// console.error(`STDERR: ${data.toString()}`);
|
||||
});
|
||||
});
|
||||
})
|
||||
.connect({
|
||||
host: server.ipAddress,
|
||||
port: server.port,
|
||||
username: server.username,
|
||||
privateKey: keys.privateKey,
|
||||
timeout: 99999,
|
||||
});
|
||||
});
|
||||
} else {
|
||||
const tail = spawn("tail", ["-n", "+1", "-f", logPath]);
|
||||
|
||||
tail.stderr.on("data", (data) => {
|
||||
ws.send(new Error(`tail error: ${data.toString()}`).message);
|
||||
});
|
||||
tail.stdout.on("data", (data) => {
|
||||
ws.send(data.toString());
|
||||
});
|
||||
|
||||
tail.stderr.on("data", (data) => {
|
||||
ws.send(new Error(`tail error: ${data.toString()}`).message);
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
// @ts-ignore
|
||||
// const errorMessage = error?.message as unknown as string;
|
||||
|
||||
Reference in New Issue
Block a user