diff --git a/LICENSE.MD b/LICENSE.MD index 8a508efb..7e49a35b 100644 --- a/LICENSE.MD +++ b/LICENSE.MD @@ -19,8 +19,8 @@ See the License for the specific language governing permissions and limitations The following additional terms apply to the multi-node support, Docker Compose file, Preview Deployments and Multi Server features of Dokploy. In the event of a conflict, these provisions shall take precedence over those in the Apache License: -- **Self-Hosted Version Free**: All features of Dokploy, including multi-node support, Docker Compose file support, Preview Deployments and Multi Server, will always be free to use in the self-hosted version. -- **Restriction on Resale**: The multi-node support, Docker Compose file support, Preview Deployments and Multi Server features cannot be sold or offered as a service by any party other than the copyright holder without prior written consent. -- **Modification Distribution**: Any modifications to the multi-node support, Docker Compose file support, Preview Deployments and Multi Server features must be distributed freely and cannot be sold or offered as a service. +- **Self-Hosted Version Free**: All features of Dokploy, including multi-node support, Docker Compose file support, Schedules, Preview Deployments and Multi Server, will always be free to use in the self-hosted version. +- **Restriction on Resale**: The multi-node support, Docker Compose file support, Schedules, Preview Deployments and Multi Server features cannot be sold or offered as a service by any party other than the copyright holder without prior written consent. +- **Modification Distribution**: Any modifications to the multi-node support, Docker Compose file support, Schedules, Preview Deployments and Multi Server features must be distributed freely and cannot be sold or offered as a service. For further inquiries or permissions, please contact us directly. diff --git a/apps/dokploy/components/dashboard/application/deployments/show-deployments.tsx b/apps/dokploy/components/dashboard/application/deployments/show-deployments.tsx index 76e5bb26..3a46835d 100644 --- a/apps/dokploy/components/dashboard/application/deployments/show-deployments.tsx +++ b/apps/dokploy/components/dashboard/application/deployments/show-deployments.tsx @@ -9,12 +9,13 @@ import { CardTitle, } from "@/components/ui/card"; import { type RouterOutputs, api } from "@/utils/api"; -import { RocketIcon } from "lucide-react"; +import { RocketIcon, Clock } from "lucide-react"; import React, { useEffect, useState } from "react"; import { CancelQueues } from "./cancel-queues"; import { RefreshToken } from "./refresh-token"; import { ShowDeployment } from "./show-deployment"; - +import { Badge } from "@/components/ui/badge"; +import { formatDuration } from "../schedules/show-schedules-logs"; interface Props { applicationId: string; } @@ -96,8 +97,23 @@ export const ShowDeployments = ({ applicationId }: Props) => { )}
-
+
+									{deployment.startedAt && deployment.finishedAt && (
+
+
+										{formatDuration(
+											Math.floor(
+												(new Date(deployment.finishedAt).getTime() -
+													new Date(deployment.startedAt).getTime()) /
+													1000,
+											),
+										)}
+
+									)}
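The elapsed-time badge above is driven by the formatDuration helper that this diff adds in show-schedules-logs.tsx. A minimal standalone sketch of the same computation — formatDuration is copied from the diff, while deploymentDuration is a hypothetical wrapper used only for illustration:

// Mirrors the helper added in show-schedules-logs.tsx: "Ns" or "Nm Ns".
export const formatDuration = (seconds: number) => {
	if (seconds < 60) return `${seconds}s`;
	const minutes = Math.floor(seconds / 60);
	const remainingSeconds = seconds % 60;
	return `${minutes}m ${remainingSeconds}s`;
};

// Hypothetical wrapper for illustration: derives the badge label from the
// deployment's ISO timestamps (startedAt / finishedAt as stored in the deployment table).
const deploymentDuration = (startedAt: string, finishedAt: string) =>
	formatDuration(
		Math.floor(
			(new Date(finishedAt).getTime() - new Date(startedAt).getTime()) / 1000,
		),
	);

// deploymentDuration("2024-01-01T00:00:00Z", "2024-01-01T00:01:30Z") === "1m 30s"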
+ ) : ( + + )} + + + + {scheduleId ? "Edit" : "Create"} Schedule + +
+ + {scheduleTypeForm === "compose" && ( +
+ {errorServices && ( + + {errorServices?.message} + + )} + ( + + Service Name +
+ + + + + + + +

+ Fetch: Will clone the repository and load the + services +

+
+
+
+ + + + + + +

+ Cache: If you previously deployed this compose, + it will read the services from the last + deployment/fetch from the repository +

+
+
+
+
+ + +
+ )} + /> +
+ )} + + ( + + + Task Name + + + + + + A descriptive name for your scheduled task + + + + )} + /> + + ( + + + Schedule + + + + + + +

+ Cron expression format: minute hour day month + weekday +

+

Example: 0 0 * * * (daily at midnight)

+
+
+
+
+
+ +
+ + + +
+
+ + Choose a predefined schedule or enter a custom cron + expression + + +
+ )} + /> + + {(scheduleTypeForm === "application" || + scheduleTypeForm === "compose") && ( + <> + ( + + + Shell Type + + + + Choose the shell to execute your command + + + + )} + /> + ( + + + Command + + + + + + The command to execute in your container + + + + )} + /> + + )} + + {(scheduleTypeForm === "dokploy-server" || + scheduleTypeForm === "server") && ( + ( + + Script + + + + + + + + )} + /> + )} + + ( + + + + Enabled + + + )} + /> + + + + +
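The cron help text above describes the standard five-field expression that the scheduler later hands to node-schedule (see packages/server/src/utils/schedules/utils.ts in this diff). A minimal sketch of that registration, with a placeholder callback instead of the real runCommand:

import { scheduleJob, scheduledJobs } from "node-schedule";

// "0 0 * * *" = minute hour day month weekday -> fires daily at midnight.
scheduleJob("my-schedule-id", "0 0 * * *", async () => {
	// Placeholder body; the real job calls runCommand(scheduleId).
	console.log("schedule fired at", new Date().toISOString());
});

// Jobs are keyed by name, which is how removeScheduleJob cancels them later.
scheduledJobs["my-schedule-id"]?.cancel();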
+ + ); +}; diff --git a/apps/dokploy/components/dashboard/application/schedules/show-schedules-logs.tsx b/apps/dokploy/components/dashboard/application/schedules/show-schedules-logs.tsx new file mode 100644 index 00000000..64753601 --- /dev/null +++ b/apps/dokploy/components/dashboard/application/schedules/show-schedules-logs.tsx @@ -0,0 +1,131 @@ +import { DateTooltip } from "@/components/shared/date-tooltip"; +import { StatusTooltip } from "@/components/shared/status-tooltip"; +import { Button } from "@/components/ui/button"; +import { + Dialog, + DialogContent, + DialogDescription, + DialogHeader, + DialogTitle, + DialogTrigger, +} from "@/components/ui/dialog"; + +import type { RouterOutputs } from "@/utils/api"; +import { useState } from "react"; +import { ShowDeployment } from "../deployments/show-deployment"; +import { ClipboardList, Clock } from "lucide-react"; +import { Badge } from "@/components/ui/badge"; + +interface Props { + deployments: RouterOutputs["deployment"]["all"]; + serverId?: string; + children?: React.ReactNode; +} + +export const formatDuration = (seconds: number) => { + if (seconds < 60) return `${seconds}s`; + const minutes = Math.floor(seconds / 60); + const remainingSeconds = seconds % 60; + return `${minutes}m ${remainingSeconds}s`; +}; + +export const ShowSchedulesLogs = ({ + deployments, + serverId, + children, +}: Props) => { + const [activeLog, setActiveLog] = useState< + RouterOutputs["deployment"]["all"][number] | null + >(null); + const [isOpen, setIsOpen] = useState(false); + return ( + + + {children ? ( + children + ) : ( + + )} + + + + Logs + + See all the logs for this schedule + + + {deployments.length > 0 ? ( +
+ {deployments.map((deployment, index) => ( +
+
+ + {index + 1} {deployment.status} + + + + {deployment.title} + + {deployment.description && ( + + {deployment.description} + + )} +
+
+
+
+									{deployment.startedAt && deployment.finishedAt && (
+
+
+										{formatDuration(
+											Math.floor(
+												(new Date(deployment.finishedAt).getTime() -
+													new Date(deployment.startedAt).getTime()) /
+													1000,
+											),
+										)}
+
+									)}
+ + +
+
+ ))} +
+ ) : ( +
+ +

No logs found

+

This schedule hasn't been executed yet

+
+ )} +
+ setActiveLog(null)} + logPath={activeLog?.logPath || ""} + errorMessage={activeLog?.errorMessage || ""} + /> +
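A usage sketch for this dialog, assuming a parent that already fetched schedules via the new schedule.list procedure; the wrapper component and its name are illustrative, not part of this diff:

import { api } from "@/utils/api";
import { ShowSchedulesLogs } from "./show-schedules-logs";

// Hypothetical parent: lists an application's schedules and renders the log
// dialog for each one, falling back to the default trigger button.
export const ApplicationScheduleLogs = ({
	applicationId,
}: { applicationId: string }) => {
	const { data: schedules } = api.schedule.list.useQuery({
		id: applicationId,
		scheduleType: "application",
	});

	return (
		<>
			{schedules?.map((schedule) => (
				<ShowSchedulesLogs
					key={schedule.scheduleId}
					deployments={schedule.deployments}
					serverId={schedule.serverId ?? undefined}
				/>
			))}
		</>
	);
};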
+ ); +}; diff --git a/apps/dokploy/components/dashboard/application/schedules/show-schedules.tsx b/apps/dokploy/components/dashboard/application/schedules/show-schedules.tsx new file mode 100644 index 00000000..19092ec9 --- /dev/null +++ b/apps/dokploy/components/dashboard/application/schedules/show-schedules.tsx @@ -0,0 +1,243 @@ +import { Button } from "@/components/ui/button"; +import { api } from "@/utils/api"; +import { HandleSchedules } from "./handle-schedules"; +import { + Clock, + Play, + Terminal, + Trash2, + ClipboardList, + Loader2, +} from "lucide-react"; +import { + Card, + CardContent, + CardDescription, + CardHeader, + CardTitle, +} from "@/components/ui/card"; +import { Badge } from "@/components/ui/badge"; +import { toast } from "sonner"; +import { ShowSchedulesLogs } from "./show-schedules-logs"; +import { + Tooltip, + TooltipContent, + TooltipProvider, + TooltipTrigger, +} from "@/components/ui/tooltip"; +import { DialogAction } from "@/components/shared/dialog-action"; + +interface Props { + id: string; + scheduleType?: "application" | "compose" | "server" | "dokploy-server"; +} + +export const ShowSchedules = ({ id, scheduleType = "application" }: Props) => { + const { + data: schedules, + isLoading: isLoadingSchedules, + refetch: refetchSchedules, + } = api.schedule.list.useQuery( + { + id: id || "", + scheduleType, + }, + { + enabled: !!id, + }, + ); + + const utils = api.useUtils(); + + const { mutateAsync: deleteSchedule, isLoading: isDeleting } = + api.schedule.delete.useMutation(); + + const { mutateAsync: runManually, isLoading } = + api.schedule.runManually.useMutation(); + + return ( + + +
+
+ + Scheduled Tasks + + + Schedule tasks to run automatically at specified intervals. + +
+ + {schedules && schedules.length > 0 && ( + + )} +
+
+ + {isLoadingSchedules ? ( +
+ + + Loading scheduled tasks... + +
+ ) : schedules && schedules.length > 0 ? ( +
+ {schedules.map((schedule) => { + const serverId = + schedule.serverId || + schedule.application?.serverId || + schedule.compose?.serverId; + const deployments = schedule.deployments; + return ( +
+
+
+ +
+
+
+

+ {schedule.name} +

+ + {schedule.enabled ? "Enabled" : "Disabled"} + +
+
+ + Cron: {schedule.cronExpression} + + {schedule.scheduleType !== "server" && + schedule.scheduleType !== "dokploy-server" && ( + <> + + • + + + {schedule.shellType} + + + )} +
+ {schedule.command && ( +
+ + + {schedule.command} + +
+ )} +
+
+ +
+ + + + + + + + + + Run Manual Schedule + + + + + + { + await deleteSchedule({ + scheduleId: schedule.scheduleId, + }) + .then(() => { + utils.schedule.list.invalidate({ + id, + scheduleType, + }); + toast.success("Schedule deleted successfully"); + }) + .catch(() => { + toast.error("Error deleting schedule"); + }); + }} + > + + +
+
+ ); + })} +
+ ) : ( +
+ +

+ No scheduled tasks +

+

+ Create your first scheduled task to automate your workflows +

+ +
+ )} +
+
+ ); +}; diff --git a/apps/dokploy/components/dashboard/compose/deployments/show-deployments-compose.tsx b/apps/dokploy/components/dashboard/compose/deployments/show-deployments-compose.tsx index fce4f33f..d79d4172 100644 --- a/apps/dokploy/components/dashboard/compose/deployments/show-deployments-compose.tsx +++ b/apps/dokploy/components/dashboard/compose/deployments/show-deployments-compose.tsx @@ -9,12 +9,13 @@ import { CardTitle, } from "@/components/ui/card"; import { type RouterOutputs, api } from "@/utils/api"; -import { RocketIcon } from "lucide-react"; +import { RocketIcon, Clock } from "lucide-react"; import React, { useEffect, useState } from "react"; import { CancelQueuesCompose } from "./cancel-queues-compose"; import { RefreshTokenCompose } from "./refresh-token-compose"; import { ShowDeploymentCompose } from "./show-deployment-compose"; - +import { Badge } from "@/components/ui/badge"; +import { formatDuration } from "@/components/dashboard/application/schedules/show-schedules-logs"; interface Props { composeId: string; } @@ -96,8 +97,23 @@ export const ShowDeploymentsCompose = ({ composeId }: Props) => { )}
-
+
+									{deployment.startedAt && deployment.finishedAt && (
+
+
+										{formatDuration(
+											Math.floor(
+												(new Date(deployment.finishedAt).getTime() -
+													new Date(deployment.startedAt).getTime()) /
+													1000,
+											),
+										)}
+
+									)}
+ +
+ +
+
diff --git a/apps/dokploy/pages/dashboard/project/[projectId]/services/compose/[composeId].tsx b/apps/dokploy/pages/dashboard/project/[projectId]/services/compose/[composeId].tsx index 4314983e..5e6cd528 100644 --- a/apps/dokploy/pages/dashboard/project/[projectId]/services/compose/[composeId].tsx +++ b/apps/dokploy/pages/dashboard/project/[projectId]/services/compose/[composeId].tsx @@ -1,6 +1,7 @@ import { ShowImport } from "@/components/dashboard/application/advanced/import/show-import"; import { ShowVolumes } from "@/components/dashboard/application/advanced/volumes/show-volumes"; import { ShowEnvironment } from "@/components/dashboard/application/environment/show-enviroment"; +import { ShowSchedules } from "@/components/dashboard/application/schedules/show-schedules"; import { AddCommandCompose } from "@/components/dashboard/compose/advanced/add-command"; import { DeleteService } from "@/components/dashboard/compose/delete-service"; import { ShowDeploymentsCompose } from "@/components/dashboard/compose/deployments/show-deployments-compose"; @@ -230,6 +231,7 @@ const Service = ( Deployments Backups Logs + Schedules {((data?.serverId && isCloud) || !data?.server) && ( Monitoring )} @@ -253,6 +255,12 @@ const Service = (
+ +
+ +
+
+
diff --git a/apps/dokploy/pages/dashboard/schedules.tsx b/apps/dokploy/pages/dashboard/schedules.tsx new file mode 100644 index 00000000..37c8d298 --- /dev/null +++ b/apps/dokploy/pages/dashboard/schedules.tsx @@ -0,0 +1,54 @@ +import { DashboardLayout } from "@/components/layouts/dashboard-layout"; +import type { ReactElement } from "react"; +import type { GetServerSidePropsContext } from "next"; +import { validateRequest } from "@dokploy/server/lib/auth"; +import { IS_CLOUD } from "@dokploy/server/constants"; +import { api } from "@/utils/api"; +import { ShowSchedules } from "@/components/dashboard/application/schedules/show-schedules"; +import { Card } from "@/components/ui/card"; +function SchedulesPage() { + const { data: user } = api.user.get.useQuery(); + return ( +
+ +
+ +
+
+
+ ); +} +export default SchedulesPage; + +SchedulesPage.getLayout = (page: ReactElement) => { + return {page}; +}; + +export async function getServerSideProps( + ctx: GetServerSidePropsContext<{ serviceId: string }>, +) { + if (IS_CLOUD) { + return { + redirect: { + permanent: true, + destination: "/dashboard/projects", + }, + }; + } + const { user } = await validateRequest(ctx.req); + if (!user || user.role !== "owner") { + return { + redirect: { + permanent: true, + destination: "/", + }, + }; + } + + return { + props: {}, + }; +} diff --git a/apps/dokploy/server/api/root.ts b/apps/dokploy/server/api/root.ts index 6ad4b433..95a00fec 100644 --- a/apps/dokploy/server/api/root.ts +++ b/apps/dokploy/server/api/root.ts @@ -35,6 +35,7 @@ import { sshRouter } from "./routers/ssh-key"; import { stripeRouter } from "./routers/stripe"; import { swarmRouter } from "./routers/swarm"; import { userRouter } from "./routers/user"; +import { scheduleRouter } from "./routers/schedule"; /** * This is the primary router for your server. * @@ -78,6 +79,7 @@ export const appRouter = createTRPCRouter({ swarm: swarmRouter, ai: aiRouter, organization: organizationRouter, + schedule: scheduleRouter, }); // export type definition of API diff --git a/apps/dokploy/server/api/routers/schedule.ts b/apps/dokploy/server/api/routers/schedule.ts new file mode 100644 index 00000000..2b204adb --- /dev/null +++ b/apps/dokploy/server/api/routers/schedule.ts @@ -0,0 +1,142 @@ +import { TRPCError } from "@trpc/server"; +import { z } from "zod"; +import { + createScheduleSchema, + schedules, + updateScheduleSchema, +} from "@dokploy/server/db/schema/schedule"; +import { desc, eq } from "drizzle-orm"; +import { db } from "@dokploy/server/db"; +import { createTRPCRouter, protectedProcedure } from "../trpc"; +import { runCommand } from "@dokploy/server/index"; +import { deployments } from "@dokploy/server/db/schema/deployment"; +import { + deleteSchedule, + findScheduleById, + createSchedule, + updateSchedule, +} from "@dokploy/server/services/schedule"; +import { IS_CLOUD, scheduleJob } from "@dokploy/server"; +import { removeJob, schedule } from "@/server/utils/backup"; +import { removeScheduleJob } from "@dokploy/server"; +export const scheduleRouter = createTRPCRouter({ + create: protectedProcedure + .input(createScheduleSchema) + .mutation(async ({ input }) => { + const newSchedule = await createSchedule(input); + + if (newSchedule?.enabled) { + if (IS_CLOUD) { + schedule({ + scheduleId: newSchedule.scheduleId, + type: "schedule", + cronSchedule: newSchedule.cronExpression, + }); + } else { + scheduleJob(newSchedule); + } + } + return newSchedule; + }), + + update: protectedProcedure + .input(updateScheduleSchema) + .mutation(async ({ input }) => { + const updatedSchedule = await updateSchedule(input); + + if (IS_CLOUD) { + if (updatedSchedule?.enabled) { + schedule({ + scheduleId: updatedSchedule.scheduleId, + type: "schedule", + cronSchedule: updatedSchedule.cronExpression, + }); + } else { + await removeJob({ + cronSchedule: updatedSchedule.cronExpression, + scheduleId: updatedSchedule.scheduleId, + type: "schedule", + }); + } + } else { + if (updatedSchedule?.enabled) { + removeScheduleJob(updatedSchedule.scheduleId); + scheduleJob(updatedSchedule); + } else { + removeScheduleJob(updatedSchedule.scheduleId); + } + } + return updatedSchedule; + }), + + delete: protectedProcedure + .input(z.object({ scheduleId: z.string() })) + .mutation(async ({ input }) => { + const schedule = await findScheduleById(input.scheduleId); + 
await deleteSchedule(input.scheduleId); + + if (IS_CLOUD) { + await removeJob({ + cronSchedule: schedule.cronExpression, + scheduleId: schedule.scheduleId, + type: "schedule", + }); + } else { + removeScheduleJob(schedule.scheduleId); + } + return true; + }), + + list: protectedProcedure + .input( + z.object({ + id: z.string(), + scheduleType: z.enum([ + "application", + "compose", + "server", + "dokploy-server", + ]), + }), + ) + .query(async ({ input }) => { + const where = { + application: eq(schedules.applicationId, input.id), + compose: eq(schedules.composeId, input.id), + server: eq(schedules.serverId, input.id), + "dokploy-server": eq(schedules.userId, input.id), + }; + return db.query.schedules.findMany({ + where: where[input.scheduleType], + with: { + application: true, + server: true, + compose: true, + deployments: { + orderBy: [desc(deployments.createdAt)], + }, + }, + }); + }), + + one: protectedProcedure + .input(z.object({ scheduleId: z.string() })) + .query(async ({ input }) => { + return await findScheduleById(input.scheduleId); + }), + + runManually: protectedProcedure + .input(z.object({ scheduleId: z.string().min(1) })) + .mutation(async ({ input }) => { + try { + await runCommand(input.scheduleId); + return true; + } catch (error) { + throw new TRPCError({ + code: "INTERNAL_SERVER_ERROR", + message: + error instanceof Error ? error.message : "Error running schedule", + }); + } + }), +}); diff --git a/apps/dokploy/server/server.ts b/apps/dokploy/server/server.ts index fd908245..148edda0 100644 --- a/apps/dokploy/server/server.ts +++ b/apps/dokploy/server/server.ts @@ -6,6 +6,7 @@ import { createDefaultServerTraefikConfig, createDefaultTraefikConfig, initCronJobs, + initSchedules, initializeNetwork, sendDokployRestartNotifications, setupDirectories, @@ -49,6 +50,7 @@ void app.prepare().then(async () => { createDefaultServerTraefikConfig(); await migration(); await initCronJobs(); + await initSchedules(); await sendDokployRestartNotifications(); } diff --git a/apps/dokploy/server/utils/backup.ts b/apps/dokploy/server/utils/backup.ts index 4fc9db93..cf0b6c22 100644 --- a/apps/dokploy/server/utils/backup.ts +++ b/apps/dokploy/server/utils/backup.ts @@ -14,6 +14,11 @@ type QueueJob = type: "server"; cronSchedule: string; serverId: string; + } + | { + type: "schedule"; + cronSchedule: string; + scheduleId: string; }; export const schedule = async (job: QueueJob) => { try { diff --git a/apps/schedules/src/index.ts b/apps/schedules/src/index.ts index 0ef8e930..7ab2b98c 100644 --- a/apps/schedules/src/index.ts +++ b/apps/schedules/src/index.ts @@ -34,8 +34,8 @@ app.use(async (c, next) => { app.post("/create-backup", zValidator("json", jobQueueSchema), async (c) => { const data = c.req.valid("json"); scheduleJob(data); - logger.info({ data }, "Backup created successfully"); - return c.json({ message: "Backup created successfully" }); + logger.info({ data }, `[${data.type}] created successfully`); + return c.json({ message: `[${data.type}] created successfully` }); }); app.post("/update-backup", zValidator("json", jobQueueSchema), async (c) => { @@ -55,6 +55,12 @@ app.post("/update-backup", zValidator("json", jobQueueSchema), async (c) => { type: "server", cronSchedule: job.pattern, }); + } else if (data.type === "schedule") { + result = await removeJob({ + scheduleId: data.scheduleId, + type: "schedule", + cronSchedule: job.pattern, + }); } logger.info({ result }, "Job removed"); } diff --git a/apps/schedules/src/queue.ts b/apps/schedules/src/queue.ts index e751fa6d..5a1efc05 
100644 --- a/apps/schedules/src/queue.ts +++ b/apps/schedules/src/queue.ts @@ -36,6 +36,12 @@ export const scheduleJob = (job: QueueJob) => { pattern: job.cronSchedule, }, }); + } else if (job.type === "schedule") { + jobQueue.add(job.scheduleId, job, { + repeat: { + pattern: job.cronSchedule, + }, + }); } }; @@ -54,7 +60,13 @@ export const removeJob = async (data: QueueJob) => { }); return result; } - + if (data.type === "schedule") { + const { scheduleId, cronSchedule } = data; + const result = await jobQueue.removeRepeatable(scheduleId, { + pattern: cronSchedule, + }); + return result; + } return false; }; @@ -72,6 +84,11 @@ export const getJobRepeatable = async ( const job = repeatableJobs.find((j) => j.name === `${serverId}-cleanup`); return job ? job : null; } + if (data.type === "schedule") { + const { scheduleId } = data; + const job = repeatableJobs.find((j) => j.name === scheduleId); + return job ? job : null; + } return null; }; diff --git a/apps/schedules/src/schema.ts b/apps/schedules/src/schema.ts index feadb5a9..32b2536b 100644 --- a/apps/schedules/src/schema.ts +++ b/apps/schedules/src/schema.ts @@ -11,6 +11,11 @@ export const jobQueueSchema = z.discriminatedUnion("type", [ type: z.literal("server"), serverId: z.string(), }), + z.object({ + cronSchedule: z.string(), + type: z.literal("schedule"), + scheduleId: z.string(), + }), ]); export type QueueJob = z.infer; diff --git a/apps/schedules/src/utils.ts b/apps/schedules/src/utils.ts index 1ec6b7e0..be8a0ccf 100644 --- a/apps/schedules/src/utils.ts +++ b/apps/schedules/src/utils.ts @@ -3,8 +3,10 @@ import { cleanUpSystemPrune, cleanUpUnusedImages, findBackupById, + findScheduleById, findServerById, keepLatestNBackups, + runCommand, runMariadbBackup, runMongoBackup, runMySqlBackup, @@ -12,7 +14,7 @@ import { runComposeBackup, } from "@dokploy/server"; import { db } from "@dokploy/server/dist/db"; -import { backups, server } from "@dokploy/server/dist/db/schema"; +import { backups, schedules, server } from "@dokploy/server/dist/db/schema"; import { and, eq } from "drizzle-orm"; import { logger } from "./logger.js"; import { scheduleJob } from "./queue.js"; @@ -75,8 +77,7 @@ export const runJobs = async (job: QueueJob) => { } await runComposeBackup(compose, backup); } - } - if (job.type === "server") { + } else if (job.type === "server") { const { serverId } = job; const server = await findServerById(serverId); if (server.serverStatus === "inactive") { @@ -86,6 +87,12 @@ export const runJobs = async (job: QueueJob) => { await cleanUpUnusedImages(serverId); await cleanUpDockerBuilder(serverId); await cleanUpSystemPrune(serverId); + } else if (job.type === "schedule") { + const { scheduleId } = job; + const schedule = await findScheduleById(scheduleId); + if (schedule.enabled) { + await runCommand(schedule.scheduleId); + } } } catch (error) { logger.error(error); @@ -134,4 +141,17 @@ export const initializeJobs = async () => { }); } logger.info({ Quantity: backupsResult.length }, "Backups Initialized"); + + const schedulesResult = await db.query.schedules.findMany({ + where: eq(schedules.enabled, true), + }); + + for (const schedule of schedulesResult) { + scheduleJob({ + scheduleId: schedule.scheduleId, + type: "schedule", + cronSchedule: schedule.cronExpression, + }); + } + logger.info({ Quantity: schedulesResult.length }, "Schedules Initialized"); }; diff --git a/packages/server/src/constants/index.ts b/packages/server/src/constants/index.ts index b6dfd217..461d40f5 100644 --- a/packages/server/src/constants/index.ts +++ 
b/packages/server/src/constants/index.ts @@ -23,5 +23,6 @@ export const paths = (isServer = false) => { CERTIFICATES_PATH: `${DYNAMIC_TRAEFIK_PATH}/certificates`, MONITORING_PATH: `${BASE_PATH}/monitoring`, REGISTRY_PATH: `${BASE_PATH}/registry`, + SCHEDULES_PATH: `${BASE_PATH}/schedules`, }; }; diff --git a/packages/server/src/db/schema/compose.ts b/packages/server/src/db/schema/compose.ts index e4a7bde8..f67521e5 100644 --- a/packages/server/src/db/schema/compose.ts +++ b/packages/server/src/db/schema/compose.ts @@ -17,6 +17,7 @@ import { sshKeys } from "./ssh-key"; import { generateAppName } from "./utils"; import { backups } from "./backups"; +import { schedules } from "./schedule"; export const sourceTypeCompose = pgEnum("sourceTypeCompose", [ "git", "github", @@ -137,6 +138,7 @@ export const composeRelations = relations(compose, ({ one, many }) => ({ references: [server.serverId], }), backups: many(backups), + schedules: many(schedules), })); const createSchema = createInsertSchema(compose, { diff --git a/packages/server/src/db/schema/deployment.ts b/packages/server/src/db/schema/deployment.ts index 4dfed76b..f807a27f 100644 --- a/packages/server/src/db/schema/deployment.ts +++ b/packages/server/src/db/schema/deployment.ts @@ -13,7 +13,7 @@ import { applications } from "./application"; import { compose } from "./compose"; import { previewDeployments } from "./preview-deployments"; import { server } from "./server"; - +import { schedules } from "./schedule"; export const deploymentStatus = pgEnum("deploymentStatus", [ "running", "done", @@ -47,7 +47,13 @@ export const deployments = pgTable("deployment", { createdAt: text("createdAt") .notNull() .$defaultFn(() => new Date().toISOString()), + startedAt: text("startedAt"), + finishedAt: text("finishedAt"), errorMessage: text("errorMessage"), + scheduleId: text("scheduleId").references( + (): AnyPgColumn => schedules.scheduleId, + { onDelete: "cascade" }, + ), }); export const deploymentsRelations = relations(deployments, ({ one }) => ({ @@ -67,6 +73,10 @@ export const deploymentsRelations = relations(deployments, ({ one }) => ({ fields: [deployments.previewDeploymentId], references: [previewDeployments.previewDeploymentId], }), + schedule: one(schedules, { + fields: [deployments.scheduleId], + references: [schedules.scheduleId], + }), })); const schema = createInsertSchema(deployments, { @@ -128,6 +138,17 @@ export const apiCreateDeploymentServer = schema serverId: z.string().min(1), }); +export const apiCreateDeploymentSchedule = schema + .pick({ + title: true, + status: true, + logPath: true, + description: true, + }) + .extend({ + scheduleId: z.string().min(1), + }); + export const apiFindAllByApplication = schema .pick({ applicationId: true, diff --git a/packages/server/src/db/schema/index.ts b/packages/server/src/db/schema/index.ts index 7d8c184f..e5c346cf 100644 --- a/packages/server/src/db/schema/index.ts +++ b/packages/server/src/db/schema/index.ts @@ -31,3 +31,4 @@ export * from "./utils"; export * from "./preview-deployments"; export * from "./ai"; export * from "./account"; +export * from "./schedule"; diff --git a/packages/server/src/db/schema/schedule.ts b/packages/server/src/db/schema/schedule.ts new file mode 100644 index 00000000..3c83247f --- /dev/null +++ b/packages/server/src/db/schema/schedule.ts @@ -0,0 +1,83 @@ +import { relations } from "drizzle-orm"; +import { boolean, pgEnum, pgTable, text } from "drizzle-orm/pg-core"; +import { createInsertSchema } from "drizzle-zod"; +import { nanoid } from "nanoid"; +import { 
z } from "zod"; +import { applications } from "./application"; +import { deployments } from "./deployment"; +import { generateAppName } from "./utils"; +import { compose } from "./compose"; +import { server } from "./server"; +import { users_temp } from "./user"; +export const shellTypes = pgEnum("shellType", ["bash", "sh"]); + +export const scheduleType = pgEnum("scheduleType", [ + "application", + "compose", + "server", + "dokploy-server", +]); + +export const schedules = pgTable("schedule", { + scheduleId: text("scheduleId") + .notNull() + .primaryKey() + .$defaultFn(() => nanoid()), + name: text("name").notNull(), + cronExpression: text("cronExpression").notNull(), + appName: text("appName") + .notNull() + .$defaultFn(() => generateAppName("schedule")), + serviceName: text("serviceName"), + shellType: shellTypes("shellType").notNull().default("bash"), + scheduleType: scheduleType("scheduleType").notNull().default("application"), + command: text("command").notNull(), + script: text("script"), + applicationId: text("applicationId").references( + () => applications.applicationId, + { + onDelete: "cascade", + }, + ), + composeId: text("composeId").references(() => compose.composeId, { + onDelete: "cascade", + }), + serverId: text("serverId").references(() => server.serverId, { + onDelete: "cascade", + }), + userId: text("userId").references(() => users_temp.id, { + onDelete: "cascade", + }), + enabled: boolean("enabled").notNull().default(true), + createdAt: text("createdAt") + .notNull() + .$defaultFn(() => new Date().toISOString()), +}); + +export type Schedule = typeof schedules.$inferSelect; + +export const schedulesRelations = relations(schedules, ({ one, many }) => ({ + application: one(applications, { + fields: [schedules.applicationId], + references: [applications.applicationId], + }), + compose: one(compose, { + fields: [schedules.composeId], + references: [compose.composeId], + }), + server: one(server, { + fields: [schedules.serverId], + references: [server.serverId], + }), + user: one(users_temp, { + fields: [schedules.userId], + references: [users_temp.id], + }), + deployments: many(deployments), +})); + +export const createScheduleSchema = createInsertSchema(schedules); + +export const updateScheduleSchema = createScheduleSchema.extend({ + scheduleId: z.string().min(1), +}); diff --git a/packages/server/src/db/schema/server.ts b/packages/server/src/db/schema/server.ts index 26bb4632..31434122 100644 --- a/packages/server/src/db/schema/server.ts +++ b/packages/server/src/db/schema/server.ts @@ -22,7 +22,7 @@ import { postgres } from "./postgres"; import { redis } from "./redis"; import { sshKeys } from "./ssh-key"; import { generateAppName } from "./utils"; - +import { schedules } from "./schedule"; export const serverStatus = pgEnum("serverStatus", ["active", "inactive"]); export const server = pgTable("server", { @@ -114,6 +114,7 @@ export const serverRelations = relations(server, ({ one, many }) => ({ fields: [server.organizationId], references: [organization.id], }), + schedules: many(schedules), })); const createSchema = createInsertSchema(server, { diff --git a/packages/server/src/db/schema/user.ts b/packages/server/src/db/schema/user.ts index 9f4a5482..18bcb359 100644 --- a/packages/server/src/db/schema/user.ts +++ b/packages/server/src/db/schema/user.ts @@ -14,6 +14,7 @@ import { account, apikey, organization } from "./account"; import { projects } from "./project"; import { certificateType } from "./shared"; import { backups } from "./backups"; +import { schedules } 
from "./schedule"; /** * This is an example of how to use the multi-project schema feature of Drizzle ORM. Use the same * database instance for multiple projects. @@ -127,6 +128,7 @@ export const usersRelations = relations(users_temp, ({ one, many }) => ({ projects: many(projects), apiKeys: many(apikey), backups: many(backups), + schedules: many(schedules), })); const createSchema = createInsertSchema(users_temp, { diff --git a/packages/server/src/index.ts b/packages/server/src/index.ts index 317ac0e7..b321f9ce 100644 --- a/packages/server/src/index.ts +++ b/packages/server/src/index.ts @@ -30,6 +30,7 @@ export * from "./services/github"; export * from "./services/gitlab"; export * from "./services/gitea"; export * from "./services/server"; +export * from "./services/schedule"; export * from "./services/application"; export * from "./utils/databases/rebuild"; export * from "./setup/config-paths"; @@ -127,3 +128,6 @@ export { stopLogCleanup, getLogCleanupStatus, } from "./utils/access-log/handler"; + +export * from "./utils/schedules/utils"; +export * from "./utils/schedules/index"; diff --git a/packages/server/src/services/deployment.ts b/packages/server/src/services/deployment.ts index 88086615..d9c814f8 100644 --- a/packages/server/src/services/deployment.ts +++ b/packages/server/src/services/deployment.ts @@ -6,6 +6,7 @@ import { type apiCreateDeployment, type apiCreateDeploymentCompose, type apiCreateDeploymentPreview, + type apiCreateDeploymentSchedule, type apiCreateDeploymentServer, deployments, } from "@dokploy/server/db/schema"; @@ -27,6 +28,7 @@ import { findPreviewDeploymentById, updatePreviewDeployment, } from "./preview-deployment"; +import { findScheduleById } from "./schedule"; export type Deployment = typeof deployments.$inferSelect; @@ -57,6 +59,7 @@ export const createDeployment = async ( try { await removeLastTenDeployments( deployment.applicationId, + "application", application.serverId, ); const { LOGS_PATH } = paths(!!application.serverId); @@ -88,6 +91,7 @@ export const createDeployment = async ( status: "running", logPath: logFilePath, description: deployment.description || "", + startedAt: new Date().toISOString(), }) .returning(); if (deploymentCreate.length === 0 || !deploymentCreate[0]) { @@ -107,6 +111,8 @@ export const createDeployment = async ( logPath: "", description: deployment.description || "", errorMessage: `An error have occured: ${error instanceof Error ? error.message : error}`, + startedAt: new Date().toISOString(), + finishedAt: new Date().toISOString(), }) .returning(); await updateApplicationStatus(application.applicationId, "error"); @@ -128,8 +134,9 @@ export const createDeploymentPreview = async ( deployment.previewDeploymentId, ); try { - await removeLastTenPreviewDeploymenById( + await removeLastTenDeployments( deployment.previewDeploymentId, + "previewDeployment", previewDeployment?.application?.serverId, ); @@ -165,6 +172,7 @@ export const createDeploymentPreview = async ( logPath: logFilePath, description: deployment.description || "", previewDeploymentId: deployment.previewDeploymentId, + startedAt: new Date().toISOString(), }) .returning(); if (deploymentCreate.length === 0 || !deploymentCreate[0]) { @@ -184,6 +192,8 @@ export const createDeploymentPreview = async ( logPath: "", description: deployment.description || "", errorMessage: `An error have occured: ${error instanceof Error ? 
error.message : error}`, + startedAt: new Date().toISOString(), + finishedAt: new Date().toISOString(), }) .returning(); await updatePreviewDeployment(deployment.previewDeploymentId, { @@ -205,8 +215,9 @@ export const createDeploymentCompose = async ( ) => { const compose = await findComposeById(deployment.composeId); try { - await removeLastTenComposeDeployments( + await removeLastTenDeployments( deployment.composeId, + "compose", compose.serverId, ); const { LOGS_PATH } = paths(!!compose.serverId); @@ -238,6 +249,7 @@ echo "Initializing deployment" >> ${logFilePath}; description: deployment.description || "", status: "running", logPath: logFilePath, + startedAt: new Date().toISOString(), }) .returning(); if (deploymentCreate.length === 0 || !deploymentCreate[0]) { @@ -257,6 +269,8 @@ echo "Initializing deployment" >> ${logFilePath}; logPath: "", description: deployment.description || "", errorMessage: `An error have occured: ${error instanceof Error ? error.message : error}`, + startedAt: new Date().toISOString(), + finishedAt: new Date().toISOString(), }) .returning(); await updateCompose(compose.composeId, { @@ -270,6 +284,82 @@ echo "Initializing deployment" >> ${logFilePath}; } }; +export const createDeploymentSchedule = async ( + deployment: Omit< + typeof apiCreateDeploymentSchedule._type, + "deploymentId" | "createdAt" | "status" | "logPath" + >, +) => { + const schedule = await findScheduleById(deployment.scheduleId); + + try { + const serverId = + schedule.application?.serverId || + schedule.compose?.serverId || + schedule.server?.serverId; + await removeLastTenDeployments(deployment.scheduleId, "schedule", serverId); + const { SCHEDULES_PATH } = paths(!!serverId); + const formattedDateTime = format(new Date(), "yyyy-MM-dd:HH:mm:ss"); + const fileName = `${schedule.appName}-${formattedDateTime}.log`; + const logFilePath = path.join(SCHEDULES_PATH, schedule.appName, fileName); + + if (serverId) { + const server = await findServerById(serverId); + + const command = ` + mkdir -p ${SCHEDULES_PATH}/${schedule.appName}; + echo "Initializing schedule" >> ${logFilePath}; + `; + + await execAsyncRemote(server.serverId, command); + } else { + await fsPromises.mkdir(path.join(SCHEDULES_PATH, schedule.appName), { + recursive: true, + }); + await fsPromises.writeFile(logFilePath, "Initializing schedule\n"); + } + + const deploymentCreate = await db + .insert(deployments) + .values({ + scheduleId: deployment.scheduleId, + title: deployment.title || "Deployment", + status: "running", + logPath: logFilePath, + description: deployment.description || "", + startedAt: new Date().toISOString(), + }) + .returning(); + if (deploymentCreate.length === 0 || !deploymentCreate[0]) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: "Error creating the deployment", + }); + } + return deploymentCreate[0]; + } catch (error) { + console.log(error); + await db + .insert(deployments) + .values({ + scheduleId: deployment.scheduleId, + title: deployment.title || "Deployment", + status: "error", + logPath: "", + description: deployment.description || "", + errorMessage: `An error have occured: ${error instanceof Error ? 
error.message : error}`, + startedAt: new Date().toISOString(), + finishedAt: new Date().toISOString(), + }) + .returning(); + + throw new TRPCError({ + code: "BAD_REQUEST", + message: "Error creating the deployment", + }); + } +}; + export const removeDeployment = async (deploymentId: string) => { try { const deployment = await db @@ -296,109 +386,15 @@ export const removeDeploymentsByApplicationId = async ( .returning(); }; -const removeLastTenDeployments = async ( - applicationId: string, - serverId: string | null, +const getDeploymentsByType = async ( + id: string, + type: "application" | "compose" | "server" | "schedule" | "previewDeployment", ) => { const deploymentList = await db.query.deployments.findMany({ - where: eq(deployments.applicationId, applicationId), + where: eq(deployments[`${type}Id`], id), orderBy: desc(deployments.createdAt), }); - - if (deploymentList.length > 10) { - const deploymentsToDelete = deploymentList.slice(9); - if (serverId) { - let command = ""; - for (const oldDeployment of deploymentsToDelete) { - const logPath = path.join(oldDeployment.logPath); - - command += ` - rm -rf ${logPath}; - `; - await removeDeployment(oldDeployment.deploymentId); - } - - await execAsyncRemote(serverId, command); - } else { - for (const oldDeployment of deploymentsToDelete) { - const logPath = path.join(oldDeployment.logPath); - if (existsSync(logPath)) { - await fsPromises.unlink(logPath); - } - await removeDeployment(oldDeployment.deploymentId); - } - } - } -}; - -const removeLastTenComposeDeployments = async ( - composeId: string, - serverId: string | null, -) => { - const deploymentList = await db.query.deployments.findMany({ - where: eq(deployments.composeId, composeId), - orderBy: desc(deployments.createdAt), - }); - if (deploymentList.length > 10) { - if (serverId) { - let command = ""; - const deploymentsToDelete = deploymentList.slice(9); - for (const oldDeployment of deploymentsToDelete) { - const logPath = path.join(oldDeployment.logPath); - - command += ` - rm -rf ${logPath}; - `; - await removeDeployment(oldDeployment.deploymentId); - } - - await execAsyncRemote(serverId, command); - } else { - const deploymentsToDelete = deploymentList.slice(9); - for (const oldDeployment of deploymentsToDelete) { - const logPath = path.join(oldDeployment.logPath); - if (existsSync(logPath)) { - await fsPromises.unlink(logPath); - } - await removeDeployment(oldDeployment.deploymentId); - } - } - } -}; - -export const removeLastTenPreviewDeploymenById = async ( - previewDeploymentId: string, - serverId: string | null, -) => { - const deploymentList = await db.query.deployments.findMany({ - where: eq(deployments.previewDeploymentId, previewDeploymentId), - orderBy: desc(deployments.createdAt), - }); - - if (deploymentList.length > 10) { - const deploymentsToDelete = deploymentList.slice(9); - if (serverId) { - let command = ""; - for (const oldDeployment of deploymentsToDelete) { - const logPath = path.join(oldDeployment.logPath); - - command += ` - rm -rf ${logPath}; - `; - await removeDeployment(oldDeployment.deploymentId); - } - - await execAsyncRemote(serverId, command); - } else { - for (const oldDeployment of deploymentsToDelete) { - const logPath = path.join(oldDeployment.logPath); - if (existsSync(logPath)) { - await fsPromises.unlink(logPath); - } - await removeDeployment(oldDeployment.deploymentId); - } - } - } + return deploymentList; }; export const removeDeployments = async (application: Application) => { @@ -413,6 +409,38 @@ export const removeDeployments = async 
(application: Application) => { await removeDeploymentsByApplicationId(applicationId); }; +const removeLastTenDeployments = async ( + id: string, + type: "application" | "compose" | "server" | "schedule" | "previewDeployment", + serverId?: string | null, +) => { + const deploymentList = await getDeploymentsByType(id, type); + if (deploymentList.length > 10) { + const deploymentsToDelete = deploymentList.slice(10); + if (serverId) { + let command = ""; + for (const oldDeployment of deploymentsToDelete) { + const logPath = path.join(oldDeployment.logPath); + + command += ` + rm -rf ${logPath}; + `; + await removeDeployment(oldDeployment.deploymentId); + } + + await execAsyncRemote(serverId, command); + } else { + for (const oldDeployment of deploymentsToDelete) { + const logPath = path.join(oldDeployment.logPath); + if (existsSync(logPath)) { + await fsPromises.unlink(logPath); + } + await removeDeployment(oldDeployment.deploymentId); + } + } + } +}; + export const removeDeploymentsByPreviewDeploymentId = async ( previewDeployment: PreviewDeployment, serverId: string | null, @@ -494,6 +522,10 @@ export const updateDeploymentStatus = async ( .update(deployments) .set({ status: deploymentStatus, + finishedAt: + deploymentStatus === "done" || deploymentStatus === "error" + ? new Date().toISOString() + : null, }) .where(eq(deployments.deploymentId, deploymentId)) .returning(); diff --git a/packages/server/src/services/schedule.ts b/packages/server/src/services/schedule.ts new file mode 100644 index 00000000..b9a6cd52 --- /dev/null +++ b/packages/server/src/services/schedule.ts @@ -0,0 +1,126 @@ +import { type Schedule, schedules } from "../db/schema/schedule"; +import { db } from "../db"; +import { eq } from "drizzle-orm"; +import { TRPCError } from "@trpc/server"; +import type { z } from "zod"; +import type { + createScheduleSchema, + updateScheduleSchema, +} from "../db/schema/schedule"; +import { execAsync, execAsyncRemote } from "../utils/process/execAsync"; +import { paths } from "../constants"; +import path from "node:path"; +import { encodeBase64 } from "../utils/docker/utils"; + +export type ScheduleExtended = Awaited>; + +export const createSchedule = async ( + input: z.infer, +) => { + const { scheduleId, ...rest } = input; + const [newSchedule] = await db.insert(schedules).values(rest).returning(); + + if ( + newSchedule && + (newSchedule.scheduleType === "dokploy-server" || + newSchedule.scheduleType === "server") + ) { + await handleScript(newSchedule); + } + + return newSchedule; +}; + +export const findScheduleById = async (scheduleId: string) => { + const schedule = await db.query.schedules.findFirst({ + where: eq(schedules.scheduleId, scheduleId), + with: { + application: true, + compose: true, + server: true, + }, + }); + + if (!schedule) { + throw new TRPCError({ + code: "NOT_FOUND", + message: "Schedule not found", + }); + } + return schedule; +}; + +export const deleteSchedule = async (scheduleId: string) => { + const schedule = await findScheduleById(scheduleId); + const serverId = + schedule?.serverId || + schedule?.application?.serverId || + schedule?.compose?.serverId; + const { SCHEDULES_PATH } = paths(!!serverId); + + const fullPath = path.join(SCHEDULES_PATH, schedule?.appName || ""); + const command = `rm -rf ${fullPath}`; + if (serverId) { + await execAsyncRemote(serverId, command); + } else { + await execAsync(command); + } + + const scheduleResult = await db + .delete(schedules) + .where(eq(schedules.scheduleId, scheduleId)); + if (!scheduleResult) { + throw new 
TRPCError({ + code: "NOT_FOUND", + message: "Schedule not found", + }); + } + + return true; +}; + +export const updateSchedule = async ( + input: z.infer, +) => { + const { scheduleId, ...rest } = input; + const [updatedSchedule] = await db + .update(schedules) + .set(rest) + .where(eq(schedules.scheduleId, scheduleId)) + .returning(); + + if (!updatedSchedule) { + throw new TRPCError({ + code: "NOT_FOUND", + message: "Schedule not found", + }); + } + + if ( + updatedSchedule?.scheduleType === "dokploy-server" || + updatedSchedule?.scheduleType === "server" + ) { + await handleScript(updatedSchedule); + } + + return updatedSchedule; +}; + +const handleScript = async (schedule: Schedule) => { + const { SCHEDULES_PATH } = paths(!!schedule?.serverId); + const fullPath = path.join(SCHEDULES_PATH, schedule?.appName || ""); + const encodedContent = encodeBase64(schedule?.script || ""); + const script = ` + mkdir -p ${fullPath} + rm -f ${fullPath}/script.sh + touch ${fullPath}/script.sh + chmod +x ${fullPath}/script.sh + echo "${encodedContent}" | base64 -d > ${fullPath}/script.sh + `; + + if (schedule?.scheduleType === "dokploy-server") { + await execAsync(script); + } else if (schedule?.scheduleType === "server") { + await execAsyncRemote(schedule?.serverId || "", script); + } +}; diff --git a/packages/server/src/setup/config-paths.ts b/packages/server/src/setup/config-paths.ts index 190e438b..95f2d30d 100644 --- a/packages/server/src/setup/config-paths.ts +++ b/packages/server/src/setup/config-paths.ts @@ -18,6 +18,7 @@ export const setupDirectories = () => { MAIN_TRAEFIK_PATH, MONITORING_PATH, SSH_PATH, + SCHEDULES_PATH, } = paths(); const directories = [ BASE_PATH, @@ -28,6 +29,7 @@ export const setupDirectories = () => { SSH_PATH, CERTIFICATES_PATH, MONITORING_PATH, + SCHEDULES_PATH, ]; for (const dir of directories) { diff --git a/packages/server/src/utils/docker/utils.ts b/packages/server/src/utils/docker/utils.ts index 71b7e4aa..8af461b4 100644 --- a/packages/server/src/utils/docker/utils.ts +++ b/packages/server/src/utils/docker/utils.ts @@ -13,6 +13,7 @@ import type { RedisNested } from "../databases/redis"; import { execAsync, execAsyncRemote } from "../process/execAsync"; import { spawnAsync } from "../process/spawnAsync"; import { getRemoteDocker } from "../servers/remote-docker"; +import type { Compose } from "@dokploy/server/services/compose"; interface RegistryAuth { username: string; @@ -541,3 +542,67 @@ export const getRemoteServiceContainer = async ( throw error; } }; + +export const getServiceContainerIV2 = async ( + appName: string, + serverId?: string | null, +) => { + try { + const filter = { + status: ["running"], + label: [`com.docker.swarm.service.name=${appName}`], + }; + const remoteDocker = await getRemoteDocker(serverId); + const containers = await remoteDocker.listContainers({ + filters: JSON.stringify(filter), + }); + + if (containers.length === 0 || !containers[0]) { + throw new Error(`No container found with name: ${appName}`); + } + + const container = containers[0]; + return container; + } catch (error) { + throw error; + } +}; + +export const getComposeContainer = async ( + compose: Compose, + serviceName: string, +) => { + try { + const { appName, composeType, serverId } = compose; + // 1. 
Determine the correct labels based on composeType + const labels: string[] = []; + if (composeType === "stack") { + // Labels for Docker Swarm stack services + labels.push(`com.docker.stack.namespace=${appName}`); + labels.push(`com.docker.swarm.service.name=${appName}_${serviceName}`); + } else { + // Labels for Docker Compose projects (default) + labels.push(`com.docker.compose.project=${appName}`); + labels.push(`com.docker.compose.service=${serviceName}`); + } + const filter = { + status: ["running"], + label: labels, + }; + + const remoteDocker = await getRemoteDocker(serverId); + const containers = await remoteDocker.listContainers({ + filters: JSON.stringify(filter), + limit: 1, + }); + + if (containers.length === 0 || !containers[0]) { + throw new Error(`No container found with name: ${appName}`); + } + + const container = containers[0]; + return container; + } catch (error) { + throw error; + } +}; diff --git a/packages/server/src/utils/schedules/index.ts b/packages/server/src/utils/schedules/index.ts new file mode 100644 index 00000000..0f9b167e --- /dev/null +++ b/packages/server/src/utils/schedules/index.ts @@ -0,0 +1,28 @@ +import { db } from "../../db/index"; +import { schedules } from "@dokploy/server/db/schema"; +import { eq } from "drizzle-orm"; +import { scheduleJob } from "./utils"; + +export const initSchedules = async () => { + try { + const schedulesResult = await db.query.schedules.findMany({ + where: eq(schedules.enabled, true), + with: { + server: true, + application: true, + compose: true, + user: true, + }, + }); + + console.log(`Initializing ${schedulesResult.length} schedules`); + for (const schedule of schedulesResult) { + scheduleJob(schedule); + console.log( + `Initialized schedule: ${schedule.name} ${schedule.scheduleType} ✅`, + ); + } + } catch (error) { + console.log(`Error initializing schedules: ${error}`); + } +}; diff --git a/packages/server/src/utils/schedules/utils.ts b/packages/server/src/utils/schedules/utils.ts new file mode 100644 index 00000000..14c32213 --- /dev/null +++ b/packages/server/src/utils/schedules/utils.ts @@ -0,0 +1,149 @@ +import type { Schedule } from "@dokploy/server/db/schema/schedule"; +import { findScheduleById } from "@dokploy/server/services/schedule"; +import { scheduledJobs, scheduleJob as scheduleJobNode } from "node-schedule"; +import { getComposeContainer, getServiceContainerIV2 } from "../docker/utils"; +import { execAsyncRemote } from "../process/execAsync"; +import { spawnAsync } from "../process/spawnAsync"; +import { createDeploymentSchedule } from "@dokploy/server/services/deployment"; +import { createWriteStream } from "node:fs"; +import { updateDeploymentStatus } from "@dokploy/server/services/deployment"; +import { paths } from "@dokploy/server/constants"; +import path from "node:path"; + +export const scheduleJob = (schedule: Schedule) => { + const { cronExpression, scheduleId } = schedule; + + scheduleJobNode(scheduleId, cronExpression, async () => { + await runCommand(scheduleId); + }); +}; + +export const removeScheduleJob = (scheduleId: string) => { + const currentJob = scheduledJobs[scheduleId]; + currentJob?.cancel(); +}; + +export const runCommand = async (scheduleId: string) => { + const { + application, + command, + shellType, + scheduleType, + compose, + serviceName, + appName, + serverId, + } = await findScheduleById(scheduleId); + + const deployment = await createDeploymentSchedule({ + scheduleId, + title: "Schedule", + description: "Schedule", + }); + + if (scheduleType === "application" || 
scheduleType === "compose") { + let containerId = ""; + let serverId = ""; + if (scheduleType === "application" && application) { + const container = await getServiceContainerIV2( + application.appName, + application.serverId, + ); + containerId = container.Id; + serverId = application.serverId || ""; + } + if (scheduleType === "compose" && compose) { + const container = await getComposeContainer(compose, serviceName || ""); + containerId = container.Id; + serverId = compose.serverId || ""; + } + + if (serverId) { + try { + await execAsyncRemote( + serverId, + ` + set -e + echo "Running command: docker exec ${containerId} ${shellType} -c \"${command}\"" >> ${deployment.logPath}; + docker exec ${containerId} ${shellType} -c "${command}" >> ${deployment.logPath} 2>> ${deployment.logPath} || { + echo "❌ Command failed" >> ${deployment.logPath}; + exit 1; + } + echo "✅ Command executed successfully" >> ${deployment.logPath}; + `, + ); + } catch (error) { + await updateDeploymentStatus(deployment.deploymentId, "error"); + throw error; + } + } else { + const writeStream = createWriteStream(deployment.logPath, { flags: "a" }); + + try { + writeStream.write( + `docker exec ${containerId} ${shellType} -c "${command}"\n`, + ); + await spawnAsync( + "docker", + ["exec", containerId, shellType, "-c", command], + (data) => { + if (writeStream.writable) { + writeStream.write(data); + } + }, + ); + + writeStream.write("✅ Command executed successfully\n"); + } catch (error) { + writeStream.write("❌ Command failed\n"); + writeStream.write( + error instanceof Error ? error.message : "Unknown error", + ); + writeStream.end(); + await updateDeploymentStatus(deployment.deploymentId, "error"); + throw error; + } + } + } else if (scheduleType === "dokploy-server") { + try { + const writeStream = createWriteStream(deployment.logPath, { flags: "a" }); + const { SCHEDULES_PATH } = paths(); + const fullPath = path.join(SCHEDULES_PATH, appName || ""); + + await spawnAsync( + "bash", + ["-c", "./script.sh"], + (data) => { + if (writeStream.writable) { + writeStream.write(data); + } + }, + { + cwd: fullPath, + }, + ); + } catch (error) { + await updateDeploymentStatus(deployment.deploymentId, "error"); + throw error; + } + } else if (scheduleType === "server") { + try { + const { SCHEDULES_PATH } = paths(true); + const fullPath = path.join(SCHEDULES_PATH, appName || ""); + const command = ` + set -e + echo "Running script" >> ${deployment.logPath}; + bash -c ${fullPath}/script.sh >> ${deployment.logPath} 2>> ${deployment.logPath} || { + echo "❌ Command failed" >> ${deployment.logPath}; + exit 1; + } + echo "✅ Command executed successfully" >> ${deployment.logPath}; + `; + await execAsyncRemote(serverId, command); + } catch (error) { + await updateDeploymentStatus(deployment.deploymentId, "error"); + throw error; + } + } + await updateDeploymentStatus(deployment.deploymentId, "done"); +};