diff --git a/apps/api/src/index.ts b/apps/api/src/index.ts
index 9aa4fa15..a086a1ec 100644
--- a/apps/api/src/index.ts
+++ b/apps/api/src/index.ts
@@ -7,12 +7,28 @@ import { createClient } from "redis";
 import { logger } from "./logger";
 import { type DeployJob, deployJobSchema } from "./schema";
 import { deploy } from "./utils";
+import { validateBearerTokenAPI } from "@dokploy/server";
 
 const app = new Hono();
 const redisClient = createClient({
 	url: process.env.REDIS_URL,
 });
 
+app.use(async (c, next) => {
+	const authHeader = c.req.header("authorization");
+
+	if (!authHeader || !authHeader.startsWith("Bearer ")) {
+		return c.json({ message: "Authorization header missing" }, 401);
+	}
+
+	const result = await validateBearerTokenAPI(authHeader);
+
+	if (!result.user || !result.session) {
+		return c.json({ message: "Invalid session" }, 403);
+	}
+	return next();
+});
+
 app.post("/deploy", zValidator("json", deployJobSchema), (c) => {
 	const data = c.req.valid("json");
 	const res = queue.add(data, { groupName: data.serverId });
diff --git a/apps/dokploy/server/api/routers/application.ts b/apps/dokploy/server/api/routers/application.ts
index 85ceb1e3..a264a125 100644
--- a/apps/dokploy/server/api/routers/application.ts
+++ b/apps/dokploy/server/api/routers/application.ts
@@ -254,7 +254,7 @@ export const applicationRouter = createTRPCRouter({
 
 			if (IS_CLOUD && application.serverId) {
 				jobData.serverId = application.serverId;
-				await deploy(jobData);
+				await deploy(jobData, ctx.session.id);
 				return true;
 			}
 			await myQueue.add(
@@ -482,7 +482,7 @@ export const applicationRouter = createTRPCRouter({
 			};
 			if (IS_CLOUD && application.serverId) {
 				jobData.serverId = application.serverId;
-				await deploy(jobData);
+				await deploy(jobData, ctx.session.id);
 				return true;
 			}
 
@@ -571,7 +571,7 @@ export const applicationRouter = createTRPCRouter({
 			};
 			if (IS_CLOUD && app.serverId) {
 				jobData.serverId = app.serverId;
-				await deploy(jobData);
+				await deploy(jobData, ctx.session.id);
 				return true;
 			}
 
diff --git a/apps/dokploy/server/api/routers/backup.ts b/apps/dokploy/server/api/routers/backup.ts
index 98c02d78..19dd8031 100644
--- a/apps/dokploy/server/api/routers/backup.ts
+++ b/apps/dokploy/server/api/routers/backup.ts
@@ -5,7 +5,7 @@ import {
 	apiRemoveBackup,
 	apiUpdateBackup,
 } from "@/server/db/schema";
-import { removeJob, schedule } from "@/server/utils/backup";
+import { removeJob, schedule, updateJob } from "@/server/utils/backup";
 import {
 	IS_CLOUD,
 	createBackup,
@@ -29,18 +29,21 @@ import { TRPCError } from "@trpc/server";
 
 export const backupRouter = createTRPCRouter({
 	create: protectedProcedure
 		.input(apiCreateBackup)
-		.mutation(async ({ input }) => {
+		.mutation(async ({ input, ctx }) => {
 			try {
 				const newBackup = await createBackup(input);
 				const backup = await findBackupById(newBackup.backupId);
 
 				if (IS_CLOUD && backup.enabled) {
-					await schedule({
-						cronSchedule: backup.schedule,
-						backupId: backup.backupId,
-						type: "backup",
-					});
+					await schedule(
+						{
+							cronSchedule: backup.schedule,
+							backupId: backup.backupId,
+							type: "backup",
+						},
+						ctx.session.id,
+					);
 				} else {
 					if (backup.enabled) {
 						scheduleBackup(backup);
@@ -60,17 +63,31 @@ export const backupRouter = createTRPCRouter({
 		}),
 	update: protectedProcedure
 		.input(apiUpdateBackup)
-		.mutation(async ({ input }) => {
+		.mutation(async ({ input, ctx }) => {
 			try {
 				await updateBackupById(input.backupId, input);
 				const backup = await findBackupById(input.backupId);
 
-				if (IS_CLOUD && backup.enabled) {
-					await schedule({
-						cronSchedule: backup.schedule,
-						backupId: backup.backupId,
-						type: "backup",
-					});
+				if (IS_CLOUD) {
+					if (backup.enabled) {
+						await updateJob(
+							{
+								cronSchedule: backup.schedule,
+								backupId: backup.backupId,
+								type: "backup",
+							},
+							ctx.session.id,
+						);
+					} else {
+						await removeJob(
+							{
+								cronSchedule: backup.schedule,
+								backupId: backup.backupId,
+								type: "backup",
+							},
+							ctx.session.id,
+						);
+					}
 				} else {
 					if (backup.enabled) {
 						removeScheduleBackup(input.backupId);
@@ -88,16 +105,19 @@ export const backupRouter = createTRPCRouter({
 		}),
 	remove: protectedProcedure
 		.input(apiRemoveBackup)
-		.mutation(async ({ input }) => {
+		.mutation(async ({ input, ctx }) => {
 			try {
 				const value = await removeBackupById(input.backupId);
 				if (IS_CLOUD && value) {
-					removeJob({
-						backupId: input.backupId,
-						cronSchedule: value.schedule,
-						type: "backup",
-					});
-				} else {
+					removeJob(
+						{
+							backupId: input.backupId,
+							cronSchedule: value.schedule,
+							type: "backup",
+						},
+						ctx.session.id,
+					);
+				} else if (!IS_CLOUD) {
 					removeScheduleBackup(input.backupId);
 				}
 				return value;
diff --git a/apps/dokploy/server/api/routers/compose.ts b/apps/dokploy/server/api/routers/compose.ts
index 21704f94..2042f941 100644
--- a/apps/dokploy/server/api/routers/compose.ts
+++ b/apps/dokploy/server/api/routers/compose.ts
@@ -256,7 +256,7 @@ export const composeRouter = createTRPCRouter({
 
 			if (IS_CLOUD && compose.serverId) {
 				jobData.serverId = compose.serverId;
-				await deploy(jobData);
+				await deploy(jobData, ctx.session.id);
 				return true;
 			}
 			await myQueue.add(
@@ -288,7 +288,7 @@ export const composeRouter = createTRPCRouter({
 			};
 			if (IS_CLOUD && compose.serverId) {
 				jobData.serverId = compose.serverId;
-				await deploy(jobData);
+				await deploy(jobData, ctx.session.id);
 				return true;
 			}
 			await myQueue.add(
diff --git a/apps/dokploy/server/utils/backup.ts b/apps/dokploy/server/utils/backup.ts
index edb490c0..7d10c616 100644
--- a/apps/dokploy/server/utils/backup.ts
+++ b/apps/dokploy/server/utils/backup.ts
@@ -9,12 +9,13 @@ type QueueJob =
 			cronSchedule: string;
 			serverId: string;
 	  };
-export const schedule = async (job: QueueJob) => {
+export const schedule = async (job: QueueJob, authSession: string) => {
 	try {
 		const result = await fetch(`${process.env.JOBS_URL}/create-backup`, {
 			method: "POST",
 			headers: {
 				"Content-Type": "application/json",
+				Authorization: `Bearer ${authSession}`,
 			},
 			body: JSON.stringify(job),
 		});
@@ -27,12 +28,32 @@ export const schedule = async (job: QueueJob) => {
 	}
 };
 
-export const removeJob = async (job: QueueJob) => {
+export const removeJob = async (job: QueueJob, authSession: string) => {
 	try {
 		const result = await fetch(`${process.env.JOBS_URL}/remove-job`, {
 			method: "POST",
 			headers: {
 				"Content-Type": "application/json",
+				Authorization: `Bearer ${authSession}`,
+			},
+			body: JSON.stringify(job),
+		});
+		const data = await result.json();
+		console.log(data);
+		return data;
+	} catch (error) {
+		console.log(error);
+		throw error;
+	}
+};
+
+export const updateJob = async (job: QueueJob, authSession: string) => {
+	try {
+		const result = await fetch(`${process.env.JOBS_URL}/update-backup`, {
+			method: "POST",
+			headers: {
+				"Content-Type": "application/json",
+				Authorization: `Bearer ${authSession}`,
 			},
 			body: JSON.stringify(job),
 		});
diff --git a/apps/dokploy/server/utils/deploy.ts b/apps/dokploy/server/utils/deploy.ts
index a5b3da97..87b47e52 100644
--- a/apps/dokploy/server/utils/deploy.ts
+++ b/apps/dokploy/server/utils/deploy.ts
@@ -1,11 +1,12 @@
 import type { DeploymentJob } from "../queues/deployments-queue";
 
-export const deploy = async (jobData: DeploymentJob) => {
+export const deploy = async (jobData: DeploymentJob, sessionId: string) => {
 	try {
 		const result = await fetch(`${process.env.SERVER_URL}/deploy`, {
 			method: "POST",
 			headers: {
 				"Content-Type": "application/json",
+				Authorization: `Bearer ${sessionId}`,
 			},
 			body: JSON.stringify(jobData),
 		});
diff --git a/apps/schedules/src/index.ts b/apps/schedules/src/index.ts
index 351775b7..78eb5fb1 100644
--- a/apps/schedules/src/index.ts
+++ b/apps/schedules/src/index.ts
@@ -3,22 +3,62 @@ import { Hono } from "hono";
 import "dotenv/config";
 import { zValidator } from "@hono/zod-validator";
 import { logger } from "./logger";
-import { cleanQueue, removeJob, scheduleJob } from "./queue";
+import { cleanQueue, getJobRepeatable, removeJob, scheduleJob } from "./queue";
 import { jobQueueSchema } from "./schema";
 import { firstWorker, secondWorker } from "./workers";
+import { validateBearerTokenAPI } from "@dokploy/server";
 
 const app = new Hono();
 
 cleanQueue();
 
-app.post("/create-backup", zValidator("json", jobQueueSchema), (c) => {
+app.use(async (c, next) => {
+	const authHeader = c.req.header("authorization");
+
+	if (!authHeader || !authHeader.startsWith("Bearer ")) {
+		return c.json({ message: "Authorization header missing" }, 401);
+	}
+
+	const result = await validateBearerTokenAPI(authHeader);
+
+	if (!result.user || !result.session) {
+		return c.json({ message: "Invalid session" }, 403);
+	}
+	return next();
+});
+
+app.post("/create-backup", zValidator("json", jobQueueSchema), async (c) => {
 	const data = c.req.valid("json");
 	scheduleJob(data);
-
 	logger.info("Backup created successfully", data);
 	return c.json({ message: "Backup created successfully" });
 });
 
+app.post("/update-backup", zValidator("json", jobQueueSchema), async (c) => {
+	const data = c.req.valid("json");
+	const job = await getJobRepeatable(data);
+	if (job) {
+		let result = false;
+		if (data.type === "backup") {
+			result = await removeJob({
+				backupId: data.backupId,
+				type: "backup",
+				cronSchedule: job.pattern,
+			});
+		} else if (data.type === "server") {
+			result = await removeJob({
+				serverId: data.serverId,
+				type: "server",
+				cronSchedule: job.pattern,
+			});
+		}
+		logger.info("Job removed", result);
+	}
+	scheduleJob(data);
+
+	return c.json({ message: "Backup updated successfully" });
+});
+
 app.post("/remove-job", zValidator("json", jobQueueSchema), async (c) => {
 	const data = c.req.valid("json");
 	const result = await removeJob(data);
diff --git a/apps/schedules/src/queue.ts b/apps/schedules/src/queue.ts
index aec548e6..df7ef8c9 100644
--- a/apps/schedules/src/queue.ts
+++ b/apps/schedules/src/queue.ts
@@ -1,4 +1,4 @@
-import { Queue } from "bullmq";
+import { Queue, type RepeatableJob } from "bullmq";
 import { logger } from "./logger";
 import type { QueueJob } from "./schema";
 
@@ -52,4 +52,24 @@ export const removeJob = async (data: QueueJob) => {
 		});
 		return result;
 	}
+
+	return false;
+};
+
+export const getJobRepeatable = async (
+	data: QueueJob,
+): Promise<RepeatableJob | null> => {
+	const repeatableJobs = await jobQueue.getRepeatableJobs();
+	if (data.type === "backup") {
+		const { backupId } = data;
+		const job = repeatableJobs.find((j) => j.name === backupId);
+		return job ? job : null;
+	}
+	if (data.type === "server") {
+		const { serverId } = data;
+		const job = repeatableJobs.find((j) => j.name === `${serverId}-cleanup`);
+		return job ? job : null;
+	}
+
+	return null;
 };
diff --git a/apps/schedules/src/workers.ts b/apps/schedules/src/workers.ts
index a2583e4d..2bd7a623 100644
--- a/apps/schedules/src/workers.ts
+++ b/apps/schedules/src/workers.ts
@@ -5,6 +5,7 @@ import { runJobs } from "./utils";
 export const firstWorker = new Worker(
 	"backupQueue",
 	async (job: Job) => {
+		console.log("Job received", job.data);
 		await runJobs(job.data);
 	},
 	{
@@ -17,6 +18,7 @@ export const firstWorker = new Worker(
 export const secondWorker = new Worker(
 	"backupQueue",
 	async (job: Job) => {
+		console.log(job.data);
 		await runJobs(job.data);
 	},
 	{
diff --git a/packages/server/src/auth/token.ts b/packages/server/src/auth/token.ts
index 8831a684..f29d4dbd 100644
--- a/packages/server/src/auth/token.ts
+++ b/packages/server/src/auth/token.ts
@@ -59,3 +59,41 @@
 		}),
 	};
 };
+
+export const validateBearerTokenAPI = async (
+	authorizationHeader: string,
+): ReturnValidateToken => {
+	const sessionId = luciaToken.readBearerToken(authorizationHeader ?? "");
+	if (!sessionId) {
+		return {
+			user: null,
+			session: null,
+		};
+	}
+	const result = await luciaToken.validateSession(sessionId);
+
+	if (result.user) {
+		if (result.user?.rol === "admin") {
+			const admin = await findAdminByAuthId(result.user.id);
+			result.user.adminId = admin.adminId;
+		} else if (result.user?.rol === "user") {
+			const userResult = await findUserByAuthId(result.user.id);
+			result.user.adminId = userResult.adminId;
+		}
+	}
+	return {
+		session: result.session,
+		...((result.user && {
+			user: {
+				adminId: result.user.adminId,
+				authId: result.user.id,
+				email: result.user.email,
+				rol: result.user.rol,
+				id: result.user.id,
+				secret: result.user.secret,
+			},
+		}) || {
+			user: null,
+		}),
+	};
+};