- Submit Log in issue on Github
+
+ Submit Log in issue on GitHub
+
diff --git a/apps/dokploy/pages/dashboard/project/[projectId]/services/compose/[composeId].tsx b/apps/dokploy/pages/dashboard/project/[projectId]/services/compose/[composeId].tsx
index 39f399e9..80909563 100644
--- a/apps/dokploy/pages/dashboard/project/[projectId]/services/compose/[composeId].tsx
+++ b/apps/dokploy/pages/dashboard/project/[projectId]/services/compose/[composeId].tsx
@@ -217,12 +217,12 @@ const Service = (
General
diff --git a/apps/dokploy/server/api/routers/compose.ts b/apps/dokploy/server/api/routers/compose.ts
index 48c2bfa7..583dde5c 100644
--- a/apps/dokploy/server/api/routers/compose.ts
+++ b/apps/dokploy/server/api/routers/compose.ts
@@ -439,7 +439,15 @@ export const composeRouter = createTRPCRouter({
}
const projectName = slugify(`${project.name} ${input.id}`);
- const generate = processTemplate(template.config, {
+ const appName = `${projectName}-${generatePassword(6)}`;
+ const config = {
+ ...template.config,
+ variables: {
+ APP_NAME: appName,
+ ...template.config.variables,
+ },
+ };
+ const generate = processTemplate(config, {
serverIp: serverIp,
projectName: projectName,
});
@@ -451,7 +459,7 @@ export const composeRouter = createTRPCRouter({
serverId: input.serverId,
name: input.id,
sourceType: "raw",
- appName: `${projectName}-${generatePassword(6)}`,
+ appName: appName,
isolatedDeployment: true,
});
@@ -605,7 +613,15 @@ export const composeRouter = createTRPCRouter({
});
}
- const processedTemplate = processTemplate(config, {
+ const configModified = {
+ ...config,
+ variables: {
+ APP_NAME: compose.appName,
+ ...config.variables,
+ },
+ };
+
+ const processedTemplate = processTemplate(configModified, {
serverIp: serverIp,
projectName: compose.appName,
});
@@ -675,7 +691,15 @@ export const composeRouter = createTRPCRouter({
});
}
- const processedTemplate = processTemplate(config, {
+ const configModified = {
+ ...config,
+ variables: {
+ APP_NAME: compose.appName,
+ ...config.variables,
+ },
+ };
+
+ const processedTemplate = processTemplate(configModified, {
serverIp: serverIp,
projectName: compose.appName,
});
diff --git a/apps/dokploy/server/api/routers/project.ts b/apps/dokploy/server/api/routers/project.ts
index 05875450..b98c93fa 100644
--- a/apps/dokploy/server/api/routers/project.ts
+++ b/apps/dokploy/server/api/routers/project.ts
@@ -309,6 +309,7 @@ export const projectRouter = createTRPCRouter({
}),
)
.optional(),
+ duplicateInSameProject: z.boolean().default(false),
}),
)
.mutation(async ({ ctx, input }) => {
@@ -331,15 +332,17 @@ export const projectRouter = createTRPCRouter({
});
}
- // Create new project
- const newProject = await createProject(
- {
- name: input.name,
- description: input.description,
- env: sourceProject.env,
- },
- ctx.session.activeOrganizationId,
- );
+ // Create new project or use existing one
+ const targetProject = input.duplicateInSameProject
+ ? sourceProject
+ : await createProject(
+ {
+ name: input.name,
+ description: input.description,
+ env: sourceProject.env,
+ },
+ ctx.session.activeOrganizationId,
+ );
if (input.includeServices) {
const servicesToDuplicate = input.selectedServices || [];
@@ -362,7 +365,10 @@ export const projectRouter = createTRPCRouter({
const newApplication = await createApplication({
...application,
- projectId: newProject.projectId,
+ name: input.duplicateInSameProject
+ ? `${application.name} (copy)`
+ : application.name,
+ projectId: targetProject.projectId,
});
for (const domain of domains) {
@@ -423,7 +429,10 @@ export const projectRouter = createTRPCRouter({
const newPostgres = await createPostgres({
...postgres,
- projectId: newProject.projectId,
+ name: input.duplicateInSameProject
+ ? `${postgres.name} (copy)`
+ : postgres.name,
+ projectId: targetProject.projectId,
});
for (const mount of mounts) {
@@ -449,7 +458,10 @@ export const projectRouter = createTRPCRouter({
await findMariadbById(id);
const newMariadb = await createMariadb({
...mariadb,
- projectId: newProject.projectId,
+ name: input.duplicateInSameProject
+ ? `${mariadb.name} (copy)`
+ : mariadb.name,
+ projectId: targetProject.projectId,
});
for (const mount of mounts) {
@@ -475,7 +487,10 @@ export const projectRouter = createTRPCRouter({
await findMongoById(id);
const newMongo = await createMongo({
...mongo,
- projectId: newProject.projectId,
+ name: input.duplicateInSameProject
+ ? `${mongo.name} (copy)`
+ : mongo.name,
+ projectId: targetProject.projectId,
});
for (const mount of mounts) {
@@ -501,7 +516,10 @@ export const projectRouter = createTRPCRouter({
await findMySqlById(id);
const newMysql = await createMysql({
...mysql,
- projectId: newProject.projectId,
+ name: input.duplicateInSameProject
+ ? `${mysql.name} (copy)`
+ : mysql.name,
+ projectId: targetProject.projectId,
});
for (const mount of mounts) {
@@ -526,7 +544,10 @@ export const projectRouter = createTRPCRouter({
const { redisId, mounts, ...redis } = await findRedisById(id);
const newRedis = await createRedis({
...redis,
- projectId: newProject.projectId,
+ name: input.duplicateInSameProject
+ ? `${redis.name} (copy)`
+ : redis.name,
+ projectId: targetProject.projectId,
});
for (const mount of mounts) {
@@ -545,7 +566,10 @@ export const projectRouter = createTRPCRouter({
await findComposeById(id);
const newCompose = await createCompose({
...compose,
- projectId: newProject.projectId,
+ name: input.duplicateInSameProject
+ ? `${compose.name} (copy)`
+ : compose.name,
+ projectId: targetProject.projectId,
});
for (const mount of mounts) {
@@ -572,21 +596,20 @@ export const projectRouter = createTRPCRouter({
};
// Duplicate selected services
-
for (const service of servicesToDuplicate) {
await duplicateService(service.id, service.type);
}
}
- if (ctx.user.role === "member") {
+ if (!input.duplicateInSameProject && ctx.user.role === "member") {
await addNewProject(
ctx.user.id,
- newProject.projectId,
+ targetProject.projectId,
ctx.session.activeOrganizationId,
);
}
- return newProject;
+ return targetProject;
} catch (error) {
throw new TRPCError({
code: "BAD_REQUEST",
diff --git a/packages/server/src/setup/server-setup.ts b/packages/server/src/setup/server-setup.ts
index d1d42257..d6d60c2e 100644
--- a/packages/server/src/setup/server-setup.ts
+++ b/packages/server/src/setup/server-setup.ts
@@ -356,20 +356,20 @@ const installUtilities = () => `
case "$OS_TYPE" in
arch)
- pacman -Sy --noconfirm --needed curl wget git jq openssl >/dev/null || true
+ pacman -Sy --noconfirm --needed curl wget git git-lfs jq openssl >/dev/null || true
;;
alpine)
sed -i '/^#.*\/community/s/^#//' /etc/apk/repositories
apk update >/dev/null
- apk add curl wget git jq openssl sudo unzip tar >/dev/null
+ apk add curl wget git git-lfs jq openssl sudo unzip tar >/dev/null
;;
ubuntu | debian | raspbian)
DEBIAN_FRONTEND=noninteractive apt-get update -y >/dev/null
- DEBIAN_FRONTEND=noninteractive apt-get install -y unzip curl wget git jq openssl >/dev/null
+ DEBIAN_FRONTEND=noninteractive apt-get install -y unzip curl wget git git-lfs jq openssl >/dev/null
;;
centos | fedora | rhel | ol | rocky | almalinux | amzn)
if [ "$OS_TYPE" = "amzn" ]; then
- dnf install -y wget git jq openssl >/dev/null
+ dnf install -y wget git git-lfs jq openssl >/dev/null
else
if ! command -v dnf >/dev/null; then
yum install -y dnf >/dev/null
@@ -377,12 +377,12 @@ const installUtilities = () => `
if ! command -v curl >/dev/null; then
dnf install -y curl >/dev/null
fi
- dnf install -y wget git jq openssl unzip >/dev/null
+ dnf install -y wget git git-lfs jq openssl unzip >/dev/null
fi
;;
sles | opensuse-leap | opensuse-tumbleweed)
zypper refresh >/dev/null
- zypper install -y curl wget git jq openssl >/dev/null
+ zypper install -y curl wget git git-lfs jq openssl >/dev/null
;;
*)
echo "This script only supports Debian, Redhat, Arch Linux, or SLES based operating systems for now."
@@ -577,7 +577,7 @@ const installNixpacks = () => `
if command_exists nixpacks; then
echo "Nixpacks already installed ✅"
else
- export NIXPACKS_VERSION=1.35.0
+ export NIXPACKS_VERSION=1.39.0
bash -c "$(curl -fsSL https://nixpacks.com/install.sh)"
echo "Nixpacks version $NIXPACKS_VERSION installed ✅"
fi
diff --git a/packages/server/src/utils/backups/web-server.ts b/packages/server/src/utils/backups/web-server.ts
index 71df47ba..ed6b020f 100644
--- a/packages/server/src/utils/backups/web-server.ts
+++ b/packages/server/src/utils/backups/web-server.ts
@@ -3,7 +3,7 @@ import { execAsync } from "../process/execAsync";
import { getS3Credentials, normalizeS3Path } from "./utils";
import { findDestinationById } from "@dokploy/server/services/destination";
import { IS_CLOUD, paths } from "@dokploy/server/constants";
-import { mkdtemp } from "node:fs/promises";
+import { mkdtemp, rm } from "node:fs/promises";
import { join } from "node:path";
import { tmpdir } from "node:os";
import {
@@ -51,10 +51,20 @@ export const runWebServerBackup = async (backup: BackupSchedule) => {
const postgresContainerId = containerId.trim();
- const postgresCommand = `docker exec ${postgresContainerId} pg_dump -v -Fc -U dokploy -d dokploy > '${tempDir}/database.sql'`;
+ // First dump the database inside the container
+ const dumpCommand = `docker exec ${postgresContainerId} pg_dump -v -Fc -U dokploy -d dokploy -f /tmp/database.sql`;
+ writeStream.write(`Running dump command: ${dumpCommand}\n`);
+ await execAsync(dumpCommand);
- writeStream.write(`Running command: ${postgresCommand}\n`);
- await execAsync(postgresCommand);
+ // Then copy the file from the container to host
+ const copyCommand = `docker cp ${postgresContainerId}:/tmp/database.sql ${tempDir}/database.sql`;
+ writeStream.write(`Copying database dump: ${copyCommand}\n`);
+ await execAsync(copyCommand);
+
+ // Clean up the temp file in the container
+ const cleanupCommand = `docker exec ${postgresContainerId} rm -f /tmp/database.sql`;
+ writeStream.write(`Cleaning up temp file: ${cleanupCommand}\n`);
+ await execAsync(cleanupCommand);
await execAsync(
`rsync -av --ignore-errors ${BASE_PATH}/ ${tempDir}/filesystem/`,
@@ -77,7 +87,11 @@ export const runWebServerBackup = async (backup: BackupSchedule) => {
await updateDeploymentStatus(deployment.deploymentId, "done");
return true;
} finally {
- await execAsync(`rm -rf ${tempDir}`);
+ try {
+ await rm(tempDir, { recursive: true, force: true });
+ } catch (cleanupError) {
+ console.error("Cleanup error:", cleanupError);
+ }
}
} catch (error) {
console.error("Backup error:", error);
diff --git a/packages/server/src/utils/builders/compose.ts b/packages/server/src/utils/builders/compose.ts
index 7b00fc72..92add1e6 100644
--- a/packages/server/src/utils/builders/compose.ts
+++ b/packages/server/src/utils/builders/compose.ts
@@ -190,7 +190,8 @@ const createEnvFile = (compose: ComposeNested) => {
join(COMPOSE_PATH, appName, "code", "docker-compose.yml");
const envFilePath = join(dirname(composeFilePath), ".env");
- let envContent = env || "";
+ let envContent = `APP_NAME=${appName}\n`;
+ envContent += env || "";
if (!envContent.includes("DOCKER_CONFIG")) {
envContent += "\nDOCKER_CONFIG=/root/.docker/config.json";
}
@@ -219,7 +220,8 @@ export const getCreateEnvFileCommand = (compose: ComposeNested) => {
const envFilePath = join(dirname(composeFilePath), ".env");
- let envContent = env || "";
+ let envContent = `APP_NAME=${appName}\n`;
+ envContent += env || "";
if (!envContent.includes("DOCKER_CONFIG")) {
envContent += "\nDOCKER_CONFIG=/root/.docker/config.json";
}
diff --git a/packages/server/src/utils/providers/gitlab.ts b/packages/server/src/utils/providers/gitlab.ts
index 9e848fd5..7fa804eb 100644
--- a/packages/server/src/utils/providers/gitlab.ts
+++ b/packages/server/src/utils/providers/gitlab.ts
@@ -246,32 +246,16 @@ export const getGitlabRepositories = async (gitlabId?: string) => {
const gitlabProvider = await findGitlabById(gitlabId);
- const response = await fetch(
- `${gitlabProvider.gitlabUrl}/api/v4/projects?membership=true&owned=true&page=${0}&per_page=${100}`,
- {
- headers: {
- Authorization: `Bearer ${gitlabProvider.accessToken}`,
- },
- },
- );
+ const allProjects = await validateGitlabProvider(gitlabProvider);
- if (!response.ok) {
- throw new TRPCError({
- code: "BAD_REQUEST",
- message: `Failed to fetch repositories: ${response.statusText}`,
- });
- }
-
- const repositories = await response.json();
-
- const filteredRepos = repositories.filter((repo: any) => {
+ const filteredRepos = allProjects.filter((repo: any) => {
const { full_path, kind } = repo.namespace;
const groupName = gitlabProvider.groupName?.toLowerCase();
if (groupName) {
const isIncluded = groupName
.split(",")
- .some((name) => full_path.toLowerCase().includes(name));
+ .some((name) => full_path === name);
return isIncluded && kind === "group";
}
@@ -432,34 +416,60 @@ export const testGitlabConnection = async (
const gitlabProvider = await findGitlabById(gitlabId);
- const response = await fetch(
- `${gitlabProvider.gitlabUrl}/api/v4/projects?membership=true&owned=true&page=${0}&per_page=${100}`,
- {
- headers: {
- Authorization: `Bearer ${gitlabProvider.accessToken}`,
- },
- },
- );
-
- if (!response.ok) {
- throw new TRPCError({
- code: "BAD_REQUEST",
- message: `Failed to fetch repositories: ${response.statusText}`,
- });
- }
-
- const repositories = await response.json();
+ const repositories = await validateGitlabProvider(gitlabProvider);
const filteredRepos = repositories.filter((repo: any) => {
const { full_path, kind } = repo.namespace;
if (groupName) {
- return groupName
- .split(",")
- .some((name) => full_path.toLowerCase().includes(name));
+ return groupName.split(",").some((name) => full_path === name);
}
return kind === "user";
});
return filteredRepos.length;
};
+
+export const validateGitlabProvider = async (gitlabProvider: Gitlab) => {
+ try {
+ const allProjects = [];
+ let page = 1;
+ const perPage = 100; // GitLab's max per page is 100
+
+ while (true) {
+ const response = await fetch(
+ `${gitlabProvider.gitlabUrl}/api/v4/projects?membership=true&owned=true&page=${page}&per_page=${perPage}`,
+ {
+ headers: {
+ Authorization: `Bearer ${gitlabProvider.accessToken}`,
+ },
+ },
+ );
+
+ if (!response.ok) {
+ throw new TRPCError({
+ code: "BAD_REQUEST",
+ message: `Failed to fetch repositories: ${response.statusText}`,
+ });
+ }
+
+ const projects = await response.json();
+
+ if (projects.length === 0) {
+ break;
+ }
+
+ allProjects.push(...projects);
+ page++;
+
+ const total = response.headers.get("x-total");
+ if (total && allProjects.length >= Number.parseInt(total)) {
+ break;
+ }
+ }
+
+ return allProjects;
+ } catch (error) {
+ throw error;
+ }
+};