Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
65 changes: 64 additions & 1 deletion apps/dokploy/__test__/utils/backups.test.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,9 @@
import { normalizeS3Path } from "@dokploy/server/utils/backups/utils";
import {
getRcloneDestination,
getRclonePrefixPath,
getS3Credentials,
normalizeS3Path,
} from "@dokploy/server/utils/backups/utils";
import { describe, expect, test } from "vitest";

describe("normalizeS3Path", () => {
Expand Down Expand Up @@ -59,3 +64,61 @@ describe("normalizeS3Path", () => {
expect(normalizeS3Path("instance-backups")).toBe("instance-backups/");
});
});

// Verifies that getS3Credentials / getRcloneDestination / getRclonePrefixPath
// produce provider-specific rclone flags and remote paths for each backend.
describe("rclone destination provider mapping", () => {
	// Minimal destination record for the default (AWS S3) provider; the
	// FTP/SFTP cases are derived from it by overriding provider fields.
	const awsDestination = {
		destinationId: "dest_1",
		name: "test",
		organizationId: "org_1",
		createdAt: new Date(),
		accessKey: "key",
		secretAccessKey: "secret",
		bucket: "bucket-root",
		region: "us-east-1",
		endpoint: "endpoint",
		provider: "AWS",
	};

	// True when any generated rclone flag contains the given substring.
	const hasFlag = (flags: string[], needle: string) =>
		flags.some((flag) => flag.includes(needle));

	test("should keep S3 behavior by default", () => {
		expect(hasFlag(getS3Credentials(awsDestination), "--s3-access-key-id")).toBe(
			true,
		);
		expect(getRcloneDestination(awsDestination, "prefix/file.sql.gz")).toBe(
			":s3:bucket-root/prefix/file.sql.gz",
		);
		expect(getRclonePrefixPath(awsDestination, "prefix")).toBe(
			":s3:bucket-root/prefix/",
		);
	});

	test("should generate ftp credentials and destination", () => {
		const ftpDestination = {
			...awsDestination,
			provider: "FTP",
			bucket: "remote-root",
			endpoint: "ftp.example.com",
		};
		expect(hasFlag(getS3Credentials(ftpDestination), "--ftp-host")).toBe(true);
		expect(getRcloneDestination(ftpDestination, "prefix/file.sql.gz")).toBe(
			":ftp:remote-root/prefix/file.sql.gz",
		);
		expect(getRclonePrefixPath(ftpDestination, "prefix")).toBe(
			":ftp:remote-root/prefix/",
		);
	});

	test("should generate sftp credentials and destination", () => {
		const sftpDestination = {
			...awsDestination,
			provider: "SFTP",
			bucket: "remote-root",
			endpoint: "sftp.example.com",
		};
		expect(hasFlag(getS3Credentials(sftpDestination), "--sftp-host")).toBe(
			true,
		);
		expect(getRcloneDestination(sftpDestination, "prefix/file.sql.gz")).toBe(
			":sftp:remote-root/prefix/file.sql.gz",
		);
		expect(getRclonePrefixPath(sftpDestination, "prefix")).toBe(
			":sftp:remote-root/prefix/",
		);
	});
});
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,22 @@ export const S3_PROVIDERS: Array<{
key: string;
name: string;
}> = [
{
key: "FTP",
name: "FTP (Rclone)",
},
{
key: "SFTP",
name: "SFTP (Rclone)",
},
{
key: "GDrive",
name: "Google Drive (Rclone)",
},
{
key: "OneDrive",
name: "OneDrive (Rclone)",
},
{
key: "AWS",
name: "Amazon Web Services (AWS) S3",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,36 @@ export const HandleDestinations = ({ destinationId }: Props) => {
},
resolver: zodResolver(addDestination),
});
const selectedProvider = form.watch("provider");
const normalizedProvider = selectedProvider.toLowerCase();
const isFtp = normalizedProvider === "ftp" || normalizedProvider === "sftp";
const isDrive =
normalizedProvider === "gdrive" || normalizedProvider === "onedrive";
const accessKeyLabel = isFtp
? "Username"
: isDrive
? "Client ID"
: "Access Key Id";
const secretLabel = isFtp
? "Password"
: isDrive
? "Client Secret"
: "Secret Access Key";
const bucketLabel = isFtp
? "Remote Root Path"
: isDrive
? "Remote Folder"
: "Bucket";
const endpointLabel = isFtp
? "Host"
: isDrive
? "OAuth Token JSON"
: "Endpoint";
const endpointPlaceholder = isFtp
? "storage.example.com"
: isDrive
? '{"access_token":"...","refresh_token":"..."}'
: "https://s3.example.com";
useEffect(() => {
if (destination) {
form.reset({
Expand Down Expand Up @@ -168,8 +198,6 @@ export const HandleDestinations = ({ destinationId }: Props) => {
const endpoint = form.getValues("endpoint");
const region = form.getValues("region");

const connectionString = `:s3,provider=${provider},access_key_id=${accessKey},secret_access_key=${secretKey},endpoint=${endpoint}${region ? `,region=${region}` : ""}:${bucket}`;

await testConnection({
provider,
accessKey,
Expand All @@ -185,7 +213,7 @@ export const HandleDestinations = ({ destinationId }: Props) => {
})
.catch((e) => {
toast.error("Error connecting to provider", {
description: `${e.message}\n\nTry manually: rclone ls ${connectionString}`,
description: e.message,
});
});
};
Expand Down Expand Up @@ -261,7 +289,7 @@ export const HandleDestinations = ({ destinationId }: Props) => {
>
<FormControl>
<SelectTrigger>
<SelectValue placeholder="Select a S3 Provider" />
<SelectValue placeholder="Select a Provider" />
</SelectTrigger>
</FormControl>
<SelectContent>
Expand All @@ -288,7 +316,7 @@ export const HandleDestinations = ({ destinationId }: Props) => {
render={({ field }) => {
return (
<FormItem>
<FormLabel>Access Key Id</FormLabel>
<FormLabel>{accessKeyLabel}</FormLabel>
<FormControl>
<Input placeholder={"xcas41dasde"} {...field} />
</FormControl>
Expand All @@ -303,7 +331,7 @@ export const HandleDestinations = ({ destinationId }: Props) => {
render={({ field }) => (
<FormItem>
<div className="space-y-0.5">
<FormLabel>Secret Access Key</FormLabel>
<FormLabel>{secretLabel}</FormLabel>
</div>
<FormControl>
<Input placeholder={"asd123asdasw"} {...field} />
Expand All @@ -318,7 +346,7 @@ export const HandleDestinations = ({ destinationId }: Props) => {
render={({ field }) => (
<FormItem>
<div className="space-y-0.5">
<FormLabel>Bucket</FormLabel>
<FormLabel>{bucketLabel}</FormLabel>
</div>
<FormControl>
<Input placeholder={"dokploy-bucket"} {...field} />
Expand All @@ -327,32 +355,31 @@ export const HandleDestinations = ({ destinationId }: Props) => {
</FormItem>
)}
/>
<FormField
control={form.control}
name="region"
render={({ field }) => (
<FormItem>
<div className="space-y-0.5">
<FormLabel>Region</FormLabel>
</div>
<FormControl>
<Input placeholder={"us-east-1"} {...field} />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
{!isFtp && !isDrive && (
<FormField
control={form.control}
name="region"
render={({ field }) => (
<FormItem>
<div className="space-y-0.5">
<FormLabel>Region</FormLabel>
</div>
<FormControl>
<Input placeholder={"us-east-1"} {...field} />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
)}
<FormField
control={form.control}
name="endpoint"
render={({ field }) => (
<FormItem>
<FormLabel>Endpoint</FormLabel>
<FormLabel>{endpointLabel}</FormLabel>
<FormControl>
<Input
placeholder={"https://us.bucket.aws/s3"}
{...field}
/>
<Input placeholder={endpointPlaceholder} {...field} />
</FormControl>
<FormMessage />
</FormItem>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,11 +23,11 @@ export const ShowDestinations = () => {
<CardHeader className="">
<CardTitle className="text-xl flex flex-row gap-2">
<Database className="size-6 text-muted-foreground self-center" />
S3 Destinations
Backup Destinations
</CardTitle>
<CardDescription>
Add your providers like AWS S3, Cloudflare R2, Wasabi,
DigitalOcean Spaces etc.
Add providers like AWS S3, Cloudflare R2, FTP, SFTP, Google
Drive, and OneDrive.
</CardDescription>
</CardHeader>
<CardContent className="space-y-2 py-8 border-t">
Expand Down
32 changes: 12 additions & 20 deletions apps/dokploy/server/api/routers/destination.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@ import {
execAsync,
execAsyncRemote,
findDestinationById,
getRcloneDestination,
getS3Credentials,
IS_CLOUD,
removeDestinationById,
updateDestinationById,
Expand Down Expand Up @@ -43,26 +45,16 @@ export const destinationRouter = createTRPCRouter({
testConnection: adminProcedure
.input(apiCreateDestination)
.mutation(async ({ input }) => {
const { secretAccessKey, bucket, region, endpoint, accessKey, provider } =
input;
try {
const rcloneFlags = [
`--s3-access-key-id="${accessKey}"`,
`--s3-secret-access-key="${secretAccessKey}"`,
`--s3-region="${region}"`,
`--s3-endpoint="${endpoint}"`,
"--s3-no-check-bucket",
"--s3-force-path-style",
"--retries 1",
"--low-level-retries 1",
"--timeout 10s",
"--contimeout 5s",
];
if (provider) {
rcloneFlags.unshift(`--s3-provider="${provider}"`);
}
const rcloneDestination = `:s3:${bucket}`;
const rcloneCommand = `rclone ls ${rcloneFlags.join(" ")} "${rcloneDestination}"`;
const destinationLike = {
...input,
createdAt: new Date(),
destinationId: "test-connection",
organizationId: "test-connection",
};
const rcloneFlags = getS3Credentials(destinationLike);
const rcloneDestination = getRcloneDestination(destinationLike, "");
const rcloneCommand = `rclone ls ${rcloneFlags.join(" ")} "${rcloneDestination}" --retries 1 --low-level-retries 1 --timeout 10s --contimeout 5s`;

if (IS_CLOUD && !input.serverId) {
throw new TRPCError({
Expand All @@ -82,7 +74,7 @@ export const destinationRouter = createTRPCRouter({
message:
error instanceof Error
? error?.message
: "Error connecting to bucket",
: "Error connecting to destination",
cause: error,
});
}
Expand Down
12 changes: 10 additions & 2 deletions packages/server/src/utils/backups/compose.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,12 @@ import { findEnvironmentById } from "@dokploy/server/services/environment";
import { findProjectById } from "@dokploy/server/services/project";
import { sendDatabaseBackupNotifications } from "../notifications/database-backup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getBackupCommand, getS3Credentials, normalizeS3Path } from "./utils";
import {
getBackupCommand,
getRcloneDestination,
getS3Credentials,
normalizeS3Path,
} from "./utils";

export const runComposeBackup = async (
compose: Compose,
Expand All @@ -30,7 +35,10 @@ export const runComposeBackup = async (

try {
const rcloneFlags = getS3Credentials(destination);
const rcloneDestination = `:s3:${destination.bucket}/${bucketDestination}`;
const rcloneDestination = getRcloneDestination(
destination,
bucketDestination,
);
const rcloneCommand = `rclone rcat ${rcloneFlags.join(" ")} "${rcloneDestination}"`;

const backupCommand = getBackupCommand(
Expand Down
9 changes: 7 additions & 2 deletions packages/server/src/utils/backups/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,8 @@ import { startLogCleanup } from "../access-log/handler";
import { cleanupAll } from "../docker/utils";
import { sendDockerCleanupNotifications } from "../notifications/docker-cleanup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getS3Credentials, normalizeS3Path, scheduleBackup } from "./utils";
import { getRclonePrefixPath, getS3Credentials, scheduleBackup } from "./utils";
import { normalizeS3Path } from "./utils";

export const initCronJobs = async () => {
console.log("Setting up cron jobs....");
Expand Down Expand Up @@ -131,7 +132,11 @@ export const keepLatestNBackups = async (
try {
const rcloneFlags = getS3Credentials(backup.destination);
const appName = getServiceAppName(backup);
const backupFilesPath = `:s3:${backup.destination.bucket}/${appName}/${normalizeS3Path(backup.prefix)}`;
const backupPrefix = `${appName}/${normalizeS3Path(backup.prefix)}`;
const backupFilesPath = getRclonePrefixPath(
backup.destination,
backupPrefix,
);

// --include "*.sql.gz" or "*.zip" ensures nothing else other than the dokploy backup files are touched by rclone
const rcloneList = `rclone lsf ${rcloneFlags.join(" ")} --include "*${backup.databaseType === "web-server" ? ".zip" : ".sql.gz"}" ${backupFilesPath}`;
Expand Down
12 changes: 10 additions & 2 deletions packages/server/src/utils/backups/mariadb.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,12 @@ import type { Mariadb } from "@dokploy/server/services/mariadb";
import { findProjectById } from "@dokploy/server/services/project";
import { sendDatabaseBackupNotifications } from "../notifications/database-backup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getBackupCommand, getS3Credentials, normalizeS3Path } from "./utils";
import {
getBackupCommand,
getRcloneDestination,
getS3Credentials,
normalizeS3Path,
} from "./utils";

export const runMariadbBackup = async (
mariadb: Mariadb,
Expand All @@ -28,7 +33,10 @@ export const runMariadbBackup = async (
});
try {
const rcloneFlags = getS3Credentials(destination);
const rcloneDestination = `:s3:${destination.bucket}/${bucketDestination}`;
const rcloneDestination = getRcloneDestination(
destination,
bucketDestination,
);
const rcloneCommand = `rclone rcat ${rcloneFlags.join(" ")} "${rcloneDestination}"`;

const backupCommand = getBackupCommand(
Expand Down
Loading