diff --git a/scripts/restore.ts b/scripts/restore.ts
index 856b01d..a96cc56 100644
--- a/scripts/restore.ts
+++ b/scripts/restore.ts
@@ -1,6 +1,7 @@
 import datasquirel from "@moduletrace/datasquirel";
 import { execSync, type ExecSyncOptions } from "child_process";
 import path from "path";
+import s3DownloadFile from "../utils/s3-download";
 
 const BACKUP_DIR = `/root/backups`;
 const BACKUP_FILE_NAME = "backup.tar.xz";
@@ -11,21 +12,16 @@ const DOCKER_VOLUMES_BACKUP_DIR = path.join(
   `docker`,
   `volumes`,
 );
-const BACKUP_FILE_TAR = path.join(BACKUP_DIR, BACKUP_FILE_NAME);
-const RSYNC_IGNORE_FILE = `/root/.coderank/server/rsync-ignore.txt`;
-const DSQL_FOLDER = `/projects/coderank/archives/servers`;
-const DSQL_FILE_NAME = `${process.env.CODERANK_HOST_DOMAIN}.tar.xz`;
+const R2_FILE_NAME = `${process.env.CODERANK_HOST_DOMAIN}.tar.xz`;
+const R2_FOLDER = `archives/servers`;
+const BACKUP_FILE_TAR = path.join(BACKUP_DIR, R2_FILE_NAME);
 
 const execOpts: ExecSyncOptions = {
   stdio: ["inherit", "inherit", "ignore"],
 };
 
-const dsqlFile = await datasquirel.api.media.get({
-  // folder: DSQL_FOLDER,
-  mediaName: DSQL_FILE_NAME,
-});
-
-datasquirel.utils.debugLog({
-  log: dsqlFile,
-  addTime: true,
+await s3DownloadFile({
+  downloadPath: BACKUP_FILE_TAR,
+  downloadFileName: R2_FILE_NAME,
+  folder: R2_FOLDER,
 });
diff --git a/utils/s3-download.ts b/utils/s3-download.ts
new file mode 100644
index 0000000..d6c9294
--- /dev/null
+++ b/utils/s3-download.ts
@@ -0,0 +1,58 @@
+import { S3Client } from "bun";
+import { createWriteStream } from "fs";
+import { once } from "events";
+import path from "path";
+
+type Params = {
+  downloadPath: string;
+  downloadFileName: string;
+  folder?: string;
+};
+
+const client = new S3Client({
+  accessKeyId: process.env.CLOUDFLARE_R2_ACCESS_KEY_ID,
+  secretAccessKey: process.env.CLOUDFLARE_R2_SECRET,
+  bucket: "coderank",
+  endpoint: process.env.CLOUDFLARE_R2_ENDPOINT,
+});
+
+/**
+ * Download an object from the R2 bucket to a local file.
+ *
+ * Resolves only after every byte has been flushed to disk, so callers
+ * (e.g. scripts/restore.ts) can safely read the file right after awaiting.
+ *
+ * @param downloadPath - Local filesystem path the object is written to.
+ * @param downloadFileName - Object name inside the bucket.
+ * @param folder - Optional key prefix inside the bucket.
+ */
+export default async function s3DownloadFile({
+  downloadPath,
+  folder,
+  downloadFileName,
+}: Params) {
+  // S3/R2 object keys always use "/" — path.posix avoids "\" on Windows.
+  const key = path.posix.join(folder ?? "", downloadFileName);
+
+  const s3file = client.file(key);
+
+  const writable = createWriteStream(downloadPath);
+  const reader = s3file.stream().getReader();
+
+  try {
+    while (true) {
+      const { done, value } = await reader.read();
+      if (done) break;
+      // Respect backpressure: when write() returns false the internal
+      // buffer is full — wait for "drain" so large archives don't
+      // accumulate in memory.
+      if (!writable.write(value)) {
+        await once(writable, "drain");
+      }
+    }
+    // end() alone doesn't guarantee the data hit the disk; wait for
+    // "finish" before declaring the download done.
+    writable.end();
+    await once(writable, "finish");
+  } catch (err) {
+    // Don't leak the file descriptor on a failed download.
+    writable.destroy();
+    throw err;
+  }
+
+  console.log("Download complete!");
+}