Updates
parent aeabe9a0ae
commit 97f5b6d7d9
.gitignore (vendored): 1 addition
@@ -32,3 +32,4 @@ report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json
 
 # Finder (MacOS) folder config
 .DS_Store
+test
@@ -1,6 +1,6 @@
-import datasquirel from "@moduletrace/datasquirel";
 import { execSync, type ExecSyncOptions } from "child_process";
 import path from "path";
+import s3UploadFile from "../utils/s3-upload";
 
 const BACKUP_DIR = `/root/backups`;
 const BACKUP_FILE_NAME = "backup.tar.xz";
@@ -13,8 +13,8 @@ const DOCKER_VOLUMES_BACKUP_DIR = path.join(
 );
 const BACKUP_FILE_TAR = path.join(BACKUP_DIR, BACKUP_FILE_NAME);
 const RSYNC_IGNORE_FILE = `/root/.coderank/server/rsync-ignore.txt`;
-const DSQL_FOLDER = `/projects/coderank/archives/servers`;
-const DSQL_FILE_NAME = `${process.env.CODERANK_HOST_DOMAIN}.tar.xz`;
+const R2_FOLDER = `archives/servers`;
+const R2_FILE_NAME = `${process.env.CODERANK_HOST_DOMAIN}.tar.xz`;
 
 const execOpts: ExecSyncOptions = {
     stdio: ["inherit", "inherit", "ignore"],
@@ -30,27 +30,8 @@ execSync(syncDockerVolsCmd, execOpts);
 
 execSync(`tar -czvf ${BACKUP_FILE_TAR} ${BACKUP_DIR_BACKUP}`, execOpts);
 
-const file = Bun.file(BACKUP_FILE_TAR);
-const buffer = await file.arrayBuffer();
-const base64 = Buffer.from(buffer).toString("base64");
-
-await datasquirel.api.media.add({
-    media: [
-        {
-            fileName: DSQL_FILE_NAME,
-            fileType: "tar.xz",
-            fileBase64: base64,
-            private: true,
-            privateFolder: true,
-            overwrite: true,
-        },
-    ],
-    type: "file",
-    folder: DSQL_FOLDER,
-    update: true,
-});
-
-datasquirel.utils.debugLog({
-    log: `Media Uploaded Successfully!`,
-    addTime: true,
+await s3UploadFile({
+    fileName: R2_FILE_NAME,
+    filePath: BACKUP_FILE_TAR,
+    folder: R2_FOLDER,
 });
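A note on the hunk above: R2_FILE_NAME is built from process.env.CODERANK_HOST_DOMAIN, so if that variable is unset the archive would be uploaded under a key like "undefined.tar.xz". The guard below is not part of the commit, only a sketch of how the script could fail fast instead:

// Sketch only (not in the commit): fail fast when the variable that names the
// uploaded archive is missing, instead of silently producing "undefined.tar.xz".
const host = process.env.CODERANK_HOST_DOMAIN;
if (!host) {
    throw new Error("CODERANK_HOST_DOMAIN is not set; refusing to run the backup upload.");
}
const R2_FILE_NAME = `${host}.tar.xz`;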
utils/s3-upload.ts (new file): 43 additions
@@ -0,0 +1,43 @@
+import { S3Client } from "bun";
+import path from "path";
+
+type Params = {
+    filePath: string;
+    fileName: string;
+    folder?: string;
+};
+
+const client = new S3Client({
+    accessKeyId: process.env.CLOUDFLARE_R2_ACCESS_KEY_ID,
+    secretAccessKey: process.env.CLOUDFLARE_R2_SECRET,
+    bucket: "coderank",
+    endpoint: process.env.CLOUDFLARE_R2_ENDPOINT,
+});
+
+export default async function s3UploadFile({
+    filePath,
+    folder,
+    fileName,
+}: Params) {
+    let finalFileName = path.join(folder || "", fileName);
+
+    const s3file = client.file(finalFileName);
+    const file = Bun.file(filePath);
+
+    const writer = s3file.writer({
+        retry: 3,
+        queueSize: 10,
+        partSize: 5 * 1024 * 1024,
+    });
+
+    const reader = file.stream().getReader();
+
+    while (true) {
+        const { done, value } = await reader.read();
+        if (done) break;
+        await writer.write(value);
+    }
+
+    await writer.end();
+    console.log("Upload complete!");
+}
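For orientation, here is a minimal usage sketch of the new helper. The call mirrors what the backup script now does; the file name and local path below are illustrative only, and the helper itself assumes the CLOUDFLARE_R2_* variables and the hard-coded "coderank" bucket shown above are configured.

// Illustrative usage sketch; the real script derives these values from
// BACKUP_FILE_TAR, R2_FILE_NAME and R2_FOLDER.
import s3UploadFile from "./utils/s3-upload"; // adjust the relative path for the caller's location

await s3UploadFile({
    filePath: "/root/backups/backup.tar.xz",
    fileName: "example-host.tar.xz",
    folder: "archives/servers",
});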