Updates

This commit is contained in:
parent a53b6e6974
commit 20a390e4a8

3 dist/index.d.ts vendored
@@ -26,6 +26,7 @@ import debugLog from "./package-shared/utils/logging/debug-log";
import { ErrorCallback } from "./package-shared/types";
import parseEnv from "./package-shared/utils/parse-env";
import dbHandler from "./package-shared/functions/backend/dbHandler";
import httpsRequest from "./package-shared/functions/backend/httpsRequest";
/**
* Main Export
*/
@@ -137,6 +138,8 @@ declare const datasquirel: {
connDbHandler: typeof connDbHandler;
debugLog: typeof debugLog;
parseEnv: typeof parseEnv;
httpsRequest: typeof httpsRequest;
httpRequest: typeof httpsRequest;
};
/**
* Run Crud actions `get`, `insert`, `update`, `delete`
*/
3 dist/index.js vendored
@@ -25,6 +25,7 @@ const user_1 = __importDefault(require("./package-shared/api/user"));
const local_user_1 = __importDefault(require("./package-shared/api/user/local-user"));
const media_1 = __importDefault(require("./package-shared/api/media"));
const dbHandler_1 = __importDefault(require("./package-shared/functions/backend/dbHandler"));
const httpsRequest_1 = __importDefault(require("./package-shared/functions/backend/httpsRequest"));
/**
* User Functions Object
*/
@@ -93,6 +94,8 @@ const datasquirel = {
connDbHandler: conn_db_handler_1.default,
debugLog: debug_log_1.default,
parseEnv: parse_env_1.default,
httpsRequest: httpsRequest_1.default,
httpRequest: httpsRequest_1.default,
},
/**
* Run Crud actions `get`, `insert`, `update`, `delete`
*/
49 dist/package-shared/actions/users/login-user.js vendored
@@ -13,14 +13,12 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = loginUser;
const encrypt_1 = __importDefault(require("../../functions/dsql/encrypt"));
const api_login_1 = __importDefault(require("../../functions/api/users/api-login"));
const get_auth_cookie_names_1 = __importDefault(require("../../functions/backend/cookies/get-auth-cookie-names"));
const write_auth_files_1 = require("../../functions/backend/auth/write-auth-files");
const debug_log_1 = __importDefault(require("../../utils/logging/debug-log"));
const grab_cookie_expirt_date_1 = __importDefault(require("../../utils/grab-cookie-expirt-date"));
const grab_api_path_1 = __importDefault(require("../../utils/backend/users/grab-api-path"));
const query_dsql_api_1 = __importDefault(require("../../functions/api/query-dsql-api"));
const post_login_response_handler_1 = __importDefault(require("../../functions/backend/auth/post-login-response-handler"));
function debugFn(log, label) {
(0, debug_log_1.default)({ log, addTime: true, title: "loginUser", label });
}
@@ -29,7 +27,6 @@ function debugFn(log, label) {
*/
function loginUser(_a) {
return __awaiter(this, arguments, void 0, function* ({ apiKey, payload, database, additionalFields, response, encryptionKey, encryptionSalt, email_login, email_login_code, temp_code_field, token, skipPassword, apiUserID, skipWriteAuthFile, dbUserId, debug, cleanupTokens, secureCookie, useLocal, apiVersion = "v1", }) {
var _b, _c;
const COOKIE_EXPIRY_DATE = (0, grab_cookie_expirt_date_1.default)();
const defaultTempLoginFieldName = "temp_login_code";
const emailLoginTempCodeFieldName = email_login
@@ -104,42 +101,18 @@ function loginUser(_a) {
* # Send Response
*/
if (httpResponse === null || httpResponse === void 0 ? void 0 : httpResponse.success) {
let encryptedPayload = (0, encrypt_1.default)({
data: JSON.stringify(httpResponse.payload),
encryptionKey: finalEncryptionKey,
encryptionSalt: finalEncryptionSalt,
});
try {
if (token && encryptedPayload)
httpResponse["token"] = encryptedPayload;
}
catch (error) {
console.log("Login User HTTP Response Error:", error.message);
}
const cookieNames = (0, get_auth_cookie_names_1.default)({
(0, post_login_response_handler_1.default)({
database,
httpResponse,
cleanupTokens,
debug,
encryptionKey,
encryptionSalt,
response,
secureCookie,
skipWriteAuthFile,
token,
});
if (httpResponse.csrf && !skipWriteAuthFile) {
(0, write_auth_files_1.writeAuthFile)(httpResponse.csrf, JSON.stringify(httpResponse.payload), cleanupTokens && ((_b = httpResponse.payload) === null || _b === void 0 ? void 0 : _b.id)
? { userId: httpResponse.payload.id }
: undefined);
}
httpResponse["cookieNames"] = cookieNames;
httpResponse["key"] = String(encryptedPayload);
const authKeyName = cookieNames.keyCookieName;
const csrfName = cookieNames.csrfCookieName;
if (debug) {
debugFn(authKeyName, "authKeyName");
debugFn(csrfName, "csrfName");
debugFn(encryptedPayload, "encryptedPayload");
}
response === null || response === void 0 ? void 0 : response.setHeader("Set-Cookie", [
`${authKeyName}=${encryptedPayload};samesite=strict;path=/;HttpOnly=true;Expires=${COOKIE_EXPIRY_DATE}${secureCookie ? ";Secure=true" : ""}`,
`${csrfName}=${(_c = httpResponse.payload) === null || _c === void 0 ? void 0 : _c.csrf_k};samesite=strict;path=/;HttpOnly=true;Expires=${COOKIE_EXPIRY_DATE}`,
]);
if (debug) {
debugFn("Response Sent!");
}
}
return httpResponse;
});
@@ -21,12 +21,13 @@ const query_dsql_api_1 = __importDefault(require("../../functions/api/query-dsql
*/
function sendEmailCode(params) {
return __awaiter(this, void 0, void 0, function* () {
const { apiKey, email, database, temp_code_field_name, mail_domain, mail_password, mail_username, mail_port, sender, response, extraCookies, useLocal, apiVersion, dbUserId, } = params;
const { apiKey, email, database, temp_code_field_name, mail_domain, mail_password, mail_username, mail_port, sender, response, extraCookies, useLocal, apiVersion, dbUserId, html, } = params;
const defaultTempLoginFieldName = "temp_login_code";
const emailLoginTempCodeFieldName = temp_code_field_name
? temp_code_field_name
: defaultTempLoginFieldName;
const emailHtml = `<p>Please use this code to login</p>\n<h2>{{code}}</h2>\n<p>Please note that this code expires after 15 minutes</p>`;
const emailHtml = html ||
`<p>Please use this code to login</p>\n<h2>{{code}}</h2>\n<p>Please note that this code expires after 15 minutes</p>`;
const apiSendEmailCodeParams = {
database,
email,
@@ -2,4 +2,4 @@ import { APIResponseObject, GoogleAuthParams } from "../../../types";
/**
* # SERVER FUNCTION: Login with google Function
*/
export default function googleAuth({ apiKey, token, database, response, encryptionKey, encryptionSalt, additionalFields, additionalData, apiUserID, debug, secureCookie, loginOnly, useLocal, apiVersion, }: GoogleAuthParams): Promise<APIResponseObject>;
export default function googleAuth({ apiKey, token, database, response, encryptionKey, encryptionSalt, additionalFields, additionalData, apiUserID, debug, secureCookie, loginOnly, useLocal, apiVersion, skipWriteAuthFile, cleanupTokens, }: GoogleAuthParams): Promise<APIResponseObject>;
@@ -13,20 +13,15 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = googleAuth;
const encrypt_1 = __importDefault(require("../../../functions/dsql/encrypt"));
const api_google_login_1 = __importDefault(require("../../../functions/api/users/social/api-google-login"));
const get_auth_cookie_names_1 = __importDefault(require("../../../functions/backend/cookies/get-auth-cookie-names"));
const write_auth_files_1 = require("../../../functions/backend/auth/write-auth-files");
const grab_cookie_expirt_date_1 = __importDefault(require("../../../utils/grab-cookie-expirt-date"));
const query_dsql_api_1 = __importDefault(require("../../../functions/api/query-dsql-api"));
const grab_api_path_1 = __importDefault(require("../../../utils/backend/users/grab-api-path"));
const post_login_response_handler_1 = __importDefault(require("../../../functions/backend/auth/post-login-response-handler"));
/**
* # SERVER FUNCTION: Login with google Function
*/
function googleAuth(_a) {
return __awaiter(this, arguments, void 0, function* ({ apiKey, token, database, response, encryptionKey, encryptionSalt, additionalFields, additionalData, apiUserID, debug, secureCookie, loginOnly, useLocal, apiVersion, }) {
var _b;
const COOKIE_EXPIRY_DATE = (0, grab_cookie_expirt_date_1.default)();
return __awaiter(this, arguments, void 0, function* ({ apiKey, token, database, response, encryptionKey, encryptionSalt, additionalFields, additionalData, apiUserID, debug, secureCookie, loginOnly, useLocal, apiVersion, skipWriteAuthFile, cleanupTokens, }) {
const finalEncryptionKey = encryptionKey || process.env.DSQL_ENCRYPTION_PASSWORD;
const finalEncryptionSalt = encryptionSalt || process.env.DSQL_ENCRYPTION_SALT;
if (!(finalEncryptionKey === null || finalEncryptionKey === void 0 ? void 0 : finalEncryptionKey.match(/.{8,}/))) {
@@ -96,26 +91,18 @@ function googleAuth(_a) {
*
* @description make a request to datasquirel.com
*/
if ((httpResponse === null || httpResponse === void 0 ? void 0 : httpResponse.success) && (httpResponse === null || httpResponse === void 0 ? void 0 : httpResponse.payload)) {
let encryptedPayload = (0, encrypt_1.default)({
data: JSON.stringify(httpResponse.payload),
encryptionKey: finalEncryptionKey,
encryptionSalt: finalEncryptionSalt,
});
const cookieNames = (0, get_auth_cookie_names_1.default)({
if ((httpResponse === null || httpResponse === void 0 ? void 0 : httpResponse.success) && (httpResponse === null || httpResponse === void 0 ? void 0 : httpResponse.payload) && database) {
(0, post_login_response_handler_1.default)({
database,
httpResponse,
cleanupTokens,
debug,
encryptionKey,
encryptionSalt,
response,
secureCookie,
skipWriteAuthFile,
});
if (httpResponse.csrf) {
(0, write_auth_files_1.writeAuthFile)(httpResponse.csrf, JSON.stringify(httpResponse.payload));
}
httpResponse["cookieNames"] = cookieNames;
httpResponse["key"] = String(encryptedPayload);
const authKeyName = cookieNames.keyCookieName;
const csrfName = cookieNames.csrfCookieName;
response === null || response === void 0 ? void 0 : response.setHeader("Set-Cookie", [
`${authKeyName}=${encryptedPayload};samesite=strict;path=/;HttpOnly=true;;Expires=${COOKIE_EXPIRY_DATE}${secureCookie ? ";Secure=true" : ""}`,
`${csrfName}=${(_b = httpResponse.payload) === null || _b === void 0 ? void 0 : _b.csrf_k};samesite=strict;path=/;HttpOnly=true;;Expires=${COOKIE_EXPIRY_DATE}`,
]);
}
return httpResponse;
});
2 dist/package-shared/dict/app-names.d.ts vendored
@@ -6,4 +6,6 @@ export declare const AppNames: {
readonly PrivateMediaInsertTriggerName: "dsql_trg_user_private_folders_insert";
readonly PrivateMediaDeleteTriggerName: "dsql_trg_user_private_folders_delete";
readonly WebsocketPathname: "dsql-websocket";
readonly ReverseProxyForwardURLHeaderName: "x-original-uri";
readonly PrivateAPIAuthHeaderName: "x-api-auth-key";
};
2 dist/package-shared/dict/app-names.js vendored
@@ -9,4 +9,6 @@ exports.AppNames = {
PrivateMediaInsertTriggerName: "dsql_trg_user_private_folders_insert",
PrivateMediaDeleteTriggerName: "dsql_trg_user_private_folders_delete",
WebsocketPathname: "dsql-websocket",
ReverseProxyForwardURLHeaderName: "x-original-uri",
PrivateAPIAuthHeaderName: "x-api-auth-key",
};
@@ -16,6 +16,7 @@ exports.default = queryDSQLAPI;
const path_1 = __importDefault(require("path"));
const grab_host_names_1 = __importDefault(require("../../utils/grab-host-names"));
const serialize_query_1 = __importDefault(require("../../utils/serialize-query"));
const lodash_1 = __importDefault(require("lodash"));
/**
* # Query DSQL API
*/
@@ -88,7 +89,12 @@ function queryDSQLAPI(_a) {
payload: undefined,
msg: `An error occurred while parsing the response`,
error: error.message,
errorData: { requestOptions, grabedHostNames },
errorData: {
requestOptions,
grabedHostNames: lodash_1.default.omit(grabedHostNames, [
"scheme",
]),
},
});
}
});
@@ -108,7 +114,10 @@ function queryDSQLAPI(_a) {
payload: undefined,
msg: `An error occurred while making the request`,
error: err.message,
errorData: { requestOptions, grabedHostNames },
errorData: {
requestOptions,
grabedHostNames: lodash_1.default.omit(grabedHostNames, ["scheme"]),
},
});
});
if (reqPayload) {
@@ -1,5 +1,5 @@
import { APILoginFunctionReturn, HandleSocialDbFunctionParams } from "../../../types";
import { APIResponseObject, HandleSocialDbFunctionParams } from "../../../types";
/**
* # Handle Social DB
*/
export default function handleSocialDb({ database, email, social_platform, payload, invitation, supEmail, additionalFields, debug, loginOnly, apiUserId, }: HandleSocialDbFunctionParams): Promise<APILoginFunctionReturn>;
export default function handleSocialDb({ database, email, social_platform, payload, invitation, supEmail, additionalFields, debug, loginOnly, apiUserId, }: HandleSocialDbFunctionParams): Promise<APIResponseObject>;
@@ -15,7 +15,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
exports.default = handleSocialDb;
const fs_1 = __importDefault(require("fs"));
const handleNodemailer_1 = __importDefault(require("../../backend/handleNodemailer"));
const path_1 = __importDefault(require("path"));
const addMariadbUser_1 = __importDefault(require("../../backend/addMariadbUser"));
const dbHandler_1 = __importDefault(require("../../backend/dbHandler"));
const encrypt_1 = __importDefault(require("../../dsql/encrypt"));
@@ -162,26 +161,19 @@ function handleSocialDb(_a) {
.replace(/{{token}}/, generatedToken || ""),
}).then(() => { });
}
const { STATIC_ROOT } = (0, grab_dir_names_1.default)();
if (!STATIC_ROOT) {
console.log("Static File ENV not Found!");
return {
success: false,
payload: null,
msg: "Static File ENV not Found!",
};
}
const { userPrivateMediaDir, userPublicMediaDir } = (0, grab_dir_names_1.default)({
userId: newUser.payload.insertId,
});
/**
* Create new user folder and file
*
* @description Create new user folder and file
*/
if (!database || (database === null || database === void 0 ? void 0 : database.match(/^datasquirel$/))) {
let newUserSchemaFolderPath = `${process.env.DSQL_USER_DB_SCHEMA_PATH}/user-${newUser.payload.insertId}`;
let newUserMediaFolderPath = path_1.default.join(STATIC_ROOT, `images/user-images/user-${newUser.payload.insertId}`);
fs_1.default.mkdirSync(newUserSchemaFolderPath);
fs_1.default.mkdirSync(newUserMediaFolderPath);
fs_1.default.writeFileSync(`${newUserSchemaFolderPath}/main.json`, JSON.stringify([]), "utf8");
userPublicMediaDir &&
fs_1.default.mkdirSync(userPublicMediaDir, { recursive: true });
userPrivateMediaDir &&
fs_1.default.mkdirSync(userPrivateMediaDir, { recursive: true });
}
return yield (0, loginSocialUser_1.default)({
user: newUserQueried[0],
@@ -1,10 +1,7 @@
import { APILoginFunctionReturn } from "../../../types";
import { APIResponseObject } from "../../../types";
type Param = {
user: {
first_name: string;
last_name: string;
email: string;
social_id: string | number;
};
social_platform: string;
invitation?: any;
@@ -18,5 +15,5 @@ type Param = {
* @description This function logs in the user after 'handleSocialDb' function finishes
* the user creation or confirmation process
*/
export default function loginSocialUser({ user, social_platform, invitation, database, additionalFields, debug, }: Param): Promise<APILoginFunctionReturn>;
export default function loginSocialUser({ user, social_platform, invitation, database, additionalFields, debug, }: Param): Promise<APIResponseObject>;
export {};
@@ -13,8 +13,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = loginSocialUser;
const addAdminUserOnLogin_1 = __importDefault(require("../../backend/addAdminUserOnLogin"));
const dbHandler_1 = __importDefault(require("../../backend/dbHandler"));
const login_user_1 = __importDefault(require("../../../actions/users/login-user"));
/**
* Function to login social user
* ==============================================================================
@@ -24,57 +23,15 @@ const dbHandler_1 = __importDefault(require("../../backend/dbHandler"));
function loginSocialUser(_a) {
return __awaiter(this, arguments, void 0, function* ({ user, social_platform, invitation, database, additionalFields, debug, }) {
const finalDbName = database ? database : "datasquirel";
const dbAppend = database ? `\`${finalDbName}\`.` : "";
const foundUserQuery = `SELECT * FROM ${dbAppend}\`users\` WHERE email=?`;
const foundUserValues = [user.email];
const foundUser = (yield (0, dbHandler_1.default)({
query: foundUserQuery,
values: foundUserValues,
let userPayload = yield (0, login_user_1.default)({
database: finalDbName,
}));
if (!(foundUser === null || foundUser === void 0 ? void 0 : foundUser[0]))
return {
success: false,
payload: null,
msg: "Couldn't find Social User.",
};
let csrfKey = Math.random().toString(36).substring(2) +
"-" +
Math.random().toString(36).substring(2);
let userPayload = {
id: foundUser[0].id,
uuid: foundUser[0].uuid,
first_name: foundUser[0].first_name,
last_name: foundUser[0].last_name,
username: foundUser[0].username,
user_type: foundUser[0].user_type,
email: foundUser[0].email,
social_id: foundUser[0].social_id,
image: foundUser[0].image,
image_thumbnail: foundUser[0].image_thumbnail,
verification_status: foundUser[0].verification_status,
social_login: foundUser[0].social_login,
social_platform: foundUser[0].social_platform,
csrf_k: csrfKey,
logged_in_status: true,
date: Date.now(),
};
if (additionalFields === null || additionalFields === void 0 ? void 0 : additionalFields[0]) {
additionalFields.forEach((key) => {
userPayload[key] = foundUser[0][key];
payload: { email: user.email },
skipPassword: true,
skipWriteAuthFile: true,
additionalFields,
debug,
useLocal: true,
});
}
if (invitation && (!database || (database === null || database === void 0 ? void 0 : database.match(/^datasquirel$/)))) {
(0, addAdminUserOnLogin_1.default)({
query: invitation,
user: userPayload,
});
}
let result = {
success: true,
payload: userPayload,
csrf: csrfKey,
};
return result;
return userPayload;
});
}
@@ -1,5 +1,6 @@
import { APIGoogleLoginFunctionParams, APILoginFunctionReturn } from "../../../../types";
import { APIGoogleLoginFunctionParams } from "../../../../types";
import { APIResponseObject } from "../../../../types";
/**
* # API google login
*/
export default function apiGoogleLogin({ token, database, additionalFields, additionalData, debug, loginOnly, apiUserId, }: APIGoogleLoginFunctionParams): Promise<APILoginFunctionReturn>;
export default function apiGoogleLogin({ token, database, additionalFields, additionalData, debug, loginOnly, apiUserId, }: APIGoogleLoginFunctionParams): Promise<APIResponseObject>;
21 dist/package-shared/functions/backend/auth/post-login-response-handler.d.ts vendored Normal file
@@ -0,0 +1,21 @@
import { ServerResponse } from "http";
import { APIResponseObject } from "../../../types";
type Params = {
database: string;
httpResponse: APIResponseObject;
response?: ServerResponse & {
[s: string]: any;
};
encryptionKey?: string;
encryptionSalt?: string;
debug?: boolean;
skipWriteAuthFile?: boolean;
token?: boolean;
cleanupTokens?: boolean;
secureCookie?: boolean;
};
/**
* # Login A user
*/
export default function postLoginResponseHandler({ database, httpResponse, response, encryptionKey, encryptionSalt, debug, token, skipWriteAuthFile, cleanupTokens, secureCookie, }: Params): boolean;
export {};
63 dist/package-shared/functions/backend/auth/post-login-response-handler.js vendored Normal file
@@ -0,0 +1,63 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = postLoginResponseHandler;
const encrypt_1 = __importDefault(require("../../dsql/encrypt"));
const debug_log_1 = __importDefault(require("../../../utils/logging/debug-log"));
const get_auth_cookie_names_1 = __importDefault(require("../cookies/get-auth-cookie-names"));
const write_auth_files_1 = require("./write-auth-files");
const grab_cookie_expirt_date_1 = __importDefault(require("../../../utils/grab-cookie-expirt-date"));
function debugFn(log, label) {
(0, debug_log_1.default)({ log, addTime: true, title: "loginUser", label });
}
/**
* # Login A user
*/
function postLoginResponseHandler({ database, httpResponse, response, encryptionKey, encryptionSalt, debug, token, skipWriteAuthFile, cleanupTokens, secureCookie, }) {
var _a, _b;
const COOKIE_EXPIRY_DATE = (0, grab_cookie_expirt_date_1.default)();
if (httpResponse === null || httpResponse === void 0 ? void 0 : httpResponse.success) {
let encryptedPayload = (0, encrypt_1.default)({
data: JSON.stringify(httpResponse.payload),
encryptionKey,
encryptionSalt,
});
try {
if (token && encryptedPayload)
httpResponse["token"] = encryptedPayload;
}
catch (error) {
console.log("Login User HTTP Response Error:", error.message);
}
const cookieNames = (0, get_auth_cookie_names_1.default)({
database,
});
if (httpResponse.csrf && !skipWriteAuthFile) {
(0, write_auth_files_1.writeAuthFile)(httpResponse.csrf, JSON.stringify(httpResponse.payload), cleanupTokens && ((_a = httpResponse.payload) === null || _a === void 0 ? void 0 : _a.id)
? { userId: httpResponse.payload.id }
: undefined);
}
httpResponse["cookieNames"] = cookieNames;
httpResponse["key"] = String(encryptedPayload);
const authKeyName = cookieNames.keyCookieName;
const csrfName = cookieNames.csrfCookieName;
if (debug) {
debugFn(authKeyName, "authKeyName");
debugFn(csrfName, "csrfName");
debugFn(encryptedPayload, "encryptedPayload");
}
response === null || response === void 0 ? void 0 : response.setHeader("Set-Cookie", [
`${authKeyName}=${encryptedPayload};samesite=strict;path=/;HttpOnly=true;Expires=${COOKIE_EXPIRY_DATE}${secureCookie ? ";Secure=true" : ""}`,
`${csrfName}=${(_b = httpResponse.payload) === null || _b === void 0 ? void 0 : _b.csrf_k};samesite=strict;path=/;HttpOnly=true;Expires=${COOKIE_EXPIRY_DATE}`,
]);
if (debug) {
debugFn("Response Sent!");
}
return true;
}
else {
return false;
}
}
@@ -22,7 +22,7 @@ function suAddBackup(_a) {
return __awaiter(this, arguments, void 0, function* ({ targetUserId, }) {
var _b, _c;
try {
const { mainBackupDir, userBackupDir } = (0, grab_dir_names_1.default)({
const { mainBackupDir, userBackupDir, STATIC_ROOT, privateDataDir } = (0, grab_dir_names_1.default)({
userId: targetUserId,
});
if (targetUserId && !userBackupDir) {
@@ -22,7 +22,7 @@ const export_mariadb_database_1 = __importDefault(require("../../../../../utils/
function writeBackupFiles(_a) {
return __awaiter(this, arguments, void 0, function* ({ backup, }) {
try {
const { mainBackupDir, userBackupDir, sqlBackupDirName, schemasBackupDirName, targetUserPrivateDir, oldSchemasDir, } = (0, grab_dir_names_1.default)({
const { mainBackupDir, userBackupDir, sqlBackupDirName, schemasBackupDirName, targetUserPrivateDir, oldSchemasDir, STATIC_ROOT, privateDataDir, } = (0, grab_dir_names_1.default)({
userId: backup.user_id,
});
if (backup.user_id && !userBackupDir) {
9 dist/package-shared/functions/backend/backups/su/download-backup.d.ts vendored Normal file
@@ -0,0 +1,9 @@
import { DSQL_DATASQUIREL_BACKUPS } from "../../../../types/dsql";
import { APIResponseObject } from "../../../../types";
import { NextApiResponse } from "next";
type Params = {
backup: DSQL_DATASQUIREL_BACKUPS;
res: NextApiResponse;
};
export default function downloadBackup({ backup, res, }: Params): Promise<APIResponseObject>;
export {};
64 dist/package-shared/functions/backend/backups/su/download-backup.js vendored Normal file
@@ -0,0 +1,64 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = downloadBackup;
const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
const grab_dir_names_1 = __importDefault(require("../../../../utils/backend/names/grab-dir-names"));
const child_process_1 = require("child_process");
function downloadBackup(_a) {
return __awaiter(this, arguments, void 0, function* ({ backup, res, }) {
try {
const { mainBackupDir, userBackupDir, tempBackupExportName } = (0, grab_dir_names_1.default)({
userId: backup.user_id,
});
if (backup.user_id && !userBackupDir) {
return {
success: false,
msg: `Error grabbing user backup directory`,
};
}
if (!backup.uuid) {
return {
success: false,
msg: `No UUID found for backup`,
};
}
const allBackupsDir = backup.user_id && userBackupDir ? userBackupDir : mainBackupDir;
const targetBackupDir = path_1.default.join(allBackupsDir, backup.uuid);
const zipFilesCmd = (0, child_process_1.execSync)(`tar -cJf ${tempBackupExportName} ${backup.uuid}`, {
cwd: allBackupsDir,
});
const exportFilePath = path_1.default.join(allBackupsDir, tempBackupExportName);
const readStream = fs_1.default.createReadStream(exportFilePath);
readStream.pipe(res);
readStream.on("end", () => {
console.log("Pipe Complete!");
setTimeout(() => {
(0, child_process_1.execSync)(`rm -f ${tempBackupExportName}`, {
cwd: allBackupsDir,
});
}, 1000);
});
return { success: true };
}
catch (error) {
return {
success: false,
msg: `Failed to write backup files`,
error: error.message,
};
}
});
}
@@ -13,15 +13,13 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = addDbEntry;
const sanitize_html_1 = __importDefault(require("sanitize-html"));
const sanitizeHtmlOptions_1 = __importDefault(require("../html/sanitizeHtmlOptions"));
const updateDbEntry_1 = __importDefault(require("./updateDbEntry"));
const lodash_1 = __importDefault(require("lodash"));
const encrypt_1 = __importDefault(require("../../dsql/encrypt"));
const conn_db_handler_1 = __importDefault(require("../../../utils/db/conn-db-handler"));
const check_if_is_master_1 = __importDefault(require("../../../utils/check-if-is-master"));
const debug_log_1 = __importDefault(require("../../../utils/logging/debug-log"));
const purge_default_fields_1 = __importDefault(require("../../../utils/purge-default-fields"));
const grab_parsed_value_1 = __importDefault(require("./grab-parsed-value"));
/**
* Add a db Entry Function
*/
@@ -74,7 +72,6 @@ function addDbEntry(_a) {
}
}
function generateQuery(data) {
var _a, _b;
const dataKeys = Object.keys(data);
let insertKeysArray = [];
let insertValuesArray = [];
@@ -82,47 +79,21 @@ function addDbEntry(_a) {
try {
const dataKey = dataKeys[i];
let value = data[dataKey];
const targetFieldSchemaArray = tableSchema
? (_a = tableSchema === null || tableSchema === void 0 ? void 0 : tableSchema.fields) === null || _a === void 0 ? void 0 : _a.filter((field) => field.fieldName == dataKey)
: null;
const targetFieldSchema = targetFieldSchemaArray && targetFieldSchemaArray[0]
? targetFieldSchemaArray[0]
: null;
if (value == null || value == undefined)
continue;
if (((_b = targetFieldSchema === null || targetFieldSchema === void 0 ? void 0 : targetFieldSchema.dataType) === null || _b === void 0 ? void 0 : _b.match(/int$/i)) &&
typeof value == "string" &&
!(value === null || value === void 0 ? void 0 : value.match(/./)))
continue;
if (targetFieldSchema === null || targetFieldSchema === void 0 ? void 0 : targetFieldSchema.encrypted) {
value = (0, encrypt_1.default)({
data: value,
const parsedValue = (0, grab_parsed_value_1.default)({
dataKey,
encryptionKey,
encryptionSalt,
tableSchema,
value,
});
console.log("DSQL: Encrypted value =>", value);
}
const htmlRegex = /<[^>]+>/g;
if ((targetFieldSchema === null || targetFieldSchema === void 0 ? void 0 : targetFieldSchema.richText) ||
String(value).match(htmlRegex)) {
value = (0, sanitize_html_1.default)(value, sanitizeHtmlOptions_1.default);
}
if (targetFieldSchema === null || targetFieldSchema === void 0 ? void 0 : targetFieldSchema.pattern) {
const pattern = new RegExp(targetFieldSchema.pattern, targetFieldSchema.patternFlags || "");
if (!pattern.test(value)) {
console.log("DSQL: Pattern not matched =>", value);
value = "";
}
}
if (typeof parsedValue == "undefined")
continue;
insertKeysArray.push("`" + dataKey + "`");
if (typeof value === "object") {
value = JSON.stringify(value);
}
if (typeof value == "number") {
insertValuesArray.push(String(value));
if (typeof parsedValue == "number") {
insertValuesArray.push(String(parsedValue));
}
else {
insertValuesArray.push(value);
insertValuesArray.push(parsedValue);
}
}
catch (error) {
14 dist/package-shared/functions/backend/db/grab-parsed-value.d.ts vendored Normal file
@@ -0,0 +1,14 @@
import { DSQL_TableSchemaType } from "../../../types";
type Param = {
value?: any;
tableSchema?: DSQL_TableSchemaType;
encryptionKey?: string;
encryptionSalt?: string;
dataKey: string;
};
/**
* # Update DB Function
* @description
*/
export default function grabParsedValue({ value, tableSchema, encryptionKey, encryptionSalt, dataKey, }: Param): any;
export {};
68 dist/package-shared/functions/backend/db/grab-parsed-value.js vendored Normal file
@@ -0,0 +1,68 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = grabParsedValue;
const sanitize_html_1 = __importDefault(require("sanitize-html"));
const sanitizeHtmlOptions_1 = __importDefault(require("../html/sanitizeHtmlOptions"));
const encrypt_1 = __importDefault(require("../../dsql/encrypt"));
/**
* # Update DB Function
* @description
*/
function grabParsedValue({ value, tableSchema, encryptionKey, encryptionSalt, dataKey, }) {
var _a, _b;
let newValue = value;
const targetFieldSchemaArray = tableSchema
? (_a = tableSchema === null || tableSchema === void 0 ? void 0 : tableSchema.fields) === null || _a === void 0 ? void 0 : _a.filter((field) => field.fieldName === dataKey)
: null;
const targetFieldSchema = targetFieldSchemaArray && targetFieldSchemaArray[0]
? targetFieldSchemaArray[0]
: null;
if (typeof newValue == "undefined")
return;
if (typeof newValue == "object" && !newValue)
newValue = "";
const htmlRegex = /<[^>]+>/g;
if ((targetFieldSchema === null || targetFieldSchema === void 0 ? void 0 : targetFieldSchema.richText) || String(newValue).match(htmlRegex)) {
newValue = (0, sanitize_html_1.default)(newValue, sanitizeHtmlOptions_1.default);
}
if (((_b = targetFieldSchema === null || targetFieldSchema === void 0 ? void 0 : targetFieldSchema.dataType) === null || _b === void 0 ? void 0 : _b.match(/int$/i)) &&
typeof value == "string" &&
!(value === null || value === void 0 ? void 0 : value.match(/./))) {
value = "";
}
if (targetFieldSchema === null || targetFieldSchema === void 0 ? void 0 : targetFieldSchema.encrypted) {
newValue = (0, encrypt_1.default)({
data: newValue,
encryptionKey,
encryptionSalt,
});
}
if (typeof newValue === "object") {
newValue = JSON.stringify(newValue);
}
if (targetFieldSchema === null || targetFieldSchema === void 0 ? void 0 : targetFieldSchema.pattern) {
const pattern = new RegExp(targetFieldSchema.pattern, targetFieldSchema.patternFlags || "");
if (!pattern.test(newValue)) {
console.log("DSQL: Pattern not matched =>", newValue);
newValue = "";
}
}
if (typeof newValue === "string" && newValue.match(/^null$/i)) {
newValue = {
toSqlString: function () {
return "NULL";
},
};
}
if (typeof newValue === "string" && !newValue.match(/./i)) {
newValue = {
toSqlString: function () {
return "NULL";
},
};
}
return newValue;
}
@@ -13,20 +13,17 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = updateDbEntry;
const sanitize_html_1 = __importDefault(require("sanitize-html"));
const sanitizeHtmlOptions_1 = __importDefault(require("../html/sanitizeHtmlOptions"));
const encrypt_1 = __importDefault(require("../../dsql/encrypt"));
const check_if_is_master_1 = __importDefault(require("../../../utils/check-if-is-master"));
const conn_db_handler_1 = __importDefault(require("../../../utils/db/conn-db-handler"));
const lodash_1 = __importDefault(require("lodash"));
const purge_default_fields_1 = __importDefault(require("../../../utils/purge-default-fields"));
const grab_parsed_value_1 = __importDefault(require("./grab-parsed-value"));
/**
* # Update DB Function
* @description
*/
function updateDbEntry(_a) {
return __awaiter(this, arguments, void 0, function* ({ dbContext, dbFullName, tableName, data, tableSchema, identifierColumnName, identifierValue, encryptionKey, encryptionSalt, forceLocal, debug, }) {
var _b;
/**
* Check if data is valid
*/
@@ -57,55 +54,21 @@ function updateDbEntry(_a) {
try {
const dataKey = dataKeys[i];
let value = newData[dataKey];
const targetFieldSchemaArray = tableSchema
? (_b = tableSchema === null || tableSchema === void 0 ? void 0 : tableSchema.fields) === null || _b === void 0 ? void 0 : _b.filter((field) => field.fieldName === dataKey)
: null;
const targetFieldSchema = targetFieldSchemaArray && targetFieldSchemaArray[0]
? targetFieldSchemaArray[0]
: null;
if (value == null || value == undefined)
continue;
const htmlRegex = /<[^>]+>/g;
if ((targetFieldSchema === null || targetFieldSchema === void 0 ? void 0 : targetFieldSchema.richText) || String(value).match(htmlRegex)) {
value = (0, sanitize_html_1.default)(value, sanitizeHtmlOptions_1.default);
}
if (targetFieldSchema === null || targetFieldSchema === void 0 ? void 0 : targetFieldSchema.encrypted) {
value = (0, encrypt_1.default)({
data: value,
const parsedValue = (0, grab_parsed_value_1.default)({
dataKey,
encryptionKey,
encryptionSalt,
tableSchema,
value,
});
}
if (typeof value === "object") {
value = JSON.stringify(value);
}
if (targetFieldSchema === null || targetFieldSchema === void 0 ? void 0 : targetFieldSchema.pattern) {
const pattern = new RegExp(targetFieldSchema.pattern, targetFieldSchema.patternFlags || "");
if (!pattern.test(value)) {
console.log("DSQL: Pattern not matched =>", value);
value = "";
}
}
if (typeof value === "string" && value.match(/^null$/i)) {
value = {
toSqlString: function () {
return "NULL";
},
};
}
if (typeof value === "string" && !value.match(/./i)) {
value = {
toSqlString: function () {
return "NULL";
},
};
}
if (typeof parsedValue == "undefined")
continue;
updateKeyValueArray.push(`\`${dataKey}\`=?`);
if (typeof value == "number") {
updateValues.push(String(value));
if (typeof parsedValue == "number") {
updateValues.push(String(parsedValue));
}
else {
updateValues.push(value);
updateValues.push(parsedValue);
}
////////////////////////////////////////
////////////////////////////////////////
@@ -21,7 +21,7 @@ function handleBackup(_a) {
return __awaiter(this, arguments, void 0, function* ({ appBackup, userId, }) {
var _b;
const { appConfig } = (0, grab_config_1.default)();
const maxBackups = ((_b = appConfig.main.max_backups) === null || _b === void 0 ? void 0 : _b.value) || 20;
const maxBackups = ((_b = appConfig.main.max_backups) === null || _b === void 0 ? void 0 : _b.value) || 4;
const { count: existingAppBackupsCount } = yield (0, grab_user_resource_1.default)({
tableName: "backups",
isSuperUser: true,
@@ -36,7 +36,8 @@ function handleBackup(_a) {
countOnly: true,
});
if (existingAppBackupsCount && existingAppBackupsCount >= maxBackups) {
const { single: oldestAppBackup } = yield (0, grab_user_resource_1.default)({
console.log(`Backups exceed Limit ...`);
const { batch: oldestAppBackups } = yield (0, grab_user_resource_1.default)({
tableName: "backups",
isSuperUser: true,
query: {
@@ -48,13 +49,19 @@ function handleBackup(_a) {
},
order: {
field: "id",
strategy: "ASC",
strategy: "DESC",
},
limit: 1,
},
});
if (oldestAppBackup === null || oldestAppBackup === void 0 ? void 0 : oldestAppBackup.id) {
yield (0, delete_backup_1.default)({ backup: oldestAppBackup });
if (oldestAppBackups) {
for (let i = 0; i < oldestAppBackups.length; i++) {
const backup = oldestAppBackups[i];
console.log(`Handling Backup ${backup.uuid} ...`);
if (i < maxBackups - 1)
continue;
console.log(`Deleting Backup ${backup.uuid} ...`);
yield (0, delete_backup_1.default)({ backup: backup });
}
}
}
yield (0, add_backup_1.default)({ targetUserId: userId });
@@ -3,6 +3,7 @@ type Param = {
url?: string;
method?: string;
hostname?: string;
host?: string;
path?: string;
port?: number | string;
headers?: object;
@@ -11,5 +12,5 @@ type Param = {
/**
* # Make Https Request
*/
export default function httpsRequest({ url, method, hostname, path, headers, body, port, scheme, }: Param): Promise<unknown>;
export default function httpsRequest<Res extends any = any>({ url, method, hostname, host, path, headers, body, port, scheme, }: Param): Promise<Res>;
export {};
@@ -10,17 +10,13 @@ const url_1 = require("url");
/**
* # Make Https Request
*/
function httpsRequest({ url, method, hostname, path, headers, body, port, scheme, }) {
function httpsRequest({ url, method, hostname, host, path, headers, body, port, scheme, }) {
var _a;
const reqPayloadString = body ? JSON.stringify(body) : null;
const PARSED_URL = url ? new url_1.URL(url) : null;
////////////////////////////////////////////////
////////////////////////////////////////////////
////////////////////////////////////////////////
/** @type {any} */
let requestOptions = {
method: method || "GET",
hostname: PARSED_URL ? PARSED_URL.hostname : hostname,
hostname: PARSED_URL ? PARSED_URL.hostname : host || hostname,
port: (scheme === null || scheme === void 0 ? void 0 : scheme.match(/https/i))
? 443
: PARSED_URL
@@ -34,7 +30,6 @@ function httpsRequest({ url, method, hostname, path, headers, body, port, scheme
};
if (path)
requestOptions.path = path;
// if (href) requestOptions.href = href;
if (headers)
requestOptions.headers = headers;
if (body) {
@@ -43,31 +38,24 @@ function httpsRequest({ url, method, hostname, path, headers, body, port, scheme
? Buffer.from(reqPayloadString).length
: undefined;
}
////////////////////////////////////////////////
////////////////////////////////////////////////
////////////////////////////////////////////////
return new Promise((res, rej) => {
var _a;
const httpsRequest = ((scheme === null || scheme === void 0 ? void 0 : scheme.match(/https/i))
? https_1.default
: ((_a = PARSED_URL === null || PARSED_URL === void 0 ? void 0 : PARSED_URL.protocol) === null || _a === void 0 ? void 0 : _a.match(/https/i))
? https_1.default
: http_1.default).request(
/* ====== Request Options object ====== */
requestOptions,
////////////////////////////////////////////////
////////////////////////////////////////////////
////////////////////////////////////////////////
/* ====== Callback function ====== */
(response) => {
: http_1.default).request(requestOptions, (response) => {
var str = "";
// ## another chunk of data has been received, so append it to `str`
response.on("data", function (chunk) {
str += chunk;
});
// ## the whole response has been received, so we just print it out here
response.on("end", function () {
try {
res(JSON.parse(str));
}
catch (error) {
res(str);
}
});
response.on("error", (error) => {
console.log("HTTP response error =>", error.message);
19 dist/package-shared/types/dsql.d.ts vendored
@@ -1,4 +1,4 @@
export declare const DsqlTables: readonly ["users", "api_keys", "api_keys_scoped_resources", "invitations", "user_databases", "user_database_tables", "user_media", "user_private_folders", "delegated_users", "delegated_resources", "mariadb_user_privileges", "unsubscribes", "notifications", "deleted_api_keys", "servers", "process_queue", "backups", "mariadb_users", "mariadb_user_databases", "mariadb_user_tables"];
export declare const DsqlTables: readonly ["users", "api_keys", "api_keys_scoped_resources", "invitations", "user_databases", "user_database_tables", "user_media", "user_private_folders", "delegated_users", "delegated_resources", "mariadb_user_privileges", "unsubscribes", "notifications", "deleted_api_keys", "servers", "process_queue", "backups", "mariadb_users", "mariadb_user_databases", "mariadb_user_tables", "user_private_media_keys"];
export type DSQL_DATASQUIREL_USERS = {
id?: number;
uuid?: string;
@@ -352,3 +352,20 @@ export type DSQL_DATASQUIREL_MARIADB_USER_TABLES = {
date_updated_code?: number;
date_updated_timestamp?: string;
};
export type DSQL_DATASQUIREL_USER_PRIVATE_MEDIA_KEYS = {
id?: number;
uuid?: string;
user_id?: number;
media_id?: number;
key?: string;
description?: string;
expiration?: number;
expiration_paradigm?: "seconds" | "minutes" | "hours" | "days" | "weeks" | "months" | "years";
expiration_milliseconds?: number;
date_created?: string;
date_created_code?: number;
date_created_timestamp?: string;
date_updated?: string;
date_updated_code?: number;
date_updated_timestamp?: string;
};
1 dist/package-shared/types/dsql.js vendored
@@ -22,4 +22,5 @@ exports.DsqlTables = [
"mariadb_users",
"mariadb_user_databases",
"mariadb_user_tables",
"user_private_media_keys",
];
148 dist/package-shared/types/index.d.ts vendored
@@ -371,6 +371,8 @@ export interface PostInsertReturn {
export type UserType = DATASQUIREL_LoggedInUser & {
isSuperUser?: boolean;
staticHost?: string;
appHost?: string;
appName?: string;
};
export interface ApiKeyDef {
name: string;
@@ -1377,6 +1379,9 @@ export type PagePropsType = {
appVersion?: (typeof AppVersions)[number];
isMailAvailable?: boolean;
websocketURL?: string;
docsObject?: DocsServerProps | null;
docsPages?: DocsLinkType[] | null;
docsPageEditURL?: string | null;
};
export type APIResponseObject<T extends any = any> = {
success: boolean;
@@ -1413,24 +1418,18 @@ export declare const SignUpParadigms: readonly [{
readonly name: "google";
}];
export declare const QueueJobTypes: readonly ["dummy", "import-database"];
export declare const WebSocketEvents: readonly ["client:check-queue", "client:dev:queue", "client:delete-queue", "client:pty-shell", "server:error", "server:message", "server:ready", "server:success", "server:update", "server:queue", "server:dev:queue", "server:queue-deleted", "server:pty-shell"];
export declare const WebSocketEvents: readonly ["client:check-queue", "client:dev:queue", "client:delete-queue", "client:pty-shell", "client:su-logs", "client:su-kill-logs", "server:error", "server:message", "server:ready", "server:success", "server:update", "server:queue", "server:dev:queue", "server:queue-deleted", "server:pty-shell", "server:su-logs", "server:su-kill-logs"];
export type WebSocketDataType = {
event: (typeof WebSocketEvents)[number];
data?: {
queue?: DSQL_DATASQUIREL_PROCESS_QUEUE;
containerName?: string;
killLogs?: boolean;
};
error?: string;
message?: string;
};
export declare const DatasquirelWindowEvents: readonly ["queue-started", "queue-complete", "queue-running"];
export type DatasquirelWindowEventPayloadType = {
event: (typeof DatasquirelWindowEvents)[number];
data?: {
queue?: DSQL_DATASQUIREL_PROCESS_QUEUE;
};
error?: string;
message?: string;
};
/**
* # Docker Compose Types
*/
@@ -1510,6 +1509,7 @@ export type DsqlAppData = {
DSQL_FACEBOOK_APP_ID?: string | null;
DSQL_GITHUB_ID?: string | null;
DSQL_HOST_MACHINE_IP?: string | null;
DSQL_DEPLOYMENT_NAME?: string | null;
};
export declare const MediaTypes: readonly ["image", "file", "video"];
export type MediaUploadDataType = ImageObjectType & FileObjectType & {
@@ -1643,6 +1643,7 @@ export type DefaultLocalResourcesHookParams<T extends {
}> = {
refresh?: number;
setLoading?: React.Dispatch<React.SetStateAction<boolean>>;
setReady?: React.Dispatch<React.SetStateAction<boolean>>;
loadingEndTimeout?: number;
user?: UserType | null;
ready?: boolean;
@@ -1878,6 +1879,10 @@ export type SendEmailCodeParams = {
useLocal?: boolean;
apiVersion?: string;
dbUserId?: string | number;
/**
* HTML string with {{code}} placeholder for the code
*/
html?: string;
};
export type UpdateUserParams<T extends DSQL_DATASQUIREL_USERS = DSQL_DATASQUIREL_USERS & {
[k: string]: any;
@@ -1929,5 +1934,130 @@ export type GoogleAuthParams = {
*/
useLocal?: boolean;
apiVersion?: string;
skipWriteAuthFile?: boolean;
cleanupTokens?: boolean;
};
export type ContactFormType = {
first_name?: string;
last_name?: string;
email?: string;
phone?: string;
message?: string;
};
export declare const TimeParadigms: readonly [{
readonly value: "seconds";
readonly label: "Seconds";
}, {
readonly value: "minutes";
readonly label: "Minutes";
}, {
readonly value: "hours";
readonly label: "Hours";
}, {
readonly value: "days";
readonly label: "Days";
}, {
readonly value: "weeks";
readonly label: "Weeks";
}, {
readonly value: "months";
readonly label: "Months";
}, {
readonly value: "years";
readonly label: "Years";
}];
export type GithubPublicAPIResJSON = {
name: string;
path: string;
sha: string;
size: number;
url: string;
html_url: string;
git_url: string;
download_url: string;
type: string;
content: string;
encoding: string;
_links: {
self: string;
git: string;
html: string;
};
};
export type DocsServerProps = {
md?: string | null;
mdx_source?: any | null;
meta_title?: string | null;
meta_description?: string | null;
page_title?: string | null;
page_description?: string | null;
page_path?: string | null;
};
export type DocsLinkType = {
title: string;
href: string;
strict?: boolean;
children?: DocsLinkType[];
editPage?: string;
};
export interface GiteaBranchRes {
name: string;
commit: GiteaBranchResCommit;
protected: boolean;
required_approvals: number;
enable_status_check: boolean;
status_check_contexts: any[];
user_can_push: boolean;
user_can_merge: boolean;
effective_branch_protection_name: string;
}
export interface GiteaBranchResCommit {
id: string;
message: string;
url: string;
author: GiteaBranchResCommitAuthor;
committer: GiteaBranchResCommitCommitter;
verification: GiteaBranchResCommitVerification;
timestamp: string;
added: any;
removed: any;
modified: any;
}
export interface GiteaBranchResCommitAuthor {
name: string;
email: string;
username: string;
}
export interface GiteaBranchResCommitCommitter {
name: string;
email: string;
username: string;
}
export interface GiteaBranchResCommitVerification {
verified: boolean;
reason: string;
signature: string;
signer: any;
payload: string;
}
export interface GiteaTreeRes {
sha: string;
url: string;
tree: GiteaTreeResTree[];
truncated: boolean;
page: number;
total_count: number;
}
export interface GiteaTreeResTree {
path: string;
mode: string;
type: "tree" | "blob";
size: number;
sha: string;
url: string;
}
export declare const OpsActions: readonly ["exit", "test", "restart-web-app", "restart-db", "restart-all", "clear"];
export type OpsObject = {
action: (typeof OpsActions)[number];
};
export {};
44 dist/package-shared/types/index.js vendored
@@ -1,6 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.UserAPIAuthActions = exports.UserAPIParadigms = exports.TargetMediaParadigms = exports.EnvKeys = exports.AppVersions = exports.APIParadigms = exports.UserSQLPermissions = exports.SQLPermissions = exports.InvitedUserSelectFields = exports.DelegatedUserSelectFields = exports.UserSelectFields = exports.IndexTypes = exports.DefaultSQLValuesLiteral = exports.CurrentlyEditedFieldActions = exports.VideoMimeTypes = exports.FileMimeTypes = exports.ImageMimeTypes = exports.MediaTypes = exports.DockerComposeServices = exports.DatasquirelWindowEvents = exports.WebSocketEvents = exports.QueueJobTypes = exports.SignUpParadigms = exports.UserTypes = exports.QueryFields = exports.DsqlCrudActions = exports.DataCrudRequestMethodsLowerCase = exports.DataCrudRequestMethods = exports.ServerQueryEqualities = exports.ServerQueryOperators = exports.TextFieldTypesArray = exports.UsersOmitedFields = void 0;
exports.OpsActions = exports.TimeParadigms = exports.UserAPIAuthActions = exports.UserAPIParadigms = exports.TargetMediaParadigms = exports.EnvKeys = exports.AppVersions = exports.APIParadigms = exports.UserSQLPermissions = exports.SQLPermissions = exports.InvitedUserSelectFields = exports.DelegatedUserSelectFields = exports.UserSelectFields = exports.IndexTypes = exports.DefaultSQLValuesLiteral = exports.CurrentlyEditedFieldActions = exports.VideoMimeTypes = exports.FileMimeTypes = exports.ImageMimeTypes = exports.MediaTypes = exports.DockerComposeServices = exports.DatasquirelWindowEvents = exports.WebSocketEvents = exports.QueueJobTypes = exports.SignUpParadigms = exports.UserTypes = exports.QueryFields = exports.DsqlCrudActions = exports.DataCrudRequestMethodsLowerCase = exports.DataCrudRequestMethods = exports.ServerQueryEqualities = exports.ServerQueryOperators = exports.TextFieldTypesArray = exports.UsersOmitedFields = void 0;
exports.UsersOmitedFields = [
"password",
"social_id",
@@ -90,6 +90,8 @@ exports.WebSocketEvents = [
"client:dev:queue",
"client:delete-queue",
"client:pty-shell",
"client:su-logs",
"client:su-kill-logs",
/**
* # Server Events
* @description Events sent from Server to Client
@@ -103,6 +105,8 @@ exports.WebSocketEvents = [
"server:dev:queue",
"server:queue-deleted",
"server:pty-shell",
"server:su-logs",
"server:su-kill-logs",
];
exports.DatasquirelWindowEvents = [
"queue-started",
@@ -409,3 +413,41 @@ exports.UserAPIAuthActions = [
"reset-password",
"google-login",
];
exports.TimeParadigms = [
{
value: "seconds",
label: "Seconds",
},
{
value: "minutes",
label: "Minutes",
},
{
value: "hours",
label: "Hours",
},
{
value: "days",
label: "Days",
},
{
value: "weeks",
label: "Weeks",
},
{
value: "months",
label: "Months",
},
{
value: "years",
label: "Years",
},
];
exports.OpsActions = [
"exit",
"test",
"restart-web-app",
"restart-db",
"restart-all",
"clear",
];
@ -98,5 +98,7 @@ export default function grabDirNames(param?: Param): {
|
||||
dsqlDbDockerComposeFileAlt: string;
|
||||
dsqlDbDockerComposeFileName: string;
|
||||
dsqlDbDockerComposeFileNameAlt: string;
|
||||
tempBackupExportName: string;
|
||||
opsJSONFileName: string;
|
||||
};
|
||||
export {};
|
||||
|
@ -153,6 +153,8 @@ function grabDirNames(param) {
|
||||
const distroEnterpriseExportTarName = `${distroEnterpriseName}.tar.xz`;
|
||||
const communityDistroTempDir = path_1.default.resolve(appDir, "build", "community", ".tmp");
|
||||
const communityDistroDir = path_1.default.resolve(communityDistroTempDir, distroDirName);
|
||||
const tempBackupExportName = "tmp-export-backup.tar.xz";
|
||||
const opsJSONFileName = "ops.json";
|
||||
return {
|
||||
appDir,
|
||||
privateDataDir,
|
||||
@ -246,5 +248,7 @@ function grabDirNames(param) {
|
||||
dsqlDbDockerComposeFileAlt,
|
||||
dsqlDbDockerComposeFileName,
|
||||
dsqlDbDockerComposeFileNameAlt,
|
||||
tempBackupExportName,
|
||||
opsJSONFileName,
|
||||
};
|
||||
}
|
||||
|
@ -1,4 +1,7 @@
|
||||
export default function grabDockerStackServicesNames(): {
|
||||
type Params = {
|
||||
deploymentName?: string | null;
|
||||
};
|
||||
export default function grabDockerStackServicesNames(params?: Params): {
|
||||
deploymentName: string;
|
||||
maxScaleServiceName: string;
|
||||
dbServiceName: string;
|
||||
@ -14,3 +17,4 @@ export default function grabDockerStackServicesNames(): {
|
||||
websocketServiceName: string;
|
||||
cronServiceName: string;
|
||||
};
|
||||
export {};
|
||||
|
@ -1,8 +1,8 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.default = grabDockerStackServicesNames;
|
||||
function grabDockerStackServicesNames() {
|
||||
const deploymentName = process.env.DSQL_DEPLOYMENT_NAME || "dsql";
|
||||
function grabDockerStackServicesNames(params) {
|
||||
const deploymentName = (params === null || params === void 0 ? void 0 : params.deploymentName) || process.env.DSQL_DEPLOYMENT_NAME || "dsql";
|
||||
const maxScaleServiceName = `${deploymentName}-dsql-maxscale`;
|
||||
const dbServiceName = `${deploymentName}-dsql-db`;
|
||||
const dbCronServiceName = `${deploymentName}-dsql-db-cron`;
|
||||
|
@ -7,4 +7,5 @@ export default function grabIPAddresses(): {
|
||||
globalIPPrefix: string;
|
||||
webSocketIP: string;
|
||||
dbCronIP: string;
|
||||
reverseProxyIP: string;
|
||||
};
|
||||
|
@ -14,6 +14,7 @@ function grabIPAddresses() {
|
||||
const mainDBIP = `${globalIPPrefix}.${db}`;
|
||||
const webSocketIP = `${globalIPPrefix}.${websocket}`;
|
||||
const dbCronIP = `${globalIPPrefix}.${db_cron}`;
|
||||
const reverseProxyIP = `${globalIPPrefix}.${reverse_proxy}`;
|
||||
const localHostIP = `${globalIPPrefix}.1`;
|
||||
return {
|
||||
webAppIP,
|
||||
@ -24,5 +25,6 @@ function grabIPAddresses() {
|
||||
globalIPPrefix,
|
||||
webSocketIP,
|
||||
dbCronIP,
|
||||
reverseProxyIP,
|
||||
};
|
||||
}
|
||||
|
1 dist/package-shared/utils/generate-random-string.d.ts (vendored, new file)
@ -0,0 +1 @@
|
||||
export default function genRndStr(length?: number, symbols?: boolean): string;
|
15 dist/package-shared/utils/generate-random-string.js (vendored, new file)
@ -0,0 +1,15 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = genRndStr;
function genRndStr(length, symbols) {
    let characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
    if (symbols)
        characters += "-_[]()@";
    let result = "";
    const finalLength = length || 12;
    for (let i = 0; i < finalLength; i++) {
        const randomIndex = Math.floor(Math.random() * characters.length);
        result += characters.charAt(randomIndex);
    }
    return result;
}
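For reference, a minimal usage sketch of the new genRndStr helper added above. The deep import path is only an assumption that mirrors the dist layout shown in this diff; the lengths follow from the default of 12 characters.

import genRndStr from "@moduletrace/datasquirel/dist/package-shared/utils/generate-random-string";

// Default: 12 alphanumeric characters
const shortId = genRndStr();

// 32 characters, drawn from the alphanumeric set plus "-_[]()@"
const longerKey = genRndStr(32, true);

console.log(shortId.length); // 12
console.log(longerKey.length); // 32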
1 dist/package-shared/utils/grab-machine-ip.d.ts (vendored, new file)
@ -0,0 +1 @@
|
||||
export default function getMachineIPAddress(): string | null;
|
24 dist/package-shared/utils/grab-machine-ip.js (vendored, new file)
@ -0,0 +1,24 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = getMachineIPAddress;
const os_1 = require("os");
function getMachineIPAddress() {
    try {
        const interfaces = (0, os_1.networkInterfaces)();
        for (const ifaceName in interfaces) {
            const iface = interfaces[ifaceName];
            if (Array.isArray(iface)) {
                for (const address of iface) {
                    if (address.family === "IPv4" && !address.internal) {
                        return address.address;
                    }
                }
            }
        }
        return null;
    }
    catch (error) {
        console.error(`Error accessing network interfaces: ${error.message}`);
        return null;
    }
}
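A hedged usage sketch of the new getMachineIPAddress utility: it returns the first non-internal IPv4 address it finds, or null. Wiring the result into DSQL_HOST_MACHINE_IP is illustrative only, though that env key does appear in the type changes further down; the import path is likewise assumed from the dist layout.

import getMachineIPAddress from "@moduletrace/datasquirel/dist/package-shared/utils/grab-machine-ip";

const machineIP = getMachineIPAddress();

if (machineIP) {
    // e.g. "192.168.1.24" -- first external IPv4 interface found
    process.env.DSQL_HOST_MACHINE_IP = machineIP;
} else {
    console.warn("No external IPv4 interface found; falling back to localhost");
}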
@ -11,7 +11,7 @@ function grabUserMainSqlUserName({ HOST, user, username, }) {
|
||||
const { webAppIP, maxScaleIP } = (0, grab_ip_addresses_1.default)();
|
||||
const finalUsername = username || sqlUsername;
|
||||
const finalHost = HOST || maxScaleIP || "127.0.0.1";
|
||||
const fullName = `${finalUsername}@${webAppIP}`;
|
||||
const fullName = `${finalUsername}@${finalHost}`;
|
||||
return {
|
||||
username: finalUsername,
|
||||
host: finalHost,
|
||||
|
11 dist/package-shared/utils/setup-db.d.ts (vendored, deleted)
@ -1,11 +0,0 @@
|
||||
import mariadb, { ConnectionConfig } from "mariadb";
|
||||
type Params = {
|
||||
useLocal?: boolean;
|
||||
dbConfig?: ConnectionConfig;
|
||||
ssl?: boolean;
|
||||
connectionLimit?: number;
|
||||
};
|
||||
export default function setupDSQLDb({ dbConfig, ssl }: Params): Promise<{
|
||||
conn: mariadb.Connection;
|
||||
}>;
|
||||
export {};
|
53 dist/package-shared/utils/setup-db.js (vendored, deleted)
@ -1,53 +0,0 @@
|
||||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.default = setupDSQLDb;
|
||||
const grabDbSSL_1 = __importDefault(require("./backend/grabDbSSL"));
|
||||
const mariadb_1 = __importDefault(require("mariadb"));
|
||||
function setupDSQLDb(_a) {
|
||||
return __awaiter(this, arguments, void 0, function* ({ dbConfig, ssl }) {
|
||||
const conn = yield mariadb_1.default.createConnection(Object.assign(Object.assign({ host: process.env.DSQL_DB_HOST, user: process.env.DSQL_DB_USERNAME, password: process.env.DSQL_DB_PASSWORD, database: process.env.DSQL_DB_NAME, charset: "utf8mb4" }, dbConfig), { ssl: ssl ? (0, grabDbSSL_1.default)() : undefined, supportBigNumbers: true, bigNumberStrings: false, dateStrings: true, bigIntAsNumber: true, metaAsArray: true }));
|
||||
// const conn = mariadb.createPool({
|
||||
// host: process.env.DSQL_DB_HOST,
|
||||
// user: process.env.DSQL_DB_USERNAME,
|
||||
// password: process.env.DSQL_DB_PASSWORD,
|
||||
// database: process.env.DSQL_DB_NAME,
|
||||
// charset: "utf8mb4",
|
||||
// ...dbConfig,
|
||||
// ssl: ssl ? grabDbSSL() : undefined,
|
||||
// connectionLimit,
|
||||
// supportBigNumbers: true,
|
||||
// bigNumberStrings: false,
|
||||
// dateStrings: true,
|
||||
// });
|
||||
// let readOnlyConnection;
|
||||
// if (addReadOnlyConn) {
|
||||
// readOnlyConnection = mariadb.createPool({
|
||||
// host: process.env.DSQL_DB_HOST,
|
||||
// user: process.env.DSQL_DB_READ_ONLY_USERNAME,
|
||||
// password: process.env.DSQL_DB_READ_ONLY_PASSWORD,
|
||||
// database: process.env.DSQL_DB_NAME,
|
||||
// charset: "utf8mb4",
|
||||
// ...readOnlyDbConfig,
|
||||
// ssl: ssl ? grabDbSSL() : undefined,
|
||||
// connectionLimit,
|
||||
// });
|
||||
// global.DSQL_READ_ONLY_DB_CONN = readOnlyConnection;
|
||||
// }
|
||||
return {
|
||||
conn,
|
||||
// readOnlyConnection,
|
||||
};
|
||||
});
|
||||
}
|
3 index.ts
@ -35,6 +35,7 @@ import user from "./package-shared/api/user";
|
||||
import localUser from "./package-shared/api/user/local-user";
|
||||
import media from "./package-shared/api/media";
|
||||
import dbHandler from "./package-shared/functions/backend/dbHandler";
|
||||
import httpsRequest from "./package-shared/functions/backend/httpsRequest";
|
||||
|
||||
/**
|
||||
* User Functions Object
|
||||
@ -107,6 +108,8 @@ const datasquirel = {
|
||||
connDbHandler,
|
||||
debugLog,
|
||||
parseEnv,
|
||||
httpsRequest,
|
||||
httpRequest: httpsRequest,
|
||||
},
|
||||
/**
|
||||
* Run Crud actions `get`, `insert`, `update`, `delete`
|
||||
|
@ -4,4 +4,16 @@ This directory contains data (mostly type definitions) shared by both the datasq
## Functions

### Actions

Action functions shared by both the datasquirel NPM package and the datasquirel web app.

### Utils

Utility functions shared by both the datasquirel NPM package and the datasquirel web app.

### API

API functions shared by both the datasquirel NPM package and the datasquirel web app.

## Types
@ -12,6 +12,7 @@ import debugLog from "../../utils/logging/debug-log";
|
||||
import grabCookieExpiryDate from "../../utils/grab-cookie-expirt-date";
|
||||
import grabUserDSQLAPIPath from "../../utils/backend/users/grab-api-path";
|
||||
import queryDSQLAPI from "../../functions/api/query-dsql-api";
|
||||
import postLoginResponseHandler from "../../functions/backend/auth/post-login-response-handler";
|
||||
|
||||
function debugFn(log: any, label?: string) {
|
||||
debugLog({ log, addTime: true, title: "loginUser", label });
|
||||
@ -130,55 +131,18 @@ export default async function loginUser<
|
||||
*/
|
||||
|
||||
if (httpResponse?.success) {
|
||||
let encryptedPayload = encrypt({
|
||||
data: JSON.stringify(httpResponse.payload),
|
||||
encryptionKey: finalEncryptionKey,
|
||||
encryptionSalt: finalEncryptionSalt,
|
||||
});
|
||||
|
||||
try {
|
||||
if (token && encryptedPayload)
|
||||
httpResponse["token"] = encryptedPayload;
|
||||
} catch (error: any) {
|
||||
console.log("Login User HTTP Response Error:", error.message);
|
||||
}
|
||||
|
||||
const cookieNames = getAuthCookieNames({
|
||||
postLoginResponseHandler({
|
||||
database,
|
||||
httpResponse,
|
||||
cleanupTokens,
|
||||
debug,
|
||||
encryptionKey,
|
||||
encryptionSalt,
|
||||
response,
|
||||
secureCookie,
|
||||
skipWriteAuthFile,
|
||||
token,
|
||||
});
|
||||
|
||||
if (httpResponse.csrf && !skipWriteAuthFile) {
|
||||
writeAuthFile(
|
||||
httpResponse.csrf,
|
||||
JSON.stringify(httpResponse.payload),
|
||||
cleanupTokens && httpResponse.payload?.id
|
||||
? { userId: httpResponse.payload.id }
|
||||
: undefined
|
||||
);
|
||||
}
|
||||
|
||||
httpResponse["cookieNames"] = cookieNames;
|
||||
httpResponse["key"] = String(encryptedPayload);
|
||||
|
||||
const authKeyName = cookieNames.keyCookieName;
|
||||
const csrfName = cookieNames.csrfCookieName;
|
||||
|
||||
if (debug) {
|
||||
debugFn(authKeyName, "authKeyName");
|
||||
debugFn(csrfName, "csrfName");
|
||||
debugFn(encryptedPayload, "encryptedPayload");
|
||||
}
|
||||
|
||||
response?.setHeader("Set-Cookie", [
|
||||
`${authKeyName}=${encryptedPayload};samesite=strict;path=/;HttpOnly=true;Expires=${COOKIE_EXPIRY_DATE}${
|
||||
secureCookie ? ";Secure=true" : ""
|
||||
}`,
|
||||
`${csrfName}=${httpResponse.payload?.csrf_k};samesite=strict;path=/;HttpOnly=true;Expires=${COOKIE_EXPIRY_DATE}`,
|
||||
]);
|
||||
|
||||
if (debug) {
|
||||
debugFn("Response Sent!");
|
||||
}
|
||||
}
|
||||
|
||||
return httpResponse;
|
||||
|
@ -28,6 +28,7 @@ export default async function sendEmailCode(
|
||||
useLocal,
|
||||
apiVersion,
|
||||
dbUserId,
|
||||
html,
|
||||
} = params;
|
||||
|
||||
const defaultTempLoginFieldName = "temp_login_code";
|
||||
@ -35,7 +36,9 @@ export default async function sendEmailCode(
|
||||
? temp_code_field_name
|
||||
: defaultTempLoginFieldName;
|
||||
|
||||
const emailHtml = `<p>Please use this code to login</p>\n<h2>{{code}}</h2>\n<p>Please note that this code expires after 15 minutes</p>`;
|
||||
const emailHtml =
|
||||
html ||
|
||||
`<p>Please use this code to login</p>\n<h2>{{code}}</h2>\n<p>Please note that this code expires after 15 minutes</p>`;
|
||||
|
||||
const apiSendEmailCodeParams: APISendEmailCodeFunctionParams = {
|
||||
database,
|
||||
|
@ -1,15 +1,12 @@
|
||||
import encrypt from "../../../functions/dsql/encrypt";
|
||||
import apiGoogleLogin from "../../../functions/api/users/social/api-google-login";
|
||||
import getAuthCookieNames from "../../../functions/backend/cookies/get-auth-cookie-names";
|
||||
import { writeAuthFile } from "../../../functions/backend/auth/write-auth-files";
|
||||
import {
|
||||
APIGoogleLoginFunctionParams,
|
||||
APIResponseObject,
|
||||
GoogleAuthParams,
|
||||
} from "../../../types";
|
||||
import grabCookieExpiryDate from "../../../utils/grab-cookie-expirt-date";
|
||||
import queryDSQLAPI from "../../../functions/api/query-dsql-api";
|
||||
import grabUserDSQLAPIPath from "../../../utils/backend/users/grab-api-path";
|
||||
import postLoginResponseHandler from "../../../functions/backend/auth/post-login-response-handler";
|
||||
|
||||
/**
|
||||
* # SERVER FUNCTION: Login with google Function
|
||||
@ -29,9 +26,9 @@ export default async function googleAuth({
|
||||
loginOnly,
|
||||
useLocal,
|
||||
apiVersion,
|
||||
skipWriteAuthFile,
|
||||
cleanupTokens,
|
||||
}: GoogleAuthParams): Promise<APIResponseObject> {
|
||||
const COOKIE_EXPIRY_DATE = grabCookieExpiryDate();
|
||||
|
||||
const finalEncryptionKey =
|
||||
encryptionKey || process.env.DSQL_ENCRYPTION_PASSWORD;
|
||||
const finalEncryptionSalt =
|
||||
@ -111,36 +108,18 @@ export default async function googleAuth({
|
||||
*
|
||||
* @description make a request to datasquirel.com
|
||||
*/
|
||||
if (httpResponse?.success && httpResponse?.payload) {
|
||||
let encryptedPayload = encrypt({
|
||||
data: JSON.stringify(httpResponse.payload),
|
||||
encryptionKey: finalEncryptionKey,
|
||||
encryptionSalt: finalEncryptionSalt,
|
||||
});
|
||||
|
||||
const cookieNames = getAuthCookieNames({
|
||||
if (httpResponse?.success && httpResponse?.payload && database) {
|
||||
postLoginResponseHandler({
|
||||
database,
|
||||
httpResponse,
|
||||
cleanupTokens,
|
||||
debug,
|
||||
encryptionKey,
|
||||
encryptionSalt,
|
||||
response,
|
||||
secureCookie,
|
||||
skipWriteAuthFile,
|
||||
});
|
||||
|
||||
if (httpResponse.csrf) {
|
||||
writeAuthFile(
|
||||
httpResponse.csrf,
|
||||
JSON.stringify(httpResponse.payload)
|
||||
);
|
||||
}
|
||||
|
||||
httpResponse["cookieNames"] = cookieNames;
|
||||
httpResponse["key"] = String(encryptedPayload);
|
||||
|
||||
const authKeyName = cookieNames.keyCookieName;
|
||||
const csrfName = cookieNames.csrfCookieName;
|
||||
|
||||
response?.setHeader("Set-Cookie", [
|
||||
`${authKeyName}=${encryptedPayload};samesite=strict;path=/;HttpOnly=true;;Expires=${COOKIE_EXPIRY_DATE}${
|
||||
secureCookie ? ";Secure=true" : ""
|
||||
}`,
|
||||
`${csrfName}=${httpResponse.payload?.csrf_k};samesite=strict;path=/;HttpOnly=true;;Expires=${COOKIE_EXPIRY_DATE}`,
|
||||
]);
|
||||
}
|
||||
|
||||
return httpResponse;
|
||||
|
@ -6,4 +6,6 @@ export const AppNames = {
|
||||
PrivateMediaInsertTriggerName: "dsql_trg_user_private_folders_insert",
|
||||
PrivateMediaDeleteTriggerName: "dsql_trg_user_private_folders_delete",
|
||||
WebsocketPathname: "dsql-websocket",
|
||||
ReverseProxyForwardURLHeaderName: "x-original-uri",
|
||||
PrivateAPIAuthHeaderName: "x-api-auth-key",
|
||||
} as const;
|
||||
|
@ -9,6 +9,7 @@ import {
|
||||
import grabHostNames from "../../utils/grab-host-names";
|
||||
import serializeQuery from "../../utils/serialize-query";
|
||||
import { RequestOptions } from "https";
|
||||
import _ from "lodash";
|
||||
|
||||
type Param<T = { [k: string]: any }> = {
|
||||
body?: T;
|
||||
@ -115,7 +116,12 @@ export default async function queryDSQLAPI<
|
||||
payload: undefined,
|
||||
msg: `An error occurred while parsing the response`,
|
||||
error: error.message,
|
||||
errorData: { requestOptions, grabedHostNames },
|
||||
errorData: {
|
||||
requestOptions,
|
||||
grabedHostNames: _.omit(grabedHostNames, [
|
||||
"scheme",
|
||||
]),
|
||||
},
|
||||
} as APIResponseObject);
|
||||
}
|
||||
});
|
||||
@ -138,7 +144,10 @@ export default async function queryDSQLAPI<
|
||||
payload: undefined,
|
||||
msg: `An error occurred while making the request`,
|
||||
error: err.message,
|
||||
errorData: { requestOptions, grabedHostNames },
|
||||
errorData: {
|
||||
requestOptions,
|
||||
grabedHostNames: _.omit(grabedHostNames, ["scheme"]),
|
||||
},
|
||||
} as APIResponseObject);
|
||||
});
|
||||
|
||||
|
@ -7,7 +7,7 @@ import encrypt from "../../dsql/encrypt";
|
||||
import addDbEntry from "../../backend/db/addDbEntry";
|
||||
import loginSocialUser from "./loginSocialUser";
|
||||
import {
|
||||
APILoginFunctionReturn,
|
||||
APIResponseObject,
|
||||
HandleSocialDbFunctionParams,
|
||||
} from "../../../types";
|
||||
import grabDirNames from "../../../utils/backend/names/grab-dir-names";
|
||||
@ -27,7 +27,7 @@ export default async function handleSocialDb({
|
||||
debug,
|
||||
loginOnly,
|
||||
apiUserId,
|
||||
}: HandleSocialDbFunctionParams): Promise<APILoginFunctionReturn> {
|
||||
}: HandleSocialDbFunctionParams): Promise<APIResponseObject> {
|
||||
try {
|
||||
const finalDbName = grabDbFullName({
|
||||
dbName: database,
|
||||
@ -200,16 +200,9 @@ export default async function handleSocialDb({
|
||||
}).then(() => {});
|
||||
}
|
||||
|
||||
const { STATIC_ROOT } = grabDirNames();
|
||||
|
||||
if (!STATIC_ROOT) {
|
||||
console.log("Static File ENV not Found!");
|
||||
return {
|
||||
success: false,
|
||||
payload: null,
|
||||
msg: "Static File ENV not Found!",
|
||||
};
|
||||
}
|
||||
const { userPrivateMediaDir, userPublicMediaDir } = grabDirNames({
|
||||
userId: newUser.payload.insertId,
|
||||
});
|
||||
|
||||
/**
|
||||
* Create new user folder and file
|
||||
@ -217,21 +210,10 @@ export default async function handleSocialDb({
|
||||
* @description Create new user folder and file
|
||||
*/
|
||||
if (!database || database?.match(/^datasquirel$/)) {
|
||||
let newUserSchemaFolderPath = `${process.env.DSQL_USER_DB_SCHEMA_PATH}/user-${newUser.payload.insertId}`;
|
||||
|
||||
let newUserMediaFolderPath = path.join(
|
||||
STATIC_ROOT,
|
||||
`images/user-images/user-${newUser.payload.insertId}`
|
||||
);
|
||||
|
||||
fs.mkdirSync(newUserSchemaFolderPath);
|
||||
fs.mkdirSync(newUserMediaFolderPath);
|
||||
|
||||
fs.writeFileSync(
|
||||
`${newUserSchemaFolderPath}/main.json`,
|
||||
JSON.stringify([]),
|
||||
"utf8"
|
||||
);
|
||||
userPublicMediaDir &&
|
||||
fs.mkdirSync(userPublicMediaDir, { recursive: true });
|
||||
userPrivateMediaDir &&
|
||||
fs.mkdirSync(userPrivateMediaDir, { recursive: true });
|
||||
}
|
||||
|
||||
return await loginSocialUser({
|
||||
|
@ -1,16 +1,9 @@
|
||||
import addAdminUserOnLogin from "../../backend/addAdminUserOnLogin";
|
||||
import dbHandler from "../../backend/dbHandler";
|
||||
import {
|
||||
APILoginFunctionReturn,
|
||||
DATASQUIREL_LoggedInUser,
|
||||
} from "../../../types";
|
||||
import { APIResponseObject } from "../../../types";
|
||||
import loginUser from "../../../actions/users/login-user";
|
||||
|
||||
type Param = {
|
||||
user: {
|
||||
first_name: string;
|
||||
last_name: string;
|
||||
email: string;
|
||||
social_id: string | number;
|
||||
};
|
||||
social_platform: string;
|
||||
invitation?: any;
|
||||
@ -32,68 +25,18 @@ export default async function loginSocialUser({
|
||||
database,
|
||||
additionalFields,
|
||||
debug,
|
||||
}: Param): Promise<APILoginFunctionReturn> {
|
||||
}: Param): Promise<APIResponseObject> {
|
||||
const finalDbName = database ? database : "datasquirel";
|
||||
const dbAppend = database ? `\`${finalDbName}\`.` : "";
|
||||
|
||||
const foundUserQuery = `SELECT * FROM ${dbAppend}\`users\` WHERE email=?`;
|
||||
const foundUserValues = [user.email];
|
||||
|
||||
const foundUser = (await dbHandler({
|
||||
query: foundUserQuery,
|
||||
values: foundUserValues,
|
||||
let userPayload = await loginUser({
|
||||
database: finalDbName,
|
||||
})) as any[];
|
||||
|
||||
if (!foundUser?.[0])
|
||||
return {
|
||||
success: false,
|
||||
payload: null,
|
||||
msg: "Couldn't find Social User.",
|
||||
};
|
||||
|
||||
let csrfKey =
|
||||
Math.random().toString(36).substring(2) +
|
||||
"-" +
|
||||
Math.random().toString(36).substring(2);
|
||||
|
||||
let userPayload: DATASQUIREL_LoggedInUser = {
|
||||
id: foundUser[0].id,
|
||||
uuid: foundUser[0].uuid,
|
||||
first_name: foundUser[0].first_name,
|
||||
last_name: foundUser[0].last_name,
|
||||
username: foundUser[0].username,
|
||||
user_type: foundUser[0].user_type,
|
||||
email: foundUser[0].email,
|
||||
social_id: foundUser[0].social_id,
|
||||
image: foundUser[0].image,
|
||||
image_thumbnail: foundUser[0].image_thumbnail,
|
||||
verification_status: foundUser[0].verification_status,
|
||||
social_login: foundUser[0].social_login,
|
||||
social_platform: foundUser[0].social_platform,
|
||||
csrf_k: csrfKey,
|
||||
logged_in_status: true,
|
||||
date: Date.now(),
|
||||
};
|
||||
|
||||
if (additionalFields?.[0]) {
|
||||
additionalFields.forEach((key) => {
|
||||
userPayload[key] = foundUser[0][key];
|
||||
payload: { email: user.email },
|
||||
skipPassword: true,
|
||||
skipWriteAuthFile: true,
|
||||
additionalFields,
|
||||
debug,
|
||||
useLocal: true,
|
||||
});
|
||||
}
|
||||
|
||||
if (invitation && (!database || database?.match(/^datasquirel$/))) {
|
||||
addAdminUserOnLogin({
|
||||
query: invitation,
|
||||
user: userPayload,
|
||||
});
|
||||
}
|
||||
|
||||
let result: APILoginFunctionReturn = {
|
||||
success: true,
|
||||
payload: userPayload,
|
||||
csrf: csrfKey,
|
||||
};
|
||||
|
||||
return result;
|
||||
return userPayload;
|
||||
}
|
||||
|
@ -3,9 +3,9 @@ import handleSocialDb from "../../social-login/handleSocialDb";
|
||||
import EJSON from "../../../../utils/ejson";
|
||||
import {
|
||||
APIGoogleLoginFunctionParams,
|
||||
APILoginFunctionReturn,
|
||||
GoogleOauth2User,
|
||||
} from "../../../../types";
|
||||
import { APIResponseObject } from "../../../../types";
|
||||
|
||||
/**
|
||||
* # API google login
|
||||
@ -18,7 +18,7 @@ export default async function apiGoogleLogin({
|
||||
debug,
|
||||
loginOnly,
|
||||
apiUserId,
|
||||
}: APIGoogleLoginFunctionParams): Promise<APILoginFunctionReturn> {
|
||||
}: APIGoogleLoginFunctionParams): Promise<APIResponseObject> {
|
||||
try {
|
||||
const gUser: GoogleOauth2User | undefined = await new Promise(
|
||||
(resolve, reject) => {
|
||||
|
@ -0,0 +1,97 @@
|
||||
import { ServerResponse } from "http";
|
||||
import { APIResponseObject } from "../../../types";
|
||||
import encrypt from "../../dsql/encrypt";
|
||||
import debugLog from "../../../utils/logging/debug-log";
|
||||
import getAuthCookieNames from "../cookies/get-auth-cookie-names";
|
||||
import { writeAuthFile } from "./write-auth-files";
|
||||
import grabCookieExpiryDate from "../../../utils/grab-cookie-expirt-date";
|
||||
|
||||
function debugFn(log: any, label?: string) {
|
||||
debugLog({ log, addTime: true, title: "loginUser", label });
|
||||
}
|
||||
|
||||
type Params = {
|
||||
database: string;
|
||||
httpResponse: APIResponseObject;
|
||||
response?: ServerResponse & { [s: string]: any };
|
||||
encryptionKey?: string;
|
||||
encryptionSalt?: string;
|
||||
debug?: boolean;
|
||||
skipWriteAuthFile?: boolean;
|
||||
token?: boolean;
|
||||
cleanupTokens?: boolean;
|
||||
secureCookie?: boolean;
|
||||
};
|
||||
|
||||
/**
|
||||
* # Login A user
|
||||
*/
|
||||
export default function postLoginResponseHandler({
|
||||
database,
|
||||
httpResponse,
|
||||
response,
|
||||
encryptionKey,
|
||||
encryptionSalt,
|
||||
debug,
|
||||
token,
|
||||
skipWriteAuthFile,
|
||||
cleanupTokens,
|
||||
secureCookie,
|
||||
}: Params): boolean {
|
||||
const COOKIE_EXPIRY_DATE = grabCookieExpiryDate();
|
||||
|
||||
if (httpResponse?.success) {
|
||||
let encryptedPayload = encrypt({
|
||||
data: JSON.stringify(httpResponse.payload),
|
||||
encryptionKey,
|
||||
encryptionSalt,
|
||||
});
|
||||
|
||||
try {
|
||||
if (token && encryptedPayload)
|
||||
httpResponse["token"] = encryptedPayload;
|
||||
} catch (error: any) {
|
||||
console.log("Login User HTTP Response Error:", error.message);
|
||||
}
|
||||
|
||||
const cookieNames = getAuthCookieNames({
|
||||
database,
|
||||
});
|
||||
|
||||
if (httpResponse.csrf && !skipWriteAuthFile) {
|
||||
writeAuthFile(
|
||||
httpResponse.csrf,
|
||||
JSON.stringify(httpResponse.payload),
|
||||
cleanupTokens && httpResponse.payload?.id
|
||||
? { userId: httpResponse.payload.id }
|
||||
: undefined
|
||||
);
|
||||
}
|
||||
|
||||
httpResponse["cookieNames"] = cookieNames;
|
||||
httpResponse["key"] = String(encryptedPayload);
|
||||
|
||||
const authKeyName = cookieNames.keyCookieName;
|
||||
const csrfName = cookieNames.csrfCookieName;
|
||||
|
||||
if (debug) {
|
||||
debugFn(authKeyName, "authKeyName");
|
||||
debugFn(csrfName, "csrfName");
|
||||
debugFn(encryptedPayload, "encryptedPayload");
|
||||
}
|
||||
|
||||
response?.setHeader("Set-Cookie", [
|
||||
`${authKeyName}=${encryptedPayload};samesite=strict;path=/;HttpOnly=true;Expires=${COOKIE_EXPIRY_DATE}${
|
||||
secureCookie ? ";Secure=true" : ""
|
||||
}`,
|
||||
`${csrfName}=${httpResponse.payload?.csrf_k};samesite=strict;path=/;HttpOnly=true;Expires=${COOKIE_EXPIRY_DATE}`,
|
||||
]);
|
||||
|
||||
if (debug) {
|
||||
debugFn("Response Sent!");
|
||||
}
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
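The new postLoginResponseHandler above centralises what loginUser and googleAuth previously did inline: encrypt the payload, optionally attach it as a token, write the auth file, and set the key/CSRF cookies. A hedged calling sketch follows; every value not shown in this diff (database name, payload contents) is illustrative, and the optional response/encryption parameters are omitted.

import postLoginResponseHandler from "./package-shared/functions/backend/auth/post-login-response-handler";
import { APIResponseObject } from "./package-shared/types";

// Illustrative response shape from an upstream login API call
const httpResponse: APIResponseObject = {
    success: true,
    payload: { id: 1, email: "jane@example.com", csrf_k: "abc123" },
    csrf: "abc123",
};

const handled = postLoginResponseHandler({
    database: "my_app_db", // illustrative database name
    httpResponse,
    token: true, // also expose the encrypted payload as httpResponse.token
    secureCookie: true,
    // response, encryptionKey and encryptionSalt are optional; in the package
    // they come from the calling loginUser / googleAuth invocation.
});

// `handled` is false when httpResponse.success is falsy and nothing is written.
console.log(handled);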
@ -1,6 +1,6 @@
|
||||
import _ from "lodash";
|
||||
import path from "path";
|
||||
import writeBacupFiles from "./write-backup-files";
|
||||
import writeBackupFiles from "./write-backup-files";
|
||||
import { APIResponseObject } from "../../../../../types";
|
||||
import grabDirNames from "../../../../../utils/backend/names/grab-dir-names";
|
||||
import {
|
||||
@ -19,7 +19,8 @@ export default async function suAddBackup({
|
||||
targetUserId,
|
||||
}: Params): Promise<APIResponseObject> {
|
||||
try {
|
||||
const { mainBackupDir, userBackupDir } = grabDirNames({
|
||||
const { mainBackupDir, userBackupDir, STATIC_ROOT, privateDataDir } =
|
||||
grabDirNames({
|
||||
userId: targetUserId,
|
||||
});
|
||||
|
||||
@ -63,7 +64,7 @@ export default async function suAddBackup({
|
||||
};
|
||||
}
|
||||
|
||||
const writeBackup = await writeBacupFiles({
|
||||
const writeBackup = await writeBackupFiles({
|
||||
backup: newlyAddedBackup,
|
||||
});
|
||||
|
||||
|
@ -22,6 +22,8 @@ export default async function writeBackupFiles({
|
||||
schemasBackupDirName,
|
||||
targetUserPrivateDir,
|
||||
oldSchemasDir,
|
||||
STATIC_ROOT,
|
||||
privateDataDir,
|
||||
} = grabDirNames({
|
||||
userId: backup.user_id,
|
||||
});
|
||||
|
@ -0,0 +1,73 @@
|
||||
import fs from "fs";
|
||||
import path from "path";
|
||||
import _ from "lodash";
|
||||
import { DSQL_DATASQUIREL_BACKUPS } from "../../../../types/dsql";
|
||||
import { APIResponseObject } from "../../../../types";
|
||||
import grabDirNames from "../../../../utils/backend/names/grab-dir-names";
|
||||
import { NextApiResponse } from "next";
|
||||
import { execSync } from "child_process";
|
||||
|
||||
type Params = {
|
||||
backup: DSQL_DATASQUIREL_BACKUPS;
|
||||
res: NextApiResponse;
|
||||
};
|
||||
|
||||
export default async function downloadBackup({
|
||||
backup,
|
||||
res,
|
||||
}: Params): Promise<APIResponseObject> {
|
||||
try {
|
||||
const { mainBackupDir, userBackupDir, tempBackupExportName } =
|
||||
grabDirNames({
|
||||
userId: backup.user_id,
|
||||
});
|
||||
|
||||
if (backup.user_id && !userBackupDir) {
|
||||
return {
|
||||
success: false,
|
||||
msg: `Error grabbing user backup directory`,
|
||||
};
|
||||
}
|
||||
|
||||
if (!backup.uuid) {
|
||||
return {
|
||||
success: false,
|
||||
msg: `No UUID found for backup`,
|
||||
};
|
||||
}
|
||||
|
||||
const allBackupsDir =
|
||||
backup.user_id && userBackupDir ? userBackupDir : mainBackupDir;
|
||||
|
||||
const targetBackupDir = path.join(allBackupsDir, backup.uuid);
|
||||
|
||||
const zipFilesCmd = execSync(
|
||||
`tar -cJf ${tempBackupExportName} ${backup.uuid}`,
|
||||
{
|
||||
cwd: allBackupsDir,
|
||||
}
|
||||
);
|
||||
|
||||
const exportFilePath = path.join(allBackupsDir, tempBackupExportName);
|
||||
|
||||
const readStream = fs.createReadStream(exportFilePath);
|
||||
readStream.pipe(res);
|
||||
|
||||
readStream.on("end", () => {
|
||||
console.log("Pipe Complete!");
|
||||
setTimeout(() => {
|
||||
execSync(`rm -f ${tempBackupExportName}`, {
|
||||
cwd: allBackupsDir,
|
||||
});
|
||||
}, 1000);
|
||||
});
|
||||
|
||||
return { success: true };
|
||||
} catch (error: any) {
|
||||
return {
|
||||
success: false,
|
||||
msg: `Failed to write backup files`,
|
||||
error: error.message,
|
||||
};
|
||||
}
|
||||
}
|
@ -1,8 +1,5 @@
|
||||
import sanitizeHtml from "sanitize-html";
|
||||
import sanitizeHtmlOptions from "../html/sanitizeHtmlOptions";
|
||||
import updateDbEntry from "./updateDbEntry";
|
||||
import _ from "lodash";
|
||||
import encrypt from "../../dsql/encrypt";
|
||||
import connDbHandler from "../../../utils/db/conn-db-handler";
|
||||
import checkIfIsMaster from "../../../utils/check-if-is-master";
|
||||
import { DbContextsArray } from "./runQuery";
|
||||
@ -13,6 +10,7 @@ import {
|
||||
PostInsertReturn,
|
||||
} from "../../../types";
|
||||
import purgeDefaultFields from "../../../utils/purge-default-fields";
|
||||
import grabParsedValue from "./grab-parsed-value";
|
||||
|
||||
export type AddDbEntryParam<
|
||||
T extends { [k: string]: any } = any,
|
||||
@ -127,64 +125,22 @@ export default async function addDbEntry<
|
||||
const dataKey = dataKeys[i];
|
||||
let value = data[dataKey];
|
||||
|
||||
const targetFieldSchemaArray = tableSchema
|
||||
? tableSchema?.fields?.filter(
|
||||
(field) => field.fieldName == dataKey
|
||||
)
|
||||
: null;
|
||||
const targetFieldSchema =
|
||||
targetFieldSchemaArray && targetFieldSchemaArray[0]
|
||||
? targetFieldSchemaArray[0]
|
||||
: null;
|
||||
|
||||
if (value == null || value == undefined) continue;
|
||||
|
||||
if (
|
||||
targetFieldSchema?.dataType?.match(/int$/i) &&
|
||||
typeof value == "string" &&
|
||||
!value?.match(/./)
|
||||
)
|
||||
continue;
|
||||
|
||||
if (targetFieldSchema?.encrypted) {
|
||||
value = encrypt({
|
||||
data: value,
|
||||
const parsedValue = grabParsedValue({
|
||||
dataKey,
|
||||
encryptionKey,
|
||||
encryptionSalt,
|
||||
tableSchema,
|
||||
value,
|
||||
});
|
||||
console.log("DSQL: Encrypted value =>", value);
|
||||
}
|
||||
|
||||
const htmlRegex = /<[^>]+>/g;
|
||||
|
||||
if (
|
||||
targetFieldSchema?.richText ||
|
||||
String(value).match(htmlRegex)
|
||||
) {
|
||||
value = sanitizeHtml(value, sanitizeHtmlOptions);
|
||||
}
|
||||
|
||||
if (targetFieldSchema?.pattern) {
|
||||
const pattern = new RegExp(
|
||||
targetFieldSchema.pattern,
|
||||
targetFieldSchema.patternFlags || ""
|
||||
);
|
||||
if (!pattern.test(value)) {
|
||||
console.log("DSQL: Pattern not matched =>", value);
|
||||
value = "";
|
||||
}
|
||||
}
|
||||
if (typeof parsedValue == "undefined") continue;
|
||||
|
||||
insertKeysArray.push("`" + dataKey + "`");
|
||||
|
||||
if (typeof value === "object") {
|
||||
value = JSON.stringify(value);
|
||||
}
|
||||
|
||||
if (typeof value == "number") {
|
||||
insertValuesArray.push(String(value));
|
||||
if (typeof parsedValue == "number") {
|
||||
insertValuesArray.push(String(parsedValue));
|
||||
} else {
|
||||
insertValuesArray.push(value);
|
||||
insertValuesArray.push(parsedValue);
|
||||
}
|
||||
} catch (error: any) {
|
||||
console.log(
|
||||
|
93 package-shared/functions/backend/db/grab-parsed-value.ts (new file)
@ -0,0 +1,93 @@
|
||||
import sanitizeHtml from "sanitize-html";
|
||||
import sanitizeHtmlOptions from "../html/sanitizeHtmlOptions";
|
||||
import encrypt from "../../dsql/encrypt";
|
||||
import { DSQL_TableSchemaType } from "../../../types";
|
||||
import _ from "lodash";
|
||||
|
||||
type Param = {
|
||||
value?: any;
|
||||
tableSchema?: DSQL_TableSchemaType;
|
||||
encryptionKey?: string;
|
||||
encryptionSalt?: string;
|
||||
dataKey: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* # Update DB Function
|
||||
* @description
|
||||
*/
|
||||
export default function grabParsedValue({
|
||||
value,
|
||||
tableSchema,
|
||||
encryptionKey,
|
||||
encryptionSalt,
|
||||
dataKey,
|
||||
}: Param): any {
|
||||
let newValue = value;
|
||||
|
||||
const targetFieldSchemaArray = tableSchema
|
||||
? tableSchema?.fields?.filter((field) => field.fieldName === dataKey)
|
||||
: null;
|
||||
const targetFieldSchema =
|
||||
targetFieldSchemaArray && targetFieldSchemaArray[0]
|
||||
? targetFieldSchemaArray[0]
|
||||
: null;
|
||||
|
||||
if (typeof newValue == "undefined") return;
|
||||
if (typeof newValue == "object" && !newValue) newValue = "";
|
||||
|
||||
const htmlRegex = /<[^>]+>/g;
|
||||
|
||||
if (targetFieldSchema?.richText || String(newValue).match(htmlRegex)) {
|
||||
newValue = sanitizeHtml(newValue, sanitizeHtmlOptions);
|
||||
}
|
||||
|
||||
if (
|
||||
targetFieldSchema?.dataType?.match(/int$/i) &&
|
||||
typeof value == "string" &&
|
||||
!value?.match(/./)
|
||||
) {
|
||||
value = "";
|
||||
}
|
||||
|
||||
if (targetFieldSchema?.encrypted) {
|
||||
newValue = encrypt({
|
||||
data: newValue,
|
||||
encryptionKey,
|
||||
encryptionSalt,
|
||||
});
|
||||
}
|
||||
|
||||
if (typeof newValue === "object") {
|
||||
newValue = JSON.stringify(newValue);
|
||||
}
|
||||
|
||||
if (targetFieldSchema?.pattern) {
|
||||
const pattern = new RegExp(
|
||||
targetFieldSchema.pattern,
|
||||
targetFieldSchema.patternFlags || ""
|
||||
);
|
||||
if (!pattern.test(newValue)) {
|
||||
console.log("DSQL: Pattern not matched =>", newValue);
|
||||
newValue = "";
|
||||
}
|
||||
}
|
||||
|
||||
if (typeof newValue === "string" && newValue.match(/^null$/i)) {
|
||||
newValue = {
|
||||
toSqlString: function () {
|
||||
return "NULL";
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (typeof newValue === "string" && !newValue.match(/./i)) {
|
||||
newValue = {
|
||||
toSqlString: function () {
|
||||
return "NULL";
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return newValue;
|
||||
}
|
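A hedged sketch of calling the new grabParsedValue helper directly. The schema object is illustrative, but the field options it exercises (richText, encrypted, pattern) are the ones handled above.

import grabParsedValue from "./package-shared/functions/backend/db/grab-parsed-value";
import { DSQL_TableSchemaType } from "./package-shared/types";

// Illustrative table schema with a single rich-text field
const tableSchema = {
    tableName: "users",
    fields: [{ fieldName: "bio", dataType: "TEXT", richText: true }],
} as DSQL_TableSchemaType;

const parsedValue = grabParsedValue({
    dataKey: "bio",
    value: "<p>Hello <script>alert(1)</script></p>",
    tableSchema,
});

// undefined => the caller (addDbEntry / updateDbEntry) skips the column entirely;
// empty or "null" strings come back as { toSqlString: () => "NULL" };
// otherwise the value is sanitized, optionally encrypted, and objects are JSON-stringified.
console.log(parsedValue); // script tag stripped by sanitize-html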
@ -1,6 +1,3 @@
|
||||
import sanitizeHtml from "sanitize-html";
|
||||
import sanitizeHtmlOptions from "../html/sanitizeHtmlOptions";
|
||||
import encrypt from "../../dsql/encrypt";
|
||||
import checkIfIsMaster from "../../../utils/check-if-is-master";
|
||||
import connDbHandler from "../../../utils/db/conn-db-handler";
|
||||
import { DbContextsArray } from "./runQuery";
|
||||
@ -11,6 +8,7 @@ import {
|
||||
} from "../../../types";
|
||||
import _ from "lodash";
|
||||
import purgeDefaultFields from "../../../utils/purge-default-fields";
|
||||
import grabParsedValue from "./grab-parsed-value";
|
||||
|
||||
type Param<T extends { [k: string]: any } = any> = {
|
||||
dbContext?: (typeof DbContextsArray)[number];
|
||||
@ -82,69 +80,22 @@ export default async function updateDbEntry<
|
||||
const dataKey = dataKeys[i];
|
||||
let value = newData[dataKey];
|
||||
|
||||
const targetFieldSchemaArray = tableSchema
|
||||
? tableSchema?.fields?.filter(
|
||||
(field) => field.fieldName === dataKey
|
||||
)
|
||||
: null;
|
||||
const targetFieldSchema =
|
||||
targetFieldSchemaArray && targetFieldSchemaArray[0]
|
||||
? targetFieldSchemaArray[0]
|
||||
: null;
|
||||
|
||||
if (value == null || value == undefined) continue;
|
||||
|
||||
const htmlRegex = /<[^>]+>/g;
|
||||
|
||||
if (targetFieldSchema?.richText || String(value).match(htmlRegex)) {
|
||||
value = sanitizeHtml(value, sanitizeHtmlOptions);
|
||||
}
|
||||
|
||||
if (targetFieldSchema?.encrypted) {
|
||||
value = encrypt({
|
||||
data: value,
|
||||
const parsedValue = grabParsedValue({
|
||||
dataKey,
|
||||
encryptionKey,
|
||||
encryptionSalt,
|
||||
tableSchema,
|
||||
value,
|
||||
});
|
||||
}
|
||||
|
||||
if (typeof value === "object") {
|
||||
value = JSON.stringify(value);
|
||||
}
|
||||
|
||||
if (targetFieldSchema?.pattern) {
|
||||
const pattern = new RegExp(
|
||||
targetFieldSchema.pattern,
|
||||
targetFieldSchema.patternFlags || ""
|
||||
);
|
||||
if (!pattern.test(value)) {
|
||||
console.log("DSQL: Pattern not matched =>", value);
|
||||
value = "";
|
||||
}
|
||||
}
|
||||
|
||||
if (typeof value === "string" && value.match(/^null$/i)) {
|
||||
value = {
|
||||
toSqlString: function () {
|
||||
return "NULL";
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (typeof value === "string" && !value.match(/./i)) {
|
||||
value = {
|
||||
toSqlString: function () {
|
||||
return "NULL";
|
||||
},
|
||||
};
|
||||
}
|
||||
if (typeof parsedValue == "undefined") continue;
|
||||
|
||||
updateKeyValueArray.push(`\`${dataKey}\`=?`);
|
||||
|
||||
if (typeof value == "number") {
|
||||
updateValues.push(String(value));
|
||||
if (typeof parsedValue == "number") {
|
||||
updateValues.push(String(parsedValue));
|
||||
} else {
|
||||
updateValues.push(value);
|
||||
updateValues.push(parsedValue);
|
||||
}
|
||||
|
||||
////////////////////////////////////////
|
||||
|
@ -15,7 +15,7 @@ export default async function handleBackup({
|
||||
}: HandleBackupParams) {
|
||||
const { appConfig } = grabConfig();
|
||||
|
||||
const maxBackups = appConfig.main.max_backups?.value || 20;
|
||||
const maxBackups = appConfig.main.max_backups?.value || 4;
|
||||
|
||||
const { count: existingAppBackupsCount } =
|
||||
await dbGrabUserResource<DSQL_DATASQUIREL_BACKUPS>({
|
||||
@ -33,7 +33,9 @@ export default async function handleBackup({
|
||||
});
|
||||
|
||||
if (existingAppBackupsCount && existingAppBackupsCount >= maxBackups) {
|
||||
const { single: oldestAppBackup } =
|
||||
console.log(`Backups exceed Limit ...`);
|
||||
|
||||
const { batch: oldestAppBackups } =
|
||||
await dbGrabUserResource<DSQL_DATASQUIREL_BACKUPS>({
|
||||
tableName: "backups",
|
||||
isSuperUser: true,
|
||||
@ -46,14 +48,19 @@ export default async function handleBackup({
|
||||
},
|
||||
order: {
|
||||
field: "id",
|
||||
strategy: "ASC",
|
||||
strategy: "DESC",
|
||||
},
|
||||
limit: 1,
|
||||
},
|
||||
});
|
||||
|
||||
if (oldestAppBackup?.id) {
|
||||
await deleteBackup({ backup: oldestAppBackup });
|
||||
if (oldestAppBackups) {
|
||||
for (let i = 0; i < oldestAppBackups.length; i++) {
|
||||
const backup = oldestAppBackups[i];
|
||||
console.log(`Handling Backup ${backup.uuid} ...`);
|
||||
if (i < maxBackups - 1) continue;
|
||||
console.log(`Deleting Backup ${backup.uuid} ...`);
|
||||
await deleteBackup({ backup: backup });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
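The rewritten branch above now fetches the backups ordered newest-first by id and deletes every one from index maxBackups - 1 onward, instead of removing a single oldest backup. A standalone sketch of that rule, with illustrative data:

// Keep the newest (maxBackups - 1) backups so the backup about to be created still fits.
function backupsToDelete<T extends { id?: number }>(
    backupsNewestFirst: T[],
    maxBackups: number
): T[] {
    return backupsNewestFirst.filter((_, index) => index >= maxBackups - 1);
}

// With maxBackups = 4 and five existing backups, the two oldest are removed,
// leaving three plus the new backup.
const existing = [{ id: 9 }, { id: 8 }, { id: 7 }, { id: 6 }, { id: 5 }];
console.log(backupsToDelete(existing, 4)); // [{ id: 6 }, { id: 5 }]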
@ -7,6 +7,7 @@ type Param = {
|
||||
url?: string;
|
||||
method?: string;
|
||||
hostname?: string;
|
||||
host?: string;
|
||||
path?: string;
|
||||
port?: number | string;
|
||||
headers?: object;
|
||||
@ -16,28 +17,24 @@ type Param = {
|
||||
/**
|
||||
* # Make Https Request
|
||||
*/
|
||||
export default function httpsRequest({
|
||||
export default function httpsRequest<Res extends any = any>({
|
||||
url,
|
||||
method,
|
||||
hostname,
|
||||
host,
|
||||
path,
|
||||
headers,
|
||||
body,
|
||||
port,
|
||||
scheme,
|
||||
}: Param) {
|
||||
}: Param): Promise<Res> {
|
||||
const reqPayloadString = body ? JSON.stringify(body) : null;
|
||||
|
||||
const PARSED_URL = url ? new URL(url) : null;
|
||||
|
||||
////////////////////////////////////////////////
|
||||
////////////////////////////////////////////////
|
||||
////////////////////////////////////////////////
|
||||
|
||||
/** @type {any} */
|
||||
let requestOptions: any = {
|
||||
method: method || "GET",
|
||||
hostname: PARSED_URL ? PARSED_URL.hostname : hostname,
|
||||
hostname: PARSED_URL ? PARSED_URL.hostname : host || hostname,
|
||||
port: scheme?.match(/https/i)
|
||||
? 443
|
||||
: PARSED_URL
|
||||
@ -51,7 +48,6 @@ export default function httpsRequest({
|
||||
};
|
||||
|
||||
if (path) requestOptions.path = path;
|
||||
// if (href) requestOptions.href = href;
|
||||
|
||||
if (headers) requestOptions.headers = headers;
|
||||
if (body) {
|
||||
@ -61,10 +57,6 @@ export default function httpsRequest({
|
||||
: undefined;
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////
|
||||
////////////////////////////////////////////////
|
||||
////////////////////////////////////////////////
|
||||
|
||||
return new Promise((res, rej) => {
|
||||
const httpsRequest = (
|
||||
scheme?.match(/https/i)
|
||||
@ -73,25 +65,21 @@ export default function httpsRequest({
|
||||
? https
|
||||
: http
|
||||
).request(
|
||||
/* ====== Request Options object ====== */
|
||||
requestOptions,
|
||||
|
||||
////////////////////////////////////////////////
|
||||
////////////////////////////////////////////////
|
||||
////////////////////////////////////////////////
|
||||
|
||||
/* ====== Callback function ====== */
|
||||
(response) => {
|
||||
var str = "";
|
||||
|
||||
// ## another chunk of data has been received, so append it to `str`
|
||||
response.on("data", function (chunk) {
|
||||
str += chunk;
|
||||
});
|
||||
|
||||
// ## the whole response has been received, so we just print it out here
|
||||
response.on("end", function () {
|
||||
res(str);
|
||||
try {
|
||||
res(JSON.parse(str));
|
||||
} catch (error) {
|
||||
res(str as any);
|
||||
}
|
||||
});
|
||||
|
||||
response.on("error", (error) => {
|
||||
|
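With the changes above, httpsRequest is generic over the parsed response and resolves with JSON.parse(str) when the body parses, falling back to the raw string otherwise. A hedged usage sketch; the URL and response shape are illustrative.

import httpsRequest from "./package-shared/functions/backend/httpsRequest";

type VersionRes = { version?: string };

async function checkVersion() {
    const res = await httpsRequest<VersionRes>({
        url: "https://example.com/api/version",
        method: "GET",
        headers: { "Content-Type": "application/json" },
    });

    // When the body isn't valid JSON, the promise resolves with the raw string instead.
    console.log(res.version);
}

checkVersion();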
@ -19,6 +19,7 @@ export const DsqlTables = [
|
||||
"mariadb_users",
|
||||
"mariadb_user_databases",
|
||||
"mariadb_user_tables",
|
||||
"user_private_media_keys",
|
||||
] as const
|
||||
|
||||
export type DSQL_DATASQUIREL_USERS = {
|
||||
@ -393,3 +394,21 @@ export type DSQL_DATASQUIREL_MARIADB_USER_TABLES = {
|
||||
date_updated_code?: number;
|
||||
date_updated_timestamp?: string;
|
||||
}
|
||||
|
||||
export type DSQL_DATASQUIREL_USER_PRIVATE_MEDIA_KEYS = {
|
||||
id?: number;
|
||||
uuid?: string;
|
||||
user_id?: number;
|
||||
media_id?: number;
|
||||
key?: string;
|
||||
description?: string;
|
||||
expiration?: number;
|
||||
expiration_paradigm?: "seconds" | "minutes" | "hours" | "days" | "weeks" | "months" | "years";
|
||||
expiration_milliseconds?: number;
|
||||
date_created?: string;
|
||||
date_created_code?: number;
|
||||
date_created_timestamp?: string;
|
||||
date_updated?: string;
|
||||
date_updated_code?: number;
|
||||
date_updated_timestamp?: string;
|
||||
}
|
@ -429,6 +429,8 @@ export interface PostInsertReturn {
|
||||
export type UserType = DATASQUIREL_LoggedInUser & {
|
||||
isSuperUser?: boolean;
|
||||
staticHost?: string;
|
||||
appHost?: string;
|
||||
appName?: string;
|
||||
};
|
||||
|
||||
export interface ApiKeyDef {
|
||||
@ -1596,6 +1598,9 @@ export type PagePropsType = {
|
||||
appVersion?: (typeof AppVersions)[number];
|
||||
isMailAvailable?: boolean;
|
||||
websocketURL?: string;
|
||||
docsObject?: DocsServerProps | null;
|
||||
docsPages?: DocsLinkType[] | null;
|
||||
docsPageEditURL?: string | null;
|
||||
};
|
||||
|
||||
export type APIResponseObject<T extends any = any> = {
|
||||
@ -1649,6 +1654,8 @@ export const WebSocketEvents = [
|
||||
"client:dev:queue",
|
||||
"client:delete-queue",
|
||||
"client:pty-shell",
|
||||
"client:su-logs",
|
||||
"client:su-kill-logs",
|
||||
|
||||
/**
|
||||
* # Server Events
|
||||
@ -1663,12 +1670,16 @@ export const WebSocketEvents = [
|
||||
"server:dev:queue",
|
||||
"server:queue-deleted",
|
||||
"server:pty-shell",
|
||||
"server:su-logs",
|
||||
"server:su-kill-logs",
|
||||
] as const;
|
||||
|
||||
export type WebSocketDataType = {
|
||||
event: (typeof WebSocketEvents)[number];
|
||||
data?: {
|
||||
queue?: DSQL_DATASQUIREL_PROCESS_QUEUE;
|
||||
containerName?: string;
|
||||
killLogs?: boolean;
|
||||
};
|
||||
error?: string;
|
||||
message?: string;
|
||||
@ -1680,15 +1691,6 @@ export const DatasquirelWindowEvents = [
|
||||
"queue-running",
|
||||
] as const;
|
||||
|
||||
export type DatasquirelWindowEventPayloadType = {
|
||||
event: (typeof DatasquirelWindowEvents)[number];
|
||||
data?: {
|
||||
queue?: DSQL_DATASQUIREL_PROCESS_QUEUE;
|
||||
};
|
||||
error?: string;
|
||||
message?: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* # Docker Compose Types
|
||||
*/
|
||||
@ -1798,6 +1800,7 @@ export type DsqlAppData = {
|
||||
DSQL_FACEBOOK_APP_ID?: string | null;
|
||||
DSQL_GITHUB_ID?: string | null;
|
||||
DSQL_HOST_MACHINE_IP?: string | null;
|
||||
DSQL_DEPLOYMENT_NAME?: string | null;
|
||||
};
|
||||
|
||||
export const MediaTypes = ["image", "file", "video"] as const;
|
||||
@ -2008,6 +2011,7 @@ export type DefaultLocalResourcesHookParams<
|
||||
> = {
|
||||
refresh?: number;
|
||||
setLoading?: React.Dispatch<React.SetStateAction<boolean>>;
|
||||
setReady?: React.Dispatch<React.SetStateAction<boolean>>;
|
||||
loadingEndTimeout?: number;
|
||||
user?: UserType | null;
|
||||
ready?: boolean;
|
||||
@ -2422,6 +2426,10 @@ export type SendEmailCodeParams = {
|
||||
useLocal?: boolean;
|
||||
apiVersion?: string;
|
||||
dbUserId?: string | number;
|
||||
/**
|
||||
* HTML string with {{code}} placeholder for the code
|
||||
*/
|
||||
html?: string;
|
||||
};
|
||||
|
||||
export type UpdateUserParams<
|
||||
@ -2479,4 +2487,158 @@ export type GoogleAuthParams = {
|
||||
*/
|
||||
useLocal?: boolean;
|
||||
apiVersion?: string;
|
||||
skipWriteAuthFile?: boolean;
|
||||
cleanupTokens?: boolean;
|
||||
};
|
||||
|
||||
export type ContactFormType = {
|
||||
first_name?: string;
|
||||
last_name?: string;
|
||||
email?: string;
|
||||
phone?: string;
|
||||
message?: string;
|
||||
};
|
||||
|
||||
export const TimeParadigms = [
|
||||
{
|
||||
value: "seconds",
|
||||
label: "Seconds",
|
||||
},
|
||||
{
|
||||
value: "minutes",
|
||||
label: "Minutes",
|
||||
},
|
||||
{
|
||||
value: "hours",
|
||||
label: "Hours",
|
||||
},
|
||||
{
|
||||
value: "days",
|
||||
label: "Days",
|
||||
},
|
||||
{
|
||||
value: "weeks",
|
||||
label: "Weeks",
|
||||
},
|
||||
{
|
||||
value: "months",
|
||||
label: "Months",
|
||||
},
|
||||
{
|
||||
value: "years",
|
||||
label: "Years",
|
||||
},
|
||||
] as const;
|
||||
|
||||
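TimeParadigms is the option list behind fields like expiration_paradigm on the new user_private_media_keys type. An illustrative helper (not part of this commit) for turning an expiration plus a paradigm into milliseconds; month and year factors are rough calendar approximations, and TimeParadigms is assumed in scope from the declaration above.

type TimeParadigmValue = (typeof TimeParadigms)[number]["value"];

const PARADIGM_MS: Record<TimeParadigmValue, number> = {
    seconds: 1_000,
    minutes: 60 * 1_000,
    hours: 60 * 60 * 1_000,
    days: 24 * 60 * 60 * 1_000,
    weeks: 7 * 24 * 60 * 60 * 1_000,
    months: 30 * 24 * 60 * 60 * 1_000,
    years: 365 * 24 * 60 * 60 * 1_000,
};

function toExpirationMilliseconds(
    expiration: number,
    paradigm: TimeParadigmValue
): number {
    return expiration * PARADIGM_MS[paradigm];
}

// e.g. a private media key that expires after 2 days
const expirationMs = toExpirationMilliseconds(2, "days"); // 172_800_000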
export type GithubPublicAPIResJSON = {
|
||||
name: string;
|
||||
path: string;
|
||||
sha: string;
|
||||
size: number;
|
||||
url: string;
|
||||
html_url: string;
|
||||
git_url: string;
|
||||
download_url: string;
|
||||
type: string;
|
||||
content: string;
|
||||
encoding: string;
|
||||
_links: {
|
||||
self: string;
|
||||
git: string;
|
||||
html: string;
|
||||
};
|
||||
};
|
||||
|
||||
export type DocsServerProps = {
|
||||
md?: string | null;
|
||||
mdx_source?: any | null;
|
||||
meta_title?: string | null;
|
||||
meta_description?: string | null;
|
||||
page_title?: string | null;
|
||||
page_description?: string | null;
|
||||
page_path?: string | null;
|
||||
};
|
||||
|
||||
export type DocsLinkType = {
|
||||
title: string;
|
||||
href: string;
|
||||
strict?: boolean;
|
||||
children?: DocsLinkType[];
|
||||
editPage?: string;
|
||||
};
|
||||
|
||||
export interface GiteaBranchRes {
|
||||
name: string;
|
||||
commit: GiteaBranchResCommit;
|
||||
protected: boolean;
|
||||
required_approvals: number;
|
||||
enable_status_check: boolean;
|
||||
status_check_contexts: any[];
|
||||
user_can_push: boolean;
|
||||
user_can_merge: boolean;
|
||||
effective_branch_protection_name: string;
|
||||
}
|
||||
|
||||
export interface GiteaBranchResCommit {
|
||||
id: string;
|
||||
message: string;
|
||||
url: string;
|
||||
author: GiteaBranchResCommitAuthor;
|
||||
committer: GiteaBranchResCommitCommitter;
|
||||
verification: GiteaBranchResCommitVerification;
|
||||
timestamp: string;
|
||||
added: any;
|
||||
removed: any;
|
||||
modified: any;
|
||||
}
|
||||
|
||||
export interface GiteaBranchResCommitAuthor {
|
||||
name: string;
|
||||
email: string;
|
||||
username: string;
|
||||
}
|
||||
|
||||
export interface GiteaBranchResCommitCommitter {
|
||||
name: string;
|
||||
email: string;
|
||||
username: string;
|
||||
}
|
||||
|
||||
export interface GiteaBranchResCommitVerification {
|
||||
verified: boolean;
|
||||
reason: string;
|
||||
signature: string;
|
||||
signer: any;
|
||||
payload: string;
|
||||
}
|
||||
|
||||
export interface GiteaTreeRes {
|
||||
sha: string;
|
||||
url: string;
|
||||
tree: GiteaTreeResTree[];
|
||||
truncated: boolean;
|
||||
page: number;
|
||||
total_count: number;
|
||||
}
|
||||
|
||||
export interface GiteaTreeResTree {
|
||||
path: string;
|
||||
mode: string;
|
||||
type: "tree" | "blob";
|
||||
size: number;
|
||||
sha: string;
|
||||
url: string;
|
||||
}
|
||||
|
||||
export const OpsActions = [
|
||||
"exit",
|
||||
"test",
|
||||
"restart-web-app",
|
||||
"restart-db",
|
||||
"restart-all",
|
||||
"clear",
|
||||
] as const;
|
||||
|
||||
export type OpsObject = {
|
||||
action: (typeof OpsActions)[number];
|
||||
};
|
||||
|
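OpsActions pairs with the new opsJSONFileName ("ops.json") added to grabDirNames below; presumably an OpsObject is written to that file for another process to act on. A hedged sketch of producing such a command; the target directory is assumed, and OpsObject is taken from the declaration above.

import fs from "fs";
import path from "path";

const opsCommand: OpsObject = { action: "restart-web-app" };

// grabDirNames().opsJSONFileName === "ops.json"; the directory it lives in is assumed here.
const opsFilePath = path.join(process.cwd(), "ops.json");
fs.writeFileSync(opsFilePath, JSON.stringify(opsCommand), "utf-8");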
@ -269,6 +269,10 @@ export default function grabDirNames(param?: Param) {
|
||||
distroDirName
|
||||
);
|
||||
|
||||
const tempBackupExportName = "tmp-export-backup.tar.xz";
|
||||
|
||||
const opsJSONFileName = "ops.json";
|
||||
|
||||
return {
|
||||
appDir,
|
||||
privateDataDir,
|
||||
@ -362,5 +366,7 @@ export default function grabDirNames(param?: Param) {
|
||||
dsqlDbDockerComposeFileAlt,
|
||||
dsqlDbDockerComposeFileName,
|
||||
dsqlDbDockerComposeFileNameAlt,
|
||||
tempBackupExportName,
|
||||
opsJSONFileName,
|
||||
};
|
||||
}
|
||||
|
@ -1,5 +1,10 @@
|
||||
export default function grabDockerStackServicesNames() {
|
||||
const deploymentName = process.env.DSQL_DEPLOYMENT_NAME || "dsql";
|
||||
type Params = {
|
||||
deploymentName?: string | null;
|
||||
};
|
||||
|
||||
export default function grabDockerStackServicesNames(params?: Params) {
|
||||
const deploymentName =
|
||||
params?.deploymentName || process.env.DSQL_DEPLOYMENT_NAME || "dsql";
|
||||
|
||||
const maxScaleServiceName = `${deploymentName}-dsql-maxscale`;
|
||||
const dbServiceName = `${deploymentName}-dsql-db`;
|
||||
|
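The change above lets callers override the deployment name instead of relying solely on DSQL_DEPLOYMENT_NAME. A short sketch of both call shapes; the import is omitted because the module path isn't shown in this diff, and the service-name values follow directly from the template strings above.

// Falls back to process.env.DSQL_DEPLOYMENT_NAME, then "dsql"
const defaults = grabDockerStackServicesNames();
// defaults.dbServiceName === "dsql-dsql-db" when no env override is set

// New in this commit: pass an explicit deployment name
const staging = grabDockerStackServicesNames({ deploymentName: "staging" });
// staging.maxScaleServiceName === "staging-dsql-maxscale"
// staging.dbServiceName === "staging-dsql-db"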
@ -21,6 +21,7 @@ export default function grabIPAddresses() {
|
||||
const mainDBIP = `${globalIPPrefix}.${db}`;
|
||||
const webSocketIP = `${globalIPPrefix}.${websocket}`;
|
||||
const dbCronIP = `${globalIPPrefix}.${db_cron}`;
|
||||
const reverseProxyIP = `${globalIPPrefix}.${reverse_proxy}`;
|
||||
const localHostIP = `${globalIPPrefix}.1`;
|
||||
|
||||
return {
|
||||
@ -32,5 +33,6 @@ export default function grabIPAddresses() {
|
||||
globalIPPrefix,
|
||||
webSocketIP,
|
||||
dbCronIP,
|
||||
reverseProxyIP,
|
||||
};
|
||||
}
|
||||
|
16 package-shared/utils/generate-random-string.ts (new file)
@ -0,0 +1,16 @@
|
||||
export default function genRndStr(length?: number, symbols?: boolean) {
|
||||
let characters =
|
||||
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
|
||||
if (symbols) characters += "-_[]()@";
|
||||
|
||||
let result = "";
|
||||
|
||||
const finalLength = length || 12;
|
||||
|
||||
for (let i = 0; i < finalLength; i++) {
|
||||
const randomIndex = Math.floor(Math.random() * characters.length);
|
||||
result += characters.charAt(randomIndex);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
21 package-shared/utils/grab-machine-ip.ts (new file)
@ -0,0 +1,21 @@
|
||||
import { networkInterfaces } from "os";
|
||||
|
||||
export default function getMachineIPAddress() {
|
||||
try {
|
||||
const interfaces = networkInterfaces();
|
||||
for (const ifaceName in interfaces) {
|
||||
const iface = interfaces[ifaceName];
|
||||
if (Array.isArray(iface)) {
|
||||
for (const address of iface) {
|
||||
if (address.family === "IPv4" && !address.internal) {
|
||||
return address.address;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
} catch (error: any) {
|
||||
console.error(`Error accessing network interfaces: ${error.message}`);
|
||||
return null;
|
||||
}
|
||||
}
|
@ -18,7 +18,7 @@ export default function grabUserMainSqlUserName({
|
||||
|
||||
const finalUsername = username || sqlUsername;
|
||||
const finalHost = HOST || maxScaleIP || "127.0.0.1";
|
||||
const fullName = `${finalUsername}@${webAppIP}`;
|
||||
const fullName = `${finalUsername}@${finalHost}`;
|
||||
|
||||
return {
|
||||
username: finalUsername,
|
||||
|
@ -1,62 +0,0 @@
|
||||
import grabDbSSL from "./backend/grabDbSSL";
|
||||
import mariadb, { ConnectionConfig } from "mariadb";
|
||||
|
||||
type Params = {
|
||||
useLocal?: boolean;
|
||||
dbConfig?: ConnectionConfig;
|
||||
ssl?: boolean;
|
||||
connectionLimit?: number;
|
||||
};
|
||||
|
||||
export default async function setupDSQLDb({ dbConfig, ssl }: Params) {
|
||||
const conn = await mariadb.createConnection({
|
||||
host: process.env.DSQL_DB_HOST,
|
||||
user: process.env.DSQL_DB_USERNAME,
|
||||
password: process.env.DSQL_DB_PASSWORD,
|
||||
database: process.env.DSQL_DB_NAME,
|
||||
charset: "utf8mb4",
|
||||
...dbConfig,
|
||||
ssl: ssl ? grabDbSSL() : undefined,
|
||||
supportBigNumbers: true,
|
||||
bigNumberStrings: false,
|
||||
dateStrings: true,
|
||||
bigIntAsNumber: true,
|
||||
metaAsArray: true,
|
||||
});
|
||||
|
||||
// const conn = mariadb.createPool({
|
||||
// host: process.env.DSQL_DB_HOST,
|
||||
// user: process.env.DSQL_DB_USERNAME,
|
||||
// password: process.env.DSQL_DB_PASSWORD,
|
||||
// database: process.env.DSQL_DB_NAME,
|
||||
// charset: "utf8mb4",
|
||||
// ...dbConfig,
|
||||
// ssl: ssl ? grabDbSSL() : undefined,
|
||||
// connectionLimit,
|
||||
// supportBigNumbers: true,
|
||||
// bigNumberStrings: false,
|
||||
// dateStrings: true,
|
||||
// });
|
||||
|
||||
// let readOnlyConnection;
|
||||
|
||||
// if (addReadOnlyConn) {
|
||||
// readOnlyConnection = mariadb.createPool({
|
||||
// host: process.env.DSQL_DB_HOST,
|
||||
// user: process.env.DSQL_DB_READ_ONLY_USERNAME,
|
||||
// password: process.env.DSQL_DB_READ_ONLY_PASSWORD,
|
||||
// database: process.env.DSQL_DB_NAME,
|
||||
// charset: "utf8mb4",
|
||||
// ...readOnlyDbConfig,
|
||||
// ssl: ssl ? grabDbSSL() : undefined,
|
||||
// connectionLimit,
|
||||
// });
|
||||
|
||||
// global.DSQL_READ_ONLY_DB_CONN = readOnlyConnection;
|
||||
// }
|
||||
|
||||
return {
|
||||
conn,
|
||||
// readOnlyConnection,
|
||||
};
|
||||
}
|
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@moduletrace/datasquirel",
|
||||
"version": "5.0.4",
|
||||
"version": "5.0.5",
|
||||
"description": "Cloud-based SQL data management tool",
|
||||
"main": "dist/index.js",
|
||||
"bin": {
|
||||
|