Refactor types
parent 11a4c1f4d9
commit d84b15c35c

.gitignore (vendored): 3 changes
@ -130,9 +130,6 @@ dist
.yarn/install-state.gz
.pnp.*

# typescript
tsconfig.json

# others
deprecated
.tmp

@ -18,7 +18,17 @@ if (!fs.existsSync(path.resolve(process.cwd(), ".env"))) {
process.exit();
}

const { DSQL_HOST, DSQL_USER, DSQL_PASS, DSQL_DB_NAME, DSQL_KEY, DSQL_REF_DB_NAME, DSQL_FULL_SYNC, DSQL_ENCRYPTION_KEY, DSQL_ENCRYPTION_SALT } = process.env;
const {
DSQL_HOST,
DSQL_USER,
DSQL_PASS,
DSQL_DB_NAME,
DSQL_KEY,
DSQL_REF_DB_NAME,
DSQL_FULL_SYNC,
DSQL_ENCRYPTION_KEY,
DSQL_ENCRYPTION_SALT,
} = process.env;

if (!DSQL_HOST?.match(/./)) {
console.log("DSQL_HOST is required in your `.env` file");
@ -38,6 +48,7 @@ if (!DSQL_PASS?.match(/./)) {
const dbSchemaLocalFilePath = path.resolve(process.cwd(), "dsql.schema.json");

async function run() {
/** @type {any} */
let schemaData;

if (DSQL_KEY && DSQL_REF_DB_NAME?.match(/./)) {
@ -46,8 +57,13 @@ async function run() {
database: DSQL_REF_DB_NAME || undefined,
});

if (!dbSchemaDataResponse.payload || Array.isArray(dbSchemaDataResponse.payload)) {
console.log("DSQL_KEY+DSQL_REF_DB_NAME => Error in fetching DB schema");
if (
!dbSchemaDataResponse.payload ||
Array.isArray(dbSchemaDataResponse.payload)
) {
console.log(
"DSQL_KEY+DSQL_REF_DB_NAME => Error in fetching DB schema"
);
console.log(dbSchemaDataResponse);
process.exit();
}
@ -62,7 +78,10 @@ async function run() {
database: DSQL_REF_DB_NAME || undefined,
});

if (!dbSchemaDataResponse.payload || !Array.isArray(dbSchemaDataResponse.payload)) {
if (
!dbSchemaDataResponse.payload ||
!Array.isArray(dbSchemaDataResponse.payload)
) {
console.log("DSQL_KEY => Error in fetching DB schema");
console.log(dbSchemaDataResponse);
process.exit();
@ -75,9 +94,13 @@ async function run() {

schemaData = fetchedDbSchemaObject;
} else if (fs.existsSync(dbSchemaLocalFilePath)) {
schemaData = [JSON.parse(fs.readFileSync(dbSchemaLocalFilePath, "utf8"))];
schemaData = [
JSON.parse(fs.readFileSync(dbSchemaLocalFilePath, "utf8")),
];
} else {
console.log("No source for DB Schema. Please provide a local `dsql.schema.json` file, or provide `DSQL_KEY` and `DSQL_REF_DB_NAME` environment variables.");
console.log(
"No source for DB Schema. Please provide a local `dsql.schema.json` file, or provide `DSQL_KEY` and `DSQL_REF_DB_NAME` environment variables."
);
process.exit();
}

@ -87,14 +110,22 @@ async function run() {
}

if (DSQL_FULL_SYNC?.match(/true/i)) {
fs.writeFileSync(dbSchemaLocalFilePath, JSON.stringify(schemaData[0], null, 4), "utf8");
fs.writeFileSync(
dbSchemaLocalFilePath,
JSON.stringify(schemaData[0], null, 4),
"utf8"
);
}

console.log(` - ${colors.FgBlue}Info:${colors.Reset} Now generating and mapping databases ...`);
console.log(
` - ${colors.FgBlue}Info:${colors.Reset} Now generating and mapping databases ...`
);

// deepcode ignore reDOS: <please specify a reason of ignoring this>
await createDbFromSchema(schemaData);
console.log(` - ${colors.FgGreen}Success:${colors.Reset} Databases created Successfully!`);
console.log(
` - ${colors.FgGreen}Success:${colors.Reset} Databases created Successfully!`
);
}

// let timeout;
@ -103,12 +134,16 @@ let interval;

if (fs.existsSync(dbSchemaLocalFilePath) && !DSQL_KEY?.match(/....../)) {
fs.watchFile(dbSchemaLocalFilePath, { interval: 1000 }, (curr, prev) => {
console.log(` - ${colors.FgBlue}Info:${colors.Reset} Syncing Databases Locally ...`);
console.log(
` - ${colors.FgBlue}Info:${colors.Reset} Syncing Databases Locally ...`
);
run();
});
} else if (DSQL_KEY?.match(/....../)) {
interval = setInterval(() => {
console.log(` - ${colors.FgMagenta}Info:${colors.Reset} Syncing Databases from the cloud ...`);
console.log(
` - ${colors.FgMagenta}Info:${colors.Reset} Syncing Databases from the cloud ...`
);
run();
}, 20000);
}

@ -9,10 +9,26 @@ require("dotenv").config({
path: path.resolve(process.cwd(), ".env"),
});

const mysqlPath = process.platform?.match(/win/i) ? "'" + "C:\\Program Files\\MySQL\\MySQL Server 8.0\\bin\\mysql.exe" + "'" : "mysql";
const mysqlDumpPath = process.platform?.match(/win/i) ? "'" + "C:\\Program Files\\MySQL\\MySQL Server 8.0\\bin\\mysqldump.exe" + "'" : "mysqldump";
const mysqlPath = process.platform?.match(/win/i)
? "'" + "C:\\Program Files\\MySQL\\MySQL Server 8.0\\bin\\mysql.exe" + "'"
: "mysql";
const mysqlDumpPath = process.platform?.match(/win/i)
? "'" +
"C:\\Program Files\\MySQL\\MySQL Server 8.0\\bin\\mysqldump.exe" +
"'"
: "mysqldump";

const { DSQL_HOST, DSQL_USER, DSQL_PASS, DSQL_DB_NAME, DSQL_KEY, DSQL_REF_DB_NAME, DSQL_FULL_SYNC, DSQL_ENCRYPTION_KEY, DSQL_ENCRYPTION_SALT } = process.env;
const {
DSQL_HOST,
DSQL_USER,
DSQL_PASS,
DSQL_DB_NAME,
DSQL_KEY,
DSQL_REF_DB_NAME,
DSQL_FULL_SYNC,
DSQL_ENCRYPTION_KEY,
DSQL_ENCRYPTION_SALT,
} = process.env;

const dbName = DSQL_DB_NAME || "";
const dumpFilePathArg = process.argv.indexOf("--file");
@ -39,9 +55,12 @@ try {
cwd: process.cwd(),
};

if (process.platform.match(/win/i)) execSyncOptions.shell = "bash.exe";
// if (process.platform.match(/win/i)) execSyncOptions.shell = "bash.exe";

const dump = execSync(`${mysqlPath} -u ${DSQL_USER} -p${DSQL_PASS} ${dbName} < ${dumpFilePath}`, execSyncOptions);
const dump = execSync(
`${mysqlPath} -u ${DSQL_USER} -p${DSQL_PASS} ${dbName} < ${dumpFilePath}`,
execSyncOptions
);

console.log("Dumped successfully", dump.toString());

@ -22,7 +22,7 @@ const updateApiSchemaFromLocalDb = require("../query/update-api-schema-from-loca
* ==============================================================================
*
* @param {object} params - Single object passed
* @param {import("../../types/database-schema.td").DSQL_DatabaseSchemaType} params.dbSchema - Database Schema Object
* @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} params.dbSchema - Database Schema Object
*
* @returns {Promise<*>} new user auth object payload
*/
@ -33,7 +33,7 @@ module.exports = async function addUsersTableToDb({ dbSchema }) {
* @description Initialize
*/
const database = process.env.DSQL_DB_NAME || "";
/** @type {import("../../types/database-schema.td").DSQL_TableSchemaType} */
/** @type {import("@/package-shared/types/database-schema.td").DSQL_TableSchemaType} */
const userPreset = require("./data/presets/users.json");

try {
@ -42,15 +42,22 @@ module.exports = async function addUsersTableToDb({ dbSchema }) {
*
* @description Fetch user from db
*/
const userSchemaMainFilePath = path.resolve(process.cwd(), "dsql.schema.json");
const userSchemaMainFilePath = path.resolve(
process.cwd(),
"dsql.schema.json"
);
let targetDatabase = dbSchema;

let existingTableIndex = targetDatabase.tables.findIndex((table, index) => {
if (!targetDatabase) throw new Error("Target database not found!");

let existingTableIndex = targetDatabase.tables.findIndex(
(table, index) => {
if (table.tableName === "users") {
existingTableIndex = index;
return true;
}
});
}
);

if (existingTableIndex >= 0) {
targetDatabase.tables[existingTableIndex] = userPreset;
@ -58,7 +65,11 @@ module.exports = async function addUsersTableToDb({ dbSchema }) {
targetDatabase.tables.push(userPreset);
}

fs.writeFileSync(`${userSchemaMainFilePath}`, JSON.stringify(dbSchema, null, 4), "utf8");
fs.writeFileSync(
`${userSchemaMainFilePath}`,
JSON.stringify(dbSchema, null, 4),
"utf8"
);

////////////////////////////////////////

@ -27,7 +27,7 @@ const updateTable = require("./utils/updateTable");
* runs the "dsql create" command. `NOTE`: there must be a "dsql.schema.json" file
* in the root of the project for this function to work
*
* @param {import("../../types/database-schema.td").DSQL_DatabaseSchemaType[]} dbSchema - An array of database schema objects
* @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} dbSchema - An array of database schema objects
*/
async function createDbFromSchema(dbSchema) {
try {
@ -42,8 +42,13 @@ async function createDbFromSchema(dbSchema) {
}

for (let i = 0; i < dbSchema.length; i++) {
/** @type {import("../../types/database-schema.td").DSQL_DatabaseSchemaType} */
/** @type {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
const database = dbSchema[i];

if (!database) {
continue;
}

const { dbFullName, tables } = database;

////////////////////////////////////////
@ -51,12 +56,16 @@ async function createDbFromSchema(dbSchema) {
////////////////////////////////////////

/** @type {{ dbFullName: string }[] | null} */
const dbCheck = await noDatabaseDbHandler({ query: `SELECT SCHEMA_NAME AS dbFullName FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = '${dbFullName}'` });
const dbCheck = await noDatabaseDbHandler({
query: `SELECT SCHEMA_NAME AS dbFullName FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = '${dbFullName}'`,
});

if (dbCheck && dbCheck[0]?.dbFullName) {
// Database Exists
} else {
const newDatabase = await noDatabaseDbHandler({ query: `CREATE DATABASE IF NOT EXISTS \`${dbFullName}\` CHARACTER SET utf8mb4 COLLATE utf8mb4_bin` });
const newDatabase = await noDatabaseDbHandler({
query: `CREATE DATABASE IF NOT EXISTS \`${dbFullName}\` CHARACTER SET utf8mb4 COLLATE utf8mb4_bin`,
});
}

////////////////////////////////////////
@ -68,7 +77,9 @@ async function createDbFromSchema(dbSchema) {
* @type {{ TABLE_NAME: string }[] | null}
* @description Select All tables in target database
*/
const allTables = await noDatabaseDbHandler({ query: `SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA='${dbFullName}'` });
const allTables = await noDatabaseDbHandler({
query: `SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA='${dbFullName}'`,
});

let tableDropped;

@ -85,8 +96,16 @@ async function createDbFromSchema(dbSchema) {
* in the user schema JSON. If it's not, the table is either deleted
* or the table name has been recently changed
*/
if (!tables.filter((_table) => _table.tableName === TABLE_NAME)[0]) {
const oldTableFilteredArray = tables.filter((_table) => _table.tableNameOld && _table.tableNameOld === TABLE_NAME);
if (
!tables.filter(
(_table) => _table.tableName === TABLE_NAME
)[0]
) {
const oldTableFilteredArray = tables.filter(
(_table) =>
_table.tableNameOld &&
_table.tableNameOld === TABLE_NAME
);

/**
* @description Check if this table has been recently renamed. Rename
@ -159,7 +178,11 @@ async function createDbFromSchema(dbSchema) {
});

if (table.childrenTables && table.childrenTables[0]) {
for (let ch = 0; ch < table.childrenTables.length; ch++) {
for (
let ch = 0;
ch < table.childrenTables.length;
ch++
) {
const childTable = table.childrenTables[ch];

const updateExistingChildTable = await updateTable({
@ -199,7 +222,12 @@ async function createDbFromSchema(dbSchema) {
*/
if (indexes && indexes[0]) {
for (let g = 0; g < indexes.length; g++) {
const { indexType, indexName, indexTableFields, alias } = indexes[g];
const {
indexType,
indexName,
indexTableFields,
alias,
} = indexes[g];

if (!alias?.match(/./)) continue;

@ -207,7 +235,8 @@ async function createDbFromSchema(dbSchema) {
* @type {any[] | null}
* @description All indexes from MYSQL db
*/
const allExistingIndexes = await varDatabaseDbHandler({
const allExistingIndexes =
await varDatabaseDbHandler({
queryString: `SHOW INDEXES FROM \`${tableName}\``,
database: dbFullName,
});
@ -216,18 +245,32 @@ async function createDbFromSchema(dbSchema) {
* @description Check for existing Index in MYSQL db
*/
try {
const existingKeyInDb = allExistingIndexes ? allExistingIndexes.filter((indexObject) => indexObject.Key_name === alias) : null;
if (!existingKeyInDb?.[0]) throw new Error("This Index Does not Exist");
const existingKeyInDb = allExistingIndexes
? allExistingIndexes.filter(
(indexObject) =>
indexObject.Key_name === alias
)
: null;
if (!existingKeyInDb?.[0])
throw new Error(
"This Index Does not Exist"
);
} catch (error) {
/**
* @description Create new index if determined that it
* doesn't exist in MYSQL db
*/
await varDatabaseDbHandler({
queryString: `CREATE${indexType.match(/fullText/i) ? " FULLTEXT" : ""} INDEX \`${alias}\` ON ${tableName}(${indexTableFields
queryString: `CREATE${
indexType.match(/fullText/i)
? " FULLTEXT"
: ""
} INDEX \`${alias}\` ON ${tableName}(${indexTableFields
.map((nm) => nm.value)
.map((nm) => `\`${nm}\``)
.join(",")}) COMMENT 'schema_index'`,
.join(
","
)}) COMMENT 'schema_index'`,
database: dbFullName,
});
}

@ -10,7 +10,23 @@ const supplementTable = require("./supplementTable");
/** ****************************************************************************** */
/** ****************************************************************************** */

module.exports = async function createTable({ dbFullName, tableName, tableInfoArray, varDatabaseDbHandler, dbSchema }) {
/**
*
* @param {object} param0
* @param {string} param0.dbFullName
* @param {string} param0.tableName
* @param {any[]} param0.tableInfoArray
* @param {(params: import("./varDatabaseDbHandler").VarDbHandlerParam)=>any} param0.varDatabaseDbHandler
* @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType} [param0.dbSchema]
* @returns
*/
module.exports = async function createTable({
dbFullName,
tableName,
tableInfoArray,
varDatabaseDbHandler,
dbSchema,
}) {
/**
* Format tableInfoArray
*
@ -36,7 +52,7 @@ module.exports = async function createTable({ dbFullName, tableName, tableInfoAr

for (let i = 0; i < finalTable.length; i++) {
const column = finalTable[i];
const { fieldName, dataType, nullValue, primaryKey, autoIncrement, defaultValue, defaultValueLiteral, foreignKey, updatedField } = column;
const { fieldName, foreignKey } = column;

if (foreignKey) {
foreignKeys.push({
@ -45,7 +61,10 @@ module.exports = async function createTable({ dbFullName, tableName, tableInfoAr
});
}

let { fieldEntryText, newPrimaryKeySet } = generateColumnDescription({ columnData: column, primaryKeySet: primaryKeySet });
let { fieldEntryText, newPrimaryKeySet } = generateColumnDescription({
columnData: column,
primaryKeySet: primaryKeySet,
});

primaryKeySet = newPrimaryKeySet;

@ -74,20 +93,33 @@ module.exports = async function createTable({ dbFullName, tableName, tableInfoAr

if (foreignKeys[0]) {
foreignKeys.forEach((foreighKey, index, array) => {
const { fieldName, destinationTableName, destinationTableColumnName, cascadeDelete, cascadeUpdate, foreignKeyName } = foreighKey;
const {
fieldName,
destinationTableName,
destinationTableColumnName,
cascadeDelete,
cascadeUpdate,
foreignKeyName,
} = foreighKey;

const comma = (() => {
if (index === foreignKeys.length - 1) return "";
return ",";
})();

createTableQueryArray.push(` CONSTRAINT \`${foreignKeyName}\` FOREIGN KEY (\`${fieldName}\`) REFERENCES \`${destinationTableName}\`(${destinationTableColumnName})${cascadeDelete ? " ON DELETE CASCADE" : ""}${cascadeUpdate ? " ON UPDATE CASCADE" : ""}${comma}`);
createTableQueryArray.push(
` CONSTRAINT \`${foreignKeyName}\` FOREIGN KEY (\`${fieldName}\`) REFERENCES \`${destinationTableName}\`(${destinationTableColumnName})${
cascadeDelete ? " ON DELETE CASCADE" : ""
}${cascadeUpdate ? " ON UPDATE CASCADE" : ""}${comma}`
);
});
}

////////////////////////////////////////

createTableQueryArray.push(`) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin;`);
createTableQueryArray.push(
`) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin;`
);

const createTableQuery = createTableQueryArray.join("\n");

@ -65,7 +65,7 @@ const connection = mysql.createConnection({
* @param {object} params - Single Param object containing params
* @param {string} params.query - Query String
* @param {(string | number)[]} [params.values] - Values
* @param {import("../../../types/database-schema.td").DSQL_DatabaseSchemaType} [params.dbSchema] - Database Schema
* @param {import("../../../package-shared/types/database-schema.td").DSQL_DatabaseSchemaType} [params.dbSchema] - Database Schema
* @param {string} [params.database] - Target Database
* @param {string} [params.tableName] - Target Table Name
*

@ -11,18 +11,30 @@
* Generate SQL text for Field
* ==============================================================================
* @param {object} params - Single object params
* @param {import("../../../types/database-schema.td").DSQL_FieldSchemaType} params.columnData - Field object
* @param {import("@/package-shared/types/database-schema.td").DSQL_FieldSchemaType} params.columnData - Field object
* @param {boolean} [params.primaryKeySet] - Table Name(slug)
*
* @returns {{fieldEntryText: string, newPrimaryKeySet: boolean}}
*/
module.exports = function generateColumnDescription({ columnData, primaryKeySet }) {
module.exports = function generateColumnDescription({
columnData,
primaryKeySet,
}) {
/**
* Format tableInfoArray
*
* @description Format tableInfoArray
*/
const { fieldName, dataType, nullValue, primaryKey, autoIncrement, defaultValue, defaultValueLiteral, notNullValue } = columnData;
const {
fieldName,
dataType,
nullValue,
primaryKey,
autoIncrement,
defaultValue,
defaultValueLiteral,
notNullValue,
} = columnData;

let fieldEntryText = "";

@ -27,7 +27,7 @@ const connection = mysql.createConnection({
* @param {string} params.query - Query String
* @param {string[]} [params.values] - Values
*
* @returns {Promise<object[] | null>}
* @returns {Promise<any[] | null>}
*/
module.exports = async function noDatabaseDbHandler({ query, values }) {
/**

@ -14,10 +14,13 @@ const defaultFieldsRegexp = require("./defaultFieldsRegexp");
* @param {object} params - Single object params
* @param {*[]} params.unparsedResults - Array of data objects containing Fields(keys)
* and corresponding values of the fields(values)
* @param {import("../../../types/database-schema.td").DSQL_TableSchemaType} [params.tableSchema] - Table schema
* @param {import("../../../package-shared/types/database-schema.td").DSQL_TableSchemaType} [params.tableSchema] - Table schema
* @returns {Promise<object[]|null>}
*/
module.exports = async function parseDbResults({ unparsedResults, tableSchema }) {
module.exports = async function parseDbResults({
unparsedResults,
tableSchema,
}) {
/**
* Declare variables
*
@ -56,7 +59,11 @@ module.exports = async function parseDbResults({ unparsedResults, tableSchema })
}

if (resultFieldSchema?.encrypted && value?.match(/./)) {
result[resultFieldName] = decrypt({ encryptedString: value, encryptionKey, encryptionSalt });
result[resultFieldName] = decrypt({
encryptedString: value,
encryptionKey,
encryptionSalt,
});
}
}

@ -1,10 +1,17 @@
// @ts-check

/**
*
* @param {string} text
* @returns
*/
module.exports = function slugToCamelTitle(text) {
if (text) {
let addArray = text.split("-").filter((item) => item !== "");
let camelArray = addArray.map((item) => {
return item.substr(0, 1).toUpperCase() + item.substr(1).toLowerCase();
return (
item.substr(0, 1).toUpperCase() + item.substr(1).toLowerCase()
);
});

let parsedAddress = camelArray.join(" ");

@ -7,6 +7,12 @@
/** ****************************************************************************** */
/** ****************************************************************************** */

/**
*
* @param {object} param0
* @param {import("@/package-shared/types/database-schema.td").DSQL_FieldSchemaType[]} param0.tableInfoArray
* @returns
*/
module.exports = function supplementTable({ tableInfoArray }) {
/**
* Format tableInfoArray
@ -18,12 +24,16 @@ module.exports = function supplementTable({ tableInfoArray }) {

////////////////////////////////////////

let primaryKeyExists = finalTableArray.filter((_field) => _field.primaryKey);
let primaryKeyExists = finalTableArray.filter(
(_field) => _field.primaryKey
);

////////////////////////////////////////

defaultFields.forEach((field) => {
let fieldExists = finalTableArray.filter((_field) => _field.fieldName === field.fieldName);
let fieldExists = finalTableArray.filter(
(_field) => _field.fieldName === field.fieldName
);

if (fieldExists && fieldExists[0]) {
return;

@ -10,7 +10,8 @@
const fs = require("fs");
const path = require("path");

const defaultFieldsRegexp = /^id$|^date_created$|^date_created_code$|^date_created_timestamp$|^date_updated$|^date_updated_code$|^date_updated_timestamp$/;
const defaultFieldsRegexp =
/^id$|^date_created$|^date_created_code$|^date_created_timestamp$|^date_updated$|^date_updated_code$|^date_updated_timestamp$/;

const generateColumnDescription = require("./generateColumnDescription");
const varDatabaseDbHandler = require("./varDatabaseDbHandler");
@ -30,15 +31,23 @@ const schemaPath = path.resolve(process.cwd(), "dsql.schema.json");
* @param {object} params - Single object params
* @param {string} params.dbFullName - Database full name => "datasquirel_user_4394_db_name"
* @param {string} params.tableName - Table Name(slug)
* @param {import("../../../types/database-schema.td").DSQL_FieldSchemaType[]} params.tableInfoArray - Table Info Array
* @param {import("../../../types/database-schema.td").DSQL_DatabaseSchemaType[]} params.dbSchema - Single post
* @param {import("../../../types/database-schema.td").DSQL_IndexSchemaType[]} [params.tableIndexes] - Table Indexes
* @param {import("@/package-shared/types/database-schema.td").DSQL_FieldSchemaType[]} params.tableInfoArray - Table Info Array
* @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType[]} params.dbSchema - Single post
* @param {import("@/package-shared/types/database-schema.td").DSQL_IndexSchemaType[]} [params.tableIndexes] - Table Indexes
* @param {boolean} [params.clone] - Is this a newly cloned table?
* @param {number} [params.tableIndex] - The number index of the table in the dbSchema array
*
* @returns {Promise<string|object[]|null>}
*/
module.exports = async function updateTable({ dbFullName, tableName, tableInfoArray, dbSchema, tableIndexes, clone, tableIndex }) {
module.exports = async function updateTable({
dbFullName,
tableName,
tableInfoArray,
dbSchema,
tableIndexes,
clone,
tableIndex,
}) {
/**
* Initialize
* ==========================================
@ -79,7 +88,7 @@ module.exports = async function updateTable({ dbFullName, tableName, tableInfoAr
////////////////////////////////////////

/**
* @type {*}
* @type {DSQL_MYSQL_SHOW_INDEXES_Type[]}
* @description All indexes from MYSQL db
*/
const allExistingIndexes = await varDatabaseDbHandler({
@ -88,7 +97,7 @@ module.exports = async function updateTable({ dbFullName, tableName, tableInfoAr
});

/**
* @type {*}
* @type {DSQL_MYSQL_SHOW_COLUMNS_Type[]}
* @description All columns from MYSQL db
*/
const allExistingColumns = await varDatabaseDbHandler({
@ -121,7 +130,11 @@ module.exports = async function updateTable({ dbFullName, tableName, tableInfoAr
* @description This finds out whether the fieldName corresponds with the MSQL Field name
* if the fildName doesn't match any MYSQL Field name, the field is deleted.
*/
let existingEntry = upToDateTableFieldsArray.filter((column) => column.fieldName === Field || column.originName === Field);
let existingEntry = upToDateTableFieldsArray.filter(
(column) =>
column.fieldName === Field ||
column.originName === Field
);

if (existingEntry && existingEntry[0]) {
/**
@ -135,7 +148,9 @@ module.exports = async function updateTable({ dbFullName, tableName, tableInfoAr
database: dbFullName,
});

console.log(`Column Renamed from "${existingEntry[0].originName}" to "${existingEntry[0].fieldName}"`);
console.log(
`Column Renamed from "${existingEntry[0].originName}" to "${existingEntry[0].fieldName}"`
);

/**
* Update Db Schema
@ -145,21 +160,47 @@ module.exports = async function updateTable({ dbFullName, tableName, tableInfoAr
try {
const userSchemaData = dbSchema;

const targetDbIndex = userSchemaData.findIndex((db) => db.dbFullName === dbFullName);
const targetTableIndex = userSchemaData[targetDbIndex].tables.findIndex((table) => table.tableName === tableName);
const targetFieldIndex = userSchemaData[targetDbIndex].tables[targetTableIndex].fields.findIndex((field) => field.fieldName === existingEntry[0].fieldName);
const targetDbIndex = userSchemaData.findIndex(
(db) => db.dbFullName === dbFullName
);
const targetTableIndex = userSchemaData[
targetDbIndex
].tables.findIndex(
(table) => table.tableName === tableName
);
const targetFieldIndex = userSchemaData[
targetDbIndex
].tables[targetTableIndex].fields.findIndex(
(field) =>
field.fieldName ===
existingEntry[0].fieldName
);

delete userSchemaData[targetDbIndex].tables[targetTableIndex].fields[targetFieldIndex]["originName"];
delete userSchemaData[targetDbIndex].tables[targetTableIndex].fields[targetFieldIndex]["updatedField"];
delete userSchemaData[targetDbIndex].tables[
targetTableIndex
].fields[targetFieldIndex]["originName"];
delete userSchemaData[targetDbIndex].tables[
targetTableIndex
].fields[targetFieldIndex]["updatedField"];

/**
* @description Set New Table Fields Array
*/
upToDateTableFieldsArray = userSchemaData[targetDbIndex].tables[targetTableIndex].fields;
upToDateTableFieldsArray =
userSchemaData[targetDbIndex].tables[
targetTableIndex
].fields;

fs.writeFileSync(schemaPath, JSON.stringify(userSchemaData), "utf8");
fs.writeFileSync(
schemaPath,
JSON.stringify(userSchemaData),
"utf8"
);
} catch (/** @type {*} */ error) {
console.log("Error in updating Table =>", error.message);
console.log(
"Error in updating Table =>",
error.message
);
}

////////////////////////////////////////
@ -199,8 +240,16 @@ module.exports = async function updateTable({ dbFullName, tableName, tableInfoAr
*/
if (Index_comment?.match(/schema_index/)) {
try {
const existingKeyInSchema = tableIndexes ? tableIndexes.filter((indexObject) => indexObject.alias === Key_name) : null;
if (!existingKeyInSchema?.[0]) throw new Error(`This Index(${Key_name}) Has been Deleted!`);
const existingKeyInSchema = tableIndexes
? tableIndexes.filter(
(indexObject) =>
indexObject.alias === Key_name
)
: null;
if (!existingKeyInSchema?.[0])
throw new Error(
`This Index(${Key_name}) Has been Deleted!`
);
} catch (error) {
/**
* @description Drop Index: This happens when the MYSQL index is not
@ -222,7 +271,8 @@ module.exports = async function updateTable({ dbFullName, tableName, tableInfoAr
*/
if (tableIndexes && tableIndexes[0]) {
for (let g = 0; g < tableIndexes.length; g++) {
const { indexType, indexName, indexTableFields, alias } = tableIndexes[g];
const { indexType, indexName, indexTableFields, alias } =
tableIndexes[g];

if (!alias?.match(/./)) continue;

@ -230,15 +280,21 @@ module.exports = async function updateTable({ dbFullName, tableName, tableInfoAr
* @description Check for existing Index in MYSQL db
*/
try {
const existingKeyInDb = allExistingIndexes?.filter((indexObject) => indexObject.Key_name === alias);
if (!existingKeyInDb?.[0]) throw new Error("This Index Does not Exist");
const existingKeyInDb = allExistingIndexes?.filter(
(/** @type {any} */ indexObject) =>
indexObject.Key_name === alias
);
if (!existingKeyInDb?.[0])
throw new Error("This Index Does not Exist");
} catch (error) {
/**
* @description Create new index if determined that it
* doesn't exist in MYSQL db
*/
await varDatabaseDbHandler({
queryString: `CREATE${indexType.match(/fullText/i) ? " FULLTEXT" : ""} INDEX \`${alias}\` ON ${tableName}(${indexTableFields
queryString: `CREATE${
indexType.match(/fullText/i) ? " FULLTEXT" : ""
} INDEX \`${alias}\` ON ${tableName}(${indexTableFields
.map((nm) => nm.value)
.map((nm) => `\`${nm}\``)
.join(",")}) COMMENT 'schema_index'`,
@ -302,7 +358,17 @@ module.exports = async function updateTable({ dbFullName, tableName, tableInfoAr
const prevColumn = upToDateTableFieldsArray[i - 1];
const nextColumn = upToDateTableFieldsArray[i + 1];

const { fieldName, dataType, nullValue, primaryKey, autoIncrement, defaultValue, defaultValueLiteral, foreignKey, updatedField } = column;
const {
fieldName,
dataType,
nullValue,
primaryKey,
autoIncrement,
defaultValue,
defaultValueLiteral,
foreignKey,
updatedField,
} = column;

////////////////////////////////////////

@ -339,22 +405,37 @@ module.exports = async function updateTable({ dbFullName, tableName, tableInfoAr
/**
* @description Construct SQL text snippet for this field
*/
let { fieldEntryText } = generateColumnDescription({ columnData: column });
let { fieldEntryText } = generateColumnDescription({
columnData: column,
});

/**
* @description Modify Column(Field) if it already exists
* in MYSQL database
*/
if (existingColumn && existingColumn[0]?.Field) {
const { Field, Type, Null, Key, Default, Extra } = existingColumn[0];
const { Field, Type, Null, Key, Default, Extra } =
existingColumn[0];

let isColumnReordered = existingColumnIndex ? i < existingColumnIndex : false;
let isColumnReordered = existingColumnIndex
? i < existingColumnIndex
: false;

if (Field === fieldName && !isColumnReordered && dataType.toUpperCase() === Type.toUpperCase()) {
if (
Field === fieldName &&
!isColumnReordered &&
dataType.toUpperCase() === Type.toUpperCase()
) {
updateText += `MODIFY COLUMN ${fieldEntryText}`;
// continue;
} else {
updateText += `MODIFY COLUMN ${fieldEntryText}${isColumnReordered ? (prevColumn?.fieldName ? " AFTER \`" + prevColumn.fieldName + "\`" : " AFTER \`id\`") : ""}`;
updateText += `MODIFY COLUMN ${fieldEntryText}${
isColumnReordered
? prevColumn?.fieldName
? " AFTER \`" + prevColumn.fieldName + "\`"
: " AFTER \`id\`"
: ""
}`;
// if (userId) {
// } else {
// updateText += `MODIFY COLUMN ${fieldEntryText}`;
@ -396,9 +477,17 @@ module.exports = async function updateTable({ dbFullName, tableName, tableInfoAr
* "clone" boolean = true
*/
if (!clone && foreignKey) {
const { destinationTableName, destinationTableColumnName, cascadeDelete, cascadeUpdate, foreignKeyName } = foreignKey;
const {
destinationTableName,
destinationTableColumnName,
cascadeDelete,
cascadeUpdate,
foreignKeyName,
} = foreignKey;

const foreinKeyText = `ADD CONSTRAINT \`${foreignKeyName}\` FOREIGN KEY (${fieldName}) REFERENCES ${destinationTableName}(${destinationTableColumnName})${cascadeDelete ? " ON DELETE CASCADE" : ""}${cascadeUpdate ? " ON UPDATE CASCADE" : ""}`;
const foreinKeyText = `ADD CONSTRAINT \`${foreignKeyName}\` FOREIGN KEY (${fieldName}) REFERENCES ${destinationTableName}(${destinationTableColumnName})${
cascadeDelete ? " ON DELETE CASCADE" : ""
}${cascadeUpdate ? " ON UPDATE CASCADE" : ""}`;
// const foreinKeyText = `ADD CONSTRAINT \`${foreignKeyName}\` FOREIGN KEY (${fieldName}) REFERENCES ${destinationTableName}(${destinationTableColumnName})${cascadeDelete ? " ON DELETE CASCADE" : ""}${cascadeUpdate ? " ON UPDATE CASCADE" : ""}` + ",";

const finalQueryString = `ALTER TABLE \`${tableName}\` ${foreinKeyText}`;
@ -416,7 +505,9 @@ module.exports = async function updateTable({ dbFullName, tableName, tableInfoAr
* @description Construct final SQL query by combning all SQL snippets in
* updateTableQueryArray Arry, and trimming the final comma(,)
*/
const updateTableQuery = updateTableQueryArray.join(" ").replace(/,$/, "");
const updateTableQuery = updateTableQueryArray
.join(" ")
.replace(/,$/, "");

////////////////////////////////////////

@ -19,18 +19,27 @@ const fs = require("fs");
const parseDbResults = require("./parseDbResults");
const dbHandler = require("./dbHandler");

/**
* @typedef {object} VarDbHandlerParam
* @property {string} queryString - SQL string
* @property {string[]} [queryValuesArray] - Values Array
* @property {string} database - Database name
* @property {import("@/package-shared/types/database-schema.td").DSQL_TableSchemaType} [tableSchema] - Table schema
*/

/**
* DB handler for specific database
* ==============================================================================
* @async
* @param {object} params - Single object params
* @param {string} params.queryString - SQL string
* @param {string[]} [params.queryValuesArray] - Values Array
* @param {string} params.database - Database name
* @param {import("../../../types/database-schema.td").DSQL_TableSchemaType} [params.tableSchema] - Table schema
* @returns {Promise<*>}
* @param {VarDbHandlerParam} params
* @returns {Promise<any>}
*/
module.exports = async function varDatabaseDbHandler({ queryString, queryValuesArray, database, tableSchema }) {
module.exports = async function varDatabaseDbHandler({
queryString,
queryValuesArray,
database,
tableSchema,
}) {
/**
* Create Connection
*
@ -54,17 +63,32 @@ module.exports = async function varDatabaseDbHandler({ queryString, queryValuesA
* @description Fetch data from db if no cache
*/
try {
if (queryString && Array.isArray(queryValuesArray) && queryValuesArray[0]) {
results = await dbHandler({ query: queryString, values: queryValuesArray, database: database });
if (
queryString &&
Array.isArray(queryValuesArray) &&
queryValuesArray[0]
) {
results = await dbHandler({
query: queryString,
values: queryValuesArray,
database: database,
});
} else if (queryString && !Array.isArray(queryValuesArray)) {
results = await dbHandler({ query: queryString, database: database });
results = await dbHandler({
query: queryString,
database: database,
});
}

////////////////////////////////////////
////////////////////////////////////////
////////////////////////////////////////
} catch (error) {
console.log("\x1b[31mvarDatabaseDbHandler ERROR\x1b[0m =>", database, error);
console.log(
"\x1b[31mvarDatabaseDbHandler ERROR\x1b[0m =>",
database,
error
);
}

/**
@ -76,10 +100,17 @@ module.exports = async function varDatabaseDbHandler({ queryString, queryValuesA
try {
const unparsedResults = results;
// deepcode ignore reDOS: <please specify a reason of ignoring this>
const parsedResults = await parseDbResults({ unparsedResults: unparsedResults, tableSchema: tableSchema });
const parsedResults = await parseDbResults({
unparsedResults: unparsedResults,
tableSchema: tableSchema,
});
return parsedResults;
} catch (error) {
console.log("\x1b[31mvarDatabaseDbHandler ERROR\x1b[0m =>", database, error);
console.log(
"\x1b[31mvarDatabaseDbHandler ERROR\x1b[0m =>",
database,
error
);
return null;
}

@ -29,7 +29,7 @@ const runQuery = require("./utils/runQuery");
*
* @param {Object} params - Single object passed
* @param {LocalQueryObject} params.options - SQL Query
* @param {import("../../types/database-schema.td").DSQL_DatabaseSchemaType} [params.dbSchema] - Name of the table to query
* @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} [params.dbSchema] - Name of the table to query
*
* @returns { Promise<LocalGetReturn> } - Return Object
*/
@ -47,7 +47,13 @@ async function localGet({ options, dbSchema }) {
*
* @description Input Validation
*/
if (typeof query == "string" && (query.match(/^alter|^delete|information_schema|databases|^create/i) || !query.match(/^select/i))) {
if (
typeof query == "string" &&
(query.match(
/^alter|^delete|information_schema|databases|^create/i
) ||
!query.match(/^select/i))
) {
return { success: false, msg: "Wrong Input" };
}

@ -68,7 +74,8 @@ async function localGet({ options, dbSchema }) {
});

if (error) throw error;
if (!result) throw new Error("No Result received for query => " + query);
if (!result)
throw new Error("No Result received for query => " + query);
if (result?.error) throw new Error(result.error);

results = result;

@ -24,7 +24,7 @@ const runQuery = require("./utils/runQuery");
*
* @param {Object} params - Single object passed
* @param {LocalPostQueryObject} params.options - SQL Query
* @param {import("../../types/database-schema.td").DSQL_DatabaseSchemaType} [params.dbSchema] - Name of the table to query
* @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} [params.dbSchema] - Name of the table to query
*
* @returns { Promise<LocalPostReturn> } - Return Object
*/
@ -41,11 +41,17 @@ async function localPost({ options, dbSchema }) {
*
* @description Input Validation
*/
if (typeof query === "string" && query?.match(/^create |^alter |^drop /i)) {
if (
typeof query === "string" &&
query?.match(/^create |^alter |^drop /i)
) {
return { success: false, msg: "Wrong Input" };
}

if (typeof query === "object" && query?.action?.match(/^create |^alter |^drop /i)) {
if (
typeof query === "object" &&
query?.action?.match(/^create |^alter |^drop /i)
) {
return { success: false, msg: "Wrong Input" };
}

@ -19,7 +19,7 @@ const updateDbEntry = require("./updateDbEntry");
* @param {string} params.dbFullName - Database full name
* @param {string} params.tableName - Table name
* @param {*} params.data - Data to add
* @param {import("../../../types/database-schema.td").DSQL_TableSchemaType} [params.tableSchema] - Table schema
* @param {import("@/package-shared/types/database-schema.td").DSQL_TableSchemaType} [params.tableSchema] - Table schema
* @param {string} [params.duplicateColumnName] - Duplicate column name
* @param {string} [params.duplicateColumnValue] - Duplicate column value
* @param {boolean} [params.update] - Update this row if it exists
@ -28,7 +28,17 @@ const updateDbEntry = require("./updateDbEntry");
*
* @returns {Promise<*>}
*/
async function addDbEntry({ dbFullName, tableName, data, tableSchema, duplicateColumnName, duplicateColumnValue, update, encryptionKey, encryptionSalt }) {
async function addDbEntry({
dbFullName,
tableName,
data,
tableSchema,
duplicateColumnName,
duplicateColumnValue,
update,
encryptionKey,
encryptionSalt,
}) {
/**
* Initialize variables
*/
@ -79,8 +89,15 @@ async function addDbEntry({ dbFullName, tableName, data, tableSchema, duplicateC
const dataKey = dataKeys[i];
let value = data[dataKey];

const targetFieldSchemaArray = tableSchema ? tableSchema?.fields?.filter((field) => field.fieldName == dataKey) : null;
const targetFieldSchema = targetFieldSchemaArray && targetFieldSchemaArray[0] ? targetFieldSchemaArray[0] : null;
const targetFieldSchemaArray = tableSchema
? tableSchema?.fields?.filter(
(field) => field.fieldName == dataKey
)
: null;
const targetFieldSchema =
targetFieldSchemaArray && targetFieldSchemaArray[0]
? targetFieldSchemaArray[0]
: null;

if (!value) continue;

@ -90,7 +107,10 @@ async function addDbEntry({ dbFullName, tableName, data, tableSchema, duplicateC
}

if (targetFieldSchema?.pattern) {
const pattern = new RegExp(targetFieldSchema.pattern, targetFieldSchema.patternFlags || "");
const pattern = new RegExp(
targetFieldSchema.pattern,
targetFieldSchema.patternFlags || ""
);
if (!value?.toString()?.match(pattern)) {
console.log("DSQL: Pattern not matched =>", value);
value = "";
@ -136,7 +156,9 @@ async function addDbEntry({ dbFullName, tableName, data, tableSchema, duplicateC

////////////////////////////////////////

const query = `INSERT INTO \`${tableName}\` (${insertKeysArray.join(",")}) VALUES (${insertValuesArray.map(() => "?").join(",")})`;
const query = `INSERT INTO \`${tableName}\` (${insertKeysArray.join(
","
)}) VALUES (${insertValuesArray.map(() => "?").join(",")})`;
const queryValuesArray = insertValuesArray;

const newInsert = await dbHandler({

@ -19,13 +19,20 @@ const dbHandler = require("../../engine/utils/dbHandler");
* "Read only" or "Full Access"? Defaults to "Read Only"
* @param {string} params.dbFullName - Database full name
* @param {string} params.tableName - Table name
* @param {import("../../../types/database-schema.td").DSQL_TableSchemaType} [params.tableSchema] - Table schema
* @param {import("@/package-shared/types/database-schema.td").DSQL_TableSchemaType} [params.tableSchema] - Table schema
* @param {string} params.identifierColumnName - Update row identifier column name
* @param {string|number} params.identifierValue - Update row identifier column value
*
* @returns {Promise<object|null>}
*/
async function deleteDbEntry({ dbContext, paradigm, dbFullName, tableName, identifierColumnName, identifierValue }) {
async function deleteDbEntry({
dbContext,
paradigm,
dbFullName,
tableName,
identifierColumnName,
identifierValue,
}) {
try {
/**
* Check if data is valid

@ -31,13 +31,20 @@ const varDatabaseDbHandler = require("../../engine/utils/varDatabaseDbHandler");
* @param {string} params.dbFullName - Database full name. Eg. "datasquire_user_2_test"
* @param {*} params.query - Query string or object
* @param {boolean} [params.readOnly] - Is this operation read only?
* @param {import("../../../types/database-schema.td").DSQL_DatabaseSchemaType} [params.dbSchema] - Database schema
* @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType} [params.dbSchema] - Database schema
* @param {string[]} [params.queryValuesArray] - An optional array of query values if "?" is used in the query string
* @param {string} [params.tableName] - Table Name
*
* @return {Promise<{result: *, error?: *}>}
*/
async function runQuery({ dbFullName, query, readOnly, dbSchema, queryValuesArray, tableName }) {
async function runQuery({
dbFullName,
query,
readOnly,
dbSchema,
queryValuesArray,
tableName,
}) {
/**
* Declare variables
*
@ -50,9 +57,17 @@ async function runQuery({ dbFullName, query, readOnly, dbSchema, queryValuesArra

if (dbSchema) {
try {
const table = tableName ? tableName : typeof query == "string" ? null : query ? query?.table : null;
const table = tableName
? tableName
: typeof query == "string"
? null
: query
? query?.table
: null;
if (!table) throw new Error("No table name provided");
tableSchema = dbSchema.tables.filter((tb) => tb?.tableName === table)[0];
tableSchema = dbSchema.tables.filter(
(tb) => tb?.tableName === table
)[0];
} catch (_err) {}
}

@ -75,7 +90,16 @@ async function runQuery({ dbFullName, query, readOnly, dbSchema, queryValuesArra
*
* @description Declare "results" variable
*/
const { data, action, table, identifierColumnName, identifierValue, update, duplicateColumnName, duplicateColumnValue } = query;
const {
data,
action,
table,
identifierColumnName,
identifierValue,
update,
duplicateColumnName,
duplicateColumnValue,
} = query;

switch (action.toLowerCase()) {
case "insert":

@ -21,7 +21,7 @@ const dbHandler = require("../../engine/utils/dbHandler");
* @param {string} params.dbFullName - Database full name
* @param {string} params.tableName - Table name
* @param {*} params.data - Data to add
* @param {import("../../../types/database-schema.td").DSQL_TableSchemaType} [params.tableSchema] - Table schema
* @param {import("@/package-shared/types/database-schema.td").DSQL_TableSchemaType} [params.tableSchema] - Table schema
* @param {string} params.identifierColumnName - Update row identifier column name
* @param {string | number} params.identifierValue - Update row identifier column value
* @param {string} params.encryptionKey - Encryption key
@ -29,7 +29,18 @@ const dbHandler = require("../../engine/utils/dbHandler");
*
* @returns {Promise<object|null>}
*/
async function updateDbEntry({ dbContext, paradigm, dbFullName, tableName, data, tableSchema, identifierColumnName, identifierValue, encryptionKey, encryptionSalt }) {
async function updateDbEntry({
dbContext,
paradigm,
dbFullName,
tableName,
data,
tableSchema,
identifierColumnName,
identifierValue,
encryptionKey,
encryptionSalt,
}) {
/**
* Check if data is valid
*/
@ -59,11 +70,23 @@ async function updateDbEntry({ dbContext, paradigm, dbFullName, tableName, data,
const dataKey = dataKeys[i];
let value = data[dataKey];

const targetFieldSchemaArray = tableSchema ? tableSchema?.fields?.filter((field) => field.fieldName === dataKey) : null;
const targetFieldSchema = targetFieldSchemaArray && targetFieldSchemaArray[0] ? targetFieldSchemaArray[0] : null;
const targetFieldSchemaArray = tableSchema
? tableSchema?.fields?.filter(
(field) => field.fieldName === dataKey
)
: null;
const targetFieldSchema =
targetFieldSchemaArray && targetFieldSchemaArray[0]
? targetFieldSchemaArray[0]
: null;

if (typeof value == "undefined") continue;
if (typeof value !== "string" && typeof value !== "number" && !value) continue;
if (
typeof value !== "string" &&
typeof value !== "number" &&
!value
)
continue;

if (targetFieldSchema?.encrypted) {
value = encrypt({ data: value, encryptionKey, encryptionSalt });
@ -82,7 +105,10 @@ async function updateDbEntry({ dbContext, paradigm, dbFullName, tableName, data,
}

if (targetFieldSchema?.pattern) {
const pattern = new RegExp(targetFieldSchema.pattern, targetFieldSchema.patternFlags || "");
const pattern = new RegExp(
targetFieldSchema.pattern,
targetFieldSchema.patternFlags || ""
);
if (!value?.toString()?.match(pattern)) {
console.log("DSQL: Pattern not matched =>", value);
value = "";
@ -108,7 +134,10 @@ async function updateDbEntry({ dbContext, paradigm, dbFullName, tableName, data,
////////////////////////////////////////
////////////////////////////////////////

console.log("DSQL: Error in parsing data keys in update function =>", error.message);
console.log(
"DSQL: Error in parsing data keys in update function =>",
error.message
);
continue;
}
}
@ -122,7 +151,9 @@ async function updateDbEntry({ dbContext, paradigm, dbFullName, tableName, data,
////////////////////////////////////////
////////////////////////////////////////

const query = `UPDATE ${tableName} SET ${updateKeyValueArray.join(",")} WHERE \`${identifierColumnName}\`=?`;
const query = `UPDATE ${tableName} SET ${updateKeyValueArray.join(
","
)} WHERE \`${identifierColumnName}\`=?`;

updateValues.push(identifierValue);

@ -20,8 +20,8 @@ const runQuery = require("../query/utils/runQuery");
* @async
*
* @param {Object} params - Single object passed
* @param {import("../../users/add-user").UserDataPayload} params.payload - SQL Query
* @param {import("../../types/database-schema.td").DSQL_DatabaseSchemaType} params.dbSchema - Name of the table to query
* @param {import("@/types/user.td").UserDataPayload} params.payload - SQL Query
* @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} params.dbSchema - Name of the table to query
* @param {string} [params.encryptionKey]
* @param {string} [params.encryptionSalt]
*
@ -104,6 +104,10 @@ async function localAddUser({
};
}

if (!dbSchema) {
throw new Error("Db Schema not found!");
}

const tableSchema = dbSchema.tables.find(
(tb) => tb?.tableName === "users"
);

@ -7,7 +7,7 @@ const varDatabaseDbHandler = require("../engine/utils/varDatabaseDbHandler");
* @param {object} param0
* @param {number} param0.userId
* @param {string[]} param0.fields
* @param {import("../../types/database-schema.td").DSQL_DatabaseSchemaType} [param0.dbSchema]
* @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} [param0.dbSchema]
* @returns
*/
async function getLocalUser({ userId, fields, dbSchema }) {
@ -19,7 +19,9 @@ async function getLocalUser({ userId, fields, dbSchema }) {
const sanitizedFields = fields.map((fld) => fld.replace(/[^a-z\_]/g, ""));
const query = `SELECT ${sanitizedFields.join(",")} FROM users WHERE id = ?`;

const tableSchema = dbSchema?.tables.find((tb) => tb?.tableName === "users");
const tableSchema = dbSchema?.tables.find(
(tb) => tb?.tableName === "users"
);

let foundUser = await varDatabaseDbHandler({
queryString: query,

@ -5,17 +5,7 @@ const varDatabaseDbHandler = require("../engine/utils/varDatabaseDbHandler");

/**
*
* @param {object} param0
* @param {{
* email?: string,
* username?: string,
* password: string,
* }} param0.payload
* @param {string[]} [param0.additionalFields]
* @param {import("../../types/database-schema.td").DSQL_DatabaseSchemaType} [param0.dbSchema]
* @param {boolean} [param0.email_login]
* @param {string} [param0.email_login_code]
* @param {string | null} [param0.email_login_field]
* @param {PackageUserLoginLocalBody} param0
* @returns
*/
async function loginLocalUser({

@ -7,7 +7,7 @@ const varDatabaseDbHandler = require("../engine/utils/varDatabaseDbHandler");
* @param {object} param0
* @param {*} param0.existingUser
* @param {string[]} [param0.additionalFields]
* @param {import("../../types/database-schema.td").DSQL_DatabaseSchemaType} [param0.dbSchema]
* @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} [param0.dbSchema]
* @returns
*/
async function localReauthUser({ existingUser, additionalFields, dbSchema }) {
@ -24,7 +24,9 @@ async function localReauthUser({ existingUser, additionalFields, dbSchema }) {
*
* @description GRAB user
*/
const tableSchema = dbSchema?.tables.find((tb) => tb?.tableName === "users");
const tableSchema = dbSchema?.tables.find(
(tb) => tb?.tableName === "users"
);

let foundUser =
existingUser?.id && existingUser.id.toString().match(/./)
@ -51,7 +53,10 @@ async function localReauthUser({ existingUser, additionalFields, dbSchema }) {
////////////////////////////////////////
////////////////////////////////////////

let csrfKey = Math.random().toString(36).substring(2) + "-" + Math.random().toString(36).substring(2);
let csrfKey =
Math.random().toString(36).substring(2) +
"-" +
Math.random().toString(36).substring(2);

let userPayload = {
id: foundUser[0].id,
@ -72,7 +77,11 @@ async function localReauthUser({ existingUser, additionalFields, dbSchema }) {
date: Date.now(),
};

if (additionalFields && Array.isArray(additionalFields) && additionalFields.length > 0) {
if (
additionalFields &&
Array.isArray(additionalFields) &&
additionalFields.length > 0
) {
additionalFields.forEach((key) => {
// @ts-ignore
userPayload[key] = foundUser?.[0][key];

@ -10,7 +10,7 @@ const path = require("path");
*
* @param {object} param0
* @param {string} param0.email
* @param {import("../../types/database-schema.td").DSQL_DatabaseSchemaType} [param0.dbSchema]
* @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} [param0.dbSchema]
* @param {string} param0.email_login_field
* @param {string} [param0.mail_domain]
* @param {string} [param0.mail_username]

@ -36,9 +36,17 @@ const encryptionSalt = process.env.DSQL_ENCRYPTION_SALT || "";
* @param {string} params.clientId
* @param {string} params.clientSecret
* @param {object} [params.additionalFields]
* @param {import("../../../types/database-schema.td").DSQL_DatabaseSchemaType} params.dbSchema
* @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType} params.dbSchema
*/
async function localGithubAuth({ res, code, email, clientId, clientSecret, additionalFields, dbSchema }) {
async function localGithubAuth({
res,
code,
email,
clientId,
clientSecret,
additionalFields,
dbSchema,
}) {
try {
/**
* User auth
@ -52,7 +60,12 @@ async function localGithubAuth({ res, code, email, clientId, clientSecret, addit
};
}

if (typeof code !== "string" || typeof clientId !== "string" || typeof clientSecret !== "string" || typeof database !== "string") {
if (
typeof code !== "string" ||
typeof clientId !== "string" ||
typeof clientSecret !== "string" ||
typeof database !== "string"
) {
return {
success: false,
msg: "Wrong Parameters",
@ -81,7 +94,11 @@ async function localGithubAuth({ res, code, email, clientId, clientSecret, addit

const socialId = gitHubUser.name || gitHubUser.id || gitHubUser.login;
const targetName = gitHubUser.name || gitHubUser.login;
const nameArray = targetName?.match(/ /) ? targetName?.split(" ") : targetName?.match(/\-/) ? targetName?.split("-") : [targetName];
const nameArray = targetName?.match(/ /)
? targetName?.split(" ")
: targetName?.match(/\-/)
? targetName?.split("-")
: [targetName];

const payload = {
email: gitHubUser.email,

@ -44,7 +44,7 @@ const encryptionSalt = process.env.DSQL_ENCRYPTION_SALT || "";
* @param {string} params.clientId - Google client id
* @param {http.ServerResponse} params.response - HTTPS response object
* @param {object} [params.additionalFields] - Additional Fields to be added to the user object
* @param {import("../../../types/database-schema.td").DSQL_DatabaseSchemaType} [params.dbSchema] - Database Schema
* @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType} [params.dbSchema] - Database Schema
*
* @returns { Promise<FunctionReturn> }
*/

@ -74,13 +74,24 @@ const encryptionSalt = process.env.DSQL_ENCRYPTION_SALT || "";
* res: http.ServerResponse,
* supEmail?: string | null,
* additionalFields?: object,
* dbSchema: import("../../../../types/database-schema.td").DSQL_DatabaseSchemaType | undefined
* dbSchema: import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined
* }} params - function parameters inside an object
*
* @returns {Promise<FunctionReturn>} - Response object
*/
async function handleSocialDb({ social_id, email, social_platform, payload, res, supEmail, additionalFields, dbSchema }) {
const tableSchema = dbSchema?.tables.find((tb) => tb?.tableName === "users");
async function handleSocialDb({
social_id,
email,
social_platform,
payload,
res,
supEmail,
additionalFields,
dbSchema,
}) {
const tableSchema = dbSchema?.tables.find(
(tb) => tb?.tableName === "users"
);

try {
////////////////////////////////////////////////
@ -244,7 +255,10 @@ async function handleSocialDb({ social_id, email, social_platform, payload, res,
////////////////////////////////////////////////
////////////////////////////////////////////////
} else {
console.log("Social User Failed to insert in 'handleSocialDb.js' backend function =>", newUser);
console.log(
"Social User Failed to insert in 'handleSocialDb.js' backend function =>",
newUser
);

return {
success: false,
@ -258,7 +272,10 @@ async function handleSocialDb({ social_id, email, social_platform, payload, res,
////////////////////////////////////////////////
////////////////////////////////////////////////
} catch (/** @type {*} */ error) {
console.log("ERROR in 'handleSocialDb.js' backend function =>", error.message);
console.log(
"ERROR in 'handleSocialDb.js' backend function =>",
error.message
);

return {
success: false,
@ -311,13 +328,22 @@ async function handleSocialDb({ social_id, email, social_platform, payload, res,
* msg?: string
* }>}
*/
async function loginSocialUser({ user, social_platform, res, database, additionalFields }) {
async function loginSocialUser({
user,
social_platform,
res,
database,
additionalFields,
}) {
const foundUser = await varDatabaseDbHandler({
database: database ? database : "datasquirel",
queryString: `SELECT * FROM users WHERE email='${user.email}' AND social_id='${user.social_id}' AND social_platform='${social_platform}'`,
});

let csrfKey = Math.random().toString(36).substring(2) + "-" + Math.random().toString(36).substring(2);
let csrfKey =
Math.random().toString(36).substring(2) +
"-" +
Math.random().toString(36).substring(2);

if (!foundUser?.[0]) {
return {
@ -360,7 +386,10 @@ async function loginSocialUser({ user, social_platform, res, database, additiona
});

if (res?.setHeader) {
res.setHeader("Set-Cookie", [`datasquirelAuthKey=${encryptedPayload};samesite=strict;path=/;HttpOnly=true;Secure=true`, `csrf=${csrfKey};samesite=strict;path=/;HttpOnly=true`]);
res.setHeader("Set-Cookie", [
`datasquirelAuthKey=${encryptedPayload};samesite=strict;path=/;HttpOnly=true;Secure=true`,
`csrf=${csrfKey};samesite=strict;path=/;HttpOnly=true`,
]);
}

////////////////////////////////////////////////

@ -17,7 +17,7 @@ const updateDbEntry = require("../query/utils/updateDbEntry");
*
* @param {Object} params - Single object passed
* @param {*} params.payload - SQL Query
* @param {import("../../types/database-schema.td").DSQL_DatabaseSchemaType} params.dbSchema - Name of the table to query
* @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} params.dbSchema - Name of the table to query
*
* @returns { Promise<LocalPostReturn> } - Return Object
*/
@ -49,7 +49,13 @@ async function localUpdateUser({ payload, dbSchema }) {
return finalData;
})();

const tableSchema = dbSchema.tables.find((tb) => tb?.tableName === "users");
if (!dbSchema) {
throw new Error("Db Schema not found!");
}

const tableSchema = dbSchema.tables.find(
(tb) => tb?.tableName === "users"
);

const updateUser = await updateDbEntry({
dbContext: "Dsql User",

22
package-lock.json
generated
@ -18,7 +18,8 @@
"dsql-watch": "engine/dsql.js"
},
"devDependencies": {
"@types/mysql": "^2.15.21"
"@types/mysql": "^2.15.21",
"@types/node": "^22.7.5"
}
},
"node_modules/@types/mysql": {
@ -31,10 +32,14 @@
}
},
"node_modules/@types/node": {
"version": "20.4.10",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.4.10.tgz",
"integrity": "sha512-vwzFiiy8Rn6E0MtA13/Cxxgpan/N6UeNYR9oUu6kuJWxu6zCk98trcDp8CBhbtaeuq9SykCmXkFr2lWLoPcvLg==",
"dev": true
"version": "22.7.5",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.7.5.tgz",
"integrity": "sha512-jML7s2NAzMWc//QSJ1a3prpk78cOPchGvXJsC3C6R6PSMoooztvRVQEz89gmBTBY1SPMaqo5teB4uNHPdetShQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"undici-types": "~6.19.2"
}
},
"node_modules/bignumber.js": {
"version": "9.0.0",
@ -133,6 +138,13 @@
"safe-buffer": "~5.1.0"
}
},
"node_modules/undici-types": {
"version": "6.19.8",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz",
"integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==",
"dev": true,
"license": "MIT"
},
"node_modules/util-deprecate": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",

1
package-shared
Symbolic link
@ -0,0 +1 @@
/media/benoti/Ubuntu-Disk/Github/datasquirel/package-shared

@ -31,6 +31,7 @@
"nodemailer": "^6.9.14"
},
"devDependencies": {
"@types/mysql": "^2.15.21"
"@types/mysql": "^2.15.21",
"@types/node": "^22.7.5"
}
}

36
tsconfig.json
Normal file
@ -0,0 +1,36 @@
{
"compilerOptions": {
"target": "ES2015",
"module": "commonjs",
"paths": {
"@/*": ["./*"]
},
"maxNodeModuleJsDepth": 10,
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"strict": true,
"skipLibCheck": true,
"lib": ["dom", "dom.iterable", "esnext"],
"allowJs": true,
"noEmit": true,
"incremental": true,
"resolveJsonModule": true,
"isolatedModules": true,
"jsx": "preserve",
"moduleResolution": "node",
"plugins": [
{
"name": "next"
}
]
},
"include": [
"engine",
"functions",
"types",
"users",
"utils",
"package-shared/types"
],
"exclude": ["node_modules", "dump"]
}
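Editorial note, not part of the commit: the "paths" entry above maps the "@/" prefix to the repository root, which is what lets the JSDoc type imports rewritten throughout this commit (for example import("@/package-shared/types/database-schema.td")) resolve during type checking; with "noEmit": true the alias only affects checking, not runtime require() resolution. A minimal sketch of the pattern, assuming any JavaScript file inside the included directories:

    // Resolved through the "@/*": ["./*"] alias in tsconfig.json (type checking only).
    /** @type {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
    let dbSchema;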
@ -1,95 +0,0 @@
/**
* @typedef {string} DSQL_DatabaseFullName - Database full name(slug) including datasquirel data => "datasquirel_user_7_new_database"
*/

////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////

/**
* @typedef {object} DSQL_DatabaseSchemaType
* @property {string} dbName - Database Full name with spaces => "New Database"
* @property {string} dbSlug - Database Slug => "new_database"
* @property {string} dbFullName - Database full name(slug) including datasquirel data => "datasquirel_user_7_new_database"
* @property {string} [dbDescription] - Database brief description
* @property {string} [dbImage] - Database image - Defaults to "/images/default.png"
* @property {DSQL_TableSchemaType[]} tables - List of database tables
* @property {{ dbFullName: string }[]} [childrenDatabases] - List of children databases for current database which is parent
* @property {boolean} [childDatabase] - If current database is a child of a different parent database
* @property {string} [childDatabaseDbFullName] - Parent database full name => "datasquirel_user_7_new_database"
*/

////////////////////////////////////////

/**
* @typedef {object} DSQL_TableSchemaType
* @property {string} tableName - Table slug (blog_posts)
* @property {string} tableFullName - Table full name with spaces => "Blog Posts"
* @property {string} [tableDescription] - Brief description of table
* @property {DSQL_FieldSchemaType[]} fields - List of table Fields
* @property {DSQL_IndexSchemaType[]} [indexes] - List of table indexes, if available
* @property {DSQL_ChildrenTablesType[]} [childrenTables] - List of children tables
* @property {boolean} [childTable] -If current table is a child clone
* @property {string} [childTableName] - Table slug of parent table => "blog_posts"
* @property {string} [childTableDbFullName] - Database full name(slug) including datasquirel data => "datasquirel_user_7_new_database"
* @property {string} [tableNameOld] - Old table name, incase of renaming table
*/

/**
* @typedef {object} DSQL_ChildrenTablesType
* @property {string} dbNameFull - Database full name(slug) including datasquirel data => "datasquirel_user_7_new_database"
* @property {string} tableName - Table slug => "blog_posts"
*/

////////////////////////////////////////

/**
* @typedef {object} DSQL_FieldSchemaType
* @property {string} fieldName - Field Name(slug) => "long_description"
* @property {string} [originName] - Field origin name(optional)
* @property {boolean} [updatedField] - Has this field been renamed?
* @property {string} dataType - Field Data type => "BIGIN" | "LONGTEXT" | "VARCHAR(***)" | ...
* @property {boolean} [nullValue] - Is this a null value or not?
* @property {boolean} [notNullValue] - Is this NOT a null value?
* @property {boolean} [primaryKey] - Is this the primary key for table?
* @property {boolean} [encrypted] - Is this field value encrypted?
* @property {boolean} [autoIncrement] - Does this table primary key increment automatically?
* @property {string|number} [defaultValue] - Value of field by default
* @property {string} [defaultValueLiteral] - SQL key word which generates value automatically => "CURRENT_TIMESTAMP"
* @property {DSQL_ForeignKeyType} [foreignKey] - Field foreign key reference object
* @property {boolean} [richText] - Rich text field
* @property {string | RegExp} [pattern] - Field pattern for validation. Can be a string or a regular expression. Example: "^[a-zA-Z0-9_]*$"
* @property {string} [patternFlags] - Field pattern flags for validation. Example: "i"
*/

/**
* @typedef {object} DSQL_ForeignKeyType
* @property {string} foreignKeyName - Unique Name of foreign key
* @property {string} destinationTableName - Reference table name(slug) => "blog_posts"
* @property {string} destinationTableColumnName - Reference column name(slug) => "id"
* @property {string} destinationTableColumnType - Reference table field type => "BIGINT" | "VARCHAR(***)" | ...
* @property {boolean} [cascadeDelete] - Does the reference table entry delete when this key is deleted?
* @property {boolean} [cascadeUpdate] - Does the reference table entry update when this key is updated?
*/

////////////////////////////////////////

/**
* @typedef {object} DSQL_IndexSchemaType
* @property {string} indexName - Unique Name of index => "blog_text_index"
* @property {string} indexType - "regular" or "fullText"
* @property {DSQL_IndexTableFieldType[]} indexTableFields - List of Index table fields
* @property {string} [alias] - List of Index table fields
*/

/**
* @typedef {object} DSQL_IndexTableFieldType
* @property {string} value - Table Field Name
* @property {string} dataType - Table Field data type "VARCHAR(***)" | "BIGINT" | ...
*/

////////////////////////////////////////

exports.DSQL_TableSchemaType = DSQL_TableSchemaType;
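Editorial sketch, not taken from the commit: a minimal object matching the DSQL_DatabaseSchemaType shape documented above. Every value is illustrative, with names borrowed from the examples in the comments:

    /** @type {DSQL_DatabaseSchemaType} */
    const exampleSchema = {
        dbName: "New Database",
        dbSlug: "new_database",
        dbFullName: "datasquirel_user_7_new_database",
        tables: [
            {
                tableName: "blog_posts",
                tableFullName: "Blog Posts",
                fields: [
                    // Each field needs at least fieldName and dataType.
                    { fieldName: "id", dataType: "BIGINT", primaryKey: true, autoIncrement: true },
                    { fieldName: "title", dataType: "VARCHAR(255)", notNullValue: true },
                ],
                indexes: [
                    {
                        indexName: "blog_text_index",
                        indexType: "fullText",
                        indexTableFields: [{ value: "title", dataType: "VARCHAR(255)" }],
                    },
                ],
            },
        ],
    };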
@ -1,89 +0,0 @@
/**
* @typedef {string} DSQL_DatabaseFullName - Database full name(slug) including datasquirel data => "datasquirel_user_7_new_database"
*/

////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////

/**
* @typedef {object} DSQL_DatabaseSchemaType
* @property {string} dbName - Database Full name with spaces => "New Database"
* @property {string} dbSlug - Database Slug => "new_database"
* @property {string} dbFullName - Database full name(slug) including datasquirel data => "datasquirel_user_7_new_database"
* @property {string} [dbDescription] - Database brief description
* @property {string} [dbImage] - Database image - Defaults to "/images/default.png"
* @property {DSQL_TableSchemaType[]} tables - List of database tables
* @property {{ dbFullName: string }[]} [childrenDatabases] - List of children databases for current database which is parent
* @property {boolean} [childDatabase] - If current database is a child of a different parent database
* @property {string} [childDatabaseDbFullName] - Parent database full name => "datasquirel_user_7_new_database"
*/

////////////////////////////////////////

/**
* @typedef {object} DSQL_TableSchemaType
* @property {string} tableName - Table slug (blog_posts)
* @property {string} tableFullName - Table full name with spaces => "Blog Posts"
* @property {string} [tableDescription] - Brief description of table
* @property {DSQL_FieldSchemaType[]} fields - List of table Fields
* @property {DSQL_IndexSchemaType[]} [indexes] - List of table indexes, if available
* @property {DSQL_ChildrenTablesType[]} childrenTables - List of children tables
* @property {boolean} [childTable] -If current table is a child clone
* @property {string} [childTableName] - Table slug of parent table => "blog_posts"
* @property {string} [childTableDbFullName] - Database full name(slug) including datasquirel data => "datasquirel_user_7_new_database"
* @property {string} [tableNameOld] - Old table name, incase of renaming table
*/

/**
* @typedef {object} DSQL_ChildrenTablesType
* @property {string} dbNameFull - Database full name(slug) including datasquirel data => "datasquirel_user_7_new_database"
* @property {string} tableName - Table slug => "blog_posts"
*/

////////////////////////////////////////

/**
* @typedef {object} DSQL_FieldSchemaType
* @property {string} fieldName - Field Name(slug) => "long_description"
* @property {string} [originName] - Field origin name(optional)
* @property {boolean} [updatedField] - Has this field been renamed?
* @property {string} dataType - Field Data type => "BIGIN" | "LONGTEXT" | "VARCHAR(***)" | ...
* @property {boolean} [nullValue] - Is this a null value or not?
* @property {boolean} [notNullValue] - Is this NOT a null value?
* @property {boolean} [primaryKey] - Is this the primary key for table?
* @property {boolean} [encrypted] - Is this field value encrypted?
* @property {boolean} [autoIncrement] - Does this table primary key increment automatically?
* @property {string|number} [defaultValue] - Value of field by default
* @property {string} [defaultValueLiteral] - SQL key word which generates value automatically => "CURRENT_TIMESTAMP"
* @property {DSQL_ForeignKeyType} [foreignKey] - Field foreign key reference object
*/

/**
* @typedef {object} DSQL_ForeignKeyType
* @property {string} foreignKeyName - Unique Name of foreign key
* @property {string} destinationTableName - Reference table name(slug) => "blog_posts"
* @property {string} destinationTableColumnName - Reference column name(slug) => "id"
* @property {string} destinationTableColumnType - Reference table field type => "BIGINT" | "VARCHAR(***)" | ...
* @property {boolean} [cascadeDelete] - Does the reference table entry delete when this key is deleted?
* @property {boolean} [cascadeUpdate] - Does the reference table entry update when this key is updated?
*/

////////////////////////////////////////

/**
* @typedef {object} DSQL_IndexSchemaType
* @property {string} indexName - Unique Name of index => "blog_text_index"
* @property {string} indexType - "regular" or "fullText"
* @property {DSQL_IndexTableFieldType[]} indexTableFields - List of Index table fields
*/

/**
* @typedef {object} DSQL_IndexTableFieldType
* @property {string} value - Table Field Name
* @property {string} dataType - Table Field data type "VARCHAR(***)" | "BIGINT" | ...
*/

////////////////////////////////////////

@ -64,7 +64,7 @@ async function addUser({
DSQL_PASS?.match(/./) &&
DSQL_DB_NAME?.match(/./)
) {
/** @type {import("../types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
/** @type {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
let dbSchema;

try {

@ -83,7 +83,7 @@ async function getUser({ key, userId, database, fields }) {
DSQL_PASS?.match(/./) &&
DSQL_DB_NAME?.match(/./)
) {
/** @type {import("../types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
/** @type {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
let dbSchema;

try {

@ -65,7 +65,7 @@ async function loginUser({
? temp_code_field
? temp_code_field
: defaultTempLoginFieldName
: null;
: undefined;

/**
* Check Encryption Keys
@ -126,7 +126,7 @@ async function loginUser({
DSQL_PASS?.match(/./) &&
DSQL_DB_NAME?.match(/./)
) {
/** @type {import("../types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
/** @type {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
let dbSchema;

try {
@ -145,6 +145,7 @@ async function loginUser({
email_login,
email_login_code,
email_login_field: emailLoginTempCodeFieldName,
token,
});
}
} else {
@ -156,7 +157,8 @@ async function loginUser({
* @type {{ success: boolean, payload: import("../types/user.td").DATASQUIREL_LoggedInUser | null, userId?: number, msg?: string }}
*/
httpResponse = await new Promise((resolve, reject) => {
const reqPayload = JSON.stringify({
/** @type {PackageUserLoginRequestBody} */
const reqPayload = {
encryptionKey,
payload,
database,
@ -164,7 +166,10 @@ async function loginUser({
email_login,
email_login_code,
email_login_field: emailLoginTempCodeFieldName,
});
token,
};

const reqPayloadJSON = JSON.stringify(reqPayload);

const httpsRequest = (
scheme?.match(/^http$/i) ? http : https
@ -173,7 +178,7 @@ async function loginUser({
method: "POST",
headers: {
"Content-Type": "application/json",
"Content-Length": Buffer.from(reqPayload).length,
"Content-Length": Buffer.from(reqPayloadJSON).length,
Authorization: key,
},
port: localHostPort || 443,
@ -203,7 +208,7 @@ async function loginUser({
}
);

httpsRequest.write(reqPayload);
httpsRequest.write(reqPayloadJSON);
httpsRequest.end();
});
}

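Editorial aside, not part of the commit: the login-user change above serializes the request payload once and reuses that exact string for both the Content-Length header and the write() call, so the header always matches the bytes actually sent. A standalone sketch of the same idea, with made-up values:

    const reqPayload = { encryptionKey: "xxxx", database: "my_database" }; // illustrative only
    const reqPayloadJSON = JSON.stringify(reqPayload);

    const headers = {
        "Content-Type": "application/json",
        // Buffer.byteLength counts bytes, which is what Content-Length describes;
        // Buffer.from(reqPayloadJSON).length (as used in the diff) yields the same number.
        "Content-Length": Buffer.byteLength(reqPayloadJSON),
    };

    // httpsRequest.write(reqPayloadJSON);
    // httpsRequest.end();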
@ -103,7 +103,7 @@ async function reauthUser({
DSQL_PASS?.match(/./) &&
DSQL_DB_NAME?.match(/./)
) {
/** @type {import("../types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
/** @type {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
let dbSchema;

try {

@ -114,7 +114,7 @@ async function sendEmailCode({
DSQL_PASS?.match(/./) &&
DSQL_DB_NAME?.match(/./)
) {
/** @type {import("../types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
/** @type {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
let dbSchema;

try {

@ -154,7 +154,7 @@ async function githubAuth({
DSQL_PASS?.match(/./) &&
DSQL_DB_NAME?.match(/./)
) {
/** @type {import("../../types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
/** @type {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined | undefined} */
let dbSchema;

try {

@ -150,7 +150,7 @@ async function googleAuth({
DSQL_PASS?.match(/./) &&
DSQL_DB_NAME?.match(/./)
) {
/** @type {import("../../types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
/** @type {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined | undefined} */
let dbSchema;

try {

@ -57,7 +57,7 @@ async function updateUser({ key, payload, database }) {
DSQL_PASS?.match(/./) &&
DSQL_DB_NAME?.match(/./)
) {
/** @type {import("../types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
/** @type {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
let dbSchema;

try {

@ -18,7 +18,7 @@ const https = require("https");
/**
* @typedef {Object} GetSchemaReturn
* @property {boolean} success - Did the function run successfully?
* @property {import("../types/database-schema.td").DSQL_DatabaseSchemaType[] | import("../types/database-schema.td").DSQL_DatabaseSchemaType | null} payload - Response payload
* @property {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType[] | import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | null} payload - Response payload
*/

/**

@ -66,7 +66,7 @@ async function get({ key, db, query, queryValues, tableName }) {
DSQL_PASS?.match(/./) &&
DSQL_DB_NAME?.match(/./)
) {
/** @type {import("../types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
/** @type {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
let dbSchema;

try {

@ -78,7 +78,7 @@ async function post({ key, query, queryValues, database, tableName }) {
DSQL_PASS?.match(/./) &&
DSQL_DB_NAME?.match(/./)
) {
/** @type {import("../types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
/** @type {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
let dbSchema;

try {