Refactor types

Benjamin Toby 2024-10-14 07:49:01 +01:00
parent 11a4c1f4d9
commit d84b15c35c
46 changed files with 696 additions and 376 deletions

.gitignore

@@ -130,10 +130,7 @@ dist
 .yarn/install-state.gz
 .pnp.*
-# typescript
-tsconfig.json
-
 # others
 deprecated
 .tmp
 test/


@@ -18,7 +18,17 @@ if (!fs.existsSync(path.resolve(process.cwd(), ".env"))) {
     process.exit();
 }

-const { DSQL_HOST, DSQL_USER, DSQL_PASS, DSQL_DB_NAME, DSQL_KEY, DSQL_REF_DB_NAME, DSQL_FULL_SYNC, DSQL_ENCRYPTION_KEY, DSQL_ENCRYPTION_SALT } = process.env;
+const {
+    DSQL_HOST,
+    DSQL_USER,
+    DSQL_PASS,
+    DSQL_DB_NAME,
+    DSQL_KEY,
+    DSQL_REF_DB_NAME,
+    DSQL_FULL_SYNC,
+    DSQL_ENCRYPTION_KEY,
+    DSQL_ENCRYPTION_SALT,
+} = process.env;

 if (!DSQL_HOST?.match(/./)) {
     console.log("DSQL_HOST is required in your `.env` file");
@@ -38,6 +48,7 @@ if (!DSQL_PASS?.match(/./)) {
 const dbSchemaLocalFilePath = path.resolve(process.cwd(), "dsql.schema.json");

 async function run() {
+    /** @type {any} */
     let schemaData;

     if (DSQL_KEY && DSQL_REF_DB_NAME?.match(/./)) {
@@ -46,8 +57,13 @@ async function run() {
             database: DSQL_REF_DB_NAME || undefined,
         });

-        if (!dbSchemaDataResponse.payload || Array.isArray(dbSchemaDataResponse.payload)) {
-            console.log("DSQL_KEY+DSQL_REF_DB_NAME => Error in fetching DB schema");
+        if (
+            !dbSchemaDataResponse.payload ||
+            Array.isArray(dbSchemaDataResponse.payload)
+        ) {
+            console.log(
+                "DSQL_KEY+DSQL_REF_DB_NAME => Error in fetching DB schema"
+            );
             console.log(dbSchemaDataResponse);
             process.exit();
         }
@@ -62,7 +78,10 @@ async function run() {
             database: DSQL_REF_DB_NAME || undefined,
         });

-        if (!dbSchemaDataResponse.payload || !Array.isArray(dbSchemaDataResponse.payload)) {
+        if (
+            !dbSchemaDataResponse.payload ||
+            !Array.isArray(dbSchemaDataResponse.payload)
+        ) {
             console.log("DSQL_KEY => Error in fetching DB schema");
             console.log(dbSchemaDataResponse);
             process.exit();
@@ -75,9 +94,13 @@ async function run() {
         schemaData = fetchedDbSchemaObject;
     } else if (fs.existsSync(dbSchemaLocalFilePath)) {
-        schemaData = [JSON.parse(fs.readFileSync(dbSchemaLocalFilePath, "utf8"))];
+        schemaData = [
+            JSON.parse(fs.readFileSync(dbSchemaLocalFilePath, "utf8")),
+        ];
     } else {
-        console.log("No source for DB Schema. Please provide a local `dsql.schema.json` file, or provide `DSQL_KEY` and `DSQL_REF_DB_NAME` environment variables.");
+        console.log(
+            "No source for DB Schema. Please provide a local `dsql.schema.json` file, or provide `DSQL_KEY` and `DSQL_REF_DB_NAME` environment variables."
+        );
         process.exit();
     }
@@ -87,14 +110,22 @@ async function run() {
     }

     if (DSQL_FULL_SYNC?.match(/true/i)) {
-        fs.writeFileSync(dbSchemaLocalFilePath, JSON.stringify(schemaData[0], null, 4), "utf8");
+        fs.writeFileSync(
+            dbSchemaLocalFilePath,
+            JSON.stringify(schemaData[0], null, 4),
+            "utf8"
+        );
     }

-    console.log(` - ${colors.FgBlue}Info:${colors.Reset} Now generating and mapping databases ...`);
+    console.log(
+        ` - ${colors.FgBlue}Info:${colors.Reset} Now generating and mapping databases ...`
+    );
     // deepcode ignore reDOS: <please specify a reason of ignoring this>
     await createDbFromSchema(schemaData);

-    console.log(` - ${colors.FgGreen}Success:${colors.Reset} Databases created Successfully!`);
+    console.log(
+        ` - ${colors.FgGreen}Success:${colors.Reset} Databases created Successfully!`
+    );
 }

 // let timeout;
@@ -103,12 +134,16 @@ let interval;

 if (fs.existsSync(dbSchemaLocalFilePath) && !DSQL_KEY?.match(/....../)) {
     fs.watchFile(dbSchemaLocalFilePath, { interval: 1000 }, (curr, prev) => {
-        console.log(` - ${colors.FgBlue}Info:${colors.Reset} Syncing Databases Locally ...`);
+        console.log(
+            ` - ${colors.FgBlue}Info:${colors.Reset} Syncing Databases Locally ...`
+        );
         run();
     });
 } else if (DSQL_KEY?.match(/....../)) {
     interval = setInterval(() => {
-        console.log(` - ${colors.FgMagenta}Info:${colors.Reset} Syncing Databases from the cloud ...`);
+        console.log(
+            ` - ${colors.FgMagenta}Info:${colors.Reset} Syncing Databases from the cloud ...`
+        );
         run();
     }, 20000);
 }
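For orientation (not part of the diff): the sync script above reads its configuration from a `.env` file in the project root via dotenv. A minimal sketch with placeholder values might look like the following; which variables are strictly required depends on the checks in the script, and `DSQL_KEY`/`DSQL_REF_DB_NAME` only matter when the schema is pulled from the cloud instead of a local `dsql.schema.json`:

DSQL_HOST=localhost
DSQL_USER=example_user
DSQL_PASS=example-password
DSQL_DB_NAME=example_db
DSQL_ENCRYPTION_KEY=example-key
DSQL_ENCRYPTION_SALT=example-salt
# Optional: cloud schema source and full sync
DSQL_KEY=example-api-key
DSQL_REF_DB_NAME=example_ref_db
DSQL_FULL_SYNC=true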


@@ -9,10 +9,26 @@ require("dotenv").config({
     path: path.resolve(process.cwd(), ".env"),
 });

-const mysqlPath = process.platform?.match(/win/i) ? "'" + "C:\\Program Files\\MySQL\\MySQL Server 8.0\\bin\\mysql.exe" + "'" : "mysql";
-const mysqlDumpPath = process.platform?.match(/win/i) ? "'" + "C:\\Program Files\\MySQL\\MySQL Server 8.0\\bin\\mysqldump.exe" + "'" : "mysqldump";
+const mysqlPath = process.platform?.match(/win/i)
+    ? "'" + "C:\\Program Files\\MySQL\\MySQL Server 8.0\\bin\\mysql.exe" + "'"
+    : "mysql";
+const mysqlDumpPath = process.platform?.match(/win/i)
+    ? "'" +
+      "C:\\Program Files\\MySQL\\MySQL Server 8.0\\bin\\mysqldump.exe" +
+      "'"
+    : "mysqldump";

-const { DSQL_HOST, DSQL_USER, DSQL_PASS, DSQL_DB_NAME, DSQL_KEY, DSQL_REF_DB_NAME, DSQL_FULL_SYNC, DSQL_ENCRYPTION_KEY, DSQL_ENCRYPTION_SALT } = process.env;
+const {
+    DSQL_HOST,
+    DSQL_USER,
+    DSQL_PASS,
+    DSQL_DB_NAME,
+    DSQL_KEY,
+    DSQL_REF_DB_NAME,
+    DSQL_FULL_SYNC,
+    DSQL_ENCRYPTION_KEY,
+    DSQL_ENCRYPTION_SALT,
+} = process.env;

 const dbName = DSQL_DB_NAME || "";
 const dumpFilePathArg = process.argv.indexOf("--file");
@@ -39,9 +55,12 @@ try {
         cwd: process.cwd(),
     };

-    if (process.platform.match(/win/i)) execSyncOptions.shell = "bash.exe";
+    // if (process.platform.match(/win/i)) execSyncOptions.shell = "bash.exe";

-    const dump = execSync(`${mysqlPath} -u ${DSQL_USER} -p${DSQL_PASS} ${dbName} < ${dumpFilePath}`, execSyncOptions);
+    const dump = execSync(
+        `${mysqlPath} -u ${DSQL_USER} -p${DSQL_PASS} ${dbName} < ${dumpFilePath}`,
+        execSyncOptions
+    );

     console.log("Dumped successfully", dump.toString());


@@ -22,7 +22,7 @@ const updateApiSchemaFromLocalDb = require("../query/update-api-schema-from-loca
 * ==============================================================================
 *
 * @param {object} params - Single object passed
- * @param {import("../../types/database-schema.td").DSQL_DatabaseSchemaType} params.dbSchema - Database Schema Object
+ * @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} params.dbSchema - Database Schema Object
 *
 * @returns {Promise<*>} new user auth object payload
 */
@@ -33,7 +33,7 @@ module.exports = async function addUsersTableToDb({ dbSchema }) {
     * @description Initialize
     */
    const database = process.env.DSQL_DB_NAME || "";

-    /** @type {import("../../types/database-schema.td").DSQL_TableSchemaType} */
+    /** @type {import("@/package-shared/types/database-schema.td").DSQL_TableSchemaType} */
    const userPreset = require("./data/presets/users.json");

    try {
@@ -42,15 +42,22 @@ module.exports = async function addUsersTableToDb({ dbSchema }) {
         *
         * @description Fetch user from db
         */
-        const userSchemaMainFilePath = path.resolve(process.cwd(), "dsql.schema.json");
+        const userSchemaMainFilePath = path.resolve(
+            process.cwd(),
+            "dsql.schema.json"
+        );

        let targetDatabase = dbSchema;

-        let existingTableIndex = targetDatabase.tables.findIndex((table, index) => {
-            if (table.tableName === "users") {
-                existingTableIndex = index;
-                return true;
-            }
-        });
+        if (!targetDatabase) throw new Error("Target database not found!");
+
+        let existingTableIndex = targetDatabase.tables.findIndex(
+            (table, index) => {
+                if (table.tableName === "users") {
+                    existingTableIndex = index;
+                    return true;
+                }
+            }
+        );

        if (existingTableIndex >= 0) {
            targetDatabase.tables[existingTableIndex] = userPreset;
@@ -58,7 +65,11 @@ module.exports = async function addUsersTableToDb({ dbSchema }) {
            targetDatabase.tables.push(userPreset);
        }

-        fs.writeFileSync(`${userSchemaMainFilePath}`, JSON.stringify(dbSchema, null, 4), "utf8");
+        fs.writeFileSync(
+            `${userSchemaMainFilePath}`,
+            JSON.stringify(dbSchema, null, 4),
+            "utf8"
+        );

        ////////////////////////////////////////


@@ -27,7 +27,7 @@ const updateTable = require("./utils/updateTable");
 * runs the "dsql create" command. `NOTE`: there must be a "dsql.schema.json" file
 * in the root of the project for this function to work
 *
- * @param {import("../../types/database-schema.td").DSQL_DatabaseSchemaType[]} dbSchema - An array of database schema objects
+ * @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} dbSchema - An array of database schema objects
 */
 async function createDbFromSchema(dbSchema) {
     try {
@@ -42,8 +42,13 @@ async function createDbFromSchema(dbSchema) {
        }

        for (let i = 0; i < dbSchema.length; i++) {
-            /** @type {import("../../types/database-schema.td").DSQL_DatabaseSchemaType} */
+            /** @type {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
            const database = dbSchema[i];

+            if (!database) {
+                continue;
+            }
+
            const { dbFullName, tables } = database;

            ////////////////////////////////////////
@@ -51,12 +56,16 @@ async function createDbFromSchema(dbSchema) {
            ////////////////////////////////////////

            /** @type {{ dbFullName: string }[] | null} */
-            const dbCheck = await noDatabaseDbHandler({ query: `SELECT SCHEMA_NAME AS dbFullName FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = '${dbFullName}'` });
+            const dbCheck = await noDatabaseDbHandler({
+                query: `SELECT SCHEMA_NAME AS dbFullName FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = '${dbFullName}'`,
+            });

            if (dbCheck && dbCheck[0]?.dbFullName) {
                // Database Exists
            } else {
-                const newDatabase = await noDatabaseDbHandler({ query: `CREATE DATABASE IF NOT EXISTS \`${dbFullName}\` CHARACTER SET utf8mb4 COLLATE utf8mb4_bin` });
+                const newDatabase = await noDatabaseDbHandler({
+                    query: `CREATE DATABASE IF NOT EXISTS \`${dbFullName}\` CHARACTER SET utf8mb4 COLLATE utf8mb4_bin`,
+                });
            }

            ////////////////////////////////////////
@@ -68,7 +77,9 @@ async function createDbFromSchema(dbSchema) {
             * @type {{ TABLE_NAME: string }[] | null}
             * @description Select All tables in target database
             */
-            const allTables = await noDatabaseDbHandler({ query: `SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA='${dbFullName}'` });
+            const allTables = await noDatabaseDbHandler({
+                query: `SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA='${dbFullName}'`,
+            });

            let tableDropped;
@@ -85,8 +96,16 @@ async function createDbFromSchema(dbSchema) {
                 * in the user schema JSON. If it's not, the table is either deleted
                 * or the table name has been recently changed
                 */
-                if (!tables.filter((_table) => _table.tableName === TABLE_NAME)[0]) {
-                    const oldTableFilteredArray = tables.filter((_table) => _table.tableNameOld && _table.tableNameOld === TABLE_NAME);
+                if (
+                    !tables.filter(
+                        (_table) => _table.tableName === TABLE_NAME
+                    )[0]
+                ) {
+                    const oldTableFilteredArray = tables.filter(
+                        (_table) =>
+                            _table.tableNameOld &&
+                            _table.tableNameOld === TABLE_NAME
+                    );

                    /**
                     * @description Check if this table has been recently renamed. Rename
@@ -159,7 +178,11 @@ async function createDbFromSchema(dbSchema) {
                    });

                    if (table.childrenTables && table.childrenTables[0]) {
-                        for (let ch = 0; ch < table.childrenTables.length; ch++) {
+                        for (
+                            let ch = 0;
+                            ch < table.childrenTables.length;
+                            ch++
+                        ) {
                            const childTable = table.childrenTables[ch];

                            const updateExistingChildTable = await updateTable({
@@ -199,7 +222,12 @@ async function createDbFromSchema(dbSchema) {
                     */
                    if (indexes && indexes[0]) {
                        for (let g = 0; g < indexes.length; g++) {
-                            const { indexType, indexName, indexTableFields, alias } = indexes[g];
+                            const {
+                                indexType,
+                                indexName,
+                                indexTableFields,
+                                alias,
+                            } = indexes[g];

                            if (!alias?.match(/./)) continue;
@@ -207,27 +235,42 @@ async function createDbFromSchema(dbSchema) {
                             * @type {any[] | null}
                             * @description All indexes from MYSQL db
                             */
-                            const allExistingIndexes = await varDatabaseDbHandler({
-                                queryString: `SHOW INDEXES FROM \`${tableName}\``,
-                                database: dbFullName,
-                            });
+                            const allExistingIndexes =
+                                await varDatabaseDbHandler({
+                                    queryString: `SHOW INDEXES FROM \`${tableName}\``,
+                                    database: dbFullName,
+                                });

                            /**
                             * @description Check for existing Index in MYSQL db
                             */
                            try {
-                                const existingKeyInDb = allExistingIndexes ? allExistingIndexes.filter((indexObject) => indexObject.Key_name === alias) : null;
-                                if (!existingKeyInDb?.[0]) throw new Error("This Index Does not Exist");
+                                const existingKeyInDb = allExistingIndexes
+                                    ? allExistingIndexes.filter(
+                                          (indexObject) =>
+                                              indexObject.Key_name === alias
+                                      )
+                                    : null;
+                                if (!existingKeyInDb?.[0])
+                                    throw new Error(
+                                        "This Index Does not Exist"
+                                    );
                            } catch (error) {
                                /**
                                 * @description Create new index if determined that it
                                 * doesn't exist in MYSQL db
                                 */
                                await varDatabaseDbHandler({
-                                    queryString: `CREATE${indexType.match(/fullText/i) ? " FULLTEXT" : ""} INDEX \`${alias}\` ON ${tableName}(${indexTableFields
+                                    queryString: `CREATE${
+                                        indexType.match(/fullText/i)
+                                            ? " FULLTEXT"
+                                            : ""
+                                    } INDEX \`${alias}\` ON ${tableName}(${indexTableFields
                                        .map((nm) => nm.value)
                                        .map((nm) => `\`${nm}\``)
-                                        .join(",")}) COMMENT 'schema_index'`,
+                                        .join(
+                                            ","
+                                        )}) COMMENT 'schema_index'`,
                                    database: dbFullName,
                                });
                            }


@@ -10,7 +10,23 @@ const supplementTable = require("./supplementTable");
 /** ****************************************************************************** */
 /** ****************************************************************************** */

-module.exports = async function createTable({ dbFullName, tableName, tableInfoArray, varDatabaseDbHandler, dbSchema }) {
+/**
+ *
+ * @param {object} param0
+ * @param {string} param0.dbFullName
+ * @param {string} param0.tableName
+ * @param {any[]} param0.tableInfoArray
+ * @param {(params: import("./varDatabaseDbHandler").VarDbHandlerParam)=>any} param0.varDatabaseDbHandler
+ * @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType} [param0.dbSchema]
+ * @returns
+ */
+module.exports = async function createTable({
+    dbFullName,
+    tableName,
+    tableInfoArray,
+    varDatabaseDbHandler,
+    dbSchema,
+}) {
    /**
     * Format tableInfoArray
     *
@@ -36,7 +52,7 @@ module.exports = async function createTable({ dbFullName, tableName, tableInfoAr
    for (let i = 0; i < finalTable.length; i++) {
        const column = finalTable[i];

-        const { fieldName, dataType, nullValue, primaryKey, autoIncrement, defaultValue, defaultValueLiteral, foreignKey, updatedField } = column;
+        const { fieldName, foreignKey } = column;

        if (foreignKey) {
            foreignKeys.push({
@@ -45,7 +61,10 @@ module.exports = async function createTable({ dbFullName, tableName, tableInfoAr
            });
        }

-        let { fieldEntryText, newPrimaryKeySet } = generateColumnDescription({ columnData: column, primaryKeySet: primaryKeySet });
+        let { fieldEntryText, newPrimaryKeySet } = generateColumnDescription({
+            columnData: column,
+            primaryKeySet: primaryKeySet,
+        });

        primaryKeySet = newPrimaryKeySet;
@@ -74,20 +93,33 @@ module.exports = async function createTable({ dbFullName, tableName, tableInfoAr
    if (foreignKeys[0]) {
        foreignKeys.forEach((foreighKey, index, array) => {
-            const { fieldName, destinationTableName, destinationTableColumnName, cascadeDelete, cascadeUpdate, foreignKeyName } = foreighKey;
+            const {
+                fieldName,
+                destinationTableName,
+                destinationTableColumnName,
+                cascadeDelete,
+                cascadeUpdate,
+                foreignKeyName,
+            } = foreighKey;

            const comma = (() => {
                if (index === foreignKeys.length - 1) return "";
                return ",";
            })();

-            createTableQueryArray.push(` CONSTRAINT \`${foreignKeyName}\` FOREIGN KEY (\`${fieldName}\`) REFERENCES \`${destinationTableName}\`(${destinationTableColumnName})${cascadeDelete ? " ON DELETE CASCADE" : ""}${cascadeUpdate ? " ON UPDATE CASCADE" : ""}${comma}`);
+            createTableQueryArray.push(
+                ` CONSTRAINT \`${foreignKeyName}\` FOREIGN KEY (\`${fieldName}\`) REFERENCES \`${destinationTableName}\`(${destinationTableColumnName})${
+                    cascadeDelete ? " ON DELETE CASCADE" : ""
+                }${cascadeUpdate ? " ON UPDATE CASCADE" : ""}${comma}`
+            );
        });
    }

    ////////////////////////////////////////

-    createTableQueryArray.push(`) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin;`);
+    createTableQueryArray.push(
+        `) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin;`
+    );

    const createTableQuery = createTableQueryArray.join("\n");
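As a rough orientation for the newly documented signature (not part of the commit), createTable might be exercised like the sketch below. The require path for the sketch itself, the database name, and the field objects are illustrative placeholders.

// Hypothetical usage sketch of the createTable helper documented above.
const createTable = require("./createTable");
const varDatabaseDbHandler = require("./varDatabaseDbHandler");

async function exampleCreateTable() {
    await createTable({
        dbFullName: "datasquirel_user_1_blog", // placeholder database name
        tableName: "posts", // placeholder table name
        tableInfoArray: [
            { fieldName: "title", dataType: "VARCHAR(250)", notNullValue: true },
            { fieldName: "body", dataType: "TEXT" },
        ],
        varDatabaseDbHandler,
        // dbSchema is optional per the new JSDoc
    });
}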


@@ -65,7 +65,7 @@ const connection = mysql.createConnection({
 * @param {object} params - Single Param object containing params
 * @param {string} params.query - Query String
 * @param {(string | number)[]} [params.values] - Values
- * @param {import("../../../types/database-schema.td").DSQL_DatabaseSchemaType} [params.dbSchema] - Database Schema
+ * @param {import("../../../package-shared/types/database-schema.td").DSQL_DatabaseSchemaType} [params.dbSchema] - Database Schema
 * @param {string} [params.database] - Target Database
 * @param {string} [params.tableName] - Target Table Name
 *


@@ -11,18 +11,30 @@
 * Generate SQL text for Field
 * ==============================================================================
 * @param {object} params - Single object params
- * @param {import("../../../types/database-schema.td").DSQL_FieldSchemaType} params.columnData - Field object
+ * @param {import("@/package-shared/types/database-schema.td").DSQL_FieldSchemaType} params.columnData - Field object
 * @param {boolean} [params.primaryKeySet] - Table Name(slug)
 *
 * @returns {{fieldEntryText: string, newPrimaryKeySet: boolean}}
 */
-module.exports = function generateColumnDescription({ columnData, primaryKeySet }) {
+module.exports = function generateColumnDescription({
+    columnData,
+    primaryKeySet,
+}) {
    /**
     * Format tableInfoArray
     *
     * @description Format tableInfoArray
     */
-    const { fieldName, dataType, nullValue, primaryKey, autoIncrement, defaultValue, defaultValueLiteral, notNullValue } = columnData;
+    const {
+        fieldName,
+        dataType,
+        nullValue,
+        primaryKey,
+        autoIncrement,
+        defaultValue,
+        defaultValueLiteral,
+        notNullValue,
+    } = columnData;

    let fieldEntryText = "";
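For context (not part of the diff), the documented return shape can be used roughly like this; the column object below is a placeholder, not taken from the commit.

// Hypothetical sketch: building one column's SQL snippet with generateColumnDescription.
const generateColumnDescription = require("./generateColumnDescription");

const { fieldEntryText, newPrimaryKeySet } = generateColumnDescription({
    columnData: {
        fieldName: "slug", // placeholder field
        dataType: "VARCHAR(100)",
        notNullValue: true,
    },
    primaryKeySet: false,
});

console.log(fieldEntryText, newPrimaryKeySet);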


@@ -27,7 +27,7 @@ const connection = mysql.createConnection({
 * @param {string} params.query - Query String
 * @param {string[]} [params.values] - Values
 *
- * @returns {Promise<object[] | null>}
+ * @returns {Promise<any[] | null>}
 */
 module.exports = async function noDatabaseDbHandler({ query, values }) {
    /**


@@ -14,10 +14,13 @@ const defaultFieldsRegexp = require("./defaultFieldsRegexp");
 * @param {object} params - Single object params
 * @param {*[]} params.unparsedResults - Array of data objects containing Fields(keys)
 * and corresponding values of the fields(values)
- * @param {import("../../../types/database-schema.td").DSQL_TableSchemaType} [params.tableSchema] - Table schema
+ * @param {import("../../../package-shared/types/database-schema.td").DSQL_TableSchemaType} [params.tableSchema] - Table schema
 * @returns {Promise<object[]|null>}
 */
-module.exports = async function parseDbResults({ unparsedResults, tableSchema }) {
+module.exports = async function parseDbResults({
+    unparsedResults,
+    tableSchema,
+}) {
    /**
     * Declare variables
     *
@@ -56,7 +59,11 @@ module.exports = async function parseDbResults({ unparsedResults, tableSchema })
            }

            if (resultFieldSchema?.encrypted && value?.match(/./)) {
-                result[resultFieldName] = decrypt({ encryptedString: value, encryptionKey, encryptionSalt });
+                result[resultFieldName] = decrypt({
+                    encryptedString: value,
+                    encryptionKey,
+                    encryptionSalt,
+                });
            }
        }


@@ -1,10 +1,17 @@
 // @ts-check

+/**
+ *
+ * @param {string} text
+ * @returns
+ */
 module.exports = function slugToCamelTitle(text) {
     if (text) {
         let addArray = text.split("-").filter((item) => item !== "");
         let camelArray = addArray.map((item) => {
-            return item.substr(0, 1).toUpperCase() + item.substr(1).toLowerCase();
+            return (
+                item.substr(0, 1).toUpperCase() + item.substr(1).toLowerCase()
+            );
         });
         let parsedAddress = camelArray.join(" ");
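A quick usage note (not part of the diff): given the implementation above, the helper title-cases a dash-separated slug.

// Example: slugToCamelTitle turns a slug into a spaced title.
const slugToCamelTitle = require("./slugToCamelTitle");

console.log(slugToCamelTitle("user-profile-settings")); // "User Profile Settings"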


@@ -7,6 +7,12 @@
 /** ****************************************************************************** */
 /** ****************************************************************************** */

+/**
+ *
+ * @param {object} param0
+ * @param {import("@/package-shared/types/database-schema.td").DSQL_FieldSchemaType[]} param0.tableInfoArray
+ * @returns
+ */
 module.exports = function supplementTable({ tableInfoArray }) {
    /**
     * Format tableInfoArray
@@ -18,12 +24,16 @@ module.exports = function supplementTable({ tableInfoArray }) {
    ////////////////////////////////////////

-    let primaryKeyExists = finalTableArray.filter((_field) => _field.primaryKey);
+    let primaryKeyExists = finalTableArray.filter(
+        (_field) => _field.primaryKey
+    );

    ////////////////////////////////////////

    defaultFields.forEach((field) => {
-        let fieldExists = finalTableArray.filter((_field) => _field.fieldName === field.fieldName);
+        let fieldExists = finalTableArray.filter(
+            (_field) => _field.fieldName === field.fieldName
+        );

        if (fieldExists && fieldExists[0]) {
            return;


@@ -10,7 +10,8 @@
 const fs = require("fs");
 const path = require("path");

-const defaultFieldsRegexp = /^id$|^date_created$|^date_created_code$|^date_created_timestamp$|^date_updated$|^date_updated_code$|^date_updated_timestamp$/;
+const defaultFieldsRegexp =
+    /^id$|^date_created$|^date_created_code$|^date_created_timestamp$|^date_updated$|^date_updated_code$|^date_updated_timestamp$/;

 const generateColumnDescription = require("./generateColumnDescription");
 const varDatabaseDbHandler = require("./varDatabaseDbHandler");
@@ -30,15 +31,23 @@ const schemaPath = path.resolve(process.cwd(), "dsql.schema.json");
 * @param {object} params - Single object params
 * @param {string} params.dbFullName - Database full name => "datasquirel_user_4394_db_name"
 * @param {string} params.tableName - Table Name(slug)
- * @param {import("../../../types/database-schema.td").DSQL_FieldSchemaType[]} params.tableInfoArray - Table Info Array
- * @param {import("../../../types/database-schema.td").DSQL_DatabaseSchemaType[]} params.dbSchema - Single post
- * @param {import("../../../types/database-schema.td").DSQL_IndexSchemaType[]} [params.tableIndexes] - Table Indexes
+ * @param {import("@/package-shared/types/database-schema.td").DSQL_FieldSchemaType[]} params.tableInfoArray - Table Info Array
+ * @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType[]} params.dbSchema - Single post
+ * @param {import("@/package-shared/types/database-schema.td").DSQL_IndexSchemaType[]} [params.tableIndexes] - Table Indexes
 * @param {boolean} [params.clone] - Is this a newly cloned table?
 * @param {number} [params.tableIndex] - The number index of the table in the dbSchema array
 *
 * @returns {Promise<string|object[]|null>}
 */
-module.exports = async function updateTable({ dbFullName, tableName, tableInfoArray, dbSchema, tableIndexes, clone, tableIndex }) {
+module.exports = async function updateTable({
+    dbFullName,
+    tableName,
+    tableInfoArray,
+    dbSchema,
+    tableIndexes,
+    clone,
+    tableIndex,
+}) {
    /**
     * Initialize
     * ==========================================
@@ -79,7 +88,7 @@ module.exports = async function updateTable({ dbFullName, tableName, tableInfoAr
    ////////////////////////////////////////

    /**
-     * @type {*}
+     * @type {DSQL_MYSQL_SHOW_INDEXES_Type[]}
     * @description All indexes from MYSQL db
     */
    const allExistingIndexes = await varDatabaseDbHandler({
@@ -88,7 +97,7 @@ module.exports = async function updateTable({ dbFullName, tableName, tableInfoAr
    });

    /**
-     * @type {*}
+     * @type {DSQL_MYSQL_SHOW_COLUMNS_Type[]}
     * @description All columns from MYSQL db
     */
    const allExistingColumns = await varDatabaseDbHandler({
@@ -121,7 +130,11 @@ module.exports = async function updateTable({ dbFullName, tableName, tableInfoAr
             * @description This finds out whether the fieldName corresponds with the MSQL Field name
             * if the fildName doesn't match any MYSQL Field name, the field is deleted.
             */
-            let existingEntry = upToDateTableFieldsArray.filter((column) => column.fieldName === Field || column.originName === Field);
+            let existingEntry = upToDateTableFieldsArray.filter(
+                (column) =>
+                    column.fieldName === Field ||
+                    column.originName === Field
+            );

            if (existingEntry && existingEntry[0]) {
                /**
@@ -135,7 +148,9 @@ module.exports = async function updateTable({ dbFullName, tableName, tableInfoAr
                    database: dbFullName,
                });

-                console.log(`Column Renamed from "${existingEntry[0].originName}" to "${existingEntry[0].fieldName}"`);
+                console.log(
+                    `Column Renamed from "${existingEntry[0].originName}" to "${existingEntry[0].fieldName}"`
+                );

                /**
                 * Update Db Schema
@@ -145,21 +160,47 @@ module.exports = async function updateTable({ dbFullName, tableName, tableInfoAr
                try {
                    const userSchemaData = dbSchema;

-                    const targetDbIndex = userSchemaData.findIndex((db) => db.dbFullName === dbFullName);
-                    const targetTableIndex = userSchemaData[targetDbIndex].tables.findIndex((table) => table.tableName === tableName);
-                    const targetFieldIndex = userSchemaData[targetDbIndex].tables[targetTableIndex].fields.findIndex((field) => field.fieldName === existingEntry[0].fieldName);
+                    const targetDbIndex = userSchemaData.findIndex(
+                        (db) => db.dbFullName === dbFullName
+                    );
+                    const targetTableIndex = userSchemaData[
+                        targetDbIndex
+                    ].tables.findIndex(
+                        (table) => table.tableName === tableName
+                    );
+                    const targetFieldIndex = userSchemaData[
+                        targetDbIndex
+                    ].tables[targetTableIndex].fields.findIndex(
+                        (field) =>
+                            field.fieldName ===
+                            existingEntry[0].fieldName
+                    );

-                    delete userSchemaData[targetDbIndex].tables[targetTableIndex].fields[targetFieldIndex]["originName"];
-                    delete userSchemaData[targetDbIndex].tables[targetTableIndex].fields[targetFieldIndex]["updatedField"];
+                    delete userSchemaData[targetDbIndex].tables[
+                        targetTableIndex
+                    ].fields[targetFieldIndex]["originName"];
+                    delete userSchemaData[targetDbIndex].tables[
+                        targetTableIndex
+                    ].fields[targetFieldIndex]["updatedField"];

                    /**
                     * @description Set New Table Fields Array
                     */
-                    upToDateTableFieldsArray = userSchemaData[targetDbIndex].tables[targetTableIndex].fields;
+                    upToDateTableFieldsArray =
+                        userSchemaData[targetDbIndex].tables[
+                            targetTableIndex
+                        ].fields;

-                    fs.writeFileSync(schemaPath, JSON.stringify(userSchemaData), "utf8");
+                    fs.writeFileSync(
+                        schemaPath,
+                        JSON.stringify(userSchemaData),
+                        "utf8"
+                    );
                } catch (/** @type {*} */ error) {
-                    console.log("Error in updating Table =>", error.message);
+                    console.log(
+                        "Error in updating Table =>",
+                        error.message
+                    );
                }

                ////////////////////////////////////////
@@ -199,8 +240,16 @@ module.exports = async function updateTable({ dbFullName, tableName, tableInfoAr
             */
            if (Index_comment?.match(/schema_index/)) {
                try {
-                    const existingKeyInSchema = tableIndexes ? tableIndexes.filter((indexObject) => indexObject.alias === Key_name) : null;
-                    if (!existingKeyInSchema?.[0]) throw new Error(`This Index(${Key_name}) Has been Deleted!`);
+                    const existingKeyInSchema = tableIndexes
+                        ? tableIndexes.filter(
+                              (indexObject) =>
+                                  indexObject.alias === Key_name
+                          )
+                        : null;
+                    if (!existingKeyInSchema?.[0])
+                        throw new Error(
+                            `This Index(${Key_name}) Has been Deleted!`
+                        );
                } catch (error) {
                    /**
                     * @description Drop Index: This happens when the MYSQL index is not
@@ -222,7 +271,8 @@ module.exports = async function updateTable({ dbFullName, tableName, tableInfoAr
     */
    if (tableIndexes && tableIndexes[0]) {
        for (let g = 0; g < tableIndexes.length; g++) {
-            const { indexType, indexName, indexTableFields, alias } = tableIndexes[g];
+            const { indexType, indexName, indexTableFields, alias } =
+                tableIndexes[g];

            if (!alias?.match(/./)) continue;
@@ -230,15 +280,21 @@ module.exports = async function updateTable({ dbFullName, tableName, tableInfoAr
             * @description Check for existing Index in MYSQL db
             */
            try {
-                const existingKeyInDb = allExistingIndexes?.filter((indexObject) => indexObject.Key_name === alias);
-                if (!existingKeyInDb?.[0]) throw new Error("This Index Does not Exist");
+                const existingKeyInDb = allExistingIndexes?.filter(
+                    (/** @type {any} */ indexObject) =>
+                        indexObject.Key_name === alias
+                );
+                if (!existingKeyInDb?.[0])
+                    throw new Error("This Index Does not Exist");
            } catch (error) {
                /**
                 * @description Create new index if determined that it
                 * doesn't exist in MYSQL db
                 */
                await varDatabaseDbHandler({
-                    queryString: `CREATE${indexType.match(/fullText/i) ? " FULLTEXT" : ""} INDEX \`${alias}\` ON ${tableName}(${indexTableFields
+                    queryString: `CREATE${
+                        indexType.match(/fullText/i) ? " FULLTEXT" : ""
+                    } INDEX \`${alias}\` ON ${tableName}(${indexTableFields
                        .map((nm) => nm.value)
                        .map((nm) => `\`${nm}\``)
                        .join(",")}) COMMENT 'schema_index'`,
@@ -302,7 +358,17 @@ module.exports = async function updateTable({ dbFullName, tableName, tableInfoAr
        const prevColumn = upToDateTableFieldsArray[i - 1];
        const nextColumn = upToDateTableFieldsArray[i + 1];

-        const { fieldName, dataType, nullValue, primaryKey, autoIncrement, defaultValue, defaultValueLiteral, foreignKey, updatedField } = column;
+        const {
+            fieldName,
+            dataType,
+            nullValue,
+            primaryKey,
+            autoIncrement,
+            defaultValue,
+            defaultValueLiteral,
+            foreignKey,
+            updatedField,
+        } = column;

        ////////////////////////////////////////
@@ -339,22 +405,37 @@ module.exports = async function updateTable({ dbFullName, tableName, tableInfoAr
        /**
         * @description Construct SQL text snippet for this field
         */
-        let { fieldEntryText } = generateColumnDescription({ columnData: column });
+        let { fieldEntryText } = generateColumnDescription({
+            columnData: column,
+        });

        /**
         * @description Modify Column(Field) if it already exists
         * in MYSQL database
         */
        if (existingColumn && existingColumn[0]?.Field) {
-            const { Field, Type, Null, Key, Default, Extra } = existingColumn[0];
+            const { Field, Type, Null, Key, Default, Extra } =
+                existingColumn[0];

-            let isColumnReordered = existingColumnIndex ? i < existingColumnIndex : false;
+            let isColumnReordered = existingColumnIndex
+                ? i < existingColumnIndex
+                : false;

-            if (Field === fieldName && !isColumnReordered && dataType.toUpperCase() === Type.toUpperCase()) {
+            if (
+                Field === fieldName &&
+                !isColumnReordered &&
+                dataType.toUpperCase() === Type.toUpperCase()
+            ) {
                updateText += `MODIFY COLUMN ${fieldEntryText}`;
                // continue;
            } else {
-                updateText += `MODIFY COLUMN ${fieldEntryText}${isColumnReordered ? (prevColumn?.fieldName ? " AFTER `" + prevColumn.fieldName + "`" : " AFTER `id`") : ""}`;
+                updateText += `MODIFY COLUMN ${fieldEntryText}${
+                    isColumnReordered
+                        ? prevColumn?.fieldName
+                            ? " AFTER `" + prevColumn.fieldName + "`"
+                            : " AFTER `id`"
+                        : ""
+                }`;
                // if (userId) {
                // } else {
                //     updateText += `MODIFY COLUMN ${fieldEntryText}`;
@@ -396,9 +477,17 @@ module.exports = async function updateTable({ dbFullName, tableName, tableInfoAr
             * "clone" boolean = true
             */
            if (!clone && foreignKey) {
-                const { destinationTableName, destinationTableColumnName, cascadeDelete, cascadeUpdate, foreignKeyName } = foreignKey;
+                const {
+                    destinationTableName,
+                    destinationTableColumnName,
+                    cascadeDelete,
+                    cascadeUpdate,
+                    foreignKeyName,
+                } = foreignKey;

-                const foreinKeyText = `ADD CONSTRAINT \`${foreignKeyName}\` FOREIGN KEY (${fieldName}) REFERENCES ${destinationTableName}(${destinationTableColumnName})${cascadeDelete ? " ON DELETE CASCADE" : ""}${cascadeUpdate ? " ON UPDATE CASCADE" : ""}`;
+                const foreinKeyText = `ADD CONSTRAINT \`${foreignKeyName}\` FOREIGN KEY (${fieldName}) REFERENCES ${destinationTableName}(${destinationTableColumnName})${
+                    cascadeDelete ? " ON DELETE CASCADE" : ""
+                }${cascadeUpdate ? " ON UPDATE CASCADE" : ""}`;
                // const foreinKeyText = `ADD CONSTRAINT \`${foreignKeyName}\` FOREIGN KEY (${fieldName}) REFERENCES ${destinationTableName}(${destinationTableColumnName})${cascadeDelete ? " ON DELETE CASCADE" : ""}${cascadeUpdate ? " ON UPDATE CASCADE" : ""}` + ",";

                const finalQueryString = `ALTER TABLE \`${tableName}\` ${foreinKeyText}`;
@@ -416,7 +505,9 @@ module.exports = async function updateTable({ dbFullName, tableName, tableInfoAr
     * @description Construct final SQL query by combning all SQL snippets in
     * updateTableQueryArray Arry, and trimming the final comma(,)
     */
-    const updateTableQuery = updateTableQueryArray.join(" ").replace(/,$/, "");
+    const updateTableQuery = updateTableQueryArray
+        .join(" ")
+        .replace(/,$/, "");

    ////////////////////////////////////////


@@ -19,18 +19,27 @@ const fs = require("fs");
 const parseDbResults = require("./parseDbResults");
 const dbHandler = require("./dbHandler");

+/**
+ * @typedef {object} VarDbHandlerParam
+ * @property {string} queryString - SQL string
+ * @property {string[]} [queryValuesArray] - Values Array
+ * @property {string} database - Database name
+ * @property {import("@/package-shared/types/database-schema.td").DSQL_TableSchemaType} [tableSchema] - Table schema
+ */
+
 /**
 * DB handler for specific database
 * ==============================================================================
 * @async
- * @param {object} params - Single object params
- * @param {string} params.queryString - SQL string
- * @param {string[]} [params.queryValuesArray] - Values Array
- * @param {string} params.database - Database name
- * @param {import("../../../types/database-schema.td").DSQL_TableSchemaType} [params.tableSchema] - Table schema
- * @returns {Promise<*>}
+ * @param {VarDbHandlerParam} params
+ * @returns {Promise<any>}
 */
-module.exports = async function varDatabaseDbHandler({ queryString, queryValuesArray, database, tableSchema }) {
+module.exports = async function varDatabaseDbHandler({
+    queryString,
+    queryValuesArray,
+    database,
+    tableSchema,
+}) {
    /**
     * Create Connection
     *
@@ -54,17 +63,32 @@ module.exports = async function varDatabaseDbHandler({ queryString, queryValuesA
     * @description Fetch data from db if no cache
     */
    try {
-        if (queryString && Array.isArray(queryValuesArray) && queryValuesArray[0]) {
-            results = await dbHandler({ query: queryString, values: queryValuesArray, database: database });
+        if (
+            queryString &&
+            Array.isArray(queryValuesArray) &&
+            queryValuesArray[0]
+        ) {
+            results = await dbHandler({
+                query: queryString,
+                values: queryValuesArray,
+                database: database,
+            });
        } else if (queryString && !Array.isArray(queryValuesArray)) {
-            results = await dbHandler({ query: queryString, database: database });
+            results = await dbHandler({
+                query: queryString,
+                database: database,
+            });
        }

        ////////////////////////////////////////
        ////////////////////////////////////////
        ////////////////////////////////////////
    } catch (error) {
-        console.log("\x1b[31mvarDatabaseDbHandler ERROR\x1b[0m =>", database, error);
+        console.log(
+            "\x1b[31mvarDatabaseDbHandler ERROR\x1b[0m =>",
+            database,
+            error
+        );
    }

    /**
@@ -76,10 +100,17 @@ module.exports = async function varDatabaseDbHandler({ queryString, queryValuesA
    try {
        const unparsedResults = results;
        // deepcode ignore reDOS: <please specify a reason of ignoring this>
-        const parsedResults = await parseDbResults({ unparsedResults: unparsedResults, tableSchema: tableSchema });
+        const parsedResults = await parseDbResults({
+            unparsedResults: unparsedResults,
+            tableSchema: tableSchema,
+        });
        return parsedResults;
    } catch (error) {
-        console.log("\x1b[31mvarDatabaseDbHandler ERROR\x1b[0m =>", database, error);
+        console.log(
+            "\x1b[31mvarDatabaseDbHandler ERROR\x1b[0m =>",
+            database,
+            error
+        );
        return null;
    }
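A rough usage sketch of the newly typed handler (not part of the commit; the query and database name are placeholders):

// Hypothetical sketch: calling varDatabaseDbHandler with the VarDbHandlerParam shape.
const varDatabaseDbHandler = require("./varDatabaseDbHandler");

async function exampleQuery() {
    const rows = await varDatabaseDbHandler({
        queryString: "SELECT * FROM users WHERE id = ?",
        queryValuesArray: ["1"],
        database: "datasquirel_user_1_blog", // placeholder full database name
    });
    console.log(rows);
}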


@@ -29,7 +29,7 @@ const runQuery = require("./utils/runQuery");
 *
 * @param {Object} params - Single object passed
 * @param {LocalQueryObject} params.options - SQL Query
- * @param {import("../../types/database-schema.td").DSQL_DatabaseSchemaType} [params.dbSchema] - Name of the table to query
+ * @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} [params.dbSchema] - Name of the table to query
 *
 * @returns { Promise<LocalGetReturn> } - Return Object
 */
@@ -47,7 +47,13 @@ async function localGet({ options, dbSchema }) {
     *
     * @description Input Validation
     */
-    if (typeof query == "string" && (query.match(/^alter|^delete|information_schema|databases|^create/i) || !query.match(/^select/i))) {
+    if (
+        typeof query == "string" &&
+        (query.match(
+            /^alter|^delete|information_schema|databases|^create/i
+        ) ||
+            !query.match(/^select/i))
+    ) {
        return { success: false, msg: "Wrong Input" };
    }
@@ -68,7 +74,8 @@ async function localGet({ options, dbSchema }) {
        });

        if (error) throw error;
-        if (!result) throw new Error("No Result received for query => " + query);
+        if (!result)
+            throw new Error("No Result received for query => " + query);
        if (result?.error) throw new Error(result.error);

        results = result;


@@ -24,7 +24,7 @@ const runQuery = require("./utils/runQuery");
 *
 * @param {Object} params - Single object passed
 * @param {LocalPostQueryObject} params.options - SQL Query
- * @param {import("../../types/database-schema.td").DSQL_DatabaseSchemaType} [params.dbSchema] - Name of the table to query
+ * @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} [params.dbSchema] - Name of the table to query
 *
 * @returns { Promise<LocalPostReturn> } - Return Object
 */
@@ -41,11 +41,17 @@ async function localPost({ options, dbSchema }) {
     *
     * @description Input Validation
     */
-    if (typeof query === "string" && query?.match(/^create |^alter |^drop /i)) {
+    if (
+        typeof query === "string" &&
+        query?.match(/^create |^alter |^drop /i)
+    ) {
        return { success: false, msg: "Wrong Input" };
    }

-    if (typeof query === "object" && query?.action?.match(/^create |^alter |^drop /i)) {
+    if (
+        typeof query === "object" &&
+        query?.action?.match(/^create |^alter |^drop /i)
+    ) {
        return { success: false, msg: "Wrong Input" };
    }


@@ -19,7 +19,7 @@ const updateDbEntry = require("./updateDbEntry");
 * @param {string} params.dbFullName - Database full name
 * @param {string} params.tableName - Table name
 * @param {*} params.data - Data to add
- * @param {import("../../../types/database-schema.td").DSQL_TableSchemaType} [params.tableSchema] - Table schema
+ * @param {import("@/package-shared/types/database-schema.td").DSQL_TableSchemaType} [params.tableSchema] - Table schema
 * @param {string} [params.duplicateColumnName] - Duplicate column name
 * @param {string} [params.duplicateColumnValue] - Duplicate column value
 * @param {boolean} [params.update] - Update this row if it exists
@@ -28,7 +28,17 @@ const updateDbEntry = require("./updateDbEntry");
 *
 * @returns {Promise<*>}
 */
-async function addDbEntry({ dbFullName, tableName, data, tableSchema, duplicateColumnName, duplicateColumnValue, update, encryptionKey, encryptionSalt }) {
+async function addDbEntry({
+    dbFullName,
+    tableName,
+    data,
+    tableSchema,
+    duplicateColumnName,
+    duplicateColumnValue,
+    update,
+    encryptionKey,
+    encryptionSalt,
+}) {
    /**
     * Initialize variables
     */
@@ -79,8 +89,15 @@ async function addDbEntry({ dbFullName, tableName, data, tableSchema, duplicateC
        const dataKey = dataKeys[i];
        let value = data[dataKey];

-        const targetFieldSchemaArray = tableSchema ? tableSchema?.fields?.filter((field) => field.fieldName == dataKey) : null;
-        const targetFieldSchema = targetFieldSchemaArray && targetFieldSchemaArray[0] ? targetFieldSchemaArray[0] : null;
+        const targetFieldSchemaArray = tableSchema
+            ? tableSchema?.fields?.filter(
+                  (field) => field.fieldName == dataKey
+              )
+            : null;
+        const targetFieldSchema =
+            targetFieldSchemaArray && targetFieldSchemaArray[0]
+                ? targetFieldSchemaArray[0]
+                : null;

        if (!value) continue;
@@ -90,7 +107,10 @@ async function addDbEntry({ dbFullName, tableName, data, tableSchema, duplicateC
        }

        if (targetFieldSchema?.pattern) {
-            const pattern = new RegExp(targetFieldSchema.pattern, targetFieldSchema.patternFlags || "");
+            const pattern = new RegExp(
+                targetFieldSchema.pattern,
+                targetFieldSchema.patternFlags || ""
+            );
            if (!value?.toString()?.match(pattern)) {
                console.log("DSQL: Pattern not matched =>", value);
                value = "";
@@ -136,7 +156,9 @@ async function addDbEntry({ dbFullName, tableName, data, tableSchema, duplicateC
    ////////////////////////////////////////

-    const query = `INSERT INTO \`${tableName}\` (${insertKeysArray.join(",")}) VALUES (${insertValuesArray.map(() => "?").join(",")})`;
+    const query = `INSERT INTO \`${tableName}\` (${insertKeysArray.join(
+        ","
+    )}) VALUES (${insertValuesArray.map(() => "?").join(",")})`;
    const queryValuesArray = insertValuesArray;

    const newInsert = await dbHandler({
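For reference (not part of the diff), a calling sketch for addDbEntry using the parameters documented above; every value below is a placeholder:

// Hypothetical usage sketch of addDbEntry.
const addDbEntry = require("./addDbEntry");

async function exampleInsert() {
    const inserted = await addDbEntry({
        dbFullName: "datasquirel_user_1_blog", // placeholder
        tableName: "posts",
        data: { title: "Hello", slug: "hello" },
        duplicateColumnName: "slug",
        duplicateColumnValue: "hello",
        update: true, // update the row if the duplicate check matches
        encryptionKey: process.env.DSQL_ENCRYPTION_KEY,
        encryptionSalt: process.env.DSQL_ENCRYPTION_SALT,
    });
    console.log(inserted);
}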


@@ -19,13 +19,20 @@ const dbHandler = require("../../engine/utils/dbHandler");
 * "Read only" or "Full Access"? Defaults to "Read Only"
 * @param {string} params.dbFullName - Database full name
 * @param {string} params.tableName - Table name
- * @param {import("../../../types/database-schema.td").DSQL_TableSchemaType} [params.tableSchema] - Table schema
+ * @param {import("@/package-shared/types/database-schema.td").DSQL_TableSchemaType} [params.tableSchema] - Table schema
 * @param {string} params.identifierColumnName - Update row identifier column name
 * @param {string|number} params.identifierValue - Update row identifier column value
 *
 * @returns {Promise<object|null>}
 */
-async function deleteDbEntry({ dbContext, paradigm, dbFullName, tableName, identifierColumnName, identifierValue }) {
+async function deleteDbEntry({
+    dbContext,
+    paradigm,
+    dbFullName,
+    tableName,
+    identifierColumnName,
+    identifierValue,
+}) {
    try {
        /**
         * Check if data is valid


@@ -31,13 +31,20 @@ const varDatabaseDbHandler = require("../../engine/utils/varDatabaseDbHandler");
 * @param {string} params.dbFullName - Database full name. Eg. "datasquire_user_2_test"
 * @param {*} params.query - Query string or object
 * @param {boolean} [params.readOnly] - Is this operation read only?
- * @param {import("../../../types/database-schema.td").DSQL_DatabaseSchemaType} [params.dbSchema] - Database schema
+ * @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType} [params.dbSchema] - Database schema
 * @param {string[]} [params.queryValuesArray] - An optional array of query values if "?" is used in the query string
 * @param {string} [params.tableName] - Table Name
 *
 * @return {Promise<{result: *, error?: *}>}
 */
-async function runQuery({ dbFullName, query, readOnly, dbSchema, queryValuesArray, tableName }) {
+async function runQuery({
+    dbFullName,
+    query,
+    readOnly,
+    dbSchema,
+    queryValuesArray,
+    tableName,
+}) {
    /**
     * Declare variables
     *
@@ -50,9 +57,17 @@ async function runQuery({ dbFullName, query, readOnly, dbSchema, queryValuesArra
    if (dbSchema) {
        try {
-            const table = tableName ? tableName : typeof query == "string" ? null : query ? query?.table : null;
+            const table = tableName
+                ? tableName
+                : typeof query == "string"
+                ? null
+                : query
+                ? query?.table
+                : null;

            if (!table) throw new Error("No table name provided");
-            tableSchema = dbSchema.tables.filter((tb) => tb?.tableName === table)[0];
+            tableSchema = dbSchema.tables.filter(
+                (tb) => tb?.tableName === table
+            )[0];
        } catch (_err) {}
    }
@@ -75,7 +90,16 @@ async function runQuery({ dbFullName, query, readOnly, dbSchema, queryValuesArra
         *
         * @description Declare "results" variable
         */
-        const { data, action, table, identifierColumnName, identifierValue, update, duplicateColumnName, duplicateColumnValue } = query;
+        const {
+            data,
+            action,
+            table,
+            identifierColumnName,
+            identifierValue,
+            update,
+            duplicateColumnName,
+            duplicateColumnValue,
+        } = query;

        switch (action.toLowerCase()) {
            case "insert":


@ -21,7 +21,7 @@ const dbHandler = require("../../engine/utils/dbHandler");
* @param {string} params.dbFullName - Database full name * @param {string} params.dbFullName - Database full name
* @param {string} params.tableName - Table name * @param {string} params.tableName - Table name
* @param {*} params.data - Data to add * @param {*} params.data - Data to add
* @param {import("../../../types/database-schema.td").DSQL_TableSchemaType} [params.tableSchema] - Table schema * @param {import("@/package-shared/types/database-schema.td").DSQL_TableSchemaType} [params.tableSchema] - Table schema
* @param {string} params.identifierColumnName - Update row identifier column name * @param {string} params.identifierColumnName - Update row identifier column name
* @param {string | number} params.identifierValue - Update row identifier column value * @param {string | number} params.identifierValue - Update row identifier column value
* @param {string} params.encryptionKey - Encryption key * @param {string} params.encryptionKey - Encryption key
@ -29,7 +29,18 @@ const dbHandler = require("../../engine/utils/dbHandler");
* *
* @returns {Promise<object|null>} * @returns {Promise<object|null>}
*/ */
async function updateDbEntry({ dbContext, paradigm, dbFullName, tableName, data, tableSchema, identifierColumnName, identifierValue, encryptionKey, encryptionSalt }) { async function updateDbEntry({
dbContext,
paradigm,
dbFullName,
tableName,
data,
tableSchema,
identifierColumnName,
identifierValue,
encryptionKey,
encryptionSalt,
}) {
/** /**
* Check if data is valid * Check if data is valid
*/ */
@ -59,11 +70,23 @@ async function updateDbEntry({ dbContext, paradigm, dbFullName, tableName, data,
const dataKey = dataKeys[i]; const dataKey = dataKeys[i];
let value = data[dataKey]; let value = data[dataKey];
const targetFieldSchemaArray = tableSchema ? tableSchema?.fields?.filter((field) => field.fieldName === dataKey) : null; const targetFieldSchemaArray = tableSchema
const targetFieldSchema = targetFieldSchemaArray && targetFieldSchemaArray[0] ? targetFieldSchemaArray[0] : null; ? tableSchema?.fields?.filter(
(field) => field.fieldName === dataKey
)
: null;
const targetFieldSchema =
targetFieldSchemaArray && targetFieldSchemaArray[0]
? targetFieldSchemaArray[0]
: null;
if (typeof value == "undefined") continue; if (typeof value == "undefined") continue;
if (typeof value !== "string" && typeof value !== "number" && !value) continue; if (
typeof value !== "string" &&
typeof value !== "number" &&
!value
)
continue;
if (targetFieldSchema?.encrypted) { if (targetFieldSchema?.encrypted) {
value = encrypt({ data: value, encryptionKey, encryptionSalt }); value = encrypt({ data: value, encryptionKey, encryptionSalt });
@@ -82,7 +105,10 @@ async function updateDbEntry({ dbContext, paradigm, dbFullName, tableName, data,
        }

        if (targetFieldSchema?.pattern) {
-            const pattern = new RegExp(targetFieldSchema.pattern, targetFieldSchema.patternFlags || "");
+            const pattern = new RegExp(
+                targetFieldSchema.pattern,
+                targetFieldSchema.patternFlags || ""
+            );
            if (!value?.toString()?.match(pattern)) {
                console.log("DSQL: Pattern not matched =>", value);
                value = "";
@@ -108,7 +134,10 @@ async function updateDbEntry({ dbContext, paradigm, dbFullName, tableName, data,
            ////////////////////////////////////////
            ////////////////////////////////////////

-            console.log("DSQL: Error in parsing data keys in update function =>", error.message);
+            console.log(
+                "DSQL: Error in parsing data keys in update function =>",
+                error.message
+            );
            continue;
        }
    }
@@ -122,7 +151,9 @@ async function updateDbEntry({ dbContext, paradigm, dbFullName, tableName, data,
    ////////////////////////////////////////
    ////////////////////////////////////////

-    const query = `UPDATE ${tableName} SET ${updateKeyValueArray.join(",")} WHERE \`${identifierColumnName}\`=?`;
+    const query = `UPDATE ${tableName} SET ${updateKeyValueArray.join(
+        ","
+    )} WHERE \`${identifierColumnName}\`=?`;

    updateValues.push(identifierValue);
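
A minimal sketch of a call matching the updated signature and JSDoc above. The schema, identifiers, and the "paradigm" value are placeholders, not values taken from this repository; only the parameter names mirror the diff.

const updateDbEntry = require("./updateDbEntry"); // path as seen from sibling modules

(async () => {
    const result = await updateDbEntry({
        dbContext: "Dsql User", // same context string used by localUpdateUser later in this commit
        paradigm: "Full Access", // placeholder: the allowed values are not shown in this diff
        dbFullName: "datasquirel_user_7_new_database",
        tableName: "users",
        data: { first_name: "Jane", phone: "08012345678" },
        tableSchema: {
            tableName: "users",
            tableFullName: "Users",
            fields: [
                { fieldName: "first_name", dataType: "VARCHAR(100)" },
                // "pattern" feeds the RegExp validation shown in the hunk above
                { fieldName: "phone", dataType: "VARCHAR(20)", pattern: "^[0-9]+$" },
            ],
        },
        identifierColumnName: "id",
        identifierValue: 1,
        encryptionKey: process.env.DSQL_ENCRYPTION_KEY || "",
        encryptionSalt: process.env.DSQL_ENCRYPTION_SALT || "",
    });

    console.log(result);
})();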


@@ -20,8 +20,8 @@ const runQuery = require("../query/utils/runQuery");
 * @async
 *
 * @param {Object} params - Single object passed
- * @param {import("../../users/add-user").UserDataPayload} params.payload - SQL Query
- * @param {import("../../types/database-schema.td").DSQL_DatabaseSchemaType} params.dbSchema - Name of the table to query
+ * @param {import("@/types/user.td").UserDataPayload} params.payload - SQL Query
+ * @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} params.dbSchema - Name of the table to query
 * @param {string} [params.encryptionKey]
 * @param {string} [params.encryptionSalt]
 *
@@ -104,6 +104,10 @@ async function localAddUser({
        };
    }

+    if (!dbSchema) {
+        throw new Error("Db Schema not found!");
+    }
+
    const tableSchema = dbSchema.tables.find(
        (tb) => tb?.tableName === "users"
    );
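
The newly added guard throws before the "users" table lookup when no schema is supplied. A standalone illustration of the same pattern; the schema object is made up:

// Guard + "users" table lookup as added in the hunk above; the schema is illustrative.
function getUsersTableSchema(dbSchema) {
    if (!dbSchema) {
        throw new Error("Db Schema not found!");
    }
    return dbSchema.tables.find((tb) => tb?.tableName === "users");
}

const exampleDbSchema = {
    dbFullName: "datasquirel_user_7_example_db",
    tables: [{ tableName: "users", tableFullName: "Users", fields: [] }],
};

console.log(getUsersTableSchema(exampleDbSchema)); // => the "users" table object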


@@ -7,7 +7,7 @@ const varDatabaseDbHandler = require("../engine/utils/varDatabaseDbHandler");
 * @param {object} param0
 * @param {number} param0.userId
 * @param {string[]} param0.fields
- * @param {import("../../types/database-schema.td").DSQL_DatabaseSchemaType} [param0.dbSchema]
+ * @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} [param0.dbSchema]
 * @returns
 */
async function getLocalUser({ userId, fields, dbSchema }) {
@@ -19,7 +19,9 @@ async function getLocalUser({ userId, fields, dbSchema }) {
    const sanitizedFields = fields.map((fld) => fld.replace(/[^a-z\_]/g, ""));
    const query = `SELECT ${sanitizedFields.join(",")} FROM users WHERE id = ?`;

-    const tableSchema = dbSchema?.tables.find((tb) => tb?.tableName === "users");
+    const tableSchema = dbSchema?.tables.find(
+        (tb) => tb?.tableName === "users"
+    );

    let foundUser = await varDatabaseDbHandler({
        queryString: query,
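
The field sanitization in this hunk strips every character that is not a lowercase letter or underscore before the list is interpolated into the SELECT. A quick standalone check with invented field names:

// Mirrors the sanitization above: only lowercase letters and "_" survive.
const fields = ["first_name", "email", "id; DROP TABLE users"];
const sanitizedFields = fields.map((fld) => fld.replace(/[^a-z\_]/g, ""));

console.log(sanitizedFields); // [ 'first_name', 'email', 'idusers' ]
console.log(`SELECT ${sanitizedFields.join(",")} FROM users WHERE id = ?`);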


@@ -5,17 +5,7 @@ const varDatabaseDbHandler = require("../engine/utils/varDatabaseDbHandler");

/**
 *
- * @param {object} param0
- * @param {{
- *  email?: string,
- *  username?: string,
- *  password: string,
- * }} param0.payload
- * @param {string[]} [param0.additionalFields]
- * @param {import("../../types/database-schema.td").DSQL_DatabaseSchemaType} [param0.dbSchema]
- * @param {boolean} [param0.email_login]
- * @param {string} [param0.email_login_code]
- * @param {string | null} [param0.email_login_field]
+ * @param {PackageUserLoginLocalBody} param0
 * @returns
 */
async function loginLocalUser({


@@ -7,7 +7,7 @@ const varDatabaseDbHandler = require("../engine/utils/varDatabaseDbHandler");
 * @param {object} param0
 * @param {*} param0.existingUser
 * @param {string[]} [param0.additionalFields]
- * @param {import("../../types/database-schema.td").DSQL_DatabaseSchemaType} [param0.dbSchema]
+ * @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} [param0.dbSchema]
 * @returns
 */
async function localReauthUser({ existingUser, additionalFields, dbSchema }) {
@@ -24,7 +24,9 @@ async function localReauthUser({ existingUser, additionalFields, dbSchema }) {
     *
     * @description GRAB user
     */
-    const tableSchema = dbSchema?.tables.find((tb) => tb?.tableName === "users");
+    const tableSchema = dbSchema?.tables.find(
+        (tb) => tb?.tableName === "users"
+    );

    let foundUser =
        existingUser?.id && existingUser.id.toString().match(/./)
@@ -51,7 +53,10 @@ async function localReauthUser({ existingUser, additionalFields, dbSchema }) {
    ////////////////////////////////////////
    ////////////////////////////////////////

-    let csrfKey = Math.random().toString(36).substring(2) + "-" + Math.random().toString(36).substring(2);
+    let csrfKey =
+        Math.random().toString(36).substring(2) +
+        "-" +
+        Math.random().toString(36).substring(2);

    let userPayload = {
        id: foundUser[0].id,
@@ -72,7 +77,11 @@ async function localReauthUser({ existingUser, additionalFields, dbSchema }) {
        date: Date.now(),
    };

-    if (additionalFields && Array.isArray(additionalFields) && additionalFields.length > 0) {
+    if (
+        additionalFields &&
+        Array.isArray(additionalFields) &&
+        additionalFields.length > 0
+    ) {
        additionalFields.forEach((key) => {
            // @ts-ignore
            userPayload[key] = foundUser?.[0][key];
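
A standalone sketch of the CSRF-key construction reformatted in this file: two random base-36 fragments joined by a hyphen. This mirrors the existing code rather than recommending it; Math.random() is not cryptographically secure.

// Same construction as the diff above.
function makeCsrfKey() {
    return (
        Math.random().toString(36).substring(2) +
        "-" +
        Math.random().toString(36).substring(2)
    );
}

console.log(makeCsrfKey()); // e.g. "k3j2l9d0q-8fh27dkq1"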


@@ -10,7 +10,7 @@ const path = require("path");
 *
 * @param {object} param0
 * @param {string} param0.email
- * @param {import("../../types/database-schema.td").DSQL_DatabaseSchemaType} [param0.dbSchema]
+ * @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} [param0.dbSchema]
 * @param {string} param0.email_login_field
 * @param {string} [param0.mail_domain]
 * @param {string} [param0.mail_username]


@@ -36,9 +36,17 @@ const encryptionSalt = process.env.DSQL_ENCRYPTION_SALT || "";
 * @param {string} params.clientId
 * @param {string} params.clientSecret
 * @param {object} [params.additionalFields]
- * @param {import("../../../types/database-schema.td").DSQL_DatabaseSchemaType} params.dbSchema
+ * @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType} params.dbSchema
 */
-async function localGithubAuth({ res, code, email, clientId, clientSecret, additionalFields, dbSchema }) {
+async function localGithubAuth({
+    res,
+    code,
+    email,
+    clientId,
+    clientSecret,
+    additionalFields,
+    dbSchema,
+}) {
    try {
        /**
         * User auth
@@ -52,7 +60,12 @@ async function localGithubAuth({ res, code, email, clientId, clientSecret, addit
            };
        }

-        if (typeof code !== "string" || typeof clientId !== "string" || typeof clientSecret !== "string" || typeof database !== "string") {
+        if (
+            typeof code !== "string" ||
+            typeof clientId !== "string" ||
+            typeof clientSecret !== "string" ||
+            typeof database !== "string"
+        ) {
            return {
                success: false,
                msg: "Wrong Parameters",
@@ -81,7 +94,11 @@ async function localGithubAuth({ res, code, email, clientId, clientSecret, addit
        const socialId = gitHubUser.name || gitHubUser.id || gitHubUser.login;
        const targetName = gitHubUser.name || gitHubUser.login;

-        const nameArray = targetName?.match(/ /) ? targetName?.split(" ") : targetName?.match(/\-/) ? targetName?.split("-") : [targetName];
+        const nameArray = targetName?.match(/ /)
+            ? targetName?.split(" ")
+            : targetName?.match(/\-/)
+            ? targetName?.split("-")
+            : [targetName];

        const payload = {
            email: gitHubUser.email,
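
A small standalone illustration of the name-splitting ternary in this hunk, using made-up GitHub display names:

// Splits a display name on spaces first, then hyphens, else wraps it in an array.
function splitTargetName(targetName) {
    return targetName?.match(/ /)
        ? targetName?.split(" ")
        : targetName?.match(/\-/)
        ? targetName?.split("-")
        : [targetName];
}

console.log(splitTargetName("Ada Lovelace")); // [ 'Ada', 'Lovelace' ]
console.log(splitTargetName("ada-lovelace")); // [ 'ada', 'lovelace' ]
console.log(splitTargetName("octocat"));      // [ 'octocat' ]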


@@ -44,7 +44,7 @@ const encryptionSalt = process.env.DSQL_ENCRYPTION_SALT || "";
 * @param {string} params.clientId - Google client id
 * @param {http.ServerResponse} params.response - HTTPS response object
 * @param {object} [params.additionalFields] - Additional Fields to be added to the user object
- * @param {import("../../../types/database-schema.td").DSQL_DatabaseSchemaType} [params.dbSchema] - Database Schema
+ * @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType} [params.dbSchema] - Database Schema
 *
 * @returns { Promise<FunctionReturn> }
 */


@@ -74,13 +74,24 @@ const encryptionSalt = process.env.DSQL_ENCRYPTION_SALT || "";
 *  res: http.ServerResponse,
 *  supEmail?: string | null,
 *  additionalFields?: object,
- *  dbSchema: import("../../../../types/database-schema.td").DSQL_DatabaseSchemaType | undefined
+ *  dbSchema: import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined
 * }} params - function parameters inside an object
 *
 * @returns {Promise<FunctionReturn>} - Response object
 */
-async function handleSocialDb({ social_id, email, social_platform, payload, res, supEmail, additionalFields, dbSchema }) {
-    const tableSchema = dbSchema?.tables.find((tb) => tb?.tableName === "users");
+async function handleSocialDb({
+    social_id,
+    email,
+    social_platform,
+    payload,
+    res,
+    supEmail,
+    additionalFields,
+    dbSchema,
+}) {
+    const tableSchema = dbSchema?.tables.find(
+        (tb) => tb?.tableName === "users"
+    );

    try {
        ////////////////////////////////////////////////
@@ -244,7 +255,10 @@ async function handleSocialDb({ social_id, email, social_platform, payload, res,
            ////////////////////////////////////////////////
            ////////////////////////////////////////////////
        } else {
-            console.log("Social User Failed to insert in 'handleSocialDb.js' backend function =>", newUser);
+            console.log(
+                "Social User Failed to insert in 'handleSocialDb.js' backend function =>",
+                newUser
+            );

            return {
                success: false,
@@ -258,7 +272,10 @@ async function handleSocialDb({ social_id, email, social_platform, payload, res,
        ////////////////////////////////////////////////
        ////////////////////////////////////////////////
    } catch (/** @type {*} */ error) {
-        console.log("ERROR in 'handleSocialDb.js' backend function =>", error.message);
+        console.log(
+            "ERROR in 'handleSocialDb.js' backend function =>",
+            error.message
+        );

        return {
            success: false,
@@ -311,13 +328,22 @@ async function handleSocialDb({ social_id, email, social_platform, payload, res,
 *   msg?: string
 * }>}
 */
-async function loginSocialUser({ user, social_platform, res, database, additionalFields }) {
+async function loginSocialUser({
+    user,
+    social_platform,
+    res,
+    database,
+    additionalFields,
+}) {
    const foundUser = await varDatabaseDbHandler({
        database: database ? database : "datasquirel",
        queryString: `SELECT * FROM users WHERE email='${user.email}' AND social_id='${user.social_id}' AND social_platform='${social_platform}'`,
    });

-    let csrfKey = Math.random().toString(36).substring(2) + "-" + Math.random().toString(36).substring(2);
+    let csrfKey =
+        Math.random().toString(36).substring(2) +
+        "-" +
+        Math.random().toString(36).substring(2);

    if (!foundUser?.[0]) {
        return {
@@ -360,7 +386,10 @@ async function loginSocialUser({ user, social_platform, res, database, additiona
        });

        if (res?.setHeader) {
-            res.setHeader("Set-Cookie", [`datasquirelAuthKey=${encryptedPayload};samesite=strict;path=/;HttpOnly=true;Secure=true`, `csrf=${csrfKey};samesite=strict;path=/;HttpOnly=true`]);
+            res.setHeader("Set-Cookie", [
+                `datasquirelAuthKey=${encryptedPayload};samesite=strict;path=/;HttpOnly=true;Secure=true`,
+                `csrf=${csrfKey};samesite=strict;path=/;HttpOnly=true`,
+            ]);
        }

        ////////////////////////////////////////////////
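
A minimal standalone Node `http` sketch of sending two cookies the way this hunk does. The cookie names mirror the diff; the values and port are placeholders.

const http = require("http");

// Placeholders standing in for encryptedPayload and csrfKey.
const encryptedPayload = "example-encrypted-payload";
const csrfKey = "example-csrf-key";

const server = http.createServer((req, res) => {
    // Same shape as the diff: an array of cookie strings on one Set-Cookie header.
    res.setHeader("Set-Cookie", [
        `datasquirelAuthKey=${encryptedPayload};samesite=strict;path=/;HttpOnly=true;Secure=true`,
        `csrf=${csrfKey};samesite=strict;path=/;HttpOnly=true`,
    ]);
    res.end("ok");
});

server.listen(3000);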


@@ -17,7 +17,7 @@ const updateDbEntry = require("../query/utils/updateDbEntry");
 *
 * @param {Object} params - Single object passed
 * @param {*} params.payload - SQL Query
- * @param {import("../../types/database-schema.td").DSQL_DatabaseSchemaType} params.dbSchema - Name of the table to query
+ * @param {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} params.dbSchema - Name of the table to query
 *
 * @returns { Promise<LocalPostReturn> } - Return Object
 */
@@ -49,7 +49,13 @@ async function localUpdateUser({ payload, dbSchema }) {
        return finalData;
    })();

-    const tableSchema = dbSchema.tables.find((tb) => tb?.tableName === "users");
+    if (!dbSchema) {
+        throw new Error("Db Schema not found!");
+    }
+
+    const tableSchema = dbSchema.tables.find(
+        (tb) => tb?.tableName === "users"
+    );

    const updateUser = await updateDbEntry({
        dbContext: "Dsql User",

package-lock.json generated

@@ -18,7 +18,8 @@
            "dsql-watch": "engine/dsql.js"
        },
        "devDependencies": {
-            "@types/mysql": "^2.15.21"
+            "@types/mysql": "^2.15.21",
+            "@types/node": "^22.7.5"
        }
    },
    "node_modules/@types/mysql": {
@@ -31,10 +32,14 @@
            }
        },
        "node_modules/@types/node": {
-            "version": "20.4.10",
-            "resolved": "https://registry.npmjs.org/@types/node/-/node-20.4.10.tgz",
-            "integrity": "sha512-vwzFiiy8Rn6E0MtA13/Cxxgpan/N6UeNYR9oUu6kuJWxu6zCk98trcDp8CBhbtaeuq9SykCmXkFr2lWLoPcvLg==",
-            "dev": true
+            "version": "22.7.5",
+            "resolved": "https://registry.npmjs.org/@types/node/-/node-22.7.5.tgz",
+            "integrity": "sha512-jML7s2NAzMWc//QSJ1a3prpk78cOPchGvXJsC3C6R6PSMoooztvRVQEz89gmBTBY1SPMaqo5teB4uNHPdetShQ==",
+            "dev": true,
+            "license": "MIT",
+            "dependencies": {
+                "undici-types": "~6.19.2"
+            }
        },
        "node_modules/bignumber.js": {
            "version": "9.0.0",
@@ -133,6 +138,13 @@
                "safe-buffer": "~5.1.0"
            }
        },
+        "node_modules/undici-types": {
+            "version": "6.19.8",
+            "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz",
+            "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==",
+            "dev": true,
+            "license": "MIT"
+        },
        "node_modules/util-deprecate": {
            "version": "1.0.2",
            "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",

package-shared Symbolic link

@ -0,0 +1 @@
/media/benoti/Ubuntu-Disk/Github/datasquirel/package-shared


@@ -31,6 +31,7 @@
        "nodemailer": "^6.9.14"
    },
    "devDependencies": {
-        "@types/mysql": "^2.15.21"
+        "@types/mysql": "^2.15.21",
+        "@types/node": "^22.7.5"
    }
}

tsconfig.json Normal file

@ -0,0 +1,36 @@
{
"compilerOptions": {
"target": "ES2015",
"module": "commonjs",
"paths": {
"@/*": ["./*"]
},
"maxNodeModuleJsDepth": 10,
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"strict": true,
"skipLibCheck": true,
"lib": ["dom", "dom.iterable", "esnext"],
"allowJs": true,
"noEmit": true,
"incremental": true,
"resolveJsonModule": true,
"isolatedModules": true,
"jsx": "preserve",
"moduleResolution": "node",
"plugins": [
{
"name": "next"
}
]
},
"include": [
"engine",
"functions",
"types",
"users",
"utils",
"package-shared/types"
],
"exclude": ["node_modules", "dump"]
}
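
With the `"paths": { "@/*": ["./*"] }` alias above, the JSDoc import paths rewritten throughout this commit resolve relative to the project root where this tsconfig.json sits. A minimal JSDoc sketch of how such an annotation looks in consuming code (the variable itself is illustrative):

// The "@/..." specifier below is resolved through the tsconfig path alias;
// the typedef name matches the schema types referenced from package-shared/types.
/** @type {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
let dbSchema;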


@ -1,95 +0,0 @@
/**
* @typedef {string} DSQL_DatabaseFullName - Database full name(slug) including datasquirel data => "datasquirel_user_7_new_database"
*/
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/**
* @typedef {object} DSQL_DatabaseSchemaType
* @property {string} dbName - Database Full name with spaces => "New Database"
* @property {string} dbSlug - Database Slug => "new_database"
* @property {string} dbFullName - Database full name(slug) including datasquirel data => "datasquirel_user_7_new_database"
* @property {string} [dbDescription] - Database brief description
* @property {string} [dbImage] - Database image - Defaults to "/images/default.png"
* @property {DSQL_TableSchemaType[]} tables - List of database tables
* @property {{ dbFullName: string }[]} [childrenDatabases] - List of children databases for current database which is parent
* @property {boolean} [childDatabase] - If current database is a child of a different parent database
* @property {string} [childDatabaseDbFullName] - Parent database full name => "datasquirel_user_7_new_database"
*/
////////////////////////////////////////
/**
* @typedef {object} DSQL_TableSchemaType
* @property {string} tableName - Table slug (blog_posts)
* @property {string} tableFullName - Table full name with spaces => "Blog Posts"
* @property {string} [tableDescription] - Brief description of table
* @property {DSQL_FieldSchemaType[]} fields - List of table Fields
* @property {DSQL_IndexSchemaType[]} [indexes] - List of table indexes, if available
* @property {DSQL_ChildrenTablesType[]} [childrenTables] - List of children tables
* @property {boolean} [childTable] -If current table is a child clone
* @property {string} [childTableName] - Table slug of parent table => "blog_posts"
* @property {string} [childTableDbFullName] - Database full name(slug) including datasquirel data => "datasquirel_user_7_new_database"
* @property {string} [tableNameOld] - Old table name, incase of renaming table
*/
/**
* @typedef {object} DSQL_ChildrenTablesType
* @property {string} dbNameFull - Database full name(slug) including datasquirel data => "datasquirel_user_7_new_database"
* @property {string} tableName - Table slug => "blog_posts"
*/
////////////////////////////////////////
/**
* @typedef {object} DSQL_FieldSchemaType
* @property {string} fieldName - Field Name(slug) => "long_description"
* @property {string} [originName] - Field origin name(optional)
* @property {boolean} [updatedField] - Has this field been renamed?
* @property {string} dataType - Field Data type => "BIGIN" | "LONGTEXT" | "VARCHAR(***)" | ...
* @property {boolean} [nullValue] - Is this a null value or not?
* @property {boolean} [notNullValue] - Is this NOT a null value?
* @property {boolean} [primaryKey] - Is this the primary key for table?
* @property {boolean} [encrypted] - Is this field value encrypted?
* @property {boolean} [autoIncrement] - Does this table primary key increment automatically?
* @property {string|number} [defaultValue] - Value of field by default
* @property {string} [defaultValueLiteral] - SQL key word which generates value automatically => "CURRENT_TIMESTAMP"
* @property {DSQL_ForeignKeyType} [foreignKey] - Field foreign key reference object
* @property {boolean} [richText] - Rich text field
* @property {string | RegExp} [pattern] - Field pattern for validation. Can be a string or a regular expression. Example: "^[a-zA-Z0-9_]*$"
* @property {string} [patternFlags] - Field pattern flags for validation. Example: "i"
*/
/**
* @typedef {object} DSQL_ForeignKeyType
* @property {string} foreignKeyName - Unique Name of foreign key
* @property {string} destinationTableName - Reference table name(slug) => "blog_posts"
* @property {string} destinationTableColumnName - Reference column name(slug) => "id"
* @property {string} destinationTableColumnType - Reference table field type => "BIGINT" | "VARCHAR(***)" | ...
* @property {boolean} [cascadeDelete] - Does the reference table entry delete when this key is deleted?
* @property {boolean} [cascadeUpdate] - Does the reference table entry update when this key is updated?
*/
////////////////////////////////////////
/**
* @typedef {object} DSQL_IndexSchemaType
* @property {string} indexName - Unique Name of index => "blog_text_index"
* @property {string} indexType - "regular" or "fullText"
* @property {DSQL_IndexTableFieldType[]} indexTableFields - List of Index table fields
* @property {string} [alias] - List of Index table fields
*/
/**
* @typedef {object} DSQL_IndexTableFieldType
* @property {string} value - Table Field Name
* @property {string} dataType - Table Field data type "VARCHAR(***)" | "BIGINT" | ...
*/
////////////////////////////////////////
exports.DSQL_TableSchemaType = DSQL_TableSchemaType;
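
For reference, a small illustrative object shaped like the DSQL_DatabaseSchemaType / DSQL_TableSchemaType / DSQL_FieldSchemaType typedefs deleted here (the updated imports elsewhere in this commit point at a copy under @/package-shared/types). All names and values below are made up:

/** @type {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType} */
const exampleSchema = {
    dbName: "New Database",
    dbSlug: "new_database",
    dbFullName: "datasquirel_user_7_new_database",
    tables: [
        {
            tableName: "blog_posts",
            tableFullName: "Blog Posts",
            fields: [
                { fieldName: "id", dataType: "BIGINT", primaryKey: true, autoIncrement: true },
                { fieldName: "title", dataType: "VARCHAR(255)", notNullValue: true },
                { fieldName: "slug", dataType: "VARCHAR(255)", pattern: "^[a-z0-9\\-]*$", patternFlags: "i" },
            ],
        },
    ],
};

console.log(exampleSchema.tables[0].fields.map((f) => f.fieldName));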


@ -1,89 +0,0 @@
/**
* @typedef {string} DSQL_DatabaseFullName - Database full name(slug) including datasquirel data => "datasquirel_user_7_new_database"
*/
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/**
* @typedef {object} DSQL_DatabaseSchemaType
* @property {string} dbName - Database Full name with spaces => "New Database"
* @property {string} dbSlug - Database Slug => "new_database"
* @property {string} dbFullName - Database full name(slug) including datasquirel data => "datasquirel_user_7_new_database"
* @property {string} [dbDescription] - Database brief description
* @property {string} [dbImage] - Database image - Defaults to "/images/default.png"
* @property {DSQL_TableSchemaType[]} tables - List of database tables
* @property {{ dbFullName: string }[]} [childrenDatabases] - List of children databases for current database which is parent
* @property {boolean} [childDatabase] - If current database is a child of a different parent database
* @property {string} [childDatabaseDbFullName] - Parent database full name => "datasquirel_user_7_new_database"
*/
////////////////////////////////////////
/**
* @typedef {object} DSQL_TableSchemaType
* @property {string} tableName - Table slug (blog_posts)
* @property {string} tableFullName - Table full name with spaces => "Blog Posts"
* @property {string} [tableDescription] - Brief description of table
* @property {DSQL_FieldSchemaType[]} fields - List of table Fields
* @property {DSQL_IndexSchemaType[]} [indexes] - List of table indexes, if available
* @property {DSQL_ChildrenTablesType[]} childrenTables - List of children tables
* @property {boolean} [childTable] -If current table is a child clone
* @property {string} [childTableName] - Table slug of parent table => "blog_posts"
* @property {string} [childTableDbFullName] - Database full name(slug) including datasquirel data => "datasquirel_user_7_new_database"
* @property {string} [tableNameOld] - Old table name, incase of renaming table
*/
/**
* @typedef {object} DSQL_ChildrenTablesType
* @property {string} dbNameFull - Database full name(slug) including datasquirel data => "datasquirel_user_7_new_database"
* @property {string} tableName - Table slug => "blog_posts"
*/
////////////////////////////////////////
/**
* @typedef {object} DSQL_FieldSchemaType
* @property {string} fieldName - Field Name(slug) => "long_description"
* @property {string} [originName] - Field origin name(optional)
* @property {boolean} [updatedField] - Has this field been renamed?
* @property {string} dataType - Field Data type => "BIGIN" | "LONGTEXT" | "VARCHAR(***)" | ...
* @property {boolean} [nullValue] - Is this a null value or not?
* @property {boolean} [notNullValue] - Is this NOT a null value?
* @property {boolean} [primaryKey] - Is this the primary key for table?
* @property {boolean} [encrypted] - Is this field value encrypted?
* @property {boolean} [autoIncrement] - Does this table primary key increment automatically?
* @property {string|number} [defaultValue] - Value of field by default
* @property {string} [defaultValueLiteral] - SQL key word which generates value automatically => "CURRENT_TIMESTAMP"
* @property {DSQL_ForeignKeyType} [foreignKey] - Field foreign key reference object
*/
/**
* @typedef {object} DSQL_ForeignKeyType
* @property {string} foreignKeyName - Unique Name of foreign key
* @property {string} destinationTableName - Reference table name(slug) => "blog_posts"
* @property {string} destinationTableColumnName - Reference column name(slug) => "id"
* @property {string} destinationTableColumnType - Reference table field type => "BIGINT" | "VARCHAR(***)" | ...
* @property {boolean} [cascadeDelete] - Does the reference table entry delete when this key is deleted?
* @property {boolean} [cascadeUpdate] - Does the reference table entry update when this key is updated?
*/
////////////////////////////////////////
/**
* @typedef {object} DSQL_IndexSchemaType
* @property {string} indexName - Unique Name of index => "blog_text_index"
* @property {string} indexType - "regular" or "fullText"
* @property {DSQL_IndexTableFieldType[]} indexTableFields - List of Index table fields
*/
/**
* @typedef {object} DSQL_IndexTableFieldType
* @property {string} value - Table Field Name
* @property {string} dataType - Table Field data type "VARCHAR(***)" | "BIGINT" | ...
*/
////////////////////////////////////////


@@ -64,7 +64,7 @@ async function addUser({
        DSQL_PASS?.match(/./) &&
        DSQL_DB_NAME?.match(/./)
    ) {
-        /** @type {import("../types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
+        /** @type {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
        let dbSchema;

        try {


@@ -83,7 +83,7 @@ async function getUser({ key, userId, database, fields }) {
        DSQL_PASS?.match(/./) &&
        DSQL_DB_NAME?.match(/./)
    ) {
-        /** @type {import("../types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
+        /** @type {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
        let dbSchema;

        try {


@@ -65,7 +65,7 @@ async function loginUser({
        ? temp_code_field
            ? temp_code_field
            : defaultTempLoginFieldName
-        : null;
+        : undefined;

    /**
     * Check Encryption Keys
@@ -126,7 +126,7 @@ async function loginUser({
        DSQL_PASS?.match(/./) &&
        DSQL_DB_NAME?.match(/./)
    ) {
-        /** @type {import("../types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
+        /** @type {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
        let dbSchema;

        try {
@@ -145,6 +145,7 @@ async function loginUser({
                email_login,
                email_login_code,
                email_login_field: emailLoginTempCodeFieldName,
+                token,
            });
        }
    } else {
@@ -156,7 +157,8 @@ async function loginUser({
         * @type {{ success: boolean, payload: import("../types/user.td").DATASQUIREL_LoggedInUser | null, userId?: number, msg?: string }}
         */
        httpResponse = await new Promise((resolve, reject) => {
-            const reqPayload = JSON.stringify({
+            /** @type {PackageUserLoginRequestBody} */
+            const reqPayload = {
                encryptionKey,
                payload,
                database,
@@ -164,7 +166,10 @@ async function loginUser({
                email_login,
                email_login_code,
                email_login_field: emailLoginTempCodeFieldName,
-            });
+                token,
+            };
+
+            const reqPayloadJSON = JSON.stringify(reqPayload);

            const httpsRequest = (
                scheme?.match(/^http$/i) ? http : https
@@ -173,7 +178,7 @@ async function loginUser({
                    method: "POST",
                    headers: {
                        "Content-Type": "application/json",
-                        "Content-Length": Buffer.from(reqPayload).length,
+                        "Content-Length": Buffer.from(reqPayloadJSON).length,
                        Authorization: key,
                    },
                    port: localHostPort || 443,
@@ -203,7 +208,7 @@ async function loginUser({
            }
        );

-        httpsRequest.write(reqPayload);
+        httpsRequest.write(reqPayloadJSON);
        httpsRequest.end();
    });
}
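
A standalone sketch of the request pattern these hunks refactor: build the payload object first, stringify it once, size the Content-Length from the stringified bytes, then write it. The hostname, path, API key, and payload below are placeholders.

const https = require("https");

// Placeholder payload and endpoint; only the request pattern mirrors the diff.
const reqPayload = { database: "example_db", payload: { username: "jane" } };
const reqPayloadJSON = JSON.stringify(reqPayload);

const httpsRequest = https.request(
    {
        method: "POST",
        hostname: "example.com",       // placeholder host
        path: "/api/user/login-user",  // placeholder path
        headers: {
            "Content-Type": "application/json",
            "Content-Length": Buffer.from(reqPayloadJSON).length,
            Authorization: "YOUR_API_KEY", // placeholder key
        },
        port: 443,
    },
    (response) => {
        let str = "";
        response.on("data", (chunk) => (str += chunk));
        response.on("end", () => console.log(str));
    }
);

httpsRequest.write(reqPayloadJSON);
httpsRequest.end();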


@@ -103,7 +103,7 @@ async function reauthUser({
        DSQL_PASS?.match(/./) &&
        DSQL_DB_NAME?.match(/./)
    ) {
-        /** @type {import("../types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
+        /** @type {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
        let dbSchema;

        try {


@@ -114,7 +114,7 @@ async function sendEmailCode({
        DSQL_PASS?.match(/./) &&
        DSQL_DB_NAME?.match(/./)
    ) {
-        /** @type {import("../types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
+        /** @type {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
        let dbSchema;

        try {


@@ -154,7 +154,7 @@ async function githubAuth({
        DSQL_PASS?.match(/./) &&
        DSQL_DB_NAME?.match(/./)
    ) {
-        /** @type {import("../../types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
+        /** @type {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined | undefined} */
        let dbSchema;

        try {


@@ -150,7 +150,7 @@ async function googleAuth({
        DSQL_PASS?.match(/./) &&
        DSQL_DB_NAME?.match(/./)
    ) {
-        /** @type {import("../../types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
+        /** @type {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined | undefined} */
        let dbSchema;

        try {


@@ -57,7 +57,7 @@ async function updateUser({ key, payload, database }) {
        DSQL_PASS?.match(/./) &&
        DSQL_DB_NAME?.match(/./)
    ) {
-        /** @type {import("../types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
+        /** @type {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
        let dbSchema;

        try {


@@ -18,7 +18,7 @@ const https = require("https");

/**
 * @typedef {Object} GetSchemaReturn
 * @property {boolean} success - Did the function run successfully?
- * @property {import("../types/database-schema.td").DSQL_DatabaseSchemaType[] | import("../types/database-schema.td").DSQL_DatabaseSchemaType | null} payload - Response payload
+ * @property {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType[] | import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | null} payload - Response payload
 */

/**


@@ -66,7 +66,7 @@ async function get({ key, db, query, queryValues, tableName }) {
        DSQL_PASS?.match(/./) &&
        DSQL_DB_NAME?.match(/./)
    ) {
-        /** @type {import("../types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
+        /** @type {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
        let dbSchema;

        try {


@@ -78,7 +78,7 @@ async function post({ key, query, queryValues, database, tableName }) {
        DSQL_PASS?.match(/./) &&
        DSQL_DB_NAME?.match(/./)
    ) {
-        /** @type {import("../types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
+        /** @type {import("@/package-shared/types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
        let dbSchema;

        try {