updates

parent f34d90d147
commit 16be9117e9
.gitignore (vendored, 3 changes)
@@ -73,6 +73,7 @@ web_modules/
.yarn-integrity

# dotenv environment variable files
*.env
.env
.env.development.local
.env.test.local
@@ -134,3 +135,5 @@ tsconfig.json

# others
deprecated
.tmp
test/
bin/dsql.js (new file, 117 lines)
@@ -0,0 +1,117 @@
#! /usr/bin/env node
// @ts-check

const fs = require("fs");
const path = require("path");
const { execSync } = require("child_process");

require("dotenv").config({
    path: path.resolve(process.cwd(), ".env"),
});

const datasquirel = require("../index");
const createDbFromSchema = require("./engine/createDbFromSchema");

if (!fs.existsSync(path.resolve(process.cwd(), ".env"))) {
    console.log(".env file not found");
    process.exit();
}

const { DSQL_HOST, DSQL_USER, DSQL_PASS, DSQL_DB_NAME, DSQL_KEY, DSQL_REF_DB_NAME, DSQL_FULL_SYNC, DSQL_ENCRYPTION_KEY, DSQL_ENCRYPTION_SALT } = process.env;

if (!DSQL_HOST?.match(/./)) {
    console.log("DSQL_HOST is required in your `.env` file");
    process.exit();
}

if (!DSQL_USER?.match(/./)) {
    console.log("DSQL_USER is required in your `.env` file");
    process.exit();
}

if (!DSQL_PASS?.match(/./)) {
    console.log("DSQL_PASS is required in your `.env` file");
    process.exit();
}

const dbSchemaLocalFilePath = path.resolve(process.cwd(), "dsql.schema.json");

async function run() {
    let schemaData;

    if (DSQL_KEY && DSQL_REF_DB_NAME?.match(/./)) {
        const dbSchemaDataResponse = await datasquirel.getSchema({
            key: DSQL_KEY,
            database: DSQL_REF_DB_NAME || undefined,
        });

        if (!dbSchemaDataResponse.payload || Array.isArray(dbSchemaDataResponse.payload)) {
            console.log("DSQL_KEY+DSQL_REF_DB_NAME => Error in fetching DB schema");
            console.log(dbSchemaDataResponse);
            process.exit();
        }

        let fetchedDbSchemaObject = dbSchemaDataResponse.payload;
        if (DSQL_DB_NAME) fetchedDbSchemaObject.dbFullName = DSQL_DB_NAME;

        schemaData = [fetchedDbSchemaObject];
    } else if (DSQL_KEY) {
        const dbSchemaDataResponse = await datasquirel.getSchema({
            key: DSQL_KEY,
            database: DSQL_REF_DB_NAME || undefined,
        });

        if (!dbSchemaDataResponse.payload || !Array.isArray(dbSchemaDataResponse.payload)) {
            console.log("DSQL_KEY => Error in fetching DB schema");
            console.log(dbSchemaDataResponse);
            process.exit();
        }

        let fetchedDbSchemaObject = dbSchemaDataResponse.payload;
        // fetchedDbSchemaObject.forEach((db, index) => {
        //     db.dbFullName = db.dbFullName?.replace(/^datasquirel_user_\d+_/, "");
        // });

        schemaData = fetchedDbSchemaObject;
    } else if (fs.existsSync(dbSchemaLocalFilePath)) {
        schemaData = JSON.parse(fs.readFileSync(dbSchemaLocalFilePath, "utf8"));
    } else {
        console.log("No source for DB Schema. Please provide a local `dsql.schema.json` file, or provide `DSQL_KEY` and `DSQL_REF_DB_NAME` environment variables.");
        process.exit();
    }

    if (!schemaData) {
        console.log("No schema found");
        process.exit();
    }

    if (DSQL_FULL_SYNC?.match(/true/i)) {
        fs.writeFileSync(dbSchemaLocalFilePath, JSON.stringify(schemaData[0], null, 4), "utf8");
    }

    console.log("Now generating and mapping databases ...");
    // deepcode ignore reDOS: <please specify a reason of ignoring this>
    await createDbFromSchema(schemaData);
    console.log("Databases created Successfully!");
}

// let timeout;

// if (fs.existsSync(dbSchemaLocalFilePath)) {
//     fs.watchFile(dbSchemaLocalFilePath, { interval: 1000 }, (curr, prev) => {
//         clearTimeout(timeout);

//         timeout = setTimeout(() => {
//             console.log("`dsql.schema.json` file changed. Now syncing databases ...");
//             run();
//         }, 5000);
//     });
// }

let interval;
interval = setInterval(() => {
    console.log("Syncing Databases ...");
    run();
}, 20000);

run();
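For orientation, a sketch of the `.env` file this CLI reads. The variable names come from the destructuring in the script above; every value shown is a placeholder, and only DSQL_HOST, DSQL_USER, and DSQL_PASS are hard requirements as far as the script's checks go.

# Placeholder .env sketch for `node bin/dsql.js` (values are illustrative only)
DSQL_HOST=localhost
DSQL_USER=dsql_user
DSQL_PASS=change-me
DSQL_DB_NAME=my_database
DSQL_KEY=your-datasquirel-api-key
DSQL_REF_DB_NAME=reference_db
DSQL_FULL_SYNC=true
DSQL_ENCRYPTION_KEY=change-me-too
DSQL_ENCRYPTION_SALT=change-me-three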
bin/engine/createDbFromSchema.js (new file, 254 lines)
@@ -0,0 +1,254 @@
// @ts-check

////////////////////////////////////////
////////////////////////////////////////
////////////////////////////////////////

const path = require("path");

////////////////////////////////////////

const noDatabaseDbHandler = require("./utils/noDatabaseDbHandler");
const varDatabaseDbHandler = require("./utils/varDatabaseDbHandler");
const createTable = require("./utils/createTable");
const updateTable = require("./utils/updateTable");

/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */

/**
 * Create database from Schema
 * ==============================================================================
 * @description Create database from Schema. This function is called when the user
 * runs the "dsql create" command. `NOTE`: there must be a "dsql.schema.json" file
 * in the root of the project for this function to work
 *
 * @param {import("../../types/database-schema.td").DSQL_DatabaseSchemaType[]} dbSchema - An array of database schema objects
 */
async function createDbFromSchema(dbSchema) {
    /**
     * Grab Schema
     *
     * @description Grab Schema
     */

    if (!dbSchema || !Array.isArray(dbSchema) || !dbSchema[0]) {
        console.log("Invalid DB schema data");
        return;
    }

    for (let i = 0; i < dbSchema.length; i++) {
        /** @type {import("../../types/database-schema.td").DSQL_DatabaseSchemaType} */
        const database = dbSchema[i];
        const { dbFullName, tables } = database;

        ////////////////////////////////////////
        ////////////////////////////////////////
        ////////////////////////////////////////

        /** @type {{ dbFullName: string }[] | null} */
        const dbCheck = await noDatabaseDbHandler({ query: `SELECT SCHEMA_NAME AS dbFullName FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = '${dbFullName}'` });

        if (dbCheck && dbCheck[0]?.dbFullName) {
            // Database Exists
        } else {
            const newDatabase = await noDatabaseDbHandler({ query: `CREATE DATABASE IF NOT EXISTS \`${dbFullName}\` CHARACTER SET utf8mb4 COLLATE utf8mb4_bin` });
        }

        ////////////////////////////////////////
        ////////////////////////////////////////
        ////////////////////////////////////////

        /**
         * Select all tables
         * @type {{ TABLE_NAME: string }[] | null}
         * @description Select All tables in target database
         */
        const allTables = await noDatabaseDbHandler({ query: `SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA='${dbFullName}'` });

        let tableDropped;

        if (!allTables) {
            console.log("No Tables to Update");
            continue;
        }

        for (let tb = 0; tb < allTables.length; tb++) {
            const { TABLE_NAME } = allTables[tb];

            /**
             * @description Check if TABLE_NAME is part of the tables contained
             * in the user schema JSON. If it's not, the table is either deleted
             * or the table name has been recently changed
             */
            if (!tables.filter((_table) => _table.tableName === TABLE_NAME)[0]) {
                const oldTableFilteredArray = tables.filter((_table) => _table.tableNameOld && _table.tableNameOld === TABLE_NAME);

                /**
                 * @description Check if this table has been recently renamed. Rename
                 * the table if true. Drop the table if false
                 */
                if (oldTableFilteredArray && oldTableFilteredArray[0]) {
                    console.log("Renaming Table");
                    await varDatabaseDbHandler({
                        queryString: `RENAME TABLE \`${oldTableFilteredArray[0].tableNameOld}\` TO \`${oldTableFilteredArray[0].tableName}\``,
                        database: dbFullName,
                    });
                } else {
                    console.log(`Dropping Table from ${dbFullName}`);
                    // deepcode ignore reDOS: <NO user input>
                    await varDatabaseDbHandler({
                        queryString: `DROP TABLE \`${TABLE_NAME}\``,
                        database: dbFullName,
                    });

                    tableDropped = true;
                }
            }
        }

        ////////////////////////////////////////
        ////////////////////////////////////////
        ////////////////////////////////////////

        /**
         * @description Iterate through each table and perform table actions
         */
        for (let t = 0; t < tables.length; t++) {
            const table = tables[t];

            if (tableDropped) continue;

            const { tableName, fields, indexes } = table;

            /**
             * @description Check if table exists
             */
            const tableCheck = await varDatabaseDbHandler({
                queryString: `
                SELECT EXISTS (
                    SELECT
                        TABLE_NAME
                    FROM
                        information_schema.TABLES
                    WHERE
                        TABLE_SCHEMA = ? AND
                        TABLE_NAME = ?
                ) AS tableExists`,
                queryValuesArray: [dbFullName, table.tableName],
                database: dbFullName,
            });

            ////////////////////////////////////////

            if (tableCheck && tableCheck[0]?.tableExists > 0) {
                /**
                 * @description Update table if table exists
                 */
                const updateExistingTable = await updateTable({
                    dbFullName: dbFullName,
                    tableName: tableName,
                    tableInfoArray: fields,
                    dbSchema,
                    tableIndexes: indexes,
                    tableIndex: t,
                });

                if (table.childrenTables && table.childrenTables[0]) {
                    for (let ch = 0; ch < table.childrenTables.length; ch++) {
                        const childTable = table.childrenTables[ch];

                        const updateExistingChildTable = await updateTable({
                            dbFullName: childTable.dbNameFull,
                            tableName: childTable.tableName,
                            tableInfoArray: fields,
                            dbSchema,
                            tableIndexes: indexes,
                            clone: true,
                        });

                        // console.log(updateExistingChildTable);
                    }
                }

                ////////////////////////////////////////
            } else {
                ////////////////////////////////////////

                /**
                 * @description Create new Table if table doesn't exist
                 */
                const createNewTable = await createTable({
                    tableName: tableName,
                    tableInfoArray: fields,
                    varDatabaseDbHandler,
                    dbFullName: dbFullName,
                    dbSchema,
                });

                if (indexes && indexes[0]) {
                    /**
                     * Handle DATASQUIREL Table Indexes
                     * ===================================================
                     * @description Iterate through each datasquirel schema
                     * table index(if available), and perform operations
                     */
                    if (indexes && indexes[0]) {
                        for (let g = 0; g < indexes.length; g++) {
                            const { indexType, indexName, indexTableFields, alias } = indexes[g];

                            if (!alias?.match(/./)) continue;

                            /**
                             * @type {DSQL_MYSQL_SHOW_INDEXES_Type[] | null}
                             * @description All indexes from MYSQL db
                             */
                            const allExistingIndexes = await varDatabaseDbHandler({
                                queryString: `SHOW INDEXES FROM \`${tableName}\``,
                                database: dbFullName,
                            });

                            /**
                             * @description Check for existing Index in MYSQL db
                             */
                            try {
                                const existingKeyInDb = allExistingIndexes ? allExistingIndexes.filter((indexObject) => indexObject.Key_name === alias) : null;
                                if (!existingKeyInDb?.[0]) throw new Error("This Index Does not Exist");
                            } catch (error) {
                                /**
                                 * @description Create new index if determined that it
                                 * doesn't exist in MYSQL db
                                 */
                                await varDatabaseDbHandler({
                                    queryString: `CREATE${indexType.match(/fullText/i) ? " FULLTEXT" : ""} INDEX \`${alias}\` ON ${tableName}(${indexTableFields
                                        .map((nm) => nm.value)
                                        .map((nm) => `\`${nm}\``)
                                        .join(",")}) COMMENT 'schema_index'`,
                                    database: dbFullName,
                                });
                            }
                        }
                    }
                }
            }

            ////////////////////////////////////////
        }
    }

    ////////////////////////////////////////
    ////////////////////////////////////////
    ////////////////////////////////////////
}

/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */

module.exports = createDbFromSchema;
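For reference, a hedged sketch of the shape each entry in `dbSchema` appears to take, inferred only from the properties read above (`dbFullName`, `tables`, `tableName`, `tableNameOld`, `fields`, `indexes`, `childrenTables`); the concrete names and values below are illustrative, not part of this commit.

// Hypothetical dsql.schema.json entry, inferred from the properties createDbFromSchema() reads
const exampleSchemaEntry = {
    dbFullName: "datasquirel_user_1_blog", // placeholder database name
    tables: [
        {
            tableName: "blog_posts",
            // tableNameOld: "posts", // only present when an existing table was renamed
            fields: [
                { fieldName: "id", dataType: "BIGINT", notNullValue: true, primaryKey: true, autoIncrement: true },
                { fieldName: "title", dataType: "VARCHAR(250)", notNullValue: true },
            ],
            indexes: [
                {
                    indexType: "fullText",
                    alias: "blog_posts_title_index",
                    indexTableFields: [{ value: "title" }],
                },
            ],
        },
    ],
};

// createDbFromSchema() takes an array of such objects
module.exports = [exampleSchemaEntry];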
bin/engine/data/dataTypes.json (new file, 73 lines)
@@ -0,0 +1,73 @@
[
    {
        "title": "VARCHAR",
        "name": "VARCHAR",
        "value": "0-255",
        "argument": true,
        "description": "Varchar is simply letters and numbers within the range 0 - 255",
        "maxValue": 255
    },
    {
        "title": "TINYINT",
        "name": "TINYINT",
        "value": "0-100",
        "description": "TINYINT means Integers: -128 to 127 (signed)",
        "maxValue": 127
    },
    {
        "title": "SMALLINT",
        "name": "SMALLINT",
        "value": "0-255",
        "description": "SMALLINT means Integers: -32,768 to 32,767 (signed)",
        "maxValue": 32767
    },
    {
        "title": "MEDIUMINT",
        "name": "MEDIUMINT",
        "value": "0-255",
        "description": "MEDIUMINT means Integers: -8,388,608 to 8,388,607 (signed)",
        "maxValue": 8388607
    },
    {
        "title": "INT",
        "name": "INT",
        "value": "0-255",
        "description": "INT means Integers: -2,147,483,648 to 2,147,483,647 (signed)",
        "maxValue": 2147483647
    },
    {
        "title": "BIGINT",
        "name": "BIGINT",
        "value": "0-255",
        "description": "BIGINT means Integers: -2^63 to 2^63 - 1 (signed)",
        "maxValue": 2e63
    },
    {
        "title": "TINYTEXT",
        "name": "TINYTEXT",
        "value": "0-255",
        "description": "Text with 255 max characters",
        "maxValue": 127
    },
    {
        "title": "TEXT",
        "name": "TEXT",
        "value": "0-100",
        "description": "TEXT is just text with max length 65,535",
        "maxValue": 127
    },
    {
        "title": "MEDIUMTEXT",
        "name": "MEDIUMTEXT",
        "value": "0-255",
        "description": "MEDIUMTEXT is just text with max length 16,777,215",
        "maxValue": 127
    },
    {
        "title": "LONGTEXT",
        "name": "LONGTEXT",
        "value": "0-255",
        "description": "LONGTEXT is just text with max length 4,294,967,295",
        "maxValue": 127
    }
]
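A short sketch of how a consumer might use this list to look up a type's maximum length; the require path and the lookup rule are assumptions for illustration, not code in this commit.

// Hypothetical lookup against dataTypes.json (not part of this commit)
const dataTypes = require("./data/dataTypes.json");

function maxLengthFor(dataTypeName) {
    // Match "VARCHAR(250)" as well as bare names like "TEXT"
    const baseName = dataTypeName.replace(/\(.*\)/, "").toUpperCase();
    const match = dataTypes.find((type) => type.name === baseName);
    return match ? match.maxValue : null;
}

console.log(maxLengthFor("VARCHAR(250)")); // 255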
bin/engine/data/defaultFields.json (new file, 39 lines)
@@ -0,0 +1,39 @@
[
    {
        "fieldName": "id",
        "dataType": "BIGINT",
        "notNullValue": true,
        "primaryKey": true,
        "autoIncrement": true
    },
    {
        "fieldName": "date_created",
        "dataType": "VARCHAR(250)",
        "notNullValue": true
    },
    {
        "fieldName": "date_created_code",
        "dataType": "BIGINT",
        "notNullValue": true
    },
    {
        "fieldName": "date_created_timestamp",
        "dataType": "TIMESTAMP",
        "defaultValueLiteral": "CURRENT_TIMESTAMP"
    },
    {
        "fieldName": "date_updated",
        "dataType": "VARCHAR(250)",
        "notNullValue": true
    },
    {
        "fieldName": "date_updated_code",
        "dataType": "BIGINT",
        "notNullValue": true
    },
    {
        "fieldName": "date_updated_timestamp",
        "dataType": "TIMESTAMP",
        "defaultValueLiteral": "CURRENT_TIMESTAMP"
    }
]
bin/engine/data/possibleFields.json (new file, 17 lines)
@@ -0,0 +1,17 @@
{
    "fieldName": "string",
    "dataType": "BIGINT",
    "nullValue": true,
    "primaryKey": true,
    "autoIncrement": true,
    "defaultValue": "CURRENT_TIMESTAMP",
    "defaultValueLiteral": "CURRENT_TIMESTAMP",
    "notNullValue": true,
    "foreignKey": {
        "foreignKeyName": "Name",
        "destinationTableName": "Table Name",
        "destinationTableColumnName": "Column Name",
        "cascadeDelete": true,
        "cascadeUpdate": true
    }
}
bin/engine/deploy.js (new file, 5 lines)
@@ -0,0 +1,5 @@
const fs = require("fs");

async function deploy() {}

deploy();
bin/engine/encodingUpdate.js (new file, 53 lines)
@@ -0,0 +1,53 @@
require("dotenv").config({ path: "./../.env" });

////////////////////////////////////////

const noDatabaseDbHandler = require("../functions/backend/noDatabaseDbHandler");
const varDatabaseDbHandler = require("../functions/backend/varDatabaseDbHandler");
const createTable = require("./utils/createTable");
const updateTable = require("./utils/updateTable");

/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */

/**
 * Update Encoding
 *
 * @description Convert every user database and table to utf8mb4 / utf8mb4_bin
 */
varDatabaseDbHandler({
    queryString: `SELECT user_database_tables.*,user_databases.db_full_name FROM user_database_tables JOIN user_databases ON user_database_tables.db_id=user_databases.id`,
    database: "datasquirel",
}).then(async (tables) => {
    for (let i = 0; i < tables.length; i++) {
        const table = tables[i];
        const { id, user_id, db_id, db_full_name, table_name, table_slug, table_description } = table;

        const tableInfo = await varDatabaseDbHandler({
            queryString: `SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA='${db_full_name}' AND TABLE_NAME='${table_slug}'`,
            database: db_full_name,
        });

        const updateDbCharset = await varDatabaseDbHandler({
            queryString: `ALTER DATABASE ${db_full_name} CHARACTER SET = utf8mb4 COLLATE = utf8mb4_bin;`,
            database: db_full_name,
        });

        const updateEncoding = await varDatabaseDbHandler({
            queryString: `ALTER TABLE \`${table_slug}\` CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin`,
            database: db_full_name,
        });

        console.log(updateEncoding);
    }

    process.exit();
});

////////////////////////////////////////
////////////////////////////////////////
////////////////////////////////////////
bin/engine/grantFullPriviledges.js (new file, 64 lines)
@@ -0,0 +1,64 @@
require("dotenv").config({ path: "./../.env" });

////////////////////////////////////////

const noDatabaseDbHandler = require("../functions/backend/noDatabaseDbHandler");
const serverError = require("../functions/backend/serverError");

/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */

async function grantFullPriviledges({ userId }) {
    /**
     * Grant Privileges
     *
     * @description Grant the datasquirel_full_access user full privileges on every datasquirel user database
     */
    try {
        const allDatabases = await noDatabaseDbHandler(`SHOW DATABASES`);

        const datasquirelUserDatabases = allDatabases.filter((database) => database.Database.match(/datasquirel_user_/));

        for (let i = 0; i < datasquirelUserDatabases.length; i++) {
            const datasquirelUserDatabase = datasquirelUserDatabases[i];
            const { Database } = datasquirelUserDatabase;

            const grantDbPriviledges = await noDatabaseDbHandler(`GRANT ALL PRIVILEGES ON ${Database}.* TO 'datasquirel_full_access'@'127.0.0.1' WITH GRANT OPTION`);

            console.log(grantDbPriviledges);
        }

        const flushPriviledged = await noDatabaseDbHandler(`FLUSH PRIVILEGES`);
    } catch (error) {
        serverError({
            component: "shell/grantDbPriviledges/main-catch-error",
            message: error.message,
            user: { id: userId },
        });
    }

    ////////////////////////////////////////
    ////////////////////////////////////////
    ////////////////////////////////////////

    process.exit();

    ////////////////////////////////////////
    ////////////////////////////////////////
    ////////////////////////////////////////
}

/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */

const userArg = process.argv[process.argv.indexOf("--user")];
const externalUser = process.argv[process.argv.indexOf("--user") + 1];

grantFullPriviledges({ userId: userArg ? externalUser : null });
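Going by the `--user` flag parsing at the bottom of the file, this script is presumably invoked as `node bin/engine/grantFullPriviledges.js --user 123`, where 123 is a placeholder user id that is only used for error reporting via serverError.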
bin/engine/lessWatch.js (new file, 61 lines)
@@ -0,0 +1,61 @@
const fs = require("fs");
const { exec } = require("child_process");

require("dotenv").config({ path: "./../.env" });

const sourceFile = process.argv.indexOf("--src") >= 0 ? process.argv[process.argv.indexOf("--src") + 1] : null;
const destinationFile = process.argv.indexOf("--dst") >= 0 ? process.argv[process.argv.indexOf("--dst") + 1] : null;

if (!sourceFile || !destinationFile) {
    console.log("Please provide both `--src` and `--dst` arguments");
    process.exit();
}

/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */

console.log("Running Less compiler ...");

const sourceFiles = sourceFile.split(",");
const dstFiles = destinationFile.split(",");

for (let i = 0; i < sourceFiles.length; i++) {
    const srcFolder = sourceFiles[i];
    const dstFile = dstFiles[i];

    fs.watch(srcFolder, { recursive: true }, (evtType, prev) => {
        if (prev?.match(/\(/) || prev?.match(/\.js$/i)) {
            return;
        }

        let finalSrcPath = `${srcFolder}/main.less`;
        let finalDstPath = dstFile;

        if (prev?.match(/\[/)) {
            const paths = prev.split("/");
            const targetPathFull = paths[paths.length - 1];
            const targetPath = targetPathFull.replace(/\[|\]/g, "").replace(/\.less/, "");

            const destinationFileParentFolder = dstFile.replace(/\/[^\/]+\.css$/, "");

            const targetDstFilePath = `${destinationFileParentFolder}/${targetPath}.css`;

            finalSrcPath = `${srcFolder}/${targetPathFull}`;
            finalDstPath = targetDstFilePath;
        }

        exec(`lessc ${finalSrcPath} ${finalDstPath?.match(/\.css$/) ? finalDstPath : finalDstPath.replace(/\/$/, "") + "/_main.css"}`, (error, stdout, stderr) => {
            /** @type {Error} */
            if (error) {
                console.log("ERROR =>", error.message);

                if (!evtType?.match(/change/i) && prev.match(/\[/)) {
                    fs.unlinkSync(finalDstPath);
                }

                return;
            }

            console.log("Less Compilation \x1b[32msuccessful\x1b[0m!");
        });
    });
}
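Based on the `--src`/`--dst` parsing above, the watcher is presumably started with something like `node bin/engine/lessWatch.js --src ./styles/less --dst ./styles/main.css` (comma-separated lists map source folders to destination files pairwise); both paths here are placeholders.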
bin/engine/readImage.js (new file, 21 lines)
@@ -0,0 +1,21 @@
const fs = require("fs");

/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */

/**
 * Read Image
 *
 * @description Read an image file and log its base64 encoding
 */
const imageBase64 = fs.readFileSync("./../public/images/unique-tokens-icon.png", "base64");

console.log(imageBase64.toString());

////////////////////////////////////////
////////////////////////////////////////
////////////////////////////////////////
bin/engine/recoverMainJsonFromDb.js (new file, 102 lines)
@@ -0,0 +1,102 @@
const fs = require("fs");
require("dotenv").config({ path: "./../.env" });

////////////////////////////////////////

const noDatabaseDbHandler = require("../functions/backend/noDatabaseDbHandler");
const varDatabaseDbHandler = require("../functions/backend/varDatabaseDbHandler");
const createTable = require("./utils/createTable");
const slugToCamelTitle = require("./utils/slugToCamelTitle");
const updateTable = require("./utils/updateTable");

/** ****************************************************************************** */

const userId = process.argv.indexOf("--userId") >= 0 ? process.argv[process.argv.indexOf("--userId") + 1] : null;

/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */

/**
 * Recover Main JSON
 *
 * @description Rebuild a user's main.json schema file from the live database records
 */
async function recoverMainJsonFromDb() {
    if (!userId) {
        console.log("No user Id provided");
        return;
    }

    const databases = await global.DB_HANDLER(`SELECT * FROM user_databases WHERE user_id='${userId}'`);

    const dbWrite = [];

    for (let i = 0; i < databases.length; i++) {
        const { id, db_name, db_slug, db_full_name, db_image, db_description } = databases[i];

        const dbObject = {
            dbName: db_name,
            dbSlug: db_slug,
            dbFullName: db_full_name,
            dbDescription: db_description,
            dbImage: db_image,
            tables: [],
        };

        const tables = await global.DB_HANDLER(`SELECT * FROM user_database_tables WHERE user_id='${userId}' AND db_id='${id}'`);

        for (let j = 0; j < tables.length; j++) {
            const { table_name, table_slug, table_description } = tables[j];

            const tableObject = {
                tableName: table_slug,
                tableFullName: table_name,
                fields: [],
                indexes: [],
            };

            const tableFields = await varDatabaseDbHandler({
                database: db_full_name,
                queryString: `SHOW COLUMNS FROM ${table_slug}`,
            });

            for (let k = 0; k < tableFields.length; k++) {
                const { Field, Type, Null, Default, Key } = tableFields[k];

                const fieldObject = {
                    fieldName: Field,
                    dataType: Type.toUpperCase(),
                };

                if (Default?.match(/./) && !Default?.match(/timestamp/i)) fieldObject["defaultValue"] = Default;
                if (Key?.match(/pri/i)) {
                    fieldObject["primaryKey"] = true;
                    fieldObject["autoIncrement"] = true;
                }
                if (Default?.match(/timestamp/i)) fieldObject["defaultValueLiteral"] = Default;
                if (Null?.match(/yes/i)) fieldObject["nullValue"] = true;
                if (Null?.match(/no/i)) fieldObject["notNullValue"] = true;

                tableObject.fields.push(fieldObject);
            }

            dbObject.tables.push(tableObject);
        }

        dbWrite.push(dbObject);
    }

    fs.writeFileSync(`./../jsonData/dbSchemas/users/user-${userId}/main.json`, JSON.stringify(dbWrite, null, 4), "utf-8");

    process.exit();
}

////////////////////////////////////////
////////////////////////////////////////
////////////////////////////////////////

recoverMainJsonFromDb();
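Judging from the `--userId` parsing above, this recovery script is presumably run as `node bin/engine/recoverMainJsonFromDb.js --userId 123` (the id is a placeholder), writing the rebuilt schema to ./../jsonData/dbSchemas/users/user-123/main.json relative to the working directory.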
bin/engine/tailwindWatch.js (new file, 27 lines)
@@ -0,0 +1,27 @@
const fs = require("fs");
const { exec } = require("child_process");

require("dotenv").config({ path: "./../.env" });

const sourceFile = process.argv.indexOf("--src") >= 0 ? process.argv[process.argv.indexOf("--src") + 1] : null;
const destinationFile = process.argv.indexOf("--dst") >= 0 ? process.argv[process.argv.indexOf("--dst") + 1] : null;

/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */

console.log("Running Tailwind CSS compiler ...");

fs.watch("./../", (curr, prev) => {
    exec(`npx tailwindcss -i ./tailwind/main.css -o ./styles/tailwind.css`, (error, stdout, stderr) => {
        if (error) {
            console.log("ERROR =>", error.message);
            return;
        }

        console.log("Tailwind CSS Compilation \x1b[32msuccessful\x1b[0m!");
    });
});
bin/engine/test.js (new file, 221 lines)
@@ -0,0 +1,221 @@
require("dotenv").config({ path: "./../.env" });

const dbEngine = require("datasquirel/engine");
const http = require("http");

const datasquirel = require("datasquirel");

`curl http://www.dataden.tech`;

datasquirel
    .get({
        db: "test",
        key: process.env.DATASQUIREL_READ_ONLY_KEY,
        query: "SELECT title, slug, body FROM blog_posts",
    })
    .then((response) => {
        console.log(response);
    });

// dbEngine.db
//     .query({
//         dbFullName: "datasquirel",
//         dbHost: process.env.DB_HOST,
//         dbPassword: process.env.DB_PASSWORD,
//         dbUsername: process.env.DB_USERNAME,
//         query: "SHOW TABLES",
//     })
//     .then((res) => {
//         console.log("res =>", res);
//     });

// run({
//     key: "bc057a2cd57922e085739c89b4985e5e676b655d7cc0ba7604659cad0a08c252040120c06597a5d22959a502a44bd816",
//     db: "showmerebates",
//     query: "SELECT * FROM test_table",
// }).then((res) => {
//     console.log("res =>", res);
// });

post({
    key: "3115fce7ea7772eda75f8f0e55a1414c5c018b4920f4bc99a2d4d7000bac203c15a7036fd3d7ef55ae67a002d4c757895b5c58ff82079a04ba6d42d23d4353256985090959a58a9af8e03cb277fc7895413e6f28ae11b1cc15329c7f94cdcf9a795f54d6e1d319adc287dc147143e62d",
    database: "showmerebates",
    query: {
        action: "delete",
        table: "test_table",
        identifierColumnName: "id",
        identifierValue: 6,
    },
}).then((res) => {
    console.log("res =>", res);
});

async function run({ key, db, query }) {
    const httpResponse = await new Promise((resolve, reject) => {
        http.request(
            {
                method: "GET",
                headers: {
                    "Content-Type": "application/json",
                    Authorization: key,
                },
                hostname: "localhost",
                port: 7070,
                path: `/api/query/get?db=${db}&query=${query
                    .replace(/\n|\r|\n\r/g, "")
                    .replace(/ {2,}/g, " ")
                    .replace(/ /g, "+")}`,
            },

            /**
             * Callback Function
             *
             * @description https request callback
             */
            (response) => {
                var str = "";

                response.on("data", function (chunk) {
                    str += chunk;
                });

                response.on("end", function () {
                    resolve(JSON.parse(str));
                });

                response.on("error", (err) => {
                    reject(err);
                });
            }
        ).end();
    });

    return httpResponse;
}

/**
 * @typedef {Object} PostReturn
 * @property {boolean} success - Did the function run successfully?
 * @property {(Object[]|string)} [payload=[]] - Response payload
 */

/**
 * @typedef {object} PostDataPayload
 * @property {string} action - "insert" | "update" | "delete"
 * @property {string} table - Table name(slug) eg "blog_posts"
 * @property {string} identifierColumnName - Table identifier field name => eg. "id" OR "email"
 * @property {string} identifierValue - Corresponding value of the selected field name => This
 * identifies the target row for "update" or "delete". Not needed for "insert"
 * @property {object} data - Table insert payload object => This must have keys that match
 * table fields
 * @property {string?} duplicateColumnName - Duplicate column name to check for
 * @property {string?} duplicateColumnValue - Duplicate column value to match. If no "update" param
 * provided, function will return null
 * @property {boolean?} update - Should the "insert" action update the existing entry if indeed
 * the entry with "duplicateColumnValue" exists?
 */

/**
 * Post request
 * ==============================================================================
 * @async
 *
 * @param {Object} params - Single object passed
 * @param {string} params.key - FULL ACCESS API Key
 * @param {string} params.database - Database Name
 * @param {PostDataPayload} params.query - SQL query String or Request Object
 *
 * @returns { Promise<PostReturn> } - Return Object
 */
async function post({ key, query, database }) {
    /**
     * Make https request
     *
     * @description make a request to datasquirel.com
     */
    const httpResponse = await new Promise((resolve, reject) => {
        const reqPayloadString = JSON.stringify({
            query,
            database,
        }).replace(/\n|\r|\n\r/gm, "");

        try {
            JSON.parse(reqPayloadString);
        } catch (error) {
            console.log(error);
            console.log(reqPayloadString);

            return {
                success: false,
                payload: null,
                error: "Query object is invalid. Please Check query data values",
            };
        }

        const reqPayload = reqPayloadString;

        const httpsRequest = http.request(
            {
                method: "POST",
                headers: {
                    "Content-Type": "application/json",
                    "Content-Length": Buffer.from(reqPayload).length,
                    Authorization: key,
                },
                hostname: "localhost",
                port: 7070,
                path: `/api/query/post`,
            },

            /**
             * Callback Function
             *
             * @description https request callback
             */
            (response) => {
                var str = "";

                response.on("data", function (chunk) {
                    str += chunk;
                });

                response.on("end", function () {
                    try {
                        resolve(JSON.parse(str));
                    } catch (error) {
                        console.log(error.message);
                        console.log("Fetched Payload =>", str);

                        resolve({
                            success: false,
                            payload: null,
                            error: error.message,
                        });
                    }
                });

                response.on("error", (err) => {
                    resolve({
                        success: false,
                        payload: null,
                        error: err.message,
                    });
                });
            }
        );

        httpsRequest.write(reqPayload);

        httpsRequest.on("error", (error) => {
            console.log("HTTPS request ERROR =>", error);
        });

        httpsRequest.end();
    });

    ////////////////////////////////////////
    ////////////////////////////////////////
    ////////////////////////////////////////

    return httpResponse;
}
bin/engine/updateChildrenTablesOnDb.js (new file, 81 lines)
@@ -0,0 +1,81 @@
const fs = require("fs");
require("dotenv").config({ path: "./../.env" });

////////////////////////////////////////

const noDatabaseDbHandler = require("../functions/backend/noDatabaseDbHandler");
const serverError = require("../functions/backend/serverError");

const varDatabaseDbHandler = require("../functions/backend/varDatabaseDbHandler");

/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */

async function updateChildrenTablesOnDb() {
    /**
     * Update Children Tables
     *
     * @description Flag child tables in user_database_tables based on each user's main.json schema
     */
    try {
        const rootDir = "./../jsonData/dbSchemas/users";
        const userFolders = fs.readdirSync(rootDir);

        for (let i = 0; i < userFolders.length; i++) {
            const folder = userFolders[i];
            const userId = folder.replace(/user-/, "");
            const databases = JSON.parse(fs.readFileSync(`${rootDir}/${folder}/main.json`));

            for (let j = 0; j < databases.length; j++) {
                const db = databases[j];
                const dbTables = db.tables;
                for (let k = 0; k < dbTables.length; k++) {
                    const table = dbTables[k];

                    if (table?.childTable) {
                        const originTableName = table.childTableName;
                        const originDbName = table.childTableDbFullName;

                        const WHERE_CLAUSE = `WHERE user_id='${userId}' AND db_slug='${db.dbSlug}' AND table_slug='${table.tableName}'`;

                        const existingTableInDb = await global.DB_HANDLER(`SELECT * FROM user_database_tables ${WHERE_CLAUSE}`);
                        console.log(existingTableInDb);

                        if (existingTableInDb && existingTableInDb[0]) {
                            const updateChildrenTablesInfo = await global.DB_HANDLER(`UPDATE user_database_tables SET child_table='1',child_table_parent_database='${originDbName}',child_table_parent_table='${originTableName}' WHERE id='${existingTableInDb[0].id}'`);

                            console.log(updateChildrenTablesInfo);
                        }
                    }
                }
            }
        }
    } catch (error) {
        console.log(error);
    }

    ////////////////////////////////////////
    ////////////////////////////////////////
    ////////////////////////////////////////

    process.exit();

    ////////////////////////////////////////
    ////////////////////////////////////////
    ////////////////////////////////////////
}

/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */

// const userArg = process.argv[process.argv.indexOf("--user")];
// const externalUser = process.argv[process.argv.indexOf("--user") + 1];

updateChildrenTablesOnDb();
bin/engine/updateDateTimestamps.js (new file, 53 lines)
@@ -0,0 +1,53 @@
require("dotenv").config({ path: "./../.env" });

////////////////////////////////////////

const noDatabaseDbHandler = require("../functions/backend/noDatabaseDbHandler");
const varDatabaseDbHandler = require("../functions/backend/varDatabaseDbHandler");
const createTable = require("./utils/createTable");
const updateTable = require("./utils/updateTable");

/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */

/**
 * Update Date Timestamps
 *
 * @description Set CURRENT_TIMESTAMP defaults on the date_created_timestamp and date_updated_timestamp columns of every user table
 */
varDatabaseDbHandler({
    queryString: `SELECT user_database_tables.*,user_databases.db_full_name FROM user_database_tables JOIN user_databases ON user_database_tables.db_id=user_databases.id`,
    database: "datasquirel",
}).then(async (tables) => {
    for (let i = 0; i < tables.length; i++) {
        const table = tables[i];
        const { id, user_id, db_id, db_full_name, table_name, table_slug, table_description } = table;

        const tableInfo = await varDatabaseDbHandler({
            queryString: `SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA='${db_full_name}' AND TABLE_NAME='${table_slug}'`,
            database: db_full_name,
        });

        const updateCreationDateTimestamp = await varDatabaseDbHandler({
            queryString: `ALTER TABLE \`${table_slug}\` MODIFY COLUMN date_created_timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP`,
            database: db_full_name,
        });

        const updateDateTimestamp = await varDatabaseDbHandler({
            queryString: `ALTER TABLE \`${table_slug}\` MODIFY COLUMN date_updated_timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP`,
            database: db_full_name,
        });

        console.log("Date Updated Column updated");
    }

    process.exit();
});

////////////////////////////////////////
////////////////////////////////////////
////////////////////////////////////////
bin/engine/updateDbSlugsForTableRecords.js (new file, 51 lines)
@@ -0,0 +1,51 @@
require("dotenv").config({ path: "./../.env" });

////////////////////////////////////////

const noDatabaseDbHandler = require("../functions/backend/noDatabaseDbHandler");
const serverError = require("../functions/backend/serverError");
const varDatabaseDbHandler = require("../functions/backend/varDatabaseDbHandler");
const createTable = require("./utils/createTable");
const updateTable = require("./utils/updateTable");

/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */

/**
 * Update DB Slugs
 *
 * @description Copy each database's db_slug onto its user_database_tables records
 */
varDatabaseDbHandler({
    queryString: `SELECT DISTINCT db_id FROM user_database_tables`,
    database: "datasquirel",
}).then(async (tables) => {
    // console.log(tables);
    // process.exit();

    for (let i = 0; i < tables.length; i++) {
        const table = tables[i];

        try {
            const { db_id } = table;
            const dbSlug = await global.DB_HANDLER(`SELECT db_slug FROM user_databases WHERE id='${db_id}'`);
            const updateTableSlug = await global.DB_HANDLER(`UPDATE user_database_tables SET db_slug='${dbSlug[0].db_slug}' WHERE db_id='${db_id}'`);
        } catch (error) {
            serverError({
                component: "shell/updateDbSlugsForTableRecords/main-catch-error",
                message: error.message,
                user: {},
            });
        }
    }

    process.exit();
});

////////////////////////////////////////
////////////////////////////////////////
////////////////////////////////////////
bin/engine/utils/camelJoinedtoCamelSpace.js (new file, 52 lines)
@@ -0,0 +1,52 @@
// @ts-check

/**
 * Convert Camel Joined Text to Camel Spaced Text
 * ==============================================================================
 * @description this function takes a camel cased text without spaces, and returns
 * a camel-case-spaced text
 *
 * @param {string} text - text string without spaces
 *
 * @returns {string | null}
 */
module.exports = function camelJoinedtoCamelSpace(text) {
    if (!text?.match(/./)) {
        return "";
    }

    if (text?.match(/ /)) {
        return text;
    }

    if (text) {
        let textArray = text.split("");

        let capIndexes = [];

        for (let i = 0; i < textArray.length; i++) {
            const char = textArray[i];

            if (i === 0) continue;
            if (char.match(/[A-Z]/)) {
                capIndexes.push(i);
            }
        }

        let textChunks = [`${textArray[0].toUpperCase()}${text.substring(1, capIndexes[0])}`];

        for (let j = 0; j < capIndexes.length; j++) {
            const capIndex = capIndexes[j];
            if (capIndex === 0) continue;

            const startIndex = capIndex + 1;
            const endIndex = capIndexes[j + 1];

            textChunks.push(`${textArray[capIndex].toUpperCase()}${text.substring(startIndex, endIndex)}`);
        }

        return textChunks.join(" ");
    } else {
        return null;
    }
};
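A quick usage sketch of the helper above; the require path assumes a caller sitting in bin/engine.

// Usage sketch for camelJoinedtoCamelSpace (require path is an assumption)
const camelJoinedtoCamelSpace = require("./utils/camelJoinedtoCamelSpace");

console.log(camelJoinedtoCamelSpace("dateCreatedTimestamp")); // "Date Created Timestamp"
console.log(camelJoinedtoCamelSpace("Already Spaced Text")); // returned unchanged
console.log(camelJoinedtoCamelSpace("")); // ""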
bin/engine/utils/createTable.js (new file, 112 lines)
@@ -0,0 +1,112 @@
// @ts-check

const generateColumnDescription = require("./generateColumnDescription");
const supplementTable = require("./supplementTable");

/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */

module.exports = async function createTable({ dbFullName, tableName, tableInfoArray, varDatabaseDbHandler, dbSchema }) {
    /**
     * Format tableInfoArray
     *
     * @description Format tableInfoArray
     */
    const finalTable = supplementTable({ tableInfoArray: tableInfoArray });

    /**
     * Build Query
     *
     * @description Build the CREATE TABLE query string line by line
     */
    const createTableQueryArray = [];

    createTableQueryArray.push(`CREATE TABLE IF NOT EXISTS \`${tableName}\` (`);

    ////////////////////////////////////////

    let primaryKeySet = false;
    let foreignKeys = [];

    ////////////////////////////////////////

    for (let i = 0; i < finalTable.length; i++) {
        const column = finalTable[i];
        const { fieldName, dataType, nullValue, primaryKey, autoIncrement, defaultValue, defaultValueLiteral, foreignKey, updatedField } = column;

        if (foreignKey) {
            foreignKeys.push({
                fieldName: fieldName,
                ...foreignKey,
            });
        }

        let { fieldEntryText, newPrimaryKeySet } = generateColumnDescription({ columnData: column, primaryKeySet: primaryKeySet });

        primaryKeySet = newPrimaryKeySet;

        ////////////////////////////////////////

        if (fieldName?.match(/updated_timestamp/i)) {
            fieldEntryText += " ON UPDATE CURRENT_TIMESTAMP";
        }

        ////////////////////////////////////////

        const comma = (() => {
            if (foreignKeys[0]) return ",";
            if (i === finalTable.length - 1) return "";
            return ",";
        })();

        createTableQueryArray.push("    " + fieldEntryText + comma);

        ////////////////////////////////////////
    }

    ////////////////////////////////////////
    ////////////////////////////////////////
    ////////////////////////////////////////

    if (foreignKeys[0]) {
        foreignKeys.forEach((foreignKey, index, array) => {
            const { fieldName, destinationTableName, destinationTableColumnName, cascadeDelete, cascadeUpdate, foreignKeyName } = foreignKey;

            const comma = (() => {
                if (index === foreignKeys.length - 1) return "";
                return ",";
            })();

            createTableQueryArray.push(`    CONSTRAINT \`${foreignKeyName}\` FOREIGN KEY (\`${fieldName}\`) REFERENCES \`${destinationTableName}\`(${destinationTableColumnName})${cascadeDelete ? " ON DELETE CASCADE" : ""}${cascadeUpdate ? " ON UPDATE CASCADE" : ""}${comma}`);
        });
    }

    ////////////////////////////////////////

    createTableQueryArray.push(`) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin;`);

    const createTableQuery = createTableQueryArray.join("\n");

    ////////////////////////////////////////

    const newTable = await varDatabaseDbHandler({
        queryString: createTableQuery,
        database: dbFullName,
    });

    return newTable;

    ////////////////////////////////////////
    ////////////////////////////////////////
    ////////////////////////////////////////
};

/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
/** ****************************************************************************** */
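A hedged sketch of how createTable is called, mirroring the call site in createDbFromSchema.js above; the database name, field list, and foreign-key values are illustrative placeholders.

// Illustrative call to createTable (values are placeholders)
const createTable = require("./utils/createTable");
const varDatabaseDbHandler = require("./utils/varDatabaseDbHandler");

createTable({
    dbFullName: "datasquirel_user_1_blog", // placeholder database name
    tableName: "blog_posts",
    tableInfoArray: [
        { fieldName: "title", dataType: "VARCHAR(250)", notNullValue: true },
        {
            fieldName: "author_id",
            dataType: "BIGINT",
            foreignKey: {
                foreignKeyName: "blog_posts_author_fk",
                destinationTableName: "users",
                destinationTableColumnName: "id",
                cascadeDelete: true,
            },
        },
    ],
    varDatabaseDbHandler,
    dbSchema: [], // full schema array, as passed through from createDbFromSchema
}).then((newTable) => console.log(newTable));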
126
bin/engine/utils/dbHandler.js
Normal file
126
bin/engine/utils/dbHandler.js
Normal file
@ -0,0 +1,126 @@
|
||||
// @ts-check
|
||||
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
|
||||
const fs = require("fs");
|
||||
const mysql = require("mysql");
|
||||
const endConnection = require("./endConnection");
|
||||
|
||||
const connection = mysql.createConnection({
|
||||
host: process.env.DSQL_HOST,
|
||||
user: process.env.DSQL_USER,
|
||||
database: process.env.DSQL_DB_NAME,
|
||||
password: process.env.DSQL_PASS,
|
||||
charset: "utf8mb4",
|
||||
port: process.env.DSQL_PORT?.match(/.../) ? parseInt(process.env.DSQL_PORT) : undefined,
|
||||
});
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////
|
||||
//////////////////////////////////////////////////////////////////////////////////
|
||||
//////////////////////////////////////////////////////////////////////////////////
|
||||
//////////////////////////////////////////////////////////////////////////////////
|
||||
//////////////////////////////////////////////////////////////////////////////////
|
||||
//////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
/**
|
||||
* Main DB Handler Function
|
||||
* ==============================================================================
|
||||
* @async
|
||||
* @param {object} params - Single Param object containing params
|
||||
* @param {string} params.query - Query String
|
||||
* @param {(string | number)[]} [params.values] - Values
|
||||
* @param {object} [params.dbSchema] - Database Schema
|
||||
* @param {string} [params.database] - Target Database
|
||||
*
|
||||
* @returns {Promise<object | null>}
|
||||
*/
|
||||
module.exports = async function dbHandler({ query, values, database }) {
|
||||
/**
|
||||
* Declare variables
|
||||
*
|
||||
* @description Declare "results" variable
|
||||
*/
|
||||
let changeDbError;
|
||||
|
||||
if (database) {
|
||||
connection.changeUser({ database: database }, (error) => {
|
||||
if (error) {
|
||||
console.log("DB handler error in switching database:", error.message);
|
||||
changeDbError = error.message;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (changeDbError) {
|
||||
return { error: changeDbError };
|
||||
}
|
||||
|
||||
/**
|
||||
* Declare variables
|
||||
*
|
||||
* @description Declare "results" variable
|
||||
*/
|
||||
let results;
|
||||
|
||||
/**
|
||||
* Fetch from db
|
||||
*
|
||||
* @description Fetch data from db if no cache
|
||||
*/
|
||||
try {
|
||||
results = await new Promise((resolve, reject) => {
|
||||
if (connection.state !== "disconnected") {
|
||||
if (values) {
|
||||
connection.query(query, values, (error, results, fields) => {
|
||||
if (error) {
|
||||
console.log("DB handler error:", error.message);
|
||||
resolve({
|
||||
error: error.message,
|
||||
});
|
||||
} else {
|
||||
resolve(JSON.parse(JSON.stringify(results)));
|
||||
}
|
||||
setTimeout(() => {
|
||||
endConnection(connection);
|
||||
}, 500);
|
||||
});
|
||||
} else {
|
||||
connection.query(query, (error, results, fields) => {
|
||||
if (error) {
|
||||
console.log("DB handler error:", error.message);
|
||||
resolve({
|
||||
error: error.message,
|
||||
});
|
||||
} else {
|
||||
resolve(JSON.parse(JSON.stringify(results)));
|
||||
}
|
||||
setTimeout(() => {
|
||||
endConnection(connection);
|
||||
}, 500);
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
} catch (error) {
|
||||
console.log("DB handler error:", error.message);
|
||||
|
||||
results = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return results
|
||||
*
|
||||
* @description Return the results of the query
|
||||
*/
|
||||
if (results) {
|
||||
return results;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
};
|
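For context, a minimal usage sketch of the handler above. The require path, table name and column are illustrative; the handler reads its connection settings from the same DSQL_* environment variables loaded from `.env`.

// Sketch only: assumes a `posts` table exists in the configured database.
const dbHandler = require("./bin/engine/utils/dbHandler");

(async () => {
    const rows = await dbHandler({
        query: "SELECT * FROM posts WHERE user_id = ?",
        values: [1],
        database: process.env.DSQL_DB_NAME,
    });
    console.log(rows);
})();
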
12
bin/engine/utils/defaultFieldsRegexp.js
Normal file
@ -0,0 +1,12 @@
/**
 * Regular expression to match default fields
 *
 * @description Regular expression to match default fields
 */
const defaultFieldsRegexp = /^id$|^date_created$|^date_created_code$|^date_created_timestamp$|^date_updated$|^date_updated_code$|^date_updated_timestamp$/;

////////////////////////////////////////
////////////////////////////////////////
////////////////////////////////////////

module.exports = defaultFieldsRegexp;
|
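A quick illustration of what the expression above treats as a default field (sketch; the sample strings are arbitrary):

const defaultFieldsRegexp = require("./bin/engine/utils/defaultFieldsRegexp");

console.log("id".match(defaultFieldsRegexp));                // matches: skipped by the parsers
console.log("date_created_code".match(defaultFieldsRegexp)); // matches
console.log("title".match(defaultFieldsRegexp));             // null: treated as a user-defined field
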
14
bin/engine/utils/endConnection.js
Normal file
@ -0,0 +1,14 @@
// @ts-check

const mysql = require("mysql");

/**
 * @param {mysql.Connection} connection - the active MySQL connection
 */
module.exports = function endConnection(connection) {
    if (connection.state !== "disconnected") {
        connection.end((err) => {
            if (err) console.log(err.message);
        });
    }
};
|
68
bin/engine/utils/generateColumnDescription.js
Normal file
@ -0,0 +1,68 @@
|
||||
// @ts-check
|
||||
|
||||
/** ****************************************************************************** */
|
||||
/** ****************************************************************************** */
|
||||
/** ****************************************************************************** */
|
||||
/** ****************************************************************************** */
|
||||
/** ****************************************************************************** */
|
||||
/** ****************************************************************************** */
|
||||
|
||||
/**
|
||||
* Generate SQL text for Field
|
||||
* ==============================================================================
|
||||
* @param {object} params - Single object params
|
||||
* @param {import("../../../types/database-schema.td").DSQL_FieldSchemaType} params.columnData - Field object
|
||||
* @param {boolean} [params.primaryKeySet] - Table Name(slug)
|
||||
*
|
||||
* @returns {{fieldEntryText: string, newPrimaryKeySet: boolean}}
|
||||
*/
|
||||
module.exports = function generateColumnDescription({ columnData, primaryKeySet }) {
|
||||
/**
|
||||
* Format tableInfoArray
|
||||
*
|
||||
* @description Format tableInfoArray
|
||||
*/
|
||||
const { fieldName, dataType, nullValue, primaryKey, autoIncrement, defaultValue, defaultValueLiteral, notNullValue } = columnData;
|
||||
|
||||
let fieldEntryText = "";
|
||||
|
||||
fieldEntryText += `\`${fieldName}\` ${dataType}`;
|
||||
|
||||
////////////////////////////////////////
|
||||
|
||||
if (nullValue) {
|
||||
fieldEntryText += " DEFAULT NULL";
|
||||
} else if (defaultValueLiteral) {
|
||||
fieldEntryText += ` DEFAULT ${defaultValueLiteral}`;
|
||||
} else if (defaultValue) {
|
||||
fieldEntryText += ` DEFAULT '${defaultValue}'`;
|
||||
} else if (notNullValue) {
|
||||
fieldEntryText += ` NOT NULL`;
|
||||
}
|
||||
|
||||
////////////////////////////////////////
|
||||
|
||||
if (primaryKey && !primaryKeySet) {
|
||||
fieldEntryText += " PRIMARY KEY";
|
||||
primaryKeySet = true;
|
||||
}
|
||||
|
||||
////////////////////////////////////////
|
||||
|
||||
if (autoIncrement) {
|
||||
fieldEntryText += " AUTO_INCREMENT";
|
||||
primaryKeySet = true;
|
||||
}
|
||||
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
|
||||
return { fieldEntryText, newPrimaryKeySet: primaryKeySet || false };
|
||||
};
|
||||
|
||||
/** ****************************************************************************** */
|
||||
/** ****************************************************************************** */
|
||||
/** ****************************************************************************** */
|
||||
/** ****************************************************************************** */
|
||||
/** ****************************************************************************** */
|
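A hedged sketch of how the generator above is meant to be called. The field object is illustrative and mirrors the DSQL_FieldSchemaType shape referenced in the JSDoc.

const generateColumnDescription = require("./bin/engine/utils/generateColumnDescription");

// Hypothetical field definition
const { fieldEntryText, newPrimaryKeySet } = generateColumnDescription({
    columnData: { fieldName: "title", dataType: "VARCHAR(255)", notNullValue: true },
    primaryKeySet: false,
});

console.log(fieldEntryText);   // "`title` VARCHAR(255) NOT NULL"
console.log(newPrimaryKeySet); // false
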
90
bin/engine/utils/noDatabaseDbHandler.js
Normal file
@ -0,0 +1,90 @@
|
||||
// @ts-check
|
||||
|
||||
const fs = require("fs");
|
||||
const dbHandler = require("./dbHandler");
|
||||
const mysql = require("mysql");
|
||||
const endConnection = require("./endConnection");
|
||||
|
||||
const connection = mysql.createConnection({
|
||||
host: process.env.DSQL_HOST,
|
||||
user: process.env.DSQL_USER,
|
||||
password: process.env.DSQL_PASS,
|
||||
charset: "utf8mb4",
|
||||
port: process.env.DSQL_PORT?.match(/.../) ? parseInt(process.env.DSQL_PORT) : undefined,
|
||||
});
|
||||
|
||||
/**
|
||||
* Create database from Schema Function
|
||||
* ==============================================================================
|
||||
* @param {object} params - Single Param object containing params
|
||||
* @param {string} params.query - Query String
|
||||
* @param {string[]} [params.values] - Values
|
||||
*
|
||||
* @returns {Promise<object[] | null>}
|
||||
*/
|
||||
module.exports = async function noDatabaseDbHandler({ query, values }) {
|
||||
/**
|
||||
* Declare variables
|
||||
*
|
||||
* @description Declare "results" variable
|
||||
*/
|
||||
let results;
|
||||
|
||||
/**
|
||||
* Fetch from db
|
||||
*
|
||||
* @description Fetch data from db if no cache
|
||||
*/
|
||||
try {
|
||||
/** ********************* Run Query */
|
||||
results = await new Promise((resolve, reject) => {
|
||||
if (connection.state !== "disconnected") {
|
||||
if (values) {
|
||||
connection.query(query, values, (error, results, fields) => {
|
||||
if (error) {
|
||||
console.log("NO-DB handler error:", error.message);
|
||||
resolve({
|
||||
error: error.message,
|
||||
});
|
||||
} else {
|
||||
resolve(JSON.parse(JSON.stringify(results)));
|
||||
}
|
||||
setTimeout(() => {
|
||||
endConnection(connection);
|
||||
}, 500);
|
||||
});
|
||||
} else {
|
||||
connection.query(query, (error, results, fields) => {
|
||||
if (error) {
|
||||
console.log("NO-DB handler error:", error.message);
|
||||
resolve({
|
||||
error: error.message,
|
||||
});
|
||||
} else {
|
||||
resolve(JSON.parse(JSON.stringify(results)));
|
||||
}
|
||||
setTimeout(() => {
|
||||
endConnection(connection);
|
||||
}, 500);
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
} catch (error) {
|
||||
console.log("ERROR in noDatabaseDbHandler =>", error.message);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return results
|
||||
*
|
||||
* @description Return the results of the query
|
||||
*/
|
||||
if (results) {
|
||||
return results;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
};
|
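As a sketch, this handler is the one suited to statements that do not target an existing database, for example creating one (the database name below is illustrative):

const noDatabaseDbHandler = require("./bin/engine/utils/noDatabaseDbHandler");

(async () => {
    await noDatabaseDbHandler({
        query: "CREATE DATABASE IF NOT EXISTS `example_db` CHARACTER SET utf8mb4",
    });
})();
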
73
bin/engine/utils/parseDbResults.js
Normal file
@ -0,0 +1,73 @@
|
||||
// @ts-check
|
||||
|
||||
const decrypt = require("../../../functions/decrypt");
|
||||
const defaultFieldsRegexp = require("./defaultFieldsRegexp");
|
||||
|
||||
/**
|
||||
* Parse Database results
|
||||
* ==============================================================================
|
||||
* @description this function takes a database results array gotten from a DB handler
|
||||
* function, decrypts encrypted fields, and returns an updated array with no encrypted
|
||||
* fields
|
||||
*
|
||||
* @param {object} params - Single object params
|
||||
* @param {{}[]} params.unparsedResults - Array of data objects containing Fields(keys)
|
||||
* and corresponding values of the fields(values)
|
||||
* @param {import("../../../types/database-schema.td").DSQL_TableSchemaType} [params.tableSchema] - Table schema
|
||||
* @returns {Promise<object[]|null>}
|
||||
*/
|
||||
module.exports = async function parseDbResults({ unparsedResults, tableSchema }) {
|
||||
/**
|
||||
* Declare variables
|
||||
*
|
||||
* @description Declare "results" variable
|
||||
*/
|
||||
let parsedResults = [];
|
||||
|
||||
try {
|
||||
/**
|
||||
* Declare variables
|
||||
*
|
||||
* @description Declare "results" variable
|
||||
*/
|
||||
for (let pr = 0; pr < unparsedResults.length; pr++) {
|
||||
let result = unparsedResults[pr];
|
||||
|
||||
let resultFieldNames = Object.keys(result);
|
||||
|
||||
for (let i = 0; i < resultFieldNames.length; i++) {
|
||||
const resultFieldName = resultFieldNames[i];
|
||||
let resultFieldSchema = tableSchema?.fields[i];
|
||||
|
||||
if (resultFieldName?.match(defaultFieldsRegexp)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let value = result[resultFieldName];
|
||||
|
||||
if (typeof value !== "number" && !value) {
|
||||
// parsedResults.push(result);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (resultFieldSchema?.encrypted) {
|
||||
if (value?.match(/./)) {
|
||||
result[resultFieldName] = decrypt(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
parsedResults.push(result);
|
||||
}
|
||||
|
||||
/**
|
||||
* Declare variables
|
||||
*
|
||||
* @description Declare "results" variable
|
||||
*/
|
||||
return parsedResults;
|
||||
} catch (error) {
|
||||
console.log("ERROR in parseDbResults Function =>", error.message);
|
||||
return unparsedResults;
|
||||
}
|
||||
};
|
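A small sketch of the decryption pass above. The row and schema are made up; schema fields are matched to row keys by position, so the field order mirrors the row, and the placeholder ciphertext stands in for a value produced by the package's encrypt helper (decryption also expects the DSQL encryption variables from `.env`).

const parseDbResults = require("./bin/engine/utils/parseDbResults");

(async () => {
    const parsed = await parseDbResults({
        unparsedResults: [{ id: 1, email: "<ciphertext placeholder>", title: "Hello" }],
        tableSchema: {
            tableName: "users",
            fields: [
                { fieldName: "id", dataType: "BIGINT" },
                { fieldName: "email", dataType: "VARCHAR(250)", encrypted: true }, // only this field is decrypted
                { fieldName: "title", dataType: "VARCHAR(250)" },
            ],
        },
    });
    console.log(parsed);
})();
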
16
bin/engine/utils/slugToCamelTitle.js
Normal file
@ -0,0 +1,16 @@
// @ts-check

module.exports = function slugToCamelTitle(text) {
    if (text) {
        let addArray = text.split("-").filter((item) => item !== "");
        let camelArray = addArray.map((item) => {
            return item.substr(0, 1).toUpperCase() + item.substr(1).toLowerCase();
        });

        let parsedAddress = camelArray.join(" ");

        return parsedAddress;
    } else {
        return null;
    }
};
|
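For reference, a couple of illustrative calls:

const slugToCamelTitle = require("./bin/engine/utils/slugToCamelTitle");

console.log(slugToCamelTitle("blog-posts"));    // "Blog Posts"
console.log(slugToCamelTitle("USER-profiles")); // "User Profiles"
console.log(slugToCamelTitle(""));              // null
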
48
bin/engine/utils/supplementTable.js
Normal file
@ -0,0 +1,48 @@
|
||||
// @ts-check
|
||||
|
||||
/** ****************************************************************************** */
|
||||
/** ****************************************************************************** */
|
||||
/** ****************************************************************************** */
|
||||
/** ****************************************************************************** */
|
||||
/** ****************************************************************************** */
|
||||
/** ****************************************************************************** */
|
||||
|
||||
module.exports = function supplementTable({ tableInfoArray }) {
|
||||
/**
|
||||
* Format tableInfoArray
|
||||
*
|
||||
* @description Format tableInfoArray
|
||||
*/
|
||||
let finalTableArray = tableInfoArray;
|
||||
const defaultFields = require("../data/defaultFields.json");
|
||||
|
||||
////////////////////////////////////////
|
||||
|
||||
let primaryKeyExists = finalTableArray.filter((_field) => _field.primaryKey);
|
||||
|
||||
////////////////////////////////////////
|
||||
|
||||
defaultFields.forEach((field) => {
|
||||
let fieldExists = finalTableArray.filter((_field) => _field.fieldName === field.fieldName);
|
||||
|
||||
if (fieldExists && fieldExists[0]) {
|
||||
return;
|
||||
} else if (field.fieldName === "id" && !primaryKeyExists[0]) {
|
||||
finalTableArray.unshift(field);
|
||||
} else {
|
||||
finalTableArray.push(field);
|
||||
}
|
||||
});
|
||||
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
|
||||
return finalTableArray;
|
||||
};
|
||||
|
||||
/** ****************************************************************************** */
|
||||
/** ****************************************************************************** */
|
||||
/** ****************************************************************************** */
|
||||
/** ****************************************************************************** */
|
||||
/** ****************************************************************************** */
|
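A sketch of the supplement step, assuming `defaultFields.json` contains the standard `id`/`date_*` columns referenced elsewhere in this commit; the input field is illustrative.

const supplementTable = require("./bin/engine/utils/supplementTable");

const fields = supplementTable({
    tableInfoArray: [{ fieldName: "title", dataType: "VARCHAR(255)" }],
});

// => the user-defined field plus the default id/date_created/date_updated columns
console.log(fields.map((field) => field.fieldName));
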
457
bin/engine/utils/updateTable.js
Normal file
@ -0,0 +1,457 @@
|
||||
// @ts-check
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
///////////////////////// - Update Table Function - ////////////////////////////
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
|
||||
const defaultFieldsRegexp = /^id$|^date_created$|^date_created_code$|^date_created_timestamp$|^date_updated$|^date_updated_code$|^date_updated_timestamp$/;
|
||||
|
||||
const generateColumnDescription = require("./generateColumnDescription");
|
||||
const varDatabaseDbHandler = require("./varDatabaseDbHandler");
|
||||
|
||||
const schemaPath = path.resolve(process.cwd(), "dsql.schema.json");
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
/**
|
||||
* Update table function
|
||||
* ==============================================================================
|
||||
* @param {object} params - Single object params
|
||||
* @param {string} params.dbFullName - Database full name => "datasquirel_user_4394_db_name"
|
||||
* @param {string} params.tableName - Table Name(slug)
|
||||
* @param {import("../../../types/database-schema.td").DSQL_FieldSchemaType[]} params.tableInfoArray - Table Info Array
|
||||
* @param {import("../../../types/database-schema.td").DSQL_DatabaseSchemaType[]} params.dbSchema - Single post
|
||||
* @param {import("../../../types/database-schema.td").DSQL_IndexSchemaType[]} [params.tableIndexes] - Table Indexes
|
||||
* @param {boolean} [params.clone] - Is this a newly cloned table?
|
||||
* @param {number} [params.tableIndex] - The number index of the table in the dbSchema array
|
||||
*
|
||||
* @returns {Promise<string|object[]|null>}
|
||||
*/
|
||||
module.exports = async function updateTable({ dbFullName, tableName, tableInfoArray, dbSchema, tableIndexes, clone, tableIndex }) {
|
||||
/**
|
||||
* Initialize
|
||||
* ==========================================
|
||||
* @description Initial setup
|
||||
*/
|
||||
|
||||
/**
|
||||
* @description Initialize table info array. This value will be
|
||||
* changing depending on if a field is renamed or not.
|
||||
*/
|
||||
let upToDateTableFieldsArray = tableInfoArray;
|
||||
|
||||
/**
|
||||
* Handle Table updates
|
||||
*
|
||||
* @description Try to update the table, and catch the error if anything goes wrong
|
||||
*/
|
||||
try {
|
||||
/**
|
||||
* @type {string[]}
|
||||
* @description Table update query string array
|
||||
*/
|
||||
const updateTableQueryArray = [];
|
||||
|
||||
/**
|
||||
* @type {string[]}
|
||||
* @description Constraints query string array
|
||||
*/
|
||||
const constraintsQueryArray = [];
|
||||
|
||||
/**
|
||||
* @description Push the query initial value
|
||||
*/
|
||||
updateTableQueryArray.push(`ALTER TABLE \`${tableName}\``);
|
||||
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
|
||||
/**
|
||||
* @type {DSQL_MYSQL_SHOW_INDEXES_Type[] | null}
|
||||
* @description All indexes from MYSQL db
|
||||
*/
|
||||
const allExistingIndexes = await varDatabaseDbHandler({
|
||||
queryString: `SHOW INDEXES FROM \`${tableName}\``,
|
||||
database: dbFullName,
|
||||
});
|
||||
|
||||
/**
|
||||
* @type {DSQL_MYSQL_SHOW_COLUMNS_Type[] | null}
|
||||
* @description All columns from MYSQL db
|
||||
*/
|
||||
const allExistingColumns = await varDatabaseDbHandler({
|
||||
queryString: `SHOW COLUMNS FROM \`${tableName}\``,
|
||||
database: dbFullName,
|
||||
});
|
||||
|
||||
////////////////////////////////////////
|
||||
|
||||
/**
|
||||
* @type {string[]}
|
||||
* @description Updated column names Array
|
||||
*/
|
||||
const updatedColumnsArray = [];
|
||||
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
|
||||
/**
|
||||
* @description Iterate through every existing column
|
||||
*/
|
||||
if (allExistingColumns)
|
||||
for (let e = 0; e < allExistingColumns.length; e++) {
|
||||
const { Field } = allExistingColumns[e];
|
||||
|
||||
if (Field.match(defaultFieldsRegexp)) continue;
|
||||
|
||||
/**
|
||||
* @description This finds out whether the fieldName corresponds with the MySQL Field name:
|
||||
* if the fieldName doesn't match any MySQL Field name, the field is deleted.
|
||||
*/
|
||||
let existingEntry = upToDateTableFieldsArray.filter((column) => column.fieldName === Field || column.originName === Field);
|
||||
|
||||
if (existingEntry && existingEntry[0]) {
|
||||
/**
|
||||
* @description Check if Field name has been updated
|
||||
*/
|
||||
if (existingEntry[0].updatedField) {
|
||||
updatedColumnsArray.push(existingEntry[0].fieldName);
|
||||
|
||||
const renameColumn = await varDatabaseDbHandler({
|
||||
queryString: `ALTER TABLE ${tableName} RENAME COLUMN \`${existingEntry[0].originName}\` TO \`${existingEntry[0].fieldName}\``,
|
||||
database: dbFullName,
|
||||
});
|
||||
|
||||
console.log(`Column Renamed from "${existingEntry[0].originName}" to "${existingEntry[0].fieldName}"`);
|
||||
|
||||
/**
|
||||
* Update Db Schema
|
||||
* ===================================================
|
||||
* @description Update Db Schema after renaming column
|
||||
*/
|
||||
try {
|
||||
const userSchemaData = dbSchema;
|
||||
|
||||
const targetDbIndex = userSchemaData.findIndex((db) => db.dbFullName === dbFullName);
|
||||
const targetTableIndex = userSchemaData[targetDbIndex].tables.findIndex((table) => table.tableName === tableName);
|
||||
const targetFieldIndex = userSchemaData[targetDbIndex].tables[targetTableIndex].fields.findIndex((field) => field.fieldName === existingEntry[0].fieldName);
|
||||
|
||||
delete userSchemaData[targetDbIndex].tables[targetTableIndex].fields[targetFieldIndex]["originName"];
|
||||
delete userSchemaData[targetDbIndex].tables[targetTableIndex].fields[targetFieldIndex]["updatedField"];
|
||||
|
||||
/**
|
||||
* @description Set New Table Fields Array
|
||||
*/
|
||||
upToDateTableFieldsArray = userSchemaData[targetDbIndex].tables[targetTableIndex].fields;
|
||||
|
||||
fs.writeFileSync(schemaPath, JSON.stringify(userSchemaData), "utf8");
|
||||
} catch (error) {
|
||||
console.log("Error in updating Table =>", error.message);
|
||||
}
|
||||
|
||||
////////////////////////////////////////
|
||||
}
|
||||
|
||||
////////////////////////////////////////
|
||||
|
||||
continue;
|
||||
|
||||
////////////////////////////////////////
|
||||
} else {
|
||||
// console.log("Column Deleted =>", Field);
|
||||
await varDatabaseDbHandler({
|
||||
queryString: `ALTER TABLE ${tableName} DROP COLUMN \`${Field}\``,
|
||||
database: dbFullName,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
|
||||
/**
|
||||
* Handle MYSQL Table Indexes
|
||||
* ===================================================
|
||||
* @description Iterate through each table index(if available)
|
||||
* and perform operations
|
||||
*/
|
||||
if (allExistingIndexes)
|
||||
for (let f = 0; f < allExistingIndexes.length; f++) {
|
||||
const { Key_name, Index_comment } = allExistingIndexes[f];
|
||||
|
||||
/**
|
||||
* @description Check if this index was specifically created
|
||||
* by datasquirel
|
||||
*/
|
||||
if (Index_comment?.match(/schema_index/)) {
|
||||
try {
|
||||
const existingKeyInSchema = tableIndexes ? tableIndexes.filter((indexObject) => indexObject.alias === Key_name) : null;
|
||||
if (!existingKeyInSchema?.[0]) throw new Error(`This Index(${Key_name}) Has been Deleted!`);
|
||||
} catch (error) {
|
||||
/**
|
||||
* @description Drop Index: This happens when the MYSQL index is not
|
||||
* present in the datasquirel DB schema
|
||||
*/
|
||||
await varDatabaseDbHandler({
|
||||
queryString: `ALTER TABLE ${tableName} DROP INDEX \`${Key_name}\``,
|
||||
database: dbFullName,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle DATASQUIREL Table Indexes
|
||||
* ===================================================
|
||||
* @description Iterate through each datasquirel schema
|
||||
* table index(if available), and perform operations
|
||||
*/
|
||||
if (tableIndexes && tableIndexes[0]) {
|
||||
for (let g = 0; g < tableIndexes.length; g++) {
|
||||
const { indexType, indexName, indexTableFields, alias } = tableIndexes[g];
|
||||
|
||||
if (!alias?.match(/./)) continue;
|
||||
|
||||
/**
|
||||
* @description Check for existing Index in MYSQL db
|
||||
*/
|
||||
try {
|
||||
const existingKeyInDb = allExistingIndexes?.filter((indexObject) => indexObject.Key_name === alias);
|
||||
if (!existingKeyInDb?.[0]) throw new Error("This Index Does not Exist");
|
||||
} catch (error) {
|
||||
/**
|
||||
* @description Create new index if determined that it
|
||||
* doesn't exist in MYSQL db
|
||||
*/
|
||||
await varDatabaseDbHandler({
|
||||
queryString: `CREATE${indexType.match(/fullText/i) ? " FULLTEXT" : ""} INDEX \`${alias}\` ON ${tableName}(${indexTableFields
|
||||
.map((nm) => nm.value)
|
||||
.map((nm) => `\`${nm}\``)
|
||||
.join(",")}) COMMENT 'schema_index'`,
|
||||
database: dbFullName,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
|
||||
/**
|
||||
* Handle MYSQL Foreign Keys
|
||||
* ===================================================
|
||||
* @description Iterate through each datasquirel schema
|
||||
* table index(if available), and perform operations
|
||||
*/
|
||||
|
||||
/**
|
||||
* @description All MySQL Foreign Keys
|
||||
* @type {DSQL_MYSQL_FOREIGN_KEYS_Type[] | null}
|
||||
*/
|
||||
const allForeignKeys = await varDatabaseDbHandler({
|
||||
queryString: `SELECT CONSTRAINT_NAME FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS WHERE CONSTRAINT_SCHEMA = '${dbFullName}' AND TABLE_NAME='${tableName}' AND CONSTRAINT_TYPE='FOREIGN KEY'`,
|
||||
database: dbFullName,
|
||||
});
|
||||
|
||||
if (allForeignKeys)
|
||||
for (let c = 0; c < allForeignKeys.length; c++) {
|
||||
const { CONSTRAINT_NAME } = allForeignKeys[c];
|
||||
|
||||
/**
|
||||
* @description Skip if Key is the PRIMARY Key
|
||||
*/
|
||||
if (CONSTRAINT_NAME.match(/PRIMARY/)) continue;
|
||||
|
||||
/**
|
||||
* @description Drop all foreign Keys to avoid MYSQL errors when adding/updating
|
||||
* Foreign keys
|
||||
*/
|
||||
const dropForeignKey = await varDatabaseDbHandler({
|
||||
queryString: `ALTER TABLE ${tableName} DROP FOREIGN KEY \`${CONSTRAINT_NAME}\``,
|
||||
database: dbFullName,
|
||||
});
|
||||
}
|
||||
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
|
||||
/**
|
||||
* Handle DATASQUIREL schema fields for current table
|
||||
* ===================================================
|
||||
* @description Iterate through each field object and
|
||||
* perform operations
|
||||
*/
|
||||
for (let i = 0; i < upToDateTableFieldsArray.length; i++) {
|
||||
const column = upToDateTableFieldsArray[i];
|
||||
const prevColumn = upToDateTableFieldsArray[i - 1];
|
||||
const nextColumn = upToDateTableFieldsArray[i + 1];
|
||||
|
||||
const { fieldName, dataType, nullValue, primaryKey, autoIncrement, defaultValue, defaultValueLiteral, foreignKey, updatedField } = column;
|
||||
|
||||
////////////////////////////////////////
|
||||
|
||||
/**
|
||||
* @description Skip default fields
|
||||
*/
|
||||
if (fieldName.match(/^id$|^date_/)) continue;
|
||||
/**
|
||||
* @description Skip columns that have been updated recently
|
||||
*/
|
||||
// if (updatedColumnsArray.includes(fieldName)) continue;
|
||||
|
||||
////////////////////////////////////////
|
||||
|
||||
let updateText = "";
|
||||
|
||||
////////////////////////////////////////
|
||||
|
||||
let existingColumnIndex;
|
||||
|
||||
/**
|
||||
* @description Existing MYSQL field object
|
||||
*/
|
||||
let existingColumn =
|
||||
allExistingColumns && allExistingColumns[0]
|
||||
? allExistingColumns.filter((_column, _index) => {
|
||||
if (_column.Field === fieldName) {
|
||||
existingColumnIndex = _index;
|
||||
return true;
|
||||
}
|
||||
})
|
||||
: null;
|
||||
|
||||
/**
|
||||
* @description Construct SQL text snippet for this field
|
||||
*/
|
||||
let { fieldEntryText } = generateColumnDescription({ columnData: column });
|
||||
|
||||
/**
|
||||
* @description Modify Column(Field) if it already exists
|
||||
* in MYSQL database
|
||||
*/
|
||||
if (existingColumn && existingColumn[0]?.Field) {
|
||||
const { Field, Type, Null, Key, Default, Extra } = existingColumn[0];
|
||||
|
||||
let isColumnReordered = existingColumnIndex ? i < existingColumnIndex : false;
|
||||
|
||||
if (Field === fieldName && !isColumnReordered && dataType.toUpperCase() === Type.toUpperCase()) {
|
||||
updateText += `MODIFY COLUMN ${fieldEntryText}`;
|
||||
// continue;
|
||||
} else {
|
||||
updateText += `MODIFY COLUMN ${fieldEntryText}${isColumnReordered ? (prevColumn?.fieldName ? " AFTER `" + prevColumn.fieldName + "`" : " FIRST") : ""}`;
|
||||
// if (userId) {
|
||||
// } else {
|
||||
// updateText += `MODIFY COLUMN ${fieldEntryText}`;
|
||||
// }
|
||||
}
|
||||
} else if (prevColumn && prevColumn.fieldName) {
|
||||
/**
|
||||
* @description Add new Column AFTER previous column, if
|
||||
* previous column exists
|
||||
*/
|
||||
updateText += `ADD COLUMN ${fieldEntryText} AFTER \`${prevColumn.fieldName}\``;
|
||||
} else if (nextColumn && nextColumn.fieldName) {
/**
 * @description Add new Column as the FIRST column when there is
 * no previous column (MySQL only supports FIRST/AFTER placement)
 */
updateText += `ADD COLUMN ${fieldEntryText} FIRST`;
|
||||
} else {
|
||||
/**
|
||||
* @description Append new column to the end of existing columns
|
||||
*/
|
||||
updateText += `ADD COLUMN ${fieldEntryText}`;
|
||||
}
|
||||
|
||||
////////////////////////////////////////
|
||||
|
||||
/**
|
||||
* @description Push SQL code snippet to the updateTableQueryArray array.
|
||||
* Add a comma(,) to separate from the next snippet
|
||||
*/
|
||||
updateTableQueryArray.push(updateText + ",");
|
||||
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
|
||||
/**
|
||||
* @description Handle foreign keys if available, and if there is no
|
||||
* "clone" boolean = true
|
||||
*/
|
||||
if (!clone && foreignKey) {
|
||||
const { destinationTableName, destinationTableColumnName, cascadeDelete, cascadeUpdate, foreignKeyName } = foreignKey;
|
||||
|
||||
const foreignKeyText = `ADD CONSTRAINT \`${foreignKeyName}\` FOREIGN KEY (${fieldName}) REFERENCES ${destinationTableName}(${destinationTableColumnName})${cascadeDelete ? " ON DELETE CASCADE" : ""}${cascadeUpdate ? " ON UPDATE CASCADE" : ""}`;

const finalQueryString = `ALTER TABLE \`${tableName}\` ${foreignKeyText}`;
|
||||
|
||||
const addForeignKey = await varDatabaseDbHandler({
|
||||
database: dbFullName,
|
||||
queryString: finalQueryString,
|
||||
});
|
||||
}
|
||||
|
||||
////////////////////////////////////////
|
||||
}
|
||||
|
||||
/**
|
||||
* @description Construct final SQL query by combining all SQL snippets in
|
||||
* the updateTableQueryArray array, and trimming the final comma(,)
|
||||
*/
|
||||
const updateTableQuery = updateTableQueryArray.join(" ").replace(/,$/, "");
|
||||
|
||||
////////////////////////////////////////
|
||||
|
||||
/**
|
||||
* @description Check if SQL snippets array has more than 1 entries
|
||||
* This is because 1 entry means "ALTER TABLE table_name" only, without any
|
||||
* Alter directives like "ADD COLUMN" or "MODIFY COLUMN"
|
||||
*/
|
||||
if (updateTableQueryArray.length > 1) {
|
||||
const updateTable = await varDatabaseDbHandler({
|
||||
queryString: updateTableQuery,
|
||||
database: dbFullName,
|
||||
});
|
||||
|
||||
return updateTable;
|
||||
} else {
|
||||
/**
|
||||
* @description If only 1 SQL snippet is left in updateTableQueryArray, this
|
||||
* means that no updates have been made to the table
|
||||
*/
|
||||
return "No Changes Made to Table";
|
||||
}
|
||||
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
} catch (error) {
|
||||
console.log('Error in "updateTable" function =>', error.message);
|
||||
|
||||
return "Error in Updating Table";
|
||||
}
|
||||
};
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
////////////////////////////////////////////////////////////////////////////////
|
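A rough usage sketch for the updater above, driven from a local schema file. The `indexes` property name on the table object is an assumption; `tables`, `fields` and `dbFullName` follow the schema access used in the function body.

const fs = require("fs");
const updateTable = require("./bin/engine/utils/updateTable");

(async () => {
    // Assumes a dsql.schema.json in the working directory
    const dbSchema = JSON.parse(fs.readFileSync("./dsql.schema.json", "utf8"));
    const targetDb = dbSchema[0];
    const targetTable = targetDb.tables[0];

    const result = await updateTable({
        dbFullName: targetDb.dbFullName,
        tableName: targetTable.tableName,
        tableInfoArray: targetTable.fields,
        dbSchema: dbSchema,
        tableIndexes: targetTable.indexes, // assumption: index definitions stored under `indexes`
    });

    console.log(result); // e.g. "No Changes Made to Table"
})();
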
88
bin/engine/utils/varDatabaseDbHandler.js
Normal file
@ -0,0 +1,88 @@
|
||||
// @ts-check
|
||||
|
||||
const fs = require("fs");
|
||||
const mysql = require("mysql");
|
||||
const parseDbResults = require("./parseDbResults");
|
||||
const dbHandler = require("./dbHandler");
|
||||
|
||||
/**
|
||||
* DB handler for specific database
|
||||
* ==============================================================================
|
||||
* @async
|
||||
* @param {object} params - Single object params
|
||||
* @param {string} params.queryString - SQL string
|
||||
* @param {string[]} [params.queryValuesArray] - Values Array
|
||||
* @param {string} params.database - Database name
|
||||
* @param {import("../../../types/database-schema.td").DSQL_TableSchemaType} [params.tableSchema] - Table schema
|
||||
* @returns {Promise<any[]|null>}
|
||||
*/
|
||||
module.exports = async function varDatabaseDbHandler({ queryString, queryValuesArray, database, tableSchema }) {
|
||||
/**
|
||||
* Create Connection
|
||||
*
|
||||
* @description Create Connection
|
||||
*/
|
||||
const connection = mysql.createConnection({
|
||||
host: process.env.DSQL_SOCKET_HOST,
|
||||
user: process.env.DSQL_SOCKET_USER,
|
||||
password: process.env.DSQL_SOCKET_PASS || "",
|
||||
database: process.env.DSQL_SOCKET_DB_NAME,
|
||||
charset: "utf8mb4",
|
||||
port: parseInt(process.env.DSQL_SOCKET_DB_NAME || "") || undefined,
|
||||
});
|
||||
|
||||
/**
|
||||
* Declare variables
|
||||
*
|
||||
* @description Declare "results" variable
|
||||
*/
|
||||
let results;
|
||||
|
||||
/**
|
||||
* Fetch from db
|
||||
*
|
||||
* @description Fetch data from db if no cache
|
||||
*/
|
||||
try {
|
||||
if (queryString && queryValuesArray && Array.isArray(queryValuesArray) && queryValuesArray[0]) {
|
||||
results = await dbHandler({ query: queryString, values: queryValuesArray, database: database });
|
||||
} else {
|
||||
results = await dbHandler({ query: queryString, database: database });
|
||||
}
|
||||
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
} catch (error) {
|
||||
console.log("\x1b[31mvarDatabaseDbHandler ERROR\x1b[0m =>", database, error);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return results
|
||||
*
|
||||
* @description Return the results, parsed when a table schema is provided
|
||||
*/
|
||||
if (results && tableSchema) {
|
||||
try {
|
||||
const unparsedResults = results;
|
||||
// deepcode ignore reDOS: <please specify a reason of ignoring this>
|
||||
const parsedResults = await parseDbResults({ unparsedResults: unparsedResults, tableSchema: tableSchema });
|
||||
return parsedResults;
|
||||
} catch (error) {
|
||||
console.log("\x1b[31mvarDatabaseDbHandler ERROR\x1b[0m =>", database, error);
|
||||
return null;
|
||||
}
|
||||
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
} else if (results) {
|
||||
return results;
|
||||
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
};
|
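Sketch of a call against a specific database (names illustrative); passing a `tableSchema` additionally runs the rows through `parseDbResults`.

const varDatabaseDbHandler = require("./bin/engine/utils/varDatabaseDbHandler");

(async () => {
    const columns = await varDatabaseDbHandler({
        queryString: "SHOW COLUMNS FROM `posts`",
        database: process.env.DSQL_DB_NAME || "example_db",
    });
    console.log(columns);
})();
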
57
bin/engine/utils/varReadOnlyDatabaseDbHandler.js
Normal file
@ -0,0 +1,57 @@
|
||||
// @ts-check
|
||||
|
||||
const fs = require("fs");
|
||||
const parseDbResults = require("./parseDbResults");
|
||||
const dbHandler = require("./dbHandler");
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {object} param0
|
||||
* @param {string} param0.queryString
|
||||
* @param {object} param0.database
|
||||
* @param {object[]} [param0.queryValuesArray]
|
||||
* @param {object | null} [param0.tableSchema]
|
||||
* @returns
|
||||
*/
|
||||
module.exports = async function varReadOnlyDatabaseDbHandler({ queryString, database, queryValuesArray, tableSchema }) {
|
||||
/**
|
||||
* Declare variables
|
||||
*
|
||||
* @description Declare "results" variable
|
||||
*/
|
||||
let results;
|
||||
|
||||
/**
|
||||
* Fetch from db
|
||||
*
|
||||
* @description Fetch data from db if no cache
|
||||
*/
|
||||
try {
|
||||
results = await dbHandler({ query: queryString, values: queryValuesArray, database: database });
|
||||
|
||||
////////////////////////////////////////
|
||||
} catch (error) {
|
||||
////////////////////////////////////////
|
||||
|
||||
console.log("\x1b[31mvarReadOnlyDatabaseDbHandler ERROR\x1b[0m =>", database, error.message);
|
||||
|
||||
/**
|
||||
* Return error
|
||||
*/
|
||||
return error.message;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return results
|
||||
*
|
||||
* @description Return the results, parsed when a table schema is provided
|
||||
*/
|
||||
if (results) {
|
||||
const unparsedResults = results;
|
||||
// deepcode ignore reDOS: <please specify a reason of ignoring this>
|
||||
const parsedResults = await parseDbResults({ unparsedResults: unparsedResults, tableSchema: tableSchema });
|
||||
return parsedResults;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
};
|
100
bin/query/get.js
Normal file
@ -0,0 +1,100 @@
|
||||
// @ts-check
|
||||
|
||||
const runQuery = require("./utils/runQuery");
|
||||
|
||||
/**
|
||||
* @typedef {Object} LocalGetReturn
|
||||
* @property {boolean} success - Did the function run successfully?
|
||||
* @property {(Object[]|string|null|object)} [payload] - GET request results
|
||||
* @property {string} [msg] - Message
|
||||
* @property {string} [error] - Error Message
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} LocalQueryObject
|
||||
* @property {string} query - Table Name
|
||||
* @property {string} [tableName] - Table Name
|
||||
* @property {string[]} [queryValues] - GET request results
|
||||
*/
|
||||
|
||||
/**
|
||||
* Make a get request to Datasquirel API
|
||||
* ==============================================================================
|
||||
* @async
|
||||
*
|
||||
* @param {Object} params - Single object passed
|
||||
* @param {LocalQueryObject} params.options - SQL Query
|
||||
* @param {import("../../types/database-schema.td").DSQL_DatabaseSchemaType} [params.dbSchema] - Name of the table to query
|
||||
*
|
||||
* @returns { Promise<LocalGetReturn> } - Return Object
|
||||
*/
|
||||
async function localGet({ options, dbSchema }) {
|
||||
try {
|
||||
const { query } = options;
|
||||
|
||||
/** @type {string | undefined | any } */
|
||||
const tableName = options?.tableName ? options.tableName : undefined;
|
||||
|
||||
/** @type {string[] | undefined } */
|
||||
let queryValues;
|
||||
|
||||
if (options?.queryValues && typeof options?.queryValues === "string") {
|
||||
try {
|
||||
queryValues = JSON.parse(options.queryValues);
|
||||
} catch (error) {}
|
||||
}
|
||||
|
||||
const dbFullName = process.env.DSQL_DB_NAME || "";
|
||||
|
||||
/**
|
||||
* Input Validation
|
||||
*
|
||||
* @description Input Validation
|
||||
*/
|
||||
if (typeof query == "string" && (query.match(/^alter|^delete|information_schema|databases|^create/i) || !query.match(/^select/i))) {
|
||||
return { success: false, msg: "Wrong Input" };
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new user folder and file
|
||||
*
|
||||
* @description Create new user folder and file
|
||||
*/
|
||||
let results;
|
||||
|
||||
try {
|
||||
let { result, error } = await runQuery({
|
||||
dbFullName: dbFullName,
|
||||
query: query,
|
||||
queryValuesArray: queryValues,
|
||||
dbSchema,
|
||||
tableName,
|
||||
});
|
||||
|
||||
if (error) throw error;
|
||||
if (result.error) throw new Error(result.error);
|
||||
|
||||
results = result;
|
||||
return { success: true, payload: results };
|
||||
|
||||
////////////////////////////////////////
|
||||
} catch (error) {
|
||||
////////////////////////////////////////
|
||||
|
||||
console.log("Error in local get Request =>", error.message);
|
||||
|
||||
return { success: false, payload: null, error: error.message };
|
||||
}
|
||||
|
||||
////////////////////////////////////////
|
||||
} catch (error) {
|
||||
////////////////////////////////////////
|
||||
console.log("Error in local get Request =>", error.message);
|
||||
|
||||
return { success: false, msg: "Something went wrong!" };
|
||||
|
||||
////////////////////////////////////////
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = localGet;
|
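A hedged usage sketch for the local GET helper. The query, table name and values are illustrative; `dbSchema` is optional and only needed for decrypting encrypted fields.

const localGet = require("./bin/query/get");

(async () => {
    const response = await localGet({
        options: {
            query: "SELECT id, title FROM posts WHERE id = ?",
            queryValues: JSON.stringify([1]),
            tableName: "posts",
        },
    });

    if (response.success) console.log(response.payload);
})();
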
0
bin/query/post.js
Normal file
149
bin/query/utils/addDbEntry.js
Normal file
@ -0,0 +1,149 @@
|
||||
// @ts-check
|
||||
|
||||
/**
|
||||
* Imports: Handle imports
|
||||
*/
|
||||
const encrypt = require("../../../functions/encrypt");
|
||||
const dbHandler = require("../../engine/utils/dbHandler");
|
||||
const updateDbEntry = require("./updateDbEntry");
|
||||
|
||||
/**
|
||||
* Add a db Entry Function
|
||||
* ==============================================================================
|
||||
* @description Description
|
||||
* @async
|
||||
*
|
||||
* @param {object} params - An object containing the function parameters.
|
||||
* @param {("Master" | "Dsql User")} [params.dbContext] - What is the database context? "Master"
|
||||
* or "Dsql User". Defaults to "Master"
|
||||
* @param {("Read Only" | "Full Access")} [params.paradigm] - What is the paradigm for "Dsql User"?
|
||||
* "Read only" or "Full Access"? Defaults to "Read Only"
|
||||
* @param {string} params.dbFullName - Database full name
|
||||
* @param {string} params.tableName - Table name
|
||||
* @param {object} params.data - Data to add
|
||||
* @param {import("../../../types/database-schema.td").DSQL_TableSchemaType} [params.tableSchema] - Table schema
|
||||
* @param {string} [params.duplicateColumnName] - Duplicate column name
|
||||
* @param {string} [params.duplicateColumnValue] - Duplicate column value
|
||||
* @param {boolean} [params.update] - Update this row if it exists
|
||||
* @param {string} [params.encryptionKey] - Update this row if it exists
|
||||
* @param {string} [params.encryptionSalt] - Update this row if it exists
|
||||
*
|
||||
* @returns {Promise<object|null>}
|
||||
*/
|
||||
async function addDbEntry({ dbContext, paradigm, dbFullName, tableName, data, tableSchema, duplicateColumnName, duplicateColumnValue, update, encryptionKey, encryptionSalt }) {
|
||||
/**
|
||||
* Initialize variables
|
||||
*/
|
||||
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
|
||||
/**
|
||||
* Handle function logic
|
||||
*/
|
||||
|
||||
if (duplicateColumnName && typeof duplicateColumnName === "string") {
|
||||
const duplicateValue = await dbHandler({
|
||||
database: dbFullName,
|
||||
query: `SELECT * FROM \`${tableName}\` WHERE \`${duplicateColumnName}\`=?`,
|
||||
values: [duplicateColumnValue || ""],
|
||||
});
|
||||
|
||||
if (duplicateValue && duplicateValue[0] && !update) {
|
||||
return null;
|
||||
} else if (duplicateValue && duplicateValue[0] && update) {
|
||||
return await updateDbEntry({
|
||||
dbContext,
|
||||
paradigm,
|
||||
dbFullName,
|
||||
tableName,
|
||||
data,
|
||||
tableSchema,
|
||||
identifierColumnName: duplicateColumnName,
|
||||
identifierValue: duplicateColumnValue || "",
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Declare variables
|
||||
*
|
||||
* @description Declare "results" variable
|
||||
*/
|
||||
const dataKeys = Object.keys(data);
|
||||
|
||||
let insertKeysArray = [];
|
||||
let insertValuesArray = [];
|
||||
|
||||
for (let i = 0; i < dataKeys.length; i++) {
|
||||
try {
|
||||
const dataKey = dataKeys[i];
|
||||
let value = data[dataKey];
|
||||
|
||||
const targetFieldSchemaArray = tableSchema ? tableSchema?.fields?.filter((field) => field.fieldName == dataKey) : null;
|
||||
const targetFieldSchema = targetFieldSchemaArray && targetFieldSchemaArray[0] ? targetFieldSchemaArray[0] : null;
|
||||
|
||||
if (!value) continue;
|
||||
|
||||
if (targetFieldSchema?.encrypted) {
|
||||
value = await encrypt({ data: value, encryptionKey, encryptionSalt });
|
||||
console.log("DSQL: Encrypted value =>", value);
|
||||
}
|
||||
|
||||
insertKeysArray.push("`" + dataKey + "`");
|
||||
|
||||
if (typeof value === "object") {
|
||||
value = JSON.stringify(value);
|
||||
}
|
||||
|
||||
insertValuesArray.push(value);
|
||||
} catch (error) {
|
||||
console.log("DSQL: Error in parsing data keys =>", error.message);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
////////////////////////////////////////
|
||||
|
||||
insertKeysArray.push("`date_created`");
|
||||
insertValuesArray.push(Date());
|
||||
|
||||
insertKeysArray.push("`date_created_code`");
|
||||
insertValuesArray.push(Date.now());
|
||||
|
||||
////////////////////////////////////////
|
||||
|
||||
insertKeysArray.push("`date_updated`");
|
||||
insertValuesArray.push(Date());
|
||||
|
||||
insertKeysArray.push("`date_updated_code`");
|
||||
insertValuesArray.push(Date.now());
|
||||
|
||||
////////////////////////////////////////
|
||||
|
||||
const query = `INSERT INTO \`${tableName}\` (${insertKeysArray.join(",")}) VALUES (${insertValuesArray.map(() => "?").join(",")})`;
|
||||
const queryValuesArray = insertValuesArray;
|
||||
|
||||
const newInsert = await dbHandler({
|
||||
database: dbFullName,
|
||||
query: query,
|
||||
values: queryValuesArray,
|
||||
});
|
||||
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
|
||||
/**
|
||||
* Return statement
|
||||
*/
|
||||
return newInsert;
|
||||
}
|
||||
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
|
||||
module.exports = addDbEntry;
|
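A sketch of an insert through the helper above. The database, table and data are illustrative; the encryption parameters only matter for fields marked `encrypted` in a supplied table schema.

const addDbEntry = require("./bin/query/utils/addDbEntry");

(async () => {
    const inserted = await addDbEntry({
        dbFullName: process.env.DSQL_DB_NAME || "example_db",
        tableName: "posts",
        data: { title: "Hello World", slug: "hello-world" },
        duplicateColumnName: "slug",
        duplicateColumnValue: "hello-world",
        update: true, // update the existing row instead of skipping on duplicates
    });
    console.log(inserted);
})();
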
76
bin/query/utils/deleteDbEntry.js
Normal file
@ -0,0 +1,76 @@
|
||||
// @ts-check
|
||||
|
||||
const dbHandler = require("../../engine/utils/dbHandler");
|
||||
|
||||
/**
|
||||
* Imports: Handle imports
|
||||
*/
|
||||
|
||||
/**
|
||||
* Delete DB Entry Function
|
||||
* ==============================================================================
|
||||
* @description Description
|
||||
* @async
|
||||
*
|
||||
* @param {object} params - An object containing the function parameters.
|
||||
* @param {string} [params.dbContext] - What is the database context? "Master"
|
||||
* or "Dsql User". Defaults to "Master"
|
||||
* @param {("Read Only" | "Full Access")} [params.paradigm] - What is the paradigm for "Dsql User"?
|
||||
* "Read only" or "Full Access"? Defaults to "Read Only"
|
||||
* @param {string} params.dbFullName - Database full name
|
||||
* @param {string} params.tableName - Table name
|
||||
* @param {import("../../../types/database-schema.td").DSQL_TableSchemaType} [params.tableSchema] - Table schema
|
||||
* @param {string} params.identifierColumnName - Update row identifier column name
|
||||
* @param {string|number} params.identifierValue - Update row identifier column value
|
||||
*
|
||||
* @returns {Promise<object|null>}
|
||||
*/
|
||||
async function deleteDbEntry({ dbContext, paradigm, dbFullName, tableName, identifierColumnName, identifierValue }) {
|
||||
try {
|
||||
/**
|
||||
* Check if data is valid
|
||||
*/
|
||||
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
|
||||
/**
|
||||
* Execution
|
||||
*
|
||||
* @description
|
||||
*/
|
||||
const query = `DELETE FROM ${tableName} WHERE \`${identifierColumnName}\`=?`;
|
||||
|
||||
const deletedEntry = await dbHandler({
|
||||
query: query,
|
||||
database: dbFullName,
|
||||
values: [identifierValue],
|
||||
});
|
||||
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
|
||||
/**
|
||||
* Return statement
|
||||
*/
|
||||
return deletedEntry;
|
||||
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
} catch (error) {
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
|
||||
module.exports = deleteDbEntry;
|
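And the corresponding delete, sketched with illustrative values:

const deleteDbEntry = require("./bin/query/utils/deleteDbEntry");

(async () => {
    await deleteDbEntry({
        dbFullName: process.env.DSQL_DB_NAME || "example_db",
        tableName: "posts",
        identifierColumnName: "id",
        identifierValue: 1,
    });
})();
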
142
bin/query/utils/runQuery.js
Normal file
@ -0,0 +1,142 @@
|
||||
// @ts-check
|
||||
|
||||
const fs = require("fs");
|
||||
|
||||
const addDbEntry = require("./addDbEntry");
|
||||
const updateDbEntry = require("./updateDbEntry");
|
||||
const deleteDbEntry = require("./deleteDbEntry");
|
||||
const varReadOnlyDatabaseDbHandler = require("../../engine/utils/varReadOnlyDatabaseDbHandler");
|
||||
|
||||
/** ****************************************************************************** */
|
||||
/** ****************************************************************************** */
|
||||
/** ****************************************************************************** */
|
||||
/** ****************************************************************************** */
|
||||
/** ****************************************************************************** */
|
||||
/** ****************************************************************************** */
|
||||
|
||||
/**
|
||||
* Run DSQL users queries
|
||||
* ==============================================================================
|
||||
* @param {object} params - An object containing the function parameters.
|
||||
* @param {string} params.dbFullName - Database full name. Eg. "datasquire_user_2_test"
|
||||
* @param {string|object} params.query - Query string or object
|
||||
* @param {boolean} [params.readOnly] - Is this operation read only?
|
||||
* @param {import("../../../types/database-schema.td").DSQL_DatabaseSchemaType} [params.dbSchema] - Database schema
|
||||
* @param {string[]} [params.queryValuesArray] - An optional array of query values if "?" is used in the query string
|
||||
* @param {string} [params.tableName] - Table Name
|
||||
*
|
||||
* @return {Promise<object>}
|
||||
*/
|
||||
async function runQuery({ dbFullName, query, readOnly, dbSchema, queryValuesArray, tableName }) {
|
||||
/**
|
||||
* Declare variables
|
||||
*
|
||||
* @description Declare "results" variable
|
||||
*/
|
||||
let result, error, tableSchema;
|
||||
|
||||
if (dbSchema) {
|
||||
try {
|
||||
const table = tableName ? tableName : typeof query == "string" ? null : query ? query?.table : null;
|
||||
if (!table) throw new Error("No table name provided");
|
||||
tableSchema = dbSchema.tables.filter((tb) => tb?.tableName === table)[0];
|
||||
} catch (_err) {
|
||||
// console.log("ERROR getting tableSchema: ", _err.message);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Declare variables
|
||||
*
|
||||
* @description Declare "results" variable
|
||||
*/
|
||||
try {
|
||||
if (typeof query === "string") {
|
||||
result = await varReadOnlyDatabaseDbHandler({
|
||||
queryString: query,
|
||||
queryValuesArray,
|
||||
database: dbFullName,
|
||||
tableSchema,
|
||||
});
|
||||
} else if (typeof query === "object") {
|
||||
/**
|
||||
* Declare variables
|
||||
*
|
||||
* @description Declare "results" variable
|
||||
*/
|
||||
const { data, action, table, identifierColumnName, identifierValue, update, duplicateColumnName, duplicateColumnValue } = query;
|
||||
|
||||
switch (action.toLowerCase()) {
|
||||
case "insert":
|
||||
result = await addDbEntry({
|
||||
dbContext: "Dsql User",
|
||||
paradigm: "Full Access",
|
||||
dbFullName: dbFullName,
|
||||
tableName: table,
|
||||
data: data,
|
||||
update,
|
||||
duplicateColumnName,
|
||||
duplicateColumnValue,
|
||||
tableSchema,
|
||||
});
|
||||
|
||||
if (!result?.insertId) {
|
||||
error = new Error("Couldn't insert data");
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case "update":
|
||||
result = await updateDbEntry({
|
||||
dbContext: "Dsql User",
|
||||
paradigm: "Full Access",
|
||||
dbFullName: dbFullName,
|
||||
tableName: table,
|
||||
data: data,
|
||||
identifierColumnName,
|
||||
identifierValue,
|
||||
tableSchema,
|
||||
});
|
||||
|
||||
break;
|
||||
|
||||
case "delete":
|
||||
result = await deleteDbEntry({
|
||||
dbContext: "Dsql User",
|
||||
paradigm: "Full Access",
|
||||
dbFullName: dbFullName,
|
||||
tableName: table,
|
||||
identifierColumnName,
|
||||
identifierValue,
|
||||
tableSchema,
|
||||
});
|
||||
|
||||
break;
|
||||
|
||||
default:
|
||||
result = null;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
} catch (err) {
console.log("Error in Running Query =>", err.message);
result = null;
error = err.message;
|
||||
}
|
||||
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
|
||||
return { result, error };
|
||||
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
}
|
||||
|
||||
module.exports = runQuery;
|
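runQuery accepts either a raw SELECT string or an action object; a sketch of the object form follows (table and data are illustrative).

const runQuery = require("./bin/query/utils/runQuery");

(async () => {
    const { result, error } = await runQuery({
        dbFullName: process.env.DSQL_DB_NAME || "example_db",
        query: {
            action: "insert",
            table: "posts",
            data: { title: "Hello World" },
        },
        // optional: pass the parsed dsql.schema.json database object as `dbSchema`
        // to enable per-field handling such as encryption
    });

    if (error) console.log(error);
    else console.log(result);
})();
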
142
bin/query/utils/updateDbEntry.js
Normal file
@ -0,0 +1,142 @@
// @ts-check

const encrypt = require("../../../functions/encrypt");
const dbHandler = require("../../engine/utils/dbHandler");

/**
 * Update DB Entry Function
 * ==============================================================================
 * @description Update a single row in a table, identified by `identifierColumnName`
 * and `identifierValue`, after encrypting and serializing the provided data.
 * @async
 *
 * @param {object} params - An object containing the function parameters.
 * @param {("Master" | "Dsql User")} [params.dbContext] - What is the database context? "Master"
 * or "Dsql User". Defaults to "Master"
 * @param {("Read Only" | "Full Access")} [params.paradigm] - What is the paradigm for "Dsql User"?
 * "Read Only" or "Full Access"? Defaults to "Read Only"
 * @param {string} params.dbFullName - Database full name
 * @param {string} params.tableName - Table name
 * @param {object} params.data - Data to add
 * @param {import("../../../types/database-schema.td").DSQL_TableSchemaType} [params.tableSchema] - Table schema
 * @param {string} params.identifierColumnName - Update row identifier column name
 * @param {string | number} params.identifierValue - Update row identifier column value
 *
 * @returns {Promise<object|null>}
 */
async function updateDbEntry({ dbContext, paradigm, dbFullName, tableName, data, tableSchema, identifierColumnName, identifierValue }) {
    /**
     * Check if data is valid
     */
    if (!data || !Object.keys(data).length) return null;

    ////////////////////////////////////////

    /**
     * Declare variables
     *
     * @description Collect the data keys and prepare the SET clause fragments
     * and their placeholder values.
     */
    const dataKeys = Object.keys(data);

    let updateKeyValueArray = [];
    let updateValues = [];

    const encryptionKey = process.env.DSQL_ENCRYPTION_KEY;
    const encryptionSalt = process.env.DSQL_ENCRYPTION_SALT;

    /**
     * Build the SET clause
     *
     * @description Loop through the data keys, encrypt or serialize values where
     * required, and push `column=?` fragments plus the matching values.
     */
    for (let i = 0; i < dataKeys.length; i++) {
        try {
            const dataKey = dataKeys[i];
            let value = data[dataKey];

            const targetFieldSchemaArray = tableSchema ? tableSchema?.fields?.filter((field) => field.fieldName === dataKey) : null;
            const targetFieldSchema = targetFieldSchemaArray && targetFieldSchemaArray[0] ? targetFieldSchemaArray[0] : null;

            if (typeof value == "undefined") continue;
            if (typeof value !== "string" && typeof value !== "number" && !value) continue;

            if (targetFieldSchema?.encrypted) {
                value = encrypt({ data: value, encryptionKey, encryptionSalt });
            }

            if (typeof value === "object") {
                value = JSON.stringify(value);
            }

            // Literal "null" strings and empty strings are written as SQL NULL
            if (typeof value === "string" && value.match(/^null$/i)) {
                value = {
                    toSqlString: function () {
                        return "NULL";
                    },
                };
            }

            if (typeof value === "string" && !value.match(/./i)) {
                value = {
                    toSqlString: function () {
                        return "NULL";
                    },
                };
            }

            // Skip NaN values
            if (!value && typeof value == "number" && value != 0) continue;

            updateKeyValueArray.push(`\`${dataKey}\`=?`);
            updateValues.push(value);
        } catch (error) {
            console.log("DSQL: Error in parsing data keys in update function =>", error.message);
            continue;
        }
    }

    ////////////////////////////////////////

    updateKeyValueArray.push(`date_updated='${Date()}'`);
    updateKeyValueArray.push(`date_updated_code='${Date.now()}'`);

    ////////////////////////////////////////

    const query = `UPDATE ${tableName} SET ${updateKeyValueArray.join(",")} WHERE \`${identifierColumnName}\`=?`;

    updateValues.push(identifierValue);

    const updatedEntry = await dbHandler({
        database: dbFullName,
        query: query,
        values: updateValues,
    });

    ////////////////////////////////////////

    /**
     * Return statement
     */
    return updatedEntry;
}

////////////////////////////////////////

module.exports = updateDbEntry;

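A minimal usage sketch for `updateDbEntry` (the database name, table, column and row values below are illustrative, not taken from this commit; the `.env` variables for the local database and encryption must already be configured):

const updateDbEntry = require("./bin/query/utils/updateDbEntry");

async function example() {
    // Update the row whose `id` is 42 in a hypothetical `blog_posts` table
    const result = await updateDbEntry({
        dbFullName: "datasquirel_user_7_new_database",
        tableName: "blog_posts",
        data: { title: "Updated title" },
        identifierColumnName: "id",
        identifierValue: 42,
    });
    console.log(result);
}
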
@ -1,7 +1,18 @@
// @ts-check

const { scryptSync, createCipheriv } = require("crypto");
const { Buffer } = require("buffer");

const encrypt = ({ data, encryptionKey, encryptionSalt }) => {
    if (!encryptionKey?.match(/.{8,}/)) {
        console.log("Encryption key is invalid");
        return data;
    }
    if (!encryptionSalt?.match(/.{8,}/)) {
        console.log("Encryption salt is invalid");
        return data;
    }

    const algorithm = "aes-192-cbc";
    const password = encryptionKey;

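The hunk above is truncated by the viewer. Assuming the function continues the way the imports suggest, a typical AES-192-CBC completion would look roughly like the sketch below; this is an assumption about the rest of the helper, not the committed code:

    // Sketch (assumed continuation): derive a 24-byte key for aes-192-cbc
    const key = scryptSync(password, encryptionSalt, 24);
    // A fixed, zero-filled IV keeps output deterministic (simple, but weaker than a random IV)
    const iv = Buffer.alloc(16, 0);

    const cipher = createCipheriv(algorithm, key, iv);
    let encrypted = cipher.update(String(data), "utf8", "hex");
    encrypted += cipher.final("hex");
    return encrypted;
};
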
2
index.js
@ -5,6 +5,7 @@
 */
const get = require("./utils/get");
const post = require("./utils/post");
const getSchema = require("./utils/get-schema");

const uploadImage = require("./utils/upload-image");
const uploadFile = require("./utils/upload-file");
@ -66,6 +67,7 @@ const datasquirel = {
    post: post,
    media: media,
    user: user,
    getSchema: getSchema,
    sanitizeSql: sanitizeSql,
};

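With `getSchema` now wired into the package root, a consumer could call it as sketched below (the database slug is illustrative; `DSQL_KEY` is the environment variable used elsewhere in this commit):

const datasquirel = require("datasquirel");

datasquirel
    .getSchema({
        key: process.env.DSQL_KEY,
        database: "new_database",
    })
    .then((response) => {
        // response: { success, payload } where payload is the schema (or null)
        console.log(response.success, response.payload);
    });
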
106
package-lock.json
generated
@ -1,13 +1,113 @@
{
  "name": "datasquirel",
  "version": "1.1.81",
  "version": "1.4.8",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "": {
      "name": "datasquirel",
      "version": "1.1.81",
      "license": "ISC"
      "version": "1.4.8",
      "license": "ISC",
      "dependencies": {
        "dotenv": "^16.3.1",
        "mysql": "^2.18.1"
      },
      "bin": {
        "dsql": "bin/dsql"
      }
    },
    "node_modules/bignumber.js": {
      "version": "9.0.0",
      "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.0.tgz",
      "integrity": "sha512-t/OYhhJ2SD+YGBQcjY8GzzDHEk9f3nerxjtfa6tlMXfe7frs/WozhvCNoGvpM0P3bNf3Gq5ZRMlGr5f3r4/N8A==",
      "engines": {
        "node": "*"
      }
    },
    "node_modules/core-util-is": {
      "version": "1.0.3",
      "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz",
      "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ=="
    },
    "node_modules/dotenv": {
      "version": "16.3.1",
      "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.3.1.tgz",
      "integrity": "sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ==",
      "engines": {
        "node": ">=12"
      },
      "funding": {
        "url": "https://github.com/motdotla/dotenv?sponsor=1"
      }
    },
    "node_modules/inherits": {
      "version": "2.0.4",
      "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
      "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
    },
    "node_modules/isarray": {
      "version": "1.0.0",
      "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
      "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ=="
    },
    "node_modules/mysql": {
      "version": "2.18.1",
      "resolved": "https://registry.npmjs.org/mysql/-/mysql-2.18.1.tgz",
      "integrity": "sha512-Bca+gk2YWmqp2Uf6k5NFEurwY/0td0cpebAucFpY/3jhrwrVGuxU2uQFCHjU19SJfje0yQvi+rVWdq78hR5lig==",
      "dependencies": {
        "bignumber.js": "9.0.0",
        "readable-stream": "2.3.7",
        "safe-buffer": "5.1.2",
        "sqlstring": "2.3.1"
      },
      "engines": {
        "node": ">= 0.6"
      }
    },
    "node_modules/process-nextick-args": {
      "version": "2.0.1",
      "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz",
      "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag=="
    },
    "node_modules/readable-stream": {
      "version": "2.3.7",
      "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz",
      "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==",
      "dependencies": {
        "core-util-is": "~1.0.0",
        "inherits": "~2.0.3",
        "isarray": "~1.0.0",
        "process-nextick-args": "~2.0.0",
        "safe-buffer": "~5.1.1",
        "string_decoder": "~1.1.1",
        "util-deprecate": "~1.0.1"
      }
    },
    "node_modules/safe-buffer": {
      "version": "5.1.2",
      "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
      "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="
    },
    "node_modules/sqlstring": {
      "version": "2.3.1",
      "resolved": "https://registry.npmjs.org/sqlstring/-/sqlstring-2.3.1.tgz",
      "integrity": "sha512-ooAzh/7dxIG5+uDik1z/Rd1vli0+38izZhGzSa34FwR7IbelPWCCKSNIl8jlL/F7ERvy8CB2jNeM1E9i9mXMAQ==",
      "engines": {
        "node": ">= 0.6"
      }
    },
    "node_modules/string_decoder": {
      "version": "1.1.1",
      "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz",
      "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==",
      "dependencies": {
        "safe-buffer": "~5.1.0"
      }
    },
    "node_modules/util-deprecate": {
      "version": "1.0.2",
      "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
      "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="
    }
  }
}

11
package.json
@ -1,8 +1,11 @@
{
  "name": "datasquirel",
  "version": "1.4.8",
  "version": "1.4.9",
  "description": "Cloud-based SQL data management tool",
  "main": "index.js",
  "bin": {
    "dsql-watch": "./bin/dsql"
  },
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
@ -23,5 +26,9 @@
  "bugs": {
    "url": "https://github.com/BenjaminToby/dsql/issues"
  },
  "homepage": "https://datasquirel.com/"
  "homepage": "https://datasquirel.com/",
  "dependencies": {
    "dotenv": "^16.3.1",
    "mysql": "^2.18.1"
  }
}

93
types/database-schema.td.js
Normal file
@ -0,0 +1,93 @@
/**
 * @typedef {string} DSQL_DatabaseFullName - Database full name(slug) including datasquirel data => "datasquirel_user_7_new_database"
 */

////////////////////////////////////////////////////////////////////////////////

/**
 * @typedef {object} DSQL_DatabaseSchemaType
 * @property {string} dbName - Database Full name with spaces => "New Database"
 * @property {string} dbSlug - Database Slug => "new_database"
 * @property {string} dbFullName - Database full name(slug) including datasquirel data => "datasquirel_user_7_new_database"
 * @property {string} [dbDescription] - Database brief description
 * @property {string} [dbImage] - Database image - Defaults to "/images/default.png"
 * @property {DSQL_TableSchemaType[]} tables - List of database tables
 * @property {{ dbFullName: string }[]} [childrenDatabases] - List of children databases for the current database, which is the parent
 * @property {boolean} [childDatabase] - Whether the current database is a child of a different parent database
 * @property {string} [childDatabaseDbFullName] - Parent database full name => "datasquirel_user_7_new_database"
 */

////////////////////////////////////////

/**
 * @typedef {object} DSQL_TableSchemaType
 * @property {string} tableName - Table slug (blog_posts)
 * @property {string} tableFullName - Table full name with spaces => "Blog Posts"
 * @property {string} [tableDescription] - Brief description of table
 * @property {DSQL_FieldSchemaType[]} fields - List of table Fields
 * @property {DSQL_IndexSchemaType[]} [indexes] - List of table indexes, if available
 * @property {DSQL_ChildrenTablesType[]} childrenTables - List of children tables
 * @property {boolean} [childTable] - If current table is a child clone
 * @property {string} [childTableName] - Table slug of parent table => "blog_posts"
 * @property {string} [childTableDbFullName] - Database full name(slug) including datasquirel data => "datasquirel_user_7_new_database"
 * @property {string} [tableNameOld] - Old table name, in case of renaming the table
 */

/**
 * @typedef {object} DSQL_ChildrenTablesType
 * @property {string} dbNameFull - Database full name(slug) including datasquirel data => "datasquirel_user_7_new_database"
 * @property {string} tableName - Table slug => "blog_posts"
 */

////////////////////////////////////////

/**
 * @typedef {object} DSQL_FieldSchemaType
 * @property {string} fieldName - Field Name(slug) => "long_description"
 * @property {string} [originName] - Field origin name (optional)
 * @property {boolean} [updatedField] - Has this field been renamed?
 * @property {string} dataType - Field Data type => "BIGINT" | "LONGTEXT" | "VARCHAR(***)" | ...
 * @property {boolean} [nullValue] - Is this a null value or not?
 * @property {boolean} [notNullValue] - Is this NOT a null value?
 * @property {boolean} [primaryKey] - Is this the primary key for the table?
 * @property {boolean} [encrypted] - Is this field value encrypted?
 * @property {boolean} [autoIncrement] - Does this table primary key increment automatically?
 * @property {string|number} [defaultValue] - Value of field by default
 * @property {string} [defaultValueLiteral] - SQL keyword which generates the value automatically => "CURRENT_TIMESTAMP"
 * @property {DSQL_ForeignKeyType} [foreignKey] - Field foreign key reference object
 * @property {boolean} [richText] - Rich text field
 */

/**
 * @typedef {object} DSQL_ForeignKeyType
 * @property {string} foreignKeyName - Unique Name of foreign key
 * @property {string} destinationTableName - Reference table name(slug) => "blog_posts"
 * @property {string} destinationTableColumnName - Reference column name(slug) => "id"
 * @property {string} destinationTableColumnType - Reference table field type => "BIGINT" | "VARCHAR(***)" | ...
 * @property {boolean} [cascadeDelete] - Does the reference table entry delete when this key is deleted?
 * @property {boolean} [cascadeUpdate] - Does the reference table entry update when this key is updated?
 */

////////////////////////////////////////

/**
 * @typedef {object} DSQL_IndexSchemaType
 * @property {string} indexName - Unique Name of index => "blog_text_index"
 * @property {string} indexType - "regular" or "fullText"
 * @property {DSQL_IndexTableFieldType[]} indexTableFields - List of Index table fields
 * @property {string} [alias] - Index alias
 */

/**
 * @typedef {object} DSQL_IndexTableFieldType
 * @property {string} value - Table Field Name
 * @property {string} dataType - Table Field data type "VARCHAR(***)" | "BIGINT" | ...
 */

////////////////////////////////////////

// JSDoc typedefs have no runtime value, so there is nothing concrete to export here;
// the original `exports.DSQL_TableSchemaType = DSQL_TableSchemaType;` line would throw
// a ReferenceError if this file were ever required, so export nothing instead.
module.exports = {};

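For orientation, a minimal object that satisfies these typedefs might look like the sketch below (the database, table, field and index names are the illustrative values already used in the comments above):

/** @type {DSQL_DatabaseSchemaType} */
const exampleSchema = {
    dbName: "New Database",
    dbSlug: "new_database",
    dbFullName: "datasquirel_user_7_new_database",
    tables: [
        {
            tableName: "blog_posts",
            tableFullName: "Blog Posts",
            fields: [
                { fieldName: "id", dataType: "BIGINT", primaryKey: true, autoIncrement: true },
                { fieldName: "title", dataType: "VARCHAR(255)", notNullValue: true },
                { fieldName: "long_description", dataType: "LONGTEXT", richText: true },
            ],
            indexes: [
                {
                    indexName: "blog_text_index",
                    indexType: "fullText",
                    indexTableFields: [{ value: "long_description", dataType: "LONGTEXT" }],
                },
            ],
            childrenTables: [],
        },
    ],
};
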
48
types/mysql.td.js
Normal file
@ -0,0 +1,48 @@
/**
 * @typedef {object} DSQL_MYSQL_SHOW_INDEXES_Type
 * @property {string} Key_name - MYSQL Index Name
 * @property {string} Table - Table Name(slug)
 * @property {string} Column_name
 * @property {string} Collation
 * @property {string} Index_type - "FULL_TEXT" | ...
 * @property {string} Cardinality
 * @property {string} Index_comment
 * @property {string} Comment
 */

////////////////////////////////////////

/**
 * @typedef {object} DSQL_MYSQL_SHOW_COLUMNS_Type
 * @property {string} Field - Field Name as represented in the MySQL database
 * @property {string} Type - varchar(***) | tinyint | bigint | ...
 * @property {string} Null
 * @property {string} Key
 * @property {string} Default
 * @property {string} Extra
 */

////////////////////////////////////////

/**
 * @typedef {object} DSQL_MYSQL_FOREIGN_KEYS_Type
 * @property {string} CONSTRAINT_NAME - Constraint Name => "PRIMARY" | "MUL" | null | ...
 * @property {string} CONSTRAINT_SCHEMA - Database name
 * @property {string} TABLE_NAME - Table name
 */

////////////////////////////////////////

/**
 * @typedef {object} DSQL_MYSQL_user_databases_Type
 * @property {number} user_id - User Id
 * @property {string} db_full_name - Database full name => eg. (datasquirel_user_2_new_database)
 * @property {string} db_name - Database name with spaces => eg. (New Database)
 * @property {string} db_slug - Database slug => eg. (new_database)
 * @property {string} db_image - Database image path
 * @property {string} db_description - Database description
 * @property {number} active_clone - Is the database an active clone? => 0 or 1
 * @property {string} active_clone_parent_db - Database parent db full name => eg. "datasquirel_user_7_wexculture"
 */

////////////////////////////////////////

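Rows with these shapes come from standard MySQL introspection queries. A sketch using the `mysql` driver already in the dependency tree (the connection settings are placeholders read from the same environment variables the package uses, and the table name is illustrative):

const mysql = require("mysql");

const connection = mysql.createConnection({
    host: process.env.DSQL_HOST,
    user: process.env.DSQL_USER,
    password: process.env.DSQL_PASS,
    database: process.env.DSQL_DB_NAME,
});

// Each row matches DSQL_MYSQL_SHOW_COLUMNS_Type: { Field, Type, Null, Key, Default, Extra }
connection.query("SHOW COLUMNS FROM ??", ["blog_posts"], (err, rows) => {
    if (err) throw err;
    console.log(rows);
    connection.end();
});
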
89
utils/get-schema.js
Normal file
@ -0,0 +1,89 @@
// @ts-check

/**
 * ==============================================================================
 * Imports
 * ==============================================================================
 */
const https = require("https");

/** ****************************************************************************** */

/**
 * @typedef {Object} GetSchemaReturn
 * @property {boolean} success - Did the function run successfully?
 * @property {import("../types/database-schema.td").DSQL_DatabaseSchemaType[] | import("../types/database-schema.td").DSQL_DatabaseSchemaType | null} payload - Response payload
 */

/**
 * Make a get request to Datasquirel API
 * ==============================================================================
 * @async
 *
 * @param {Object} params - Single object passed
 * @param {string} params.key - `FULL ACCESS` API Key
 * @param {string} [params.database] - The database schema to get
 *
 * @returns { Promise<GetSchemaReturn> } - Return Object
 */
async function getSchema({ key, database }) {
    /**
     * Make https request
     *
     * @description Make a GET request to datasquirel.com and resolve with the parsed JSON body
     */
    const httpResponse = await new Promise((resolve, reject) => {
        https
            .request(
                {
                    method: "GET",
                    headers: {
                        "Content-Type": "application/json",
                        Authorization: key,
                    },
                    port: 443,
                    hostname: "datasquirel.com",
                    path: "/api/query/get-schema" + (database ? `?database=${database}` : ""),
                },

                /**
                 * Callback Function
                 *
                 * @description https request callback
                 */
                (response) => {
                    var str = "";

                    response.on("data", function (chunk) {
                        str += chunk;
                    });

                    response.on("end", function () {
                        resolve(JSON.parse(str));
                    });

                    response.on("error", (err) => {
                        reject(err);
                    });
                }
            )
            .end();
    });

    /** ********************************************** */

    return httpResponse;
}

/** ********************************************** */

module.exports = getSchema;

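One plausible way a consumer might cache the fetched schema into the local `dsql.schema.json` file that `utils/get.js` reads is sketched below; the write step is illustrative, not part of this commit, and the database slug is a placeholder:

const fs = require("fs");
const path = require("path");
const getSchema = require("./utils/get-schema");

getSchema({ key: process.env.DSQL_KEY, database: "new_database" }).then((res) => {
    if (!res.success || !res.payload) return console.log("No schema returned");
    // Note: without a `database` argument the payload may be an array of schemas
    fs.writeFileSync(path.resolve(process.cwd(), "dsql.schema.json"), JSON.stringify(res.payload, null, 4));
});
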
37
utils/get.js
@ -1,9 +1,14 @@
// @ts-check

/**
 * ==============================================================================
 * Imports
 * ==============================================================================
 */
const https = require("https");
const path = require("path");
const fs = require("fs");
const localGet = require("../bin/query/get");

/** ****************************************************************************** */
@ -15,7 +20,9 @@ const https = require("https");
/**
 * @typedef {Object} GetReturn
 * @property {boolean} success - Did the function run successfully?
 * @property {(Object[]|string)} [payload=[]] - GET request results
 * @property {(Object[]|string|null|object)} [payload] - GET request results
 * @property {string} [msg] - Message
 * @property {string} [error] - Error Message
 */

/**
@ -33,6 +40,34 @@ const https = require("https");
 * @returns { Promise<GetReturn> } - Return Object
 */
async function get({ key, db, query, queryValues, tableName }) {
    /**
     * Check for local DB settings
     *
     * @description Look for local db settings in the `.env` file and bypass the http request if they are available
     */
    const { DSQL_HOST, DSQL_USER, DSQL_PASS, DSQL_DB_NAME, DSQL_KEY, DSQL_REF_DB_NAME, DSQL_FULL_SYNC } = process.env;

    if (DSQL_HOST?.match(/./) && DSQL_USER?.match(/./) && DSQL_PASS?.match(/./) && DSQL_DB_NAME?.match(/./)) {
        /** @type {import("../types/database-schema.td").DSQL_DatabaseSchemaType | undefined} */
        let dbSchema;

        try {
            const localDbSchemaPath = path.resolve(process.cwd(), "dsql.schema.json");
            dbSchema = JSON.parse(fs.readFileSync(localDbSchemaPath, "utf8"));
        } catch (error) {}

        console.log("Reading from local database ...");

        return await localGet({
            dbSchema: dbSchema,
            options: {
                query: query,
                queryValues: queryValues,
                tableName: tableName,
            },
        });
    }

    /**
     * Make https request
     *

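Putting the local bypass together: with `DSQL_HOST`, `DSQL_USER`, `DSQL_PASS` and `DSQL_DB_NAME` set in `.env`, a read resolves through `localGet` rather than the HTTPS API, roughly as sketched here (the SQL and table are illustrative):

const datasquirel = require("datasquirel");

datasquirel
    .get({
        key: process.env.DSQL_KEY,
        db: process.env.DSQL_DB_NAME,
        query: "SELECT id, title FROM blog_posts LIMIT 10",
    })
    .then((response) => {
        // response matches GetReturn: { success, payload, msg?, error? }
        console.log(response.success, response.payload);
    });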