// addDbEntry — insert a single entry or a batch of entries into a DSQL table.
import sanitizeHtml from "sanitize-html";
|
|
import sanitizeHtmlOptions from "../html/sanitizeHtmlOptions";
|
|
import updateDbEntry from "./updateDbEntry";
|
|
import _ from "lodash";
|
|
import encrypt from "../../dsql/encrypt";
|
|
import connDbHandler from "../../../utils/db/conn-db-handler";
|
|
import checkIfIsMaster from "../../../utils/check-if-is-master";
|
|
import { DbContextsArray } from "./runQuery";
|
|
import debugLog from "../../../utils/logging/debug-log";
|
|
import {
|
|
APIResponseObject,
|
|
DSQL_TableSchemaType,
|
|
PostInsertReturn,
|
|
} from "../../../types";
|
|
import purgeDefaultFields from "../../../utils/purge-default-fields";
|
|
|
|
/**
 * Parameters for {@link addDbEntry}.
 *
 * `T` is the row shape being inserted; `K` is the (string) table name type.
 */
export type AddDbEntryParam<
    T extends { [k: string]: any } = any,
    K extends string = string
> = {
    /** Which database context to run in — one of `DbContextsArray`. */
    dbContext?: (typeof DbContextsArray)[number];
    /** Access paradigm for the connection. */
    paradigm?: "Read Only" | "Full Access";
    /** Fully-qualified database name. Required when not running on master. */
    dbFullName?: string;
    /** Target table name. */
    tableName: K;
    /** Single row to insert. Mutually exclusive with `batchData`. */
    data?: T;
    /** Multiple rows to insert in one statement. Takes effect only when `data` is absent. */
    batchData?: T[];
    /** Table schema used for encryption, rich-text and pattern handling per field. */
    tableSchema?: DSQL_TableSchemaType;
    /** Column checked for an existing entry before inserting. */
    duplicateColumnName?: keyof T;
    /** Value compared against `duplicateColumnName` in the duplicate check. */
    duplicateColumnValue?: string | number;
    /**
     * Update Entry if a duplicate is found.
     * Requires `duplicateColumnName` and `duplicateColumnValue` parameters
     */
    update?: boolean;
    /** Key used when a field's schema marks it `encrypted`. */
    encryptionKey?: string;
    /** Salt used alongside `encryptionKey`. */
    encryptionSalt?: string;
    /** Force the local (master) connection regardless of context. */
    forceLocal?: boolean;
    /** Emit diagnostic logs via `debugLog`. */
    debug?: boolean;
};
|
|
|
|
/**
|
|
* Add a db Entry Function
|
|
*/
|
|
export default async function addDbEntry<
|
|
T extends { [k: string]: any } = any,
|
|
K extends string = string
|
|
>({
|
|
dbContext,
|
|
paradigm,
|
|
dbFullName,
|
|
tableName,
|
|
data,
|
|
batchData,
|
|
tableSchema,
|
|
duplicateColumnName,
|
|
duplicateColumnValue,
|
|
update,
|
|
encryptionKey,
|
|
encryptionSalt,
|
|
forceLocal,
|
|
debug,
|
|
}: AddDbEntryParam<T, K>): Promise<APIResponseObject<PostInsertReturn>> {
|
|
const isMaster = forceLocal
|
|
? true
|
|
: checkIfIsMaster({ dbContext, dbFullName });
|
|
|
|
if (debug) {
|
|
debugLog({
|
|
log: isMaster,
|
|
addTime: true,
|
|
label: "isMaster",
|
|
});
|
|
}
|
|
|
|
const DB_CONN = isMaster
|
|
? global.DSQL_DB_CONN
|
|
: global.DSQL_FULL_ACCESS_DB_CONN || global.DSQL_DB_CONN;
|
|
const DB_RO_CONN = isMaster
|
|
? global.DSQL_DB_CONN
|
|
: global.DSQL_READ_ONLY_DB_CONN || global.DSQL_DB_CONN;
|
|
|
|
let newData = _.cloneDeep(data);
|
|
if (newData) {
|
|
newData = purgeDefaultFields(newData);
|
|
}
|
|
|
|
let newBatchData = _.cloneDeep(batchData) as any[];
|
|
if (newBatchData) {
|
|
newBatchData = purgeDefaultFields(newBatchData);
|
|
}
|
|
|
|
if (
|
|
duplicateColumnName &&
|
|
typeof duplicateColumnName === "string" &&
|
|
newData
|
|
) {
|
|
const checkDuplicateQuery = `SELECT * FROM ${
|
|
isMaster ? "" : `\`${dbFullName}\`.`
|
|
}\`${tableName}\` WHERE \`${duplicateColumnName}\`=?`;
|
|
|
|
const duplicateValue = await connDbHandler(
|
|
DB_RO_CONN,
|
|
checkDuplicateQuery,
|
|
[duplicateColumnValue]
|
|
);
|
|
|
|
if (duplicateValue?.[0] && !update) {
|
|
return {
|
|
success: false,
|
|
payload: undefined,
|
|
msg: "Duplicate entry found",
|
|
};
|
|
} else if (duplicateValue?.[0] && update) {
|
|
return await updateDbEntry({
|
|
dbContext,
|
|
dbFullName,
|
|
tableName,
|
|
data: newData,
|
|
tableSchema,
|
|
encryptionKey,
|
|
encryptionSalt,
|
|
identifierColumnName: duplicateColumnName,
|
|
identifierValue: duplicateColumnValue || "",
|
|
});
|
|
}
|
|
}
|
|
|
|
function generateQuery(data: T) {
|
|
const dataKeys = Object.keys(data);
|
|
|
|
let insertKeysArray = [];
|
|
let insertValuesArray = [];
|
|
|
|
for (let i = 0; i < dataKeys.length; i++) {
|
|
try {
|
|
const dataKey = dataKeys[i];
|
|
let value = data[dataKey];
|
|
|
|
const targetFieldSchemaArray = tableSchema
|
|
? tableSchema?.fields?.filter(
|
|
(field) => field.fieldName == dataKey
|
|
)
|
|
: null;
|
|
const targetFieldSchema =
|
|
targetFieldSchemaArray && targetFieldSchemaArray[0]
|
|
? targetFieldSchemaArray[0]
|
|
: null;
|
|
|
|
if (value == null || value == undefined) continue;
|
|
|
|
if (
|
|
targetFieldSchema?.dataType?.match(/int$/i) &&
|
|
typeof value == "string" &&
|
|
!value?.match(/./)
|
|
)
|
|
continue;
|
|
|
|
if (targetFieldSchema?.encrypted) {
|
|
value = encrypt({
|
|
data: value,
|
|
encryptionKey,
|
|
encryptionSalt,
|
|
});
|
|
console.log("DSQL: Encrypted value =>", value);
|
|
}
|
|
|
|
const htmlRegex = /<[^>]+>/g;
|
|
|
|
if (
|
|
targetFieldSchema?.richText ||
|
|
String(value).match(htmlRegex)
|
|
) {
|
|
value = sanitizeHtml(value, sanitizeHtmlOptions);
|
|
}
|
|
|
|
if (targetFieldSchema?.pattern) {
|
|
const pattern = new RegExp(
|
|
targetFieldSchema.pattern,
|
|
targetFieldSchema.patternFlags || ""
|
|
);
|
|
if (!pattern.test(value)) {
|
|
console.log("DSQL: Pattern not matched =>", value);
|
|
value = "";
|
|
}
|
|
}
|
|
|
|
insertKeysArray.push("`" + dataKey + "`");
|
|
|
|
if (typeof value === "object") {
|
|
value = JSON.stringify(value);
|
|
}
|
|
|
|
if (typeof value == "number") {
|
|
insertValuesArray.push(String(value));
|
|
} else {
|
|
insertValuesArray.push(value);
|
|
}
|
|
} catch (error: any) {
|
|
console.log(
|
|
"DSQL: Error in parsing data keys =>",
|
|
error.message
|
|
);
|
|
global.ERROR_CALLBACK?.(
|
|
`Error parsing Data Keys`,
|
|
error as Error
|
|
);
|
|
continue;
|
|
}
|
|
}
|
|
|
|
insertKeysArray.push("`date_created`");
|
|
insertValuesArray.push(Date());
|
|
|
|
insertKeysArray.push("`date_created_code`");
|
|
insertValuesArray.push(Date.now());
|
|
|
|
insertKeysArray.push("`date_updated`");
|
|
insertValuesArray.push(Date());
|
|
|
|
insertKeysArray.push("`date_updated_code`");
|
|
insertValuesArray.push(Date.now());
|
|
|
|
const queryValuesArray = insertValuesArray;
|
|
|
|
return { queryValuesArray, insertValuesArray, insertKeysArray };
|
|
}
|
|
|
|
if (newData) {
|
|
const { insertKeysArray, insertValuesArray, queryValuesArray } =
|
|
generateQuery(newData);
|
|
|
|
const query = `INSERT INTO ${
|
|
isMaster && !dbFullName ? "" : `\`${dbFullName}\`.`
|
|
}\`${tableName}\` (${insertKeysArray.join(
|
|
","
|
|
)}) VALUES (${insertValuesArray.map(() => "?").join(",")})`;
|
|
|
|
const newInsert = await connDbHandler(
|
|
DB_CONN,
|
|
query,
|
|
queryValuesArray,
|
|
debug
|
|
);
|
|
|
|
return {
|
|
success: Boolean(newInsert?.insertId),
|
|
payload: newInsert,
|
|
queryObject: {
|
|
sql: query,
|
|
params: queryValuesArray,
|
|
},
|
|
};
|
|
} else if (newBatchData) {
|
|
let batchInsertKeysArray: string[] | undefined;
|
|
let batchInsertValuesArray: any[][] = [];
|
|
let batchQueryValuesArray: any[][] = [];
|
|
|
|
for (let i = 0; i < newBatchData.length; i++) {
|
|
const singleBatchData = newBatchData[i];
|
|
const { insertKeysArray, insertValuesArray, queryValuesArray } =
|
|
generateQuery(singleBatchData);
|
|
|
|
if (!batchInsertKeysArray) {
|
|
batchInsertKeysArray = insertKeysArray;
|
|
}
|
|
|
|
batchInsertValuesArray.push(insertValuesArray);
|
|
batchQueryValuesArray.push(queryValuesArray);
|
|
}
|
|
|
|
const query = `INSERT INTO ${
|
|
isMaster && !dbFullName ? "" : `\`${dbFullName}\`.`
|
|
}\`${tableName}\` (${batchInsertKeysArray?.join(
|
|
","
|
|
)}) VALUES ${batchInsertValuesArray
|
|
.map((vl) => `(${vl.map(() => "?").join(",")})`)
|
|
.join(",")}`;
|
|
|
|
console.log("query", query);
|
|
console.log("batchQueryValuesArray", batchQueryValuesArray);
|
|
|
|
const newInsert = await connDbHandler(
|
|
DB_CONN,
|
|
query,
|
|
batchQueryValuesArray.flat(),
|
|
debug
|
|
);
|
|
|
|
if (debug) {
|
|
debugLog({
|
|
log: newInsert,
|
|
addTime: true,
|
|
label: "newInsert",
|
|
});
|
|
}
|
|
|
|
return {
|
|
success: Boolean(newInsert?.insertId),
|
|
payload: newInsert,
|
|
queryObject: {
|
|
sql: query,
|
|
params: batchQueryValuesArray.flat(),
|
|
},
|
|
};
|
|
} else {
|
|
return {
|
|
success: false,
|
|
payload: undefined,
|
|
msg: "No data provided",
|
|
};
|
|
}
|
|
}
|