// datasquirel/dist/package-shared/functions/backend/db/addDbEntry.js
// Author: Benjamin Toby (commit ab13c68c8e "Updates", 2025-08-04 08:20:32 +01:00)

"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Coerce a yielded value into the promise type P unless it already is one.
    function adopt(value) {
        return value instanceof P ? value : new P(function (resolve) { resolve(value); });
    }
    return new (P || (P = Promise))(function (resolve, reject) {
        var iterator = generator.apply(thisArg, _arguments || []);
        // Drive the generator one step at a time, resuming it with each
        // settled value until it reports completion.
        function step(result) {
            if (result.done) {
                resolve(result.value);
                return;
            }
            adopt(result.value).then(
                function (value) { try { step(iterator.next(value)); } catch (e) { reject(e); } },
                function (reason) { try { step(iterator["throw"](reason)); } catch (e) { reject(e); } }
            );
        }
        step(iterator.next());
    });
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    // ES-module namespaces pass through untouched; CommonJS exports are
    // wrapped so consumers can always read `.default`.
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
// CommonJS module wiring emitted by tsc: mark the module as an ES-module
// interop target and export `addDbEntry` as the default export.
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = addDbEntry;
// Project-local dependencies (all resolved relative to package-shared/):
const updateDbEntry_1 = __importDefault(require("./updateDbEntry")); // fallback when a duplicate row should be updated instead of inserted
const lodash_1 = __importDefault(require("lodash")); // used only for cloneDeep
const conn_db_handler_1 = __importDefault(require("../../../utils/db/conn-db-handler")); // executes parameterized SQL
const check_if_is_master_1 = __importDefault(require("../../../utils/check-if-is-master"));
const debug_log_1 = __importDefault(require("../../../utils/logging/debug-log"));
const purge_default_fields_1 = __importDefault(require("../../../utils/purge-default-fields")); // strips auto-managed columns from caller data
const grab_parsed_value_1 = __importDefault(require("./grab-parsed-value")); // per-field value parsing/encryption
/**
 * Add a db Entry Function.
 *
 * Inserts a single row (`data`) or multiple rows (`batchData`) into
 * `tableName`, after stripping auto-managed default fields from a deep
 * clone of the caller's input. When `duplicateColumnName` is provided
 * (single-row mode only), an existing row with the matching value either
 * aborts the insert or — when `update` is truthy — delegates to
 * `updateDbEntry` instead.
 *
 * Returns `{ success, payload, msg? }` on early exits, or
 * `{ success, payload, queryObject: { sql, params } }` after an insert.
 * `paradigm` is accepted but unused in this function body.
 */
function addDbEntry(_a) {
return __awaiter(this, arguments, void 0, function* ({ dbContext, paradigm, dbFullName, tableName, data, batchData, tableSchema, duplicateColumnName, duplicateColumnValue, update, encryptionKey, encryptionSalt, forceLocal, debug, }) {
// `forceLocal` bypasses the master-database check entirely.
const isMaster = forceLocal
? true
: (0, check_if_is_master_1.default)({ dbContext, dbFullName });
if (debug) {
(0, debug_log_1.default)({
log: isMaster,
addTime: true,
label: "isMaster",
});
}
// Deep-clone before purging so the caller's objects are never mutated.
let newData = lodash_1.default.cloneDeep(data);
if (newData) {
newData = (0, purge_default_fields_1.default)(newData);
}
let newBatchData = lodash_1.default.cloneDeep(batchData);
if (newBatchData) {
newBatchData = (0, purge_default_fields_1.default)(newBatchData);
}
// Duplicate detection applies to single-row inserts only (requires newData).
if (duplicateColumnName &&
typeof duplicateColumnName === "string" &&
newData) {
// NOTE(review): the db prefix here is skipped whenever `isMaster` is true,
// but the INSERT queries below only skip it when `isMaster && !dbFullName`
// — confirm which rule is intended; the two can target different schemas.
const checkDuplicateQuery = `SELECT * FROM ${isMaster ? "" : `\`${dbFullName}\`.`}\`${tableName}\` WHERE \`${duplicateColumnName}\`=?`;
const duplicateValue = yield (0, conn_db_handler_1.default)(null, checkDuplicateQuery, [
duplicateColumnValue,
]);
// Duplicate found and updates not allowed: abort with a failure result.
if ((duplicateValue === null || duplicateValue === void 0 ? void 0 : duplicateValue[0]) && !update) {
return {
success: false,
payload: undefined,
msg: "Duplicate entry found",
};
}
// Duplicate found and `update` requested: hand off to updateDbEntry,
// keyed on the duplicate column.
else if ((duplicateValue === null || duplicateValue === void 0 ? void 0 : duplicateValue[0]) && update) {
return yield (0, updateDbEntry_1.default)({
dbContext,
dbFullName,
tableName,
data: newData,
tableSchema,
encryptionKey,
encryptionSalt,
identifierColumnName: duplicateColumnName,
identifierValue: duplicateColumnValue || "",
});
}
}
// Build the column list and value list for one row. Values are parsed
// (and possibly encrypted) by grab-parsed-value; keys whose parsed value
// is undefined are skipped entirely. Per-key failures are logged and
// skipped rather than aborting the whole insert.
function generateQuery(data) {
var _a, _b;
const dataKeys = Object.keys(data);
let insertKeysArray = [];
let insertValuesArray = [];
for (let i = 0; i < dataKeys.length; i++) {
try {
const dataKey = dataKeys[i];
let value = data[dataKey];
const targetFieldSchema = tableSchema
? (_a = tableSchema === null || tableSchema === void 0 ? void 0 : tableSchema.fields) === null || _a === void 0 ? void 0 : _a.find((field) => field.fieldName === dataKey)
: null;
const parsedValue = (0, grab_parsed_value_1.default)({
dataKey,
encryptionKey,
encryptionSalt,
tableSchema,
value,
});
if (typeof parsedValue == "undefined")
continue;
insertKeysArray.push("`" + dataKey + "`");
// Vector columns are inlined as a VEC_FromText(...) SQL expression
// instead of a bound parameter.
// NOTE(review): `parsedValue` is interpolated directly into the SQL
// string here — if it can contain a quote this is an injection
// vector; confirm grab-parsed-value sanitizes vector values.
if ((_b = targetFieldSchema === null || targetFieldSchema === void 0 ? void 0 : targetFieldSchema.dataType) === null || _b === void 0 ? void 0 : _b.match(/vector/i)) {
insertValuesArray.push(`VEC_FromText('${parsedValue}')`);
}
else if (typeof parsedValue == "number") {
insertValuesArray.push(String(parsedValue));
}
else {
insertValuesArray.push(parsedValue);
}
}
catch (error) {
console.log("DSQL: Error in parsing data keys =>", error.message);
continue;
}
}
// Append audit timestamp columns. `Date()` yields a locale date string
// (e.g. "Mon Aug 04 2025 ...") — presumably the target columns are
// text/varchar rather than DATETIME; TODO confirm against the schema.
insertKeysArray.push("`date_created`");
insertValuesArray.push(Date());
insertKeysArray.push("`date_created_code`");
insertValuesArray.push(Date.now());
insertKeysArray.push("`date_updated`");
insertValuesArray.push(Date());
insertKeysArray.push("`date_updated_code`");
insertValuesArray.push(Date.now());
// queryValuesArray aliases insertValuesArray (same array object).
const queryValuesArray = insertValuesArray;
return { queryValuesArray, insertValuesArray, insertKeysArray };
}
// Render the VALUES placeholder list: "?" per bound value, but inlined
// VEC_FromText(...) expressions are emitted verbatim.
// NOTE(review): detection is by substring match on the value itself, so a
// legitimate data value containing "VEC_FromText" would be inlined raw.
function grabQueryValuesString(arr) {
return arr
.map((v, i) => {
if (v === null || v === void 0 ? void 0 : v.toString().match(/VEC_FromText/i)) {
return v;
}
return "?";
})
.join(",");
}
// Bound-parameter list: drop the inlined VEC_FromText entries (they have
// no "?" placeholder) and stringify the rest, keeping positions aligned
// with grabQueryValuesString's output.
function grabFinalQueryValuesArr(arr) {
return arr
.filter((v) => !(v === null || v === void 0 ? void 0 : v.toString().match(/VEC_FromText/i)))
.map((v) => String(v));
}
// ---- Single-row insert ----
if (newData) {
const { insertKeysArray, insertValuesArray, queryValuesArray } = generateQuery(newData);
const query = `INSERT INTO ${isMaster && !dbFullName ? "" : `\`${dbFullName}\`.`}\`${tableName}\` (${insertKeysArray.join(",")}) VALUES (${grabQueryValuesString(insertValuesArray)})`;
const finalQueryValues = grabFinalQueryValuesArr(queryValuesArray);
const newInsert = yield (0, conn_db_handler_1.default)(null, query, finalQueryValues, debug);
return {
// success keys off the driver reporting an insertId.
success: Boolean(newInsert === null || newInsert === void 0 ? void 0 : newInsert.insertId),
payload: newInsert,
queryObject: {
sql: query,
params: finalQueryValues,
},
};
}
// ---- Batch insert (multi-row VALUES) ----
else if (newBatchData) {
let batchInsertKeysArray;
let batchInsertValuesArray = [];
let batchQueryValuesArray = [];
for (let i = 0; i < newBatchData.length; i++) {
const singleBatchData = newBatchData[i];
const { insertKeysArray, insertValuesArray, queryValuesArray } = generateQuery(singleBatchData);
// NOTE(review): the column list is taken from the FIRST row only.
// If later rows have different/missing keys (or a key is skipped via
// an undefined parsed value), their value tuples will misalign with
// these columns — confirm callers always pass homogeneous rows.
if (!batchInsertKeysArray) {
batchInsertKeysArray = insertKeysArray;
}
batchInsertValuesArray.push(insertValuesArray);
batchQueryValuesArray.push(queryValuesArray);
}
const query = `INSERT INTO ${isMaster && !dbFullName ? "" : `\`${dbFullName}\`.`}\`${tableName}\` (${batchInsertKeysArray === null || batchInsertKeysArray === void 0 ? void 0 : batchInsertKeysArray.join(",")}) VALUES ${batchInsertValuesArray
.map((vl) => `(${grabQueryValuesString(vl)})`)
.join(",")}`;
// Flatten per-row params into one positional list matching the "?"s.
const finalQueryValues = grabFinalQueryValuesArr(batchQueryValuesArray.flat());
const newInsert = yield (0, conn_db_handler_1.default)(null, query, finalQueryValues, debug);
if (debug) {
(0, debug_log_1.default)({
log: newInsert,
addTime: true,
label: "newInsert",
});
}
return {
success: Boolean(newInsert === null || newInsert === void 0 ? void 0 : newInsert.insertId),
payload: newInsert,
queryObject: {
sql: query,
params: finalQueryValues,
},
};
}
// Neither `data` nor `batchData` was provided.
else {
return {
success: false,
payload: undefined,
msg: "No data provided",
};
}
});
}