Benjamin Toby 2025-08-02 17:14:46 +01:00
parent 9ca64cf25e
commit 71a8431de5
24 changed files with 164 additions and 48 deletions

View File

@ -4,7 +4,7 @@ declare const DataTypes: readonly [{
readonly value: "0-255";
readonly argument: true;
readonly description: "Varchar is simply letters and numbers within the range 0 - 255";
readonly maxValue: 255;
readonly maxValue: 2000;
}, {
readonly title: "TINYINT";
readonly name: "TINYINT";
@ -91,5 +91,10 @@ declare const DataTypes: readonly [{
readonly title: "TIMESTAMP";
readonly name: "TIMESTAMP";
readonly description: "Time Stamp";
}, {
readonly title: "VECTOR";
readonly name: "VECTOR";
readonly description: "Vector Field for vector-based applications";
readonly maxValue: 2147483647;
}];
export default DataTypes;

View File

@ -7,7 +7,7 @@ const DataTypes = [
value: "0-255",
argument: true,
description: "Varchar is simply letters and numbers within the range 0 - 255",
maxValue: 255,
maxValue: 2000,
},
{
title: "TINYINT",
@ -111,5 +111,11 @@ const DataTypes = [
name: "TIMESTAMP",
description: "Time Stamp",
},
{
title: "VECTOR",
name: "VECTOR",
description: "Vector Field for vector-based applications",
maxValue: 2147483647,
},
];
exports.default = DataTypes;

View File

@ -6,7 +6,7 @@ const DataTypes = [
argument: true,
description:
"Varchar is simply letters and numbers within the range 0 - 255",
maxValue: 255,
maxValue: 2000,
},
{
title: "TINYINT",
@ -110,6 +110,12 @@ const DataTypes = [
name: "TIMESTAMP",
description: "Time Stamp",
},
{
title: "VECTOR",
name: "VECTOR",
description: "Vector Field for vector-based applications",
maxValue: 2147483647,
},
] as const;
export default DataTypes;
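Note on the DataTypes change above: the VARCHAR ceiling moves from 255 to 2000, and the new VECTOR entry caps at 2147483647 (the signed 32-bit maximum). A minimal sketch of a schema field that would pick up the new type follows; the fieldName/dataType shape matches the schema lookups later in this commit, but the column name and the 1536 dimension are illustrative assumptions, not part of the commit.

// Sketch only: a hypothetical schema field using the new VECTOR data type.
// The "VECTOR(1536)" argument style mirrors how dataTypeParser splits on "(",
// but the field name and dimension here are assumptions.
const embeddingField = {
    fieldName: "embedding",
    dataType: "VECTOR(1536)",
};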

View File

@ -72,6 +72,7 @@ function addDbEntry(_a) {
}
}
function generateQuery(data) {
var _a, _b;
const dataKeys = Object.keys(data);
let insertKeysArray = [];
let insertValuesArray = [];
@ -79,6 +80,9 @@ function addDbEntry(_a) {
try {
const dataKey = dataKeys[i];
let value = data[dataKey];
const targetFieldSchema = tableSchema
? (_a = tableSchema === null || tableSchema === void 0 ? void 0 : tableSchema.fields) === null || _a === void 0 ? void 0 : _a.find((field) => field.fieldName === dataKey)
: null;
const parsedValue = (0, grab_parsed_value_1.default)({
dataKey,
encryptionKey,
@ -89,7 +93,10 @@ function addDbEntry(_a) {
if (typeof parsedValue == "undefined")
continue;
insertKeysArray.push("`" + dataKey + "`");
if (typeof parsedValue == "number") {
if ((_b = targetFieldSchema === null || targetFieldSchema === void 0 ? void 0 : targetFieldSchema.dataType) === null || _b === void 0 ? void 0 : _b.match(/vector/i)) {
insertValuesArray.push(`VEC_FromText('${parsedValue}')`);
}
else if (typeof parsedValue == "number") {
insertValuesArray.push(String(parsedValue));
}
else {
@ -112,16 +119,32 @@ function addDbEntry(_a) {
const queryValuesArray = insertValuesArray;
return { queryValuesArray, insertValuesArray, insertKeysArray };
}
function grabQueryValuesString(arr) {
return arr
.map((v, i) => {
if (v.toString().match(/VEC_FromText/i)) {
return v;
}
return "?";
})
.join(",");
}
function grabFinalQueryValuesArr(arr) {
return arr
.filter((v) => !v.toString().match(/VEC_FromText/i))
.map((v) => String(v));
}
if (newData) {
const { insertKeysArray, insertValuesArray, queryValuesArray } = generateQuery(newData);
const query = `INSERT INTO ${isMaster && !dbFullName ? "" : `\`${dbFullName}\`.`}\`${tableName}\` (${insertKeysArray.join(",")}) VALUES (${insertValuesArray.map(() => "?").join(",")})`;
const newInsert = yield (0, conn_db_handler_1.default)(null, query, queryValuesArray, debug);
const query = `INSERT INTO ${isMaster && !dbFullName ? "" : `\`${dbFullName}\`.`}\`${tableName}\` (${insertKeysArray.join(",")}) VALUES (${grabQueryValuesString(insertValuesArray)})`;
const finalQueryValues = grabFinalQueryValuesArr(queryValuesArray);
const newInsert = yield (0, conn_db_handler_1.default)(null, query, finalQueryValues, debug);
return {
success: Boolean(newInsert === null || newInsert === void 0 ? void 0 : newInsert.insertId),
payload: newInsert,
queryObject: {
sql: query,
params: queryValuesArray,
params: finalQueryValues,
},
};
}
@ -139,11 +162,10 @@ function addDbEntry(_a) {
batchQueryValuesArray.push(queryValuesArray);
}
const query = `INSERT INTO ${isMaster && !dbFullName ? "" : `\`${dbFullName}\`.`}\`${tableName}\` (${batchInsertKeysArray === null || batchInsertKeysArray === void 0 ? void 0 : batchInsertKeysArray.join(",")}) VALUES ${batchInsertValuesArray
.map((vl) => `(${vl.map(() => "?").join(",")})`)
.map((vl) => `(${grabQueryValuesString(vl)})`)
.join(",")}`;
console.log("query", query);
console.log("batchQueryValuesArray", batchQueryValuesArray);
const newInsert = yield (0, conn_db_handler_1.default)(null, query, batchQueryValuesArray.flat(), debug);
const finalQueryValues = grabFinalQueryValuesArr(batchQueryValuesArray.flat());
const newInsert = yield (0, conn_db_handler_1.default)(null, query, finalQueryValues, debug);
if (debug) {
(0, debug_log_1.default)({
log: newInsert,
@ -156,7 +178,7 @@ function addDbEntry(_a) {
payload: newInsert,
queryObject: {
sql: query,
params: batchQueryValuesArray.flat(),
params: finalQueryValues,
},
};
}

View File

@ -14,11 +14,8 @@ const encrypt_1 = __importDefault(require("../../dsql/encrypt"));
function grabParsedValue({ value, tableSchema, encryptionKey, encryptionSalt, dataKey, }) {
var _a, _b;
let newValue = value;
const targetFieldSchemaArray = tableSchema
? (_a = tableSchema === null || tableSchema === void 0 ? void 0 : tableSchema.fields) === null || _a === void 0 ? void 0 : _a.filter((field) => field.fieldName === dataKey)
: null;
const targetFieldSchema = targetFieldSchemaArray && targetFieldSchemaArray[0]
? targetFieldSchemaArray[0]
const targetFieldSchema = tableSchema
? (_a = tableSchema === null || tableSchema === void 0 ? void 0 : tableSchema.fields) === null || _a === void 0 ? void 0 : _a.find((field) => field.fieldName === dataKey)
: null;
if (typeof newValue == "undefined")
return;

View File

@ -24,6 +24,7 @@ const grab_parsed_value_1 = __importDefault(require("./grab-parsed-value"));
*/
function updateDbEntry(_a) {
return __awaiter(this, arguments, void 0, function* ({ dbContext, dbFullName, tableName, data, tableSchema, identifierColumnName, identifierValue, encryptionKey, encryptionSalt, forceLocal, debug, }) {
var _b, _c;
/**
* Check if data is valid
*/
@ -54,6 +55,9 @@ function updateDbEntry(_a) {
try {
const dataKey = dataKeys[i];
let value = newData[dataKey];
const targetFieldSchema = tableSchema
? (_b = tableSchema === null || tableSchema === void 0 ? void 0 : tableSchema.fields) === null || _b === void 0 ? void 0 : _b.find((field) => field.fieldName === dataKey)
: null;
const parsedValue = (0, grab_parsed_value_1.default)({
dataKey,
encryptionKey,
@ -63,7 +67,12 @@ function updateDbEntry(_a) {
});
if (typeof parsedValue == "undefined")
continue;
if ((_c = targetFieldSchema === null || targetFieldSchema === void 0 ? void 0 : targetFieldSchema.dataType) === null || _c === void 0 ? void 0 : _c.match(/vector/i)) {
updateKeyValueArray.push(`\`${dataKey}\`=VEC_FromText(?)`);
}
else {
updateKeyValueArray.push(`\`${dataKey}\`=?`);
}
if (typeof parsedValue == "number") {
updateValues.push(String(parsedValue));
}

View File

@ -36,6 +36,9 @@ function parseDbResults({ unparsedResults, tableSchema, }) {
});
}
}
if (value && typeof value == "object") {
result[resultFieldName] = "";
}
}
parsedResults.push(result);
}

View File

@ -43,7 +43,11 @@ function handleIndexescreateDbFromSchema(_a) {
* @description Create new index if determined that it
* doesn't exist in MYSQL db
*/
const queryString = `CREATE${indexType == "full_text" ? " FULLTEXT" : ""} INDEX \`${alias}\` ON \`${dbFullName}\`.\`${tableName}\`(${indexTableFields === null || indexTableFields === void 0 ? void 0 : indexTableFields.map((nm) => nm.value).map((nm) => `\`${nm}\``).join(",")}) COMMENT '${(0, grab_dsql_schema_index_comment_1.default)()} ${indexName}'`;
const queryString = `CREATE${indexType == "full_text"
? " FULLTEXT"
: indexType == "vector"
? " VECTOR"
: ""} INDEX \`${alias}\` ON \`${dbFullName}\`.\`${tableName}\`(${indexTableFields === null || indexTableFields === void 0 ? void 0 : indexTableFields.map((nm) => nm.value).map((nm) => `\`${nm}\``).join(",")}) COMMENT '${(0, grab_dsql_schema_index_comment_1.default)()} ${indexName}'`;
const addIndex = yield (0, dbHandler_1.default)({ query: queryString });
}
}

View File

@ -1521,6 +1521,7 @@ export type MediaUploadDataType = ImageObjectType & FileObjectType & {
privateFolder?: boolean;
overwrite?: boolean;
updatedMediaRecord?: DSQL_DATASQUIREL_USER_MEDIA;
existingMediaRecord?: DSQL_DATASQUIREL_USER_MEDIA;
existingMediaRecordId?: number;
};
export declare const ImageMimeTypes: (keyof sharp.FormatEnum)[];
@ -1569,7 +1570,7 @@ export type DefaultEntryType = {
} & {
[k: string]: string | number | null;
};
export declare const IndexTypes: readonly ["regular", "full_text"];
export declare const IndexTypes: readonly ["regular", "full_text", "vector"];
export type LoginUserParam = {
apiKey?: string;
database: string;
@ -1812,6 +1813,10 @@ export type AddMediaAPIBody = {
useDefault?: boolean;
update?: boolean;
};
export type ReplaceMediaAPIBody = {
mediaId: number;
media: MediaUploadDataType;
};
export declare const TargetMediaParadigms: readonly ["info", "preview"];
export type TargetMediaDataType = {
media: DSQL_DATASQUIREL_USER_MEDIA;

View File

@ -186,7 +186,7 @@ exports.DefaultSQLValuesLiteral = [
dataType: "UUID",
},
];
exports.IndexTypes = ["regular", "full_text"];
exports.IndexTypes = ["regular", "full_text", "vector"];
exports.UserSelectFields = [
{
field: "first_name",

View File

@ -4,6 +4,7 @@ export declare const DataTypesWithTwoNumbers: (typeof DataTypes)[number]["name"]
type Return = {
type: (typeof DataTypes)[number]["name"];
limit?: number;
defaultNumber?: number;
decimal?: number;
};
export default function dataTypeParser(dataType?: string): Return;

View File

@ -11,6 +11,7 @@ exports.DataTypesWithNumbers = [
"DOUBLE",
"FLOAT",
"VARCHAR",
"VECTOR",
];
exports.DataTypesWithTwoNumbers = [
"DECIMAL",
@ -21,7 +22,7 @@ function dataTypeParser(dataType) {
if (!dataType) {
return {
type: "VARCHAR",
limit: 250,
defaultNumber: 250,
};
}
const dataTypeArray = dataType.split("(");
@ -43,5 +44,6 @@ function dataTypeParser(dataType) {
return {
type,
limit: number ? (0, numberfy_1.default)(number) : undefined,
defaultNumber: type == "VECTOR" ? 120 : 10,
};
}

View File

@ -1,4 +1,4 @@
declare const APIParadigms: readonly ["crud", "media", "schema"];
import { APIParadigms } from "../types";
type Params = {
version?: string;
paradigm?: (typeof APIParadigms)[number];

View File

@ -1,7 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = grabAPIBasePath;
const APIParadigms = ["crud", "media", "schema"];
function grabAPIBasePath({ version, paradigm }) {
let basePath = `/api/v${version || "1"}`;
if (paradigm) {

View File

@ -6,7 +6,7 @@ const DataTypes = [
argument: true,
description:
"Varchar is simply letters and numbers within the range 0 - 255",
maxValue: 255,
maxValue: 2000,
},
{
title: "TINYINT",
@ -110,6 +110,12 @@ const DataTypes = [
name: "TIMESTAMP",
description: "Time Stamp",
},
{
title: "VECTOR",
name: "VECTOR",
description: "Vector Field for vector-based applications",
maxValue: 2147483647,
},
] as const;
export default DataTypes;

View File

@ -118,13 +118,19 @@ export default async function addDbEntry<
const dataKeys = Object.keys(data);
let insertKeysArray = [];
let insertValuesArray = [];
let insertValuesArray: (string | number)[] = [];
for (let i = 0; i < dataKeys.length; i++) {
try {
const dataKey = dataKeys[i];
let value = data[dataKey];
const targetFieldSchema = tableSchema
? tableSchema?.fields?.find(
(field) => field.fieldName === dataKey
)
: null;
const parsedValue = grabParsedValue({
dataKey,
encryptionKey,
@ -137,7 +143,9 @@ export default async function addDbEntry<
insertKeysArray.push("`" + dataKey + "`");
if (typeof parsedValue == "number") {
if (targetFieldSchema?.dataType?.match(/vector/i)) {
insertValuesArray.push(`VEC_FromText('${parsedValue}')`);
} else if (typeof parsedValue == "number") {
insertValuesArray.push(String(parsedValue));
} else {
insertValuesArray.push(parsedValue);
@ -163,11 +171,28 @@ export default async function addDbEntry<
insertKeysArray.push("`date_updated_code`");
insertValuesArray.push(Date.now());
const queryValuesArray = insertValuesArray;
const queryValuesArray = insertValuesArray as (string | number)[];
return { queryValuesArray, insertValuesArray, insertKeysArray };
}
function grabQueryValuesString(arr: (string | number)[]) {
return arr
.map((v, i) => {
if (v.toString().match(/VEC_FromText/i)) {
return v;
}
return "?";
})
.join(",");
}
function grabFinalQueryValuesArr(arr: (string | number)[]) {
return arr
.filter((v) => !v.toString().match(/VEC_FromText/i))
.map((v) => String(v));
}
if (newData) {
const { insertKeysArray, insertValuesArray, queryValuesArray } =
generateQuery(newData);
@ -176,12 +201,14 @@ export default async function addDbEntry<
isMaster && !dbFullName ? "" : `\`${dbFullName}\`.`
}\`${tableName}\` (${insertKeysArray.join(
","
)}) VALUES (${insertValuesArray.map(() => "?").join(",")})`;
)}) VALUES (${grabQueryValuesString(insertValuesArray)})`;
const finalQueryValues = grabFinalQueryValuesArr(queryValuesArray);
const newInsert = await connDbHandler(
null,
query,
queryValuesArray,
finalQueryValues,
debug
);
@ -190,7 +217,7 @@ export default async function addDbEntry<
payload: newInsert,
queryObject: {
sql: query,
params: queryValuesArray,
params: finalQueryValues,
},
};
} else if (newBatchData) {
@ -216,16 +243,17 @@ export default async function addDbEntry<
}\`${tableName}\` (${batchInsertKeysArray?.join(
","
)}) VALUES ${batchInsertValuesArray
.map((vl) => `(${vl.map(() => "?").join(",")})`)
.map((vl) => `(${grabQueryValuesString(vl)})`)
.join(",")}`;
console.log("query", query);
console.log("batchQueryValuesArray", batchQueryValuesArray);
const finalQueryValues = grabFinalQueryValuesArr(
batchQueryValuesArray.flat()
);
const newInsert = await connDbHandler(
null,
query,
batchQueryValuesArray.flat(),
finalQueryValues,
debug
);
@ -242,7 +270,7 @@ export default async function addDbEntry<
payload: newInsert,
queryObject: {
sql: query,
params: batchQueryValuesArray.flat(),
params: finalQueryValues,
},
};
} else {

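The addDbEntry change above inlines vector values as VEC_FromText(...) expressions in the SQL text, while every other value stays a bound "?" parameter. Below is a minimal sketch of that placeholder/params split, mirroring grabQueryValuesString and grabFinalQueryValuesArr; the column names, table name, and the '[0.12,0.34,0.56]' vector-text format are illustrative assumptions.

// Sketch only: deriving placeholders and bound params once generateQuery has
// wrapped a vector value in VEC_FromText(...).
const insertKeysArray = ["`title`", "`embedding`"]; // assumed columns
const insertValuesArray: (string | number)[] = [
    "Hello world",
    "VEC_FromText('[0.12,0.34,0.56]')", // assumed vector-text format
];

// Values already wrapped in VEC_FromText are inlined; the rest become "?".
const placeholders = insertValuesArray
    .map((v) => (v.toString().match(/VEC_FromText/i) ? v : "?"))
    .join(",");

// Only non-vector values are handed to the driver as bound params.
const params = insertValuesArray
    .filter((v) => !v.toString().match(/VEC_FromText/i))
    .map((v) => String(v));

// placeholders => "?,VEC_FromText('[0.12,0.34,0.56]')"
// params       => ["Hello world"]
// Resulting shape (assumed names):
// INSERT INTO `my_db`.`posts` (`title`,`embedding`)
//   VALUES (?,VEC_FromText('[0.12,0.34,0.56]'))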
View File

@ -25,12 +25,8 @@ export default function grabParsedValue({
}: Param): any {
let newValue = value;
const targetFieldSchemaArray = tableSchema
? tableSchema?.fields?.filter((field) => field.fieldName === dataKey)
: null;
const targetFieldSchema =
targetFieldSchemaArray && targetFieldSchemaArray[0]
? targetFieldSchemaArray[0]
const targetFieldSchema = tableSchema
? tableSchema?.fields?.find((field) => field.fieldName === dataKey)
: null;
if (typeof newValue == "undefined") return;

View File

@ -80,6 +80,12 @@ export default async function updateDbEntry<
const dataKey = dataKeys[i];
let value = newData[dataKey];
const targetFieldSchema = tableSchema
? tableSchema?.fields?.find(
(field) => field.fieldName === dataKey
)
: null;
const parsedValue = grabParsedValue({
dataKey,
encryptionKey,
@ -90,7 +96,11 @@ export default async function updateDbEntry<
if (typeof parsedValue == "undefined") continue;
if (targetFieldSchema?.dataType?.match(/vector/i)) {
updateKeyValueArray.push(`\`${dataKey}\`=VEC_FromText(?)`);
} else {
updateKeyValueArray.push(`\`${dataKey}\`=?`);
}
if (typeof parsedValue == "number") {
updateValues.push(String(parsedValue));

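Note the asymmetry with the insert path: updateDbEntry keeps the vector value as a bound parameter and only wraps the placeholder, producing a SET fragment of the form `column`=VEC_FromText(?). A minimal sketch under assumed column names and an assumed serialized vector:

// Sketch only: the SET clause pieces for a vector column vs a plain column.
const updateKeyValueArray: string[] = [];
const updateValues: string[] = [];

// Vector column: the placeholder is wrapped, the value stays a bound param.
updateKeyValueArray.push("`embedding`=VEC_FromText(?)");
updateValues.push("[0.12,0.34,0.56]"); // assumed vector-text format

// Plain column: ordinary placeholder.
updateKeyValueArray.push("`title`=?");
updateValues.push("Hello world");

// => UPDATE `my_db`.`posts` SET `embedding`=VEC_FromText(?),`title`=? WHERE ...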
View File

@ -49,6 +49,10 @@ export default function parseDbResults({
});
}
}
if (value && typeof value == "object") {
result[resultFieldName] = "";
}
}
parsedResults.push(result);

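The new guard above blanks out any cell value the driver returns as an object before the row is serialized; a plausible motivation is the binary Buffer a VECTOR column would come back as, though that is an assumption rather than something stated in the commit. A minimal sketch:

// Sketch only: object-typed cell values (e.g. a Buffer -- an assumption about
// what the driver returns for VECTOR columns) are replaced with "".
const result: Record<string, any> = {};
const resultFieldName = "embedding"; // assumed column
const value: unknown = Buffer.from([0x01, 0x02]); // assumed driver value

if (value && typeof value == "object") {
    result[resultFieldName] = "";
}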
View File

@ -47,7 +47,11 @@ export default async function handleIndexescreateDbFromSchema({
* doesn't exist in MYSQL db
*/
const queryString = `CREATE${
indexType == "full_text" ? " FULLTEXT" : ""
indexType == "full_text"
? " FULLTEXT"
: indexType == "vector"
? " VECTOR"
: ""
} INDEX \`${alias}\` ON \`${dbFullName}\`.\`${tableName}\`(${indexTableFields
?.map((nm) => nm.value)
.map((nm) => `\`${nm}\``)

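For reference, the branch added above makes the generated statement a CREATE VECTOR INDEX when indexType is "vector". A sketch of the resulting SQL shape follows; the database, table, alias, and field names are illustrative assumptions, and the COMMENT prefix stands in for whatever grabDsqlSchemaIndexComment() returns.

// Sketch only: the SQL shape produced when indexType == "vector" (assumed names).
const exampleVectorIndexQuery =
    "CREATE VECTOR INDEX `embedding_idx` ON `my_db`.`posts`(`embedding`) " +
    "COMMENT '<schema-index-comment> embedding_idx'";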
View File

@ -1813,6 +1813,7 @@ export type MediaUploadDataType = ImageObjectType &
privateFolder?: boolean;
overwrite?: boolean;
updatedMediaRecord?: DSQL_DATASQUIREL_USER_MEDIA;
existingMediaRecord?: DSQL_DATASQUIREL_USER_MEDIA;
existingMediaRecordId?: number;
};
@ -1921,7 +1922,7 @@ export type DefaultEntryType = {
[k: string]: string | number | null;
};
export const IndexTypes = ["regular", "full_text"] as const;
export const IndexTypes = ["regular", "full_text", "vector"] as const;
export type LoginUserParam = {
apiKey?: string;
@ -2344,6 +2345,11 @@ export type AddMediaAPIBody = {
update?: boolean;
};
export type ReplaceMediaAPIBody = {
mediaId: number;
media: MediaUploadDataType;
};
export const TargetMediaParadigms = ["info", "preview"] as const;
export type TargetMediaDataType = {

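The type changes above add "vector" to IndexTypes and introduce ReplaceMediaAPIBody, which pairs a numeric mediaId with a full MediaUploadDataType payload. A minimal sketch of a conforming value; the import path, id, and media fields shown are assumptions:

// Sketch only: a value matching the new ReplaceMediaAPIBody type.
import { ReplaceMediaAPIBody, MediaUploadDataType } from "./types"; // assumed path

const replaceBody: ReplaceMediaAPIBody = {
    mediaId: 42, // assumed id
    media: {
        // ...ImageObjectType / FileObjectType fields omitted for brevity
    } as MediaUploadDataType,
};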
View File

@ -6,6 +6,7 @@ export const DataTypesWithNumbers: (typeof DataTypes)[number]["name"][] = [
"DOUBLE",
"FLOAT",
"VARCHAR",
"VECTOR",
];
export const DataTypesWithTwoNumbers: (typeof DataTypes)[number]["name"][] = [
@ -17,6 +18,7 @@ export const DataTypesWithTwoNumbers: (typeof DataTypes)[number]["name"][] = [
type Return = {
type: (typeof DataTypes)[number]["name"];
limit?: number;
defaultNumber?: number;
decimal?: number;
};
@ -24,7 +26,7 @@ export default function dataTypeParser(dataType?: string): Return {
if (!dataType) {
return {
type: "VARCHAR",
limit: 250,
defaultNumber: 250,
};
}
@ -50,5 +52,6 @@ export default function dataTypeParser(dataType?: string): Return {
return {
type,
limit: number ? numberfy(number) : undefined,
defaultNumber: type == "VECTOR" ? 120 : 10,
};
}

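Based on the dataTypeParser change above, the no-argument default now reports defaultNumber: 250 instead of limit: 250, and every parsed type carries a defaultNumber (120 for VECTOR, 10 otherwise). A quick sketch of the expected return shapes, assuming a single-number type like VECTOR(1536) falls through to the final return shown in the hunk; the import path and the 1536 dimension are assumptions.

// Sketch only: expected return shapes after this change.
import dataTypeParser from "./data-type-parser"; // assumed path

dataTypeParser();               // => { type: "VARCHAR", defaultNumber: 250 }
dataTypeParser("VECTOR(1536)"); // => { type: "VECTOR", limit: 1536, defaultNumber: 120 }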
View File

@ -1,4 +1,4 @@
const APIParadigms = ["crud", "media", "schema"] as const;
import { APIParadigms } from "../types";
type Params = {
version?: string;

View File

@ -1,6 +1,6 @@
{
"name": "@moduletrace/datasquirel",
"version": "5.1.3",
"version": "5.1.4",
"description": "Cloud-based SQL data management tool",
"main": "dist/index.js",
"bin": {