Benjamin Toby 2025-06-04 08:43:16 +01:00
parent 1364074c2c
commit 761348de08
37 changed files with 577 additions and 140 deletions

View File

@ -22,7 +22,7 @@ const grab_query_and_values_1 = __importDefault(require("../../../utils/grab-que
*/
function apiGet(_a) {
return __awaiter(this, arguments, void 0, function* ({ query, dbFullName, queryValues, tableName, dbSchema, debug, dbContext, forceLocal, }) {
var _b;
var _b, _c;
const queryAndValues = (0, grab_query_and_values_1.default)({
query,
values: queryValues,
@ -49,7 +49,7 @@ function apiGet(_a) {
}
let tableSchema;
if (dbSchema) {
const targetTable = dbSchema.tables.find((table) => table.tableName === tableName);
const targetTable = (_b = dbSchema.tables) === null || _b === void 0 ? void 0 : _b.find((table) => table.tableName === tableName);
if (targetTable) {
const clonedTargetTable = lodash_1.default.cloneDeep(targetTable);
delete clonedTargetTable.childTable;
@ -79,7 +79,7 @@ function apiGet(_a) {
component: "/api/query/get/lines-85-94",
message: error.message,
});
(_b = global.ERROR_CALLBACK) === null || _b === void 0 ? void 0 : _b.call(global, `API Get Error`, error);
(_c = global.ERROR_CALLBACK) === null || _c === void 0 ? void 0 : _c.call(global, `API Get Error`, error);
if (debug && global.DSQL_USE_LOCAL) {
console.log("apiGet:error", error.message);
console.log("queryAndValues", queryAndValues);

View File

@ -20,6 +20,6 @@ export default function apiCreateUser({ encryptionKey, payload, database, userId
} | {
success: boolean;
msg: string;
sqlResult: any;
sqlResult: import("../../../types").PostInsertReturn | null;
payload: null;
}>;

View File

@ -0,0 +1,7 @@
import { DSQL_DATASQUIREL_USER_DATABASES } from "@/package-shared/types/dsql";
type Params = {
userId: number | string;
database: DSQL_DATASQUIREL_USER_DATABASES;
};
export default function createDbSchemaFromDb({ userId, database, }: Params): Promise<boolean | undefined>;
export {};
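
A minimal usage sketch for the new helper. The import path for createDbSchemaFromDb is an assumption (the declaration above does not show where the module lives); the userId and database row are hypothetical, and the row shape follows the DSQL_DATASQUIREL_USER_DATABASES type imported above.

import createDbSchemaFromDb from "@/package-shared/api/create-db-schema-from-db"; // path assumed
import { DSQL_DATASQUIREL_USER_DATABASES } from "@/package-shared/types/dsql";

async function rebuildSchema(database: DSQL_DATASQUIREL_USER_DATABASES) {
    // Resolves to true on success, false on error, and undefined when no userId is given.
    const ok = await createDbSchemaFromDb({ userId: 1, database });
    if (!ok) console.log("Schema was not regenerated");
}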

View File

@ -0,0 +1,132 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = createDbSchemaFromDb;
const varDatabaseDbHandler_1 = __importDefault(require("../../functions/backend/varDatabaseDbHandler"));
const grabUserSchemaData_1 = __importDefault(require("../../functions/backend/grabUserSchemaData"));
const setUserSchemaData_1 = __importDefault(require("../../functions/backend/setUserSchemaData"));
const addDbEntry_1 = __importDefault(require("../../functions/backend/db/addDbEntry"));
const slugToCamelTitle_1 = __importDefault(require("../../shell/utils/slugToCamelTitle"));
function createDbSchemaFromDb(_a) {
return __awaiter(this, arguments, void 0, function* ({ userId, database, }) {
var _b, _c, _d, _e, _f, _g;
try {
if (!userId) {
console.log("No user Id provided");
return;
}
const userSchemaData = (0, grabUserSchemaData_1.default)({ userId });
if (!userSchemaData)
throw new Error("User schema data not found!");
const targetDb = userSchemaData.filter((dbObject) => dbObject.dbFullName === database.db_full_name)[0];
const existingTables = yield (0, varDatabaseDbHandler_1.default)({
database: database.db_full_name,
queryString: `SHOW TABLES FROM ${database.db_full_name}`,
});
if (!existingTables)
throw new Error("No Existing Tables");
for (let i = 0; i < existingTables.length; i++) {
const table = existingTables[i];
const tableName = Object.values(table)[0];
const tableInsert = yield (0, addDbEntry_1.default)({
dbFullName: "datasquirel",
tableName: "user_database_tables",
data: {
user_id: userId,
db_id: database.id,
db_slug: database.db_slug,
table_name: (0, slugToCamelTitle_1.default)(tableName),
table_slug: tableName,
},
});
const tableObject = {
tableName: tableName,
tableFullName: (0, slugToCamelTitle_1.default)(tableName) || "",
fields: [],
indexes: [],
};
const tableColumns = yield (0, varDatabaseDbHandler_1.default)({
database: database.db_full_name,
queryString: `SHOW COLUMNS FROM ${database.db_full_name}.${tableName}`,
});
if (tableColumns) {
for (let k = 0; k < tableColumns.length; k++) {
const tableColumn = tableColumns[k];
const { Field, Type, Null, Key, Default, Extra } = tableColumn;
const fieldObject = {
fieldName: Field,
dataType: Type.toUpperCase(),
};
if (Null === null || Null === void 0 ? void 0 : Null.match(/^no$/i))
fieldObject.notNullValue = true;
if (Key === null || Key === void 0 ? void 0 : Key.match(/^pri$/i))
fieldObject.primaryKey = true;
if ((_b = Default === null || Default === void 0 ? void 0 : Default.toString()) === null || _b === void 0 ? void 0 : _b.match(/./))
fieldObject.defaultValue = Default;
if ((_c = Default === null || Default === void 0 ? void 0 : Default.toString()) === null || _c === void 0 ? void 0 : _c.match(/timestamp/i)) {
delete fieldObject.defaultValue;
fieldObject.defaultValueLiteral = Default;
}
if ((_d = Extra === null || Extra === void 0 ? void 0 : Extra.toString()) === null || _d === void 0 ? void 0 : _d.match(/auto_increment/i))
fieldObject.autoIncrement = true;
tableObject.fields.push(fieldObject);
}
}
const tableIndexes = yield (0, varDatabaseDbHandler_1.default)({
database: database.db_full_name,
queryString: `SHOW INDEXES FROM ${database.db_full_name}.${tableName}`,
});
if (tableIndexes) {
for (let m = 0; m < tableIndexes.length; m++) {
const indexObject = tableIndexes[m];
const { Table, Key_name, Column_name, Null, Index_type, Index_comment, } = indexObject;
if (!(Index_comment === null || Index_comment === void 0 ? void 0 : Index_comment.match(/^schema_index$/)))
continue;
const indexNewObject = {
indexType: (Index_type === null || Index_type === void 0 ? void 0 : Index_type.match(/fulltext/i))
? "fullText"
: "regular",
indexName: Key_name,
indexTableFields: [],
};
const targetTableFieldObject = tableColumns === null || tableColumns === void 0 ? void 0 : tableColumns.filter((col) => col.Field === Column_name)[0];
const existingIndexField = (_e = tableObject.indexes) === null || _e === void 0 ? void 0 : _e.filter((indx) => indx.indexName == Key_name);
if (existingIndexField && existingIndexField[0]) {
(_f = existingIndexField[0].indexTableFields) === null || _f === void 0 ? void 0 : _f.push({
value: Column_name,
dataType: targetTableFieldObject.Type.toUpperCase(),
});
}
else {
indexNewObject.indexTableFields = [
{
value: Column_name,
dataType: targetTableFieldObject.Type.toUpperCase(),
},
];
(_g = tableObject.indexes) === null || _g === void 0 ? void 0 : _g.push(indexNewObject);
}
}
}
targetDb.tables.push(tableObject);
}
(0, setUserSchemaData_1.default)({ schemaData: userSchemaData, userId });
return true;
}
catch (error) {
console.log(error);
return false;
}
});
}

View File

@ -1,10 +1,13 @@
import { DbContextsArray } from "./runQuery";
type Param = {
import { PostInsertReturn } from "../../../types";
type Param<T extends {
[k: string]: any;
} = any> = {
dbContext?: (typeof DbContextsArray)[number];
paradigm?: "Read Only" | "Full Access";
dbFullName?: string;
tableName: string;
data: any;
data: T;
tableSchema?: import("../../../types").DSQL_TableSchemaType;
duplicateColumnName?: string;
duplicateColumnValue?: string;
@ -17,5 +20,7 @@ type Param = {
/**
* Add a db Entry Function
*/
export default function addDbEntry({ dbContext, paradigm, dbFullName, tableName, data, tableSchema, duplicateColumnName, duplicateColumnValue, update, encryptionKey, encryptionSalt, forceLocal, debug, }: Param): Promise<any>;
export default function addDbEntry<T extends {
[k: string]: any;
} = any>({ dbContext, paradigm, dbFullName, tableName, data, tableSchema, duplicateColumnName, duplicateColumnValue, update, encryptionKey, encryptionSalt, forceLocal, debug, }: Param<T>): Promise<PostInsertReturn | null>;
export {};
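
A sketch of the new generic signature. The table name and row type are hypothetical, the relative import path follows the one used in createDbSchemaFromDb above and will vary with the caller's location, and reading insertId off the PostInsertReturn result is an assumption based on the newDbEntry.insertId check in checkDbRecordCreateDbSchema further down.

import addDbEntry from "../../functions/backend/db/addDbEntry"; // relative path varies per caller

type BlogPost = { title: string; slug: string };

async function insertPost(post: BlogPost) {
    // T types the `data` payload; the promise now resolves to PostInsertReturn | null instead of any.
    const result = await addDbEntry<BlogPost>({
        tableName: "blog_posts",
        data: post,
    });
    return result?.insertId; // insertId assumed on PostInsertReturn
}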

View File

@ -26,9 +26,6 @@ const debug_log_1 = __importDefault(require("../../../utils/logging/debug-log"))
function addDbEntry(_a) {
return __awaiter(this, arguments, void 0, function* ({ dbContext, paradigm, dbFullName, tableName, data, tableSchema, duplicateColumnName, duplicateColumnValue, update, encryptionKey, encryptionSalt, forceLocal, debug, }) {
var _b, _c, _d;
/**
* Initialize variables
*/
const isMaster = forceLocal
? true
: (0, check_if_is_master_1.default)({ dbContext, dbFullName });
@ -45,9 +42,6 @@ function addDbEntry(_a) {
const DB_RO_CONN = isMaster
? global.DSQL_DB_CONN
: global.DSQL_READ_ONLY_DB_CONN || global.DSQL_DB_CONN;
////////////////////////////////////////
////////////////////////////////////////
////////////////////////////////////////
if (data === null || data === void 0 ? void 0 : data["date_created_timestamp"])
delete data["date_created_timestamp"];
if (data === null || data === void 0 ? void 0 : data["date_updated_timestamp"])
@ -60,9 +54,6 @@ function addDbEntry(_a) {
delete data["date_created"];
if (data === null || data === void 0 ? void 0 : data["date_created_code"])
delete data["date_created_code"];
////////////////////////////////////////
////////////////////////////////////////
////////////////////////////////////////
if (duplicateColumnName && typeof duplicateColumnName === "string") {
const checkDuplicateQuery = `SELECT * FROM ${isMaster ? "" : `\`${dbFullName}\`.`}\`${tableName}\` WHERE \`${duplicateColumnName}\`=?`;
const duplicateValue = yield (0, conn_db_handler_1.default)(DB_RO_CONN, checkDuplicateQuery, [duplicateColumnValue]);
@ -83,18 +74,12 @@ function addDbEntry(_a) {
});
}
}
/**
* Declare variables
*
* @description Declare "results" variable
*/
const dataKeys = Object.keys(data);
let insertKeysArray = [];
let insertValuesArray = [];
for (let i = 0; i < dataKeys.length; i++) {
try {
const dataKey = dataKeys[i];
// @ts-ignore
let value = data === null || data === void 0 ? void 0 : data[dataKey];
const targetFieldSchemaArray = tableSchema
? (_b = tableSchema === null || tableSchema === void 0 ? void 0 : tableSchema.fields) === null || _b === void 0 ? void 0 : _b.filter((field) => field.fieldName == dataKey)
@ -144,7 +129,6 @@ function addDbEntry(_a) {
continue;
}
}
////////////////////////////////////////
if (!(data === null || data === void 0 ? void 0 : data["date_created"])) {
insertKeysArray.push("`date_created`");
insertValuesArray.push(Date());
@ -153,7 +137,6 @@ function addDbEntry(_a) {
insertKeysArray.push("`date_created_code`");
insertValuesArray.push(Date.now());
}
////////////////////////////////////////
if (!(data === null || data === void 0 ? void 0 : data["date_updated"])) {
insertKeysArray.push("`date_updated`");
insertValuesArray.push(Date());
@ -162,7 +145,6 @@ function addDbEntry(_a) {
insertKeysArray.push("`date_updated_code`");
insertValuesArray.push(Date.now());
}
////////////////////////////////////////
const query = `INSERT INTO ${isMaster ? "" : `\`${dbFullName}\`.`}\`${tableName}\` (${insertKeysArray.join(",")}) VALUES (${insertValuesArray
.map(() => "?")
.join(",")})`;
@ -192,9 +174,6 @@ function addDbEntry(_a) {
label: "newInsert",
});
}
/**
* Return statement
*/
return newInsert;
});
}

View File

@ -1,5 +1,8 @@
import { DbContextsArray } from "./runQuery";
type Param = {
import { PostInsertReturn } from "../../../types";
type Param<T extends {
[k: string]: any;
} = any> = {
dbContext?: (typeof DbContextsArray)[number];
dbFullName?: string;
tableName: string;
@ -7,7 +10,7 @@ type Param = {
encryptionSalt?: string;
data: any;
tableSchema?: import("../../../types").DSQL_TableSchemaType;
identifierColumnName: string;
identifierColumnName: keyof T;
identifierValue: string | number;
forceLocal?: boolean;
};
@ -15,5 +18,7 @@ type Param = {
* # Update DB Function
* @description
*/
export default function updateDbEntry({ dbContext, dbFullName, tableName, data, tableSchema, identifierColumnName, identifierValue, encryptionKey, encryptionSalt, forceLocal, }: Param): Promise<object | null>;
export default function updateDbEntry<T extends {
[k: string]: any;
} = any>({ dbContext, dbFullName, tableName, data, tableSchema, identifierColumnName, identifierValue, encryptionKey, encryptionSalt, forceLocal, }: Param<T>): Promise<PostInsertReturn | null>;
export {};
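
A sketch of how the keyof T constraint on identifierColumnName might be used; the table, row type, and import path are hypothetical.

import updateDbEntry from "../../functions/backend/db/updateDbEntry"; // path assumed

type UserRow = { id: number; first_name: string };

async function renameUser(id: number, firstName: string) {
    // identifierColumnName must be a key of UserRow, so a typo such as "idd"
    // now fails at compile time; the result is PostInsertReturn | null.
    return updateDbEntry<UserRow>({
        tableName: "users",
        data: { first_name: firstName },
        identifierColumnName: "id",
        identifierValue: id,
    });
}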

View File

@ -1,6 +1,9 @@
import { DSQL_DatabaseSchemaType } from "@/package-shared/types";
type Params = {
userId?: string | number;
};
/**
* # Grab User Schema Data
*/
export default function grabUserSchemaData({ userId, }: {
userId: string | number;
}): import("../../types").DSQL_DatabaseSchemaType[] | null;
export default function grabUserSchemaData({ userId, }: Params): DSQL_DatabaseSchemaType[] | null;
export {};

View File

@ -6,16 +6,18 @@ Object.defineProperty(exports, "__esModule", { value: true });
exports.default = grabUserSchemaData;
const serverError_1 = __importDefault(require("./serverError"));
const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
const grab_dir_names_1 = __importDefault(require("@/package-shared/utils/backend/names/grab-dir-names"));
const client_exports_1 = require("@/client-exports");
/**
* # Grab User Schema Data
*/
function grabUserSchemaData({ userId, }) {
var _a;
try {
const userSchemaFilePath = path_1.default.resolve(process.cwd(), `${process.env.DSQL_USER_DB_SCHEMA_PATH}/user-${userId}/main.json`);
const userSchemaData = JSON.parse(fs_1.default.readFileSync(userSchemaFilePath, "utf-8"));
return userSchemaData;
const { userSchemaMainJSONFilePath } = (0, grab_dir_names_1.default)({ userId });
const schemaJSON = fs_1.default.readFileSync(userSchemaMainJSONFilePath || "", "utf-8");
const schemaObj = client_exports_1.EJSON.parse(schemaJSON);
return schemaObj;
}
catch (error) {
(0, serverError_1.default)({

View File

@ -6,15 +6,18 @@ Object.defineProperty(exports, "__esModule", { value: true });
exports.default = setUserSchemaData;
const serverError_1 = __importDefault(require("./serverError"));
const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
const grab_dir_names_1 = __importDefault(require("@/package-shared/utils/backend/names/grab-dir-names"));
/**
* # Set User Schema Data
*/
function setUserSchemaData({ userId, schemaData, }) {
var _a;
try {
const userSchemaFilePath = path_1.default.resolve(process.cwd(), `${process.env.DSQL_USER_DB_SCHEMA_PATH}/user-${userId}/main.json`);
fs_1.default.writeFileSync(userSchemaFilePath, JSON.stringify(schemaData), "utf8");
const { userSchemaMainJSONFilePath } = (0, grab_dir_names_1.default)({ userId });
if (!userSchemaMainJSONFilePath) {
throw new Error(`No User Schema JSON found!`);
}
fs_1.default.writeFileSync(userSchemaMainJSONFilePath, JSON.stringify(schemaData), "utf8");
return true;
}
catch (error) {

View File

@ -11,7 +11,7 @@ const ejson_1 = __importDefault(require("../../utils/ejson"));
const generate_type_definitions_1 = __importDefault(require("./generate-type-definitions"));
const path_1 = __importDefault(require("path"));
function dbSchemaToType(params) {
var _a;
var _a, _b;
let datasquirelSchema;
const defaultTableFieldsJSONFilePath = path_1.default.resolve(__dirname, "../../data/defaultFields.json");
if (params === null || params === void 0 ? void 0 : params.dbSchema) {
@ -39,10 +39,8 @@ function dbSchemaToType(params) {
...newDefaultFields,
] });
});
const defDbName = (datasquirelSchema.dbName ||
((_a = datasquirelSchema.dbFullName) === null || _a === void 0 ? void 0 : _a.replace(/datasquirel_user_\d+_/, "")))
.toUpperCase()
.replace(/ /g, "_");
const defDbName = (_b = (datasquirelSchema.dbName ||
((_a = datasquirelSchema.dbFullName) === null || _a === void 0 ? void 0 : _a.replace(/datasquirel_user_\d+_/, "")))) === null || _b === void 0 ? void 0 : _b.toUpperCase().replace(/ /g, "_");
const schemas = dbTablesSchemas
.map((table) => (0, generate_type_definitions_1.default)({
paradigm: "TypeScript",

View File

@ -24,7 +24,10 @@ function sqlGenerator({ tableName, genObject, dbFullName, count }) {
return field;
})();
let str = `${finalFieldName}=?`;
if (typeof queryObj.value == "string" ||
if (queryObj.nullValue) {
str = `${finalFieldName} IS NULL`;
}
else if (typeof queryObj.value == "string" ||
typeof queryObj.value == "number") {
const valueParsed = String(queryObj.value);
if (queryObj.equality == "LIKE") {
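
To illustrate the new branch: a ServerQueryObject with nullValue set now yields an IS NULL condition instead of a parameterized equality check. The column names below are hypothetical; ServerQueryObject comes from the types file updated later in this commit.

import { ServerQueryObject } from "@/package-shared/types";

// Produces `deleted_at IS NULL` for a hypothetical deleted_at column.
const deletedAtCondition: ServerQueryObject = {
    nullValue: true,
};

// Still produces `status=?` with "active" bound as the value.
const statusCondition: ServerQueryObject = {
    value: "active",
};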

View File

@ -28,7 +28,7 @@ function checkDbRecordCreateDbSchema(_a) {
let recordedDbEntryArray = userId
? yield (0, varDatabaseDbHandler_1.default)({
queryString: `SELECT * FROM datasquirel.user_databases WHERE db_full_name = ?`,
queryValuesArray: [dbFullName],
queryValuesArray: [dbFullName || "NULL"],
})
: undefined;
let recordedDbEntry = recordedDbEntryArray === null || recordedDbEntryArray === void 0 ? void 0 : recordedDbEntryArray[0];
@ -51,7 +51,7 @@ function checkDbRecordCreateDbSchema(_a) {
if (newDbEntry.insertId) {
recordedDbEntryArray = yield (0, varDatabaseDbHandler_1.default)({
queryString: `SELECT * FROM datasquirel.user_databases WHERE db_full_name = ?`,
queryValuesArray: [dbFullName],
queryValuesArray: [dbFullName || "NULL"],
});
recordedDbEntry = recordedDbEntryArray === null || recordedDbEntryArray === void 0 ? void 0 : recordedDbEntryArray[0];
}

View File

@ -44,6 +44,8 @@ function createDbFromSchema(_a) {
for (let i = 0; i < dbSchema.length; i++) {
const database = dbSchema[i];
const { dbFullName, tables, dbSlug, childrenDatabases } = database;
if (!dbFullName)
continue;
if (targetDatabase && dbFullName != targetDatabase) {
continue;
}
@ -188,10 +190,11 @@ function createDbFromSchema(_a) {
if (childrenDatabases === null || childrenDatabases === void 0 ? void 0 : childrenDatabases[0]) {
for (let ch = 0; ch < childrenDatabases.length; ch++) {
const childDb = childrenDatabases[ch];
const { dbFullName } = childDb;
const { dbId } = childDb;
const targetDatabase = dbSchema.find((dbSch) => dbSch.id == dbId);
yield createDbFromSchema({
userId,
targetDatabase: dbFullName,
targetDatabase: targetDatabase === null || targetDatabase === void 0 ? void 0 : targetDatabase.dbFullName,
});
}
}

View File

@ -1,11 +1,13 @@
import type { RequestOptions } from "https";
import { DSQL_DATASQUIREL_PROCESS_QUEUE, DSQL_DATASQUIREL_USER_DATABASES } from "./dsql";
import { DSQL_DATASQUIREL_PROCESS_QUEUE, DSQL_DATASQUIREL_USER_DATABASE_TABLES, DSQL_DATASQUIREL_USER_DATABASES, DSQL_DATASQUIREL_USER_MEDIA } from "./dsql";
import { Editor } from "tinymce";
import sharp from "sharp";
export type DSQL_DatabaseFullName = string;
export interface DSQL_DatabaseSchemaType {
dbName: string;
dbSlug: string;
dbFullName: string;
id?: number | string;
dbName?: string;
dbSlug?: string;
dbFullName?: string;
dbDescription?: string;
dbImage?: string;
tables: DSQL_TableSchemaType[];
@ -15,9 +17,11 @@ export interface DSQL_DatabaseSchemaType {
updateData?: boolean;
}
export interface DSQL_ChildrenDatabaseObject {
dbFullName: string;
dbId?: string | number;
dbFullName?: string;
}
export interface DSQL_TableSchemaType {
id?: number | string;
tableName: string;
tableFullName: string;
tableDescription?: string;
@ -482,10 +486,10 @@ export interface LoginFormContextType {
}
export interface CreateAccountContextType {
user?: UserType | null;
query: CreateAccountQueryType;
query: InviteObjectType;
invitingUser: any;
}
export interface CreateAccountQueryType {
export interface InviteObjectType {
invite?: number;
database_access?: string;
priviledge?: string;
@ -707,8 +711,9 @@ export interface DbConnectContextType {
}
export interface ImageObjectType {
imageName?: string;
mimeType?: string;
mimeType?: keyof sharp.FormatEnum | sharp.AvailableFormatInfo;
imageSize?: number;
thumbnailSize?: number;
private?: boolean;
imageBase64?: string;
imageBase64Full?: string;
@ -971,6 +976,7 @@ export type ServerQueryObject<T extends object = {
[key: string]: any;
}> = {
value?: string | string[];
nullValue?: boolean;
operator?: (typeof ServerQueryOperators)[number];
equality?: (typeof ServerQueryEqualities)[number];
tableName?: string;
@ -1422,10 +1428,20 @@ export type PagePropsType = {
pageUrl?: string | null;
query?: any;
databases?: DSQL_DATASQUIREL_USER_DATABASES[] | null;
database?: DSQL_DATASQUIREL_USER_DATABASES | null;
databaseTables?: DSQL_DATASQUIREL_USER_DATABASE_TABLES[] | null;
databaseTable?: DSQL_DATASQUIREL_USER_DATABASE_TABLES | null;
dbCount?: number | null;
tableCount?: number | null;
mediaCount?: number | null;
apiKeysCount?: number | null;
databaseSchema?: DSQL_DatabaseSchemaType | null;
tableSchema?: DSQL_TableSchemaType | null;
userMedia?: DSQL_DATASQUIREL_USER_MEDIA[] | null;
mediaCurrentFolder?: string | null;
appData?: DsqlAppData | null;
staticHost?: string | null;
folders?: string[] | null;
};
export type APIResponseObject<T extends any = any> = {
success: boolean;
@ -1535,4 +1551,9 @@ export type DsqlAppData = {
DSQL_FACEBOOK_APP_ID?: string;
DSQL_GITHUB_ID?: string;
};
export declare const MediaTypes: readonly ["image", "file"];
export type MediaUploadDataType = ImageObjectType & FileObjectType & {
private?: boolean;
};
export declare const ImageMimeTypes: (keyof sharp.FormatEnum)[];
export {};
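
A small sketch of the tightened mimeType typing and the new ImageMimeTypes export; the sample values are hypothetical.

import sharp from "sharp";
import { ImageObjectType, ImageMimeTypes } from "@/package-shared/types";

// mimeType is now keyed to sharp's format names: "webp" type-checks,
// a free-form string like "image/webp" no longer does.
const mimeType: ImageObjectType["mimeType"] = "webp";

// The new runtime array can act as an allow-list for the same formats.
const isSupportedImage = (value: string) =>
    ImageMimeTypes.includes(value as keyof sharp.FormatEnum);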

View File

@ -1,6 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.DockerComposeServices = exports.DatasquirelWindowEvents = exports.WebSocketEvents = exports.QueueJobTypes = exports.SignUpParadigms = exports.UserTypes = exports.DsqlCrudActions = exports.DataCrudRequestMethods = exports.ServerQueryEqualities = exports.ServerQueryOperators = exports.TextFieldTypesArray = void 0;
exports.ImageMimeTypes = exports.MediaTypes = exports.DockerComposeServices = exports.DatasquirelWindowEvents = exports.WebSocketEvents = exports.QueueJobTypes = exports.SignUpParadigms = exports.UserTypes = exports.DsqlCrudActions = exports.DataCrudRequestMethods = exports.ServerQueryEqualities = exports.ServerQueryOperators = exports.TextFieldTypesArray = void 0;
exports.TextFieldTypesArray = [
{ title: "Plain Text", value: "plain" },
{ title: "Rich Text", value: "richText" },
@ -70,3 +70,12 @@ exports.DockerComposeServices = [
"db-load-balancer",
"post-db-setup",
];
exports.MediaTypes = ["image", "file"];
exports.ImageMimeTypes = [
"webp",
"gif",
"svg",
"png",
"jpeg",
"jpg",
];

View File

@ -0,0 +1,8 @@
import { DATASQUIREL_LoggedInUser, UserType } from "../../../types";
type Param = {
user?: DATASQUIREL_LoggedInUser | UserType;
userId?: string | number | null;
dbSlug?: string;
};
export default function grabUserDbFullName({ dbSlug, user, userId }: Param): string;
export {};
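
A usage sketch; the user id and slug are made up, and the import path is an assumption.

import grabUserDbFullName from "@/package-shared/utils/backend/names/grab-user-db-full-name"; // path assumed

// Returns "datasquirel_user_42_blog". Throws when neither user?.id nor userId
// is supplied, when dbSlug is missing, or when the slug contains characters
// outside a-z, A-Z, 0-9, "-" and "_".
const dbFullName = grabUserDbFullName({ userId: 42, dbSlug: "blog" });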

View File

@ -0,0 +1,12 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = grabUserDbFullName;
function grabUserDbFullName({ dbSlug, user, userId }) {
const finalUserId = (user === null || user === void 0 ? void 0 : user.id) || userId;
if (!finalUserId || !dbSlug)
throw new Error(`Couldn't grab full DB name. Missing parameters finalUserId || dbSlug`);
if (dbSlug.match(/[^a-zA-Z0-9-_]/)) {
throw new Error(`Invalid Database slug`);
}
return `datasquirel_user_${finalUserId}_${dbSlug}`;
}

View File

@ -13,6 +13,7 @@ export default function grabDirNames(param?: Param): {
tempDirName: string;
defaultTableFieldsJSONFilePath: string;
usersSchemaDir: string;
targetUserSchemaDir: string | undefined;
userSchemaMainJSONFilePath: string | undefined;
userPrivateMediaDir: string | undefined;
userPrivateExportsDir: string | undefined;
@ -33,5 +34,7 @@ export default function grabDirNames(param?: Param): {
siteSetupFile: string;
envFile: string;
testEnvFile: string;
userPublicMediaDir: string | undefined;
userTempSQLFilePath: string | undefined;
};
export {};

View File

@ -8,6 +8,9 @@ const path_1 = __importDefault(require("path"));
function grabDirNames(param) {
var _a;
const appDir = (param === null || param === void 0 ? void 0 : param.appDir) || process.env.DSQL_APP_DIR;
const STATIC_ROOT = process.env.DSQL_STATIC_SERVER_DIR || "/static";
const finalUserId = ((_a = param === null || param === void 0 ? void 0 : param.user) === null || _a === void 0 ? void 0 : _a.id) || (param === null || param === void 0 ? void 0 : param.userId);
const publicImagesDir = path_1.default.join(STATIC_ROOT, `images`);
if (!appDir)
throw new Error("Please provide the `DSQL_APP_DIR` env variable.");
const schemasDir = process.env.DSQL_DB_SCHEMA_DIR ||
@ -20,10 +23,14 @@ function grabDirNames(param) {
const mainShemaJSONFilePath = path_1.default.join(schemasDir, `main.json`);
const defaultTableFieldsJSONFilePath = path_1.default.join(pakageSharedDir, `data/defaultFields.json`);
const usersSchemaDir = path_1.default.join(schemasDir, `users`);
const userDirPath = ((_a = param === null || param === void 0 ? void 0 : param.user) === null || _a === void 0 ? void 0 : _a.id)
? path_1.default.join(usersSchemaDir, `user-${param.user.id}`)
: (param === null || param === void 0 ? void 0 : param.userId)
? path_1.default.join(usersSchemaDir, `user-${param.userId}`)
const targetUserSchemaDir = finalUserId
? path_1.default.join(usersSchemaDir, `user-${finalUserId}`)
: undefined;
const userTempSQLFilePath = targetUserSchemaDir
? path_1.default.join(targetUserSchemaDir, `tmp.sql`)
: undefined;
const userDirPath = finalUserId
? path_1.default.join(usersSchemaDir, `user-${finalUserId}`)
: undefined;
const userSchemaMainJSONFilePath = userDirPath
? path_1.default.join(userDirPath, `main.json`)
@ -47,6 +54,9 @@ function grabDirNames(param) {
const userPrivateDbExportZipFilePath = userPrivateSQLExportsDir
? path_1.default.join(userPrivateSQLExportsDir, userPrivateDbExportZipFileName)
: undefined;
const userPublicMediaDir = finalUserId
? path_1.default.join(publicImagesDir, `user-images/user-${finalUserId}`)
: undefined;
const userPrivateDbImportZipFileName = `db-export.zip`;
const userPrivateDbImportZipFilePath = userPrivateSQLExportsDir
? path_1.default.join(userPrivateSQLExportsDir, userPrivateDbImportZipFileName)
@ -70,6 +80,7 @@ function grabDirNames(param) {
tempDirName,
defaultTableFieldsJSONFilePath,
usersSchemaDir,
targetUserSchemaDir,
userSchemaMainJSONFilePath,
userPrivateMediaDir,
userPrivateExportsDir,
@ -90,5 +101,7 @@ function grabDirNames(param) {
siteSetupFile,
envFile,
testEnvFile,
userPublicMediaDir,
userTempSQLFilePath,
};
}
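
A sketch of reading the two new paths; the user id is hypothetical and the defaults follow the code above.

import grabDirNames from "@/package-shared/utils/backend/names/grab-dir-names";

// With DSQL_STATIC_SERVER_DIR unset, userPublicMediaDir resolves to
// "/static/images/user-images/user-42"; userTempSQLFilePath points at
// "<schemas dir>/users/user-42/tmp.sql". Both are undefined when no user or
// userId is passed. grabDirNames still throws if DSQL_APP_DIR is not set.
const { userPublicMediaDir, userTempSQLFilePath } = grabDirNames({ userId: 42 });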

View File

@ -1,5 +1,5 @@
type Param = {
dbName: string;
dbName?: string;
userId?: string | number;
};
/**

View File

@ -5,6 +5,8 @@ exports.default = grabDbFullName;
* # Grab Database Full Name
*/
function grabDbFullName({ dbName, userId }) {
if (!dbName)
throw new Error(`Database name not provided to db name parser function`);
const sanitizedName = dbName.replace(/[^a-z0-9\_]/g, "");
const cleanedDbName = sanitizedName.replace(/datasquirel_user_\d+_/, "");
if (!userId)
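
dbName is now optional at the type level, but the function fails loudly at runtime when it is missing; a sketch with a hypothetical import path:

import grabDbFullName from "@/package-shared/utils/grab-db-full-name"; // path assumed

// Callers may forward a possibly-undefined dbName, but an absent name now
// throws ("Database name not provided...") instead of silently producing a
// bad identifier.
function safeFullName(dbName?: string, userId?: number) {
    try {
        return grabDbFullName({ dbName, userId });
    } catch {
        return null;
    }
}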

View File

@ -66,7 +66,7 @@ export default async function apiGet<
let tableSchema: DSQL_TableSchemaType | undefined;
if (dbSchema) {
const targetTable = dbSchema.tables.find(
const targetTable = dbSchema.tables?.find(
(table) => table.tableName === tableName
);

View File

@ -0,0 +1,163 @@
import varDatabaseDbHandler from "../../functions/backend/varDatabaseDbHandler";
import { default as grabUserSchemaData } from "../../functions/backend/grabUserSchemaData";
import { default as setUserSchemaData } from "../../functions/backend/setUserSchemaData";
import addDbEntry from "../../functions/backend/db/addDbEntry";
import slugToCamelTitle from "../../shell/utils/slugToCamelTitle";
import { DSQL_DATASQUIREL_USER_DATABASES } from "@/package-shared/types/dsql";
import {
DSQL_FieldSchemaType,
DSQL_MYSQL_SHOW_COLUMNS_Type,
DSQL_TableSchemaType,
} from "@/package-shared/types";
type Params = {
userId: number | string;
database: DSQL_DATASQUIREL_USER_DATABASES;
};
export default async function createDbSchemaFromDb({
userId,
database,
}: Params) {
try {
if (!userId) {
console.log("No user Id provided");
return;
}
const userSchemaData = grabUserSchemaData({ userId });
if (!userSchemaData) throw new Error("User schema data not found!");
const targetDb: { tables: object[] } = userSchemaData.filter(
(dbObject) => dbObject.dbFullName === database.db_full_name
)[0];
const existingTables = await varDatabaseDbHandler({
database: database.db_full_name,
queryString: `SHOW TABLES FROM ${database.db_full_name}`,
});
if (!existingTables) throw new Error("No Existing Tables");
for (let i = 0; i < existingTables.length; i++) {
const table = existingTables[i];
const tableName = Object.values(table)[0] as string;
const tableInsert = await addDbEntry({
dbFullName: "datasquirel",
tableName: "user_database_tables",
data: {
user_id: userId,
db_id: database.id,
db_slug: database.db_slug,
table_name: slugToCamelTitle(tableName),
table_slug: tableName,
},
});
const tableObject: DSQL_TableSchemaType = {
tableName: tableName,
tableFullName: slugToCamelTitle(tableName) || "",
fields: [],
indexes: [],
};
const tableColumns: DSQL_MYSQL_SHOW_COLUMNS_Type[] =
await varDatabaseDbHandler({
database: database.db_full_name,
queryString: `SHOW COLUMNS FROM ${database.db_full_name}.${tableName}`,
});
if (tableColumns) {
for (let k = 0; k < tableColumns.length; k++) {
const tableColumn = tableColumns[k];
const { Field, Type, Null, Key, Default, Extra } =
tableColumn;
const fieldObject: DSQL_FieldSchemaType = {
fieldName: Field,
dataType: Type.toUpperCase(),
};
if (Null?.match(/^no$/i)) fieldObject.notNullValue = true;
if (Key?.match(/^pri$/i)) fieldObject.primaryKey = true;
if (Default?.toString()?.match(/./))
fieldObject.defaultValue = Default;
if (Default?.toString()?.match(/timestamp/i)) {
delete fieldObject.defaultValue;
fieldObject.defaultValueLiteral = Default;
}
if (Extra?.toString()?.match(/auto_increment/i))
fieldObject.autoIncrement = true;
tableObject.fields.push(fieldObject);
}
}
const tableIndexes = await varDatabaseDbHandler({
database: database.db_full_name,
queryString: `SHOW INDEXES FROM ${database.db_full_name}.${tableName}`,
});
if (tableIndexes) {
for (let m = 0; m < tableIndexes.length; m++) {
const indexObject = tableIndexes[m];
const {
Table,
Key_name,
Column_name,
Null,
Index_type,
Index_comment,
} = indexObject;
if (!Index_comment?.match(/^schema_index$/)) continue;
const indexNewObject: import("@/package-shared/types").DSQL_IndexSchemaType =
{
indexType: Index_type?.match(/fulltext/i)
? "fullText"
: "regular",
indexName: Key_name,
indexTableFields: [],
};
const targetTableFieldObject = tableColumns?.filter(
(col) => col.Field === Column_name
)[0];
const existingIndexField = tableObject.indexes?.filter(
(indx) => indx.indexName == Key_name
);
if (existingIndexField && existingIndexField[0]) {
existingIndexField[0].indexTableFields?.push({
value: Column_name,
dataType: targetTableFieldObject.Type.toUpperCase(),
});
} else {
indexNewObject.indexTableFields = [
{
value: Column_name,
dataType:
targetTableFieldObject.Type.toUpperCase(),
},
];
tableObject.indexes?.push(indexNewObject);
}
}
}
targetDb.tables.push(tableObject);
}
setUserSchemaData({ schemaData: userSchemaData, userId });
return true;
} catch (error) {
console.log(error);
return false;
}
}

View File

@ -7,13 +7,14 @@ import connDbHandler from "../../../utils/db/conn-db-handler";
import checkIfIsMaster from "../../../utils/check-if-is-master";
import { DbContextsArray } from "./runQuery";
import debugLog from "../../../utils/logging/debug-log";
import { PostInsertReturn } from "../../../types";
type Param = {
type Param<T extends { [k: string]: any } = any> = {
dbContext?: (typeof DbContextsArray)[number];
paradigm?: "Read Only" | "Full Access";
dbFullName?: string;
tableName: string;
data: any;
data: T;
tableSchema?: import("../../../types").DSQL_TableSchemaType;
duplicateColumnName?: string;
duplicateColumnValue?: string;
@ -27,7 +28,7 @@ type Param = {
/**
* Add a db Entry Function
*/
export default async function addDbEntry({
export default async function addDbEntry<T extends { [k: string]: any } = any>({
dbContext,
paradigm,
dbFullName,
@ -41,10 +42,7 @@ export default async function addDbEntry({
encryptionSalt,
forceLocal,
debug,
}: Param): Promise<any> {
/**
* Initialize variables
*/
}: Param<T>): Promise<PostInsertReturn | null> {
const isMaster = forceLocal
? true
: checkIfIsMaster({ dbContext, dbFullName });
@ -64,10 +62,6 @@ export default async function addDbEntry({
? global.DSQL_DB_CONN
: global.DSQL_READ_ONLY_DB_CONN || global.DSQL_DB_CONN;
////////////////////////////////////////
////////////////////////////////////////
////////////////////////////////////////
if (data?.["date_created_timestamp"]) delete data["date_created_timestamp"];
if (data?.["date_updated_timestamp"]) delete data["date_updated_timestamp"];
if (data?.["date_updated"]) delete data["date_updated"];
@ -75,10 +69,6 @@ export default async function addDbEntry({
if (data?.["date_created"]) delete data["date_created"];
if (data?.["date_created_code"]) delete data["date_created_code"];
////////////////////////////////////////
////////////////////////////////////////
////////////////////////////////////////
if (duplicateColumnName && typeof duplicateColumnName === "string") {
const checkDuplicateQuery = `SELECT * FROM ${
isMaster ? "" : `\`${dbFullName}\`.`
@ -107,11 +97,6 @@ export default async function addDbEntry({
}
}
/**
* Declare variables
*
* @description Declare "results" variable
*/
const dataKeys = Object.keys(data);
let insertKeysArray = [];
@ -120,7 +105,6 @@ export default async function addDbEntry({
for (let i = 0; i < dataKeys.length; i++) {
try {
const dataKey = dataKeys[i];
// @ts-ignore
let value = data?.[dataKey];
const targetFieldSchemaArray = tableSchema
@ -186,8 +170,6 @@ export default async function addDbEntry({
}
}
////////////////////////////////////////
if (!data?.["date_created"]) {
insertKeysArray.push("`date_created`");
insertValuesArray.push(Date());
@ -198,8 +180,6 @@ export default async function addDbEntry({
insertValuesArray.push(Date.now());
}
////////////////////////////////////////
if (!data?.["date_updated"]) {
insertKeysArray.push("`date_updated`");
insertValuesArray.push(Date());
@ -210,8 +190,6 @@ export default async function addDbEntry({
insertValuesArray.push(Date.now());
}
////////////////////////////////////////
const query = `INSERT INTO ${
isMaster ? "" : `\`${dbFullName}\`.`
}\`${tableName}\` (${insertKeysArray.join(",")}) VALUES (${insertValuesArray
@ -254,8 +232,5 @@ export default async function addDbEntry({
});
}
/**
* Return statement
*/
return newInsert;
}

View File

@ -4,8 +4,9 @@ import encrypt from "../../dsql/encrypt";
import checkIfIsMaster from "../../../utils/check-if-is-master";
import connDbHandler from "../../../utils/db/conn-db-handler";
import { DbContextsArray } from "./runQuery";
import { PostInsertReturn } from "../../../types";
type Param = {
type Param<T extends { [k: string]: any } = any> = {
dbContext?: (typeof DbContextsArray)[number];
dbFullName?: string;
tableName: string;
@ -13,7 +14,7 @@ type Param = {
encryptionSalt?: string;
data: any;
tableSchema?: import("../../../types").DSQL_TableSchemaType;
identifierColumnName: string;
identifierColumnName: keyof T;
identifierValue: string | number;
forceLocal?: boolean;
};
@ -22,7 +23,9 @@ type Param = {
* # Update DB Function
* @description
*/
export default async function updateDbEntry({
export default async function updateDbEntry<
T extends { [k: string]: any } = any
>({
dbContext,
dbFullName,
tableName,
@ -33,7 +36,7 @@ export default async function updateDbEntry({
encryptionKey,
encryptionSalt,
forceLocal,
}: Param): Promise<object | null> {
}: Param<T>): Promise<PostInsertReturn | null> {
/**
* Check if data is valid
*/
@ -157,9 +160,9 @@ export default async function updateDbEntry({
const query = `UPDATE ${
isMaster ? "" : `\`${dbFullName}\`.`
}\`${tableName}\` SET ${updateKeyValueArray.join(
","
)} WHERE \`${identifierColumnName}\`=?`;
}\`${tableName}\` SET ${updateKeyValueArray.join(",")} WHERE \`${
identifierColumnName as string
}\`=?`;
updateValues.push(identifierValue);

View File

@ -1,25 +1,27 @@
import { DSQL_DatabaseSchemaType, UserType } from "@/package-shared/types";
import serverError from "./serverError";
import fs from "fs";
import path from "path";
import grabDirNames from "@/package-shared/utils/backend/names/grab-dir-names";
import { EJSON } from "@/client-exports";
type Params = {
userId?: string | number;
};
/**
* # Grab User Schema Data
*/
export default function grabUserSchemaData({
userId,
}: {
userId: string | number;
}): import("../../types").DSQL_DatabaseSchemaType[] | null {
}: Params): DSQL_DatabaseSchemaType[] | null {
try {
const userSchemaFilePath = path.resolve(
process.cwd(),
`${process.env.DSQL_USER_DB_SCHEMA_PATH}/user-${userId}/main.json`
const { userSchemaMainJSONFilePath } = grabDirNames({ userId });
const schemaJSON = fs.readFileSync(
userSchemaMainJSONFilePath || "",
"utf-8"
);
const userSchemaData = JSON.parse(
fs.readFileSync(userSchemaFilePath, "utf-8")
);
return userSchemaData;
const schemaObj = EJSON.parse(schemaJSON) as DSQL_DatabaseSchemaType[];
return schemaObj;
} catch (error: any) {
serverError({
component: "grabUserSchemaData",

View File

@ -2,6 +2,7 @@ import serverError from "./serverError";
import fs from "fs";
import path from "path";
import { DSQL_DatabaseSchemaType } from "../../types";
import grabDirNames from "@/package-shared/utils/backend/names/grab-dir-names";
type Param = {
userId: string | number;
@ -16,12 +17,14 @@ export default function setUserSchemaData({
schemaData,
}: Param): boolean {
try {
const userSchemaFilePath = path.resolve(
process.cwd(),
`${process.env.DSQL_USER_DB_SCHEMA_PATH}/user-${userId}/main.json`
);
const { userSchemaMainJSONFilePath } = grabDirNames({ userId });
if (!userSchemaMainJSONFilePath) {
throw new Error(`No User Schema JSON found!`);
}
fs.writeFileSync(
userSchemaFilePath,
userSchemaMainJSONFilePath,
JSON.stringify(schemaData),
"utf8"
);

View File

@ -64,7 +64,7 @@ export default function dbSchemaToType(params?: Params): string[] | undefined {
datasquirelSchema.dbName ||
datasquirelSchema.dbFullName?.replace(/datasquirel_user_\d+_/, "")
)
.toUpperCase()
?.toUpperCase()
.replace(/ /g, "_");
const schemas = dbTablesSchemas

View File

@ -55,7 +55,9 @@ export default function sqlGenerator<
let str = `${finalFieldName}=?`;
if (
if (queryObj.nullValue) {
str = `${finalFieldName} IS NULL`;
} else if (
typeof queryObj.value == "string" ||
typeof queryObj.value == "number"
) {

View File

@ -31,7 +31,7 @@ export default async function checkDbRecordCreateDbSchema({
let recordedDbEntryArray = userId
? await varDatabaseDbHandler({
queryString: `SELECT * FROM datasquirel.user_databases WHERE db_full_name = ?`,
queryValuesArray: [dbFullName],
queryValuesArray: [dbFullName || "NULL"],
})
: undefined;
@ -59,7 +59,7 @@ export default async function checkDbRecordCreateDbSchema({
if (newDbEntry.insertId) {
recordedDbEntryArray = await varDatabaseDbHandler({
queryString: `SELECT * FROM datasquirel.user_databases WHERE db_full_name = ?`,
queryValuesArray: [dbFullName],
queryValuesArray: [dbFullName || "NULL"],
});
recordedDbEntry = recordedDbEntryArray?.[0];
}

View File

@ -49,6 +49,8 @@ export default async function createDbFromSchema({
const { dbFullName, tables, dbSlug, childrenDatabases } = database;
if (!dbFullName) continue;
if (targetDatabase && dbFullName != targetDatabase) {
continue;
}
@ -221,11 +223,15 @@ export default async function createDbFromSchema({
if (childrenDatabases?.[0]) {
for (let ch = 0; ch < childrenDatabases.length; ch++) {
const childDb = childrenDatabases[ch];
const { dbFullName } = childDb;
const { dbId } = childDb;
const targetDatabase = dbSchema.find(
(dbSch) => dbSch.id == dbId
);
await createDbFromSchema({
userId,
targetDatabase: dbFullName,
targetDatabase: targetDatabase?.dbFullName,
});
}
}

View File

@ -1,16 +1,20 @@
import type { RequestOptions } from "https";
import {
DSQL_DATASQUIREL_PROCESS_QUEUE,
DSQL_DATASQUIREL_USER_DATABASE_TABLES,
DSQL_DATASQUIREL_USER_DATABASES,
DSQL_DATASQUIREL_USER_MEDIA,
} from "./dsql";
import { Editor } from "tinymce";
import sharp from "sharp";
export type DSQL_DatabaseFullName = string;
export interface DSQL_DatabaseSchemaType {
dbName: string;
dbSlug: string;
dbFullName: string;
id?: number | string;
dbName?: string;
dbSlug?: string;
dbFullName?: string;
dbDescription?: string;
dbImage?: string;
tables: DSQL_TableSchemaType[];
@ -21,10 +25,12 @@ export interface DSQL_DatabaseSchemaType {
}
export interface DSQL_ChildrenDatabaseObject {
dbFullName: string;
dbId?: string | number;
dbFullName?: string;
}
export interface DSQL_TableSchemaType {
id?: number | string;
tableName: string;
tableFullName: string;
tableDescription?: string;
@ -540,11 +546,11 @@ export interface LoginFormContextType {
export interface CreateAccountContextType {
user?: UserType | null;
query: CreateAccountQueryType;
query: InviteObjectType;
invitingUser: any;
}
export interface CreateAccountQueryType {
export interface InviteObjectType {
invite?: number;
database_access?: string;
priviledge?: string;
@ -824,8 +830,9 @@ export interface DbConnectContextType {
export interface ImageObjectType {
imageName?: string;
mimeType?: string;
mimeType?: keyof sharp.FormatEnum | sharp.AvailableFormatInfo;
imageSize?: number;
thumbnailSize?: number;
private?: boolean;
imageBase64?: string;
imageBase64Full?: string;
@ -1127,6 +1134,7 @@ export type ServerQueryParam<
export type ServerQueryObject<T extends object = { [key: string]: any }> = {
value?: string | string[];
nullValue?: boolean;
operator?: (typeof ServerQueryOperators)[number];
equality?: (typeof ServerQueryEqualities)[number];
tableName?: string;
@ -1606,10 +1614,20 @@ export type PagePropsType = {
pageUrl?: string | null;
query?: any;
databases?: DSQL_DATASQUIREL_USER_DATABASES[] | null;
database?: DSQL_DATASQUIREL_USER_DATABASES | null;
databaseTables?: DSQL_DATASQUIREL_USER_DATABASE_TABLES[] | null;
databaseTable?: DSQL_DATASQUIREL_USER_DATABASE_TABLES | null;
dbCount?: number | null;
tableCount?: number | null;
mediaCount?: number | null;
apiKeysCount?: number | null;
databaseSchema?: DSQL_DatabaseSchemaType | null;
tableSchema?: DSQL_TableSchemaType | null;
userMedia?: DSQL_DATASQUIREL_USER_MEDIA[] | null;
mediaCurrentFolder?: string | null;
appData?: DsqlAppData | null;
staticHost?: string | null;
folders?: string[] | null;
};
export type APIResponseObject<T extends any = any> = {
@ -1782,3 +1800,17 @@ export type DsqlAppData = {
DSQL_FACEBOOK_APP_ID?: string;
DSQL_GITHUB_ID?: string;
};
export const MediaTypes = ["image", "file"] as const;
export type MediaUploadDataType = ImageObjectType &
FileObjectType & { private?: boolean };
export const ImageMimeTypes: (keyof sharp.FormatEnum)[] = [
"webp",
"gif",
"svg",
"png",
"jpeg",
"jpg",
] as const;

View File

@ -0,0 +1,22 @@
import { DATASQUIREL_LoggedInUser, UserType } from "../../../types";
type Param = {
user?: DATASQUIREL_LoggedInUser | UserType;
userId?: string | number | null;
dbSlug?: string;
};
export default function grabUserDbFullName({ dbSlug, user, userId }: Param) {
const finalUserId = user?.id || userId;
if (!finalUserId || !dbSlug)
throw new Error(
`Couldn't grab full DB name. Missing parameters finalUserId || dbSlug`
);
if (dbSlug.match(/[^a-zA-Z0-9-_]/)) {
throw new Error(`Invalid Database slug`);
}
return `datasquirel_user_${finalUserId}_${dbSlug}`;
}

View File

@ -8,6 +8,11 @@ type Param = {
};
export default function grabDirNames(param?: Param) {
const appDir = param?.appDir || process.env.DSQL_APP_DIR;
const STATIC_ROOT = process.env.DSQL_STATIC_SERVER_DIR || "/static";
const finalUserId = param?.user?.id || param?.userId;
const publicImagesDir = path.join(STATIC_ROOT, `images`);
if (!appDir)
throw new Error("Please provide the `DSQL_APP_DIR` env variable.");
@ -32,11 +37,15 @@ export default function grabDirNames(param?: Param) {
);
const usersSchemaDir = path.join(schemasDir, `users`);
const targetUserSchemaDir = finalUserId
? path.join(usersSchemaDir, `user-${finalUserId}`)
: undefined;
const userTempSQLFilePath = targetUserSchemaDir
? path.join(targetUserSchemaDir, `tmp.sql`)
: undefined;
const userDirPath = param?.user?.id
? path.join(usersSchemaDir, `user-${param.user.id}`)
: param?.userId
? path.join(usersSchemaDir, `user-${param.userId}`)
const userDirPath = finalUserId
? path.join(usersSchemaDir, `user-${finalUserId}`)
: undefined;
const userSchemaMainJSONFilePath = userDirPath
? path.join(userDirPath, `main.json`)
@ -61,6 +70,10 @@ export default function grabDirNames(param?: Param) {
? path.join(userPrivateSQLExportsDir, userPrivateDbExportZipFileName)
: undefined;
const userPublicMediaDir = finalUserId
? path.join(publicImagesDir, `user-images/user-${finalUserId}`)
: undefined;
const userPrivateDbImportZipFileName = `db-export.zip`;
const userPrivateDbImportZipFilePath = userPrivateSQLExportsDir
? path.join(userPrivateSQLExportsDir, userPrivateDbImportZipFileName)
@ -101,6 +114,7 @@ export default function grabDirNames(param?: Param) {
tempDirName,
defaultTableFieldsJSONFilePath,
usersSchemaDir,
targetUserSchemaDir,
userSchemaMainJSONFilePath,
userPrivateMediaDir,
userPrivateExportsDir,
@ -121,5 +135,7 @@ export default function grabDirNames(param?: Param) {
siteSetupFile,
envFile,
testEnvFile,
userPublicMediaDir,
userTempSQLFilePath,
};
}

View File

@ -1,5 +1,5 @@
type Param = {
dbName: string;
dbName?: string;
userId?: string | number;
};
@ -7,6 +7,11 @@ type Param = {
* # Grab Database Full Name
*/
export default function grabDbFullName({ dbName, userId }: Param): string {
if (!dbName)
throw new Error(
`Database name not provided to db name parser function`
);
const sanitizedName = dbName.replace(/[^a-z0-9\_]/g, "");
const cleanedDbName = sanitizedName.replace(/datasquirel_user_\d+_/, "");

View File

@ -1,6 +1,6 @@
{
"name": "@moduletrace/datasquirel",
"version": "4.7.1",
"version": "4.7.2",
"description": "Cloud-based SQL data management tool",
"main": "dist/index.js",
"bin": {