Benjamin Toby 2025-04-17 10:49:34 +01:00
parent bcaaaae347
commit 6e32bbb4e0
14 changed files with 468 additions and 2 deletions

BIN
bun.lockb

Binary file not shown.

2
dist/engine/schema-to-typedef.d.ts vendored Normal file
View File

@ -0,0 +1,2 @@
#! /usr/bin/env node
export {};

71
dist/engine/schema-to-typedef.js vendored Normal file
View File

@ -0,0 +1,71 @@
#! /usr/bin/env node
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs_1 = __importDefault(require("fs"));
const __1 = __importDefault(require(".."));
const util_1 = require("util");
const db_schema_to_type_1 = __importDefault(require("../package-shared/functions/dsql/db-schema-to-type"));
const path_1 = __importDefault(require("path"));
const args = (0, util_1.parseArgs)({
args: process.argv,
options: {
apiKey: {
type: "string",
default: process.env.DSQL_FULL_ACCESS_API_KEY,
short: "k",
},
database: {
type: "string",
default: process.env.DSQL_DB_NAME,
short: "d",
},
outfile: {
type: "string",
default: process.env.DSQL_DB_NAME,
short: "o",
},
},
strict: false,
});
(() => __awaiter(void 0, void 0, void 0, function* () {
try {
const { apiKey: key, database, outfile } = args.values;
if (!database || typeof database !== "string")
throw new Error("Database name is required");
if (!key || typeof key !== "string")
throw new Error("API key is required");
if (!outfile || typeof outfile !== "string")
throw new Error("Outfile are required");
const schema = yield __1.default.getSchema({
key,
database,
user_id: 1,
});
const dbSchema = schema.payload;
if (!dbSchema)
throw new Error("No schema found");
const definitions = (0, db_schema_to_type_1.default)({ dbSchema });
const finalOutfile = path_1.default.resolve(process.cwd(), outfile);
const outfileDir = path_1.default.dirname(finalOutfile);
if (!fs_1.default.existsSync(outfileDir)) {
fs_1.default.mkdirSync(outfileDir, { recursive: true });
}
fs_1.default.writeFileSync(finalOutfile, (definitions === null || definitions === void 0 ? void 0 : definitions.join("\n\n")) || "", "utf-8");
}
catch (error) {
console.log("Error:", error.message);
process.exit(1);
}
}))();

6
dist/package-shared/functions/dsql/db-schema-to-type.d.ts vendored Normal file
View File

@ -0,0 +1,6 @@
import { DSQL_DatabaseSchemaType } from "../../types";
type Params = {
dbSchema?: DSQL_DatabaseSchemaType;
};
export default function dbSchemaToType(params?: Params): string[] | undefined;
export {};

51
dist/package-shared/functions/dsql/db-schema-to-type.js vendored Normal file
View File

@ -0,0 +1,51 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = dbSchemaToType;
const fs_1 = __importDefault(require("fs"));
const grab_dir_names_1 = __importDefault(require("../../utils/backend/names/grab-dir-names"));
const lodash_1 = __importDefault(require("lodash"));
const ejson_1 = __importDefault(require("../../utils/ejson"));
const generate_type_definitions_1 = __importDefault(require("./generate-type-definitions"));
const path_1 = __importDefault(require("path"));
function dbSchemaToType(params) {
let datasquirelSchema;
const defaultTableFieldsJSONFilePath = path_1.default.resolve(__dirname, "../../data/defaultFields.json");
if (params === null || params === void 0 ? void 0 : params.dbSchema) {
datasquirelSchema = params.dbSchema;
}
else {
const { mainShemaJSONFilePath } = (0, grab_dir_names_1.default)();
const mainSchema = ejson_1.default.parse(fs_1.default.readFileSync(mainShemaJSONFilePath, "utf-8"));
datasquirelSchema = mainSchema.find((sch) => sch.dbFullName == "datasquirel");
}
if (!datasquirelSchema)
return;
let tableNames = `export const DsqlTables = [\n${datasquirelSchema.tables
.map((tbl) => ` "${tbl.tableName}",`)
.join("\n")}\n] as const`;
const defaultFields = ejson_1.default.parse(fs_1.default.readFileSync(defaultTableFieldsJSONFilePath, "utf-8"));
const dbTablesSchemas = datasquirelSchema.tables.map((tblSchm) => {
let newDefaultFields = lodash_1.default.cloneDeep(defaultFields);
return Object.assign(Object.assign({}, tblSchm), { fields: (params === null || params === void 0 ? void 0 : params.dbSchema)
? tblSchm.fields
: [
newDefaultFields.shift(),
newDefaultFields.shift(),
...tblSchm.fields,
...newDefaultFields,
] });
});
const schemas = dbTablesSchemas
.map((table) => (0, generate_type_definitions_1.default)({
paradigm: "TypeScript",
table,
typeDefName: `DSQL_DATASQUIREL_${table.tableName.toUpperCase()}`,
allValuesOptional: true,
addExport: true,
}))
.filter((schm) => typeof schm == "string");
return [tableNames, ...schemas];
}

7
dist/package-shared/functions/dsql/default-fields-regexp.d.ts vendored Normal file
View File

@ -0,0 +1,7 @@
/**
* Regular expression matching the table fields that DSQL adds by default
* (id, uuid, date_created*, date_updated*).
*
* @description Fields whose names match are marked optional in generated type definitions.
*/
declare const defaultFieldsRegexp: RegExp;
export default defaultFieldsRegexp;

13
dist/package-shared/functions/dsql/default-fields-regexp.js vendored Normal file
View File

@ -0,0 +1,13 @@
"use strict";
// @ts-check
Object.defineProperty(exports, "__esModule", { value: true });
/**
* Regular expression matching the table fields that DSQL adds by default
* (id, uuid, date_created*, date_updated*).
*
* @description Fields whose names match are marked optional in generated type definitions.
*/
const defaultFieldsRegexp = /^id$|^uuid$|^date_created$|^date_created_code$|^date_created_timestamp$|^date_updated$|^date_updated_code$|^date_updated_timestamp$/;
////////////////////////////////////////
////////////////////////////////////////
////////////////////////////////////////
exports.default = defaultFieldsRegexp;

11
dist/package-shared/functions/dsql/generate-type-definitions.d.ts vendored Normal file
View File

@ -0,0 +1,11 @@
import { DSQL_TableSchemaType } from "../../types";
type Param = {
paradigm: "JavaScript" | "TypeScript" | undefined;
table: DSQL_TableSchemaType;
query?: any;
typeDefName?: string;
allValuesOptional?: boolean;
addExport?: boolean;
};
export default function generateTypeDefinition({ paradigm, table, query, typeDefName, allValuesOptional, addExport, }: Param): string | null;
export {};

53
dist/package-shared/functions/dsql/generate-type-definitions.js vendored Normal file
View File

@ -0,0 +1,53 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = generateTypeDefinition;
const default_fields_regexp_1 = __importDefault(require("./default-fields-regexp"));
function generateTypeDefinition({ paradigm, table, query, typeDefName, allValuesOptional, addExport, }) {
let typeDefinition = ``;
try {
const tdName = typeDefName ||
`DSQL_${query.single}_${query.single_table}`.toUpperCase();
const fields = table.fields;
function typeMap(type) {
if (type === null || type === void 0 ? void 0 : type.match(/int/i)) {
return "number";
}
if (type === null || type === void 0 ? void 0 : type.match(/text|varchar|timestamp/i)) {
return "string";
}
return "string";
}
const typesArrayTypeScript = [];
const typesArrayJavascript = [];
typesArrayTypeScript.push(`${addExport ? "export " : ""}type ${tdName} = {`);
typesArrayJavascript.push(`/**\n * @typedef {object} ${tdName}`);
fields.forEach((field) => {
var _a;
const nullValue = allValuesOptional
? "?"
: field.nullValue
? "?"
: ((_a = field.fieldName) === null || _a === void 0 ? void 0 : _a.match(default_fields_regexp_1.default))
? "?"
: "";
typesArrayTypeScript.push(` ${field.fieldName}${nullValue}: ${typeMap(field.dataType || "")};`);
typesArrayJavascript.push(` * @property {${typeMap(field.dataType || "")}${nullValue}} ${field.fieldName}`);
});
typesArrayTypeScript.push(`}`);
typesArrayJavascript.push(` */`);
if (paradigm === null || paradigm === void 0 ? void 0 : paradigm.match(/javascript/i)) {
typeDefinition = typesArrayJavascript.join("\n");
}
if (paradigm === null || paradigm === void 0 ? void 0 : paradigm.match(/typescript/i)) {
typeDefinition = typesArrayTypeScript.join("\n");
}
}
catch (error) {
console.log(error.message);
typeDefinition = null;
}
return typeDefinition;
}

71
engine/schema-to-typedef.ts Normal file
View File

@ -0,0 +1,71 @@
#! /usr/bin/env node
import inquirer from "inquirer";
import fs from "fs";
import datasquirel from "..";
import { parseArgs } from "util";
import { DSQL_DatabaseSchemaType } from "../package-shared/types";
import dbSchemaToType from "../package-shared/functions/dsql/db-schema-to-type";
import path from "path";
const args = parseArgs({
args: process.argv,
options: {
apiKey: {
type: "string",
default: process.env.DSQL_FULL_ACCESS_API_KEY,
short: "k",
},
database: {
type: "string",
default: process.env.DSQL_DB_NAME,
short: "d",
},
outfile: {
type: "string",
default: process.env.DSQL_DB_NAME,
short: "o",
},
},
strict: false,
});
(async () => {
try {
const { apiKey: key, database, outfile } = args.values;
if (!database || typeof database !== "string")
throw new Error("Database name is required");
if (!key || typeof key !== "string")
throw new Error("API key is required");
if (!outfile || typeof outfile !== "string")
throw new Error("Outfile are required");
const schema = await datasquirel.getSchema({
key,
database,
user_id: 1,
});
const dbSchema = schema.payload as DSQL_DatabaseSchemaType | undefined;
if (!dbSchema) throw new Error("No schema found");
const definitions = dbSchemaToType({ dbSchema });
const finalOutfile = path.resolve(process.cwd(), outfile);
const outfileDir = path.dirname(finalOutfile);
if (!fs.existsSync(outfileDir)) {
fs.mkdirSync(outfileDir, { recursive: true });
}
fs.writeFileSync(
finalOutfile,
definitions?.join("\n\n") || "",
"utf-8"
);
} catch (error: any) {
console.log("Error:", error.message);
process.exit(1);
}
})();
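
Once compiled, this script is what the new `dsql-schema-to-typedef` bin entry in package.json (further down in this commit) points at, so it would presumably be invoked as something like `npx dsql-schema-to-typedef -d my_database -k <full-access-api-key> -o types/dsql.ts` (the database name and output path here are placeholders), with `--apiKey` and `--database` falling back to the `DSQL_FULL_ACCESS_API_KEY` and `DSQL_DB_NAME` environment variables configured as defaults in the `parseArgs` call above.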

76
package-shared/functions/dsql/db-schema-to-type.ts Normal file
View File

@ -0,0 +1,76 @@
import fs from "fs";
import grabDirNames from "../../utils/backend/names/grab-dir-names";
import {
DSQL_DatabaseSchemaType,
DSQL_FieldSchemaType,
DSQL_TableSchemaType,
} from "../../types";
import _ from "lodash";
import EJSON from "../../utils/ejson";
import generateTypeDefinition from "./generate-type-definitions";
import path from "path";
type Params = {
dbSchema?: DSQL_DatabaseSchemaType;
};
export default function dbSchemaToType(params?: Params): string[] | undefined {
let datasquirelSchema;
const defaultTableFieldsJSONFilePath = path.resolve(
__dirname,
"../../data/defaultFields.json"
);
if (params?.dbSchema) {
datasquirelSchema = params.dbSchema;
} else {
const { mainShemaJSONFilePath } = grabDirNames();
const mainSchema = EJSON.parse(
fs.readFileSync(mainShemaJSONFilePath, "utf-8")
) as DSQL_DatabaseSchemaType[];
datasquirelSchema = mainSchema.find(
(sch) => sch.dbFullName == "datasquirel"
);
}
if (!datasquirelSchema) return;
let tableNames = `export const DsqlTables = [\n${datasquirelSchema.tables
.map((tbl) => ` "${tbl.tableName}",`)
.join("\n")}\n] as const`;
const defaultFields = EJSON.parse(
fs.readFileSync(defaultTableFieldsJSONFilePath, "utf-8")
) as DSQL_FieldSchemaType[];
const dbTablesSchemas = datasquirelSchema.tables.map((tblSchm) => {
let newDefaultFields = _.cloneDeep(defaultFields);
return {
...tblSchm,
fields: params?.dbSchema
? tblSchm.fields
: [
newDefaultFields.shift(),
newDefaultFields.shift(),
...tblSchm.fields,
...newDefaultFields,
],
} as DSQL_TableSchemaType;
});
const schemas = dbTablesSchemas
.map((table) =>
generateTypeDefinition({
paradigm: "TypeScript",
table,
typeDefName: `DSQL_DATASQUIREL_${table.tableName.toUpperCase()}`,
allValuesOptional: true,
addExport: true,
})
)
.filter((schm) => typeof schm == "string");
return [tableNames, ...schemas];
}
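
A minimal sketch of calling dbSchemaToType directly with an in-memory schema rather than through the CLI. The sample object and the relative import paths are illustrative; it only carries the properties the function actually reads (dbFullName, tables[].tableName and tables[].fields), so it is cast to DSQL_DatabaseSchemaType, which presumably has more members:

import dbSchemaToType from "./db-schema-to-type";
import { DSQL_DatabaseSchemaType } from "../../types";

// Hypothetical in-memory schema: only the properties read by dbSchemaToType are filled in.
const sampleSchema = {
    dbFullName: "datasquirel",
    tables: [
        {
            tableName: "posts",
            fields: [
                { fieldName: "id", dataType: "BIGINT" },
                { fieldName: "title", dataType: "VARCHAR(255)" },
            ],
        },
    ],
} as unknown as DSQL_DatabaseSchemaType;

// Because a dbSchema is passed, the packaged default fields are not injected,
// and allValuesOptional is hard-coded inside dbSchemaToType, so every property
// comes out optional. Roughly:
//   export const DsqlTables = [ "posts", ] as const
//   export type DSQL_DATASQUIREL_POSTS = { id?: number; title?: string; }
const definitions = dbSchemaToType({ dbSchema: sampleSchema });
console.log(definitions?.join("\n\n"));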

16
package-shared/functions/dsql/default-fields-regexp.ts Normal file
View File

@ -0,0 +1,16 @@
// @ts-check
/**
* Regular expression matching the table fields that DSQL adds by default
* (id, uuid, date_created*, date_updated*).
*
* @description Fields whose names match are marked optional in generated type definitions.
*/
const defaultFieldsRegexp =
/^id$|^uuid$|^date_created$|^date_created_code$|^date_created_timestamp$|^date_updated$|^date_updated_code$|^date_updated_timestamp$/;
////////////////////////////////////////
////////////////////////////////////////
////////////////////////////////////////
export default defaultFieldsRegexp;
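
Because every alternative is anchored with ^...$, only exact default-field names match; a quick hedged illustration (the import path is illustrative):

import defaultFieldsRegexp from "./default-fields-regexp";

defaultFieldsRegexp.test("date_created"); // true  -> treated as a default field, typed as optional
defaultFieldsRegexp.test("uuid");         // true
defaultFieldsRegexp.test("title");        // false -> ordinary field, stays required unless nullValue is set
defaultFieldsRegexp.test("uuid_v4");      // false -> the anchors require an exact match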

87
package-shared/functions/dsql/generate-type-definitions.ts Normal file
View File

@ -0,0 +1,87 @@
import { DSQL_TableSchemaType } from "../../types";
import defaultFieldsRegexp from "./default-fields-regexp";
type Param = {
paradigm: "JavaScript" | "TypeScript" | undefined;
table: DSQL_TableSchemaType;
query?: any;
typeDefName?: string;
allValuesOptional?: boolean;
addExport?: boolean;
};
export default function generateTypeDefinition({
paradigm,
table,
query,
typeDefName,
allValuesOptional,
addExport,
}: Param): string | null {
let typeDefinition: string | null = ``;
try {
const tdName =
typeDefName ||
`DSQL_${query.single}_${query.single_table}`.toUpperCase();
const fields = table.fields;
function typeMap(type: string) {
if (type?.match(/int/i)) {
return "number";
}
if (type?.match(/text|varchar|timestamp/i)) {
return "string";
}
return "string";
}
const typesArrayTypeScript = [];
const typesArrayJavascript = [];
typesArrayTypeScript.push(
`${addExport ? "export " : ""}type ${tdName} = {`
);
typesArrayJavascript.push(`/**\n * @typedef {object} ${tdName}`);
fields.forEach((field) => {
const nullValue = allValuesOptional
? "?"
: field.nullValue
? "?"
: field.fieldName?.match(defaultFieldsRegexp)
? "?"
: "";
typesArrayTypeScript.push(
` ${field.fieldName}${nullValue}: ${typeMap(
field.dataType || ""
)};`
);
typesArrayJavascript.push(
` * @property {${typeMap(field.dataType || "")}${nullValue}} ${
field.fieldName
}`
);
});
typesArrayTypeScript.push(`}`);
typesArrayJavascript.push(` */`);
if (paradigm?.match(/javascript/i)) {
typeDefinition = typesArrayJavascript.join("\n");
}
if (paradigm?.match(/typescript/i)) {
typeDefinition = typesArrayTypeScript.join("\n");
}
} catch (error: any) {
console.log(error.message);
typeDefinition = null;
}
return typeDefinition;
}
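
A hedged sketch of generateTypeDefinition on its own, using a hand-written table (the field names, the cast, and the import paths are illustrative; the expected output in the comments follows from the template strings above):

import generateTypeDefinition from "./generate-type-definitions";
import { DSQL_TableSchemaType } from "../../types";

// Hypothetical table schema; cast because the real type presumably has more members.
const table = {
    tableName: "users",
    fields: [
        { fieldName: "id", dataType: "BIGINT" },
        { fieldName: "email", dataType: "VARCHAR(255)", nullValue: true },
        { fieldName: "bio", dataType: "TEXT" },
    ],
} as unknown as DSQL_TableSchemaType;

const typeDef = generateTypeDefinition({
    paradigm: "TypeScript",
    table,
    typeDefName: "DSQL_DATASQUIREL_USERS",
    addExport: true,
});

// Expected output -- "id" is optional because it matches defaultFieldsRegexp,
// "email" because nullValue is set, while "bio" stays required:
// export type DSQL_DATASQUIREL_USERS = {
//     id?: number;
//     email?: string;
//     bio: string;
// }
console.log(typeDef);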

6
package.json
View File

@ -1,11 +1,12 @@
{
"name": "@moduletrace/datasquirel",
- "version": "4.2.9",
+ "version": "4.3.0",
"description": "Cloud-based SQL data management tool",
"main": "dist/index.js",
"bin": {
"dsql-watch": "dist/engine/dsql.js",
- "dsql-dump": "dist/engine/dump.js"
+ "dsql-dump": "dist/engine/dump.js",
+ "dsql-schema-to-typedef": "dist/engine/schema-to-typedef.js"
},
"scripts": {
"dev": "tsc --watch"
@ -41,6 +42,7 @@
"dotenv": "^16.3.1",
"generate-password": "^1.7.1",
"google-auth-library": "^9.15.0",
+ "inquirer": "^12.5.2",
"lodash": "^4.17.21",
"mysql": "^2.18.1",
"nodemailer": "^6.9.14",