first Commit
This commit is contained in:
commit
5555a8e917
34
.gitignore
vendored
Normal file
34
.gitignore
vendored
Normal file
@ -0,0 +1,34 @@
|
||||
# dependencies (bun install)
|
||||
node_modules
|
||||
|
||||
# output
|
||||
out
|
||||
dist
|
||||
*.tgz
|
||||
|
||||
# code coverage
|
||||
coverage
|
||||
*.lcov
|
||||
|
||||
# logs
|
||||
logs
|
||||
*.log
|
||||
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
|
||||
|
||||
# dotenv environment variable files
|
||||
.env
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
.env.local
|
||||
|
||||
# caches
|
||||
.eslintcache
|
||||
.cache
|
||||
*.tsbuildinfo
|
||||
|
||||
# IntelliJ based IDEs
|
||||
.idea
|
||||
|
||||
# Finder (MacOS) folder config
|
||||
.DS_Store
|
||||
3
.gitignore copy
Normal file
3
.gitignore copy
Normal file
@ -0,0 +1,3 @@
|
||||
.env
|
||||
.data
|
||||
node_modules
|
||||
1
.npmrc
Normal file
1
.npmrc
Normal file
@ -0,0 +1 @@
|
||||
@moduletrace:registry=https://git.tben.me/api/packages/moduletrace/npm/
|
||||
106
CLAUDE.md
Normal file
106
CLAUDE.md
Normal file
@ -0,0 +1,106 @@
|
||||
|
||||
Default to using Bun instead of Node.js.
|
||||
|
||||
- Use `bun <file>` instead of `node <file>` or `ts-node <file>`
|
||||
- Use `bun test` instead of `jest` or `vitest`
|
||||
- Use `bun build <file.html|file.ts|file.css>` instead of `webpack` or `esbuild`
|
||||
- Use `bun install` instead of `npm install` or `yarn install` or `pnpm install`
|
||||
- Use `bun run <script>` instead of `npm run <script>` or `yarn run <script>` or `pnpm run <script>`
|
||||
- Bun automatically loads .env, so don't use dotenv.
|
||||
|
||||
## APIs
|
||||
|
||||
- `Bun.serve()` supports WebSockets, HTTPS, and routes. Don't use `express`.
|
||||
- `bun:sqlite` for SQLite. Don't use `better-sqlite3`.
|
||||
- `Bun.redis` for Redis. Don't use `ioredis`.
|
||||
- `Bun.sql` for Postgres. Don't use `pg` or `postgres.js`.
|
||||
- `WebSocket` is built-in. Don't use `ws`.
|
||||
- Prefer `Bun.file` over `node:fs`'s readFile/writeFile
|
||||
- Bun.$`ls` instead of execa.
|
||||
|
||||
## Testing
|
||||
|
||||
Use `bun test` to run tests.
|
||||
|
||||
```ts#index.test.ts
|
||||
import { test, expect } from "bun:test";
|
||||
|
||||
test("hello world", () => {
|
||||
expect(1).toBe(1);
|
||||
});
|
||||
```
|
||||
|
||||
## Frontend
|
||||
|
||||
Use HTML imports with `Bun.serve()`. Don't use `vite`. HTML imports fully support React, CSS, Tailwind.
|
||||
|
||||
Server:
|
||||
|
||||
```ts#index.ts
|
||||
import index from "./index.html"
|
||||
|
||||
Bun.serve({
|
||||
routes: {
|
||||
"/": index,
|
||||
"/api/users/:id": {
|
||||
GET: (req) => {
|
||||
return new Response(JSON.stringify({ id: req.params.id }));
|
||||
},
|
||||
},
|
||||
},
|
||||
// optional websocket support
|
||||
websocket: {
|
||||
open: (ws) => {
|
||||
ws.send("Hello, world!");
|
||||
},
|
||||
message: (ws, message) => {
|
||||
ws.send(message);
|
||||
},
|
||||
close: (ws) => {
|
||||
// handle close
|
||||
}
|
||||
},
|
||||
development: {
|
||||
hmr: true,
|
||||
console: true,
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
HTML files can import .tsx, .jsx or .js files directly and Bun's bundler will transpile & bundle automatically. `<link>` tags can point to stylesheets and Bun's CSS bundler will bundle.
|
||||
|
||||
```html#index.html
|
||||
<html>
|
||||
<body>
|
||||
<h1>Hello, world!</h1>
|
||||
<script type="module" src="./frontend.tsx"></script>
|
||||
</body>
|
||||
</html>
|
||||
```
|
||||
|
||||
With the following `frontend.tsx`:
|
||||
|
||||
```tsx#frontend.tsx
|
||||
import React from "react";
|
||||
|
||||
// import .css files directly and it works
|
||||
import './index.css';
|
||||
|
||||
import { createRoot } from "react-dom/client";
|
||||
|
||||
const root = createRoot(document.body);
|
||||
|
||||
export default function Frontend() {
|
||||
return <h1>Hello, world!</h1>;
|
||||
}
|
||||
|
||||
root.render(<Frontend />);
|
||||
```
|
||||
|
||||
Then, run index.ts
|
||||
|
||||
```sh
|
||||
bun --hot ./index.ts
|
||||
```
|
||||
|
||||
For more information, read the Bun API docs in `node_modules/bun-types/docs/**.md`.
|
||||
15
README.md
Normal file
15
README.md
Normal file
@ -0,0 +1,15 @@
|
||||
# bun-sqlite
|
||||
|
||||
To install dependencies:
|
||||
|
||||
```bash
|
||||
bun install
|
||||
```
|
||||
|
||||
To run:
|
||||
|
||||
```bash
|
||||
bun run index.ts
|
||||
```
|
||||
|
||||
This project was created using `bun init` in bun v1.3.0. [Bun](https://bun.com) is a fast all-in-one JavaScript runtime.
|
||||
25
bun.lock
Normal file
25
bun.lock
Normal file
@ -0,0 +1,25 @@
|
||||
{
|
||||
"lockfileVersion": 1,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "bun-sqlite",
|
||||
"devDependencies": {
|
||||
"@types/bun": "latest",
|
||||
},
|
||||
"peerDependencies": {
|
||||
"typescript": "^5",
|
||||
},
|
||||
},
|
||||
},
|
||||
"packages": {
|
||||
"@types/bun": ["@types/bun@1.3.9", "", { "dependencies": { "bun-types": "1.3.9" } }, "sha512-KQ571yULOdWJiMH+RIWIOZ7B2RXQGpL1YQrBtLIV3FqDcCu6FsbFUBwhdKUlCKUpS3PJDsHlJ1QKlpxoVR+xtw=="],
|
||||
|
||||
"@types/node": ["@types/node@25.3.3", "", { "dependencies": { "undici-types": "~7.18.0" } }, "sha512-DpzbrH7wIcBaJibpKo9nnSQL0MTRdnWttGyE5haGwK86xgMOkFLp7vEyfQPGLOJh5wNYiJ3V9PmUMDhV9u8kkQ=="],
|
||||
|
||||
"bun-types": ["bun-types@1.3.9", "", { "dependencies": { "@types/node": "*" } }, "sha512-+UBWWOakIP4Tswh0Bt0QD0alpTY8cb5hvgiYeWCMet9YukHbzuruIEeXC2D7nMJPB12kbh8C7XJykSexEqGKJg=="],
|
||||
|
||||
"typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],
|
||||
|
||||
"undici-types": ["undici-types@7.18.2", "", {}, "sha512-AsuCzffGHJybSaRrmr5eHr81mwJU3kjw6M+uprWvCXiNeN9SOGwQ3Jn8jb8m3Z6izVgknn1R0FTCEAP2QrLY/w=="],
|
||||
}
|
||||
}
|
||||
12
package.json
Normal file
12
package.json
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"name": "bun-sqlite",
|
||||
"module": "index.ts",
|
||||
"type": "module",
|
||||
"private": true,
|
||||
"devDependencies": {
|
||||
"@types/bun": "latest"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"typescript": "^5"
|
||||
}
|
||||
}
|
||||
70
src/lib/sqlite/db-delete.ts
Normal file
70
src/lib/sqlite/db-delete.ts
Normal file
@ -0,0 +1,70 @@
|
||||
import type { DSQL_TRAVIS_AI_ALL_TYPEDEFS, DsqlTables } from "@/types/db";
|
||||
import datasquirel from "@moduletrace/datasquirel";
|
||||
import type {
|
||||
APIResponseObject,
|
||||
ServerQueryParam,
|
||||
} from "@moduletrace/datasquirel/dist/package-shared/types";
|
||||
import DbClient from ".";
|
||||
import _ from "lodash";
|
||||
|
||||
type Params<T extends { [k: string]: any } = DSQL_TRAVIS_AI_ALL_TYPEDEFS> = {
|
||||
table: (typeof DsqlTables)[number];
|
||||
query?: ServerQueryParam<T>;
|
||||
targetId?: number | string;
|
||||
};
|
||||
|
||||
export default async function DbDelete<
|
||||
T extends { [k: string]: any } = DSQL_TRAVIS_AI_ALL_TYPEDEFS,
|
||||
>({ table, query, targetId }: Params<T>): Promise<APIResponseObject> {
|
||||
try {
|
||||
let finalQuery = query || {};
|
||||
|
||||
if (targetId) {
|
||||
finalQuery = _.merge<ServerQueryParam<any>, ServerQueryParam<any>>(
|
||||
finalQuery,
|
||||
{
|
||||
query: {
|
||||
id: {
|
||||
value: String(targetId),
|
||||
},
|
||||
},
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
const sqlQueryObj = datasquirel.sql.sqlGenerator({
|
||||
tableName: table,
|
||||
genObject: finalQuery,
|
||||
});
|
||||
|
||||
const whereClause = sqlQueryObj.string.match(/WHERE .*/)?.[0];
|
||||
|
||||
if (whereClause) {
|
||||
let sql = `DELETE FROM ${table} ${whereClause}`;
|
||||
|
||||
const res = DbClient.run(sql, sqlQueryObj.values);
|
||||
|
||||
return {
|
||||
success: Boolean(res.changes),
|
||||
postInsertReturn: {
|
||||
affectedRows: res.changes,
|
||||
insertId: Number(res.lastInsertRowid),
|
||||
},
|
||||
debug: {
|
||||
sql,
|
||||
values: sqlQueryObj.values,
|
||||
},
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
success: false,
|
||||
msg: `No WHERE clause`,
|
||||
};
|
||||
}
|
||||
} catch (error: any) {
|
||||
return {
|
||||
success: false,
|
||||
error: error.message,
|
||||
};
|
||||
}
|
||||
}
|
||||
112
src/lib/sqlite/db-generate-type-defs.ts
Normal file
112
src/lib/sqlite/db-generate-type-defs.ts
Normal file
@ -0,0 +1,112 @@
|
||||
import type {
|
||||
DSQL_FieldSchemaType,
|
||||
DSQL_TableSchemaType,
|
||||
} from "@moduletrace/datasquirel/dist/package-shared/types";
|
||||
|
||||
type Param = {
|
||||
paradigm: "JavaScript" | "TypeScript" | undefined;
|
||||
table: DSQL_TableSchemaType;
|
||||
query?: any;
|
||||
typeDefName?: string;
|
||||
allValuesOptional?: boolean;
|
||||
addExport?: boolean;
|
||||
dbName?: string;
|
||||
};
|
||||
|
||||
export default function generateTypeDefinition({
|
||||
paradigm,
|
||||
table,
|
||||
query,
|
||||
typeDefName,
|
||||
allValuesOptional,
|
||||
addExport,
|
||||
dbName,
|
||||
}: Param) {
|
||||
let typeDefinition: string | null = ``;
|
||||
let tdName: string | null = ``;
|
||||
|
||||
try {
|
||||
tdName = typeDefName
|
||||
? typeDefName
|
||||
: dbName
|
||||
? `DSQL_${dbName}_${table.tableName}`.toUpperCase()
|
||||
: `DSQL_${query.single}_${query.single_table}`.toUpperCase();
|
||||
|
||||
const fields = table.fields;
|
||||
|
||||
function typeMap(schemaType: DSQL_FieldSchemaType) {
|
||||
if (schemaType.options && schemaType.options.length > 0) {
|
||||
return schemaType.options
|
||||
.map((opt) =>
|
||||
schemaType.dataType?.match(/int/i) ||
|
||||
typeof opt == "number"
|
||||
? `${opt}`
|
||||
: `"${opt}"`,
|
||||
)
|
||||
.join(" | ");
|
||||
}
|
||||
|
||||
if (schemaType.dataType?.match(/int|double|decimal/i)) {
|
||||
return "number";
|
||||
}
|
||||
|
||||
if (schemaType.dataType?.match(/text|varchar|timestamp/i)) {
|
||||
return "string";
|
||||
}
|
||||
|
||||
if (schemaType.dataType?.match(/boolean/i)) {
|
||||
return "0 | 1";
|
||||
}
|
||||
|
||||
return "string";
|
||||
}
|
||||
|
||||
const typesArrayTypeScript = [];
|
||||
const typesArrayJavascript = [];
|
||||
|
||||
typesArrayTypeScript.push(
|
||||
`${addExport ? "export " : ""}type ${tdName} = {`,
|
||||
);
|
||||
typesArrayJavascript.push(`/**\n * @typedef {object} ${tdName}`);
|
||||
|
||||
fields.forEach((field) => {
|
||||
if (field.fieldDescription) {
|
||||
typesArrayTypeScript.push(
|
||||
` /** \n * ${field.fieldDescription}\n */`,
|
||||
);
|
||||
}
|
||||
|
||||
const nullValue = allValuesOptional
|
||||
? "?"
|
||||
: field.notNullValue
|
||||
? ""
|
||||
: "?";
|
||||
|
||||
typesArrayTypeScript.push(
|
||||
` ${field.fieldName}${nullValue}: ${typeMap(field)};`,
|
||||
);
|
||||
|
||||
typesArrayJavascript.push(
|
||||
` * @property {${typeMap(field)}${nullValue}} ${
|
||||
field.fieldName
|
||||
}`,
|
||||
);
|
||||
});
|
||||
|
||||
typesArrayTypeScript.push(`}`);
|
||||
typesArrayJavascript.push(` */`);
|
||||
|
||||
if (paradigm?.match(/javascript/i)) {
|
||||
typeDefinition = typesArrayJavascript.join("\n");
|
||||
}
|
||||
|
||||
if (paradigm?.match(/typescript/i)) {
|
||||
typeDefinition = typesArrayTypeScript.join("\n");
|
||||
}
|
||||
} catch (error: any) {
|
||||
console.log(error.message);
|
||||
typeDefinition = null;
|
||||
}
|
||||
|
||||
return { typeDefinition, tdName };
|
||||
}
|
||||
45
src/lib/sqlite/db-insert.ts
Normal file
45
src/lib/sqlite/db-insert.ts
Normal file
@ -0,0 +1,45 @@
|
||||
import type { DSQL_TRAVIS_AI_ALL_TYPEDEFS, DsqlTables } from "@/types/db";
|
||||
import datasquirel from "@moduletrace/datasquirel";
|
||||
import type { APIResponseObject } from "@moduletrace/datasquirel/dist/package-shared/types";
|
||||
import DbClient from ".";
|
||||
import type { DBChanges } from "@/types/general";
|
||||
|
||||
type Params<T extends { [k: string]: any } = DSQL_TRAVIS_AI_ALL_TYPEDEFS> = {
|
||||
table: (typeof DsqlTables)[number];
|
||||
data: T[];
|
||||
};
|
||||
|
||||
export default async function DbInsert<
|
||||
T extends { [k: string]: any } = DSQL_TRAVIS_AI_ALL_TYPEDEFS,
|
||||
>({ table, data }: Params<T>): Promise<APIResponseObject<DBChanges>> {
|
||||
try {
|
||||
const finalData: DSQL_TRAVIS_AI_ALL_TYPEDEFS[] = data.map((d) => ({
|
||||
...d,
|
||||
created_at: Date.now(),
|
||||
updated_at: Date.now(),
|
||||
}));
|
||||
|
||||
const sqlObj = datasquirel.sql.sqlInsertGenerator({
|
||||
tableName: table,
|
||||
data: finalData as any[],
|
||||
});
|
||||
|
||||
const res = DbClient.run(sqlObj?.query || "", sqlObj?.values || []);
|
||||
|
||||
return {
|
||||
success: Boolean(Number(res.lastInsertRowid)),
|
||||
postInsertReturn: {
|
||||
affectedRows: res.changes,
|
||||
insertId: Number(res.lastInsertRowid),
|
||||
},
|
||||
debug: {
|
||||
sqlObj,
|
||||
},
|
||||
};
|
||||
} catch (error: any) {
|
||||
return {
|
||||
success: false,
|
||||
error: error.message,
|
||||
};
|
||||
}
|
||||
}
|
||||
624
src/lib/sqlite/db-schema-manager.ts
Normal file
624
src/lib/sqlite/db-schema-manager.ts
Normal file
@ -0,0 +1,624 @@
|
||||
#!/usr/bin/env bun
|
||||
|
||||
import type {
|
||||
DSQL_DatabaseSchemaType,
|
||||
DSQL_FieldSchemaType,
|
||||
DSQL_TableSchemaType,
|
||||
} from "@moduletrace/datasquirel/dist/package-shared/types";
|
||||
import { Database } from "bun:sqlite";
|
||||
import _ from "lodash";
|
||||
import DbClient from ".";
|
||||
|
||||
// Schema Manager Class
|
||||
class SQLiteSchemaManager {
|
||||
private db: Database;
|
||||
private db_manager_table_name: string;
|
||||
private recreate_vector_table: boolean;
|
||||
private db_schema: DSQL_DatabaseSchemaType;
|
||||
|
||||
constructor({
|
||||
schema,
|
||||
recreate_vector_table = false,
|
||||
}: {
|
||||
schema: DSQL_DatabaseSchemaType;
|
||||
recreate_vector_table?: boolean;
|
||||
}) {
|
||||
this.db = DbClient;
|
||||
this.db_manager_table_name = "__db_schema_manager__";
|
||||
this.db.run("PRAGMA foreign_keys = ON;");
|
||||
this.recreate_vector_table = recreate_vector_table;
|
||||
this.createDbManagerTable();
|
||||
this.db_schema = schema;
|
||||
}
|
||||
|
||||
private createDbManagerTable() {
|
||||
this.db.run(`
|
||||
CREATE TABLE IF NOT EXISTS ${this.db_manager_table_name} (
|
||||
table_name TEXT NOT NULL,
|
||||
created_at INTEGER NOT NULL,
|
||||
updated_at INTEGER NOT NULL
|
||||
)
|
||||
`);
|
||||
}
|
||||
|
||||
private insertDbManagerTable(tableName: string) {
|
||||
this.db.run(
|
||||
`INSERT INTO ${this.db_manager_table_name} (table_name,created_at,updated_at) VALUES (?, ?, ?)`,
|
||||
[tableName, Date.now(), Date.now()],
|
||||
);
|
||||
}
|
||||
|
||||
private removeDbManagerTable(tableName: string) {
|
||||
this.db.run(
|
||||
`DELETE FROM ${this.db_manager_table_name} WHERE table_name = ?`,
|
||||
[tableName],
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Main synchronization method
|
||||
*/
|
||||
async syncSchema(): Promise<void> {
|
||||
console.log("Starting schema synchronization...");
|
||||
|
||||
const existingTables = this.getExistingTables();
|
||||
const schemaTables = this.db_schema.tables.map((t) => t.tableName);
|
||||
|
||||
// 2. Create or update tables
|
||||
for (const table of this.db_schema.tables) {
|
||||
await this.syncTable(table, existingTables);
|
||||
}
|
||||
|
||||
// 1. Drop tables that no longer exist in schema
|
||||
await this.dropRemovedTables(existingTables, schemaTables);
|
||||
|
||||
console.log("Schema synchronization complete!");
|
||||
}
|
||||
|
||||
/**
|
||||
* Get list of existing tables in the database
|
||||
*/
|
||||
private getExistingTables(): string[] {
|
||||
let sql = `SELECT table_name FROM ${this.db_manager_table_name}`;
|
||||
|
||||
const query = this.db.query(sql);
|
||||
const results = query.all() as { table_name: string }[];
|
||||
|
||||
return results.map((r) => r.table_name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Drop tables that are no longer in the schema
|
||||
*/
|
||||
private async dropRemovedTables(
|
||||
existingTables: string[],
|
||||
schemaTables: string[],
|
||||
): Promise<void> {
|
||||
const tablesToDrop = existingTables.filter(
|
||||
(t) =>
|
||||
!schemaTables.includes(t) &&
|
||||
!schemaTables.find((scT) => t.startsWith(scT + "_")),
|
||||
);
|
||||
|
||||
for (const tableName of tablesToDrop) {
|
||||
console.log(`Dropping table: ${tableName}`);
|
||||
this.db.run(`DROP TABLE IF EXISTS "${tableName}"`);
|
||||
this.db.run(
|
||||
`DELETE FROM ${this.db_manager_table_name} WHERE table_name = "${tableName}"`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync a single table (create or update)
|
||||
*/
|
||||
private async syncTable(
|
||||
table: DSQL_TableSchemaType,
|
||||
existingTables: string[],
|
||||
): Promise<void> {
|
||||
let tableExists = existingTables.includes(table.tableName);
|
||||
|
||||
// Handle table rename
|
||||
if (table.tableNameOld && table.tableNameOld !== table.tableName) {
|
||||
if (existingTables.includes(table.tableNameOld)) {
|
||||
console.log(
|
||||
`Renaming table: ${table.tableNameOld} -> ${table.tableName}`,
|
||||
);
|
||||
this.db.run(
|
||||
`ALTER TABLE "${table.tableNameOld}" RENAME TO "${table.tableName}"`,
|
||||
);
|
||||
this.insertDbManagerTable(table.tableName);
|
||||
this.removeDbManagerTable(table.tableNameOld);
|
||||
tableExists = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (!tableExists) {
|
||||
// Create new table
|
||||
await this.createTable(table);
|
||||
this.insertDbManagerTable(table.tableName);
|
||||
} else {
|
||||
// Update existing table
|
||||
await this.updateTable(table);
|
||||
}
|
||||
|
||||
// Sync indexes
|
||||
await this.syncIndexes(table);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new table
|
||||
*/
|
||||
private async createTable(table: DSQL_TableSchemaType): Promise<void> {
|
||||
console.log(`Creating table: ${table.tableName}`);
|
||||
|
||||
let new_table = _.cloneDeep(table);
|
||||
|
||||
if (new_table.parentTableName) {
|
||||
const parent_table = this.db_schema.tables.find(
|
||||
(t) => t.tableName === new_table.parentTableName,
|
||||
);
|
||||
|
||||
if (!parent_table) {
|
||||
throw new Error(
|
||||
`Parent table \`${new_table.parentTableName}\` not found for \`${new_table.tableName}\``,
|
||||
);
|
||||
}
|
||||
|
||||
new_table = _.merge(parent_table, {
|
||||
tableName: new_table.tableName,
|
||||
tableDescription: new_table.tableDescription,
|
||||
});
|
||||
}
|
||||
|
||||
const columns: string[] = [];
|
||||
const foreignKeys: string[] = [];
|
||||
|
||||
for (const field of new_table.fields) {
|
||||
const columnDef = this.buildColumnDefinition(field);
|
||||
columns.push(columnDef);
|
||||
|
||||
if (field.foreignKey) {
|
||||
foreignKeys.push(this.buildForeignKeyConstraint(field));
|
||||
}
|
||||
}
|
||||
|
||||
// Add unique constraints
|
||||
if (new_table.uniqueConstraints) {
|
||||
for (const constraint of new_table.uniqueConstraints) {
|
||||
if (
|
||||
constraint.constraintTableFields &&
|
||||
constraint.constraintTableFields.length > 0
|
||||
) {
|
||||
const fields = constraint.constraintTableFields
|
||||
.map((f) => `"${f.value}"`)
|
||||
.join(", ");
|
||||
const constraintName =
|
||||
constraint.constraintName ||
|
||||
`unique_${fields.replace(/"/g, "")}`;
|
||||
columns.push(
|
||||
`CONSTRAINT "${constraintName}" UNIQUE (${fields})`,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const allConstraints = [...columns, ...foreignKeys];
|
||||
|
||||
const sql = new_table.isVector
|
||||
? `CREATE VIRTUAL TABLE "${new_table.tableName}" USING ${new_table.vectorType || "vec0"}(${allConstraints.join(", ")})`
|
||||
: `CREATE TABLE "${new_table.tableName}" (${allConstraints.join(", ")})`;
|
||||
|
||||
this.db.run(sql);
|
||||
}
|
||||
|
||||
/**
|
||||
* Update an existing table
|
||||
*/
|
||||
private async updateTable(table: DSQL_TableSchemaType): Promise<void> {
|
||||
console.log(`Updating table: ${table.tableName}`);
|
||||
|
||||
const existingColumns = this.getTableColumns(table.tableName);
|
||||
const schemaColumns = table.fields.map((f) => f.fieldName || "");
|
||||
|
||||
// SQLite has limited ALTER TABLE support
|
||||
// We need to use the recreation strategy for complex changes
|
||||
|
||||
const columnsToAdd = table.fields.filter(
|
||||
(f) =>
|
||||
f.fieldName &&
|
||||
!existingColumns.find(
|
||||
(c) =>
|
||||
c.name == f.fieldName && c.type == this.mapDataType(f),
|
||||
),
|
||||
);
|
||||
const columnsToRemove = existingColumns.filter(
|
||||
(c) => !schemaColumns.includes(c.name),
|
||||
);
|
||||
const columnsToUpdate = table.fields.filter(
|
||||
(f) =>
|
||||
f.fieldName &&
|
||||
f.updatedField &&
|
||||
existingColumns.find(
|
||||
(c) =>
|
||||
c.name == f.fieldName && c.type == this.mapDataType(f),
|
||||
),
|
||||
);
|
||||
|
||||
// Simple case: only adding columns
|
||||
if (columnsToRemove.length === 0 && columnsToUpdate.length === 0) {
|
||||
for (const field of columnsToAdd) {
|
||||
await this.addColumn(table.tableName, field);
|
||||
}
|
||||
} else {
|
||||
// Complex case: need to recreate table
|
||||
await this.recreateTable(table);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get existing columns for a table
|
||||
*/
|
||||
private getTableColumns(
|
||||
tableName: string,
|
||||
): { name: string; type: string }[] {
|
||||
const query = this.db.query(`PRAGMA table_info("${tableName}")`);
|
||||
const results = query.all() as { name: string; type: string }[];
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a new column to existing table
|
||||
*/
|
||||
private async addColumn(
|
||||
tableName: string,
|
||||
field: DSQL_FieldSchemaType,
|
||||
): Promise<void> {
|
||||
console.log(`Adding column: ${tableName}.${field.fieldName}`);
|
||||
|
||||
const columnDef = this.buildColumnDefinition(field);
|
||||
// Remove PRIMARY KEY and UNIQUE constraints for ALTER TABLE ADD COLUMN
|
||||
const cleanDef = columnDef
|
||||
.replace(/PRIMARY KEY/gi, "")
|
||||
.replace(/AUTOINCREMENT/gi, "")
|
||||
.replace(/UNIQUE/gi, "")
|
||||
.trim();
|
||||
|
||||
const sql = `ALTER TABLE "${tableName}" ADD COLUMN ${cleanDef}`;
|
||||
|
||||
this.db.run(sql);
|
||||
}
|
||||
|
||||
/**
|
||||
* Recreate table (for complex schema changes)
|
||||
*/
|
||||
private async recreateTable(table: DSQL_TableSchemaType): Promise<void> {
|
||||
if (table.isVector) {
|
||||
if (!this.recreate_vector_table) {
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`Recreating vector table: ${table.tableName}`);
|
||||
|
||||
const existingRows = this.db
|
||||
.query(`SELECT * FROM "${table.tableName}"`)
|
||||
.all() as { [k: string]: any }[];
|
||||
|
||||
this.db.run(`DROP TABLE "${table.tableName}"`);
|
||||
await this.createTable(table);
|
||||
|
||||
if (existingRows.length > 0) {
|
||||
for (let i = 0; i < existingRows.length; i++) {
|
||||
const row = existingRows[i];
|
||||
if (!row) continue;
|
||||
|
||||
const columns = Object.keys(row);
|
||||
const placeholders = columns.map(() => "?").join(", ");
|
||||
|
||||
this.db.run(
|
||||
`INSERT INTO "${table.tableName}" (${columns.join(", ")}) VALUES (${placeholders})`,
|
||||
Object.values(row),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
const tempTableName = `${table.tableName}_temp_${Date.now()}`;
|
||||
|
||||
// Get existing data
|
||||
const existingColumns = this.getTableColumns(table.tableName);
|
||||
const columnsToKeep = table.fields
|
||||
.filter(
|
||||
(f) =>
|
||||
f.fieldName &&
|
||||
existingColumns.find(
|
||||
(c) =>
|
||||
c.name == f.fieldName &&
|
||||
c.type == this.mapDataType(f),
|
||||
),
|
||||
)
|
||||
.map((f) => f.fieldName);
|
||||
|
||||
// Create temp table with new schema
|
||||
const tempTable = { ...table, tableName: tempTableName };
|
||||
await this.createTable(tempTable);
|
||||
|
||||
// Copy data if there are common columns
|
||||
if (columnsToKeep.length > 0) {
|
||||
const columnList = columnsToKeep.map((c) => `"${c}"`).join(", ");
|
||||
this.db.run(
|
||||
`INSERT INTO "${tempTableName}" (${columnList}) SELECT ${columnList} FROM "${table.tableName}"`,
|
||||
);
|
||||
}
|
||||
|
||||
// Drop old table
|
||||
this.db.run(`DROP TABLE "${table.tableName}"`);
|
||||
|
||||
// Rename temp table
|
||||
this.db.run(
|
||||
`ALTER TABLE "${tempTableName}" RENAME TO "${table.tableName}"`,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Build column definition SQL
|
||||
*/
|
||||
private buildColumnDefinition(field: DSQL_FieldSchemaType): string {
|
||||
if (!field.fieldName) {
|
||||
throw new Error("Field name is required");
|
||||
}
|
||||
|
||||
const fieldName = field.sideCar
|
||||
? `+${field.fieldName}`
|
||||
: `${field.fieldName}`;
|
||||
|
||||
const parts: string[] = [fieldName];
|
||||
|
||||
// Data type mapping
|
||||
const dataType = this.mapDataType(field);
|
||||
parts.push(dataType);
|
||||
|
||||
// Primary key
|
||||
if (field.primaryKey) {
|
||||
parts.push("PRIMARY KEY");
|
||||
if (field.autoIncrement) {
|
||||
parts.push("AUTOINCREMENT");
|
||||
}
|
||||
}
|
||||
|
||||
// Not null
|
||||
if (field.notNullValue || field.primaryKey) {
|
||||
if (!field.primaryKey) {
|
||||
parts.push("NOT NULL");
|
||||
}
|
||||
}
|
||||
|
||||
// Unique
|
||||
if (field.unique && !field.primaryKey) {
|
||||
parts.push("UNIQUE");
|
||||
}
|
||||
|
||||
// Default value
|
||||
if (field.defaultValue !== undefined) {
|
||||
if (typeof field.defaultValue === "string") {
|
||||
parts.push(
|
||||
// Escape single quotes by doubling them to prevent SQL injection and wrap in single quotes
|
||||
`DEFAULT '${field.defaultValue.replace(/'/g, "''")}'`,
|
||||
);
|
||||
} else {
|
||||
parts.push(`DEFAULT ${field.defaultValue}`);
|
||||
}
|
||||
} else if (field.defaultValueLiteral) {
|
||||
parts.push(`DEFAULT ${field.defaultValueLiteral}`);
|
||||
}
|
||||
|
||||
return parts.join(" ");
|
||||
}
|
||||
|
||||
/**
|
||||
* Map DSQL data types to SQLite types
|
||||
*/
|
||||
private mapDataType(field: DSQL_FieldSchemaType): string {
|
||||
const dataType = field.dataType?.toLowerCase() || "text";
|
||||
const vectorSize = field.vectorSize || 1536;
|
||||
|
||||
// Vector Embeddings
|
||||
if (field.isVector) {
|
||||
return `FLOAT[${vectorSize}]`;
|
||||
}
|
||||
|
||||
// Integer types
|
||||
if (
|
||||
dataType.includes("int") ||
|
||||
dataType === "bigint" ||
|
||||
dataType === "smallint" ||
|
||||
dataType === "tinyint"
|
||||
) {
|
||||
return "INTEGER";
|
||||
}
|
||||
|
||||
// Real/Float types
|
||||
if (
|
||||
dataType.includes("real") ||
|
||||
dataType.includes("float") ||
|
||||
dataType.includes("double") ||
|
||||
dataType === "decimal" ||
|
||||
dataType === "numeric"
|
||||
) {
|
||||
return "REAL";
|
||||
}
|
||||
|
||||
// Blob types
|
||||
if (dataType.includes("blob") || dataType.includes("binary")) {
|
||||
return "BLOB";
|
||||
}
|
||||
|
||||
// Boolean
|
||||
if (dataType === "boolean" || dataType === "bool") {
|
||||
return "INTEGER"; // SQLite uses INTEGER for boolean (0/1)
|
||||
}
|
||||
|
||||
// Date/Time types
|
||||
if (dataType.includes("date") || dataType.includes("time")) {
|
||||
return "TEXT"; // SQLite stores dates as TEXT or INTEGER
|
||||
}
|
||||
|
||||
// Default to TEXT for all text-based types
|
||||
return "TEXT";
|
||||
}
|
||||
|
||||
/**
|
||||
* Build foreign key constraint
|
||||
*/
|
||||
private buildForeignKeyConstraint(field: DSQL_FieldSchemaType): string {
|
||||
const fk = field.foreignKey!;
|
||||
let constraint = `FOREIGN KEY ("${field.fieldName}") REFERENCES "${fk.destinationTableName}"("${fk.destinationTableColumnName}")`;
|
||||
|
||||
if (fk.cascadeDelete) {
|
||||
constraint += " ON DELETE CASCADE";
|
||||
}
|
||||
|
||||
if (fk.cascadeUpdate) {
|
||||
constraint += " ON UPDATE CASCADE";
|
||||
}
|
||||
|
||||
return constraint;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync indexes for a table
|
||||
*/
|
||||
private async syncIndexes(table: DSQL_TableSchemaType): Promise<void> {
|
||||
if (!table.indexes || table.indexes.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Get existing indexes
|
||||
const query = this.db.query(
|
||||
`SELECT name FROM sqlite_master WHERE type='index' AND tbl_name='${table.tableName}' AND name NOT LIKE 'sqlite_%'`,
|
||||
);
|
||||
const existingIndexes = (query.all() as { name: string }[]).map(
|
||||
(r) => r.name,
|
||||
);
|
||||
|
||||
// Drop indexes not in schema
|
||||
for (const indexName of existingIndexes) {
|
||||
const stillExists = table.indexes.some(
|
||||
(idx) => idx.indexName === indexName,
|
||||
);
|
||||
if (!stillExists) {
|
||||
console.log(`Dropping index: ${indexName}`);
|
||||
this.db.run(`DROP INDEX IF EXISTS "${indexName}"`);
|
||||
}
|
||||
}
|
||||
|
||||
// Create new indexes
|
||||
for (const index of table.indexes) {
|
||||
if (
|
||||
!index.indexName ||
|
||||
!index.indexTableFields ||
|
||||
index.indexTableFields.length === 0
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!existingIndexes.includes(index.indexName)) {
|
||||
console.log(`Creating index: ${index.indexName}`);
|
||||
const fields = index.indexTableFields
|
||||
.map((f) => `"${f.value}"`)
|
||||
.join(", ");
|
||||
const unique = index.indexType === "regular" ? "" : ""; // SQLite doesn't have FULLTEXT in CREATE INDEX
|
||||
this.db.run(
|
||||
`CREATE ${unique}INDEX "${index.indexName}" ON "${table.tableName}" (${fields})`,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Close database connection
|
||||
*/
|
||||
close(): void {
|
||||
this.db.close();
|
||||
}
|
||||
}
|
||||
|
||||
// Example usage
|
||||
async function main() {
|
||||
const schema: DSQL_DatabaseSchemaType = {
|
||||
dbName: "example_db",
|
||||
tables: [
|
||||
{
|
||||
tableName: "users",
|
||||
tableDescription: "User accounts",
|
||||
fields: [
|
||||
{
|
||||
fieldName: "id",
|
||||
dataType: "INTEGER",
|
||||
primaryKey: true,
|
||||
autoIncrement: true,
|
||||
},
|
||||
{
|
||||
fieldName: "username",
|
||||
dataType: "TEXT",
|
||||
notNullValue: true,
|
||||
unique: true,
|
||||
},
|
||||
{
|
||||
fieldName: "email",
|
||||
dataType: "TEXT",
|
||||
notNullValue: true,
|
||||
},
|
||||
{
|
||||
fieldName: "created_at",
|
||||
dataType: "TEXT",
|
||||
defaultValueLiteral: "CURRENT_TIMESTAMP",
|
||||
},
|
||||
],
|
||||
indexes: [
|
||||
{
|
||||
indexName: "idx_users_email",
|
||||
indexType: "regular",
|
||||
indexTableFields: [
|
||||
{ value: "email", dataType: "TEXT" },
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
tableName: "posts",
|
||||
fields: [
|
||||
{
|
||||
fieldName: "id",
|
||||
dataType: "INTEGER",
|
||||
primaryKey: true,
|
||||
autoIncrement: true,
|
||||
},
|
||||
{
|
||||
fieldName: "user_id",
|
||||
dataType: "INTEGER",
|
||||
notNullValue: true,
|
||||
foreignKey: {
|
||||
destinationTableName: "users",
|
||||
destinationTableColumnName: "id",
|
||||
cascadeDelete: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
fieldName: "title",
|
||||
dataType: "TEXT",
|
||||
notNullValue: true,
|
||||
},
|
||||
{
|
||||
fieldName: "content",
|
||||
dataType: "TEXT",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
export { SQLiteSchemaManager };
|
||||
67
src/lib/sqlite/db-schema-to-typedef.ts
Normal file
67
src/lib/sqlite/db-schema-to-typedef.ts
Normal file
@ -0,0 +1,67 @@
|
||||
import type {
|
||||
DSQL_DatabaseSchemaType,
|
||||
DSQL_TableSchemaType,
|
||||
} from "@moduletrace/datasquirel/dist/package-shared/types";
|
||||
import _ from "lodash";
|
||||
import generateTypeDefinition from "./generate-type-definitions";
|
||||
|
||||
type Params = {
|
||||
dbSchema?: DSQL_DatabaseSchemaType;
|
||||
};
|
||||
|
||||
export default function dbSchemaToType(params?: Params): string[] | undefined {
|
||||
let datasquirelSchema = params?.dbSchema;
|
||||
|
||||
if (!datasquirelSchema) return;
|
||||
|
||||
let tableNames = `export const DsqlTables = [\n${datasquirelSchema.tables
|
||||
.map((tbl) => ` "${tbl.tableName}",`)
|
||||
.join("\n")}\n] as const`;
|
||||
|
||||
const dbTablesSchemas = datasquirelSchema.tables;
|
||||
|
||||
const defDbName = datasquirelSchema.dbName
|
||||
?.toUpperCase()
|
||||
.replace(/ |\-/g, "_");
|
||||
|
||||
const defNames: string[] = [];
|
||||
|
||||
const schemas = dbTablesSchemas
|
||||
.map((table) => {
|
||||
let final_table = _.cloneDeep(table);
|
||||
|
||||
if (final_table.parentTableName) {
|
||||
const parent_table = dbTablesSchemas.find(
|
||||
(t) => t.tableName === final_table.parentTableName,
|
||||
);
|
||||
|
||||
if (parent_table) {
|
||||
final_table = _.merge(parent_table, {
|
||||
tableName: final_table.tableName,
|
||||
tableDescription: final_table.tableDescription,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const defObj = generateTypeDefinition({
|
||||
paradigm: "TypeScript",
|
||||
table: final_table,
|
||||
typeDefName: `DSQL_${defDbName}_${final_table.tableName.toUpperCase()}`,
|
||||
allValuesOptional: true,
|
||||
addExport: true,
|
||||
});
|
||||
|
||||
if (defObj.tdName?.match(/./)) {
|
||||
defNames.push(defObj.tdName);
|
||||
}
|
||||
|
||||
return defObj.typeDefinition;
|
||||
})
|
||||
.filter((schm) => typeof schm == "string");
|
||||
|
||||
const allTd = defNames?.[0]
|
||||
? `export type DSQL_${defDbName}_ALL_TYPEDEFS = ${defNames.join(` & `)}`
|
||||
: ``;
|
||||
|
||||
return [tableNames, ...schemas, allTd];
|
||||
}
|
||||
65
src/lib/sqlite/db-select.ts
Normal file
65
src/lib/sqlite/db-select.ts
Normal file
@ -0,0 +1,65 @@
|
||||
import mysql from "mysql";
|
||||
import type { DSQL_TRAVIS_AI_ALL_TYPEDEFS, DsqlTables } from "@/types/db";
|
||||
import datasquirel from "@moduletrace/datasquirel";
|
||||
import type {
|
||||
APIResponseObject,
|
||||
ServerQueryParam,
|
||||
} from "@moduletrace/datasquirel/dist/package-shared/types";
|
||||
import DbClient from ".";
|
||||
import _ from "lodash";
|
||||
|
||||
type Params<
|
||||
T extends DSQL_TRAVIS_AI_ALL_TYPEDEFS = DSQL_TRAVIS_AI_ALL_TYPEDEFS,
|
||||
> = {
|
||||
query?: ServerQueryParam<T>;
|
||||
table: (typeof DsqlTables)[number];
|
||||
count?: boolean;
|
||||
targetId?: number | string;
|
||||
};
|
||||
|
||||
export default async function DbSelect<
|
||||
T extends DSQL_TRAVIS_AI_ALL_TYPEDEFS = DSQL_TRAVIS_AI_ALL_TYPEDEFS,
|
||||
>({ table, query, count, targetId }: Params<T>): Promise<APIResponseObject<T>> {
|
||||
try {
|
||||
let finalQuery = query || {};
|
||||
|
||||
if (targetId) {
|
||||
finalQuery = _.merge<ServerQueryParam<any>, ServerQueryParam<any>>(
|
||||
finalQuery,
|
||||
{
|
||||
query: {
|
||||
id: {
|
||||
value: String(targetId),
|
||||
},
|
||||
},
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
const sqlObj = datasquirel.sql.sqlGenerator({
|
||||
tableName: table,
|
||||
genObject: finalQuery,
|
||||
count,
|
||||
});
|
||||
|
||||
const sql = mysql.format(sqlObj.string, sqlObj.values);
|
||||
|
||||
const res = DbClient.query<T, T[]>(sql);
|
||||
const batchRes = res.all();
|
||||
|
||||
return {
|
||||
success: true,
|
||||
payload: batchRes,
|
||||
singleRes: batchRes[0],
|
||||
debug: {
|
||||
sqlObj,
|
||||
sql,
|
||||
},
|
||||
};
|
||||
} catch (error: any) {
|
||||
return {
|
||||
success: false,
|
||||
error: error.message,
|
||||
};
|
||||
}
|
||||
}
|
||||
100
src/lib/sqlite/db-update.ts
Normal file
100
src/lib/sqlite/db-update.ts
Normal file
@ -0,0 +1,100 @@
|
||||
import mysql from "mysql";
|
||||
import type { DSQL_TRAVIS_AI_ALL_TYPEDEFS, DsqlTables } from "@/types/db";
|
||||
import datasquirel from "@moduletrace/datasquirel";
|
||||
import type {
|
||||
APIResponseObject,
|
||||
ServerQueryParam,
|
||||
} from "@moduletrace/datasquirel/dist/package-shared/types";
|
||||
import DbClient from ".";
|
||||
import _ from "lodash";
|
||||
|
||||
type Params<T extends { [k: string]: any } = DSQL_TRAVIS_AI_ALL_TYPEDEFS> = {
|
||||
table: (typeof DsqlTables)[number];
|
||||
data: T;
|
||||
query?: ServerQueryParam<T>;
|
||||
targetId?: number | string;
|
||||
};
|
||||
|
||||
export default async function DbUpdate<
|
||||
T extends { [k: string]: any } = DSQL_TRAVIS_AI_ALL_TYPEDEFS,
|
||||
>({ table, data, query, targetId }: Params<T>): Promise<APIResponseObject> {
|
||||
try {
|
||||
let finalQuery = query || {};
|
||||
|
||||
if (targetId) {
|
||||
finalQuery = _.merge<ServerQueryParam<any>, ServerQueryParam<any>>(
|
||||
finalQuery,
|
||||
{
|
||||
query: {
|
||||
id: {
|
||||
value: String(targetId),
|
||||
},
|
||||
},
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
const sqlQueryObj = datasquirel.sql.sqlGenerator({
|
||||
tableName: table,
|
||||
genObject: finalQuery,
|
||||
});
|
||||
|
||||
let values: (string | number)[] = [];
|
||||
|
||||
const whereClause = sqlQueryObj.string.match(/WHERE .*/)?.[0];
|
||||
|
||||
if (whereClause) {
|
||||
let sql = `UPDATE ${table} SET`;
|
||||
|
||||
const finalData: DSQL_TRAVIS_AI_ALL_TYPEDEFS = {
|
||||
...data,
|
||||
updated_at: Date.now(),
|
||||
};
|
||||
|
||||
const keys = Object.keys(finalData);
|
||||
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
const key = keys[i];
|
||||
if (!key) continue;
|
||||
|
||||
const isLast = i == keys.length - 1;
|
||||
|
||||
sql += ` ${key}=?`;
|
||||
values.push(
|
||||
String(finalData[key as keyof DSQL_TRAVIS_AI_ALL_TYPEDEFS]),
|
||||
);
|
||||
|
||||
if (!isLast) {
|
||||
sql += `,`;
|
||||
}
|
||||
}
|
||||
|
||||
sql += ` ${whereClause}`;
|
||||
values = [...values, ...sqlQueryObj.values];
|
||||
|
||||
const res = DbClient.run(sql, values);
|
||||
|
||||
return {
|
||||
success: Boolean(res.changes),
|
||||
postInsertReturn: {
|
||||
affectedRows: res.changes,
|
||||
insertId: Number(res.lastInsertRowid),
|
||||
},
|
||||
debug: {
|
||||
sql,
|
||||
values,
|
||||
},
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
success: false,
|
||||
msg: `No WHERE clause`,
|
||||
};
|
||||
}
|
||||
} catch (error: any) {
|
||||
return {
|
||||
success: false,
|
||||
error: error.message,
|
||||
};
|
||||
}
|
||||
}
|
||||
26
src/lib/sqlite/index.ts
Normal file
26
src/lib/sqlite/index.ts
Normal file
@ -0,0 +1,26 @@
|
||||
import AppData from "@/data/app-data";
|
||||
import grabDirNames from "@/utils/grab-dir-names";
|
||||
import { Database } from "bun:sqlite";
|
||||
import path from "node:path";
|
||||
import * as sqliteVec from "sqlite-vec";
|
||||
|
||||
const { ROOT_DIR } = grabDirNames();
|
||||
|
||||
const DBFilePath = path.join(ROOT_DIR, AppData["DbName"]);
|
||||
const DBVecPluginFilePath = path.join(ROOT_DIR, AppData["DbVecPluginName"]);
|
||||
|
||||
const DbClient = new Database(DBFilePath, {
|
||||
create: true,
|
||||
});
|
||||
|
||||
// DbClient.loadExtension(DBVecPluginFilePath);
|
||||
|
||||
sqliteVec.load(DbClient);
|
||||
|
||||
// Test if it's working
|
||||
// const { vec_version } = DbClient.prepare(
|
||||
// "select vec_version() as vec_version",
|
||||
// ).get();
|
||||
// console.log(`sqlite-vec version: ${vec_version}`);
|
||||
|
||||
export default DbClient;
|
||||
28
src/lib/sqlite/schema-to-typedef.ts
Normal file
28
src/lib/sqlite/schema-to-typedef.ts
Normal file
@ -0,0 +1,28 @@
|
||||
import type { DSQL_DatabaseSchemaType } from "@moduletrace/datasquirel/dist/package-shared/types";
|
||||
import dbSchemaToType from "./db-schema-to-type";
|
||||
import path from "node:path";
|
||||
import { existsSync, mkdirSync, writeFileSync } from "node:fs";
|
||||
|
||||
type Params = {
|
||||
dbSchema: DSQL_DatabaseSchemaType;
|
||||
};
|
||||
|
||||
export default function dbSchemaToTypeDef({ dbSchema }: Params) {
|
||||
try {
|
||||
if (!dbSchema) throw new Error("No schema found");
|
||||
|
||||
const definitions = dbSchemaToType({ dbSchema });
|
||||
|
||||
const finalOutfile = path.resolve(__dirname, "../types/db/index.ts");
|
||||
|
||||
const ourfileDir = path.dirname(finalOutfile);
|
||||
|
||||
if (!existsSync(ourfileDir)) {
|
||||
mkdirSync(ourfileDir, { recursive: true });
|
||||
}
|
||||
|
||||
writeFileSync(finalOutfile, definitions?.join("\n\n") || "", "utf-8");
|
||||
} catch (error: any) {
|
||||
console.log(`Schema to Typedef Error =>`, error.message);
|
||||
}
|
||||
}
|
||||
35
src/lib/sqlite/schema.ts
Normal file
35
src/lib/sqlite/schema.ts
Normal file
@ -0,0 +1,35 @@
|
||||
import type {
    DSQL_DatabaseSchemaType,
    DSQL_FieldSchemaType,
} from "@moduletrace/datasquirel/dist/package-shared/types";
import _ from "lodash";

// Baseline columns intended for every table: an auto-increment primary
// key plus created/updated audit timestamps.
// NOTE(review): `DefaultFields` is neither exported nor referenced in
// this file — presumably merged into table schemas elsewhere; confirm
// before removing.
const DefaultFields: DSQL_FieldSchemaType[] = [
    {
        // Surrogate primary key.
        fieldName: "id",
        dataType: "INTEGER",
        primaryKey: true,
        autoIncrement: true,
        notNullValue: true,
        fieldDescription: "The unique identifier of the record.",
    },
    {
        // Creation timestamp (per description, a Unix timestamp).
        fieldName: "created_at",
        dataType: "INTEGER",
        notNullValue: true,
        fieldDescription:
            "The time when the record was created. (Unix Timestamp)",
    },
    {
        // Last-modification timestamp (per description, a Unix timestamp).
        fieldName: "updated_at",
        dataType: "INTEGER",
        notNullValue: true,
        fieldDescription:
            "The time when the record was updated. (Unix Timestamp)",
    },
];

// Root datasquirel schema for the application database. `tables` starts
// empty here; tables appear to be registered elsewhere.
export const DbSchema: DSQL_DatabaseSchemaType = {
    dbName: "travis-ai",
    tables: [],
};
|
||||
29
tsconfig.json
Normal file
29
tsconfig.json
Normal file
@ -0,0 +1,29 @@
|
||||
{
    "compilerOptions": {
        // Environment setup & latest features
        "lib": ["ESNext"],
        "target": "ESNext",
        "module": "Preserve",
        "moduleDetection": "force",
        "jsx": "react-jsx",
        "allowJs": true,

        // Bundler mode — Bun resolves and transpiles; tsc only type-checks
        "moduleResolution": "bundler",
        "allowImportingTsExtensions": true,
        "verbatimModuleSyntax": true,
        "noEmit": true,

        // Best practices
        "strict": true,
        "skipLibCheck": true,
        "noFallthroughCasesInSwitch": true,
        "noUncheckedIndexedAccess": true,
        "noImplicitOverride": true,

        // Some stricter flags (deliberately left off for this project)
        "noUnusedLocals": false,
        "noUnusedParameters": false,
        "noPropertyAccessFromIndexSignature": false
    }
}
|
||||
Loading…
Reference in New Issue
Block a user