First Commit
This commit is contained in:
commit
df53cdb4e5
35
.gitignore
vendored
Normal file
35
.gitignore
vendored
Normal file
@ -0,0 +1,35 @@
|
||||
# dependencies (bun install)
|
||||
node_modules
|
||||
|
||||
# output
|
||||
out
|
||||
*.tgz
|
||||
|
||||
# code coverage
|
||||
coverage
|
||||
*.lcov
|
||||
|
||||
# logs
|
||||
logs
|
||||
*.log
|
||||
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
|
||||
|
||||
# dotenv environment variable files
|
||||
.env
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
.env.local
|
||||
|
||||
# caches
|
||||
.eslintcache
|
||||
.cache
|
||||
*.tsbuildinfo
|
||||
|
||||
# IntelliJ based IDEs
|
||||
.idea
|
||||
|
||||
# Finder (MacOS) folder config
|
||||
.DS_Store
|
||||
/test
|
||||
.vscode
|
||||
2
.npmrc
Normal file
2
.npmrc
Normal file
@ -0,0 +1,2 @@
|
||||
@moduletrace:registry=https://git.tben.me/api/packages/moduletrace/npm/
|
||||
//git.tben.me/api/packages/moduletrace/npm/:_authToken=${GITBEN_NPM_TOKEN}
|
||||
106
CLAUDE.md
Normal file
106
CLAUDE.md
Normal file
@ -0,0 +1,106 @@
|
||||
|
||||
Default to using Bun instead of Node.js.
|
||||
|
||||
- Use `bun <file>` instead of `node <file>` or `ts-node <file>`
|
||||
- Use `bun test` instead of `jest` or `vitest`
|
||||
- Use `bun build <file.html|file.ts|file.css>` instead of `webpack` or `esbuild`
|
||||
- Use `bun install` instead of `npm install` or `yarn install` or `pnpm install`
|
||||
- Use `bun run <script>` instead of `npm run <script>` or `yarn run <script>` or `pnpm run <script>`
|
||||
- Bun automatically loads .env, so don't use dotenv.
|
||||
|
||||
## APIs
|
||||
|
||||
- `Bun.serve()` supports WebSockets, HTTPS, and routes. Don't use `express`.
|
||||
- `bun:sqlite` for SQLite. Don't use `better-sqlite3`.
|
||||
- `Bun.redis` for Redis. Don't use `ioredis`.
|
||||
- `Bun.sql` for Postgres. Don't use `pg` or `postgres.js`.
|
||||
- `WebSocket` is built-in. Don't use `ws`.
|
||||
- Prefer `Bun.file` over `node:fs`'s readFile/writeFile
|
||||
- Bun.$`ls` instead of execa.
|
||||
|
||||
## Testing
|
||||
|
||||
Use `bun test` to run tests.
|
||||
|
||||
```ts#index.test.ts
|
||||
import { test, expect } from "bun:test";
|
||||
|
||||
test("hello world", () => {
|
||||
expect(1).toBe(1);
|
||||
});
|
||||
```
|
||||
|
||||
## Frontend
|
||||
|
||||
Use HTML imports with `Bun.serve()`. Don't use `vite`. HTML imports fully support React, CSS, Tailwind.
|
||||
|
||||
Server:
|
||||
|
||||
```ts#index.ts
|
||||
import index from "./index.html"
|
||||
|
||||
Bun.serve({
|
||||
routes: {
|
||||
"/": index,
|
||||
"/api/users/:id": {
|
||||
GET: (req) => {
|
||||
return new Response(JSON.stringify({ id: req.params.id }));
|
||||
},
|
||||
},
|
||||
},
|
||||
// optional websocket support
|
||||
websocket: {
|
||||
open: (ws) => {
|
||||
ws.send("Hello, world!");
|
||||
},
|
||||
message: (ws, message) => {
|
||||
ws.send(message);
|
||||
},
|
||||
close: (ws) => {
|
||||
// handle close
|
||||
}
|
||||
},
|
||||
development: {
|
||||
hmr: true,
|
||||
console: true,
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
HTML files can import .tsx, .jsx or .js files directly and Bun's bundler will transpile & bundle automatically. `<link>` tags can point to stylesheets and Bun's CSS bundler will bundle.
|
||||
|
||||
```html#index.html
|
||||
<html>
|
||||
<body>
|
||||
<h1>Hello, world!</h1>
|
||||
<script type="module" src="./frontend.tsx"></script>
|
||||
</body>
|
||||
</html>
|
||||
```
|
||||
|
||||
With the following `frontend.tsx`:
|
||||
|
||||
```tsx#frontend.tsx
|
||||
import React from "react";
|
||||
|
||||
// import .css files directly and it works
|
||||
import './index.css';
|
||||
|
||||
import { createRoot } from "react-dom/client";
|
||||
|
||||
const root = createRoot(document.body);
|
||||
|
||||
export default function Frontend() {
|
||||
return <h1>Hello, world!</h1>;
|
||||
}
|
||||
|
||||
root.render(<Frontend />);
|
||||
```
|
||||
|
||||
Then, run index.ts
|
||||
|
||||
```sh
|
||||
bun --hot ./index.ts
|
||||
```
|
||||
|
||||
For more information, read the Bun API docs in `node_modules/bun-types/docs/**.md`.
|
||||
789
README.md
Normal file
789
README.md
Normal file
@ -0,0 +1,789 @@
|
||||
# Bun SQLite
|
||||
|
||||
@moduletrace/bun-sqlite
|
||||
|
||||
A schema-driven SQLite manager for [Bun](https://bun.sh), featuring automatic schema synchronization, type-safe CRUD operations, vector embedding support (via `sqlite-vec`), and TypeScript type definition generation.
|
||||
|
||||
---
|
||||
|
||||
## Table of Contents
|
||||
|
||||
- [Features](#features)
|
||||
- [Prerequisites](#prerequisites)
|
||||
- [Installation](#installation)
|
||||
- [Quick Start](#quick-start)
|
||||
- [Configuration](#configuration)
|
||||
- [Schema Definition](#schema-definition)
|
||||
- [CLI Commands](#cli-commands)
|
||||
- [`schema`](#schema--sync-database-to-schema)
|
||||
- [`typedef`](#typedef--generate-typescript-types-only)
|
||||
- [`backup`](#backup--back-up-the-database)
|
||||
- [`restore`](#restore--restore-the-database-from-a-backup)
|
||||
- [CRUD API](#crud-api)
|
||||
- [Select](#select)
|
||||
- [Insert](#insert)
|
||||
- [Update](#update)
|
||||
- [Delete](#delete)
|
||||
- [Raw SQL](#raw-sql)
|
||||
- [Query API Reference](#query-api-reference)
|
||||
- [Vector Table Support](#vector-table-support)
|
||||
- [TypeScript Type Generation](#typescript-type-generation)
|
||||
- [Default Fields](#default-fields)
|
||||
- [Project Structure](#project-structure)
|
||||
|
||||
---
|
||||
|
||||
## Features
|
||||
|
||||
- **Schema-first design** — define your database in TypeScript; the library syncs your SQLite file to match
|
||||
- **Automatic migrations** — adds new columns, recreates tables for complex changes, drops removed tables
|
||||
- **Type-safe CRUD** — fully generic `select`, `insert`, `update`, `delete` functions with TypeScript generics
|
||||
- **Rich query DSL** — filtering, ordering, pagination, joins, grouping, full-text search, sub-query counts
|
||||
- **Vector table support** — create and manage `sqlite-vec` virtual tables for AI/ML embeddings
|
||||
- **TypeScript codegen** — generate `.ts` type definitions from your schema automatically
|
||||
- **Zero-config defaults** — `id`, `created_at`, and `updated_at` fields are added to every table automatically
|
||||
|
||||
---
|
||||
|
||||
## Prerequisites
|
||||
|
||||
`@moduletrace/bun-sqlite` is published to a private Gitea npm registry. You must configure your package manager to resolve the `@moduletrace` scope from that registry before installing.
|
||||
|
||||
Add the following to your project's `.npmrc` file (create it at the root of your project if it doesn't exist):
|
||||
|
||||
```ini
|
||||
@moduletrace:registry=https://git.tben.me/api/packages/moduletrace/npm/
|
||||
```
|
||||
|
||||
This works for both `bun` and `npm`.
|
||||
|
||||
---
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
bun add @moduletrace/bun-sqlite
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Quick Start
|
||||
|
||||
### 1. Create the config file
|
||||
|
||||
Create `bun-sqlite.config.ts` at your project root:
|
||||
|
||||
```ts
|
||||
import type { BunSQLiteConfig } from "@moduletrace/bun-sqlite";
|
||||
|
||||
const config: BunSQLiteConfig = {
|
||||
db_name: "my-app.db",
|
||||
db_schema_file_name: "schema.ts",
|
||||
db_dir: "./db", // optional: where to store the db file
|
||||
db_backup_dir: ".backups", // optional: name of backups directory. Relative to the db dir.
|
||||
typedef_file_path: "./db/types/db.ts", // optional: where to write generated types
|
||||
};
|
||||
|
||||
export default config;
|
||||
```
|
||||
|
||||
### 2. Define your schema
|
||||
|
||||
Create `./db/schema.ts` (matching `db_schema_file_name` above):
|
||||
|
||||
```ts
|
||||
import type { BUN_SQLITE_DatabaseSchemaType } from "@moduletrace/bun-sqlite";
|
||||
|
||||
const schema: BUN_SQLITE_DatabaseSchemaType = {
|
||||
dbName: "my-app",
|
||||
tables: [
|
||||
{
|
||||
tableName: "users",
|
||||
fields: [
|
||||
{ fieldName: "first_name", dataType: "TEXT" },
|
||||
{ fieldName: "last_name", dataType: "TEXT" },
|
||||
{ fieldName: "email", dataType: "TEXT", unique: true },
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
export default schema;
|
||||
```
|
||||
|
||||
### 3. Sync the schema to SQLite
|
||||
|
||||
```bash
|
||||
bunx bun-sqlite schema
|
||||
```
|
||||
|
||||
This creates the SQLite database file and creates/updates all tables to match your schema.
|
||||
|
||||
### 4. Use the CRUD API
|
||||
|
||||
```ts
|
||||
import BunSQLite from "@moduletrace/bun-sqlite";
|
||||
|
||||
// Insert
|
||||
await BunSQLite.insert({
|
||||
table: "users",
|
||||
data: [{ first_name: "Alice", email: "alice@example.com" }],
|
||||
});
|
||||
|
||||
// Select
|
||||
const result = await BunSQLite.select({ table: "users" });
|
||||
console.log(result.payload); // Alice's row
|
||||
|
||||
// Update
|
||||
await BunSQLite.update({
|
||||
table: "users",
|
||||
targetId: 1,
|
||||
data: { first_name: "Alicia" },
|
||||
});
|
||||
|
||||
// Delete
|
||||
await BunSQLite.delete({ table: "users", targetId: 1 });
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Configuration
|
||||
|
||||
The config file must be named `bun-sqlite.config.ts` and placed at the root of your project.
|
||||
|
||||
| Field | Type | Required | Description |
|
||||
| --------------------- | -------- | -------- | ------------------------------------------------------------------------------------------ |
|
||||
| `db_name` | `string` | Yes | Filename for the SQLite database (e.g. `"app.db"`) |
|
||||
| `db_schema_file_name` | `string` | Yes | Filename of the schema file relative to `db_dir` (or root if `db_dir` is not set) |
|
||||
| `db_backup_dir` | `string` | No | Directory for database backups, relative to `db_dir` |
|
||||
| `db_dir` | `string` | No | Root directory for the database file and schema. Defaults to project root |
|
||||
| `typedef_file_path` | `string` | No | Output path for generated TypeScript types, relative to project root |
|
||||
| `max_backups` | `number` | No | Maximum number of backup files to keep. Oldest are deleted automatically. Defaults to `10` |
|
||||
|
||||
---
|
||||
|
||||
## Schema Definition
|
||||
|
||||
### Database Schema
|
||||
|
||||
```ts
|
||||
interface BUN_SQLITE_DatabaseSchemaType {
|
||||
dbName?: string;
|
||||
tables: BUN_SQLITE_TableSchemaType[];
|
||||
}
|
||||
```
|
||||
|
||||
### Table Schema
|
||||
|
||||
```ts
|
||||
interface BUN_SQLITE_TableSchemaType {
|
||||
tableName: string;
|
||||
tableDescription?: string;
|
||||
fields: BUN_SQLITE_FieldSchemaType[];
|
||||
indexes?: BUN_SQLITE_IndexSchemaType[];
|
||||
uniqueConstraints?: BUN_SQLITE_UniqueConstraintSchemaType[];
|
||||
parentTableName?: string; // inherit fields from another table in the schema
|
||||
tableNameOld?: string; // rename: set this to the old name to trigger ALTER TABLE RENAME
|
||||
isVector?: boolean; // mark this as a sqlite-vec virtual table
|
||||
vectorType?: string; // virtual table type, defaults to "vec0"
|
||||
}
|
||||
```
|
||||
|
||||
### Field Schema
|
||||
|
||||
```ts
|
||||
type BUN_SQLITE_FieldSchemaType = {
|
||||
fieldName?: string;
|
||||
dataType: "TEXT" | "INTEGER";
|
||||
primaryKey?: boolean;
|
||||
autoIncrement?: boolean;
|
||||
notNullValue?: boolean;
|
||||
unique?: boolean;
|
||||
defaultValue?: string | number;
|
||||
defaultValueLiteral?: string; // raw SQL literal, e.g. "CURRENT_TIMESTAMP"
|
||||
foreignKey?: BUN_SQLITE_ForeignKeyType;
|
||||
isVector?: boolean; // vector embedding column
|
||||
vectorSize?: number; // embedding dimensions (default: 1536)
|
||||
sideCar?: boolean; // sqlite-vec "+" prefix for side-car columns
|
||||
updatedField?: boolean; // flag that this field definition has changed
|
||||
};
|
||||
```
|
||||
|
||||
### Foreign Key
|
||||
|
||||
```ts
|
||||
interface BUN_SQLITE_ForeignKeyType {
|
||||
destinationTableName: string;
|
||||
destinationTableColumnName: string;
|
||||
cascadeDelete?: boolean;
|
||||
cascadeUpdate?: boolean;
|
||||
}
|
||||
```
|
||||
|
||||
### Index
|
||||
|
||||
```ts
|
||||
interface BUN_SQLITE_IndexSchemaType {
|
||||
indexName?: string;
|
||||
indexType?: "regular" | "full_text" | "vector";
|
||||
indexTableFields?: { value: string; dataType: string }[];
|
||||
}
|
||||
```
|
||||
|
||||
### Unique Constraint
|
||||
|
||||
```ts
|
||||
interface BUN_SQLITE_UniqueConstraintSchemaType {
|
||||
constraintName?: string;
|
||||
constraintTableFields?: { value: string }[];
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## CLI Commands
|
||||
|
||||
The package provides a `bun-sqlite` CLI binary.
|
||||
|
||||
### `schema` — Sync database to schema
|
||||
|
||||
```bash
|
||||
bunx bun-sqlite schema [options]
|
||||
```
|
||||
|
||||
| Option | Description |
|
||||
| ----------------- | ---------------------------------------------------------- |
|
||||
| `-v`, `--vector` | Drop and recreate all vector (`sqlite-vec`) virtual tables |
|
||||
| `-t`, `--typedef` | Also generate TypeScript type definitions after syncing |
|
||||
|
||||
**Examples:**
|
||||
|
||||
```bash
|
||||
# Sync schema only
|
||||
bunx bun-sqlite schema
|
||||
|
||||
# Sync schema and regenerate types
|
||||
bunx bun-sqlite schema --typedef
|
||||
|
||||
# Sync schema, recreate vector tables, and regenerate types
|
||||
bunx bun-sqlite schema --vector --typedef
|
||||
```
|
||||
|
||||
### `typedef` — Generate TypeScript types only
|
||||
|
||||
```bash
|
||||
bunx bun-sqlite typedef
|
||||
```
|
||||
|
||||
Reads the schema and writes TypeScript type definitions to the path configured in `typedef_file_path`.
|
||||
|
||||
---
|
||||
|
||||
### `backup` — Back up the database
|
||||
|
||||
```bash
|
||||
bunx bun-sqlite backup
|
||||
```
|
||||
|
||||
Copies the current database file into `db_backup_dir` with a timestamped filename. After copying, the oldest backups are automatically pruned so the number of stored backups never exceeds `max_backups` (default: 10).
|
||||
|
||||
**Example:**
|
||||
|
||||
```bash
|
||||
bunx bun-sqlite backup
|
||||
# Backing up database ...
|
||||
# DB Backup Success!
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `restore` — Restore the database from a backup
|
||||
|
||||
```bash
|
||||
bunx bun-sqlite restore
|
||||
```
|
||||
|
||||
Presents an interactive list of available backups sorted by date (newest first). Select a backup to overwrite the current database file with it.
|
||||
|
||||
**Example:**
|
||||
|
||||
```bash
|
||||
bunx bun-sqlite restore
|
||||
# Restoring database ...
|
||||
# ? Select a backup: (Use arrow keys)
|
||||
# ❯ Backup #1: Mon Mar 02 2026 14:30:00
|
||||
# Backup #2: Sun Mar 01 2026 09:15:42
|
||||
# DB Restore Success!
|
||||
```
|
||||
|
||||
> If no backups exist, the command exits with an error and a reminder to run `backup` first.
|
||||
|
||||
---
|
||||
|
||||
## CRUD API
|
||||
|
||||
Import the default export:
|
||||
|
||||
```ts
|
||||
import BunSQLite from "@moduletrace/bun-sqlite";
|
||||
```
|
||||
|
||||
All methods return an `APIResponseObject<T>`:
|
||||
|
||||
```ts
|
||||
{
|
||||
success: boolean;
|
||||
payload?: T[]; // array of rows (select)
|
||||
singleRes?: T; // first row (select)
|
||||
count?: number; // total count (when count: true)
|
||||
postInsertReturn?: {
|
||||
affectedRows?: number;
|
||||
insertId?: number;
|
||||
};
|
||||
error?: string;
|
||||
msg?: string;
|
||||
debug?: any;
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Select
|
||||
|
||||
```ts
|
||||
BunSQLite.select<T>({ table, query?, count?, targetId? })
|
||||
```
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| ---------- | --------------------- | ------------------------------------------------------------ |
|
||||
| `table` | `string` | Table name |
|
||||
| `query` | `ServerQueryParam<T>` | Query/filter options (see [Query API](#query-api-reference)) |
|
||||
| `count` | `boolean` | Return row count instead of rows |
|
||||
| `targetId` | `string \| number` | Shorthand to filter by `id` |
|
||||
|
||||
**Examples:**
|
||||
|
||||
```ts
|
||||
// Get all users
|
||||
const res = await BunSQLite.select({ table: "users" });
|
||||
|
||||
// Get by ID
|
||||
const res = await BunSQLite.select({ table: "users", targetId: 42 });
|
||||
|
||||
// Filter with LIKE
|
||||
const res = await BunSQLite.select<UserType>({
|
||||
table: "users",
|
||||
query: {
|
||||
query: {
|
||||
first_name: { value: "Ali", equality: "LIKE" },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Count rows
|
||||
const res = await BunSQLite.select({ table: "users", count: true });
|
||||
console.log(res.count);
|
||||
|
||||
// Pagination
|
||||
const res = await BunSQLite.select({
|
||||
table: "users",
|
||||
query: { limit: 10, page: 2 },
|
||||
});
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Insert
|
||||
|
||||
```ts
|
||||
BunSQLite.insert<T>({ table, data });
|
||||
```
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | -------- | ------------------------------ |
|
||||
| `table` | `string` | Table name |
|
||||
| `data` | `T[]` | Array of row objects to insert |
|
||||
|
||||
`created_at` and `updated_at` are set automatically to `Date.now()`.
|
||||
|
||||
**Example:**
|
||||
|
||||
```ts
|
||||
const res = await BunSQLite.insert({
|
||||
table: "users",
|
||||
data: [
|
||||
{ first_name: "Alice", last_name: "Smith", email: "alice@example.com" },
|
||||
{ first_name: "Bob", last_name: "Jones", email: "bob@example.com" },
|
||||
],
|
||||
});
|
||||
|
||||
console.log(res.postInsertReturn?.insertId); // last inserted row ID
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Update
|
||||
|
||||
```ts
|
||||
BunSQLite.update<T>({ table, data, query?, targetId? })
|
||||
```
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| ---------- | --------------------- | --------------------------- |
|
||||
| `table` | `string` | Table name |
|
||||
| `data` | `Partial<T>` | Fields to update |
|
||||
| `query` | `ServerQueryParam<T>` | WHERE clause conditions |
|
||||
| `targetId` | `string \| number` | Shorthand to filter by `id` |
|
||||
|
||||
A WHERE clause is required. If no condition matches, `success` is `false`.
|
||||
|
||||
`updated_at` is set automatically to `Date.now()`.
|
||||
|
||||
**Examples:**
|
||||
|
||||
```ts
|
||||
// Update by ID
|
||||
await BunSQLite.update({
|
||||
table: "users",
|
||||
targetId: 1,
|
||||
data: { first_name: "Alicia" },
|
||||
});
|
||||
|
||||
// Update with custom query
|
||||
await BunSQLite.update({
|
||||
table: "users",
|
||||
data: { last_name: "Doe" },
|
||||
query: {
|
||||
query: {
|
||||
email: { value: "alice@example.com", equality: "EQUAL" },
|
||||
},
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Delete
|
||||
|
||||
```ts
|
||||
BunSQLite.delete<T>({ table, query?, targetId? })
|
||||
```
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| ---------- | --------------------- | --------------------------- |
|
||||
| `table` | `string` | Table name |
|
||||
| `query` | `ServerQueryParam<T>` | WHERE clause conditions |
|
||||
| `targetId` | `string \| number` | Shorthand to filter by `id` |
|
||||
|
||||
A WHERE clause is required. If no condition is provided, `success` is `false`.
|
||||
|
||||
**Examples:**
|
||||
|
||||
```ts
|
||||
// Delete by ID
|
||||
await BunSQLite.delete({ table: "users", targetId: 1 });
|
||||
|
||||
// Delete with condition
|
||||
await BunSQLite.delete({
|
||||
table: "users",
|
||||
query: {
|
||||
query: {
|
||||
first_name: { value: "Ben", equality: "LIKE" },
|
||||
},
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Raw SQL
|
||||
|
||||
```ts
|
||||
BunSQLite.sql<T>({ sql, values? })
|
||||
```
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---------------------- | -------------------- |
|
||||
| `sql` | `string` | Raw SQL statement |
|
||||
| `values` | `(string \| number)[]` | Parameterized values |
|
||||
|
||||
SELECT statements return rows; all other statements return `postInsertReturn`.
|
||||
|
||||
**Examples:**
|
||||
|
||||
```ts
|
||||
// SELECT
|
||||
const res = await BunSQLite.sql<UserType>({ sql: "SELECT * FROM users" });
|
||||
console.log(res.payload);
|
||||
|
||||
// INSERT with params
|
||||
await BunSQLite.sql({
|
||||
sql: "INSERT INTO users (first_name, email) VALUES (?, ?)",
|
||||
values: ["Charlie", "charlie@example.com"],
|
||||
});
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Query API Reference
|
||||
|
||||
The `query` parameter on `select`, `update`, and `delete` accepts a `ServerQueryParam<T>` object:
|
||||
|
||||
```ts
|
||||
type ServerQueryParam<T> = {
|
||||
query?: { [key in keyof T]: ServerQueryObject };
|
||||
selectFields?: (keyof T | { fieldName: keyof T; alias?: string })[];
|
||||
omitFields?: (keyof T)[];
|
||||
limit?: number;
|
||||
page?: number;
|
||||
offset?: number;
|
||||
order?:
|
||||
| { field: keyof T; strategy: "ASC" | "DESC" }
|
||||
| { field: keyof T; strategy: "ASC" | "DESC" }[];
|
||||
searchOperator?: "AND" | "OR"; // how multiple query fields are joined (default: AND)
|
||||
join?: ServerQueryParamsJoin[];
|
||||
group?:
|
||||
| keyof T
|
||||
| { field: keyof T; table?: string }
|
||||
| (keyof T | { field: keyof T; table?: string })[];
|
||||
countSubQueries?: ServerQueryParamsCount[];
|
||||
fullTextSearch?: {
|
||||
fields: (keyof T)[];
|
||||
searchTerm: string;
|
||||
scoreAlias: string;
|
||||
};
|
||||
};
|
||||
```
|
||||
|
||||
### Equality Operators
|
||||
|
||||
Set `equality` on any query field to control the comparison:
|
||||
|
||||
| Equality | SQL Equivalent |
|
||||
| ----------------------- | ------------------------------------------------------ |
|
||||
| `EQUAL` (default) | `=` |
|
||||
| `NOT EQUAL` | `!=` |
|
||||
| `LIKE` | `LIKE '%value%'` |
|
||||
| `LIKE_RAW` | `LIKE 'value'` (no auto-wrapping) |
|
||||
| `LIKE_LOWER` | `LOWER(field) LIKE '%value%'` |
|
||||
| `NOT LIKE` | `NOT LIKE '%value%'` |
|
||||
| `GREATER THAN` | `>` |
|
||||
| `GREATER THAN OR EQUAL` | `>=` |
|
||||
| `LESS THAN` | `<` |
|
||||
| `LESS THAN OR EQUAL` | `<=` |
|
||||
| `IN` | `IN (val1, val2, ...)` — pass array as value |
|
||||
| `NOT IN` | `NOT IN (...)` |
|
||||
| `BETWEEN` | `BETWEEN val1 AND val2` — pass `[val1, val2]` as value |
|
||||
| `IS NULL` | `IS NULL` |
|
||||
| `IS NOT NULL` | `IS NOT NULL` |
|
||||
| `MATCH` | sqlite-vec vector nearest-neighbor search |
|
||||
|
||||
**Example:**
|
||||
|
||||
```ts
|
||||
// Find users with email NOT NULL, ordered by created_at DESC, limit 20
|
||||
const res = await BunSQLite.select<UserType>({
|
||||
table: "users",
|
||||
query: {
|
||||
query: {
|
||||
email: { equality: "IS NOT NULL" },
|
||||
},
|
||||
order: { field: "created_at", strategy: "DESC" },
|
||||
limit: 20,
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### JOIN
|
||||
|
||||
```ts
|
||||
const res = await BunSQLite.select({
|
||||
table: "posts",
|
||||
query: {
|
||||
join: [
|
||||
{
|
||||
joinType: "LEFT JOIN",
|
||||
tableName: "users",
|
||||
match: { source: "user_id", target: "id" },
|
||||
selectFields: ["first_name", "email"],
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Vector Table Support
|
||||
|
||||
`@moduletrace/bun-sqlite` integrates with [`sqlite-vec`](https://github.com/asg017/sqlite-vec) for storing and querying vector embeddings.
|
||||
|
||||
### Define a vector table in the schema
|
||||
|
||||
```ts
|
||||
{
|
||||
tableName: "documents",
|
||||
isVector: true,
|
||||
vectorType: "vec0", // defaults to "vec0"
|
||||
fields: [
|
||||
{
|
||||
fieldName: "embedding",
|
||||
dataType: "TEXT",
|
||||
isVector: true,
|
||||
vectorSize: 1536, // embedding dimensions
|
||||
},
|
||||
{
|
||||
fieldName: "title",
|
||||
dataType: "TEXT",
|
||||
sideCar: true, // stored as a side-car column (+title) for efficiency
|
||||
},
|
||||
{
|
||||
fieldName: "body",
|
||||
dataType: "TEXT",
|
||||
sideCar: true,
|
||||
},
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
> **Side-car columns** (`sideCar: true`) use sqlite-vec's `+column` syntax. They are stored separately from the vector index, keeping the index lean and fast while still being queryable alongside vector results.
|
||||
|
||||
### Sync vector tables
|
||||
|
||||
```bash
|
||||
# Initial sync
|
||||
bunx bun-sqlite schema
|
||||
|
||||
# Recreate vector tables (e.g. after changing vectorSize)
|
||||
bunx bun-sqlite schema --vector
|
||||
```
|
||||
|
||||
### Query vectors
|
||||
|
||||
```ts
|
||||
const res = await BunSQLite.select({
|
||||
table: "documents",
|
||||
query: {
|
||||
query: {
|
||||
embedding: {
|
||||
equality: "MATCH",
|
||||
value: "<serialized-vector>",
|
||||
vector: true,
|
||||
vectorFunction: "vec_f32",
|
||||
},
|
||||
},
|
||||
limit: 5,
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## TypeScript Type Generation
|
||||
|
||||
Run the `typedef` command (or pass `--typedef` to `schema`) to generate a `.ts` file containing:
|
||||
|
||||
- A `const` array of all table names (`BunSQLiteTables`)
|
||||
- A `type` for each table (named `BUN_SQLITE_<DB_NAME>_<TABLE_NAME>`)
|
||||
- A union type `BUN_SQLITE_<DB_NAME>_ALL_TYPEDEFS`
|
||||
|
||||
**Example output** (`db/types/db.ts`):
|
||||
|
||||
```ts
|
||||
export const BunSQLiteTables = ["users"] as const;
|
||||
|
||||
export type BUN_SQLITE_MY_APP_USERS = {
|
||||
/** The unique identifier of the record. */
|
||||
id?: number;
|
||||
/** The time when the record was created. (Unix Timestamp) */
|
||||
created_at?: number;
|
||||
/** The time when the record was updated. (Unix Timestamp) */
|
||||
updated_at?: number;
|
||||
first_name?: string;
|
||||
last_name?: string;
|
||||
email?: string;
|
||||
};
|
||||
|
||||
export type BUN_SQLITE_MY_APP_ALL_TYPEDEFS = BUN_SQLITE_MY_APP_USERS;
|
||||
```
|
||||
|
||||
Use the generated types with the CRUD API for full type safety:
|
||||
|
||||
```ts
|
||||
import BunSQLite from "@moduletrace/bun-sqlite";
|
||||
import { BUN_SQLITE_MY_APP_USERS, BunSQLiteTables } from "./db/types/db";
|
||||
|
||||
const res = await BunSQLite.select<BUN_SQLITE_MY_APP_USERS>({
|
||||
table: "users" as (typeof BunSQLiteTables)[number],
|
||||
});
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Default Fields
|
||||
|
||||
Every table automatically receives the following fields — you do not need to declare them in your schema:
|
||||
|
||||
| Field | Type | Description |
|
||||
| ------------ | -------------------------------------------- | -------------------------------------- |
|
||||
| `id` | `INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL` | Unique row identifier |
|
||||
| `created_at` | `INTEGER` | Unix timestamp set on insert |
|
||||
| `updated_at` | `INTEGER` | Unix timestamp updated on every update |
|
||||
|
||||
---
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
bun-sqlite/
|
||||
├── src/
|
||||
│ ├── index.ts # Main export (BunSQLite object)
|
||||
│ ├── commands/
|
||||
│ │ ├── index.ts # CLI entry point
|
||||
│ │ ├── schema.ts # `schema` command
|
||||
│ │ ├── typedef.ts # `typedef` command
|
||||
│ │ ├── backup.ts # `backup` command
|
||||
│ │ └── restore.ts # `restore` command
|
||||
│ ├── functions/
|
||||
│ │ └── init.ts # Config + schema loader
|
||||
│ ├── lib/sqlite/
|
||||
│ │ ├── index.ts # Database client (bun:sqlite + sqlite-vec)
|
||||
│ │ ├── db-schema-manager.ts # Schema synchronization engine
|
||||
│ │ ├── db-select.ts # Select implementation
|
||||
│ │ ├── db-insert.ts # Insert implementation
|
||||
│ │ ├── db-update.ts # Update implementation
|
||||
│ │ ├── db-delete.ts # Delete implementation
|
||||
│ │ ├── db-sql.ts # Raw SQL implementation
|
||||
│ │ ├── db-generate-type-defs.ts # Type def generator
|
||||
│ │ └── schema-to-typedef.ts # Schema-to-TypeScript converter
|
||||
│ ├── types/
|
||||
│ │ └── index.ts # All TypeScript types and interfaces
|
||||
│ └── utils/
|
||||
│ ├── sql-generator.ts # SELECT query builder
|
||||
│ ├── sql-insert-generator.ts # INSERT query builder
|
||||
│ ├── sql-gen-operator-gen.ts # Equality operator mapper
|
||||
│ ├── sql-equality-parser.ts # Equality string parser
|
||||
│ ├── append-default-fields-to-db-schema.ts
|
||||
│ ├── grab-db-dir.ts # Resolve db/backup directory paths
|
||||
│ ├── grab-db-backup-file-name.ts # Generate timestamped backup filename
|
||||
│ ├── grab-sorted-backups.ts # List backups sorted newest-first
|
||||
│ ├── grab-backup-data.ts # Parse metadata from a backup filename
|
||||
│ └── trim-backups.ts # Prune oldest backups over max_backups
|
||||
└── test/
|
||||
└── test-01/ # Example project using the library
|
||||
├── bun-sqlite.config.ts
|
||||
├── db/
|
||||
│ ├── bun-sqlite-schema.ts
|
||||
│ └── types/bun-sqlite.ts # Generated types
|
||||
└── src/
|
||||
├── insert.ts
|
||||
├── select.ts
|
||||
├── delete.ts
|
||||
└── sql.ts
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## License
|
||||
|
||||
MIT
|
||||
2
dist/commands/backup.d.ts
vendored
Normal file
2
dist/commands/backup.d.ts
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
import { Command } from "commander";
|
||||
export default function (): Command;
|
||||
22
dist/commands/backup.js
vendored
Normal file
22
dist/commands/backup.js
vendored
Normal file
@ -0,0 +1,22 @@
|
||||
import { Command } from "commander";
|
||||
import init from "../functions/init";
|
||||
import path from "path";
|
||||
import grabDBDir from "../utils/grab-db-dir";
|
||||
import fs from "fs";
|
||||
import grabDBBackupFileName from "../utils/grab-db-backup-file-name";
|
||||
import chalk from "chalk";
|
||||
import trimBackups from "../utils/trim-backups";
|
||||
/**
 * Build the `backup` CLI command.
 *
 * Copies the live SQLite database file into the configured backup
 * directory under a freshly generated timestamped filename, then prunes
 * the oldest backups so the stored count stays within the configured limit.
 *
 * @returns {Command} the configured commander `backup` sub-command
 */
export default function () {
  const cmd = new Command("backup");
  cmd.description("Backup Database");
  cmd.action(async () => {
    console.log(`Backing up database ...`);

    // Load project config, then resolve the db file and backup dir paths.
    const { config } = await init();
    const { backup_dir, db_file_path } = grabDBDir({ config });

    // Copy the database file to a timestamped backup name.
    const backup_file_name = grabDBBackupFileName({ config });
    fs.cpSync(db_file_path, path.join(backup_dir, backup_file_name));

    // Drop the oldest backups beyond the configured maximum.
    trimBackups({ config });

    console.log(`${chalk.bold(chalk.green(`DB Backup Success!`))}`);
    process.exit();
  });
  return cmd;
}
|
||||
6
dist/commands/index.d.ts
vendored
Normal file
6
dist/commands/index.d.ts
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
#!/usr/bin/env bun
|
||||
/**
|
||||
* # Declare Global Variables
|
||||
*/
|
||||
declare global { }
|
||||
export {};
|
||||
31
dist/commands/index.js
vendored
Normal file
31
dist/commands/index.js
vendored
Normal file
@ -0,0 +1,31 @@
|
||||
#!/usr/bin/env bun
import { program } from "commander";
import schema from "./schema";
import typedef from "./typedef";
import backup from "./backup";
import restore from "./restore";

/**
 * # Describe Program
 * Top-level CLI metadata shown by `--help` / `--version`.
 */
program
  .name(`bun-sqlite`)
  .description(`SQLite manager for Bun`)
  .version(`1.0.0`);

/**
 * # Declare Commands
 * Each factory returns a configured commander `Command` instance.
 */
const commandFactories = [schema, typedef, backup, restore];
for (const makeCommand of commandFactories) {
  program.addCommand(makeCommand());
}

/**
 * # Handle Unavailable Commands
 * Any unrecognized sub-command prints an error and exits non-zero.
 */
program.on("command:*", () => {
  console.error("Invalid command: %s\nSee --help for a list of available commands.", program.args.join(" "));
  process.exit(1);
});

/**
 * # Parse Arguments
 */
program.parse(Bun.argv);
|
||||
2
dist/commands/restore.d.ts
vendored
Normal file
2
dist/commands/restore.d.ts
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
import { Command } from "commander";
|
||||
export default function (): Command;
|
||||
44
dist/commands/restore.js
vendored
Normal file
44
dist/commands/restore.js
vendored
Normal file
@ -0,0 +1,44 @@
|
||||
import { Command } from "commander";
|
||||
import init from "../functions/init";
|
||||
import grabDBDir from "../utils/grab-db-dir";
|
||||
import fs from "fs";
|
||||
import chalk from "chalk";
|
||||
import grabSortedBackups from "../utils/grab-sorted-backups";
|
||||
import { select } from "@inquirer/prompts";
|
||||
import grabBackupData from "../utils/grab-backup-data";
|
||||
import path from "path";
|
||||
/**
 * Factory for the `restore` subcommand.
 *
 * Lists the existing backups, lets the user pick one interactively, and
 * copies it over the live database file. Exits 1 when no backups exist or
 * when the restore fails.
 */
export default function () {
    return new Command("restore")
        .description("Restore Database")
        .action(async (opts) => {
            // FIX: was "Restoring up database ..." (copy-paste from backup).
            console.log(`Restoring database ...`);
            const { config } = await init();
            const { backup_dir, db_file_path } = grabDBDir({ config });
            const backups = grabSortedBackups({ config });
            if (!backups?.[0]) {
                console.error(`No Backups to restore. Use the \`backup\` command to create a backup`);
                process.exit(1);
            }
            try {
                const selected_backup = await select({
                    message: "Select a backup:",
                    choices: backups.map((b, i) => {
                        const { backup_date } = grabBackupData({
                            backup_name: b,
                        });
                        // FIX: zero-pad all time components (previously only
                        // seconds were padded, so 9:5:03 rendered oddly).
                        const hh = backup_date.getHours().toString().padStart(2, "0");
                        const mm = backup_date.getMinutes().toString().padStart(2, "0");
                        const ss = backup_date.getSeconds().toString().padStart(2, "0");
                        return {
                            name: `Backup #${i + 1}: ${backup_date.toDateString()} ${hh}:${mm}:${ss}`,
                            value: b,
                        };
                    }),
                });
                // Overwrite the live DB file with the selected backup.
                fs.cpSync(path.join(backup_dir, selected_backup), db_file_path);
                console.log(`${chalk.bold(chalk.green(`DB Restore Success!`))}`);
                process.exit();
            }
            catch (error) {
                console.error(`Backup Restore ERROR => ${error.message}`);
                // FIX: exit non-zero on failure so scripts can detect it
                // (previously exited 0).
                process.exit(1);
            }
        });
}
|
||||
2
dist/commands/schema.d.ts
vendored
Normal file
2
dist/commands/schema.d.ts
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
import { Command } from "commander";
/** Factory for the `schema` subcommand (syncs the DB to the declared schema). */
export default function (): Command;
|
||||
38
dist/commands/schema.js
vendored
Normal file
38
dist/commands/schema.js
vendored
Normal file
@ -0,0 +1,38 @@
|
||||
import { Command } from "commander";
|
||||
import { SQLiteSchemaManager } from "../lib/sqlite/db-schema-manager";
|
||||
import init from "../functions/init";
|
||||
import grabDirNames from "../data/grab-dir-names";
|
||||
import path from "path";
|
||||
import dbSchemaToTypeDef from "../lib/sqlite/schema-to-typedef";
|
||||
import _ from "lodash";
|
||||
import appendDefaultFieldsToDbSchema from "../utils/append-default-fields-to-db-schema";
|
||||
import chalk from "chalk";
|
||||
/**
 * Factory for the `schema` subcommand.
 *
 * Loads config + schema, applies default fields, syncs the SQLite database
 * to match, and optionally emits TypeScript type definitions.
 */
export default function () {
    const cmd = new Command("schema");
    cmd.description("Build DB From Schema");
    cmd.option("-v, --vector", "Recreate Vector Tables. This will drop and rebuild all vector tables");
    cmd.option("-t, --typedef", "Generate typescript type definitions");
    cmd.action(async (opts) => {
        console.log(`Starting process ...`);
        const { config, dbSchema } = await init();
        const { ROOT_DIR } = grabDirNames();
        const recreateVectors = Boolean(opts.vector || opts.v);
        const emitTypedefs = Boolean(opts.typedef || opts.t);
        // Schema augmented with the implicit default columns.
        const schemaWithDefaults = appendDefaultFieldsToDbSchema({ dbSchema });
        const manager = new SQLiteSchemaManager({
            schema: schemaWithDefaults,
            recreate_vector_table: recreateVectors,
        });
        await manager.syncSchema();
        manager.close();
        if (emitTypedefs && config.typedef_file_path) {
            dbSchemaToTypeDef({
                dbSchema: schemaWithDefaults,
                dst_file: path.resolve(ROOT_DIR, config.typedef_file_path),
            });
        }
        console.log(`${chalk.bold(chalk.green(`DB Schema setup success!`))}`);
        process.exit();
    });
    return cmd;
}
|
||||
2
dist/commands/typedef.d.ts
vendored
Normal file
2
dist/commands/typedef.d.ts
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
import { Command } from "commander";
/** Factory for the `typedef` subcommand (generates TS types from the schema). */
export default function (): Command;
|
||||
30
dist/commands/typedef.js
vendored
Normal file
30
dist/commands/typedef.js
vendored
Normal file
@ -0,0 +1,30 @@
|
||||
import { Command } from "commander";
|
||||
import init from "../functions/init";
|
||||
import dbSchemaToTypeDef from "../lib/sqlite/schema-to-typedef";
|
||||
import path from "path";
|
||||
import grabDirNames from "../data/grab-dir-names";
|
||||
import appendDefaultFieldsToDbSchema from "../utils/append-default-fields-to-db-schema";
|
||||
import chalk from "chalk";
|
||||
/**
 * Factory for the `typedef` subcommand.
 *
 * Generates TypeScript type definitions from the DB schema and writes them
 * to `config.typedef_file_path`. Exits 1 when that path is not configured.
 */
export default function () {
    return new Command("typedef")
        // FIX: description was copy-pasted from the `schema` command.
        .description("Generate Type Definitions From DB Schema")
        .action(async (opts) => {
            console.log(`Creating Type Definition From DB Schema ...`);
            const { config, dbSchema } = await init();
            const { ROOT_DIR } = grabDirNames();
            const finaldbSchema = appendDefaultFieldsToDbSchema({ dbSchema });
            if (config.typedef_file_path) {
                const out_file = path.resolve(ROOT_DIR, config.typedef_file_path);
                dbSchemaToTypeDef({
                    dbSchema: finaldbSchema,
                    dst_file: out_file,
                });
            }
            else {
                // FIX: previously printed an empty string, giving the user
                // no indication of what went wrong.
                console.error(`\`typedef_file_path\` is required in your config`);
                process.exit(1);
            }
            console.log(`${chalk.bold(chalk.green(`Typedef gen success!`))}`);
            process.exit();
        });
}
|
||||
5
dist/data/app-data.d.ts
vendored
Normal file
5
dist/data/app-data.d.ts
vendored
Normal file
@ -0,0 +1,5 @@
|
||||
/** Application-wide constants for the bun-sqlite CLI. */
export declare const AppData: {
    readonly ConfigFileName: "bun-sqlite.config.ts";
    readonly MaxBackups: 10;
    readonly DefaultBackupDirName: ".backups";
};
|
||||
5
dist/data/app-data.js
vendored
Normal file
5
dist/data/app-data.js
vendored
Normal file
@ -0,0 +1,5 @@
|
||||
/** Application-wide constants for the bun-sqlite CLI. */
export const AppData = {
    // Config file expected at the project root (see functions/init).
    ConfigFileName: "bun-sqlite.config.ts",
    // Backup-count cap — presumably enforced by trimBackups; confirm there.
    MaxBackups: 10,
    // Default backup directory name, resolved relative to the db directory.
    DefaultBackupDirName: ".backups",
};
|
||||
3
dist/data/grab-dir-names.d.ts
vendored
Normal file
3
dist/data/grab-dir-names.d.ts
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
/** Returns the directory names the CLI works from (currently just the root). */
export default function grabDirNames(): {
    ROOT_DIR: string;
};
|
||||
7
dist/data/grab-dir-names.js
vendored
Normal file
7
dist/data/grab-dir-names.js
vendored
Normal file
@ -0,0 +1,7 @@
|
||||
import path from "path";
|
||||
/**
 * Resolve the directory names used throughout the CLI.
 * @returns an object whose ROOT_DIR is the current working directory.
 */
export default function grabDirNames() {
    const root = process.cwd();
    return { ROOT_DIR: root };
}
|
||||
2
dist/functions/init.d.ts
vendored
Normal file
2
dist/functions/init.d.ts
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
import type { BunSQLiteConfigReturn } from "../types";
/** Load + validate the user config and schema; exits the process on failure. */
export default function init(): Promise<BunSQLiteConfigReturn>;
|
||||
46
dist/functions/init.js
vendored
Normal file
46
dist/functions/init.js
vendored
Normal file
@ -0,0 +1,46 @@
|
||||
import path from "path";
|
||||
import fs from "fs";
|
||||
import { AppData } from "../data/app-data";
|
||||
import grabDirNames from "../data/grab-dir-names";
|
||||
/**
 * Load and validate the user's `bun-sqlite.config.ts`, dynamically import
 * the schema module it points to, and ensure the db and backup directories
 * exist.
 *
 * @returns `{ config, dbSchema }` — the config object and the schema's
 *   default export.
 * Exits the process with code 1 on any missing file, missing required
 * config key, or import error (never throws to the caller).
 */
export default async function init() {
    try {
        const { ROOT_DIR } = grabDirNames();
        const { ConfigFileName } = AppData;
        const ConfigFilePath = path.join(ROOT_DIR, ConfigFileName);
        if (!fs.existsSync(ConfigFilePath)) {
            console.log("ConfigFilePath", ConfigFilePath);
            console.error(`Please create a \`${ConfigFileName}\` file at the root of your project.`);
            process.exit(1);
        }
        const ConfigImport = await import(ConfigFilePath);
        const Config = ConfigImport["default"];
        if (!Config.db_name) {
            console.error(`\`db_name\` is required in your config`);
            process.exit(1);
        }
        if (!Config.db_schema_file_name) {
            console.error(`\`db_schema_file_name\` is required in your config`);
            process.exit(1);
        }
        let db_dir = ROOT_DIR;
        if (Config.db_dir) {
            db_dir = path.resolve(ROOT_DIR, Config.db_dir);
            // FIX: check/create the *resolved* directory. The original used
            // the raw `Config.db_dir`, which is only correct as long as
            // ROOT_DIR happens to equal process.cwd().
            if (!fs.existsSync(db_dir)) {
                fs.mkdirSync(db_dir, { recursive: true });
            }
        }
        const DBSchemaFilePath = path.join(db_dir, Config.db_schema_file_name);
        const DbSchemaImport = await import(DBSchemaFilePath);
        const DbSchema = DbSchemaImport["default"];
        // Backup dir defaults to AppData.DefaultBackupDirName, relative to db_dir.
        const backup_dir = Config.db_backup_dir || AppData["DefaultBackupDirName"];
        const BackupDir = path.resolve(db_dir, backup_dir);
        if (!fs.existsSync(BackupDir)) {
            fs.mkdirSync(BackupDir, { recursive: true });
        }
        return { config: Config, dbSchema: DbSchema };
    }
    catch (error) {
        console.error(`Initialization ERROR => ` + error.message);
        process.exit(1);
    }
}
|
||||
13
dist/index.d.ts
vendored
Normal file
13
dist/index.d.ts
vendored
Normal file
@ -0,0 +1,13 @@
|
||||
import DbDelete from "./lib/sqlite/db-delete";
import DbInsert from "./lib/sqlite/db-insert";
import DbSelect from "./lib/sqlite/db-select";
import DbSQL from "./lib/sqlite/db-sql";
import DbUpdate from "./lib/sqlite/db-update";
/** Public facade: CRUD + raw-SQL helpers grouped under one default export. */
declare const BunSQLite: {
    readonly select: typeof DbSelect;
    readonly insert: typeof DbInsert;
    readonly update: typeof DbUpdate;
    readonly delete: typeof DbDelete;
    readonly sql: typeof DbSQL;
};
export default BunSQLite;
|
||||
13
dist/index.js
vendored
Normal file
13
dist/index.js
vendored
Normal file
@ -0,0 +1,13 @@
|
||||
import DbDelete from "./lib/sqlite/db-delete";
|
||||
import DbInsert from "./lib/sqlite/db-insert";
|
||||
import DbSelect from "./lib/sqlite/db-select";
|
||||
import DbSQL from "./lib/sqlite/db-sql";
|
||||
import DbUpdate from "./lib/sqlite/db-update";
|
||||
/**
 * Public facade: groups the individual SQLite helpers
 * (select/insert/update/delete/raw sql) under a single default export.
 */
const BunSQLite = {
    select: DbSelect,
    insert: DbInsert,
    update: DbUpdate,
    delete: DbDelete,
    sql: DbSQL,
};
export default BunSQLite;
|
||||
16
dist/lib/sqlite/db-delete.d.ts
vendored
Normal file
16
dist/lib/sqlite/db-delete.d.ts
vendored
Normal file
@ -0,0 +1,16 @@
|
||||
import type { APIResponseObject, ServerQueryParam } from "../../types";
/** Parameters accepted by `DbDelete`. */
type Params<Schema extends {
    [k: string]: any;
} = {
    [k: string]: any;
}, Table extends string = string> = {
    table: Table;
    query?: ServerQueryParam<Schema>;
    targetId?: number | string;
};
/**
 * Delete rows from `table` matching `query` (or, when `targetId` is given,
 * the row whose id equals it). Resolves with an API-style response object.
 */
export default function DbDelete<Schema extends {
    [k: string]: any;
} = {
    [k: string]: any;
}, Table extends string = string>({ table, query, targetId, }: Params<Schema, Table>): Promise<APIResponseObject>;
export {};
|
||||
49
dist/lib/sqlite/db-delete.js
vendored
Normal file
49
dist/lib/sqlite/db-delete.js
vendored
Normal file
@ -0,0 +1,49 @@
|
||||
import DbClient from ".";
|
||||
import _ from "lodash";
|
||||
import sqlGenerator from "../../utils/sql-generator";
|
||||
/**
 * Delete rows from `table`.
 *
 * When `targetId` is provided it is merged into the query as an exact `id`
 * match. Refuses to execute a DELETE whose generated SQL has no WHERE
 * clause. Never throws: errors are returned as `{ success: false, error }`.
 */
export default async function DbDelete({ table, query, targetId, }) {
    try {
        let finalQuery = query || {};
        // FIX: use a null-check, not truthiness, so a legitimate id of 0
        // still targets a row.
        if (targetId != null) {
            // FIX: merge into a fresh object — the original merged into
            // `finalQuery`, which aliases the caller's `query` and mutated it.
            finalQuery = _.merge({}, finalQuery, {
                query: {
                    id: {
                        value: String(targetId),
                    },
                },
            });
        }
        const sqlQueryObj = sqlGenerator({
            tableName: table,
            genObject: finalQuery,
        });
        // Safety net: only run the DELETE when a WHERE clause was generated.
        const whereClause = sqlQueryObj.string.match(/WHERE .*/)?.[0];
        if (whereClause) {
            let sql = `DELETE FROM ${table} ${whereClause}`;
            const res = DbClient.run(sql, sqlQueryObj.values);
            return {
                success: Boolean(res.changes),
                postInsertReturn: {
                    affectedRows: res.changes,
                    insertId: Number(res.lastInsertRowid),
                },
                debug: {
                    sql,
                    values: sqlQueryObj.values,
                },
            };
        }
        else {
            return {
                success: false,
                msg: `No WHERE clause`,
            };
        }
    }
    catch (error) {
        return {
            success: false,
            error: error.message,
        };
    }
}
|
||||
15
dist/lib/sqlite/db-generate-type-defs.d.ts
vendored
Normal file
15
dist/lib/sqlite/db-generate-type-defs.d.ts
vendored
Normal file
@ -0,0 +1,15 @@
|
||||
import type { BUN_SQLITE_TableSchemaType } from "../../types";
/** Options for `generateTypeDefinition`. */
type Param = {
    paradigm: "JavaScript" | "TypeScript" | undefined;
    table: BUN_SQLITE_TableSchemaType;
    query?: any;
    typeDefName?: string;
    allValuesOptional?: boolean;
    addExport?: boolean;
    dbName?: string;
};
/**
 * Generate a TypeScript `type` alias or a JSDoc `@typedef` block for one
 * table schema. Returns the definition text (null on failure) and the
 * derived type name.
 */
export default function generateTypeDefinition({ paradigm, table, query, typeDefName, allValuesOptional, addExport, dbName, }: Param): {
    typeDefinition: string | null;
    tdName: string;
};
export {};
|
||||
61
dist/lib/sqlite/db-generate-type-defs.js
vendored
Normal file
61
dist/lib/sqlite/db-generate-type-defs.js
vendored
Normal file
@ -0,0 +1,61 @@
|
||||
/**
 * Build a type definition for one table schema: a TypeScript `type` alias
 * (when paradigm matches /typescript/i) or a JSDoc `@typedef` block (when
 * it matches /javascript/i).
 *
 * Name precedence for the emitted type: explicit `typeDefName`, else
 * derived from `dbName` + table name, else from `query.single` +
 * `query.single_table` (assumes `query` is set in that case — a missing
 * `query` throws here and is swallowed by the catch below).
 *
 * Returns `{ typeDefinition, tdName }`; `typeDefinition` is null when
 * generation failed, and `""` when `paradigm` matched neither branch.
 */
export default function generateTypeDefinition({ paradigm, table, query, typeDefName, allValuesOptional, addExport, dbName, }) {
    let typeDefinition = ``;
    let tdName = ``;
    try {
        tdName = typeDefName
            ? typeDefName
            : dbName
                ? `BUN_SQLITE_${dbName}_${table.tableName}`.toUpperCase()
                : `BUN_SQLITE_${query.single}_${query.single_table}`.toUpperCase();
        const fields = table.fields;
        // Map one schema field to a TS type expression.
        function typeMap(schemaType) {
            // Enum-like fields: emit a literal union. Numeric options (or an
            // int dataType) stay unquoted; everything else is string-quoted.
            if (schemaType.options && schemaType.options.length > 0) {
                return schemaType.options
                    .map((opt) => schemaType.dataType?.match(/int/i) ||
                    typeof opt == "number"
                    ? `${opt}`
                    : `"${opt}"`)
                    .join(" | ");
            }
            if (schemaType.dataType?.match(/int|double|decimal/i)) {
                return "number";
            }
            if (schemaType.dataType?.match(/text|varchar|timestamp/i)) {
                return "string";
            }
            // SQLite stores booleans as 0/1 integers.
            if (schemaType.dataType?.match(/boolean/i)) {
                return "0 | 1";
            }
            // Fallback for unrecognized data types.
            return "string";
        }
        // Both variants are built in parallel; only one is emitted below.
        const typesArrayTypeScript = [];
        const typesArrayJavascript = [];
        typesArrayTypeScript.push(`${addExport ? "export " : ""}type ${tdName} = {`);
        typesArrayJavascript.push(`/**\n * @typedef {object} ${tdName}`);
        fields.forEach((field) => {
            if (field.fieldDescription) {
                typesArrayTypeScript.push(` /** \n * ${field.fieldDescription}\n */`);
            }
            // Field is optional unless NOT NULL — or always optional when
            // allValuesOptional is set (e.g. for patch/update payloads).
            const nullValue = allValuesOptional
                ? "?"
                : field.notNullValue
                    ? ""
                    : "?";
            typesArrayTypeScript.push(`  ${field.fieldName}${nullValue}: ${typeMap(field)};`);
            typesArrayJavascript.push(` * @property {${typeMap(field)}${nullValue}} ${field.fieldName}`);
        });
        typesArrayTypeScript.push(`}`);
        typesArrayJavascript.push(` */`);
        if (paradigm?.match(/javascript/i)) {
            typeDefinition = typesArrayJavascript.join("\n");
        }
        if (paradigm?.match(/typescript/i)) {
            typeDefinition = typesArrayTypeScript.join("\n");
        }
    }
    catch (error) {
        console.log(error.message);
        typeDefinition = null;
    }
    return { typeDefinition, tdName };
}
|
||||
15
dist/lib/sqlite/db-insert.d.ts
vendored
Normal file
15
dist/lib/sqlite/db-insert.d.ts
vendored
Normal file
@ -0,0 +1,15 @@
|
||||
import type { APIResponseObject } from "../../types";
/** Parameters accepted by `DbInsert`. */
type Params<Schema extends {
    [k: string]: any;
} = {
    [k: string]: any;
}, Table extends string = string> = {
    table: Table;
    data: Schema[];
};
/**
 * Insert a batch of rows into `table`, stamping created_at/updated_at.
 * Resolves with an API-style response object; never rejects.
 */
export default function DbInsert<Schema extends {
    [k: string]: any;
} = {
    [k: string]: any;
}, Table extends string = string>({ table, data }: Params<Schema, Table>): Promise<APIResponseObject>;
export {};
|
||||
32
dist/lib/sqlite/db-insert.js
vendored
Normal file
32
dist/lib/sqlite/db-insert.js
vendored
Normal file
@ -0,0 +1,32 @@
|
||||
import DbClient from ".";
|
||||
import sqlInsertGenerator from "../../utils/sql-insert-generator";
|
||||
/**
 * Insert a batch of rows into `table`.
 *
 * Every row is stamped with created_at/updated_at (epoch ms); note this
 * overwrites any caller-supplied values for those keys. Never throws:
 * errors are returned as `{ success: false, error }`.
 */
export default async function DbInsert({ table, data }) {
    try {
        // FIX: take a single timestamp for the whole batch. The original
        // called Date.now() twice per row, so created_at and updated_at
        // could differ within one row and drift across the batch.
        const now = Date.now();
        const finalData = data.map((d) => ({
            ...d,
            created_at: now,
            updated_at: now,
        }));
        const sqlObj = sqlInsertGenerator({
            tableName: table,
            data: finalData,
        });
        const res = DbClient.run(sqlObj?.query || "", sqlObj?.values || []);
        return {
            // lastInsertRowid is 0 when nothing was inserted.
            success: Boolean(Number(res.lastInsertRowid)),
            postInsertReturn: {
                affectedRows: res.changes,
                insertId: Number(res.lastInsertRowid),
            },
            debug: {
                sqlObj,
            },
        };
    }
    catch (error) {
        return {
            success: false,
            error: error.message,
        };
    }
}
|
||||
72
dist/lib/sqlite/db-schema-manager.d.ts
vendored
Normal file
72
dist/lib/sqlite/db-schema-manager.d.ts
vendored
Normal file
@ -0,0 +1,72 @@
|
||||
#!/usr/bin/env bun
|
||||
import type { BUN_SQLITE_DatabaseSchemaType } from "../../types";
/**
 * Synchronizes a bun:sqlite database with a declarative schema: creates,
 * renames, updates and drops tables, columns and indexes, tracking managed
 * tables in an internal bookkeeping table.
 */
declare class SQLiteSchemaManager {
    private db;
    private db_manager_table_name;
    private recreate_vector_table;
    private db_schema;
    constructor({ schema, recreate_vector_table, }: {
        schema: BUN_SQLITE_DatabaseSchemaType;
        recreate_vector_table?: boolean;
    });
    private createDbManagerTable;
    private insertDbManagerTable;
    private removeDbManagerTable;
    /**
     * Main synchronization method
     */
    syncSchema(): Promise<void>;
    /**
     * Get list of existing tables in the database
     */
    private getExistingTables;
    /**
     * Drop tables that are no longer in the schema
     */
    private dropRemovedTables;
    /**
     * Sync a single table (create or update)
     */
    private syncTable;
    /**
     * Create a new table
     */
    private createTable;
    /**
     * Update an existing table
     */
    private updateTable;
    /**
     * Get existing columns for a table
     */
    private getTableColumns;
    /**
     * Add a new column to existing table
     */
    private addColumn;
    /**
     * Recreate table (for complex schema changes)
     */
    private recreateTable;
    /**
     * Build column definition SQL
     */
    private buildColumnDefinition;
    /**
     * Map DSQL data types to SQLite types
     */
    private mapDataType;
    /**
     * Build foreign key constraint
     */
    private buildForeignKeyConstraint;
    /**
     * Sync indexes for a table
     */
    private syncIndexes;
    /**
     * Close database connection
     */
    close(): void;
}
export { SQLiteSchemaManager };
|
||||
456
dist/lib/sqlite/db-schema-manager.js
vendored
Normal file
456
dist/lib/sqlite/db-schema-manager.js
vendored
Normal file
@ -0,0 +1,456 @@
|
||||
#!/usr/bin/env bun
|
||||
import { Database } from "bun:sqlite";
|
||||
import _ from "lodash";
|
||||
import DbClient from ".";
|
||||
// Schema Manager Class
|
||||
class SQLiteSchemaManager {
|
||||
db;
|
||||
db_manager_table_name;
|
||||
recreate_vector_table;
|
||||
db_schema;
|
||||
constructor({ schema, recreate_vector_table = false, }) {
|
||||
this.db = DbClient;
|
||||
this.db_manager_table_name = "__db_schema_manager__";
|
||||
this.db.run("PRAGMA foreign_keys = ON;");
|
||||
this.recreate_vector_table = recreate_vector_table;
|
||||
this.createDbManagerTable();
|
||||
this.db_schema = schema;
|
||||
}
|
||||
createDbManagerTable() {
|
||||
this.db.run(`
|
||||
CREATE TABLE IF NOT EXISTS ${this.db_manager_table_name} (
|
||||
table_name TEXT NOT NULL,
|
||||
created_at INTEGER NOT NULL,
|
||||
updated_at INTEGER NOT NULL
|
||||
)
|
||||
`);
|
||||
}
|
||||
insertDbManagerTable(tableName) {
|
||||
this.db.run(`INSERT INTO ${this.db_manager_table_name} (table_name,created_at,updated_at) VALUES (?, ?, ?)`, [tableName, Date.now(), Date.now()]);
|
||||
}
|
||||
removeDbManagerTable(tableName) {
|
||||
this.db.run(`DELETE FROM ${this.db_manager_table_name} WHERE table_name = ?`, [tableName]);
|
||||
}
|
||||
/**
|
||||
* Main synchronization method
|
||||
*/
|
||||
async syncSchema() {
|
||||
console.log("Starting schema synchronization...");
|
||||
const existingTables = this.getExistingTables();
|
||||
const schemaTables = this.db_schema.tables.map((t) => t.tableName);
|
||||
// 2. Create or update tables
|
||||
for (const table of this.db_schema.tables) {
|
||||
await this.syncTable(table, existingTables);
|
||||
}
|
||||
// 1. Drop tables that no longer exist in schema
|
||||
await this.dropRemovedTables(existingTables, schemaTables);
|
||||
console.log("Schema synchronization complete!");
|
||||
}
|
||||
/**
|
||||
* Get list of existing tables in the database
|
||||
*/
|
||||
getExistingTables() {
|
||||
let sql = `SELECT table_name FROM ${this.db_manager_table_name}`;
|
||||
const query = this.db.query(sql);
|
||||
const results = query.all();
|
||||
return results.map((r) => r.table_name);
|
||||
}
|
||||
/**
|
||||
* Drop tables that are no longer in the schema
|
||||
*/
|
||||
async dropRemovedTables(existingTables, schemaTables) {
|
||||
const tablesToDrop = existingTables.filter((t) => !schemaTables.includes(t) &&
|
||||
!schemaTables.find((scT) => t.startsWith(scT + "_")));
|
||||
for (const tableName of tablesToDrop) {
|
||||
console.log(`Dropping table: ${tableName}`);
|
||||
this.db.run(`DROP TABLE IF EXISTS "${tableName}"`);
|
||||
this.db.run(`DELETE FROM ${this.db_manager_table_name} WHERE table_name = "${tableName}"`);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Sync a single table (create or update)
|
||||
*/
|
||||
async syncTable(table, existingTables) {
|
||||
let tableExists = existingTables.includes(table.tableName);
|
||||
// Handle table rename
|
||||
if (table.tableNameOld && table.tableNameOld !== table.tableName) {
|
||||
if (existingTables.includes(table.tableNameOld)) {
|
||||
console.log(`Renaming table: ${table.tableNameOld} -> ${table.tableName}`);
|
||||
this.db.run(`ALTER TABLE "${table.tableNameOld}" RENAME TO "${table.tableName}"`);
|
||||
this.insertDbManagerTable(table.tableName);
|
||||
this.removeDbManagerTable(table.tableNameOld);
|
||||
tableExists = true;
|
||||
}
|
||||
}
|
||||
if (!tableExists) {
|
||||
// Create new table
|
||||
await this.createTable(table);
|
||||
this.insertDbManagerTable(table.tableName);
|
||||
}
|
||||
else {
|
||||
// Update existing table
|
||||
await this.updateTable(table);
|
||||
}
|
||||
// Sync indexes
|
||||
await this.syncIndexes(table);
|
||||
}
|
||||
/**
|
||||
* Create a new table
|
||||
*/
|
||||
async createTable(table) {
|
||||
console.log(`Creating table: ${table.tableName}`);
|
||||
let new_table = _.cloneDeep(table);
|
||||
if (new_table.parentTableName) {
|
||||
const parent_table = this.db_schema.tables.find((t) => t.tableName === new_table.parentTableName);
|
||||
if (!parent_table) {
|
||||
throw new Error(`Parent table \`${new_table.parentTableName}\` not found for \`${new_table.tableName}\``);
|
||||
}
|
||||
new_table = _.merge(parent_table, {
|
||||
tableName: new_table.tableName,
|
||||
tableDescription: new_table.tableDescription,
|
||||
});
|
||||
}
|
||||
const columns = [];
|
||||
const foreignKeys = [];
|
||||
for (const field of new_table.fields) {
|
||||
const columnDef = this.buildColumnDefinition(field);
|
||||
columns.push(columnDef);
|
||||
if (field.foreignKey) {
|
||||
foreignKeys.push(this.buildForeignKeyConstraint(field));
|
||||
}
|
||||
}
|
||||
// Add unique constraints
|
||||
if (new_table.uniqueConstraints) {
|
||||
for (const constraint of new_table.uniqueConstraints) {
|
||||
if (constraint.constraintTableFields &&
|
||||
constraint.constraintTableFields.length > 0) {
|
||||
const fields = constraint.constraintTableFields
|
||||
.map((f) => `"${f.value}"`)
|
||||
.join(", ");
|
||||
const constraintName = constraint.constraintName ||
|
||||
`unique_${fields.replace(/"/g, "")}`;
|
||||
columns.push(`CONSTRAINT "${constraintName}" UNIQUE (${fields})`);
|
||||
}
|
||||
}
|
||||
}
|
||||
const allConstraints = [...columns, ...foreignKeys];
|
||||
const sql = new_table.isVector
|
||||
? `CREATE VIRTUAL TABLE "${new_table.tableName}" USING ${new_table.vectorType || "vec0"}(${allConstraints.join(", ")})`
|
||||
: `CREATE TABLE "${new_table.tableName}" (${allConstraints.join(", ")})`;
|
||||
this.db.run(sql);
|
||||
}
|
||||
/**
|
||||
* Update an existing table
|
||||
*/
|
||||
async updateTable(table) {
|
||||
console.log(`Updating table: ${table.tableName}`);
|
||||
const existingColumns = this.getTableColumns(table.tableName);
|
||||
const schemaColumns = table.fields.map((f) => f.fieldName || "");
|
||||
// SQLite has limited ALTER TABLE support
|
||||
// We need to use the recreation strategy for complex changes
|
||||
const columnsToAdd = table.fields.filter((f) => f.fieldName &&
|
||||
!existingColumns.find((c) => c.name == f.fieldName && c.type == this.mapDataType(f)));
|
||||
const columnsToRemove = existingColumns.filter((c) => !schemaColumns.includes(c.name));
|
||||
const columnsToUpdate = table.fields.filter((f) => f.fieldName &&
|
||||
f.updatedField &&
|
||||
existingColumns.find((c) => c.name == f.fieldName && c.type == this.mapDataType(f)));
|
||||
// Simple case: only adding columns
|
||||
if (columnsToRemove.length === 0 && columnsToUpdate.length === 0) {
|
||||
for (const field of columnsToAdd) {
|
||||
await this.addColumn(table.tableName, field);
|
||||
}
|
||||
}
|
||||
else {
|
||||
// Complex case: need to recreate table
|
||||
await this.recreateTable(table);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Get existing columns for a table
|
||||
*/
|
||||
getTableColumns(tableName) {
|
||||
const query = this.db.query(`PRAGMA table_info("${tableName}")`);
|
||||
const results = query.all();
|
||||
return results;
|
||||
}
|
||||
/**
|
||||
* Add a new column to existing table
|
||||
*/
|
||||
async addColumn(tableName, field) {
|
||||
console.log(`Adding column: ${tableName}.${field.fieldName}`);
|
||||
const columnDef = this.buildColumnDefinition(field);
|
||||
// Remove PRIMARY KEY and UNIQUE constraints for ALTER TABLE ADD COLUMN
|
||||
const cleanDef = columnDef
|
||||
.replace(/PRIMARY KEY/gi, "")
|
||||
.replace(/AUTOINCREMENT/gi, "")
|
||||
.replace(/UNIQUE/gi, "")
|
||||
.trim();
|
||||
const sql = `ALTER TABLE "${tableName}" ADD COLUMN ${cleanDef}`;
|
||||
this.db.run(sql);
|
||||
}
|
||||
/**
|
||||
* Recreate table (for complex schema changes)
|
||||
*/
|
||||
async recreateTable(table) {
|
||||
if (table.isVector) {
|
||||
if (!this.recreate_vector_table) {
|
||||
return;
|
||||
}
|
||||
console.log(`Recreating vector table: ${table.tableName}`);
|
||||
const existingRows = this.db
|
||||
.query(`SELECT * FROM "${table.tableName}"`)
|
||||
.all();
|
||||
this.db.run(`DROP TABLE "${table.tableName}"`);
|
||||
await this.createTable(table);
|
||||
if (existingRows.length > 0) {
|
||||
for (let i = 0; i < existingRows.length; i++) {
|
||||
const row = existingRows[i];
|
||||
if (!row)
|
||||
continue;
|
||||
const columns = Object.keys(row);
|
||||
const placeholders = columns.map(() => "?").join(", ");
|
||||
this.db.run(`INSERT INTO "${table.tableName}" (${columns.join(", ")}) VALUES (${placeholders})`, Object.values(row));
|
||||
}
|
||||
}
|
||||
return;
|
||||
}
|
||||
const tempTableName = `${table.tableName}_temp_${Date.now()}`;
|
||||
// Get existing data
|
||||
const existingColumns = this.getTableColumns(table.tableName);
|
||||
const columnsToKeep = table.fields
|
||||
.filter((f) => f.fieldName &&
|
||||
existingColumns.find((c) => c.name == f.fieldName &&
|
||||
c.type == this.mapDataType(f)))
|
||||
.map((f) => f.fieldName);
|
||||
// Create temp table with new schema
|
||||
const tempTable = { ...table, tableName: tempTableName };
|
||||
await this.createTable(tempTable);
|
||||
// Copy data if there are common columns
|
||||
if (columnsToKeep.length > 0) {
|
||||
const columnList = columnsToKeep.map((c) => `"${c}"`).join(", ");
|
||||
this.db.run(`INSERT INTO "${tempTableName}" (${columnList}) SELECT ${columnList} FROM "${table.tableName}"`);
|
||||
}
|
||||
// Drop old table
|
||||
this.db.run(`DROP TABLE "${table.tableName}"`);
|
||||
// Rename temp table
|
||||
this.db.run(`ALTER TABLE "${tempTableName}" RENAME TO "${table.tableName}"`);
|
||||
}
|
||||
/**
|
||||
* Build column definition SQL
|
||||
*/
|
||||
buildColumnDefinition(field) {
|
||||
if (!field.fieldName) {
|
||||
throw new Error("Field name is required");
|
||||
}
|
||||
const fieldName = field.sideCar
|
||||
? `+${field.fieldName}`
|
||||
: `${field.fieldName}`;
|
||||
const parts = [fieldName];
|
||||
// Data type mapping
|
||||
const dataType = this.mapDataType(field);
|
||||
parts.push(dataType);
|
||||
// Primary key
|
||||
if (field.primaryKey) {
|
||||
parts.push("PRIMARY KEY");
|
||||
if (field.autoIncrement) {
|
||||
parts.push("AUTOINCREMENT");
|
||||
}
|
||||
}
|
||||
// Not null
|
||||
if (field.notNullValue || field.primaryKey) {
|
||||
if (!field.primaryKey) {
|
||||
parts.push("NOT NULL");
|
||||
}
|
||||
}
|
||||
// Unique
|
||||
if (field.unique && !field.primaryKey) {
|
||||
parts.push("UNIQUE");
|
||||
}
|
||||
// Default value
|
||||
if (field.defaultValue !== undefined) {
|
||||
if (typeof field.defaultValue === "string") {
|
||||
parts.push(
|
||||
// Escape single quotes by doubling them to prevent SQL injection and wrap in single quotes
|
||||
`DEFAULT '${field.defaultValue.replace(/'/g, "''")}'`);
|
||||
}
|
||||
else {
|
||||
parts.push(`DEFAULT ${field.defaultValue}`);
|
||||
}
|
||||
}
|
||||
else if (field.defaultValueLiteral) {
|
||||
parts.push(`DEFAULT ${field.defaultValueLiteral}`);
|
||||
}
|
||||
return parts.join(" ");
|
||||
}
|
||||
/**
|
||||
* Map DSQL data types to SQLite types
|
||||
*/
|
||||
mapDataType(field) {
|
||||
const dataType = field.dataType?.toLowerCase() || "text";
|
||||
const vectorSize = field.vectorSize || 1536;
|
||||
// Vector Embeddings
|
||||
if (field.isVector) {
|
||||
return `FLOAT[${vectorSize}]`;
|
||||
}
|
||||
// Integer types
|
||||
if (dataType.includes("int") ||
|
||||
dataType === "bigint" ||
|
||||
dataType === "smallint" ||
|
||||
dataType === "tinyint") {
|
||||
return "INTEGER";
|
||||
}
|
||||
// Real/Float types
|
||||
if (dataType.includes("real") ||
|
||||
dataType.includes("float") ||
|
||||
dataType.includes("double") ||
|
||||
dataType === "decimal" ||
|
||||
dataType === "numeric") {
|
||||
return "REAL";
|
||||
}
|
||||
// Blob types
|
||||
if (dataType.includes("blob") || dataType.includes("binary")) {
|
||||
return "BLOB";
|
||||
}
|
||||
// Boolean
|
||||
if (dataType === "boolean" || dataType === "bool") {
|
||||
return "INTEGER"; // SQLite uses INTEGER for boolean (0/1)
|
||||
}
|
||||
// Date/Time types
|
||||
if (dataType.includes("date") || dataType.includes("time")) {
|
||||
return "TEXT"; // SQLite stores dates as TEXT or INTEGER
|
||||
}
|
||||
// Default to TEXT for all text-based types
|
||||
return "TEXT";
|
||||
}
|
||||
/**
|
||||
* Build foreign key constraint
|
||||
*/
|
||||
buildForeignKeyConstraint(field) {
|
||||
const fk = field.foreignKey;
|
||||
let constraint = `FOREIGN KEY ("${field.fieldName}") REFERENCES "${fk.destinationTableName}"("${fk.destinationTableColumnName}")`;
|
||||
if (fk.cascadeDelete) {
|
||||
constraint += " ON DELETE CASCADE";
|
||||
}
|
||||
if (fk.cascadeUpdate) {
|
||||
constraint += " ON UPDATE CASCADE";
|
||||
}
|
||||
return constraint;
|
||||
}
|
||||
/**
|
||||
* Sync indexes for a table
|
||||
*/
|
||||
async syncIndexes(table) {
|
||||
if (!table.indexes || table.indexes.length === 0) {
|
||||
return;
|
||||
}
|
||||
// Get existing indexes
|
||||
const query = this.db.query(`SELECT name FROM sqlite_master WHERE type='index' AND tbl_name='${table.tableName}' AND name NOT LIKE 'sqlite_%'`);
|
||||
const existingIndexes = query.all().map((r) => r.name);
|
||||
// Drop indexes not in schema
|
||||
for (const indexName of existingIndexes) {
|
||||
const stillExists = table.indexes.some((idx) => idx.indexName === indexName);
|
||||
if (!stillExists) {
|
||||
console.log(`Dropping index: ${indexName}`);
|
||||
this.db.run(`DROP INDEX IF EXISTS "${indexName}"`);
|
||||
}
|
||||
}
|
||||
// Create new indexes
|
||||
for (const index of table.indexes) {
|
||||
if (!index.indexName ||
|
||||
!index.indexTableFields ||
|
||||
index.indexTableFields.length === 0) {
|
||||
continue;
|
||||
}
|
||||
if (!existingIndexes.includes(index.indexName)) {
|
||||
console.log(`Creating index: ${index.indexName}`);
|
||||
const fields = index.indexTableFields
|
||||
.map((f) => `"${f.value}"`)
|
||||
.join(", ");
|
||||
const unique = index.indexType === "regular" ? "" : ""; // SQLite doesn't have FULLTEXT in CREATE INDEX
|
||||
this.db.run(`CREATE ${unique}INDEX "${index.indexName}" ON "${table.tableName}" (${fields})`);
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
 * Close database connection
 */
close() {
    // Releases the underlying bun:sqlite handle; the manager must not be
    // used after this call.
    this.db.close();
}
|
||||
}
|
||||
// Example usage
/**
 * Demonstrates the schema shape this module consumes: two tables ("users",
 * "posts") with fields, a regular index, and a cascading foreign key.
 * NOTE(review): `schema` is constructed but never used, and `main` is never
 * invoked in this file — the function currently has no runtime effect.
 * Presumably a call such as `new SQLiteSchemaManager(...).sync(schema)` was
 * intended; confirm before relying on this example.
 */
async function main() {
    const schema = {
        dbName: "example_db",
        tables: [
            {
                tableName: "users",
                tableDescription: "User accounts",
                fields: [
                    {
                        // Auto-incrementing integer primary key.
                        fieldName: "id",
                        dataType: "INTEGER",
                        primaryKey: true,
                        autoIncrement: true,
                    },
                    {
                        // Required, unique login name.
                        fieldName: "username",
                        dataType: "TEXT",
                        notNullValue: true,
                        unique: true,
                    },
                    {
                        fieldName: "email",
                        dataType: "TEXT",
                        notNullValue: true,
                    },
                    {
                        // SQL literal default (not a quoted string value).
                        fieldName: "created_at",
                        dataType: "TEXT",
                        defaultValueLiteral: "CURRENT_TIMESTAMP",
                    },
                ],
                indexes: [
                    {
                        indexName: "idx_users_email",
                        indexType: "regular",
                        indexTableFields: [
                            { value: "email", dataType: "TEXT" },
                        ],
                    },
                ],
            },
            {
                tableName: "posts",
                fields: [
                    {
                        fieldName: "id",
                        dataType: "INTEGER",
                        primaryKey: true,
                        autoIncrement: true,
                    },
                    {
                        // FK to users.id; deleting a user removes their posts.
                        fieldName: "user_id",
                        dataType: "INTEGER",
                        notNullValue: true,
                        foreignKey: {
                            destinationTableName: "users",
                            destinationTableColumnName: "id",
                            cascadeDelete: true,
                        },
                    },
                    {
                        fieldName: "title",
                        dataType: "TEXT",
                        notNullValue: true,
                    },
                    {
                        fieldName: "content",
                        dataType: "TEXT",
                    },
                ],
            },
        ],
    };
}
|
||||
export { SQLiteSchemaManager };
|
||||
6
dist/lib/sqlite/db-schema-to-typedef.d.ts
vendored
Normal file
6
dist/lib/sqlite/db-schema-to-typedef.d.ts
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
import type { BUN_SQLITE_DatabaseSchemaType } from "../../types";
|
||||
type Params = {
|
||||
dbSchema?: BUN_SQLITE_DatabaseSchemaType;
|
||||
};
|
||||
export default function dbSchemaToType(params?: Params): string[] | undefined;
|
||||
export {};
|
||||
44
dist/lib/sqlite/db-schema-to-typedef.js
vendored
Normal file
44
dist/lib/sqlite/db-schema-to-typedef.js
vendored
Normal file
@ -0,0 +1,44 @@
|
||||
import _ from "lodash";
|
||||
import generateTypeDefinition from "./db-generate-type-defs";
|
||||
/**
 * Convert a Bun-SQLite database schema into TypeScript type-definition
 * strings: a `BunSQLiteTables` const tuple of table names, one typedef per
 * table, and an intersection of all generated typedef names.
 * @param params.dbSchema - Schema to convert; returns undefined when absent.
 * @returns Array of TS source strings, or undefined when no schema is given.
 */
export default function dbSchemaToType(params) {
    let datasquirelSchema = params?.dbSchema;
    if (!datasquirelSchema)
        return;
    let tableNames = `export const BunSQLiteTables = [\n${datasquirelSchema.tables
        .map((tbl) => ` "${tbl.tableName}",`)
        .join("\n")}\n] as const`;
    const dbTablesSchemas = datasquirelSchema.tables;
    // e.g. "my-db name" -> "MY_DB_NAME", used as the typedef-name infix.
    const defDbName = datasquirelSchema.dbName
        ?.toUpperCase()
        .replace(/ |\-/g, "_");
    const defNames = [];
    const schemas = dbTablesSchemas
        .map((table) => {
        let final_table = _.cloneDeep(table);
        // A table may "inherit" its definition from a parent table, keeping
        // only its own name and description.
        if (final_table.parentTableName) {
            const parent_table = dbTablesSchemas.find((t) => t.tableName === final_table.parentTableName);
            if (parent_table) {
                // BUGFIX: _.merge mutates its first argument. Merging into
                // `parent_table` directly corrupted the shared entry in
                // `dbTablesSchemas` for subsequent iterations; clone it first.
                final_table = _.merge(_.cloneDeep(parent_table), {
                    tableName: final_table.tableName,
                    tableDescription: final_table.tableDescription,
                });
            }
        }
        const defObj = generateTypeDefinition({
            paradigm: "TypeScript",
            table: final_table,
            typeDefName: `BUN_SQLITE_${defDbName}_${final_table.tableName.toUpperCase()}`,
            allValuesOptional: true,
            addExport: true,
        });
        // Collect only non-empty typedef names for the ALL_TYPEDEFS union.
        if (defObj.tdName?.match(/./)) {
            defNames.push(defObj.tdName);
        }
        return defObj.typeDefinition;
    })
        .filter((schm) => typeof schm == "string");
    const allTd = defNames?.[0]
        ? `export type BUN_SQLITE_${defDbName}_ALL_TYPEDEFS = ${defNames.join(` & `)}`
        : ``;
    return [tableNames, ...schemas, allTd];
}
|
||||
17
dist/lib/sqlite/db-select.d.ts
vendored
Normal file
17
dist/lib/sqlite/db-select.d.ts
vendored
Normal file
@ -0,0 +1,17 @@
|
||||
import type { APIResponseObject, ServerQueryParam } from "../../types";
|
||||
type Params<Schema extends {
|
||||
[k: string]: any;
|
||||
} = {
|
||||
[k: string]: any;
|
||||
}, Table extends string = string> = {
|
||||
query?: ServerQueryParam<Schema>;
|
||||
table: Table;
|
||||
count?: boolean;
|
||||
targetId?: number | string;
|
||||
};
|
||||
export default function DbSelect<Schema extends {
|
||||
[k: string]: any;
|
||||
} = {
|
||||
[k: string]: any;
|
||||
}, Table extends string = string>({ table, query, count, targetId, }: Params<Schema, Table>): Promise<APIResponseObject<Schema>>;
|
||||
export {};
|
||||
48
dist/lib/sqlite/db-select.js
vendored
Normal file
48
dist/lib/sqlite/db-select.js
vendored
Normal file
@ -0,0 +1,48 @@
|
||||
import mysql from "mysql";
|
||||
import DbClient from ".";
|
||||
import _ from "lodash";
|
||||
import sqlGenerator from "../../utils/sql-generator";
|
||||
/**
 * Run a SELECT against the local SQLite database.
 * @param table - Table to query.
 * @param query - Optional server-query description passed to the generator.
 * @param count - When true, return only a `count` (COUNT(*)) result.
 * @param targetId - Convenience filter merged in as `id = targetId`.
 * @returns API response object with `payload`/`singleRes`, or `count`,
 *   plus a `debug` section; `{ success: false, error }` on failure.
 */
export default async function DbSelect({ table, query, count, targetId, }) {
    try {
        let finalQuery = query || {};
        // Fold a target id into the query object as an equality filter.
        if (targetId) {
            const idFilter = {
                query: {
                    id: { value: String(targetId) },
                },
            };
            finalQuery = _.merge(finalQuery, idFilter);
        }
        const sqlObj = sqlGenerator({
            tableName: table,
            genObject: finalQuery,
            count,
        });
        // mysql.format substitutes the generated placeholders safely.
        const sql = mysql.format(sqlObj.string, sqlObj.values);
        const rows = DbClient.query(sql).all();
        const debug = { sqlObj, sql };
        // Count mode: collapse the response to just the numeric count.
        if (count) {
            return {
                success: Boolean(rows[0]),
                count: Number(rows[0]?.["COUNT(*)"]),
                debug,
            };
        }
        return {
            success: Boolean(rows[0]),
            payload: rows,
            singleRes: rows[0],
            debug,
        };
    }
    catch (error) {
        return {
            success: false,
            error: error.message,
        };
    }
}
|
||||
11
dist/lib/sqlite/db-sql.d.ts
vendored
Normal file
11
dist/lib/sqlite/db-sql.d.ts
vendored
Normal file
@ -0,0 +1,11 @@
|
||||
import type { APIResponseObject } from "../../types";
|
||||
type Params = {
|
||||
sql: string;
|
||||
values?: (string | number)[];
|
||||
};
|
||||
export default function DbSQL<T extends {
|
||||
[k: string]: any;
|
||||
} = {
|
||||
[k: string]: any;
|
||||
}>({ sql, values }: Params): Promise<APIResponseObject<T>>;
|
||||
export {};
|
||||
33
dist/lib/sqlite/db-sql.js
vendored
Normal file
33
dist/lib/sqlite/db-sql.js
vendored
Normal file
@ -0,0 +1,33 @@
|
||||
import DbClient from ".";
|
||||
import _ from "lodash";
|
||||
/**
 * Execute a raw SQL statement against the local SQLite database.
 * SELECT statements are run through `.query().all()` so rows are returned;
 * everything else goes through `.run()` and returns change metadata.
 * @param sql - Statement text with `?` placeholders.
 * @param values - Optional bound parameter values.
 * @returns API response with `payload`/`singleRes` for SELECTs,
 *   `postInsertReturn` for writes, or `{ success: false, error }` on failure.
 */
export default async function DbSQL({ sql, values }) {
    try {
        // BUGFIX: the original tested /^select/i, so a statement with leading
        // whitespace was routed to .run() and its rows were silently lost.
        const res = sql.match(/^\s*select/i)
            ? DbClient.query(sql).all(...(values || []))
            : DbClient.run(sql, values || []);
        return {
            success: true,
            // Arrays come from SELECTs; writes return a changes object.
            payload: Array.isArray(res) ? res : undefined,
            singleRes: Array.isArray(res) ? res?.[0] : undefined,
            postInsertReturn: Array.isArray(res)
                ? undefined
                : {
                    affectedRows: res.changes,
                    insertId: Number(res.lastInsertRowid),
                },
            debug: {
                sqlObj: {
                    sql,
                    values,
                },
                sql,
            },
        };
    }
    catch (error) {
        return {
            success: false,
            error: error.message,
        };
    }
}
|
||||
17
dist/lib/sqlite/db-update.d.ts
vendored
Normal file
17
dist/lib/sqlite/db-update.d.ts
vendored
Normal file
@ -0,0 +1,17 @@
|
||||
import type { APIResponseObject, ServerQueryParam } from "../../types";
|
||||
type Params<Schema extends {
|
||||
[k: string]: any;
|
||||
} = {
|
||||
[k: string]: any;
|
||||
}, Table extends string = string> = {
|
||||
table: Table;
|
||||
data: Schema;
|
||||
query?: ServerQueryParam<Schema>;
|
||||
targetId?: number | string;
|
||||
};
|
||||
export default function DbUpdate<Schema extends {
|
||||
[k: string]: any;
|
||||
} = {
|
||||
[k: string]: any;
|
||||
}, Table extends string = string>({ table, data, query, targetId, }: Params<Schema, Table>): Promise<APIResponseObject>;
|
||||
export {};
|
||||
68
dist/lib/sqlite/db-update.js
vendored
Normal file
68
dist/lib/sqlite/db-update.js
vendored
Normal file
@ -0,0 +1,68 @@
|
||||
import DbClient from ".";
|
||||
import _ from "lodash";
|
||||
import sqlGenerator from "../../utils/sql-generator";
|
||||
/**
 * Update rows in a table. Refuses to run without a WHERE clause (derived
 * from `query`/`targetId`) so an unfiltered UPDATE can never be issued.
 * Always stamps `updated_at` with the current Unix-ms timestamp.
 * @param table - Table to update.
 * @param data - Column/value map; every value is bound as a string.
 * @param query - Optional server-query used to build the WHERE clause.
 * @param targetId - Convenience filter merged in as `id = targetId`.
 * @returns `{ success, postInsertReturn, debug }` on success,
 *   `{ success: false, msg }` when no WHERE clause could be built,
 *   or `{ success: false, error }` on failure.
 */
export default async function DbUpdate({ table, data, query, targetId, }) {
    try {
        let finalQuery = query || {};
        if (targetId) {
            finalQuery = _.merge(finalQuery, {
                query: {
                    id: {
                        value: String(targetId),
                    },
                },
            });
        }
        const sqlQueryObj = sqlGenerator({
            tableName: table,
            genObject: finalQuery,
        });
        let values = [];
        // Reuse only the WHERE portion of the generated SELECT statement.
        const whereClause = sqlQueryObj.string.match(/WHERE .*/)?.[0];
        if (whereClause) {
            let sql = `UPDATE ${table} SET`;
            const finalData = {
                ...data,
                updated_at: Date.now(),
            };
            // Build the SET clause as a list and join it, which (a) cannot
            // produce a trailing comma when a falsy key is skipped, and
            // (b) double-quotes column identifiers so reserved words or
            // hostile keys in `data` cannot break out of the identifier
            // position (SQLite identifier quoting).
            const setClauses = [];
            for (const key of Object.keys(finalData)) {
                if (!key)
                    continue;
                setClauses.push(` "${key}"=?`);
                values.push(String(finalData[key]));
            }
            sql += setClauses.join(",");
            sql += ` ${whereClause}`;
            // WHERE-clause bind values come after the SET values.
            values = [...values, ...sqlQueryObj.values];
            const res = DbClient.run(sql, values);
            return {
                success: Boolean(res.changes),
                postInsertReturn: {
                    affectedRows: res.changes,
                    insertId: Number(res.lastInsertRowid),
                },
                debug: {
                    sql,
                    values,
                },
            };
        }
        else {
            return {
                success: false,
                msg: `No WHERE clause`,
            };
        }
    }
    catch (error) {
        return {
            success: false,
            error: error.message,
        };
    }
}
|
||||
3
dist/lib/sqlite/index.d.ts
vendored
Normal file
3
dist/lib/sqlite/index.d.ts
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
import { Database } from "bun:sqlite";
|
||||
declare const DbClient: Database;
|
||||
export default DbClient;
|
||||
17
dist/lib/sqlite/index.js
vendored
Normal file
17
dist/lib/sqlite/index.js
vendored
Normal file
@ -0,0 +1,17 @@
|
||||
import { Database } from "bun:sqlite";
|
||||
import * as sqliteVec from "sqlite-vec";
|
||||
import grabDirNames from "../../data/grab-dir-names";
|
||||
import init from "../../functions/init";
|
||||
import grabDBDir from "../../utils/grab-db-dir";
|
||||
// Resolve project directories and runtime configuration (top-level await:
// this module must be loaded under Bun/ESM).
const { ROOT_DIR } = grabDirNames();
const { config } = await init();
// NOTE(review): `db_dir` computed here is never used — grabDBDir() below
// re-derives the directory from `config` itself. Candidate dead code.
let db_dir = ROOT_DIR;
if (config.db_dir) {
    db_dir = config.db_dir;
}
const { db_file_path } = grabDBDir({ config });
// Shared bun:sqlite client; `create: true` makes the DB file if missing.
const DbClient = new Database(db_file_path, {
    create: true,
});
// Load the sqlite-vec extension so vector columns/queries work.
sqliteVec.load(DbClient);
export default DbClient;
|
||||
7
dist/lib/sqlite/schema-to-typedef.d.ts
vendored
Normal file
7
dist/lib/sqlite/schema-to-typedef.d.ts
vendored
Normal file
@ -0,0 +1,7 @@
|
||||
import type { BUN_SQLITE_DatabaseSchemaType } from "../../types";
|
||||
type Params = {
|
||||
dbSchema: BUN_SQLITE_DatabaseSchemaType;
|
||||
dst_file: string;
|
||||
};
|
||||
export default function dbSchemaToTypeDef({ dbSchema, dst_file }: Params): void;
|
||||
export {};
|
||||
18
dist/lib/sqlite/schema-to-typedef.js
vendored
Normal file
18
dist/lib/sqlite/schema-to-typedef.js
vendored
Normal file
@ -0,0 +1,18 @@
|
||||
import path from "node:path";
|
||||
import { existsSync, mkdirSync, writeFileSync } from "node:fs";
|
||||
import dbSchemaToType from "./db-schema-to-typedef";
|
||||
/**
 * Generate TypeScript definitions for a DB schema and write them to a file.
 * Creates the destination directory when missing. Errors are logged, never
 * thrown.
 * @param dbSchema - Schema to convert (throws internally when absent).
 * @param dst_file - Absolute/relative path of the output .ts file.
 */
export default function dbSchemaToTypeDef({ dbSchema, dst_file }) {
    try {
        if (!dbSchema)
            throw new Error("No schema found");
        const definitions = dbSchemaToType({ dbSchema });
        // Ensure the parent directory of the output file exists.
        const outputDir = path.dirname(dst_file);
        if (!existsSync(outputDir)) {
            mkdirSync(outputDir, { recursive: true });
        }
        const fileBody = definitions?.join("\n\n") || "";
        writeFileSync(dst_file, fileBody, "utf-8");
    }
    catch (error) {
        console.log(`Schema to Typedef Error =>`, error.message);
    }
}
|
||||
2
dist/lib/sqlite/schema.d.ts
vendored
Normal file
2
dist/lib/sqlite/schema.d.ts
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
import type { BUN_SQLITE_DatabaseSchemaType } from "../../types";
|
||||
export declare const DbSchema: BUN_SQLITE_DatabaseSchemaType;
|
||||
5
dist/lib/sqlite/schema.js
vendored
Normal file
5
dist/lib/sqlite/schema.js
vendored
Normal file
@ -0,0 +1,5 @@
|
||||
import _ from "lodash";
|
||||
// Placeholder database schema for this project; tables are expected to be
// filled in by the application. NOTE(review): the lodash import in this
// file is unused.
export const DbSchema = {
    dbName: "travis-ai",
    tables: [],
};
|
||||
1015
dist/types/index.d.ts
vendored
Normal file
1015
dist/types/index.d.ts
vendored
Normal file
File diff suppressed because it is too large
Load Diff
123
dist/types/index.js
vendored
Normal file
123
dist/types/index.js
vendored
Normal file
@ -0,0 +1,123 @@
|
||||
// Sensitive/internal user columns stripped from API responses.
// NOTE(review): "Omited" is a typo of "Omitted", but the name is part of the
// public export surface and cannot be renamed here.
export const UsersOmitedFields = [
    "password",
    "social_id",
    "verification_status",
    "date_created",
    "date_created_code",
    "date_created_timestamp",
    "date_updated",
    "date_updated_code",
    "date_updated_timestamp",
];
// Supported MariaDB collations.
export const MariaDBCollations = [
    "utf8mb4_bin",
    "utf8mb4_unicode_520_ci",
];
// Selectable content formats for text fields (title = display label,
// value = stored discriminator).
export const TextFieldTypesArray = [
    { title: "Plain Text", value: "plain" },
    { title: "Rich Text", value: "richText" },
    { title: "Markdown", value: "markdown" },
    { title: "JSON", value: "json" },
    { title: "YAML", value: "yaml" },
    { title: "HTML", value: "html" },
    { title: "CSS", value: "css" },
    { title: "Javascript", value: "javascript" },
    { title: "Shell", value: "shell" },
    { title: "Code", value: "code" },
];
// Column data types exposed for Bun/SQLite schemas.
export const BUN_SQLITE_DATATYPES = [
    { value: "TEXT" },
    { value: "INTEGER" },
];
|
||||
// Logical operators for combining query conditions.
export const ServerQueryOperators = ["AND", "OR"];
// Comparison/equality tokens accepted by the query builder. `_RAW` variants
// bind the value as-is (no automatic %...% wrapping); `_LOWER` variants
// compare case-insensitively via LOWER().
export const ServerQueryEqualities = [
    "EQUAL",
    "LIKE",
    "LIKE_RAW",
    "LIKE_LOWER",
    "LIKE_LOWER_RAW",
    "NOT LIKE",
    "NOT LIKE_RAW",
    "NOT_LIKE_LOWER",
    "NOT_LIKE_LOWER_RAW",
    "NOT EQUAL",
    "REGEXP",
    "FULLTEXT",
    "IN",
    "NOT IN",
    "BETWEEN",
    "NOT BETWEEN",
    "IS NULL",
    "IS NOT NULL",
    "EXISTS",
    "NOT EXISTS",
    "GREATER THAN",
    "GREATER THAN OR EQUAL",
    "LESS THAN",
    "LESS THAN OR EQUAL",
    "MATCH",
    "MATCH_BOOLEAN",
];
|
||||
// HTTP methods accepted by the data-CRUD endpoints (uppercase form).
export const DataCrudRequestMethods = [
    "GET",
    "POST",
    "PUT",
    "PATCH",
    "DELETE",
    "OPTIONS",
];
// Lowercase mirror of DataCrudRequestMethods; keep the two lists in sync.
export const DataCrudRequestMethodsLowerCase = [
    "get",
    "post",
    "put",
    "patch",
    "delete",
    "options",
];
// CRUD action discriminators used by the dsql crud layer.
export const DsqlCrudActions = ["insert", "update", "delete", "get"];
// Reserved query-string fields handled specially by the server.
export const QueryFields = [
    "duplicate",
    "user_id",
    "delegated_user_id",
    "db_id",
    "table_id",
    "db_slug",
];
|
||||
// Known docker-compose service names for the stack.
// BUGFIX: "web-app-post-db-setup" was listed twice; the duplicate entry is
// removed so the list contains each service exactly once.
export const DockerComposeServices = [
    "setup",
    "cron",
    "reverse-proxy",
    "webapp",
    "websocket",
    "static",
    "db",
    "maxscale",
    "post-db-setup",
    "web-app-post-db-setup",
    "post-replica-db-setup",
    "db-replica-1",
    "db-replica-2",
    "db-cron",
];
|
||||
// Supported index kinds for table schemas.
export const IndexTypes = ["regular", "full_text", "vector"];
// Fields automatically prepended to every table: integer PK plus
// created_at/updated_at Unix-timestamp columns.
export const DefaultFields = [
    {
        fieldName: "id",
        dataType: "INTEGER",
        primaryKey: true,
        autoIncrement: true,
        notNullValue: true,
        fieldDescription: "The unique identifier of the record.",
    },
    {
        fieldName: "created_at",
        dataType: "INTEGER",
        fieldDescription: "The time when the record was created. (Unix Timestamp)",
    },
    {
        fieldName: "updated_at",
        dataType: "INTEGER",
        fieldDescription: "The time when the record was updated. (Unix Timestamp)",
    },
];
|
||||
6
dist/utils/append-default-fields-to-db-schema.d.ts
vendored
Normal file
6
dist/utils/append-default-fields-to-db-schema.d.ts
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
import { type BUN_SQLITE_DatabaseSchemaType } from "../types";
|
||||
type Params = {
|
||||
dbSchema: BUN_SQLITE_DatabaseSchemaType;
|
||||
};
|
||||
export default function ({ dbSchema }: Params): BUN_SQLITE_DatabaseSchemaType;
|
||||
export {};
|
||||
12
dist/utils/append-default-fields-to-db-schema.js
vendored
Normal file
12
dist/utils/append-default-fields-to-db-schema.js
vendored
Normal file
@ -0,0 +1,12 @@
|
||||
import _ from "lodash";
|
||||
import { DefaultFields } from "../types";
|
||||
/**
 * Return a deep copy of `dbSchema` with the standard default fields
 * (id / created_at / updated_at) prepended to every table, replacing any
 * same-named fields the table already declared.
 * @param dbSchema - Schema to normalize; the input is never mutated.
 * @returns New schema object with defaults applied to each table.
 */
export default function ({ dbSchema }) {
    const finaldbSchema = _.cloneDeep(dbSchema);
    finaldbSchema.tables = finaldbSchema.tables.map((t) => {
        const newTable = _.cloneDeep(t);
        // Drop user-declared copies of the default columns so the canonical
        // definitions win.
        newTable.fields = newTable.fields.filter((f) => !f.fieldName?.match(/^(id|created_at|updated_at)$/));
        // BUGFIX: clone the defaults per table. Spreading DefaultFields
        // directly inserted the same shared objects into every table, so a
        // later mutation of one table's "id" field would silently change all
        // tables (and the module-level constant itself).
        newTable.fields.unshift(..._.cloneDeep(DefaultFields));
        return newTable;
    });
    return finaldbSchema;
}
|
||||
9
dist/utils/grab-backup-data.d.ts
vendored
Normal file
9
dist/utils/grab-backup-data.d.ts
vendored
Normal file
@ -0,0 +1,9 @@
|
||||
type Params = {
|
||||
backup_name: string;
|
||||
};
|
||||
export default function grabBackupData({ backup_name }: Params): {
|
||||
backup_date: Date;
|
||||
backup_date_timestamp: number;
|
||||
origin_backup_name: string;
|
||||
};
|
||||
export {};
|
||||
7
dist/utils/grab-backup-data.js
vendored
Normal file
7
dist/utils/grab-backup-data.js
vendored
Normal file
@ -0,0 +1,7 @@
|
||||
/**
 * Parse a backup file name of the form `<name>-<unix_ms_timestamp>` into its
 * parts. The text before the final "-" may itself contain dashes.
 * @param backup_name - e.g. "my-app-db-1700000000000".
 * @returns Backup date, its millisecond timestamp, and the original name.
 */
export default function grabBackupData({ backup_name }) {
    const segments = backup_name.split("-");
    // The trailing segment is the millisecond timestamp suffix.
    const backup_date_timestamp = Number(segments.pop());
    // Whatever remains (re-joined) is the original backup name.
    const origin_backup_name = segments.join("-");
    const backup_date = new Date(backup_date_timestamp);
    return { backup_date, backup_date_timestamp, origin_backup_name };
}
|
||||
6
dist/utils/grab-db-backup-file-name.d.ts
vendored
Normal file
6
dist/utils/grab-db-backup-file-name.d.ts
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
import type { BunSQLiteConfig } from "../types";
|
||||
type Params = {
|
||||
config: BunSQLiteConfig;
|
||||
};
|
||||
export default function grabDBBackupFileName({ config }: Params): string;
|
||||
export {};
|
||||
4
dist/utils/grab-db-backup-file-name.js
vendored
Normal file
4
dist/utils/grab-db-backup-file-name.js
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
/**
 * Build a backup file name as `<db_name>-<unix_ms_timestamp>`.
 * The timestamp suffix keeps successive backups unique and sortable.
 * @param config - Must provide `db_name`.
 * @returns The generated backup file name.
 */
export default function grabDBBackupFileName({ config }) {
    return `${config.db_name}-${Date.now()}`;
}
|
||||
10
dist/utils/grab-db-dir.d.ts
vendored
Normal file
10
dist/utils/grab-db-dir.d.ts
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
import type { BunSQLiteConfig } from "../types";
|
||||
type Params = {
|
||||
config: BunSQLiteConfig;
|
||||
};
|
||||
export default function grabDBDir({ config }: Params): {
|
||||
db_dir: string;
|
||||
backup_dir: string;
|
||||
db_file_path: string;
|
||||
};
|
||||
export {};
|
||||
14
dist/utils/grab-db-dir.js
vendored
Normal file
14
dist/utils/grab-db-dir.js
vendored
Normal file
@ -0,0 +1,14 @@
|
||||
import path from "path";
|
||||
import grabDirNames from "../data/grab-dir-names";
|
||||
import { AppData } from "../data/app-data";
|
||||
/**
 * Resolve the database directory, backup directory, and database file path
 * for the given configuration.
 * @param config - May override `db_dir` and `db_backup_dir`; must provide
 *   `db_name`.
 * @returns `{ db_dir, backup_dir, db_file_path }` (absolute paths).
 */
export default function grabDBDir({ config }) {
    const { ROOT_DIR } = grabDirNames();
    // The configured directory wins over the project root.
    const db_dir = config.db_dir ? config.db_dir : ROOT_DIR;
    // Fall back to the app-wide default backup directory name.
    const backupDirName = config.db_backup_dir || AppData["DefaultBackupDirName"];
    return {
        db_dir,
        backup_dir: path.resolve(db_dir, backupDirName),
        db_file_path: path.resolve(db_dir, config.db_name),
    };
}
|
||||
6
dist/utils/grab-sorted-backups.d.ts
vendored
Normal file
6
dist/utils/grab-sorted-backups.d.ts
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
import type { BunSQLiteConfig } from "../types";
|
||||
type Params = {
|
||||
config: BunSQLiteConfig;
|
||||
};
|
||||
export default function grabSortedBackups({ config }: Params): string[];
|
||||
export {};
|
||||
18
dist/utils/grab-sorted-backups.js
vendored
Normal file
18
dist/utils/grab-sorted-backups.js
vendored
Normal file
@ -0,0 +1,18 @@
|
||||
import grabDBDir from "../utils/grab-db-dir";
|
||||
import fs from "fs";
|
||||
/**
 * List backup files for the configured database, most recent first.
 * Ordering compares the trailing `-<timestamp>` segment of each file name.
 * @param config - Used to locate the backup directory.
 * @returns Backup file names sorted newest-to-oldest.
 */
export default function grabSortedBackups({ config }) {
    const { backup_dir } = grabDBDir({ config });
    const backupFiles = fs.readdirSync(backup_dir);
    // Descending by timestamp suffix => most recent backup first.
    return backupFiles.sort((first, second) => {
        const firstStamp = Number(first.split("-").pop());
        const secondStamp = Number(second.split("-").pop());
        return firstStamp > secondStamp ? -1 : 1;
    });
}
|
||||
2
dist/utils/sql-equality-parser.d.ts
vendored
Normal file
2
dist/utils/sql-equality-parser.d.ts
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
import { ServerQueryEqualities } from "../types";
|
||||
export default function sqlEqualityParser(eq: (typeof ServerQueryEqualities)[number]): string;
|
||||
39
dist/utils/sql-equality-parser.js
vendored
Normal file
39
dist/utils/sql-equality-parser.js
vendored
Normal file
@ -0,0 +1,39 @@
|
||||
import { ServerQueryEqualities } from "../types";
|
||||
/**
 * Translate a ServerQueryEqualities token into its SQL operator text.
 * Tokens without a direct operator form (e.g. MATCH, LIKE_LOWER — handled
 * elsewhere) fall back to "=".
 * @param eq - Equality token.
 * @returns SQL operator string.
 */
export default function sqlEqualityParser(eq) {
    // Token -> operator lookup; keyword-style tokens map to themselves.
    const OPERATORS = {
        "EQUAL": "=",
        "LIKE": "LIKE",
        "NOT LIKE": "NOT LIKE",
        "NOT EQUAL": "<>",
        "IN": "IN",
        "NOT IN": "NOT IN",
        "BETWEEN": "BETWEEN",
        "NOT BETWEEN": "NOT BETWEEN",
        "IS NULL": "IS NULL",
        "IS NOT NULL": "IS NOT NULL",
        "EXISTS": "EXISTS",
        "NOT EXISTS": "NOT EXISTS",
        "GREATER THAN": ">",
        "GREATER THAN OR EQUAL": ">=",
        "LESS THAN": "<",
        "LESS THAN OR EQUAL": "<=",
    };
    return OPERATORS[eq] ?? "=";
}
|
||||
20
dist/utils/sql-gen-operator-gen.d.ts
vendored
Normal file
20
dist/utils/sql-gen-operator-gen.d.ts
vendored
Normal file
@ -0,0 +1,20 @@
|
||||
import type { ServerQueryEqualities, ServerQueryObject } from "../types";
|
||||
type Params = {
|
||||
fieldName: string;
|
||||
value?: string;
|
||||
equality?: (typeof ServerQueryEqualities)[number];
|
||||
queryObj: ServerQueryObject<{
|
||||
[key: string]: any;
|
||||
}, string>;
|
||||
isValueFieldValue?: boolean;
|
||||
};
|
||||
type Return = {
|
||||
str?: string;
|
||||
param?: string;
|
||||
};
|
||||
/**
|
||||
* # SQL Gen Operator Gen
|
||||
* @description Generates an SQL operator for node module `mysql` or `serverless-mysql`
|
||||
*/
|
||||
export default function sqlGenOperatorGen({ fieldName, value, equality, queryObj, isValueFieldValue, }: Params): Return;
|
||||
export {};
|
||||
127
dist/utils/sql-gen-operator-gen.js
vendored
Normal file
127
dist/utils/sql-gen-operator-gen.js
vendored
Normal file
@ -0,0 +1,127 @@
|
||||
import sqlEqualityParser from "./sql-equality-parser";
|
||||
/**
 * # SQL Gen Operator Gen
 * @description Generates an SQL operator for node module `mysql` or `serverless-mysql`
 *
 * Returns `{ str, param }`: `str` is the SQL fragment for one condition and
 * `param` (when present) is the single value the caller must bind for it.
 * When `isValueFieldValue` is true the value is a column reference and is
 * embedded verbatim instead of a `?` placeholder (so no param is returned).
 * NOTE(review): branch order matters here — LIKE_LOWER must be tested before
 * LIKE, etc.; do not reorder.
 */
export default function sqlGenOperatorGen({ fieldName, value, equality, queryObj, isValueFieldValue, }) {
    // Explicit null-ness flags on the query object take priority over any
    // value/equality combination.
    if (queryObj.nullValue) {
        return { str: `${fieldName} IS NULL` };
    }
    if (queryObj.notNullValue) {
        return { str: `${fieldName} IS NOT NULL` };
    }
    if (value) {
        // Column-reference values are inlined; ordinary values become a `?`
        // placeholder with the raw value returned as `param`.
        const finalValue = isValueFieldValue ? value : "?";
        const finalParams = isValueFieldValue ? undefined : value;
        if (equality == "MATCH") {
            return {
                str: `MATCH(${fieldName}) AGAINST(${finalValue} IN NATURAL LANGUAGE MODE)`,
                param: finalParams,
            };
        }
        else if (equality == "MATCH_BOOLEAN") {
            return {
                str: `MATCH(${fieldName}) AGAINST(${finalValue} IN BOOLEAN MODE)`,
                param: finalParams,
            };
        }
        // Case-insensitive LIKE; non-RAW variant wraps the param in %...%.
        else if (equality == "LIKE_LOWER") {
            return {
                str: `LOWER(${fieldName}) LIKE LOWER(${finalValue})`,
                param: `%${finalParams}%`,
            };
        }
        else if (equality == "LIKE_LOWER_RAW") {
            return {
                str: `LOWER(${fieldName}) LIKE LOWER(${finalValue})`,
                param: finalParams,
            };
        }
        // Plain LIKE; non-RAW variant wraps the param in %...%.
        else if (equality == "LIKE") {
            return {
                str: `${fieldName} LIKE ${finalValue}`,
                param: `%${finalParams}%`,
            };
        }
        else if (equality == "LIKE_RAW") {
            return {
                str: `${fieldName} LIKE ${finalValue}`,
                param: finalParams,
            };
        }
        else if (equality == "NOT_LIKE_LOWER") {
            return {
                str: `LOWER(${fieldName}) NOT LIKE LOWER(${finalValue})`,
                param: `%${finalParams}%`,
            };
        }
        else if (equality == "NOT_LIKE_LOWER_RAW") {
            return {
                str: `LOWER(${fieldName}) NOT LIKE LOWER(${finalValue})`,
                param: finalParams,
            };
        }
        // NOTE(review): unlike LIKE, "NOT LIKE" does not wrap its param in
        // %...% — confirm this asymmetry is intentional.
        else if (equality == "NOT LIKE") {
            return {
                str: `${fieldName} NOT LIKE ${finalValue}`,
                param: finalParams,
            };
        }
        else if (equality == "NOT LIKE_RAW") {
            return {
                str: `${fieldName} NOT LIKE ${finalValue}`,
                param: finalParams,
            };
        }
        // Case-insensitive regular-expression match.
        else if (equality == "REGEXP") {
            return {
                str: `LOWER(${fieldName}) REGEXP LOWER(${finalValue})`,
                param: finalParams,
            };
        }
        // FULLTEXT is an alias of MATCH_BOOLEAN here.
        else if (equality == "FULLTEXT") {
            return {
                str: `MATCH(${fieldName}) AGAINST(${finalValue} IN BOOLEAN MODE)`,
                param: finalParams,
            };
        }
        else if (equality == "NOT EQUAL") {
            return {
                str: `${fieldName} != ${finalValue}`,
                param: finalParams,
            };
        }
        // Any other recognized token is translated by sqlEqualityParser.
        else if (equality) {
            return {
                str: `${fieldName} ${sqlEqualityParser(equality)} ${finalValue}`,
                param: finalParams,
            };
        }
        // No equality given: default to simple equality.
        else {
            return {
                str: `${fieldName} = ${finalValue}`,
                param: finalParams,
            };
        }
    }
    else {
        // No value: only null checks and bare-operator forms make sense.
        if (equality == "IS NULL") {
            return { str: `${fieldName} IS NULL` };
        }
        else if (equality == "IS NOT NULL") {
            return { str: `${fieldName} IS NOT NULL` };
        }
        // NOTE(review): value is falsy in this branch, so `param` below is
        // the original falsy value (e.g. "" or 0) bound against a `?`.
        else if (equality) {
            return {
                str: `${fieldName} ${sqlEqualityParser(equality)} ?`,
                param: value,
            };
        }
        else {
            return {
                str: `${fieldName} = ?`,
                param: value,
            };
        }
    }
}
|
||||
25
dist/utils/sql-generator.d.ts
vendored
Normal file
25
dist/utils/sql-generator.d.ts
vendored
Normal file
@ -0,0 +1,25 @@
|
||||
import type { ServerQueryParam } from "../types";
|
||||
type Param<T extends {
|
||||
[key: string]: any;
|
||||
} = {
|
||||
[key: string]: any;
|
||||
}> = {
|
||||
genObject?: ServerQueryParam<T>;
|
||||
tableName: string;
|
||||
dbFullName?: string;
|
||||
count?: boolean;
|
||||
};
|
||||
type Return = {
|
||||
string: string;
|
||||
values: (string | number)[];
|
||||
};
|
||||
/**
|
||||
* # SQL Query Generator
|
||||
* @description Generates an SQL Query for node module `mysql` or `serverless-mysql`
|
||||
*/
|
||||
export default function sqlGenerator<T extends {
|
||||
[key: string]: any;
|
||||
} = {
|
||||
[key: string]: any;
|
||||
}>({ tableName, genObject, dbFullName, count }: Param<T>): Return;
|
||||
export {};
|
||||
392
dist/utils/sql-generator.js
vendored
Normal file
392
dist/utils/sql-generator.js
vendored
Normal file
@ -0,0 +1,392 @@
|
||||
import { isUndefined } from "lodash";
|
||||
import sqlGenOperatorGen from "./sql-gen-operator-gen";
|
||||
/**
|
||||
* # SQL Query Generator
|
||||
* @description Generates an SQL Query for node module `mysql` or `serverless-mysql`
|
||||
*/
|
||||
export default function sqlGenerator({ tableName, genObject, dbFullName, count }) {
|
||||
const finalQuery = genObject?.query ? genObject.query : undefined;
|
||||
const queryKeys = finalQuery ? Object.keys(finalQuery) : undefined;
|
||||
const sqlSearhValues = [];
|
||||
const finalDbName = dbFullName ? `${dbFullName}.` : "";
|
||||
/**
|
||||
* # Generate Query
|
||||
*/
|
||||
function genSqlSrchStr({ queryObj, join, field, }) {
|
||||
const finalFieldName = (() => {
|
||||
if (queryObj?.tableName) {
|
||||
return `${finalDbName}${queryObj.tableName}.${field}`;
|
||||
}
|
||||
if (join) {
|
||||
return `${finalDbName}${tableName}.${field}`;
|
||||
}
|
||||
return field;
|
||||
})();
|
||||
let str = `${finalFieldName}=?`;
|
||||
function grabValue(val) {
|
||||
const valueParsed = val;
|
||||
if (!valueParsed)
|
||||
return;
|
||||
const valueString = typeof valueParsed == "string"
|
||||
? valueParsed
|
||||
: valueParsed
|
||||
? valueParsed.fieldName && valueParsed.tableName
|
||||
? `${valueParsed.tableName}.${valueParsed.fieldName}`
|
||||
: valueParsed.value?.toString()
|
||||
: undefined;
|
||||
const valueEquality = typeof valueParsed == "object"
|
||||
? valueParsed.equality || queryObj.equality
|
||||
: queryObj.equality;
|
||||
const operatorStrParam = sqlGenOperatorGen({
|
||||
queryObj,
|
||||
equality: valueEquality,
|
||||
fieldName: finalFieldName || "",
|
||||
value: valueString?.toString() || "",
|
||||
isValueFieldValue: Boolean(typeof valueParsed == "object" &&
|
||||
valueParsed.fieldName &&
|
||||
valueParsed.tableName),
|
||||
});
|
||||
return operatorStrParam;
|
||||
}
|
||||
if (Array.isArray(queryObj.value)) {
|
||||
const strArray = [];
|
||||
queryObj.value.forEach((val) => {
|
||||
const operatorStrParam = grabValue(val);
|
||||
if (!operatorStrParam)
|
||||
return;
|
||||
if (operatorStrParam.str && operatorStrParam.param) {
|
||||
strArray.push(operatorStrParam.str);
|
||||
sqlSearhValues.push(operatorStrParam.param);
|
||||
}
|
||||
else if (operatorStrParam.str) {
|
||||
strArray.push(operatorStrParam.str);
|
||||
}
|
||||
});
|
||||
str = "(" + strArray.join(` ${queryObj.operator || "AND"} `) + ")";
|
||||
}
|
||||
else if (typeof queryObj.value == "object") {
|
||||
const operatorStrParam = grabValue(queryObj.value);
|
||||
if (operatorStrParam?.str) {
|
||||
str = operatorStrParam.str;
|
||||
if (operatorStrParam.param) {
|
||||
sqlSearhValues.push(operatorStrParam.param);
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
const valueParsed = queryObj.value
|
||||
? String(queryObj.value)
|
||||
: undefined;
|
||||
const operatorStrParam = sqlGenOperatorGen({
|
||||
equality: queryObj.equality,
|
||||
fieldName: finalFieldName || "",
|
||||
value: valueParsed,
|
||||
queryObj,
|
||||
});
|
||||
if (operatorStrParam.str && operatorStrParam.param) {
|
||||
str = operatorStrParam.str;
|
||||
sqlSearhValues.push(operatorStrParam.param);
|
||||
}
|
||||
else if (operatorStrParam.str) {
|
||||
str = operatorStrParam.str;
|
||||
}
|
||||
}
|
||||
return str;
|
||||
}
|
||||
// Renders one JOIN ... ON condition (`lhs=rhs`) for a match descriptor.
// Recurses into `__batch` groups, joining sub-conditions with the batch
// operator (default "OR") and wrapping them in parentheses.
// Relies on enclosing scope: `finalDbName` (db prefix, may be "") and
// `tableName` (the base table of the SELECT).
function generateJoinStr(mtch, join) {
    if (mtch.__batch) {
        let btch_mtch = ``;
        btch_mtch += `(`;
        for (let i = 0; i < mtch.__batch.matches.length; i++) {
            const __mtch = mtch.__batch.matches[i];
            btch_mtch += `${generateJoinStr(__mtch, join)}`;
            // Separator between — not after — conditions.
            if (i < mtch.__batch.matches.length - 1) {
                btch_mtch += ` ${mtch.__batch.operator || "OR"} `;
            }
        }
        btch_mtch += `)`;
        return btch_mtch;
    }
    // LHS: object sources name their own table, plain sources use the base
    // table. RHS (the IIFE): a literal (numbers unquoted, everything else
    // single-quoted — NOTE(review): the literal is not escaped; confirm
    // inputs are trusted), or a column on the joined table / its alias.
    return `${finalDbName}${typeof mtch.source == "object" ? mtch.source.tableName : tableName}.${typeof mtch.source == "object" ? mtch.source.fieldName : mtch.source}=${(() => {
        if (mtch.targetLiteral) {
            if (typeof mtch.targetLiteral == "number") {
                return `${mtch.targetLiteral}`;
            }
            return `'${mtch.targetLiteral}'`;
        }
        if (join.alias) {
            return `${finalDbName}${typeof mtch.target == "object"
                ? mtch.target.tableName
                : join.alias}.${typeof mtch.target == "object"
                ? mtch.target.fieldName
                : mtch.target}`;
        }
        return `${finalDbName}${typeof mtch.target == "object"
            ? mtch.target.tableName
            : join.tableName}.${typeof mtch.target == "object"
            ? mtch.target.fieldName
            : mtch.target}`;
    })()}`;
}
|
||||
let fullTextMatchStr = genObject?.fullTextSearch
|
||||
? ` MATCH(${genObject.fullTextSearch.fields
|
||||
.map((f) => genObject.join ? `${tableName}.${String(f)}` : `${String(f)}`)
|
||||
.join(",")}) AGAINST (? IN BOOLEAN MODE)`
|
||||
: undefined;
|
||||
const fullTextSearchStr = genObject?.fullTextSearch
|
||||
? genObject.fullTextSearch.searchTerm
|
||||
.split(` `)
|
||||
.map((t) => `${t}`)
|
||||
.join(" ")
|
||||
: undefined;
|
||||
let queryString = (() => {
|
||||
let str = "SELECT";
|
||||
if (count) {
|
||||
str += ` COUNT(*)`;
|
||||
}
|
||||
else if (genObject?.selectFields?.[0]) {
|
||||
if (genObject.join) {
|
||||
str += ` ${genObject.selectFields
|
||||
?.map((fld) => typeof fld == "object"
|
||||
? `${finalDbName}${tableName}.${fld.fieldName.toString()}` +
|
||||
(fld.alias ? ` as ${fld.alias}` : ``)
|
||||
: `${finalDbName}${tableName}.${String(fld)}`)
|
||||
.join(",")}`;
|
||||
}
|
||||
else {
|
||||
str += ` ${genObject.selectFields
|
||||
?.map((fld) => typeof fld == "object"
|
||||
? `${fld.fieldName.toString()}` +
|
||||
(fld.alias ? ` as ${fld.alias}` : ``)
|
||||
: fld)
|
||||
.join(",")}`;
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (genObject?.join) {
|
||||
str += ` ${finalDbName}${tableName}.*`;
|
||||
}
|
||||
else {
|
||||
str += " *";
|
||||
}
|
||||
}
|
||||
if (genObject?.countSubQueries) {
|
||||
let countSqls = [];
|
||||
for (let i = 0; i < genObject.countSubQueries.length; i++) {
|
||||
const countSubQuery = genObject.countSubQueries[i];
|
||||
if (!countSubQuery)
|
||||
continue;
|
||||
const tableAlias = countSubQuery.table_alias;
|
||||
let subQStr = `(SELECT COUNT(*)`;
|
||||
subQStr += ` FROM ${countSubQuery.table}${tableAlias ? ` ${tableAlias}` : ""}`;
|
||||
subQStr += ` WHERE (`;
|
||||
for (let j = 0; j < countSubQuery.srcTrgMap.length; j++) {
|
||||
const csqSrc = countSubQuery.srcTrgMap[j];
|
||||
if (!csqSrc)
|
||||
continue;
|
||||
subQStr += ` ${tableAlias || countSubQuery.table}.${csqSrc.src}`;
|
||||
if (typeof csqSrc.trg == "string") {
|
||||
subQStr += ` = ?`;
|
||||
sqlSearhValues.push(csqSrc.trg);
|
||||
}
|
||||
else if (typeof csqSrc.trg == "object") {
|
||||
subQStr += ` = ${csqSrc.trg.table}.${csqSrc.trg.field}`;
|
||||
}
|
||||
if (j < countSubQuery.srcTrgMap.length - 1) {
|
||||
subQStr += ` AND `;
|
||||
}
|
||||
}
|
||||
subQStr += ` )) AS ${countSubQuery.alias}`;
|
||||
countSqls.push(subQStr);
|
||||
}
|
||||
str += `, ${countSqls.join(",")}`;
|
||||
}
|
||||
if (genObject?.join && !count) {
|
||||
const existingJoinTableNames = [tableName];
|
||||
str +=
|
||||
"," +
|
||||
genObject.join
|
||||
.flat()
|
||||
.filter((j) => !isUndefined(j))
|
||||
.map((joinObj) => {
|
||||
const joinTableName = joinObj.alias
|
||||
? joinObj.alias
|
||||
: joinObj.tableName;
|
||||
if (existingJoinTableNames.includes(joinTableName))
|
||||
return null;
|
||||
existingJoinTableNames.push(joinTableName);
|
||||
if (joinObj.selectFields) {
|
||||
return joinObj.selectFields
|
||||
.map((selectField) => {
|
||||
if (typeof selectField == "string") {
|
||||
return `${finalDbName}${joinTableName}.${selectField}`;
|
||||
}
|
||||
else if (typeof selectField == "object") {
|
||||
let aliasSelectField = selectField.count
|
||||
? `COUNT(${finalDbName}${joinTableName}.${selectField.field})`
|
||||
: `${finalDbName}${joinTableName}.${selectField.field}`;
|
||||
if (selectField.alias)
|
||||
aliasSelectField += ` AS ${selectField.alias}`;
|
||||
return aliasSelectField;
|
||||
}
|
||||
})
|
||||
.join(",");
|
||||
}
|
||||
else {
|
||||
return `${finalDbName}${joinTableName}.*`;
|
||||
}
|
||||
})
|
||||
.filter((_) => Boolean(_))
|
||||
.join(",");
|
||||
}
|
||||
if (genObject?.fullTextSearch &&
|
||||
fullTextMatchStr &&
|
||||
fullTextSearchStr) {
|
||||
str += `, ${fullTextMatchStr} AS ${genObject.fullTextSearch.scoreAlias}`;
|
||||
sqlSearhValues.push(fullTextSearchStr);
|
||||
}
|
||||
str += ` FROM ${finalDbName}${tableName}`;
|
||||
if (genObject?.join) {
|
||||
str +=
|
||||
" " +
|
||||
genObject.join
|
||||
.flat()
|
||||
.filter((j) => !isUndefined(j))
|
||||
.map((join) => {
|
||||
return (join.joinType +
|
||||
" " +
|
||||
(join.alias
|
||||
? `${finalDbName}${join.tableName}` +
|
||||
" " +
|
||||
join.alias
|
||||
: `${finalDbName}${join.tableName}`) +
|
||||
" ON " +
|
||||
(() => {
|
||||
if (Array.isArray(join.match)) {
|
||||
return ("(" +
|
||||
join.match
|
||||
.map((mtch) => generateJoinStr(mtch, join))
|
||||
.join(join.operator
|
||||
? ` ${join.operator} `
|
||||
: " AND ") +
|
||||
")");
|
||||
}
|
||||
else if (typeof join.match == "object") {
|
||||
return generateJoinStr(join.match, join);
|
||||
}
|
||||
})());
|
||||
})
|
||||
.join(" ");
|
||||
}
|
||||
return str;
|
||||
})();
|
||||
const sqlSearhString = queryKeys?.map((field) => {
|
||||
const queryObj = finalQuery?.[field];
|
||||
if (!queryObj)
|
||||
return;
|
||||
if (queryObj.__query) {
|
||||
const subQueryGroup = queryObj.__query;
|
||||
const subSearchKeys = Object.keys(subQueryGroup);
|
||||
const subSearchString = subSearchKeys.map((_field) => {
|
||||
const newSubQueryObj = subQueryGroup?.[_field];
|
||||
if (newSubQueryObj) {
|
||||
return genSqlSrchStr({
|
||||
queryObj: newSubQueryObj,
|
||||
field: newSubQueryObj.fieldName || _field,
|
||||
join: genObject?.join,
|
||||
});
|
||||
}
|
||||
});
|
||||
return ("(" +
|
||||
subSearchString.join(` ${queryObj.operator || "AND"} `) +
|
||||
")");
|
||||
}
|
||||
return genSqlSrchStr({
|
||||
queryObj,
|
||||
field: queryObj.fieldName || field,
|
||||
join: genObject?.join,
|
||||
});
|
||||
});
|
||||
const cleanedUpSearchStr = sqlSearhString?.filter((str) => typeof str == "string");
|
||||
const isSearchStr = cleanedUpSearchStr?.[0] && cleanedUpSearchStr.find((str) => str);
|
||||
if (isSearchStr) {
|
||||
const stringOperator = genObject?.searchOperator || "AND";
|
||||
queryString += ` WHERE ${cleanedUpSearchStr.join(` ${stringOperator} `)}`;
|
||||
}
|
||||
if (genObject?.fullTextSearch && fullTextSearchStr && fullTextMatchStr) {
|
||||
queryString += `${isSearchStr ? " AND" : " WHERE"} ${fullTextMatchStr}`;
|
||||
sqlSearhValues.push(fullTextSearchStr);
|
||||
}
|
||||
if (genObject?.group) {
|
||||
let group_by_txt = ``;
|
||||
if (typeof genObject.group == "string") {
|
||||
group_by_txt = genObject.group;
|
||||
}
|
||||
else if (Array.isArray(genObject.group)) {
|
||||
for (let i = 0; i < genObject.group.length; i++) {
|
||||
const group = genObject.group[i];
|
||||
if (typeof group == "string") {
|
||||
group_by_txt += `\`${group.toString()}\``;
|
||||
}
|
||||
else if (typeof group == "object" && group.table) {
|
||||
group_by_txt += `${group.table}.${String(group.field)}`;
|
||||
}
|
||||
else if (typeof group == "object") {
|
||||
group_by_txt += `${String(group.field)}`;
|
||||
}
|
||||
if (i < genObject.group.length - 1) {
|
||||
group_by_txt += ",";
|
||||
}
|
||||
}
|
||||
}
|
||||
else if (typeof genObject.group == "object") {
|
||||
if (genObject.group.table) {
|
||||
group_by_txt = `${genObject.group.table}.${String(genObject.group.field)}`;
|
||||
}
|
||||
else {
|
||||
group_by_txt = `${String(genObject.group.field)}`;
|
||||
}
|
||||
}
|
||||
queryString += ` GROUP BY ${group_by_txt}`;
|
||||
}
|
||||
// Renders one ORDER BY fragment (" <field...> <ASC|DESC>") for an order
// descriptor. Relies on enclosing scope: `genObject`, `finalDbName`,
// `tableName`.
// NOTE(review): when a full-text search with a scoreAlias is configured,
// ordering is forced onto the score alias and `order.field` is ignored —
// confirm that is the intended precedence.
function grabOrderString(order) {
    let orderFields = [];
    let orderSrt = ``;
    if (genObject?.fullTextSearch && genObject.fullTextSearch.scoreAlias) {
        orderFields.push(genObject.fullTextSearch.scoreAlias);
    }
    else if (genObject?.join) {
        // Joined queries qualify the field with the base table name.
        orderFields.push(`${finalDbName}${tableName}.${String(order.field)}`);
    }
    else {
        orderFields.push(order.field);
    }
    orderSrt += ` ${orderFields.join(", ")} ${order.strategy}`;
    return orderSrt;
}
|
||||
if (genObject?.order && !count) {
|
||||
let orderSrt = ` ORDER BY`;
|
||||
if (Array.isArray(genObject.order)) {
|
||||
for (let i = 0; i < genObject.order.length; i++) {
|
||||
const order = genObject.order[i];
|
||||
if (order) {
|
||||
orderSrt +=
|
||||
grabOrderString(order) +
|
||||
(i < genObject.order.length - 1 ? `,` : "");
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
orderSrt += grabOrderString(genObject.order);
|
||||
}
|
||||
queryString += ` ${orderSrt}`;
|
||||
}
|
||||
if (genObject?.limit && !count)
|
||||
queryString += ` LIMIT ${genObject.limit}`;
|
||||
if (genObject?.offset && !count)
|
||||
queryString += ` OFFSET ${genObject.offset}`;
|
||||
return {
|
||||
string: queryString,
|
||||
values: sqlSearhValues,
|
||||
};
|
||||
}
|
||||
5
dist/utils/sql-insert-generator.d.ts
vendored
Normal file
5
dist/utils/sql-insert-generator.d.ts
vendored
Normal file
@ -0,0 +1,5 @@
|
||||
import type { SQLInsertGenParams, SQLInsertGenReturn } from "../types";
|
||||
/**
|
||||
* # SQL Insert Generator
|
||||
*/
|
||||
export default function sqlInsertGenerator({ tableName, data, dbFullName, }: SQLInsertGenParams): SQLInsertGenReturn | undefined;
|
||||
56
dist/utils/sql-insert-generator.js
vendored
Normal file
56
dist/utils/sql-insert-generator.js
vendored
Normal file
@ -0,0 +1,56 @@
|
||||
/**
 * # SQL Insert Generator
 *
 * Builds a parameterized `INSERT INTO ... VALUES ...` statement for a batch
 * of row objects. The column list is the union of keys across all rows, so
 * one statement covers the whole batch; rows missing a key bind an empty
 * string for that column.
 *
 * Supported field values:
 *  - string / number: bound as a `?` parameter,
 *  - function: invoked to obtain `{ value, placeholder }`, letting callers
 *    inject a custom SQL fragment as the placeholder (inferred from the
 *    `value().placeholder` usage here — confirm against callers),
 *  - anything else (null/undefined/objects/booleans): bound as "".
 *
 * @returns `{ query, values }`, or `undefined` when `data` is not a
 * non-empty array or generation fails.
 */
export default function sqlInsertGenerator({ tableName, data, dbFullName, }) {
    // Optional database prefix: `db` => `INSERT INTO db.table ...`.
    const finalDbName = dbFullName ? `${dbFullName}.` : "";
    try {
        if (Array.isArray(data) && data?.[0]) {
            // Union of keys across every row, preserving first-seen order.
            let insertKeys = [];
            data.forEach((dt) => {
                const kys = Object.keys(dt);
                kys.forEach((ky) => {
                    if (!insertKeys.includes(ky)) {
                        insertKeys.push(ky);
                    }
                });
            });
            let queryBatches = [];
            let queryValues = [];
            data.forEach((item) => {
                queryBatches.push(`(${insertKeys
                    .map((ky) => {
                    const value = item[ky];
                    // FIX: only call `value()` when it actually is a
                    // function. Previously any truthy non-string/number
                    // (e.g. a plain object) was invoked, throwing and
                    // aborting the whole insert via the catch below.
                    const finalValue = typeof value == "string" ||
                        typeof value == "number"
                        ? value
                        : typeof value == "function"
                            ? String(value().value)
                            : null;
                    // FIX: nullish check instead of `!finalValue`, so
                    // legitimate falsy values (0) are bound as-is rather
                    // than silently replaced with "".
                    if (finalValue === null || finalValue === undefined) {
                        queryValues.push("");
                        return "?";
                    }
                    queryValues.push(finalValue);
                    // Function-valued fields may supply a custom SQL
                    // placeholder fragment; plain values bind with `?`.
                    const placeholder = typeof value == "function"
                        ? value().placeholder
                        : "?";
                    return placeholder;
                })
                    .filter((k) => Boolean(k))
                    .join(",")})`);
            });
            let query = `INSERT INTO ${finalDbName}${tableName} (${insertKeys.join(",")}) VALUES ${queryBatches.join(",")}`;
            return {
                query: query,
                values: queryValues,
            };
        }
        else {
            return undefined;
        }
    }
    catch (error) {
        console.log(`SQL insert gen ERROR: ${error.message}`);
        return undefined;
    }
}
|
||||
6
dist/utils/trim-backups.d.ts
vendored
Normal file
6
dist/utils/trim-backups.d.ts
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
import type { BunSQLiteConfig } from "../types";
|
||||
type Params = {
|
||||
config: BunSQLiteConfig;
|
||||
};
|
||||
export default function trimBackups({ config }: Params): void;
|
||||
export {};
|
||||
19
dist/utils/trim-backups.js
vendored
Normal file
19
dist/utils/trim-backups.js
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
import grabDBDir from "../utils/grab-db-dir";
|
||||
import fs from "fs";
|
||||
import grabSortedBackups from "./grab-sorted-backups";
|
||||
import { AppData } from "../data/app-data";
|
||||
import path from "path";
|
||||
// Deletes backup files beyond the configured retention cap.
// Keeps the first `max_backups` entries of the sorted backup list and
// unlinks everything after them.
export default function trimBackups({ config }) {
    const { backup_dir } = grabDBDir({ config });
    const backups = grabSortedBackups({ config });
    const max_backups = config.max_backups || AppData["MaxBackups"];
    backups.forEach((backup_name, index) => {
        if (!backup_name)
            return;
        if (index >= max_backups) {
            fs.unlinkSync(path.join(backup_dir, backup_name));
        }
    });
}
|
||||
1196
package-lock.json
generated
Normal file
1196
package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
43
package.json
Normal file
43
package.json
Normal file
@ -0,0 +1,43 @@
|
||||
{
|
||||
"name": "@moduletrace/nsqlite",
|
||||
"version": "1.0.8",
|
||||
"description": "SQLite manager for Bun",
|
||||
"author": "Benjamin Toby",
|
||||
"main": "dist/index.js",
|
||||
"bin": {
|
||||
"nsqlite": "dist/commands/index.js"
|
||||
},
|
||||
"scripts": {
|
||||
"dev": "tsc --watch",
|
||||
"compile": "rm -rf dist && tsc"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/better-sqlite3": "^7.6.13",
|
||||
"@types/bun": "latest",
|
||||
"@types/lodash": "^4.17.24",
|
||||
"@types/mysql": "^2.15.27",
|
||||
"@types/node": "^25.3.3"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"typescript": "^5"
|
||||
},
|
||||
"files": [
|
||||
"dist",
|
||||
"README.md",
|
||||
"package.json"
|
||||
],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://git.tben.me/Moduletrace/nsqlite.git"
|
||||
},
|
||||
"dependencies": {
|
||||
"@inquirer/prompts": "^8.3.0",
|
||||
"better-sqlite3": "^12.6.2",
|
||||
"chalk": "^5.6.2",
|
||||
"commander": "^14.0.3",
|
||||
"inquirer": "^13.3.0",
|
||||
"lodash": "^4.17.23",
|
||||
"mysql": "^2.18.1",
|
||||
"sqlite-vec": "^0.1.7-alpha.2"
|
||||
}
|
||||
}
|
||||
14
publish.sh
Executable file
14
publish.sh
Executable file
@ -0,0 +1,14 @@
|
||||
#!/bin/bash

# Commit message defaults to "Updates" when no argument is supplied.
msg="${1:-Updates}"

# Rebuild the dist output from scratch.
rm -rf dist
tsc

# Commit and push everything, then publish the package with bun.
git add .
git commit -m "$msg"
git push
bun publish
|
||||
29
src/commands/backup.ts
Normal file
29
src/commands/backup.ts
Normal file
@ -0,0 +1,29 @@
|
||||
import { Command } from "commander";
|
||||
import init from "../functions/init";
|
||||
import path from "path";
|
||||
import grabDBDir from "../utils/grab-db-dir";
|
||||
import fs from "fs";
|
||||
import grabDBBackupFileName from "../utils/grab-db-backup-file-name";
|
||||
import chalk from "chalk";
|
||||
import trimBackups from "../utils/trim-backups";
|
||||
|
||||
export default function () {
|
||||
return new Command("backup")
|
||||
.description("Backup Database")
|
||||
.action(async (opts) => {
|
||||
console.log(`Backing up database ...`);
|
||||
|
||||
const { config } = await init();
|
||||
|
||||
const { backup_dir, db_file_path } = grabDBDir({ config });
|
||||
|
||||
const new_db_file_name = grabDBBackupFileName({ config });
|
||||
|
||||
fs.cpSync(db_file_path, path.join(backup_dir, new_db_file_name));
|
||||
|
||||
trimBackups({ config });
|
||||
|
||||
console.log(`${chalk.bold(chalk.green(`DB Backup Success!`))}`);
|
||||
process.exit();
|
||||
});
|
||||
}
|
||||
44
src/commands/index.ts
Normal file
44
src/commands/index.ts
Normal file
@ -0,0 +1,44 @@
|
||||
#!/usr/bin/env bun
|
||||
|
||||
import { program } from "commander";
|
||||
import schema from "./schema";
|
||||
import typedef from "./typedef";
|
||||
import backup from "./backup";
|
||||
import restore from "./restore";
|
||||
|
||||
/**
|
||||
* # Declare Global Variables
|
||||
*/
|
||||
declare global {}
|
||||
|
||||
/**
|
||||
* # Describe Program
|
||||
*/
|
||||
program
|
||||
.name(`bun-sqlite`)
|
||||
.description(`SQLite manager for Bun`)
|
||||
.version(`1.0.0`);
|
||||
|
||||
/**
|
||||
* # Declare Commands
|
||||
*/
|
||||
program.addCommand(schema());
|
||||
program.addCommand(typedef());
|
||||
program.addCommand(backup());
|
||||
program.addCommand(restore());
|
||||
|
||||
/**
|
||||
* # Handle Unavailable Commands
|
||||
*/
|
||||
program.on("command:*", () => {
|
||||
console.error(
|
||||
"Invalid command: %s\nSee --help for a list of available commands.",
|
||||
program.args.join(" "),
|
||||
);
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
/**
|
||||
* # Parse Arguments
|
||||
*/
|
||||
program.parse(process.argv);
|
||||
56
src/commands/restore.ts
Normal file
56
src/commands/restore.ts
Normal file
@ -0,0 +1,56 @@
|
||||
import { Command } from "commander";
|
||||
import init from "../functions/init";
|
||||
import grabDBDir from "../utils/grab-db-dir";
|
||||
import fs from "fs";
|
||||
import chalk from "chalk";
|
||||
import grabSortedBackups from "../utils/grab-sorted-backups";
|
||||
import { select } from "@inquirer/prompts";
|
||||
import grabBackupData from "../utils/grab-backup-data";
|
||||
import path from "path";
|
||||
|
||||
export default function () {
|
||||
return new Command("restore")
|
||||
.description("Restore Database")
|
||||
.action(async (opts) => {
|
||||
console.log(`Restoring up database ...`);
|
||||
|
||||
const { config } = await init();
|
||||
|
||||
const { backup_dir, db_file_path } = grabDBDir({ config });
|
||||
|
||||
const backups = grabSortedBackups({ config });
|
||||
|
||||
if (!backups?.[0]) {
|
||||
console.error(
|
||||
`No Backups to restore. Use the \`backup\` command to create a backup`,
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
try {
|
||||
const selected_backup = await select({
|
||||
message: "Select a backup:",
|
||||
choices: backups.map((b, i) => {
|
||||
const { backup_date } = grabBackupData({
|
||||
backup_name: b,
|
||||
});
|
||||
return {
|
||||
name: `Backup #${i + 1}: ${backup_date.toDateString()} ${backup_date.getHours()}:${backup_date.getMinutes()}:${backup_date.getSeconds().toString().padStart(2, "0")}`,
|
||||
value: b,
|
||||
};
|
||||
}),
|
||||
});
|
||||
|
||||
fs.cpSync(path.join(backup_dir, selected_backup), db_file_path);
|
||||
|
||||
console.log(
|
||||
`${chalk.bold(chalk.green(`DB Restore Success!`))}`,
|
||||
);
|
||||
|
||||
process.exit();
|
||||
} catch (error: any) {
|
||||
console.error(`Backup Restore ERROR => ${error.message}`);
|
||||
process.exit();
|
||||
}
|
||||
});
|
||||
}
|
||||
55
src/commands/schema.ts
Normal file
55
src/commands/schema.ts
Normal file
@ -0,0 +1,55 @@
|
||||
import { Command } from "commander";
|
||||
import { SQLiteSchemaManager } from "../lib/sqlite/db-schema-manager";
|
||||
import init from "../functions/init";
|
||||
import grabDirNames from "../data/grab-dir-names";
|
||||
import path from "path";
|
||||
import dbSchemaToTypeDef from "../lib/sqlite/schema-to-typedef";
|
||||
import _ from "lodash";
|
||||
import appendDefaultFieldsToDbSchema from "../utils/append-default-fields-to-db-schema";
|
||||
import chalk from "chalk";
|
||||
|
||||
export default function () {
|
||||
return new Command("schema")
|
||||
.description("Build DB From Schema")
|
||||
.option(
|
||||
"-v, --vector",
|
||||
"Recreate Vector Tables. This will drop and rebuild all vector tables",
|
||||
)
|
||||
.option("-t, --typedef", "Generate typescript type definitions")
|
||||
.action(async (opts) => {
|
||||
console.log(`Starting process ...`);
|
||||
|
||||
const { config, dbSchema } = await init();
|
||||
const { ROOT_DIR } = grabDirNames();
|
||||
|
||||
const isVector = Boolean(opts.vector || opts.v);
|
||||
const isTypeDef = Boolean(opts.typedef || opts.t);
|
||||
|
||||
const finaldbSchema = appendDefaultFieldsToDbSchema({ dbSchema });
|
||||
|
||||
const manager = new SQLiteSchemaManager({
|
||||
schema: finaldbSchema,
|
||||
recreate_vector_table: isVector,
|
||||
});
|
||||
|
||||
await manager.syncSchema();
|
||||
manager.close();
|
||||
|
||||
if (isTypeDef && config.typedef_file_path) {
|
||||
const out_file = path.resolve(
|
||||
ROOT_DIR,
|
||||
config.typedef_file_path,
|
||||
);
|
||||
|
||||
dbSchemaToTypeDef({
|
||||
dbSchema: finaldbSchema,
|
||||
dst_file: out_file,
|
||||
});
|
||||
}
|
||||
|
||||
console.log(
|
||||
`${chalk.bold(chalk.green(`DB Schema setup success!`))}`,
|
||||
);
|
||||
process.exit();
|
||||
});
|
||||
}
|
||||
38
src/commands/typedef.ts
Normal file
38
src/commands/typedef.ts
Normal file
@ -0,0 +1,38 @@
|
||||
import { Command } from "commander";
|
||||
import init from "../functions/init";
|
||||
import dbSchemaToTypeDef from "../lib/sqlite/schema-to-typedef";
|
||||
import path from "path";
|
||||
import grabDirNames from "../data/grab-dir-names";
|
||||
import appendDefaultFieldsToDbSchema from "../utils/append-default-fields-to-db-schema";
|
||||
import chalk from "chalk";
|
||||
|
||||
export default function () {
|
||||
return new Command("typedef")
|
||||
.description("Build DB From Schema")
|
||||
.action(async (opts) => {
|
||||
console.log(`Creating Type Definition From DB Schema ...`);
|
||||
|
||||
const { config, dbSchema } = await init();
|
||||
const { ROOT_DIR } = grabDirNames();
|
||||
|
||||
const finaldbSchema = appendDefaultFieldsToDbSchema({ dbSchema });
|
||||
|
||||
if (config.typedef_file_path) {
|
||||
const out_file = path.resolve(
|
||||
ROOT_DIR,
|
||||
config.typedef_file_path,
|
||||
);
|
||||
dbSchemaToTypeDef({
|
||||
dbSchema: finaldbSchema,
|
||||
dst_file: out_file,
|
||||
});
|
||||
} else {
|
||||
console.error(``);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
console.log(`${chalk.bold(chalk.green(`Typedef gen success!`))}`);
|
||||
|
||||
process.exit();
|
||||
});
|
||||
}
|
||||
5
src/data/app-data.ts
Normal file
5
src/data/app-data.ts
Normal file
@ -0,0 +1,5 @@
|
||||
/**
 * Global application constants shared across commands and utilities.
 */
export const AppData = {
    // Config file expected at the project root (see functions/init).
    ConfigFileName: "bun-sqlite.config.ts",
    // Retention cap on backups when `config.max_backups` is unset.
    MaxBackups: 10,
    // Backup directory name when `config.db_backup_dir` is unset.
    DefaultBackupDirName: ".backups",
} as const;
|
||||
9
src/data/grab-dir-names.ts
Normal file
9
src/data/grab-dir-names.ts
Normal file
@ -0,0 +1,9 @@
|
||||
import path from "path";
|
||||
|
||||
export default function grabDirNames() {
|
||||
const ROOT_DIR = process.cwd();
|
||||
|
||||
return {
|
||||
ROOT_DIR,
|
||||
};
|
||||
}
|
||||
69
src/functions/init.ts
Normal file
69
src/functions/init.ts
Normal file
@ -0,0 +1,69 @@
|
||||
import path from "path";
|
||||
import fs from "fs";
|
||||
import { AppData } from "../data/app-data";
|
||||
import grabDirNames from "../data/grab-dir-names";
|
||||
import type {
|
||||
BunSQLiteConfig,
|
||||
BunSQLiteConfigReturn,
|
||||
BUN_SQLITE_DatabaseSchemaType,
|
||||
} from "../types";
|
||||
|
||||
export default async function init(): Promise<BunSQLiteConfigReturn> {
|
||||
try {
|
||||
const { ROOT_DIR } = grabDirNames();
|
||||
const { ConfigFileName } = AppData;
|
||||
|
||||
const ConfigFilePath = path.join(ROOT_DIR, ConfigFileName);
|
||||
|
||||
if (!fs.existsSync(ConfigFilePath)) {
|
||||
console.log("ConfigFilePath", ConfigFilePath);
|
||||
|
||||
console.error(
|
||||
`Please create a \`${ConfigFileName}\` file at the root of your project.`,
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const ConfigImport = await import(ConfigFilePath);
|
||||
const Config = ConfigImport["default"] as BunSQLiteConfig;
|
||||
|
||||
if (!Config.db_name) {
|
||||
console.error(`\`db_name\` is required in your config`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
if (!Config.db_schema_file_name) {
|
||||
console.error(`\`db_schema_file_name\` is required in your config`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
let db_dir = ROOT_DIR;
|
||||
|
||||
if (Config.db_dir) {
|
||||
db_dir = path.resolve(ROOT_DIR, Config.db_dir);
|
||||
|
||||
if (!fs.existsSync(Config.db_dir)) {
|
||||
fs.mkdirSync(Config.db_dir, { recursive: true });
|
||||
}
|
||||
}
|
||||
|
||||
const DBSchemaFilePath = path.join(db_dir, Config.db_schema_file_name);
|
||||
const DbSchemaImport = await import(DBSchemaFilePath);
|
||||
const DbSchema = DbSchemaImport[
|
||||
"default"
|
||||
] as BUN_SQLITE_DatabaseSchemaType;
|
||||
|
||||
const backup_dir =
|
||||
Config.db_backup_dir || AppData["DefaultBackupDirName"];
|
||||
|
||||
const BackupDir = path.resolve(db_dir, backup_dir);
|
||||
if (!fs.existsSync(BackupDir)) {
|
||||
fs.mkdirSync(BackupDir, { recursive: true });
|
||||
}
|
||||
|
||||
return { config: Config, dbSchema: DbSchema };
|
||||
} catch (error: any) {
|
||||
console.error(`Initialization ERROR => ` + error.message);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
15
src/index.ts
Normal file
15
src/index.ts
Normal file
@ -0,0 +1,15 @@
|
||||
import DbDelete from "./lib/sqlite/db-delete";
|
||||
import DbInsert from "./lib/sqlite/db-insert";
|
||||
import DbSelect from "./lib/sqlite/db-select";
|
||||
import DbSQL from "./lib/sqlite/db-sql";
|
||||
import DbUpdate from "./lib/sqlite/db-update";
|
||||
|
||||
/**
 * Public API surface of the package: one entry point per CRUD operation
 * plus a raw-SQL escape hatch, each backed by its lib/sqlite module.
 */
const NodeSQLite = {
    select: DbSelect,
    insert: DbInsert,
    update: DbUpdate,
    delete: DbDelete,
    sql: DbSQL,
} as const;

export default NodeSQLite;
|
||||
74
src/lib/sqlite/db-delete.ts
Normal file
74
src/lib/sqlite/db-delete.ts
Normal file
@ -0,0 +1,74 @@
|
||||
import DbClient from ".";
|
||||
import _ from "lodash";
|
||||
import type { APIResponseObject, ServerQueryParam } from "../../types";
|
||||
import sqlGenerator from "../../utils/sql-generator";
|
||||
|
||||
/**
 * Arguments for {@link DbDelete}.
 */
type Params<
    Schema extends { [k: string]: any } = { [k: string]: any },
    Table extends string = string,
> = {
    /** Table to delete from. */
    table: Table;
    /** Query describing which rows to delete. */
    query?: ServerQueryParam<Schema>;
    /** Convenience shortcut: delete the row with this `id` (merged into `query`). */
    targetId?: number | string;
};
||||
|
||||
export default async function DbDelete<
|
||||
Schema extends { [k: string]: any } = { [k: string]: any },
|
||||
Table extends string = string,
|
||||
>({
|
||||
table,
|
||||
query,
|
||||
targetId,
|
||||
}: Params<Schema, Table>): Promise<APIResponseObject> {
|
||||
try {
|
||||
let finalQuery = query || {};
|
||||
|
||||
if (targetId) {
|
||||
finalQuery = _.merge<ServerQueryParam<any>, ServerQueryParam<any>>(
|
||||
finalQuery,
|
||||
{
|
||||
query: {
|
||||
id: {
|
||||
value: String(targetId),
|
||||
},
|
||||
},
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
const sqlQueryObj = sqlGenerator({
|
||||
tableName: table,
|
||||
genObject: finalQuery,
|
||||
});
|
||||
|
||||
const whereClause = sqlQueryObj.string.match(/WHERE .*/)?.[0];
|
||||
|
||||
if (whereClause) {
|
||||
let sql = `DELETE FROM ${table} ${whereClause}`;
|
||||
|
||||
const res = DbClient.run(sql, sqlQueryObj.values);
|
||||
|
||||
return {
|
||||
success: Boolean(res.changes),
|
||||
postInsertReturn: {
|
||||
affectedRows: res.changes,
|
||||
insertId: Number(res.lastInsertRowid),
|
||||
},
|
||||
debug: {
|
||||
sql,
|
||||
values: sqlQueryObj.values,
|
||||
},
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
success: false,
|
||||
msg: `No WHERE clause`,
|
||||
};
|
||||
}
|
||||
} catch (error: any) {
|
||||
return {
|
||||
success: false,
|
||||
error: error.message,
|
||||
};
|
||||
}
|
||||
}
|
||||
112
src/lib/sqlite/db-generate-type-defs.ts
Normal file
112
src/lib/sqlite/db-generate-type-defs.ts
Normal file
@ -0,0 +1,112 @@
|
||||
import type {
|
||||
BUN_SQLITE_FieldSchemaType,
|
||||
BUN_SQLITE_TableSchemaType,
|
||||
} from "../../types";
|
||||
|
||||
/**
 * Arguments for {@link generateTypeDefinition}.
 */
type Param = {
    /** Output flavor: JSDoc typedef ("JavaScript") or TS `type` ("TypeScript"). */
    paradigm: "JavaScript" | "TypeScript" | undefined;
    /** Table schema to generate the definition from. */
    table: BUN_SQLITE_TableSchemaType;
    // Used only as a fallback name source (`query.single`/`query.single_table`)
    // when neither typeDefName nor dbName is given.
    query?: any;
    /** Explicit name for the generated type; overrides the derived name. */
    typeDefName?: string;
    /** Mark every field optional regardless of its NOT NULL setting. */
    allValuesOptional?: boolean;
    /** Prefix the TS type with `export `. */
    addExport?: boolean;
    /** Used to derive the type name `BUN_SQLITE_<db>_<table>` (uppercased). */
    dbName?: string;
};
||||
|
||||
/**
 * Generates a type definition string for one table schema — either a
 * TypeScript `type` block or a JSDoc `@typedef` block, depending on
 * `paradigm`.
 *
 * @returns `{ typeDefinition, tdName }`; `typeDefinition` is `""` when
 * `paradigm` matches neither flavor, and `null` when generation throws.
 */
export default function generateTypeDefinition({
    paradigm,
    table,
    query,
    typeDefName,
    allValuesOptional,
    addExport,
    dbName,
}: Param) {
    let typeDefinition: string | null = ``;
    let tdName: string | null = ``;

    try {
        // Name precedence: explicit typeDefName > derived from dbName >
        // derived from `query` (the latter throws into the catch when
        // `query` is undefined — presumably callers always pass one of the
        // three; TODO confirm).
        tdName = typeDefName
            ? typeDefName
            : dbName
              ? `BUN_SQLITE_${dbName}_${table.tableName}`.toUpperCase()
              : `BUN_SQLITE_${query.single}_${query.single_table}`.toUpperCase();

        const fields = table.fields;

        // Maps a field's SQL dataType (and enum options) to a TS type
        // expression. Fields with `options` become a literal union; numeric
        // options (or int columns) are emitted unquoted.
        function typeMap(schemaType: BUN_SQLITE_FieldSchemaType) {
            if (schemaType.options && schemaType.options.length > 0) {
                return schemaType.options
                    .map((opt) =>
                        schemaType.dataType?.match(/int/i) ||
                        typeof opt == "number"
                            ? `${opt}`
                            : `"${opt}"`,
                    )
                    .join(" | ");
            }

            if (schemaType.dataType?.match(/int|double|decimal/i)) {
                return "number";
            }

            if (schemaType.dataType?.match(/text|varchar|timestamp/i)) {
                return "string";
            }

            if (schemaType.dataType?.match(/boolean/i)) {
                // SQLite stores booleans as integers.
                return "0 | 1";
            }

            // Unknown data types fall back to string.
            return "string";
        }

        // Both flavors are always built; `paradigm` picks which one is
        // returned at the end.
        const typesArrayTypeScript = [];
        const typesArrayJavascript = [];

        typesArrayTypeScript.push(
            `${addExport ? "export " : ""}type ${tdName} = {`,
        );
        typesArrayJavascript.push(`/**\n * @typedef {object} ${tdName}`);

        fields.forEach((field) => {
            // Field descriptions become doc comments (TS output only).
            if (field.fieldDescription) {
                typesArrayTypeScript.push(
                    ` /** \n * ${field.fieldDescription}\n */`,
                );
            }

            // "?" marks the field optional: always when allValuesOptional,
            // otherwise whenever the column is nullable.
            const nullValue = allValuesOptional
                ? "?"
                : field.notNullValue
                  ? ""
                  : "?";

            typesArrayTypeScript.push(
                ` ${field.fieldName}${nullValue}: ${typeMap(field)};`,
            );

            typesArrayJavascript.push(
                ` * @property {${typeMap(field)}${nullValue}} ${
                    field.fieldName
                }`,
            );
        });

        typesArrayTypeScript.push(`}`);
        typesArrayJavascript.push(` */`);

        if (paradigm?.match(/javascript/i)) {
            typeDefinition = typesArrayJavascript.join("\n");
        }

        if (paradigm?.match(/typescript/i)) {
            typeDefinition = typesArrayTypeScript.join("\n");
        }
    } catch (error: any) {
        console.log(error.message);
        typeDefinition = null;
    }

    return { typeDefinition, tdName };
}
|
||||
47
src/lib/sqlite/db-insert.ts
Normal file
47
src/lib/sqlite/db-insert.ts
Normal file
@ -0,0 +1,47 @@
|
||||
import DbClient from ".";
|
||||
import type { APIResponseObject } from "../../types";
|
||||
import sqlInsertGenerator from "../../utils/sql-insert-generator";
|
||||
|
||||
/**
 * Arguments for {@link DbInsert}.
 */
type Params<
    Schema extends { [k: string]: any } = { [k: string]: any },
    Table extends string = string,
> = {
    /** Table to insert into. */
    table: Table;
    /** Rows to insert; one statement covers the whole batch. */
    data: Schema[];
};
|
||||
|
||||
export default async function DbInsert<
|
||||
Schema extends { [k: string]: any } = { [k: string]: any },
|
||||
Table extends string = string,
|
||||
>({ table, data }: Params<Schema, Table>): Promise<APIResponseObject> {
|
||||
try {
|
||||
const finalData: { [k: string]: any }[] = data.map((d) => ({
|
||||
...d,
|
||||
created_at: Date.now(),
|
||||
updated_at: Date.now(),
|
||||
}));
|
||||
|
||||
const sqlObj = sqlInsertGenerator({
|
||||
tableName: table,
|
||||
data: finalData as any[],
|
||||
});
|
||||
|
||||
const res = DbClient.run(sqlObj?.query || "", sqlObj?.values || []);
|
||||
|
||||
return {
|
||||
success: Boolean(Number(res.lastInsertRowid)),
|
||||
postInsertReturn: {
|
||||
affectedRows: res.changes,
|
||||
insertId: Number(res.lastInsertRowid),
|
||||
},
|
||||
debug: {
|
||||
sqlObj,
|
||||
},
|
||||
};
|
||||
} catch (error: any) {
|
||||
return {
|
||||
success: false,
|
||||
error: error.message,
|
||||
};
|
||||
}
|
||||
}
|
||||
634
src/lib/sqlite/db-schema-manager.ts
Normal file
634
src/lib/sqlite/db-schema-manager.ts
Normal file
@ -0,0 +1,634 @@
|
||||
#!/usr/bin/env bun
|
||||
|
||||
import { Database } from "bun:sqlite";
|
||||
import _ from "lodash";
|
||||
import DbClient from ".";
|
||||
import type {
|
||||
BUN_SQLITE_DatabaseSchemaType,
|
||||
BUN_SQLITE_FieldSchemaType,
|
||||
BUN_SQLITE_TableSchemaType,
|
||||
} from "../../types";
|
||||
|
||||
// Schema Manager Class
|
||||
class SQLiteSchemaManager {
|
||||
private db: Database;
|
||||
private db_manager_table_name: string;
|
||||
private recreate_vector_table: boolean;
|
||||
private db_schema: BUN_SQLITE_DatabaseSchemaType;
|
||||
|
||||
constructor({
|
||||
schema,
|
||||
recreate_vector_table = false,
|
||||
}: {
|
||||
schema: BUN_SQLITE_DatabaseSchemaType;
|
||||
recreate_vector_table?: boolean;
|
||||
}) {
|
||||
this.db = DbClient;
|
||||
this.db_manager_table_name = "__db_schema_manager__";
|
||||
this.db.run("PRAGMA foreign_keys = ON;");
|
||||
this.recreate_vector_table = recreate_vector_table;
|
||||
this.createDbManagerTable();
|
||||
this.db_schema = schema;
|
||||
}
|
||||
|
||||
private createDbManagerTable() {
|
||||
this.db.run(`
|
||||
CREATE TABLE IF NOT EXISTS ${this.db_manager_table_name} (
|
||||
table_name TEXT NOT NULL,
|
||||
created_at INTEGER NOT NULL,
|
||||
updated_at INTEGER NOT NULL
|
||||
)
|
||||
`);
|
||||
}
|
||||
|
||||
private insertDbManagerTable(tableName: string) {
|
||||
this.db.run(
|
||||
`INSERT INTO ${this.db_manager_table_name} (table_name,created_at,updated_at) VALUES (?, ?, ?)`,
|
||||
[tableName, Date.now(), Date.now()],
|
||||
);
|
||||
}
|
||||
|
||||
private removeDbManagerTable(tableName: string) {
|
||||
this.db.run(
|
||||
`DELETE FROM ${this.db_manager_table_name} WHERE table_name = ?`,
|
||||
[tableName],
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Main synchronization method
|
||||
*/
|
||||
async syncSchema(): Promise<void> {
|
||||
console.log("Starting schema synchronization...");
|
||||
|
||||
const existingTables = this.getExistingTables();
|
||||
const schemaTables = this.db_schema.tables.map((t) => t.tableName);
|
||||
|
||||
// 2. Create or update tables
|
||||
for (const table of this.db_schema.tables) {
|
||||
await this.syncTable(table, existingTables);
|
||||
}
|
||||
|
||||
// 1. Drop tables that no longer exist in schema
|
||||
await this.dropRemovedTables(existingTables, schemaTables);
|
||||
|
||||
console.log("Schema synchronization complete!");
|
||||
}
|
||||
|
||||
/**
|
||||
* Get list of existing tables in the database
|
||||
*/
|
||||
private getExistingTables(): string[] {
|
||||
let sql = `SELECT table_name FROM ${this.db_manager_table_name}`;
|
||||
|
||||
const query = this.db.query(sql);
|
||||
const results = query.all() as { table_name: string }[];
|
||||
|
||||
return results.map((r) => r.table_name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Drop tables that are no longer in the schema
|
||||
*/
|
||||
private async dropRemovedTables(
|
||||
existingTables: string[],
|
||||
schemaTables: string[],
|
||||
): Promise<void> {
|
||||
const tablesToDrop = existingTables.filter(
|
||||
(t) =>
|
||||
!schemaTables.includes(t) &&
|
||||
!schemaTables.find((scT) => t.startsWith(scT + "_")),
|
||||
);
|
||||
|
||||
for (const tableName of tablesToDrop) {
|
||||
console.log(`Dropping table: ${tableName}`);
|
||||
this.db.run(`DROP TABLE IF EXISTS "${tableName}"`);
|
||||
this.db.run(
|
||||
`DELETE FROM ${this.db_manager_table_name} WHERE table_name = "${tableName}"`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync a single table (create or update)
|
||||
*/
|
||||
private async syncTable(
|
||||
table: BUN_SQLITE_TableSchemaType,
|
||||
existingTables: string[],
|
||||
): Promise<void> {
|
||||
let tableExists = existingTables.includes(table.tableName);
|
||||
|
||||
// Handle table rename
|
||||
if (table.tableNameOld && table.tableNameOld !== table.tableName) {
|
||||
if (existingTables.includes(table.tableNameOld)) {
|
||||
console.log(
|
||||
`Renaming table: ${table.tableNameOld} -> ${table.tableName}`,
|
||||
);
|
||||
this.db.run(
|
||||
`ALTER TABLE "${table.tableNameOld}" RENAME TO "${table.tableName}"`,
|
||||
);
|
||||
this.insertDbManagerTable(table.tableName);
|
||||
this.removeDbManagerTable(table.tableNameOld);
|
||||
tableExists = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (!tableExists) {
|
||||
// Create new table
|
||||
await this.createTable(table);
|
||||
this.insertDbManagerTable(table.tableName);
|
||||
} else {
|
||||
// Update existing table
|
||||
await this.updateTable(table);
|
||||
}
|
||||
|
||||
// Sync indexes
|
||||
await this.syncIndexes(table);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new table
|
||||
*/
|
||||
private async createTable(
|
||||
table: BUN_SQLITE_TableSchemaType,
|
||||
): Promise<void> {
|
||||
console.log(`Creating table: ${table.tableName}`);
|
||||
|
||||
let new_table = _.cloneDeep(table);
|
||||
|
||||
if (new_table.parentTableName) {
|
||||
const parent_table = this.db_schema.tables.find(
|
||||
(t) => t.tableName === new_table.parentTableName,
|
||||
);
|
||||
|
||||
if (!parent_table) {
|
||||
throw new Error(
|
||||
`Parent table \`${new_table.parentTableName}\` not found for \`${new_table.tableName}\``,
|
||||
);
|
||||
}
|
||||
|
||||
new_table = _.merge(parent_table, {
|
||||
tableName: new_table.tableName,
|
||||
tableDescription: new_table.tableDescription,
|
||||
});
|
||||
}
|
||||
|
||||
const columns: string[] = [];
|
||||
const foreignKeys: string[] = [];
|
||||
|
||||
for (const field of new_table.fields) {
|
||||
const columnDef = this.buildColumnDefinition(field);
|
||||
columns.push(columnDef);
|
||||
|
||||
if (field.foreignKey) {
|
||||
foreignKeys.push(this.buildForeignKeyConstraint(field));
|
||||
}
|
||||
}
|
||||
|
||||
// Add unique constraints
|
||||
if (new_table.uniqueConstraints) {
|
||||
for (const constraint of new_table.uniqueConstraints) {
|
||||
if (
|
||||
constraint.constraintTableFields &&
|
||||
constraint.constraintTableFields.length > 0
|
||||
) {
|
||||
const fields = constraint.constraintTableFields
|
||||
.map((f) => `"${f.value}"`)
|
||||
.join(", ");
|
||||
const constraintName =
|
||||
constraint.constraintName ||
|
||||
`unique_${fields.replace(/"/g, "")}`;
|
||||
columns.push(
|
||||
`CONSTRAINT "${constraintName}" UNIQUE (${fields})`,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const allConstraints = [...columns, ...foreignKeys];
|
||||
|
||||
const sql = new_table.isVector
|
||||
? `CREATE VIRTUAL TABLE "${new_table.tableName}" USING ${new_table.vectorType || "vec0"}(${allConstraints.join(", ")})`
|
||||
: `CREATE TABLE "${new_table.tableName}" (${allConstraints.join(", ")})`;
|
||||
|
||||
this.db.run(sql);
|
||||
}
|
||||
|
||||
/**
|
||||
* Update an existing table
|
||||
*/
|
||||
private async updateTable(
|
||||
table: BUN_SQLITE_TableSchemaType,
|
||||
): Promise<void> {
|
||||
console.log(`Updating table: ${table.tableName}`);
|
||||
|
||||
const existingColumns = this.getTableColumns(table.tableName);
|
||||
const schemaColumns = table.fields.map((f) => f.fieldName || "");
|
||||
|
||||
// SQLite has limited ALTER TABLE support
|
||||
// We need to use the recreation strategy for complex changes
|
||||
|
||||
const columnsToAdd = table.fields.filter(
|
||||
(f) =>
|
||||
f.fieldName &&
|
||||
!existingColumns.find(
|
||||
(c) =>
|
||||
c.name == f.fieldName && c.type == this.mapDataType(f),
|
||||
),
|
||||
);
|
||||
const columnsToRemove = existingColumns.filter(
|
||||
(c) => !schemaColumns.includes(c.name),
|
||||
);
|
||||
const columnsToUpdate = table.fields.filter(
|
||||
(f) =>
|
||||
f.fieldName &&
|
||||
f.updatedField &&
|
||||
existingColumns.find(
|
||||
(c) =>
|
||||
c.name == f.fieldName && c.type == this.mapDataType(f),
|
||||
),
|
||||
);
|
||||
|
||||
// Simple case: only adding columns
|
||||
if (columnsToRemove.length === 0 && columnsToUpdate.length === 0) {
|
||||
for (const field of columnsToAdd) {
|
||||
await this.addColumn(table.tableName, field);
|
||||
}
|
||||
} else {
|
||||
// Complex case: need to recreate table
|
||||
await this.recreateTable(table);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get existing columns for a table
|
||||
*/
|
||||
private getTableColumns(
|
||||
tableName: string,
|
||||
): { name: string; type: string }[] {
|
||||
const query = this.db.query(`PRAGMA table_info("${tableName}")`);
|
||||
const results = query.all() as { name: string; type: string }[];
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a new column to existing table
|
||||
*/
|
||||
private async addColumn(
|
||||
tableName: string,
|
||||
field: BUN_SQLITE_FieldSchemaType,
|
||||
): Promise<void> {
|
||||
console.log(`Adding column: ${tableName}.${field.fieldName}`);
|
||||
|
||||
const columnDef = this.buildColumnDefinition(field);
|
||||
// Remove PRIMARY KEY and UNIQUE constraints for ALTER TABLE ADD COLUMN
|
||||
const cleanDef = columnDef
|
||||
.replace(/PRIMARY KEY/gi, "")
|
||||
.replace(/AUTOINCREMENT/gi, "")
|
||||
.replace(/UNIQUE/gi, "")
|
||||
.trim();
|
||||
|
||||
const sql = `ALTER TABLE "${tableName}" ADD COLUMN ${cleanDef}`;
|
||||
|
||||
this.db.run(sql);
|
||||
}
|
||||
|
||||
/**
|
||||
* Recreate table (for complex schema changes)
|
||||
*/
|
||||
private async recreateTable(
|
||||
table: BUN_SQLITE_TableSchemaType,
|
||||
): Promise<void> {
|
||||
if (table.isVector) {
|
||||
if (!this.recreate_vector_table) {
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`Recreating vector table: ${table.tableName}`);
|
||||
|
||||
const existingRows = this.db
|
||||
.query(`SELECT * FROM "${table.tableName}"`)
|
||||
.all() as { [k: string]: any }[];
|
||||
|
||||
this.db.run(`DROP TABLE "${table.tableName}"`);
|
||||
await this.createTable(table);
|
||||
|
||||
if (existingRows.length > 0) {
|
||||
for (let i = 0; i < existingRows.length; i++) {
|
||||
const row = existingRows[i];
|
||||
if (!row) continue;
|
||||
|
||||
const columns = Object.keys(row);
|
||||
const placeholders = columns.map(() => "?").join(", ");
|
||||
|
||||
this.db.run(
|
||||
`INSERT INTO "${table.tableName}" (${columns.join(", ")}) VALUES (${placeholders})`,
|
||||
Object.values(row),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
const tempTableName = `${table.tableName}_temp_${Date.now()}`;
|
||||
|
||||
// Get existing data
|
||||
const existingColumns = this.getTableColumns(table.tableName);
|
||||
const columnsToKeep = table.fields
|
||||
.filter(
|
||||
(f) =>
|
||||
f.fieldName &&
|
||||
existingColumns.find(
|
||||
(c) =>
|
||||
c.name == f.fieldName &&
|
||||
c.type == this.mapDataType(f),
|
||||
),
|
||||
)
|
||||
.map((f) => f.fieldName);
|
||||
|
||||
// Create temp table with new schema
|
||||
const tempTable = { ...table, tableName: tempTableName };
|
||||
await this.createTable(tempTable);
|
||||
|
||||
// Copy data if there are common columns
|
||||
if (columnsToKeep.length > 0) {
|
||||
const columnList = columnsToKeep.map((c) => `"${c}"`).join(", ");
|
||||
this.db.run(
|
||||
`INSERT INTO "${tempTableName}" (${columnList}) SELECT ${columnList} FROM "${table.tableName}"`,
|
||||
);
|
||||
}
|
||||
|
||||
// Drop old table
|
||||
this.db.run(`DROP TABLE "${table.tableName}"`);
|
||||
|
||||
// Rename temp table
|
||||
this.db.run(
|
||||
`ALTER TABLE "${tempTableName}" RENAME TO "${table.tableName}"`,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Build column definition SQL
|
||||
*/
|
||||
private buildColumnDefinition(field: BUN_SQLITE_FieldSchemaType): string {
|
||||
if (!field.fieldName) {
|
||||
throw new Error("Field name is required");
|
||||
}
|
||||
|
||||
const fieldName = field.sideCar
|
||||
? `+${field.fieldName}`
|
||||
: `${field.fieldName}`;
|
||||
|
||||
const parts: string[] = [fieldName];
|
||||
|
||||
// Data type mapping
|
||||
const dataType = this.mapDataType(field);
|
||||
parts.push(dataType);
|
||||
|
||||
// Primary key
|
||||
if (field.primaryKey) {
|
||||
parts.push("PRIMARY KEY");
|
||||
if (field.autoIncrement) {
|
||||
parts.push("AUTOINCREMENT");
|
||||
}
|
||||
}
|
||||
|
||||
// Not null
|
||||
if (field.notNullValue || field.primaryKey) {
|
||||
if (!field.primaryKey) {
|
||||
parts.push("NOT NULL");
|
||||
}
|
||||
}
|
||||
|
||||
// Unique
|
||||
if (field.unique && !field.primaryKey) {
|
||||
parts.push("UNIQUE");
|
||||
}
|
||||
|
||||
// Default value
|
||||
if (field.defaultValue !== undefined) {
|
||||
if (typeof field.defaultValue === "string") {
|
||||
parts.push(
|
||||
// Escape single quotes by doubling them to prevent SQL injection and wrap in single quotes
|
||||
`DEFAULT '${field.defaultValue.replace(/'/g, "''")}'`,
|
||||
);
|
||||
} else {
|
||||
parts.push(`DEFAULT ${field.defaultValue}`);
|
||||
}
|
||||
} else if (field.defaultValueLiteral) {
|
||||
parts.push(`DEFAULT ${field.defaultValueLiteral}`);
|
||||
}
|
||||
|
||||
return parts.join(" ");
|
||||
}
|
||||
|
||||
/**
|
||||
* Map DSQL data types to SQLite types
|
||||
*/
|
||||
private mapDataType(field: BUN_SQLITE_FieldSchemaType): string {
|
||||
const dataType = field.dataType?.toLowerCase() || "text";
|
||||
const vectorSize = field.vectorSize || 1536;
|
||||
|
||||
// Vector Embeddings
|
||||
if (field.isVector) {
|
||||
return `FLOAT[${vectorSize}]`;
|
||||
}
|
||||
|
||||
// Integer types
|
||||
if (
|
||||
dataType.includes("int") ||
|
||||
dataType === "bigint" ||
|
||||
dataType === "smallint" ||
|
||||
dataType === "tinyint"
|
||||
) {
|
||||
return "INTEGER";
|
||||
}
|
||||
|
||||
// Real/Float types
|
||||
if (
|
||||
dataType.includes("real") ||
|
||||
dataType.includes("float") ||
|
||||
dataType.includes("double") ||
|
||||
dataType === "decimal" ||
|
||||
dataType === "numeric"
|
||||
) {
|
||||
return "REAL";
|
||||
}
|
||||
|
||||
// Blob types
|
||||
if (dataType.includes("blob") || dataType.includes("binary")) {
|
||||
return "BLOB";
|
||||
}
|
||||
|
||||
// Boolean
|
||||
if (dataType === "boolean" || dataType === "bool") {
|
||||
return "INTEGER"; // SQLite uses INTEGER for boolean (0/1)
|
||||
}
|
||||
|
||||
// Date/Time types
|
||||
if (dataType.includes("date") || dataType.includes("time")) {
|
||||
return "TEXT"; // SQLite stores dates as TEXT or INTEGER
|
||||
}
|
||||
|
||||
// Default to TEXT for all text-based types
|
||||
return "TEXT";
|
||||
}
|
||||
|
||||
/**
|
||||
* Build foreign key constraint
|
||||
*/
|
||||
private buildForeignKeyConstraint(
|
||||
field: BUN_SQLITE_FieldSchemaType,
|
||||
): string {
|
||||
const fk = field.foreignKey!;
|
||||
let constraint = `FOREIGN KEY ("${field.fieldName}") REFERENCES "${fk.destinationTableName}"("${fk.destinationTableColumnName}")`;
|
||||
|
||||
if (fk.cascadeDelete) {
|
||||
constraint += " ON DELETE CASCADE";
|
||||
}
|
||||
|
||||
if (fk.cascadeUpdate) {
|
||||
constraint += " ON UPDATE CASCADE";
|
||||
}
|
||||
|
||||
return constraint;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync indexes for a table
|
||||
*/
|
||||
private async syncIndexes(
|
||||
table: BUN_SQLITE_TableSchemaType,
|
||||
): Promise<void> {
|
||||
if (!table.indexes || table.indexes.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Get existing indexes
|
||||
const query = this.db.query(
|
||||
`SELECT name FROM sqlite_master WHERE type='index' AND tbl_name='${table.tableName}' AND name NOT LIKE 'sqlite_%'`,
|
||||
);
|
||||
const existingIndexes = (query.all() as { name: string }[]).map(
|
||||
(r) => r.name,
|
||||
);
|
||||
|
||||
// Drop indexes not in schema
|
||||
for (const indexName of existingIndexes) {
|
||||
const stillExists = table.indexes.some(
|
||||
(idx) => idx.indexName === indexName,
|
||||
);
|
||||
if (!stillExists) {
|
||||
console.log(`Dropping index: ${indexName}`);
|
||||
this.db.run(`DROP INDEX IF EXISTS "${indexName}"`);
|
||||
}
|
||||
}
|
||||
|
||||
// Create new indexes
|
||||
for (const index of table.indexes) {
|
||||
if (
|
||||
!index.indexName ||
|
||||
!index.indexTableFields ||
|
||||
index.indexTableFields.length === 0
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!existingIndexes.includes(index.indexName)) {
|
||||
console.log(`Creating index: ${index.indexName}`);
|
||||
const fields = index.indexTableFields
|
||||
.map((f) => `"${f.value}"`)
|
||||
.join(", ");
|
||||
const unique = index.indexType === "regular" ? "" : ""; // SQLite doesn't have FULLTEXT in CREATE INDEX
|
||||
this.db.run(
|
||||
`CREATE ${unique}INDEX "${index.indexName}" ON "${table.tableName}" (${fields})`,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Close database connection
|
||||
*/
|
||||
close(): void {
|
||||
this.db.close();
|
||||
}
|
||||
}
|
||||
|
||||
// Example usage
|
||||
/**
 * NOTE(review): dead example code — `main` is never invoked or exported, and
 * the `schema` it builds is unused. It serves only as in-file documentation
 * of the expected BUN_SQLITE_DatabaseSchemaType shape; confirm whether it
 * should be removed or wired to a CLI entry point.
 */
async function main() {
    const schema: BUN_SQLITE_DatabaseSchemaType = {
        dbName: "example_db",
        tables: [
            {
                tableName: "users",
                tableDescription: "User accounts",
                fields: [
                    {
                        fieldName: "id",
                        dataType: "INTEGER",
                        primaryKey: true,
                        autoIncrement: true,
                    },
                    {
                        fieldName: "username",
                        dataType: "TEXT",
                        notNullValue: true,
                        unique: true,
                    },
                    {
                        fieldName: "email",
                        dataType: "TEXT",
                        notNullValue: true,
                    },
                    {
                        fieldName: "created_at",
                        dataType: "TEXT",
                        // Raw SQL literal, injected verbatim into the DDL.
                        defaultValueLiteral: "CURRENT_TIMESTAMP",
                    },
                ],
                indexes: [
                    {
                        indexName: "idx_users_email",
                        indexType: "regular",
                        indexTableFields: [
                            { value: "email", dataType: "TEXT" },
                        ],
                    },
                ],
            },
            {
                tableName: "posts",
                fields: [
                    {
                        fieldName: "id",
                        dataType: "INTEGER",
                        primaryKey: true,
                        autoIncrement: true,
                    },
                    {
                        fieldName: "user_id",
                        dataType: "INTEGER",
                        notNullValue: true,
                        // Rows are deleted when the referenced user is deleted.
                        foreignKey: {
                            destinationTableName: "users",
                            destinationTableColumnName: "id",
                            cascadeDelete: true,
                        },
                    },
                    {
                        fieldName: "title",
                        dataType: "TEXT",
                        notNullValue: true,
                    },
                    {
                        fieldName: "content",
                        dataType: "TEXT",
                    },
                ],
            },
        ],
    };
}
|
||||
|
||||
export { SQLiteSchemaManager };
|
||||
64
src/lib/sqlite/db-schema-to-typedef.ts
Normal file
64
src/lib/sqlite/db-schema-to-typedef.ts
Normal file
@ -0,0 +1,64 @@
|
||||
import _ from "lodash";
|
||||
import type { BUN_SQLITE_DatabaseSchemaType } from "../../types";
|
||||
import generateTypeDefinition from "./db-generate-type-defs";
|
||||
|
||||
type Params = {
|
||||
dbSchema?: BUN_SQLITE_DatabaseSchemaType;
|
||||
};
|
||||
|
||||
export default function dbSchemaToType(params?: Params): string[] | undefined {
|
||||
let datasquirelSchema = params?.dbSchema;
|
||||
|
||||
if (!datasquirelSchema) return;
|
||||
|
||||
let tableNames = `export const BunSQLiteTables = [\n${datasquirelSchema.tables
|
||||
.map((tbl) => ` "${tbl.tableName}",`)
|
||||
.join("\n")}\n] as const`;
|
||||
|
||||
const dbTablesSchemas = datasquirelSchema.tables;
|
||||
|
||||
const defDbName = datasquirelSchema.dbName
|
||||
?.toUpperCase()
|
||||
.replace(/ |\-/g, "_");
|
||||
|
||||
const defNames: string[] = [];
|
||||
|
||||
const schemas = dbTablesSchemas
|
||||
.map((table) => {
|
||||
let final_table = _.cloneDeep(table);
|
||||
|
||||
if (final_table.parentTableName) {
|
||||
const parent_table = dbTablesSchemas.find(
|
||||
(t) => t.tableName === final_table.parentTableName,
|
||||
);
|
||||
|
||||
if (parent_table) {
|
||||
final_table = _.merge(parent_table, {
|
||||
tableName: final_table.tableName,
|
||||
tableDescription: final_table.tableDescription,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const defObj = generateTypeDefinition({
|
||||
paradigm: "TypeScript",
|
||||
table: final_table,
|
||||
typeDefName: `BUN_SQLITE_${defDbName}_${final_table.tableName.toUpperCase()}`,
|
||||
allValuesOptional: true,
|
||||
addExport: true,
|
||||
});
|
||||
|
||||
if (defObj.tdName?.match(/./)) {
|
||||
defNames.push(defObj.tdName);
|
||||
}
|
||||
|
||||
return defObj.typeDefinition;
|
||||
})
|
||||
.filter((schm) => typeof schm == "string");
|
||||
|
||||
const allTd = defNames?.[0]
|
||||
? `export type BUN_SQLITE_${defDbName}_ALL_TYPEDEFS = ${defNames.join(` & `)}`
|
||||
: ``;
|
||||
|
||||
return [tableNames, ...schemas, allTd];
|
||||
}
|
||||
78
src/lib/sqlite/db-select.ts
Normal file
78
src/lib/sqlite/db-select.ts
Normal file
@ -0,0 +1,78 @@
|
||||
import mysql from "mysql";
|
||||
import DbClient from ".";
|
||||
import _ from "lodash";
|
||||
import type { APIResponseObject, ServerQueryParam } from "../../types";
|
||||
import sqlGenerator from "../../utils/sql-generator";
|
||||
|
||||
type Params<
|
||||
Schema extends { [k: string]: any } = { [k: string]: any },
|
||||
Table extends string = string,
|
||||
> = {
|
||||
query?: ServerQueryParam<Schema>;
|
||||
table: Table;
|
||||
count?: boolean;
|
||||
targetId?: number | string;
|
||||
};
|
||||
|
||||
export default async function DbSelect<
|
||||
Schema extends { [k: string]: any } = { [k: string]: any },
|
||||
Table extends string = string,
|
||||
>({
|
||||
table,
|
||||
query,
|
||||
count,
|
||||
targetId,
|
||||
}: Params<Schema, Table>): Promise<APIResponseObject<Schema>> {
|
||||
try {
|
||||
let finalQuery = query || {};
|
||||
|
||||
if (targetId) {
|
||||
finalQuery = _.merge<ServerQueryParam<any>, ServerQueryParam<any>>(
|
||||
finalQuery,
|
||||
{
|
||||
query: {
|
||||
id: {
|
||||
value: String(targetId),
|
||||
},
|
||||
},
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
const sqlObj = sqlGenerator({
|
||||
tableName: table,
|
||||
genObject: finalQuery,
|
||||
count,
|
||||
});
|
||||
|
||||
const sql = mysql.format(sqlObj.string, sqlObj.values);
|
||||
|
||||
const res = DbClient.query<Schema, Schema[]>(sql);
|
||||
const batchRes = res.all();
|
||||
|
||||
let resp: APIResponseObject<Schema> = {
|
||||
success: Boolean(batchRes[0]),
|
||||
payload: batchRes,
|
||||
singleRes: batchRes[0],
|
||||
debug: {
|
||||
sqlObj,
|
||||
sql,
|
||||
},
|
||||
};
|
||||
|
||||
if (count) {
|
||||
const count_val = count ? batchRes[0]?.["COUNT(*)"] : undefined;
|
||||
resp["count"] = Number(count_val);
|
||||
|
||||
delete resp.payload;
|
||||
delete resp.singleRes;
|
||||
}
|
||||
|
||||
return resp;
|
||||
} catch (error: any) {
|
||||
return {
|
||||
success: false,
|
||||
error: error.message,
|
||||
};
|
||||
}
|
||||
}
|
||||
42
src/lib/sqlite/db-sql.ts
Normal file
42
src/lib/sqlite/db-sql.ts
Normal file
@ -0,0 +1,42 @@
|
||||
import DbClient from ".";
|
||||
import _ from "lodash";
|
||||
import type { APIResponseObject } from "../../types";
|
||||
|
||||
type Params = {
|
||||
sql: string;
|
||||
values?: (string | number)[];
|
||||
};
|
||||
|
||||
export default async function DbSQL<
|
||||
T extends { [k: string]: any } = { [k: string]: any },
|
||||
>({ sql, values }: Params): Promise<APIResponseObject<T>> {
|
||||
try {
|
||||
const res = sql.match(/^select/i)
|
||||
? DbClient.query(sql).all(...(values || []))
|
||||
: DbClient.run(sql, values || []);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
payload: Array.isArray(res) ? (res as T[]) : undefined,
|
||||
singleRes: Array.isArray(res) ? (res as T[])?.[0] : undefined,
|
||||
postInsertReturn: Array.isArray(res)
|
||||
? undefined
|
||||
: {
|
||||
affectedRows: res.changes,
|
||||
insertId: Number(res.lastInsertRowid),
|
||||
},
|
||||
debug: {
|
||||
sqlObj: {
|
||||
sql,
|
||||
values,
|
||||
},
|
||||
sql,
|
||||
},
|
||||
};
|
||||
} catch (error: any) {
|
||||
return {
|
||||
success: false,
|
||||
error: error.message,
|
||||
};
|
||||
}
|
||||
}
|
||||
104
src/lib/sqlite/db-update.ts
Normal file
104
src/lib/sqlite/db-update.ts
Normal file
@ -0,0 +1,104 @@
|
||||
import DbClient from ".";
|
||||
import _ from "lodash";
|
||||
import type { APIResponseObject, ServerQueryParam } from "../../types";
|
||||
import sqlGenerator from "../../utils/sql-generator";
|
||||
|
||||
type Params<
|
||||
Schema extends { [k: string]: any } = { [k: string]: any },
|
||||
Table extends string = string,
|
||||
> = {
|
||||
table: Table;
|
||||
data: Schema;
|
||||
query?: ServerQueryParam<Schema>;
|
||||
targetId?: number | string;
|
||||
};
|
||||
|
||||
export default async function DbUpdate<
|
||||
Schema extends { [k: string]: any } = { [k: string]: any },
|
||||
Table extends string = string,
|
||||
>({
|
||||
table,
|
||||
data,
|
||||
query,
|
||||
targetId,
|
||||
}: Params<Schema, Table>): Promise<APIResponseObject> {
|
||||
try {
|
||||
let finalQuery = query || {};
|
||||
|
||||
if (targetId) {
|
||||
finalQuery = _.merge<ServerQueryParam<any>, ServerQueryParam<any>>(
|
||||
finalQuery,
|
||||
{
|
||||
query: {
|
||||
id: {
|
||||
value: String(targetId),
|
||||
},
|
||||
},
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
const sqlQueryObj = sqlGenerator({
|
||||
tableName: table,
|
||||
genObject: finalQuery,
|
||||
});
|
||||
|
||||
let values: (string | number)[] = [];
|
||||
|
||||
const whereClause = sqlQueryObj.string.match(/WHERE .*/)?.[0];
|
||||
|
||||
if (whereClause) {
|
||||
let sql = `UPDATE ${table} SET`;
|
||||
|
||||
const finalData: { [k: string]: any } = {
|
||||
...data,
|
||||
updated_at: Date.now(),
|
||||
};
|
||||
|
||||
const keys = Object.keys(finalData);
|
||||
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
const key = keys[i];
|
||||
if (!key) continue;
|
||||
|
||||
const isLast = i == keys.length - 1;
|
||||
|
||||
sql += ` ${key}=?`;
|
||||
values.push(
|
||||
String(finalData[key as keyof { [k: string]: any }]),
|
||||
);
|
||||
|
||||
if (!isLast) {
|
||||
sql += `,`;
|
||||
}
|
||||
}
|
||||
|
||||
sql += ` ${whereClause}`;
|
||||
values = [...values, ...sqlQueryObj.values];
|
||||
|
||||
const res = DbClient.run(sql, values);
|
||||
|
||||
return {
|
||||
success: Boolean(res.changes),
|
||||
postInsertReturn: {
|
||||
affectedRows: res.changes,
|
||||
insertId: Number(res.lastInsertRowid),
|
||||
},
|
||||
debug: {
|
||||
sql,
|
||||
values,
|
||||
},
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
success: false,
|
||||
msg: `No WHERE clause`,
|
||||
};
|
||||
}
|
||||
} catch (error: any) {
|
||||
return {
|
||||
success: false,
|
||||
error: error.message,
|
||||
};
|
||||
}
|
||||
}
|
||||
22
src/lib/sqlite/index.ts
Normal file
22
src/lib/sqlite/index.ts
Normal file
@ -0,0 +1,22 @@
|
||||
import Database from "better-sqlite3";
|
||||
import * as sqliteVec from "sqlite-vec";
|
||||
import grabDirNames from "../../data/grab-dir-names";
|
||||
import init from "../../functions/init";
|
||||
import grabDBDir from "../../utils/grab-db-dir";
|
||||
|
||||
const { ROOT_DIR } = grabDirNames();
|
||||
const { config } = await init();
|
||||
|
||||
let db_dir = ROOT_DIR;
|
||||
|
||||
if (config.db_dir) {
|
||||
db_dir = config.db_dir;
|
||||
}
|
||||
|
||||
const { db_file_path } = grabDBDir({ config });
|
||||
|
||||
const DbClient = new Database(db_file_path, { fileMustExist: false });
|
||||
|
||||
sqliteVec.load(DbClient);
|
||||
|
||||
export default DbClient;
|
||||
27
src/lib/sqlite/schema-to-typedef.ts
Normal file
27
src/lib/sqlite/schema-to-typedef.ts
Normal file
@ -0,0 +1,27 @@
|
||||
import path from "node:path";
|
||||
import { existsSync, mkdirSync, writeFileSync } from "node:fs";
|
||||
import type { BUN_SQLITE_DatabaseSchemaType } from "../../types";
|
||||
import dbSchemaToType from "./db-schema-to-typedef";
|
||||
|
||||
type Params = {
|
||||
dbSchema: BUN_SQLITE_DatabaseSchemaType;
|
||||
dst_file: string;
|
||||
};
|
||||
|
||||
export default function dbSchemaToTypeDef({ dbSchema, dst_file }: Params) {
|
||||
try {
|
||||
if (!dbSchema) throw new Error("No schema found");
|
||||
|
||||
const definitions = dbSchemaToType({ dbSchema });
|
||||
|
||||
const ourfileDir = path.dirname(dst_file);
|
||||
|
||||
if (!existsSync(ourfileDir)) {
|
||||
mkdirSync(ourfileDir, { recursive: true });
|
||||
}
|
||||
|
||||
writeFileSync(dst_file, definitions?.join("\n\n") || "", "utf-8");
|
||||
} catch (error: any) {
|
||||
console.log(`Schema to Typedef Error =>`, error.message);
|
||||
}
|
||||
}
|
||||
7
src/lib/sqlite/schema.ts
Normal file
7
src/lib/sqlite/schema.ts
Normal file
@ -0,0 +1,7 @@
|
||||
import _ from "lodash";
|
||||
import type { BUN_SQLITE_DatabaseSchemaType } from "../../types";
|
||||
|
||||
// Root application database schema. Tables start empty here and are
// presumably registered/appended elsewhere — TODO confirm against callers.
export const DbSchema: BUN_SQLITE_DatabaseSchemaType = {
    dbName: "travis-ai",
    tables: [],
};
|
||||
1193
src/types/index.ts
Normal file
1193
src/types/index.ts
Normal file
File diff suppressed because it is too large
Load Diff
20
src/utils/append-default-fields-to-db-schema.ts
Normal file
20
src/utils/append-default-fields-to-db-schema.ts
Normal file
@ -0,0 +1,20 @@
|
||||
import _ from "lodash";
|
||||
import { DefaultFields, type BUN_SQLITE_DatabaseSchemaType } from "../types";
|
||||
|
||||
type Params = {
|
||||
dbSchema: BUN_SQLITE_DatabaseSchemaType;
|
||||
};
|
||||
|
||||
export default function ({ dbSchema }: Params): BUN_SQLITE_DatabaseSchemaType {
|
||||
const finaldbSchema = _.cloneDeep(dbSchema);
|
||||
finaldbSchema.tables = finaldbSchema.tables.map((t) => {
|
||||
const newTable = _.cloneDeep(t);
|
||||
newTable.fields = newTable.fields.filter(
|
||||
(f) => !f.fieldName?.match(/^(id|created_at|updated_at)$/),
|
||||
);
|
||||
newTable.fields.unshift(...DefaultFields);
|
||||
return newTable;
|
||||
});
|
||||
|
||||
return finaldbSchema;
|
||||
}
|
||||
13
src/utils/grab-backup-data.ts
Normal file
13
src/utils/grab-backup-data.ts
Normal file
@ -0,0 +1,13 @@
|
||||
type Params = {
|
||||
backup_name: string;
|
||||
};
|
||||
|
||||
export default function grabBackupData({ backup_name }: Params) {
|
||||
const backup_parts = backup_name.split("-");
|
||||
const backup_date_timestamp = Number(backup_parts.pop());
|
||||
const origin_backup_name = backup_parts.join("-");
|
||||
|
||||
const backup_date = new Date(backup_date_timestamp);
|
||||
|
||||
return { backup_date, backup_date_timestamp, origin_backup_name };
|
||||
}
|
||||
11
src/utils/grab-db-backup-file-name.ts
Normal file
11
src/utils/grab-db-backup-file-name.ts
Normal file
@ -0,0 +1,11 @@
|
||||
import type { BunSQLiteConfig } from "../types";
|
||||
|
||||
type Params = {
|
||||
config: BunSQLiteConfig;
|
||||
};
|
||||
|
||||
export default function grabDBBackupFileName({ config }: Params) {
|
||||
const new_db_file_name = `${config.db_name}-${Date.now()}`;
|
||||
|
||||
return new_db_file_name;
|
||||
}
|
||||
26
src/utils/grab-db-dir.ts
Normal file
26
src/utils/grab-db-dir.ts
Normal file
@ -0,0 +1,26 @@
|
||||
import path from "path";
|
||||
import grabDirNames from "../data/grab-dir-names";
|
||||
import type { BunSQLiteConfig } from "../types";
|
||||
import { AppData } from "../data/app-data";
|
||||
|
||||
type Params = {
|
||||
config: BunSQLiteConfig;
|
||||
};
|
||||
|
||||
export default function grabDBDir({ config }: Params) {
|
||||
const { ROOT_DIR } = grabDirNames();
|
||||
|
||||
let db_dir = ROOT_DIR;
|
||||
|
||||
if (config.db_dir) {
|
||||
db_dir = config.db_dir;
|
||||
}
|
||||
|
||||
const backup_dir_name =
|
||||
config.db_backup_dir || AppData["DefaultBackupDirName"];
|
||||
|
||||
const backup_dir = path.resolve(db_dir, backup_dir_name);
|
||||
const db_file_path = path.resolve(db_dir, config.db_name);
|
||||
|
||||
return { db_dir, backup_dir, db_file_path };
|
||||
}
|
||||
29
src/utils/grab-sorted-backups.ts
Normal file
29
src/utils/grab-sorted-backups.ts
Normal file
@ -0,0 +1,29 @@
|
||||
import grabDBDir from "../utils/grab-db-dir";
|
||||
import fs from "fs";
|
||||
import type { BunSQLiteConfig } from "../types";
|
||||
|
||||
type Params = {
|
||||
config: BunSQLiteConfig;
|
||||
};
|
||||
|
||||
export default function grabSortedBackups({ config }: Params) {
|
||||
const { backup_dir } = grabDBDir({ config });
|
||||
|
||||
const backups = fs.readdirSync(backup_dir);
|
||||
|
||||
/**
|
||||
* Order Backups. Most recent first.
|
||||
*/
|
||||
const ordered_backups = backups.sort((a, b) => {
|
||||
const a_date = Number(a.split("-").pop());
|
||||
const b_date = Number(b.split("-").pop());
|
||||
|
||||
if (a_date > b_date) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
return 1;
|
||||
});
|
||||
|
||||
return ordered_backups;
|
||||
}
|
||||
42
src/utils/sql-equality-parser.ts
Normal file
42
src/utils/sql-equality-parser.ts
Normal file
@ -0,0 +1,42 @@
|
||||
import { ServerQueryEqualities } from "../types";
|
||||
|
||||
export default function sqlEqualityParser(
|
||||
eq: (typeof ServerQueryEqualities)[number]
|
||||
): string {
|
||||
switch (eq) {
|
||||
case "EQUAL":
|
||||
return "=";
|
||||
case "LIKE":
|
||||
return "LIKE";
|
||||
case "NOT LIKE":
|
||||
return "NOT LIKE";
|
||||
case "NOT EQUAL":
|
||||
return "<>";
|
||||
case "IN":
|
||||
return "IN";
|
||||
case "NOT IN":
|
||||
return "NOT IN";
|
||||
case "BETWEEN":
|
||||
return "BETWEEN";
|
||||
case "NOT BETWEEN":
|
||||
return "NOT BETWEEN";
|
||||
case "IS NULL":
|
||||
return "IS NULL";
|
||||
case "IS NOT NULL":
|
||||
return "IS NOT NULL";
|
||||
case "EXISTS":
|
||||
return "EXISTS";
|
||||
case "NOT EXISTS":
|
||||
return "NOT EXISTS";
|
||||
case "GREATER THAN":
|
||||
return ">";
|
||||
case "GREATER THAN OR EQUAL":
|
||||
return ">=";
|
||||
case "LESS THAN":
|
||||
return "<";
|
||||
case "LESS THAN OR EQUAL":
|
||||
return "<=";
|
||||
default:
|
||||
return "=";
|
||||
}
|
||||
}
|
||||
140
src/utils/sql-gen-operator-gen.ts
Normal file
140
src/utils/sql-gen-operator-gen.ts
Normal file
@ -0,0 +1,140 @@
|
||||
import type { ServerQueryEqualities, ServerQueryObject } from "../types";
|
||||
import sqlEqualityParser from "./sql-equality-parser";
|
||||
|
||||
type Params = {
    // Fully-qualified column name the operator applies to.
    fieldName: string;
    // Literal value to compare against (bound via "?" unless
    // isValueFieldValue is set).
    value?: string;
    // Named equality selector; see the branch chain below.
    equality?: (typeof ServerQueryEqualities)[number];
    // Originating query object; only nullValue / notNullValue are read here.
    queryObj: ServerQueryObject<
        {
            [key: string]: any;
        },
        string
    >;
    // When true, `value` is a column reference (e.g. "table.field") and is
    // interpolated directly into the SQL instead of being bound as a param.
    isValueFieldValue?: boolean;
};

type Return = {
    // SQL condition fragment, e.g. "users.name LIKE ?".
    str?: string;
    // Value to bind for the single "?" in `str`, when one is needed.
    param?: string;
};

/**
 * # SQL Gen Operator Gen
 * @description Generates an SQL operator for node module `mysql` or `serverless-mysql`.
 * Produces a single condition fragment (`str`) plus, where applicable, the
 * value (`param`) to bind for its placeholder.
 */
export default function sqlGenOperatorGen({
    fieldName,
    value,
    equality,
    queryObj,
    isValueFieldValue,
}: Params): Return {
    // Explicit NULL markers on the query object short-circuit everything else.
    if (queryObj.nullValue) {
        return { str: `${fieldName} IS NULL` };
    }

    if (queryObj.notNullValue) {
        return { str: `${fieldName} IS NOT NULL` };
    }

    // NOTE(review): truthiness check — an empty-string value falls through
    // to the no-value branch at the bottom; confirm that is intended.
    if (value) {
        // Column-reference values are interpolated directly; literal values
        // are bound through a "?" placeholder with the literal as the param.
        const finalValue = isValueFieldValue ? value : "?";
        const finalParams = isValueFieldValue ? undefined : value;

        // Full-text search variants.
        if (equality == "MATCH") {
            return {
                str: `MATCH(${fieldName}) AGAINST(${finalValue} IN NATURAL LANGUAGE MODE)`,
                param: finalParams,
            };
        } else if (equality == "MATCH_BOOLEAN") {
            return {
                str: `MATCH(${fieldName}) AGAINST(${finalValue} IN BOOLEAN MODE)`,
                param: finalParams,
            };
            // LIKE variants. The non-"_RAW" forms wrap the bound param in "%…%";
            // "_RAW" forms bind the param as-is. "_LOWER" forms lower-case both
            // sides for case-insensitive matching.
            // NOTE(review): if isValueFieldValue is true here, finalParams is
            // undefined and the %-wrapped branches produce the literal param
            // "%undefined%" — presumably LIKE is never combined with
            // column-reference values; confirm.
        } else if (equality == "LIKE_LOWER") {
            return {
                str: `LOWER(${fieldName}) LIKE LOWER(${finalValue})`,
                param: `%${finalParams}%`,
            };
        } else if (equality == "LIKE_LOWER_RAW") {
            return {
                str: `LOWER(${fieldName}) LIKE LOWER(${finalValue})`,
                param: finalParams,
            };
        } else if (equality == "LIKE") {
            return {
                str: `${fieldName} LIKE ${finalValue}`,
                param: `%${finalParams}%`,
            };
        } else if (equality == "LIKE_RAW") {
            return {
                str: `${fieldName} LIKE ${finalValue}`,
                param: finalParams,
            };
        } else if (equality == "NOT_LIKE_LOWER") {
            return {
                str: `LOWER(${fieldName}) NOT LIKE LOWER(${finalValue})`,
                param: `%${finalParams}%`,
            };
        } else if (equality == "NOT_LIKE_LOWER_RAW") {
            return {
                str: `LOWER(${fieldName}) NOT LIKE LOWER(${finalValue})`,
                param: finalParams,
            };
        } else if (equality == "NOT LIKE") {
            return {
                str: `${fieldName} NOT LIKE ${finalValue}`,
                param: finalParams,
            };
        } else if (equality == "NOT LIKE_RAW") {
            return {
                str: `${fieldName} NOT LIKE ${finalValue}`,
                param: finalParams,
            };
        } else if (equality == "REGEXP") {
            // Case-insensitive regexp match (both sides lower-cased).
            return {
                str: `LOWER(${fieldName}) REGEXP LOWER(${finalValue})`,
                param: finalParams,
            };
        } else if (equality == "FULLTEXT") {
            // Same SQL as MATCH_BOOLEAN above.
            return {
                str: `MATCH(${fieldName}) AGAINST(${finalValue} IN BOOLEAN MODE)`,
                param: finalParams,
            };
        } else if (equality == "NOT EQUAL") {
            return {
                str: `${fieldName} != ${finalValue}`,
                param: finalParams,
            };
        } else if (equality) {
            // Any other named equality is translated to its operator text.
            return {
                str: `${fieldName} ${sqlEqualityParser(
                    equality,
                )} ${finalValue}`,
                param: finalParams,
            };
        } else {
            // No equality given: plain equals.
            return {
                str: `${fieldName} = ${finalValue}`,
                param: finalParams,
            };
        }
    } else {
        // No (truthy) value supplied: emit value-less operators, or a "?"
        // comparison whose param is the original (falsy) value.
        if (equality == "IS NULL") {
            return { str: `${fieldName} IS NULL` };
        } else if (equality == "IS NOT NULL") {
            return { str: `${fieldName} IS NOT NULL` };
        } else if (equality) {
            // NOTE(review): `param` is undefined or "" here since `value`
            // is falsy — confirm downstream binding handles that.
            return {
                str: `${fieldName} ${sqlEqualityParser(equality)} ?`,
                param: value,
            };
        } else {
            return {
                str: `${fieldName} = ?`,
                param: value,
            };
        }
    }
}
|
||||
521
src/utils/sql-generator.ts
Normal file
521
src/utils/sql-generator.ts
Normal file
@ -0,0 +1,521 @@
|
||||
import { isUndefined } from "lodash";
|
||||
import sqlGenOperatorGen from "./sql-gen-operator-gen";
|
||||
import type {
|
||||
ServerQueryParam,
|
||||
ServerQueryParamOrder,
|
||||
ServerQueryParamsJoin,
|
||||
ServerQueryParamsJoinMatchObject,
|
||||
ServerQueryQueryObject,
|
||||
ServerQueryValuesObject,
|
||||
} from "../types";
|
||||
|
||||
type Param<T extends { [key: string]: any } = { [key: string]: any }> = {
    // Query description: filters, joins, order, group, limit, etc.
    genObject?: ServerQueryParam<T>;
    // Primary table the SELECT targets.
    tableName: string;
    // Optional database name; when present, every table reference is
    // prefixed with "<dbFullName>.".
    dbFullName?: string;
    // When true, emit "SELECT COUNT(*)" and suppress select fields,
    // join column lists, ORDER BY, LIMIT and OFFSET.
    count?: boolean;
};

type Return = {
    // The generated SQL string with "?" placeholders.
    string: string;
    // Bound values, in the same order as the placeholders appear.
    values: (string | number)[];
};

/**
 * # SQL Query Generator
 * @description Generates an SQL Query for node module `mysql` or `serverless-mysql`.
 * Builds the SELECT clause, count sub-queries, JOINs, WHERE conditions,
 * full-text search, GROUP BY, ORDER BY and LIMIT/OFFSET from `genObject`,
 * accumulating placeholder values in the order their "?"s are emitted.
 */
export default function sqlGenerator<
    T extends { [key: string]: any } = { [key: string]: any },
>({ tableName, genObject, dbFullName, count }: Param<T>): Return {
    const finalQuery = genObject?.query ? genObject.query : undefined;

    const queryKeys = finalQuery ? Object.keys(finalQuery) : undefined;

    // Accumulator for "?" placeholder values. Push order must mirror the
    // order placeholders are appended to the SQL string.
    // NOTE(review): "sqlSearh*" spelling kept as-is (rename would be a
    // behavior-neutral but wide-reaching edit).
    const sqlSearhValues: string[] = [];

    // Prefix applied to every table reference when a db name is given.
    const finalDbName = dbFullName ? `${dbFullName}.` : "";

    /**
     * # Generate Query
     * Builds one WHERE-condition fragment for a single query object,
     * delegating operator/param generation to sqlGenOperatorGen and
     * pushing any bound values onto sqlSearhValues.
     */
    function genSqlSrchStr({
        queryObj,
        join,
        field,
    }: {
        queryObj: ServerQueryQueryObject[string];
        join?: (ServerQueryParamsJoin | ServerQueryParamsJoin[] | undefined)[];
        field?: string;
    }) {
        // Qualify the field with its own table, or with the main table
        // when joins are present (to disambiguate), else leave bare.
        const finalFieldName = (() => {
            if (queryObj?.tableName) {
                return `${finalDbName}${queryObj.tableName}.${field}`;
            }
            if (join) {
                return `${finalDbName}${tableName}.${field}`;
            }
            return field;
        })();

        // Fallback fragment if no branch below overwrites it.
        let str = `${finalFieldName}=?`;

        // Normalizes one value (string literal or {tableName,fieldName,value}
        // object) into an operator fragment + optional bound param.
        function grabValue(val?: string | ServerQueryValuesObject | null) {
            const valueParsed = val;

            if (!valueParsed) return;

            const valueString =
                typeof valueParsed == "string"
                    ? valueParsed
                    : valueParsed
                      ? valueParsed.fieldName && valueParsed.tableName
                          ? `${valueParsed.tableName}.${valueParsed.fieldName}`
                          : valueParsed.value?.toString()
                      : undefined;

            // Per-value equality overrides the query object's equality.
            const valueEquality =
                typeof valueParsed == "object"
                    ? valueParsed.equality || queryObj.equality
                    : queryObj.equality;

            const operatorStrParam = sqlGenOperatorGen({
                queryObj,
                equality: valueEquality,
                fieldName: finalFieldName || "",
                value: valueString?.toString() || "",
                isValueFieldValue: Boolean(
                    typeof valueParsed == "object" &&
                        valueParsed.fieldName &&
                        valueParsed.tableName,
                ),
            });

            return operatorStrParam;
        }

        if (Array.isArray(queryObj.value)) {
            // Multiple values: each becomes a fragment, joined by the query
            // object's operator (default AND) and parenthesized.
            const strArray: string[] = [];

            queryObj.value.forEach((val) => {
                const operatorStrParam = grabValue(val);

                if (!operatorStrParam) return;

                if (operatorStrParam.str && operatorStrParam.param) {
                    strArray.push(operatorStrParam.str);
                    sqlSearhValues.push(operatorStrParam.param);
                } else if (operatorStrParam.str) {
                    strArray.push(operatorStrParam.str);
                }
            });

            str = "(" + strArray.join(` ${queryObj.operator || "AND"} `) + ")";
        } else if (typeof queryObj.value == "object") {
            // Single object value (column reference or wrapped value).
            const operatorStrParam = grabValue(queryObj.value);
            if (operatorStrParam?.str) {
                str = operatorStrParam.str;
                if (operatorStrParam.param) {
                    sqlSearhValues.push(operatorStrParam.param);
                }
            }
        } else {
            // Scalar value (or none). NOTE(review): falsy scalars (0, "")
            // become undefined here — confirm querying for 0 is unsupported.
            const valueParsed = queryObj.value
                ? String(queryObj.value)
                : undefined;

            const operatorStrParam = sqlGenOperatorGen({
                equality: queryObj.equality,
                fieldName: finalFieldName || "",
                value: valueParsed,
                queryObj,
            });

            if (operatorStrParam.str && operatorStrParam.param) {
                str = operatorStrParam.str;
                sqlSearhValues.push(operatorStrParam.param);
            } else if (operatorStrParam.str) {
                str = operatorStrParam.str;
            }
        }

        return str;
    }

    /**
     * Builds one "a.b=c.d" (or "a.b='literal'") ON-clause term for a join
     * match object; recurses for __batch groups of matches.
     */
    function generateJoinStr(
        mtch: ServerQueryParamsJoinMatchObject,
        join: ServerQueryParamsJoin,
    ) {
        if (mtch.__batch) {
            // Parenthesized group of matches joined by the batch operator
            // (default OR).
            let btch_mtch = ``;
            btch_mtch += `(`;

            for (let i = 0; i < mtch.__batch.matches.length; i++) {
                const __mtch = mtch.__batch.matches[
                    i
                ] as ServerQueryParamsJoinMatchObject;
                btch_mtch += `${generateJoinStr(__mtch, join)}`;
                if (i < mtch.__batch.matches.length - 1) {
                    btch_mtch += ` ${mtch.__batch.operator || "OR"} `;
                }
            }

            btch_mtch += `)`;

            return btch_mtch;
        }

        // Left side: explicit {tableName,fieldName} or main-table column.
        // Right side: literal (numbers unquoted, everything else quoted),
        // or a column on the join's alias/table.
        return `${finalDbName}${
            typeof mtch.source == "object" ? mtch.source.tableName : tableName
        }.${
            typeof mtch.source == "object" ? mtch.source.fieldName : mtch.source
        }=${(() => {
            if (mtch.targetLiteral) {
                if (typeof mtch.targetLiteral == "number") {
                    return `${mtch.targetLiteral}`;
                }
                return `'${mtch.targetLiteral}'`;
            }

            if (join.alias) {
                return `${finalDbName}${
                    typeof mtch.target == "object"
                        ? mtch.target.tableName
                        : join.alias
                }.${
                    typeof mtch.target == "object"
                        ? mtch.target.fieldName
                        : mtch.target
                }`;
            }

            return `${finalDbName}${
                typeof mtch.target == "object"
                    ? mtch.target.tableName
                    : join.tableName
            }.${
                typeof mtch.target == "object"
                    ? mtch.target.fieldName
                    : mtch.target
            }`;
        })()}`;
    }

    // Full-text MATCH(...) AGAINST (? ...) fragment; fields are qualified
    // with the main table name when joins are present.
    let fullTextMatchStr = genObject?.fullTextSearch
        ? ` MATCH(${genObject.fullTextSearch.fields
              .map((f) =>
                  genObject.join ? `${tableName}.${String(f)}` : `${String(f)}`,
              )
              .join(",")}) AGAINST (? IN BOOLEAN MODE)`
        : undefined;

    // NOTE(review): split(" ")/map/join(" ") reproduces searchTerm
    // unchanged — possibly a placeholder for per-token boolean-mode
    // operators; confirm intent.
    const fullTextSearchStr = genObject?.fullTextSearch
        ? genObject.fullTextSearch.searchTerm
              .split(` `)
              .map((t) => `${t}`)
              .join(" ")
        : undefined;

    // Build everything up to (but not including) WHERE.
    let queryString = (() => {
        let str = "SELECT";

        // --- Select list ---
        if (count) {
            str += ` COUNT(*)`;
        } else if (genObject?.selectFields?.[0]) {
            if (genObject.join) {
                // Qualify selected fields with the main table when joining.
                str += ` ${genObject.selectFields
                    ?.map((fld) =>
                        typeof fld == "object"
                            ? `${finalDbName}${tableName}.${fld.fieldName.toString()}` +
                              (fld.alias ? ` as ${fld.alias}` : ``)
                            : `${finalDbName}${tableName}.${String(fld)}`,
                    )
                    .join(",")}`;
            } else {
                str += ` ${genObject.selectFields
                    ?.map((fld) =>
                        typeof fld == "object"
                            ? `${fld.fieldName.toString()}` +
                              (fld.alias ? ` as ${fld.alias}` : ``)
                            : fld,
                    )
                    .join(",")}`;
            }
        } else {
            if (genObject?.join) {
                str += ` ${finalDbName}${tableName}.*`;
            } else {
                str += " *";
            }
        }

        // --- Correlated COUNT(*) sub-queries appended as extra columns ---
        if (genObject?.countSubQueries) {
            let countSqls: string[] = [];

            for (let i = 0; i < genObject.countSubQueries.length; i++) {
                const countSubQuery = genObject.countSubQueries[i];
                if (!countSubQuery) continue;

                const tableAlias = countSubQuery.table_alias;

                let subQStr = `(SELECT COUNT(*)`;

                subQStr += ` FROM ${countSubQuery.table}${
                    tableAlias ? ` ${tableAlias}` : ""
                }`;

                subQStr += ` WHERE (`;

                // src/trg pairs: string targets bind a "?", object targets
                // reference another table's column directly.
                for (let j = 0; j < countSubQuery.srcTrgMap.length; j++) {
                    const csqSrc = countSubQuery.srcTrgMap[j];
                    if (!csqSrc) continue;

                    subQStr += ` ${tableAlias || countSubQuery.table}.${
                        csqSrc.src
                    }`;

                    if (typeof csqSrc.trg == "string") {
                        subQStr += ` = ?`;
                        sqlSearhValues.push(csqSrc.trg);
                    } else if (typeof csqSrc.trg == "object") {
                        subQStr += ` = ${csqSrc.trg.table}.${csqSrc.trg.field}`;
                    }

                    if (j < countSubQuery.srcTrgMap.length - 1) {
                        subQStr += ` AND `;
                    }
                }

                subQStr += ` )) AS ${countSubQuery.alias}`;
                countSqls.push(subQStr);
            }

            str += `, ${countSqls.join(",")}`;
        }

        // --- Joined tables' select columns (skipped for COUNT queries) ---
        if (genObject?.join && !count) {
            // Dedupe by alias/table name; the main table is pre-seeded.
            const existingJoinTableNames: string[] = [tableName];

            str +=
                "," +
                genObject.join
                    .flat()
                    .filter((j) => !isUndefined(j))
                    .map((joinObj) => {
                        const joinTableName = joinObj.alias
                            ? joinObj.alias
                            : joinObj.tableName;

                        if (existingJoinTableNames.includes(joinTableName))
                            return null;
                        existingJoinTableNames.push(joinTableName);

                        if (joinObj.selectFields) {
                            return joinObj.selectFields
                                .map((selectField) => {
                                    if (typeof selectField == "string") {
                                        return `${finalDbName}${joinTableName}.${selectField}`;
                                    } else if (typeof selectField == "object") {
                                        let aliasSelectField = selectField.count
                                            ? `COUNT(${finalDbName}${joinTableName}.${selectField.field})`
                                            : `${finalDbName}${joinTableName}.${selectField.field}`;
                                        if (selectField.alias)
                                            aliasSelectField += ` AS ${selectField.alias}`;
                                        return aliasSelectField;
                                    }
                                })
                                .join(",");
                        } else {
                            return `${finalDbName}${joinTableName}.*`;
                        }
                    })
                    .filter((_) => Boolean(_))
                    .join(",");
        }

        // --- Full-text relevance score column (binds one "?") ---
        if (
            genObject?.fullTextSearch &&
            fullTextMatchStr &&
            fullTextSearchStr
        ) {
            str += `, ${fullTextMatchStr} AS ${genObject.fullTextSearch.scoreAlias}`;
            sqlSearhValues.push(fullTextSearchStr);
        }

        str += ` FROM ${finalDbName}${tableName}`;

        // --- JOIN ... ON clauses ---
        if (genObject?.join) {
            str +=
                " " +
                genObject.join
                    .flat()
                    .filter((j) => !isUndefined(j))
                    .map((join) => {
                        return (
                            join.joinType +
                            " " +
                            (join.alias
                                ? `${finalDbName}${join.tableName}` +
                                  " " +
                                  join.alias
                                : `${finalDbName}${join.tableName}`) +
                            " ON " +
                            (() => {
                                if (Array.isArray(join.match)) {
                                    return (
                                        "(" +
                                        join.match
                                            .map((mtch) =>
                                                generateJoinStr(mtch, join),
                                            )
                                            .join(
                                                join.operator
                                                    ? ` ${join.operator} `
                                                    : " AND ",
                                            ) +
                                        ")"
                                    );
                                } else if (typeof join.match == "object") {
                                    return generateJoinStr(join.match, join);
                                }
                            })()
                        );
                    })
                    .join(" ");
        }

        return str;
    })();

    // --- WHERE fragments, one per key of genObject.query ---
    const sqlSearhString = queryKeys?.map((field) => {
        const queryObj = finalQuery?.[field];
        if (!queryObj) return;

        // __query holds a nested group of conditions joined by the parent's
        // operator (default AND) and parenthesized.
        if (queryObj.__query) {
            const subQueryGroup = queryObj.__query;

            const subSearchKeys = Object.keys(subQueryGroup);
            const subSearchString = subSearchKeys.map((_field) => {
                const newSubQueryObj = subQueryGroup?.[_field];

                if (newSubQueryObj) {
                    return genSqlSrchStr({
                        queryObj: newSubQueryObj,
                        field: newSubQueryObj.fieldName || _field,
                        join: genObject?.join,
                    });
                }
            });

            return (
                "(" +
                subSearchString.join(` ${queryObj.operator || "AND"} `) +
                ")"
            );
        }

        return genSqlSrchStr({
            queryObj,
            field: queryObj.fieldName || field,
            join: genObject?.join,
        });
    });

    const cleanedUpSearchStr = sqlSearhString?.filter(
        (str) => typeof str == "string",
    );

    const isSearchStr =
        cleanedUpSearchStr?.[0] && cleanedUpSearchStr.find((str) => str);

    if (isSearchStr) {
        const stringOperator = genObject?.searchOperator || "AND";
        queryString += ` WHERE ${cleanedUpSearchStr.join(
            ` ${stringOperator} `,
        )}`;
    }

    // Full-text condition in the WHERE clause; binds the search term a
    // second time (the first bind was for the score column above).
    if (genObject?.fullTextSearch && fullTextSearchStr && fullTextMatchStr) {
        queryString += `${isSearchStr ? " AND" : " WHERE"} ${fullTextMatchStr}`;
        sqlSearhValues.push(fullTextSearchStr);
    }

    // --- GROUP BY: string, array of string/object, or single object ---
    if (genObject?.group) {
        let group_by_txt = ``;

        if (typeof genObject.group == "string") {
            group_by_txt = genObject.group;
        } else if (Array.isArray(genObject.group)) {
            for (let i = 0; i < genObject.group.length; i++) {
                const group = genObject.group[i];

                if (typeof group == "string") {
                    group_by_txt += `\`${group.toString()}\``;
                } else if (typeof group == "object" && group.table) {
                    group_by_txt += `${group.table}.${String(group.field)}`;
                } else if (typeof group == "object") {
                    group_by_txt += `${String(group.field)}`;
                }

                if (i < genObject.group.length - 1) {
                    group_by_txt += ",";
                }
            }
        } else if (typeof genObject.group == "object") {
            if (genObject.group.table) {
                group_by_txt = `${genObject.group.table}.${String(genObject.group.field)}`;
            } else {
                group_by_txt = `${String(genObject.group.field)}`;
            }
        }

        queryString += ` GROUP BY ${group_by_txt}`;
    }

    /**
     * Builds one " field STRATEGY" ORDER BY term. A full-text score alias
     * takes precedence over the requested field; with joins the field is
     * qualified with the main table name.
     */
    function grabOrderString(order: ServerQueryParamOrder<T>) {
        let orderFields = [];
        let orderSrt = ``;

        if (genObject?.fullTextSearch && genObject.fullTextSearch.scoreAlias) {
            orderFields.push(genObject.fullTextSearch.scoreAlias);
        } else if (genObject?.join) {
            orderFields.push(
                `${finalDbName}${tableName}.${String(order.field)}`,
            );
        } else {
            orderFields.push(order.field);
        }

        orderSrt += ` ${orderFields.join(", ")} ${order.strategy}`;

        return orderSrt;
    }

    // --- ORDER BY (suppressed for COUNT queries) ---
    if (genObject?.order && !count) {
        let orderSrt = ` ORDER BY`;

        if (Array.isArray(genObject.order)) {
            for (let i = 0; i < genObject.order.length; i++) {
                const order = genObject.order[i];
                if (order) {
                    orderSrt +=
                        grabOrderString(order) +
                        (i < genObject.order.length - 1 ? `,` : "");
                }
            }
        } else {
            orderSrt += grabOrderString(genObject.order);
        }

        queryString += ` ${orderSrt}`;
    }

    // --- LIMIT / OFFSET (suppressed for COUNT queries) ---
    if (genObject?.limit && !count) queryString += ` LIMIT ${genObject.limit}`;
    if (genObject?.offset && !count)
        queryString += ` OFFSET ${genObject.offset}`;

    return {
        string: queryString,
        values: sqlSearhValues,
    };
}
|
||||
76
src/utils/sql-insert-generator.ts
Normal file
76
src/utils/sql-insert-generator.ts
Normal file
@ -0,0 +1,76 @@
|
||||
import type { SQLInsertGenParams, SQLInsertGenReturn } from "../types";
|
||||
|
||||
/**
|
||||
* # SQL Insert Generator
|
||||
*/
|
||||
export default function sqlInsertGenerator({
|
||||
tableName,
|
||||
data,
|
||||
dbFullName,
|
||||
}: SQLInsertGenParams): SQLInsertGenReturn | undefined {
|
||||
const finalDbName = dbFullName ? `${dbFullName}.` : "";
|
||||
|
||||
try {
|
||||
if (Array.isArray(data) && data?.[0]) {
|
||||
let insertKeys: string[] = [];
|
||||
|
||||
data.forEach((dt) => {
|
||||
const kys = Object.keys(dt);
|
||||
kys.forEach((ky) => {
|
||||
if (!insertKeys.includes(ky)) {
|
||||
insertKeys.push(ky);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
let queryBatches: string[] = [];
|
||||
let queryValues: (string | number)[] = [];
|
||||
|
||||
data.forEach((item) => {
|
||||
queryBatches.push(
|
||||
`(${insertKeys
|
||||
.map((ky) => {
|
||||
const value = item[ky];
|
||||
|
||||
const finalValue =
|
||||
typeof value == "string" ||
|
||||
typeof value == "number"
|
||||
? value
|
||||
: value
|
||||
? String(value().value)
|
||||
: null;
|
||||
|
||||
if (!finalValue) {
|
||||
queryValues.push("");
|
||||
return "?";
|
||||
}
|
||||
|
||||
queryValues.push(finalValue);
|
||||
|
||||
const placeholder =
|
||||
typeof value == "function"
|
||||
? value().placeholder
|
||||
: "?";
|
||||
|
||||
return placeholder;
|
||||
})
|
||||
.filter((k) => Boolean(k))
|
||||
.join(",")})`,
|
||||
);
|
||||
});
|
||||
let query = `INSERT INTO ${finalDbName}${tableName} (${insertKeys.join(
|
||||
",",
|
||||
)}) VALUES ${queryBatches.join(",")}`;
|
||||
|
||||
return {
|
||||
query: query,
|
||||
values: queryValues,
|
||||
};
|
||||
} else {
|
||||
return undefined;
|
||||
}
|
||||
} catch (/** @type {any} */ error: any) {
|
||||
console.log(`SQL insert gen ERROR: ${error.message}`);
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
27
src/utils/trim-backups.ts
Normal file
27
src/utils/trim-backups.ts
Normal file
@ -0,0 +1,27 @@
|
||||
import grabDBDir from "../utils/grab-db-dir";
|
||||
import fs from "fs";
|
||||
import type { BunSQLiteConfig } from "../types";
|
||||
import grabSortedBackups from "./grab-sorted-backups";
|
||||
import { AppData } from "../data/app-data";
|
||||
import path from "path";
|
||||
|
||||
type Params = {
|
||||
config: BunSQLiteConfig;
|
||||
};
|
||||
|
||||
export default function trimBackups({ config }: Params) {
|
||||
const { backup_dir } = grabDBDir({ config });
|
||||
|
||||
const backups = grabSortedBackups({ config });
|
||||
|
||||
const max_backups = config.max_backups || AppData["MaxBackups"];
|
||||
|
||||
for (let i = 0; i < backups.length; i++) {
|
||||
const backup_name = backups[i];
|
||||
if (!backup_name) continue;
|
||||
if (i > max_backups - 1) {
|
||||
const backup_file_to_unlink = path.join(backup_dir, backup_name);
|
||||
fs.unlinkSync(backup_file_to_unlink);
|
||||
}
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user