From 7e8bb37c09902f0a5b2480464bcf6192f5feaa37 Mon Sep 17 00:00:00 2001
From: Benjamin Toby
Date: Sat, 5 Jul 2025 14:59:30 +0100
Subject: [PATCH] Updates

---
 client/fetch/index.ts | 32 +-
 dist/client/auth/github/getAccessToken.js | 5 +-
 dist/client/auth/google/getAccessToken.js | 47 +-
 dist/client/auth/logout.js | 161 ++-
 dist/client/auth/post-login.js | 12 +-
 dist/client/fetch/index.d.ts | 25 +-
 dist/client/fetch/index.js | 150 ++-
 dist/client/index.js | 71 +-
 dist/client/media/client.js | 19 +-
 dist/client/media/imageInputFileToBase64.js | 158 ++-
 dist/client/media/imageInputToBase64.js | 150 ++-
 dist/client/media/inputFileToBase64.js | 82 +-
 dist/client/utils/parseClientCookies.js | 5 +-
 dist/console-colors.js | 4 +-
 dist/engine/dsql.js | 149 +--
 dist/engine/dump.js | 13 +-
 dist/engine/schema-to-typedef.js | 68 +-
 dist/index.d.ts | 25 +-
 dist/index.js | 150 ++-
 dist/package-shared/actions/delete-file.js | 117 +-
 .../actions/get-csrf-header-name.js | 5 +-
 dist/package-shared/actions/get-schema.js | 105 +-
 dist/package-shared/actions/get.js | 231 ++--
 dist/package-shared/actions/post.js | 239 ++--
 dist/package-shared/actions/upload-file.js | 121 +-
 dist/package-shared/actions/upload-image.js | 121 +-
 dist/package-shared/actions/users/add-user.js | 165 ++-
 .../actions/users/delete-user.js | 153 ++-
 .../package-shared/actions/users/get-token.js | 20 +-
 dist/package-shared/actions/users/get-user.js | 213 ++--
 .../actions/users/login-user.d.ts | 36 +-
 .../actions/users/login-user.js | 345 +++---
 .../actions/users/logout-user.js | 38 +-
 .../actions/users/reauth-user.js | 331 +++--
 .../actions/users/send-email-code.d.ts | 2 +-
 .../actions/users/send-email-code.js | 184 ++-
 .../actions/users/social/github-auth.js | 307 +++--
 .../actions/users/social/google-auth.js | 277 ++---
 .../actions/users/update-user.js | 165 ++-
 .../package-shared/actions/users/user-auth.js | 44 +-
 .../actions/users/validate-temp-email-code.js | 69 +-
 .../actions/users/validate-token.js | 12 +-
 dist/package-shared/api/crud/delete.d.ts | 21 +
 dist/package-shared/api/crud/delete.js | 14 +
 dist/package-shared/api/crud/get.d.ts | 17 +
 dist/package-shared/api/crud/get.js | 14 +
 dist/package-shared/api/crud/index.d.ts | 12 +
 dist/package-shared/api/crud/index.js | 12 +
 dist/package-shared/api/crud/post.d.ts | 16 +
 dist/package-shared/api/crud/post.js | 19 +
 dist/package-shared/api/crud/put.d.ts | 14 +
 dist/package-shared/api/crud/put.js | 13 +
 dist/package-shared/api/media/delete.d.ts | 5 +
 dist/package-shared/api/media/delete.js | 17 +
 dist/package-shared/api/media/get.d.ts | 3 +
 dist/package-shared/api/media/get.js | 18 +
 dist/package-shared/api/media/index.d.ts | 9 +
 dist/package-shared/api/media/index.js | 9 +
 dist/package-shared/api/media/post.d.ts | 3 +
 dist/package-shared/api/media/post.js | 11 +
 dist/package-shared/api/user/index.d.ts | 2 +
 dist/package-shared/api/user/index.js | 2 +
 dist/package-shared/data/data-types.d.ts | 87 ++
 dist/package-shared/data/data-types.js | 103 ++
 dist/package-shared/data/data-types.ts | 105 ++
 dist/package-shared/data/dataTypes.json | 8 +
 dist/package-shared/dict/app-names.d.ts | 8 +
 dist/package-shared/dict/app-names.js | 8 +
 dist/package-shared/dict/cookie-names.d.ts | 5 +
 dist/package-shared/dict/cookie-names.js | 5 +
 .../dict/local-storage-dict.d.ts | 7 +
 .../package-shared/dict/local-storage-dict.js | 8 +
 dist/package-shared/dict/resource-limits.d.ts | 6 +
 dist/package-shared/dict/resource-limits.js | 6 +
 .../functions/api/query-dsql-api.d.ts | 20 +
 .../functions/api/query-dsql-api.js
| 73 ++ .../package-shared/functions/api/query/get.js | 158 ++- .../functions/api/query/post.js | 166 ++- .../api/social-login/facebookLogin.js | 71 +- .../functions/api/social-login/githubLogin.js | 95 +- .../functions/api/social-login/googleLogin.js | 159 ++- .../api/social-login/handleSocialDb.js | 398 +++--- .../api/social-login/loginSocialUser.js | 129 +- .../functions/api/users/api-create-user.d.ts | 2 +- .../functions/api/users/api-create-user.js | 255 ++-- .../functions/api/users/api-delete-user.js | 69 +- .../functions/api/users/api-get-user.js | 55 +- .../functions/api/users/api-login.js | 298 +++-- .../functions/api/users/api-reauth-user.js | 121 +- .../api/users/api-send-email-code.js | 225 ++-- .../functions/api/users/api-update-user.js | 133 +- .../reset-password/(utils)/encrypt-url.js | 16 +- .../api-send-reset-password-link.js | 78 +- .../api/users/social/api-github-login.js | 147 +-- .../api/users/social/api-google-login.js | 151 ++- .../functions/backend/addAdminUserOnLogin.js | 179 ++- .../functions/backend/addMariadbUser.js | 108 +- .../functions/backend/addUsersTableToDb.d.ts | 3 +- .../functions/backend/addUsersTableToDb.js | 132 +- .../functions/backend/api-cred.d.ts | 4 +- .../functions/backend/api-cred.js | 90 +- .../backend/auth/write-auth-files.js | 79 +- .../backend/cookies/get-auth-cookie-names.js | 17 +- .../backend/createDbSchemaFromDb.d.ts | 3 +- .../functions/backend/createDbSchemaFromDb.js | 232 ++-- .../functions/backend/db/addDbEntry.d.ts | 24 +- .../functions/backend/db/addDbEntry.js | 243 ++-- .../functions/backend/db/deleteDbEntry.d.ts | 17 +- .../functions/backend/db/deleteDbEntry.js | 78 +- .../backend/db/pathTraversalCheck.js | 5 +- .../functions/backend/db/runQuery.js | 274 ++--- .../functions/backend/db/sanitizeSql.js | 11 +- .../functions/backend/db/updateDbEntry.d.ts | 8 +- .../functions/backend/db/updateDbEntry.js | 240 ++-- .../functions/backend/dbHandler.d.ts | 9 +- .../functions/backend/dbHandler.js | 105 +- .../backend/defaultFieldsRegexp.d.ts | 7 - .../functions/backend/defaultFieldsRegexp.js | 9 - .../functions/backend/fullAccessDbHandler.js | 127 +- .../grab-mariadb-main-user-for-user.d.ts | 12 + .../grab-mariadb-main-user-for-user.js | 41 + .../backend/grabNewUsersTableSchema.js | 16 +- .../backend/grabSchemaFieldsFromData.js | 12 +- .../functions/backend/grabUserSchemaData.d.ts | 9 - .../functions/backend/grabUserSchemaData.js | 30 - .../functions/backend/handleNodemailer.js | 107 +- .../backend/html/sanitizeHtmlOptions.js | 4 +- .../functions/backend/httpsRequest.js | 22 +- .../functions/backend/noDatabaseDbHandler.js | 103 +- .../functions/backend/parseDbResults.js | 99 +- .../functions/backend/queues/add-queue.d.ts | 2 +- .../functions/backend/queues/add-queue.js | 65 +- .../functions/backend/queues/delete-queue.js | 41 +- .../functions/backend/queues/get-queue.js | 49 +- .../functions/backend/queues/update-queue.js | 35 +- .../functions/backend/serverError.js | 115 +- .../functions/backend/setUserSchemaData.d.ts | 10 - .../functions/backend/setUserSchemaData.js | 31 - .../functions/backend/su-db-handler.d.ts | 9 + .../functions/backend/su-db-handler.js | 15 + .../functions/backend/suSocketAuth.js | 71 +- .../backend/updateUsersTableSchema.d.ts | 3 +- .../backend/updateUsersTableSchema.js | 100 +- .../functions/backend/user-db-handler.d.ts | 11 + .../functions/backend/user-db-handler.js | 17 + .../backend/varDatabaseDbHandler.d.ts | 3 +- .../functions/backend/varDatabaseDbHandler.js | 101 +- 
.../backend/varReadOnlyDatabaseDbHandler.js | 87 +- .../functions/dsql/db-schema-to-type.js | 35 +- dist/package-shared/functions/dsql/decrypt.js | 23 +- .../functions/dsql/default-fields-regexp.d.ts | 4 +- .../functions/dsql/default-fields-regexp.js | 14 +- dist/package-shared/functions/dsql/encrypt.js | 22 +- .../dsql/generate-type-definitions.d.ts | 3 +- .../dsql/generate-type-definitions.js | 47 +- .../functions/dsql/hashPassword.js | 16 +- .../dsql/sql/sql-delete-generator.d.ts | 7 +- .../dsql/sql/sql-delete-generator.js | 39 +- .../dsql/sql/sql-gen-operator-gen.d.ts | 12 + .../dsql/sql/sql-gen-operator-gen.js | 50 + .../functions/dsql/sql/sql-generator.js | 39 +- .../dsql/sql/sql-insert-generator.js | 5 +- .../dsql/sync-databases/index.d.ts} | 0 .../functions/dsql/sync-databases/index.js | 82 ++ .../create-duplicate-tables-triggers.d.ts | 2 + .../create-duplicate-tables-triggers.js | 2 + .../grab-trigger-name.d.ts | 9 + .../grab-trigger-name.js | 11 + .../stored-proceedure-sql-gen.d.ts | 20 + .../stored-proceedure-sql-gen.js | 23 + .../table-replication-trigger-sql-gen.d.ts | 8 + .../table-replication-trigger-sql-gen.js | 32 + .../trigger-sql-gen.d.ts | 22 + .../trigger-sql-gen.js | 30 + .../functions/email/fns/validate-email.js | 87 +- .../email/verification/email-mx-lookup.js | 18 +- .../email/verification/email-regex-test.js | 5 +- .../email/verification/smtp-verification.js | 16 +- .../web-app/db/grab-user-resource/index.d.ts | 29 + .../web-app/db/grab-user-resource/index.js | 24 + .../web-app/db/grab-user-resource/query.d.ts | 16 + .../web-app/db/grab-user-resource/query.js | 32 + .../handle-mariadb-user-creation.d.ts | 19 + .../handle-mariadb-user-creation.js | 52 + ...-grants-for-databases-cleanup-records.d.ts | 12 + ...er-grants-for-databases-cleanup-records.js | 31 + ...-grants-for-databases-recreate-grants.d.ts | 13 + ...er-grants-for-databases-recreate-grants.js | 68 ++ ...grants-for-databases-recreate-records.d.ts | 13 + ...r-grants-for-databases-recreate-records.js | 60 + ...dle-mariadb-user-grants-for-databases.d.ts | 13 + ...andle-mariadb-user-grants-for-databases.js | 20 + .../handle-mariadb-user-grants.d.ts | 12 + .../handle-mariadb-user-grants.js | 62 + .../handle-mariadb-user-record.d.ts | 12 + .../handle-mariadb-user-record.js | 54 + .../revoke-all-existing-grants.d.ts | 12 + .../revoke-all-existing-grants.js | 45 + dist/package-shared/shell/checkDb.js | 24 +- .../createDbFromSchema/check-db-record.d.ts | 3 +- .../createDbFromSchema/check-db-record.js | 108 +- .../createDbFromSchema/check-table-record.js | 149 +-- .../grab-required-database-schemas.d.ts | 24 + .../grab-required-database-schemas.js | 218 ++++ .../createDbFromSchema/handle-indexes.js | 76 +- .../shell/createDbFromSchema/index.d.ts | 7 +- .../shell/createDbFromSchema/index.js | 211 ++-- dist/package-shared/shell/deploy.js | 16 +- dist/package-shared/shell/encodingUpdate.js | 28 +- .../shell/functions/jsonToBase64.js | 13 +- .../shell/grantFullPriviledges.js | 73 +- dist/package-shared/shell/lessWatch.js | 15 +- .../shell/mariadb-users/handleGrants.js | 117 +- .../mariadb-users/refreshUsersAndGrants.d.ts | 11 - .../mariadb-users/refreshUsersAndGrants.js | 110 -- .../shell/mariadb-users/resetSQLPasswords.js | 113 +- .../shell/mariadb-users/users/create-user.js | 227 ++-- .../shell/mariadb-users/users/update-user.js | 112 +- dist/package-shared/shell/readImage.js | 9 +- .../shell/recoverMainJsonFromDb.js | 124 +- .../shell/resetSQLCredentials.js | 74 -- .../shell/resetSQLCredentialsPasswords.js | 93 +- 
.../shell/scripts/convert-js-to-ts.js | 21 +- .../package-shared/shell/setSQLCredentials.js | 97 +- dist/package-shared/shell/tailwindWatch.js | 13 +- .../shell/test-external-server.js | 24 +- dist/package-shared/shell/testSQLEscape.d.ts | 6 - dist/package-shared/shell/testSQLEscape.js | 120 +- .../shell/updateChildrenTablesOnDb.js | 82 +- .../shell/updateDateTimestamps.js | 28 +- .../shell/updateDbSlugsForTableRecords.js | 32 +- dist/package-shared/shell/updateSSLUsers.js | 29 +- .../shell/utils/camelJoinedtoCamelSpace.js | 5 +- .../create-table-handle-table-record.d.ts | 13 + .../utils/create-table-handle-table-record.js | 83 ++ .../shell/utils/createTable.d.ts | 10 +- .../package-shared/shell/utils/createTable.js | 172 +-- .../package-shared/shell/utils/dbHandler.d.ts | 10 - dist/package-shared/shell/utils/dbHandler.js | 60 - .../shell/utils/drop-all-foreign-keys.d.ts | 9 + .../shell/utils/drop-all-foreign-keys.js | 35 + .../utils/generateColumnDescription.d.ts | 3 +- .../shell/utils/generateColumnDescription.js | 33 +- .../utils/grab-dsql-schema-index-comment.d.ts | 1 + .../utils/grab-dsql-schema-index-comment.js | 3 + .../shell/utils/handle-table-foreign-key.d.ts | 13 + .../shell/utils/handle-table-foreign-key.js | 24 + .../shell/utils/noDatabaseDbHandler.js | 49 +- .../shell/utils/slugToCamelTitle.js | 5 +- .../shell/utils/supplementTable.js | 5 +- .../shell/utils/updateTable.d.ts | 18 +- .../package-shared/shell/utils/updateTable.js | 668 +++++----- .../shell/utils/varDatabaseDbHandler.js | 95 +- dist/package-shared/types/dsql.d.ts | 218 ++-- dist/package-shared/types/dsql.js | 16 +- dist/package-shared/types/index.d.ts | 766 +++++++----- dist/package-shared/types/index.js | 331 ++++- .../utils/backend/config/grab-config.d.ts | 10 + .../utils/backend/config/grab-config.js | 24 + .../backend/config/grab-main-config.d.ts | 10 + .../utils/backend/config/grab-main-config.js | 6 + .../backend/config/update-user-config.d.ts | 11 + .../backend/config/update-user-config.js | 25 + .../utils/backend/export-mariadb-database.js | 16 +- .../utils/backend/global-db/DB_HANDLER.js | 57 +- .../backend/global-db/DSQL_USER_DB_HANDLER.js | 51 +- .../backend/global-db/LOCAL_DB_HANDLER.js | 53 +- .../utils/backend/global-db/NO_DB_HANDLER.js | 12 +- .../backend/global-db/ROOT_DB_HANDLER.js | 12 +- .../package-shared/utils/backend/grabDbSSL.js | 14 +- .../utils/backend/import-mariadb-database.js | 55 +- .../backend/names/grab-db-full-name.d.ts | 8 - .../utils/backend/names/grab-db-full-name.js | 12 - .../utils/backend/names/grab-dir-names.d.ts | 35 +- .../utils/backend/names/grab-dir-names.js | 164 ++- .../backend/names/grab-ip-addresses.d.ts | 6 + .../utils/backend/names/grab-ip-addresses.js | 9 + .../names/replace-datasquirel-db-name.js | 5 +- .../utils/backend/parseCookies.js | 5 +- .../utils/camelJoinedtoCamelSpace.js | 5 +- .../utils/check-if-is-master.js | 5 +- dist/package-shared/utils/console-colors.js | 7 +- .../package-shared/utils/cookies-actions.d.ts | 7 + dist/package-shared/utils/cookies-actions.js | 43 + .../utils/create-user-sql-user.d.ts | 7 + .../utils/create-user-sql-user.js | 41 + .../utils/data-fetching/crud-get.d.ts | 9 +- .../utils/data-fetching/crud-get.js | 114 +- .../utils/data-fetching/crud.d.ts | 10 +- .../utils/data-fetching/crud.js | 122 +- .../utils/data-fetching/method-crud.js | 298 +++-- .../utils/db/conn-db-handler.d.ts | 4 +- .../utils/db/conn-db-handler.js | 152 ++- .../db/schema/data-type-constructor.d.ts | 1 + .../utils/db/schema/data-type-constructor.js | 20 + 
.../utils/db/schema/data-type-parser.d.ts | 10 + .../utils/db/schema/data-type-parser.js | 40 + .../schema/grab-target-db-schema-index.d.ts | 11 + .../db/schema/grab-target-db-schema-index.js | 10 + .../grab-target-table-schema-index.d.ts | 9 + .../schema/grab-target-table-schema-index.js | 11 + .../db/schema/grab-target-table-schema.d.ts | 7 + .../db/schema/grab-target-table-schema.js | 4 + .../utils/db/schema/grab-text-field-type.d.ts | 2 + .../utils/db/schema/grab-text-field-type.js | 19 + ...ma-children-handle-children-databases.d.ts | 7 + ...hema-children-handle-children-databases.js | 88 ++ ...chema-children-handle-children-tables.d.ts | 9 + ...-schema-children-handle-children-tables.js | 133 ++ .../db/schema/resolve-schema-children.d.ts | 7 + .../db/schema/resolve-schema-children.js | 20 + .../schema/resolve-schema-foreign-keys.d.ts | 7 + .../db/schema/resolve-schema-foreign-keys.js | 27 + .../db/schema/resolve-users-schema-ids.d.ts | 10 + .../db/schema/resolve-users-schema-ids.js | 54 + .../utils/db/schema/set-text-field-type.d.ts | 2 + .../utils/db/schema/set-text-field-type.js | 30 + dist/package-shared/utils/delete-by-key.d.ts | 6 + dist/package-shared/utils/delete-by-key.js | 29 + .../package-shared/utils/deserialize-query.js | 14 +- dist/package-shared/utils/ejson.js | 4 +- dist/package-shared/utils/empty-dir.js | 20 +- dist/package-shared/utils/endConnection.js | 4 +- dist/package-shared/utils/envsub.d.ts | 1 + dist/package-shared/utils/envsub.js | 6 + .../utils/generateColumnDescription.js | 5 +- .../utils/grab-api-base-path.d.ts | 7 + .../utils/grab-api-base-path.js | 8 + .../utils/grab-app-main-db-schema.d.ts | 2 + .../utils/grab-app-main-db-schema.js | 11 + .../utils/grab-app-version.d.ts | 2 + dist/package-shared/utils/grab-app-version.js | 11 + .../utils/grab-cookie-expirt-date.js | 12 +- .../utils/grab-db-full-name.d.ts | 12 +- .../package-shared/utils/grab-db-full-name.js | 30 +- dist/package-shared/utils/grab-db-names.d.ts | 21 + dist/package-shared/utils/grab-db-names.js | 14 + .../grab-docker-resource-ip-numbers.d.ts | 13 + .../utils/grab-docker-resource-ip-numbers.js | 15 + .../utils/grab-dsql-connection.js | 18 +- dist/package-shared/utils/grab-host-names.js | 14 +- .../grab-instance-global-network-name.d.ts | 1 + .../grab-instance-global-network-name.js | 4 + dist/package-shared/utils/grab-keys.js | 12 +- .../utils/grab-query-and-values.js | 12 +- .../utils/grab-sql-key-name.d.ts | 11 + .../package-shared/utils/grab-sql-key-name.js | 23 + .../utils/grab-sql-user-name-for-user.d.ts | 1 + .../utils/grab-sql-user-name-for-user.js | 3 + .../utils/grab-sql-user-name.d.ts | 12 + .../utils/grab-sql-user-name.js | 22 + .../utils/grab-user-main-sql-user-name.d.ts | 14 + .../utils/grab-user-main-sql-user-name.js | 16 + .../package-shared/utils/logging/debug-log.js | 21 +- dist/package-shared/utils/normalize-text.d.ts | 1 + dist/package-shared/utils/normalize-text.js | 6 + dist/package-shared/utils/numberfy.js | 5 +- dist/package-shared/utils/parse-env.d.ts | 81 +- dist/package-shared/utils/parse-env.js | 15 +- .../utils/purge-default-fields.d.ts | 6 + .../utils/purge-default-fields.js | 27 + .../package-shared/utils/serialize-cookies.js | 5 +- dist/package-shared/utils/serialize-query.js | 12 +- .../utils/setup-global-network.d.ts | 1 + .../utils/setup-global-network.js | 17 + .../utils/slug-to-normal-text.d.ts | 1 + .../utils/slug-to-normal-text.js | 13 + dist/package-shared/utils/slugToCamelTitle.js | 5 +- dist/package-shared/utils/slugify.d.ts | 2 +- 
dist/package-shared/utils/slugify.js | 22 +- .../utils/sql-equality-parser.d.ts | 2 + .../utils/sql-equality-parser.js | 38 + dist/package-shared/utils/trim-sql.js | 5 +- dist/package-shared/utils/unique-by-key.d.ts | 3 + dist/package-shared/utils/unique-by-key.js | 28 + .../utils/update-grastate-file-to-latest.d.ts | 1 + .../utils/update-grastate-file-to-latest.js | 8 + engine/schema-to-typedef.ts | 2 +- index.ts | 16 +- package-shared/actions/post.ts | 2 +- package-shared/actions/users/login-user.ts | 36 +- .../actions/users/send-email-code.ts | 32 +- package-shared/actions/users/user-auth.ts | 5 +- package-shared/api/crud/delete.ts | 29 + package-shared/api/crud/get.ts | 34 + package-shared/api/crud/index.ts | 14 + package-shared/api/crud/post.ts | 42 + package-shared/api/crud/put.ts | 25 + package-shared/api/media/delete.ts | 32 + package-shared/api/media/get.ts | 33 + package-shared/api/media/index.ts | 11 + package-shared/api/media/post.ts | 24 + package-shared/api/user/index.ts | 3 + package-shared/data/data-types.ts | 105 ++ package-shared/data/dataTypes.json | 8 + package-shared/dict/app-names.ts | 8 + package-shared/dict/cookie-names.ts | 5 + package-shared/dict/local-storage-dict.ts | 9 + package-shared/dict/resource-limits.ts | 7 + .../functions/api/query-dsql-api.ts | 116 ++ package-shared/functions/api/query/get.ts | 3 - package-shared/functions/api/query/post.ts | 9 +- .../api/social-login/handleSocialDb.ts | 15 +- .../functions/api/users/api-create-user.ts | 23 +- .../functions/api/users/api-login.ts | 10 + .../api-send-reset-password-link.ts | 12 +- .../functions/backend/addMariadbUser.ts | 3 +- .../functions/backend/addUsersTableToDb.ts | 20 +- package-shared/functions/backend/api-cred.ts | 86 +- .../backend/cookies/get-auth-cookie-names.ts | 11 +- .../functions/backend/createDbSchemaFromDb.ts | 71 +- .../functions/backend/db/addDbEntry.ts | 324 +++-- .../functions/backend/db/deleteDbEntry.ts | 25 +- .../functions/backend/db/runQuery.ts | 6 +- .../functions/backend/db/updateDbEntry.ts | 41 +- package-shared/functions/backend/dbHandler.ts | 88 +- .../functions/backend/defaultFieldsRegexp.ts | 9 - .../grab-mariadb-main-user-for-user.ts | 70 ++ .../functions/backend/grabUserSchemaData.ts | 38 - .../functions/backend/parseDbResults.ts | 2 +- .../functions/backend/setUserSchemaData.ts | 43 - .../functions/backend/su-db-handler.ts | 31 + .../backend/updateUsersTableSchema.ts | 31 +- .../functions/backend/user-db-handler.ts | 39 + .../functions/backend/varDatabaseDbHandler.ts | 3 +- .../functions/dsql/db-schema-to-type.ts | 17 +- package-shared/functions/dsql/decrypt.ts | 1 + .../functions/dsql/default-fields-regexp.ts | 13 +- .../dsql/generate-type-definitions.ts | 40 +- .../dsql/sql/sql-delete-generator.ts | 49 +- .../dsql/sql/sql-gen-operator-gen.ts | 50 + .../functions/dsql/sql/sql-generator.ts | 30 +- .../functions/dsql/sync-databases/index.ts | 117 ++ .../create-duplicate-tables-triggers.ts | 3 + .../database-replication.md | 106 ++ .../database-replication.sql | 212 ++++ .../grab-trigger-name.ts | 23 + .../stored-proceedure-sql-gen.ts | 44 + .../table-replication-trigger-sql-gen.ts | 48 + .../table-replication.md | 92 ++ .../trigger-sql-gen.ts | 53 + .../web-app/db/grab-user-resource/index.ts | 44 + .../web-app/db/grab-user-resource/query.ts | 39 + .../handle-mariadb-user-creation.ts | 87 ++ ...er-grants-for-databases-cleanup-records.ts | 66 + ...er-grants-for-databases-recreate-grants.ts | 105 ++ ...r-grants-for-databases-recreate-records.ts | 107 ++ 
...andle-mariadb-user-grants-for-databases.ts | 40 + .../handle-mariadb-user-grants.ts | 106 ++ .../handle-mariadb-user-record.ts | 89 ++ .../revoke-all-existing-grants.ts | 70 ++ .../createDbFromSchema/check-db-record.ts | 52 +- .../createDbFromSchema/check-table-record.ts | 23 +- .../grab-required-database-schemas.ts | 317 +++++ .../createDbFromSchema/handle-indexes.ts | 49 +- .../shell/createDbFromSchema/index.ts | 397 +++--- package-shared/shell/grantFullPriviledges.ts | 16 +- .../mariadb-users/refreshUsersAndGrants.ts | 137 --- .../shell/mariadb-users/resetSQLPasswords.ts | 7 +- .../shell/mariadb-users/users/create-user.ts | 22 +- package-shared/shell/resetSQLCredentials.ts | 80 -- .../shell/resetSQLCredentialsPasswords.ts | 5 +- package-shared/shell/setSQLCredentials.ts | 5 +- package-shared/shell/testSQLEscape.ts | 18 +- package-shared/shell/updateSSLUsers.ts | 3 +- .../utils/create-table-handle-table-record.ts | 112 ++ package-shared/shell/utils/createTable.ts | 162 +-- package-shared/shell/utils/dbHandler.ts | 58 - .../shell/utils/drop-all-foreign-keys.ts | 53 + .../shell/utils/generateColumnDescription.ts | 38 +- .../utils/grab-dsql-schema-index-comment.ts | 3 + .../shell/utils/handle-table-foreign-key.ts | 51 + .../shell/utils/noDatabaseDbHandler.ts | 2 +- package-shared/shell/utils/updateTable.ts | 651 ++++------ .../shell/utils/varDatabaseDbHandler.ts | 3 +- package-shared/sqls/grab-foreign-key.sql | 16 + package-shared/types/dsql.ts | 236 ++-- package-shared/types/index.ts | 1072 ++++++++++++----- .../utils/backend/config/grab-config.ts | 47 + .../utils/backend/config/grab-main-config.ts | 18 + .../backend/config/update-user-config.ts | 50 + .../utils/backend/names/grab-db-full-name.ts | 22 - .../utils/backend/names/grab-dir-names.ts | 137 ++- .../utils/backend/names/grab-ip-addresses.ts | 13 + package-shared/utils/cookies-actions.ts | 74 ++ package-shared/utils/create-user-sql-user.ts | 72 ++ .../utils/data-fetching/crud-get.ts | 22 +- package-shared/utils/data-fetching/crud.ts | 106 +- .../utils/data-fetching/method-crud.ts | 5 +- package-shared/utils/db/conn-db-handler.ts | 22 +- .../utils/db/schema/data-type-constructor.ts | 29 + .../utils/db/schema/data-type-parser.ts | 54 + .../db/schema/grab-target-db-schema-index.ts | 36 + .../schema/grab-target-table-schema-index.ts | 29 + .../db/schema/grab-target-table-schema.ts | 16 + .../utils/db/schema/grab-text-field-type.ts | 16 + ...hema-children-handle-children-databases.ts | 132 ++ ...-schema-children-handle-children-tables.ts | 229 ++++ .../db/schema/resolve-schema-children.ts | 32 + .../db/schema/resolve-schema-foreign-keys.ts | 38 + .../db/schema/resolve-users-schema-ids.ts | 78 ++ .../utils/db/schema/set-text-field-type.ts | 31 + package-shared/utils/delete-by-key.ts | 37 + package-shared/utils/envsub.ts | 6 + package-shared/utils/grab-api-base-path.ts | 16 + .../utils/grab-app-main-db-schema.ts | 18 + package-shared/utils/grab-app-version.ts | 17 + package-shared/utils/grab-db-full-name.ts | 42 +- package-shared/utils/grab-db-names.ts | 27 + .../utils/grab-docker-resource-ip-numbers.ts | 15 + .../grab-instance-global-network-name.ts | 7 + package-shared/utils/grab-sql-key-name.ts | 24 + .../utils/grab-sql-user-name-for-user.ts | 5 + package-shared/utils/grab-sql-user-name.ts | 42 + .../utils/grab-user-main-sql-user-name.ts | 30 + package-shared/utils/normalize-text.ts | 6 + package-shared/utils/parse-env.ts | 8 +- package-shared/utils/purge-default-fields.ts | 35 + package-shared/utils/setup-global-network.ts | 20 + 
package-shared/utils/slug-to-normal-text.ts | 15 + package-shared/utils/slugify.ts | 27 +- package-shared/utils/sql-equality-parser.ts | 42 + package-shared/utils/unique-by-key.ts | 36 + .../utils/update-grastate-file-to-latest.ts | 18 + package.json | 2 +- tsconfig.json | 6 +- 526 files changed, 17560 insertions(+), 11386 deletions(-) create mode 100644 dist/package-shared/api/crud/delete.d.ts create mode 100644 dist/package-shared/api/crud/delete.js create mode 100644 dist/package-shared/api/crud/get.d.ts create mode 100644 dist/package-shared/api/crud/get.js create mode 100644 dist/package-shared/api/crud/index.d.ts create mode 100644 dist/package-shared/api/crud/index.js create mode 100644 dist/package-shared/api/crud/post.d.ts create mode 100644 dist/package-shared/api/crud/post.js create mode 100644 dist/package-shared/api/crud/put.d.ts create mode 100644 dist/package-shared/api/crud/put.js create mode 100644 dist/package-shared/api/media/delete.d.ts create mode 100644 dist/package-shared/api/media/delete.js create mode 100644 dist/package-shared/api/media/get.d.ts create mode 100644 dist/package-shared/api/media/get.js create mode 100644 dist/package-shared/api/media/index.d.ts create mode 100644 dist/package-shared/api/media/index.js create mode 100644 dist/package-shared/api/media/post.d.ts create mode 100644 dist/package-shared/api/media/post.js create mode 100644 dist/package-shared/api/user/index.d.ts create mode 100644 dist/package-shared/api/user/index.js create mode 100644 dist/package-shared/data/data-types.d.ts create mode 100644 dist/package-shared/data/data-types.js create mode 100644 dist/package-shared/data/data-types.ts create mode 100644 dist/package-shared/dict/app-names.d.ts create mode 100644 dist/package-shared/dict/app-names.js create mode 100644 dist/package-shared/dict/cookie-names.d.ts create mode 100644 dist/package-shared/dict/cookie-names.js create mode 100644 dist/package-shared/dict/local-storage-dict.d.ts create mode 100644 dist/package-shared/dict/local-storage-dict.js create mode 100644 dist/package-shared/dict/resource-limits.d.ts create mode 100644 dist/package-shared/dict/resource-limits.js create mode 100644 dist/package-shared/functions/api/query-dsql-api.d.ts create mode 100644 dist/package-shared/functions/api/query-dsql-api.js delete mode 100644 dist/package-shared/functions/backend/defaultFieldsRegexp.d.ts delete mode 100644 dist/package-shared/functions/backend/defaultFieldsRegexp.js create mode 100644 dist/package-shared/functions/backend/grab-mariadb-main-user-for-user.d.ts create mode 100644 dist/package-shared/functions/backend/grab-mariadb-main-user-for-user.js delete mode 100644 dist/package-shared/functions/backend/grabUserSchemaData.d.ts delete mode 100644 dist/package-shared/functions/backend/grabUserSchemaData.js delete mode 100644 dist/package-shared/functions/backend/setUserSchemaData.d.ts delete mode 100644 dist/package-shared/functions/backend/setUserSchemaData.js create mode 100644 dist/package-shared/functions/backend/su-db-handler.d.ts create mode 100644 dist/package-shared/functions/backend/su-db-handler.js create mode 100644 dist/package-shared/functions/backend/user-db-handler.d.ts create mode 100644 dist/package-shared/functions/backend/user-db-handler.js create mode 100644 dist/package-shared/functions/dsql/sql/sql-gen-operator-gen.d.ts create mode 100644 dist/package-shared/functions/dsql/sql/sql-gen-operator-gen.js rename dist/package-shared/{shell/resetSQLCredentials.d.ts => functions/dsql/sync-databases/index.d.ts} 
(100%) create mode 100644 dist/package-shared/functions/dsql/sync-databases/index.js create mode 100644 dist/package-shared/functions/dsql/triggers-and-stored-proceedures/create-duplicate-tables-triggers.d.ts create mode 100644 dist/package-shared/functions/dsql/triggers-and-stored-proceedures/create-duplicate-tables-triggers.js create mode 100644 dist/package-shared/functions/dsql/triggers-and-stored-proceedures/grab-trigger-name.d.ts create mode 100644 dist/package-shared/functions/dsql/triggers-and-stored-proceedures/grab-trigger-name.js create mode 100644 dist/package-shared/functions/dsql/triggers-and-stored-proceedures/stored-proceedure-sql-gen.d.ts create mode 100644 dist/package-shared/functions/dsql/triggers-and-stored-proceedures/stored-proceedure-sql-gen.js create mode 100644 dist/package-shared/functions/dsql/triggers-and-stored-proceedures/table-replication-trigger-sql-gen.d.ts create mode 100644 dist/package-shared/functions/dsql/triggers-and-stored-proceedures/table-replication-trigger-sql-gen.js create mode 100644 dist/package-shared/functions/dsql/triggers-and-stored-proceedures/trigger-sql-gen.d.ts create mode 100644 dist/package-shared/functions/dsql/triggers-and-stored-proceedures/trigger-sql-gen.js create mode 100644 dist/package-shared/functions/web-app/db/grab-user-resource/index.d.ts create mode 100644 dist/package-shared/functions/web-app/db/grab-user-resource/index.js create mode 100644 dist/package-shared/functions/web-app/db/grab-user-resource/query.d.ts create mode 100644 dist/package-shared/functions/web-app/db/grab-user-resource/query.js create mode 100644 dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-creation.d.ts create mode 100644 dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-creation.js create mode 100644 dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-cleanup-records.d.ts create mode 100644 dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-cleanup-records.js create mode 100644 dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-recreate-grants.d.ts create mode 100644 dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-recreate-grants.js create mode 100644 dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-recreate-records.d.ts create mode 100644 dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-recreate-records.js create mode 100644 dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases.d.ts create mode 100644 dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases.js create mode 100644 dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants.d.ts create mode 100644 dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants.js create mode 100644 dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-record.d.ts create mode 100644 dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-record.js create mode 100644 dist/package-shared/functions/web-app/mariadb-user/revoke-all-existing-grants.d.ts create mode 100644 dist/package-shared/functions/web-app/mariadb-user/revoke-all-existing-grants.js create mode 100644 dist/package-shared/shell/createDbFromSchema/grab-required-database-schemas.d.ts create mode 100644 
dist/package-shared/shell/createDbFromSchema/grab-required-database-schemas.js delete mode 100644 dist/package-shared/shell/mariadb-users/refreshUsersAndGrants.d.ts delete mode 100644 dist/package-shared/shell/mariadb-users/refreshUsersAndGrants.js delete mode 100644 dist/package-shared/shell/resetSQLCredentials.js create mode 100644 dist/package-shared/shell/utils/create-table-handle-table-record.d.ts create mode 100644 dist/package-shared/shell/utils/create-table-handle-table-record.js delete mode 100644 dist/package-shared/shell/utils/dbHandler.d.ts delete mode 100644 dist/package-shared/shell/utils/dbHandler.js create mode 100644 dist/package-shared/shell/utils/drop-all-foreign-keys.d.ts create mode 100644 dist/package-shared/shell/utils/drop-all-foreign-keys.js create mode 100644 dist/package-shared/shell/utils/grab-dsql-schema-index-comment.d.ts create mode 100644 dist/package-shared/shell/utils/grab-dsql-schema-index-comment.js create mode 100644 dist/package-shared/shell/utils/handle-table-foreign-key.d.ts create mode 100644 dist/package-shared/shell/utils/handle-table-foreign-key.js create mode 100644 dist/package-shared/utils/backend/config/grab-config.d.ts create mode 100644 dist/package-shared/utils/backend/config/grab-config.js create mode 100644 dist/package-shared/utils/backend/config/grab-main-config.d.ts create mode 100644 dist/package-shared/utils/backend/config/grab-main-config.js create mode 100644 dist/package-shared/utils/backend/config/update-user-config.d.ts create mode 100644 dist/package-shared/utils/backend/config/update-user-config.js delete mode 100644 dist/package-shared/utils/backend/names/grab-db-full-name.d.ts delete mode 100644 dist/package-shared/utils/backend/names/grab-db-full-name.js create mode 100644 dist/package-shared/utils/backend/names/grab-ip-addresses.d.ts create mode 100644 dist/package-shared/utils/backend/names/grab-ip-addresses.js create mode 100644 dist/package-shared/utils/cookies-actions.d.ts create mode 100644 dist/package-shared/utils/cookies-actions.js create mode 100644 dist/package-shared/utils/create-user-sql-user.d.ts create mode 100644 dist/package-shared/utils/create-user-sql-user.js create mode 100644 dist/package-shared/utils/db/schema/data-type-constructor.d.ts create mode 100644 dist/package-shared/utils/db/schema/data-type-constructor.js create mode 100644 dist/package-shared/utils/db/schema/data-type-parser.d.ts create mode 100644 dist/package-shared/utils/db/schema/data-type-parser.js create mode 100644 dist/package-shared/utils/db/schema/grab-target-db-schema-index.d.ts create mode 100644 dist/package-shared/utils/db/schema/grab-target-db-schema-index.js create mode 100644 dist/package-shared/utils/db/schema/grab-target-table-schema-index.d.ts create mode 100644 dist/package-shared/utils/db/schema/grab-target-table-schema-index.js create mode 100644 dist/package-shared/utils/db/schema/grab-target-table-schema.d.ts create mode 100644 dist/package-shared/utils/db/schema/grab-target-table-schema.js create mode 100644 dist/package-shared/utils/db/schema/grab-text-field-type.d.ts create mode 100644 dist/package-shared/utils/db/schema/grab-text-field-type.js create mode 100644 dist/package-shared/utils/db/schema/resolve-schema-children-handle-children-databases.d.ts create mode 100644 dist/package-shared/utils/db/schema/resolve-schema-children-handle-children-databases.js create mode 100644 dist/package-shared/utils/db/schema/resolve-schema-children-handle-children-tables.d.ts create mode 100644 
dist/package-shared/utils/db/schema/resolve-schema-children-handle-children-tables.js create mode 100644 dist/package-shared/utils/db/schema/resolve-schema-children.d.ts create mode 100644 dist/package-shared/utils/db/schema/resolve-schema-children.js create mode 100644 dist/package-shared/utils/db/schema/resolve-schema-foreign-keys.d.ts create mode 100644 dist/package-shared/utils/db/schema/resolve-schema-foreign-keys.js create mode 100644 dist/package-shared/utils/db/schema/resolve-users-schema-ids.d.ts create mode 100644 dist/package-shared/utils/db/schema/resolve-users-schema-ids.js create mode 100644 dist/package-shared/utils/db/schema/set-text-field-type.d.ts create mode 100644 dist/package-shared/utils/db/schema/set-text-field-type.js create mode 100644 dist/package-shared/utils/delete-by-key.d.ts create mode 100644 dist/package-shared/utils/delete-by-key.js create mode 100644 dist/package-shared/utils/envsub.d.ts create mode 100644 dist/package-shared/utils/envsub.js create mode 100644 dist/package-shared/utils/grab-api-base-path.d.ts create mode 100644 dist/package-shared/utils/grab-api-base-path.js create mode 100644 dist/package-shared/utils/grab-app-main-db-schema.d.ts create mode 100644 dist/package-shared/utils/grab-app-main-db-schema.js create mode 100644 dist/package-shared/utils/grab-app-version.d.ts create mode 100644 dist/package-shared/utils/grab-app-version.js create mode 100644 dist/package-shared/utils/grab-db-names.d.ts create mode 100644 dist/package-shared/utils/grab-db-names.js create mode 100644 dist/package-shared/utils/grab-docker-resource-ip-numbers.d.ts create mode 100644 dist/package-shared/utils/grab-docker-resource-ip-numbers.js create mode 100644 dist/package-shared/utils/grab-instance-global-network-name.d.ts create mode 100644 dist/package-shared/utils/grab-instance-global-network-name.js create mode 100644 dist/package-shared/utils/grab-sql-key-name.d.ts create mode 100644 dist/package-shared/utils/grab-sql-key-name.js create mode 100644 dist/package-shared/utils/grab-sql-user-name-for-user.d.ts create mode 100644 dist/package-shared/utils/grab-sql-user-name-for-user.js create mode 100644 dist/package-shared/utils/grab-sql-user-name.d.ts create mode 100644 dist/package-shared/utils/grab-sql-user-name.js create mode 100644 dist/package-shared/utils/grab-user-main-sql-user-name.d.ts create mode 100644 dist/package-shared/utils/grab-user-main-sql-user-name.js create mode 100644 dist/package-shared/utils/normalize-text.d.ts create mode 100644 dist/package-shared/utils/normalize-text.js create mode 100644 dist/package-shared/utils/purge-default-fields.d.ts create mode 100644 dist/package-shared/utils/purge-default-fields.js create mode 100644 dist/package-shared/utils/setup-global-network.d.ts create mode 100644 dist/package-shared/utils/setup-global-network.js create mode 100644 dist/package-shared/utils/slug-to-normal-text.d.ts create mode 100644 dist/package-shared/utils/slug-to-normal-text.js create mode 100644 dist/package-shared/utils/sql-equality-parser.d.ts create mode 100644 dist/package-shared/utils/sql-equality-parser.js create mode 100644 dist/package-shared/utils/unique-by-key.d.ts create mode 100644 dist/package-shared/utils/unique-by-key.js create mode 100644 dist/package-shared/utils/update-grastate-file-to-latest.d.ts create mode 100644 dist/package-shared/utils/update-grastate-file-to-latest.js create mode 100644 package-shared/api/crud/delete.ts create mode 100644 package-shared/api/crud/get.ts create mode 100644 
package-shared/api/crud/index.ts create mode 100644 package-shared/api/crud/post.ts create mode 100644 package-shared/api/crud/put.ts create mode 100644 package-shared/api/media/delete.ts create mode 100644 package-shared/api/media/get.ts create mode 100644 package-shared/api/media/index.ts create mode 100644 package-shared/api/media/post.ts create mode 100644 package-shared/api/user/index.ts create mode 100644 package-shared/data/data-types.ts create mode 100644 package-shared/dict/app-names.ts create mode 100644 package-shared/dict/cookie-names.ts create mode 100644 package-shared/dict/local-storage-dict.ts create mode 100644 package-shared/dict/resource-limits.ts create mode 100644 package-shared/functions/api/query-dsql-api.ts delete mode 100644 package-shared/functions/backend/defaultFieldsRegexp.ts create mode 100644 package-shared/functions/backend/grab-mariadb-main-user-for-user.ts delete mode 100644 package-shared/functions/backend/grabUserSchemaData.ts delete mode 100644 package-shared/functions/backend/setUserSchemaData.ts create mode 100644 package-shared/functions/backend/su-db-handler.ts create mode 100644 package-shared/functions/backend/user-db-handler.ts create mode 100644 package-shared/functions/dsql/sql/sql-gen-operator-gen.ts create mode 100644 package-shared/functions/dsql/sync-databases/index.ts create mode 100644 package-shared/functions/dsql/triggers-and-stored-proceedures/create-duplicate-tables-triggers.ts create mode 100644 package-shared/functions/dsql/triggers-and-stored-proceedures/database-replication.md create mode 100644 package-shared/functions/dsql/triggers-and-stored-proceedures/database-replication.sql create mode 100644 package-shared/functions/dsql/triggers-and-stored-proceedures/grab-trigger-name.ts create mode 100644 package-shared/functions/dsql/triggers-and-stored-proceedures/stored-proceedure-sql-gen.ts create mode 100644 package-shared/functions/dsql/triggers-and-stored-proceedures/table-replication-trigger-sql-gen.ts create mode 100644 package-shared/functions/dsql/triggers-and-stored-proceedures/table-replication.md create mode 100644 package-shared/functions/dsql/triggers-and-stored-proceedures/trigger-sql-gen.ts create mode 100644 package-shared/functions/web-app/db/grab-user-resource/index.ts create mode 100644 package-shared/functions/web-app/db/grab-user-resource/query.ts create mode 100644 package-shared/functions/web-app/mariadb-user/handle-mariadb-user-creation.ts create mode 100644 package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-cleanup-records.ts create mode 100644 package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-recreate-grants.ts create mode 100644 package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-recreate-records.ts create mode 100644 package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases.ts create mode 100644 package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants.ts create mode 100644 package-shared/functions/web-app/mariadb-user/handle-mariadb-user-record.ts create mode 100644 package-shared/functions/web-app/mariadb-user/revoke-all-existing-grants.ts create mode 100644 package-shared/shell/createDbFromSchema/grab-required-database-schemas.ts delete mode 100644 package-shared/shell/mariadb-users/refreshUsersAndGrants.ts delete mode 100644 package-shared/shell/resetSQLCredentials.ts create mode 100644 package-shared/shell/utils/create-table-handle-table-record.ts 
delete mode 100644 package-shared/shell/utils/dbHandler.ts create mode 100644 package-shared/shell/utils/drop-all-foreign-keys.ts create mode 100644 package-shared/shell/utils/grab-dsql-schema-index-comment.ts create mode 100644 package-shared/shell/utils/handle-table-foreign-key.ts create mode 100644 package-shared/sqls/grab-foreign-key.sql create mode 100644 package-shared/utils/backend/config/grab-config.ts create mode 100644 package-shared/utils/backend/config/grab-main-config.ts create mode 100644 package-shared/utils/backend/config/update-user-config.ts delete mode 100644 package-shared/utils/backend/names/grab-db-full-name.ts create mode 100644 package-shared/utils/backend/names/grab-ip-addresses.ts create mode 100644 package-shared/utils/cookies-actions.ts create mode 100644 package-shared/utils/create-user-sql-user.ts create mode 100644 package-shared/utils/db/schema/data-type-constructor.ts create mode 100644 package-shared/utils/db/schema/data-type-parser.ts create mode 100644 package-shared/utils/db/schema/grab-target-db-schema-index.ts create mode 100644 package-shared/utils/db/schema/grab-target-table-schema-index.ts create mode 100644 package-shared/utils/db/schema/grab-target-table-schema.ts create mode 100644 package-shared/utils/db/schema/grab-text-field-type.ts create mode 100644 package-shared/utils/db/schema/resolve-schema-children-handle-children-databases.ts create mode 100644 package-shared/utils/db/schema/resolve-schema-children-handle-children-tables.ts create mode 100644 package-shared/utils/db/schema/resolve-schema-children.ts create mode 100644 package-shared/utils/db/schema/resolve-schema-foreign-keys.ts create mode 100644 package-shared/utils/db/schema/resolve-users-schema-ids.ts create mode 100644 package-shared/utils/db/schema/set-text-field-type.ts create mode 100644 package-shared/utils/delete-by-key.ts create mode 100644 package-shared/utils/envsub.ts create mode 100644 package-shared/utils/grab-api-base-path.ts create mode 100644 package-shared/utils/grab-app-main-db-schema.ts create mode 100644 package-shared/utils/grab-app-version.ts create mode 100644 package-shared/utils/grab-db-names.ts create mode 100644 package-shared/utils/grab-docker-resource-ip-numbers.ts create mode 100644 package-shared/utils/grab-instance-global-network-name.ts create mode 100644 package-shared/utils/grab-sql-key-name.ts create mode 100644 package-shared/utils/grab-sql-user-name-for-user.ts create mode 100644 package-shared/utils/grab-sql-user-name.ts create mode 100644 package-shared/utils/grab-user-main-sql-user-name.ts create mode 100644 package-shared/utils/normalize-text.ts create mode 100644 package-shared/utils/purge-default-fields.ts create mode 100644 package-shared/utils/setup-global-network.ts create mode 100644 package-shared/utils/slug-to-normal-text.ts create mode 100644 package-shared/utils/sql-equality-parser.ts create mode 100644 package-shared/utils/unique-by-key.ts create mode 100644 package-shared/utils/update-grastate-file-to-latest.ts diff --git a/client/fetch/index.ts b/client/fetch/index.ts index dd829a8..38201e9 100644 --- a/client/fetch/index.ts +++ b/client/fetch/index.ts @@ -1,7 +1,6 @@ import _ from "lodash"; -import getCsrfHeaderName from "../../package-shared/actions/get-csrf-header-name"; -type FetchApiOptions = { +type FetchApiOptions = { method: | "POST" | "GET" @@ -13,7 +12,7 @@ type FetchApiOptions = { | "delete" | "put" | "patch"; - body?: object | string; + body?: T | string; headers?: FetchHeader; }; @@ -31,25 +30,34 @@ export type 
FetchApiReturn = { /** * # Fetch API */ -export default async function fetchApi( +export default async function fetchApi< + T extends { [k: string]: any } = { [k: string]: any }, + R extends any = any +>( url: string, - options?: FetchApiOptions, + options?: FetchApiOptions, csrf?: boolean, - /** Key to use to grab local Storage csrf value. */ - localStorageCSRFKey?: string -): Promise { + /** + * Key to use to grab local Storage csrf value. + */ + localStorageCSRFKey?: string, + /** + * Key with which to set the request header csrf + * value + */ + csrfHeaderKey?: string +): Promise { let data; - const csrfValue = localStorage.getItem( - localStorageCSRFKey || getCsrfHeaderName() - ); + const csrfKey = "x-dsql-csrf-key"; + const csrfValue = localStorage.getItem(localStorageCSRFKey || csrfKey); let finalHeaders = { "Content-Type": "application/json", } as FetchHeader; if (csrf && csrfValue) { - finalHeaders[getCsrfHeaderName()] = csrfValue; + finalHeaders[localStorageCSRFKey || csrfKey] = csrfValue; } if (typeof options === "string") { diff --git a/dist/client/auth/github/getAccessToken.js b/dist/client/auth/github/getAccessToken.js index 9b8ea89..c947eb1 100644 --- a/dist/client/auth/github/getAccessToken.js +++ b/dist/client/auth/github/getAccessToken.js @@ -1,12 +1,9 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = getAccessToken; /** * Login with Github Function * =============================================================================== * @description This function uses github api to login a user with datasquirel */ -function getAccessToken({ clientId, redirectUrl, setLoading, scopes, }) { +export default function getAccessToken({ clientId, redirectUrl, setLoading, scopes, }) { if (setLoading) setLoading(true); const scopeString = scopes ? scopes.join("%20") : "read:user"; diff --git a/dist/client/auth/google/getAccessToken.js b/dist/client/auth/google/getAccessToken.js index 7c53bbe..1adbbdc 100644 --- a/dist/client/auth/google/getAccessToken.js +++ b/dist/client/auth/google/getAccessToken.js @@ -1,44 +1,29 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = getAccessToken; -exports.googleLogin = googleLogin; let interval; /** * Login with Google Function * =============================================================================== * @description This function uses google identity api to login a user with datasquirel */ -function getAccessToken(params) { - return __awaiter(this, void 0, void 0, function* () { - var _a, _b; - (_a = params.setLoading) === null || _a === void 0 ? 
void 0 : _a.call(params, true); - const response = (yield new Promise((resolve, reject) => { - interval = setInterval(() => { - // @ts-ignore - let google = window.google; - if (google) { - window.clearInterval(interval); - resolve(googleLogin(Object.assign(Object.assign({}, params), { google }))); - } - }, 500); - })); - (_b = params.setLoading) === null || _b === void 0 ? void 0 : _b.call(params, false); - return response; - }); +export default async function getAccessToken(params) { + var _a, _b; + (_a = params.setLoading) === null || _a === void 0 ? void 0 : _a.call(params, true); + const response = (await new Promise((resolve, reject) => { + interval = setInterval(() => { + // @ts-ignore + let google = window.google; + if (google) { + window.clearInterval(interval); + resolve(googleLogin(Object.assign(Object.assign({}, params), { google }))); + } + }, 500); + })); + (_b = params.setLoading) === null || _b === void 0 ? void 0 : _b.call(params, false); + return response; } /** * # Google Login Function */ -function googleLogin({ google, clientId, setLoading, triggerPrompt, }) { +export function googleLogin({ google, clientId, setLoading, triggerPrompt, }) { setTimeout(() => { setLoading === null || setLoading === void 0 ? void 0 : setLoading(false); }, 3000); diff --git a/dist/client/auth/logout.js b/dist/client/auth/logout.js index a8d3390..8d61446 100644 --- a/dist/client/auth/logout.js +++ b/dist/client/auth/logout.js @@ -1,103 +1,86 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = logout; -const get_csrf_header_name_1 = __importDefault(require("../../package-shared/actions/get-csrf-header-name")); -const parseClientCookies_1 = __importDefault(require("../utils/parseClientCookies")); +import getCsrfHeaderName from "../../package-shared/actions/get-csrf-header-name"; +import parseClientCookies from "../utils/parseClientCookies"; /** * Login with Google Function * =============================================================================== * @description This function uses google identity api to login a user with datasquirel */ -function logout(params) { - return __awaiter(this, void 0, void 0, function* () { +export default async function logout(params) { + try { + const localUser = localStorage.getItem("user"); + let targetUser; try { - const localUser = localStorage.getItem("user"); - let targetUser; - try { - targetUser = JSON.parse(localUser || ""); - } - catch (error) { - console.log(error); - } - if (!targetUser) { - return false; - } - //////////////////////////////////////// - //////////////////////////////////////// - //////////////////////////////////////// - const cookies = (0, parseClientCookies_1.default)(); - const socialId = (cookies === null || cookies === void 0 ? void 0 : cookies.datasquirel_social_id) && - typeof cookies.datasquirel_social_id == "string" && - !cookies.datasquirel_social_id.match(/^null$/i) - ? cookies.datasquirel_social_id - : null; - //////////////////////////////////////// - //////////////////////////////////////// - //////////////////////////////////////// - localStorage.setItem("user", "{}"); - localStorage.removeItem((0, get_csrf_header_name_1.default)()); - document.cookie = `datasquirel_social_id=null;samesite=strict;path=/`; - //////////////////////////////////////// - //////////////////////////////////////// - //////////////////////////////////////// - const response = yield new Promise((resolve, reject) => { - if (socialId && !(socialId === null || socialId === void 0 ? void 0 : socialId.match(/^null$/i))) { - const googleClientId = params === null || params === void 0 ? void 0 : params.googleClientId; - if (googleClientId) { - const googleScript = document.createElement("script"); - googleScript.src = "https://accounts.google.com/gsi/client"; - googleScript.className = "social-script-tag"; - document.body.appendChild(googleScript); - //////////////////////////////////////// - //////////////////////////////////////// - //////////////////////////////////////// - googleScript.onload = function (e) { - // @ts-ignore - const google = window.google; - if (google) { - //////////////////////////////////////// - //////////////////////////////////////// - //////////////////////////////////////// - google.accounts.id.initialize({ - client_id: googleClientId, - }); - google.accounts.id.revoke(socialId, (done) => { - console.log(done.error); - resolve(true); - }); - //////////////////////////////////////// - //////////////////////////////////////// - //////////////////////////////////////// - } - }; - } - else { - resolve(true); - } + targetUser = JSON.parse(localUser || ""); + } + catch (error) { + console.log(error); + } + if (!targetUser) { + return false; + } + //////////////////////////////////////// + //////////////////////////////////////// + //////////////////////////////////////// + const cookies = parseClientCookies(); + const socialId = (cookies === null || cookies === void 0 ? 
void 0 : cookies.datasquirel_social_id) && + typeof cookies.datasquirel_social_id == "string" && + !cookies.datasquirel_social_id.match(/^null$/i) + ? cookies.datasquirel_social_id + : null; + //////////////////////////////////////// + //////////////////////////////////////// + //////////////////////////////////////// + localStorage.setItem("user", "{}"); + localStorage.removeItem(getCsrfHeaderName()); + document.cookie = `datasquirel_social_id=null;samesite=strict;path=/`; + //////////////////////////////////////// + //////////////////////////////////////// + //////////////////////////////////////// + const response = await new Promise((resolve, reject) => { + if (socialId && !(socialId === null || socialId === void 0 ? void 0 : socialId.match(/^null$/i))) { + const googleClientId = params === null || params === void 0 ? void 0 : params.googleClientId; + if (googleClientId) { + const googleScript = document.createElement("script"); + googleScript.src = "https://accounts.google.com/gsi/client"; + googleScript.className = "social-script-tag"; + document.body.appendChild(googleScript); //////////////////////////////////////// //////////////////////////////////////// //////////////////////////////////////// + googleScript.onload = function (e) { + // @ts-ignore + const google = window.google; + if (google) { + //////////////////////////////////////// + //////////////////////////////////////// + //////////////////////////////////////// + google.accounts.id.initialize({ + client_id: googleClientId, + }); + google.accounts.id.revoke(socialId, (done) => { + console.log(done.error); + resolve(true); + }); + //////////////////////////////////////// + //////////////////////////////////////// + //////////////////////////////////////// + } + }; } else { resolve(true); } - }); - return response; - } - catch (error) { - return false; - } - }); + //////////////////////////////////////// + //////////////////////////////////////// + //////////////////////////////////////// + } + else { + resolve(true); + } + }); + return response; + } + catch (error) { + return false; + } } diff --git a/dist/client/auth/post-login.js b/dist/client/auth/post-login.js index 8730b84..1ced89b 100644 --- a/dist/client/auth/post-login.js +++ b/dist/client/auth/post-login.js @@ -1,10 +1,4 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = postLogin; -const get_csrf_header_name_1 = __importDefault(require("../../package-shared/actions/get-csrf-header-name")); +import getCsrfHeaderName from "../../package-shared/actions/get-csrf-header-name"; /** * Client Setup After Login * =============================================================================== @@ -12,13 +6,13 @@ const get_csrf_header_name_1 = __importDefault(require("../../package-shared/act * is logged in. 
Use this in conjunction with the `datasquirel.user.loginUser` * function */ -function postLogin(res) { +export default function postLogin(res) { try { if (!res.payload) return false; if (!res.payload.csrf_k) return false; - localStorage.setItem((0, get_csrf_header_name_1.default)(), res.payload.csrf_k); + localStorage.setItem(getCsrfHeaderName(), res.payload.csrf_k); localStorage.setItem("user", JSON.stringify(res.payload)); return true; } diff --git a/dist/client/fetch/index.d.ts b/dist/client/fetch/index.d.ts index 9976f51..0c01b2b 100644 --- a/dist/client/fetch/index.d.ts +++ b/dist/client/fetch/index.d.ts @@ -1,6 +1,10 @@ -type FetchApiOptions = { +type FetchApiOptions = { method: "POST" | "GET" | "DELETE" | "PUT" | "PATCH" | "post" | "get" | "delete" | "put" | "patch"; - body?: object | string; + body?: T | string; headers?: FetchHeader; }; type FetchHeader = HeadersInit & { @@ -15,7 +19,18 @@ export type FetchApiReturn = { /** * # Fetch API */ -export default function fetchApi(url: string, options?: FetchApiOptions, csrf?: boolean, -/** Key to use to grab local Storage csrf value. */ -localStorageCSRFKey?: string): Promise; +export default function fetchApi(url: string, options?: FetchApiOptions, csrf?: boolean, +/** + * Key to use to grab local Storage csrf value. + */ +localStorageCSRFKey?: string, +/** + * Key with which to set the request header csrf + * value + */ +csrfHeaderKey?: string): Promise; export {}; diff --git a/dist/client/fetch/index.js b/dist/client/fetch/index.js index 77fff2f..7dfff2f 100644 --- a/dist/client/fetch/index.js +++ b/dist/client/fetch/index.js @@ -1,90 +1,80 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = fetchApi; -const lodash_1 = __importDefault(require("lodash")); -const get_csrf_header_name_1 = __importDefault(require("../../package-shared/actions/get-csrf-header-name")); +import _ from "lodash"; /** * # Fetch API */ -function fetchApi(url, options, csrf, -/** Key to use to grab local Storage csrf value. 
*/ -localStorageCSRFKey) { - return __awaiter(this, void 0, void 0, function* () { - let data; - const csrfValue = localStorage.getItem(localStorageCSRFKey || (0, get_csrf_header_name_1.default)()); - let finalHeaders = { - "Content-Type": "application/json", - }; - if (csrf && csrfValue) { - finalHeaders[(0, get_csrf_header_name_1.default)()] = csrfValue; - } - if (typeof options === "string") { - try { - let fetchData; - switch (options) { - case "post": - fetchData = yield fetch(url, { - method: options, - headers: finalHeaders, - }); - data = fetchData.json(); - break; - default: - fetchData = yield fetch(url); - data = fetchData.json(); - break; - } - } - catch (error) { - console.log("FetchAPI error #1:", error.message); - data = null; +export default async function fetchApi(url, options, csrf, +/** + * Key to use to grab local Storage csrf value. + */ +localStorageCSRFKey, +/** + * Key with which to set the request header csrf + * value + */ +csrfHeaderKey) { + let data; + const csrfKey = "x-dsql-csrf-key"; + const csrfValue = localStorage.getItem(localStorageCSRFKey || csrfKey); + let finalHeaders = { + "Content-Type": "application/json", + }; + if (csrf && csrfValue) { + finalHeaders[localStorageCSRFKey || csrfKey] = csrfValue; + } + if (typeof options === "string") { + try { + let fetchData; + switch (options) { + case "post": + fetchData = await fetch(url, { + method: options, + headers: finalHeaders, + }); + data = fetchData.json(); + break; + default: + fetchData = await fetch(url); + data = fetchData.json(); + break; } } - else if (typeof options === "object") { - try { - let fetchData; - if (options.body && typeof options.body === "object") { - let oldOptionsBody = lodash_1.default.cloneDeep(options.body); - options.body = JSON.stringify(oldOptionsBody); - } - if (options.headers) { - options.headers = lodash_1.default.merge(options.headers, finalHeaders); - const finalOptions = Object.assign({}, options); - fetchData = yield fetch(url, finalOptions); - } - else { - const finalOptions = Object.assign(Object.assign({}, options), { headers: finalHeaders }); - fetchData = yield fetch(url, finalOptions); - } - data = fetchData.json(); - } - catch (error) { - console.log("FetchAPI error #2:", error.message); - data = null; - } + catch (error) { + console.log("FetchAPI error #1:", error.message); + data = null; } - else { - try { - let fetchData = yield fetch(url); - data = yield fetchData.json(); + } + else if (typeof options === "object") { + try { + let fetchData; + if (options.body && typeof options.body === "object") { + let oldOptionsBody = _.cloneDeep(options.body); + options.body = JSON.stringify(oldOptionsBody); } - catch (error) { - console.log("FetchAPI error #3:", error.message); - data = null; + if (options.headers) { + options.headers = _.merge(options.headers, finalHeaders); + const finalOptions = Object.assign({}, options); + fetchData = await fetch(url, finalOptions); } + else { + const finalOptions = Object.assign(Object.assign({}, options), { headers: finalHeaders }); + fetchData = await fetch(url, finalOptions); + } + data = fetchData.json(); } - return data; - }); + catch (error) { + console.log("FetchAPI error #2:", error.message); + data = null; + } + } + else { + try { + let fetchData = await fetch(url); + data = await fetchData.json(); + } + catch (error) { + console.log("FetchAPI error #3:", error.message); + data = null; + } + } + return data; } diff --git a/dist/client/index.js b/dist/client/index.js index 680a86b..032db04 100644 --- 
a/dist/client/index.js +++ b/dist/client/index.js @@ -1,60 +1,55 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const imageInputFileToBase64_1 = __importDefault(require("./media/imageInputFileToBase64")); -const imageInputToBase64_1 = __importDefault(require("./media/imageInputToBase64")); -const inputFileToBase64_1 = __importDefault(require("./media/inputFileToBase64")); -const getAccessToken_1 = __importDefault(require("./auth/google/getAccessToken")); -const getAccessToken_2 = __importDefault(require("./auth/github/getAccessToken")); -const logout_1 = __importDefault(require("./auth/logout")); -const fetch_1 = __importDefault(require("./fetch")); -const fetch_2 = __importDefault(require("./fetch")); -const serialize_query_1 = __importDefault(require("../package-shared/utils/serialize-query")); -const serialize_cookies_1 = __importDefault(require("../package-shared/utils/serialize-cookies")); -const ejson_1 = __importDefault(require("../package-shared/utils/ejson")); -const numberfy_1 = __importDefault(require("../package-shared/utils/numberfy")); -const slugify_1 = __importDefault(require("../package-shared/utils/slugify")); -const post_login_1 = __importDefault(require("./auth/post-login")); -const deserialize_query_1 = __importDefault(require("../package-shared/utils/deserialize-query")); -const debug_log_1 = __importDefault(require("../package-shared/utils/logging/debug-log")); +import imageInputFileToBase64 from "./media/imageInputFileToBase64"; +import imageInputToBase64 from "./media/imageInputToBase64"; +import inputFileToBase64 from "./media/inputFileToBase64"; +import getAccessToken from "./auth/google/getAccessToken"; +import getGithubAccessToken from "./auth/github/getAccessToken"; +import logout from "./auth/logout"; +import fetchApi from "./fetch"; +import clientFetch from "./fetch"; +import serializeQuery from "../package-shared/utils/serialize-query"; +import serializeCookies from "../package-shared/utils/serialize-cookies"; +import EJSON from "../package-shared/utils/ejson"; +import numberfy from "../package-shared/utils/numberfy"; +import slugify from "../package-shared/utils/slugify"; +import postLogin from "./auth/post-login"; +import deserializeQuery from "../package-shared/utils/deserialize-query"; +import debugLog from "../package-shared/utils/logging/debug-log"; const media = { - imageInputToBase64: imageInputToBase64_1.default, - imageInputFileToBase64: imageInputFileToBase64_1.default, - inputFileToBase64: inputFileToBase64_1.default, + imageInputToBase64: imageInputToBase64, + imageInputFileToBase64: imageInputFileToBase64, + inputFileToBase64: inputFileToBase64, }; /** * User Auth Object */ const auth = { google: { - getAccessToken: getAccessToken_1.default, + getAccessToken: getAccessToken, }, github: { - getAccessToken: getAccessToken_2.default, + getAccessToken: getGithubAccessToken, }, - logout: logout_1.default, - postLogin: post_login_1.default, + logout, + postLogin, }; const utils = { - deserializeQuery: deserialize_query_1.default, - serializeQuery: serialize_query_1.default, - serializeCookies: serialize_cookies_1.default, - EJSON: ejson_1.default, - numberfy: numberfy_1.default, - slugify: slugify_1.default, - debugLog: debug_log_1.default, + deserializeQuery, + serializeQuery, + serializeCookies, + EJSON, + numberfy, + slugify, + debugLog, }; /** * Fetch */ const fetch = { - 
fetchApi: fetch_1.default, - clientFetch: fetch_2.default, + fetchApi, + clientFetch, }; /** * Main Export */ const datasquirelClient = { media, auth, fetch, utils }; -exports.default = datasquirelClient; +export default datasquirelClient; diff --git a/dist/client/media/client.js b/dist/client/media/client.js index 5bdfeae..78f5339 100644 --- a/dist/client/media/client.js +++ b/dist/client/media/client.js @@ -1,18 +1,13 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const imageInputFileToBase64_1 = __importDefault(require("./imageInputFileToBase64")); -const imageInputToBase64_1 = __importDefault(require("./imageInputToBase64")); +import imageInputFileToBase64 from "./imageInputFileToBase64"; +import imageInputToBase64 from "./imageInputToBase64"; /** * ========================== * Media Functions Object * ========================== */ const media = { - imageInputToBase64: imageInputToBase64_1.default, - imageInputFileToBase64: imageInputFileToBase64_1.default, + imageInputToBase64: imageInputToBase64, + imageInputFileToBase64: imageInputFileToBase64, }; /** * ========================== @@ -20,8 +15,8 @@ const media = { * ========================== */ const auth = { - imageInputToBase64: imageInputToBase64_1.default, - imageInputFileToBase64: imageInputFileToBase64_1.default, + imageInputToBase64: imageInputToBase64, + imageInputFileToBase64: imageInputFileToBase64, }; /** * ========================== @@ -31,4 +26,4 @@ const auth = { const datasquirelClient = { media: media, }; -exports.default = datasquirelClient; +export default datasquirelClient; diff --git a/dist/client/media/imageInputFileToBase64.js b/dist/client/media/imageInputFileToBase64.js index 05417f9..479cabc 100644 --- a/dist/client/media/imageInputFileToBase64.js +++ b/dist/client/media/imageInputFileToBase64.js @@ -1,92 +1,78 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = imageInputFileToBase64; /** * # Image input File top Base64 */ -function imageInputFileToBase64(_a) { - return __awaiter(this, arguments, void 0, function* ({ imageInputFile, maxWidth, imagePreviewNode, }) { - /** - * Make https request - * - * @description make a request to datasquirel.com - */ - try { - let imageName = imageInputFile.name.replace(/\..*/, ""); - let imageDataBase64; - let imageSize; - let canvas = document.createElement("canvas"); - const MIME_TYPE = imageInputFile.type; - const QUALITY = 0.95; - const MAX_WIDTH = maxWidth ? 
maxWidth : null; - const file = imageInputFile; - const blobURL = URL.createObjectURL(file); - const img = new Image(); - /** ********************* Add source to new image */ - img.src = blobURL; - imageDataBase64 = yield new Promise((res, rej) => { - /** ********************* Handle Errors in loading image */ - img.onerror = function () { - URL.revokeObjectURL(this.src); - console.log("Cannot load image"); - }; - /** ********************* Handle new image when loaded */ - img.onload = function (e) { - const imgEl = e.target; - URL.revokeObjectURL(imgEl.src); - if (MAX_WIDTH) { - const scaleSize = MAX_WIDTH / img.naturalWidth; - canvas.width = - img.naturalWidth < MAX_WIDTH - ? img.naturalWidth - : MAX_WIDTH; - canvas.height = - img.naturalWidth < MAX_WIDTH - ? img.naturalHeight - : img.naturalHeight * scaleSize; - } - else { - canvas.width = img.naturalWidth; - canvas.height = img.naturalHeight; - } - const ctx = canvas.getContext("2d"); - ctx === null || ctx === void 0 ? void 0 : ctx.drawImage(img, 0, 0, canvas.width, canvas.height); - const srcEncoded = canvas.toDataURL(MIME_TYPE, QUALITY); - if (imagePreviewNode) { - imagePreviewNode.src = srcEncoded; - } - res(srcEncoded); - }; - }); - imageSize = yield new Promise((res, rej) => { - canvas.toBlob((blob) => { - res(blob === null || blob === void 0 ? void 0 : blob.size); - }, MIME_TYPE, QUALITY); - }); - return { - imageBase64: imageDataBase64 === null || imageDataBase64 === void 0 ? void 0 : imageDataBase64.replace(/.*?base64,/, ""), - imageBase64Full: imageDataBase64, - imageName: imageName, - imageSize: imageSize, +export default async function imageInputFileToBase64({ imageInputFile, maxWidth, imagePreviewNode, }) { + /** + * Make https request + * + * @description make a request to datasquirel.com + */ + try { + let imageName = imageInputFile.name.replace(/\..*/, ""); + let imageDataBase64; + let imageSize; + let canvas = document.createElement("canvas"); + const MIME_TYPE = imageInputFile.type; + const QUALITY = 0.95; + const MAX_WIDTH = maxWidth ? maxWidth : null; + const file = imageInputFile; + const blobURL = URL.createObjectURL(file); + const img = new Image(); + /** ********************* Add source to new image */ + img.src = blobURL; + imageDataBase64 = await new Promise((res, rej) => { + /** ********************* Handle Errors in loading image */ + img.onerror = function () { + URL.revokeObjectURL(this.src); + console.log("Cannot load image"); }; - } - catch (error) { - console.log("Image Processing Error! =>", error.message); - return { - imageBase64: undefined, - imageBase64Full: undefined, - imageName: undefined, - imageSize: undefined, + /** ********************* Handle new image when loaded */ + img.onload = function (e) { + const imgEl = e.target; + URL.revokeObjectURL(imgEl.src); + if (MAX_WIDTH) { + const scaleSize = MAX_WIDTH / img.naturalWidth; + canvas.width = + img.naturalWidth < MAX_WIDTH + ? img.naturalWidth + : MAX_WIDTH; + canvas.height = + img.naturalWidth < MAX_WIDTH + ? img.naturalHeight + : img.naturalHeight * scaleSize; + } + else { + canvas.width = img.naturalWidth; + canvas.height = img.naturalHeight; + } + const ctx = canvas.getContext("2d"); + ctx === null || ctx === void 0 ? 
void 0 : ctx.drawImage(img, 0, 0, canvas.width, canvas.height); + const srcEncoded = canvas.toDataURL(MIME_TYPE, QUALITY); + if (imagePreviewNode) { + imagePreviewNode.src = srcEncoded; + } + res(srcEncoded); }; - } - }); + }); + imageSize = await new Promise((res, rej) => { + canvas.toBlob((blob) => { + res(blob === null || blob === void 0 ? void 0 : blob.size); + }, MIME_TYPE, QUALITY); + }); + return { + imageBase64: imageDataBase64 === null || imageDataBase64 === void 0 ? void 0 : imageDataBase64.replace(/.*?base64,/, ""), + imageBase64Full: imageDataBase64, + imageName: imageName, + imageSize: imageSize, + }; + } + catch (error) { + console.log("Image Processing Error! =>", error.message); + return { + imageBase64: undefined, + imageBase64Full: undefined, + imageName: undefined, + imageSize: undefined, + }; + } } diff --git a/dist/client/media/imageInputToBase64.js b/dist/client/media/imageInputToBase64.js index 8e8decb..7b8a98e 100644 --- a/dist/client/media/imageInputToBase64.js +++ b/dist/client/media/imageInputToBase64.js @@ -1,90 +1,76 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = imageInputToBase64; /** * # Image Input Element to Base 64 */ -function imageInputToBase64(_a) { - return __awaiter(this, arguments, void 0, function* ({ imageInput, maxWidth, mimeType, }) { - var _b, _c; - /** - * Make https request - * - * @description make a request to datasquirel.com - */ - try { - let imagePreviewNode = document.querySelector(`[data-imagepreview='image']`); - let imageName = (_b = imageInput.files) === null || _b === void 0 ? void 0 : _b[0].name.replace(/\..*/, ""); - let imageDataBase64; - const MIME_TYPE = mimeType ? mimeType : "image/jpeg"; - const QUALITY = 0.95; - const MAX_WIDTH = maxWidth ? maxWidth : null; - const file = (_c = imageInput.files) === null || _c === void 0 ? void 0 : _c[0]; - const blobURL = file ? URL.createObjectURL(file) : undefined; - const img = new Image(); - if (blobURL) { - img.src = blobURL; - imageDataBase64 = yield new Promise((res, rej) => { - /** ********************* Handle Errors in loading image */ - img.onerror = function () { - URL.revokeObjectURL(this.src); - window.alert("Cannot load image!"); - }; - img.onload = function (e) { - const imgEl = e.target; - URL.revokeObjectURL(imgEl.src); - const canvas = document.createElement("canvas"); - if (MAX_WIDTH) { - const scaleSize = MAX_WIDTH / img.naturalWidth; - canvas.width = - img.naturalWidth < MAX_WIDTH - ? img.naturalWidth - : MAX_WIDTH; - canvas.height = - img.naturalWidth < MAX_WIDTH - ? img.naturalHeight - : img.naturalHeight * scaleSize; - } - else { - canvas.width = img.naturalWidth; - canvas.height = img.naturalHeight; - } - const ctx = canvas.getContext("2d"); - ctx === null || ctx === void 0 ? 
void 0 : ctx.drawImage(img, 0, 0, canvas.width, canvas.height); - const srcEncoded = canvas.toDataURL(MIME_TYPE, QUALITY); - if (imagePreviewNode) { - document - .querySelectorAll(`[data-imagepreview='image']`) - .forEach((_img) => { - const _imgEl = _img; - _imgEl.src = srcEncoded; - }); - } - res(srcEncoded); - }; - }); - return { - imageBase64: imageDataBase64 === null || imageDataBase64 === void 0 ? void 0 : imageDataBase64.replace(/.*?base64,/, ""), - imageBase64Full: imageDataBase64, - imageName: imageName, +export default async function imageInputToBase64({ imageInput, maxWidth, mimeType, }) { + var _a, _b; + /** + * Make https request + * + * @description make a request to datasquirel.com + */ + try { + let imagePreviewNode = document.querySelector(`[data-imagepreview='image']`); + let imageName = (_a = imageInput.files) === null || _a === void 0 ? void 0 : _a[0].name.replace(/\..*/, ""); + let imageDataBase64; + const MIME_TYPE = mimeType ? mimeType : "image/jpeg"; + const QUALITY = 0.95; + const MAX_WIDTH = maxWidth ? maxWidth : null; + const file = (_b = imageInput.files) === null || _b === void 0 ? void 0 : _b[0]; + const blobURL = file ? URL.createObjectURL(file) : undefined; + const img = new Image(); + if (blobURL) { + img.src = blobURL; + imageDataBase64 = await new Promise((res, rej) => { + /** ********************* Handle Errors in loading image */ + img.onerror = function () { + URL.revokeObjectURL(this.src); + window.alert("Cannot load image!"); }; - } - else { - return {}; - } + img.onload = function (e) { + const imgEl = e.target; + URL.revokeObjectURL(imgEl.src); + const canvas = document.createElement("canvas"); + if (MAX_WIDTH) { + const scaleSize = MAX_WIDTH / img.naturalWidth; + canvas.width = + img.naturalWidth < MAX_WIDTH + ? img.naturalWidth + : MAX_WIDTH; + canvas.height = + img.naturalWidth < MAX_WIDTH + ? img.naturalHeight + : img.naturalHeight * scaleSize; + } + else { + canvas.width = img.naturalWidth; + canvas.height = img.naturalHeight; + } + const ctx = canvas.getContext("2d"); + ctx === null || ctx === void 0 ? void 0 : ctx.drawImage(img, 0, 0, canvas.width, canvas.height); + const srcEncoded = canvas.toDataURL(MIME_TYPE, QUALITY); + if (imagePreviewNode) { + document + .querySelectorAll(`[data-imagepreview='image']`) + .forEach((_img) => { + const _imgEl = _img; + _imgEl.src = srcEncoded; + }); + } + res(srcEncoded); + }; + }); + return { + imageBase64: imageDataBase64 === null || imageDataBase64 === void 0 ? void 0 : imageDataBase64.replace(/.*?base64,/, ""), + imageBase64Full: imageDataBase64, + imageName: imageName, + }; } - catch ( /** @type {*} */error) { - console.log("Image Processing Error! =>", error.message); + else { return {}; } - }); + } + catch ( /** @type {*} */error) { + console.log("Image Processing Error! =>", error.message); + return {}; + } } diff --git a/dist/client/media/inputFileToBase64.js b/dist/client/media/inputFileToBase64.js index 5ef9e13..76c7323 100644 --- a/dist/client/media/inputFileToBase64.js +++ b/dist/client/media/inputFileToBase64.js @@ -1,15 +1,3 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = inputFileToBase64; /** * Input File to base64 * ============================================================================== @@ -18,42 +6,40 @@ exports.default = inputFileToBase64; * HTML file input elements usually return an array of input objects, so be sure to select the target * file from the array. */ -function inputFileToBase64(_a) { - return __awaiter(this, arguments, void 0, function* ({ inputFile, allowedRegex, }) { - var _b; - const allowedTypesRegex = allowedRegex ? allowedRegex : /image\/*|\/pdf/; - if (!((_b = inputFile === null || inputFile === void 0 ? void 0 : inputFile.type) === null || _b === void 0 ? void 0 : _b.match(allowedTypesRegex))) { - window.alert(`We currently don't support ${inputFile.type} file types. Support is coming soon. For now we support only images and PDFs.`); - return { - fileName: inputFile.name, +export default async function inputFileToBase64({ inputFile, allowedRegex, }) { + var _a; + const allowedTypesRegex = allowedRegex ? allowedRegex : /image\/*|\/pdf/; + if (!((_a = inputFile === null || inputFile === void 0 ? void 0 : inputFile.type) === null || _a === void 0 ? void 0 : _a.match(allowedTypesRegex))) { + window.alert(`We currently don't support ${inputFile.type} file types. Support is coming soon. For now we support only images and PDFs.`); + return { + fileName: inputFile.name, + }; + } + try { + let fileName = inputFile.name.replace(/\..*/, ""); + const fileData = await new Promise((resolve, reject) => { + var reader = new FileReader(); + reader.readAsDataURL(inputFile); + reader.onload = function () { + var _a; + resolve((_a = reader.result) === null || _a === void 0 ? void 0 : _a.toString()); }; - } - try { - let fileName = inputFile.name.replace(/\..*/, ""); - const fileData = yield new Promise((resolve, reject) => { - var reader = new FileReader(); - reader.readAsDataURL(inputFile); - reader.onload = function () { - var _a; - resolve((_a = reader.result) === null || _a === void 0 ? void 0 : _a.toString()); - }; - reader.onerror = function (/** @type {*} */ error) { - console.log("Error: ", error.message); - }; - }); - return { - fileBase64: fileData === null || fileData === void 0 ? void 0 : fileData.replace(/.*?base64,/, ""), - fileBase64Full: fileData, - fileName: fileName, - fileSize: inputFile.size, - fileType: inputFile.type, + reader.onerror = function (/** @type {*} */ error) { + console.log("Error: ", error.message); }; - } - catch (error) { - console.log("File Processing Error! =>", error.message); - return { - fileName: inputFile.name, - }; - } - }); + }); + return { + fileBase64: fileData === null || fileData === void 0 ? void 0 : fileData.replace(/.*?base64,/, ""), + fileBase64Full: fileData, + fileName: fileName, + fileSize: inputFile.size, + fileType: inputFile.type, + }; + } + catch (error) { + console.log("File Processing Error! 
=>", error.message); + return { + fileName: inputFile.name, + }; + } } diff --git a/dist/client/utils/parseClientCookies.js b/dist/client/utils/parseClientCookies.js index a9d31be..beb516a 100644 --- a/dist/client/utils/parseClientCookies.js +++ b/dist/client/utils/parseClientCookies.js @@ -1,12 +1,9 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = default_1; /** * Parse request cookies * ============================================================================== * * @description This function takes in a request object and returns the cookies as a JS object */ -function default_1() { +export default function () { /** * Check inputs * diff --git a/dist/console-colors.js b/dist/console-colors.js index f1d612e..04fa226 100644 --- a/dist/console-colors.js +++ b/dist/console-colors.js @@ -1,5 +1,3 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); const colors = { Reset: "\x1b[0m", Bright: "\x1b[1m", @@ -27,4 +25,4 @@ const colors = { BgWhite: "\x1b[47m", BgGray: "\x1b[100m", }; -exports.default = colors; +export default colors; diff --git a/dist/engine/dsql.js b/dist/engine/dsql.js index 84070f5..963ea32 100644 --- a/dist/engine/dsql.js +++ b/dist/engine/dsql.js @@ -1,28 +1,13 @@ #! /usr/bin/env node -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = run; -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); +import fs from "fs"; +import path from "path"; require("dotenv").config({ - path: path_1.default.resolve(process.cwd(), ".env"), + path: path.resolve(process.cwd(), ".env"), }); -const index_1 = __importDefault(require("../index")); -const console_colors_1 = __importDefault(require("../console-colors")); -const createDbFromSchema_1 = __importDefault(require("../package-shared/shell/createDbFromSchema")); -if (!fs_1.default.existsSync(path_1.default.resolve(process.cwd(), ".env"))) { +import datasquirel from "../index"; +import colors from "../console-colors"; +import createDbFromSchema from "../package-shared/shell/createDbFromSchema"; +if (!fs.existsSync(path.resolve(process.cwd(), ".env"))) { console.log(".env file not found"); process.exit(); } @@ -39,76 +24,74 @@ if (!(DSQL_PASS === null || DSQL_PASS === void 0 ? void 0 : DSQL_PASS.match(/./) console.log("DSQL_PASS is required in your `.env` file"); process.exit(); } -const dbSchemaLocalFilePath = path_1.default.resolve(process.cwd(), "dsql.schema.json"); -function run() { - return __awaiter(this, void 0, void 0, function* () { - let schemaData; - if (DSQL_KEY && (DSQL_REF_DB_NAME === null || DSQL_REF_DB_NAME === void 0 ? 
void 0 : DSQL_REF_DB_NAME.match(/./))) { - const dbSchemaDataResponse = yield index_1.default.getSchema({ - key: DSQL_KEY, - database: DSQL_REF_DB_NAME || undefined, - }); - if (!dbSchemaDataResponse.payload || - Array.isArray(dbSchemaDataResponse.payload)) { - console.log("DSQL_KEY+DSQL_REF_DB_NAME => Error in fetching DB schema"); - console.log(dbSchemaDataResponse); - process.exit(); - } - let fetchedDbSchemaObject = dbSchemaDataResponse.payload; - if (DSQL_DB_NAME) - fetchedDbSchemaObject.dbFullName = DSQL_DB_NAME; - schemaData = [fetchedDbSchemaObject]; - } - else if (DSQL_KEY) { - const dbSchemaDataResponse = yield index_1.default.getSchema({ - key: DSQL_KEY, - database: DSQL_REF_DB_NAME || undefined, - }); - if (!dbSchemaDataResponse.payload || - !Array.isArray(dbSchemaDataResponse.payload)) { - console.log("DSQL_KEY => Error in fetching DB schema"); - console.log(dbSchemaDataResponse); - process.exit(); - } - let fetchedDbSchemaObject = dbSchemaDataResponse.payload; - // fetchedDbSchemaObject.forEach((db, index) => { - // db.dbFullName = db.dbFullName?.replace(/^datasquirel_user_\d+_/, ""); - // }); - schemaData = fetchedDbSchemaObject; - } - else if (fs_1.default.existsSync(dbSchemaLocalFilePath)) { - schemaData = [ - JSON.parse(fs_1.default.readFileSync(dbSchemaLocalFilePath, "utf8")), - ]; - } - else { - console.log("No source for DB Schema. Please provide a local `dsql.schema.json` file, or provide `DSQL_KEY` and `DSQL_REF_DB_NAME` environment variables."); - process.exit(); - } - if (!schemaData) { - console.log("No schema found"); - process.exit(); - } - if (DSQL_FULL_SYNC === null || DSQL_FULL_SYNC === void 0 ? void 0 : DSQL_FULL_SYNC.match(/true/i)) { - fs_1.default.writeFileSync(dbSchemaLocalFilePath, JSON.stringify(schemaData[0], null, 4), "utf8"); - } - console.log(` - ${console_colors_1.default.FgBlue}Info:${console_colors_1.default.Reset} Now generating and mapping databases ...`); - yield (0, createDbFromSchema_1.default)({ - dbSchemaData: schemaData, +const dbSchemaLocalFilePath = path.resolve(process.cwd(), "dsql.schema.json"); +export default async function run() { + let schemaData; + if (DSQL_KEY && (DSQL_REF_DB_NAME === null || DSQL_REF_DB_NAME === void 0 ? 
void 0 : DSQL_REF_DB_NAME.match(/./))) { + const dbSchemaDataResponse = await datasquirel.getSchema({ + key: DSQL_KEY, + database: DSQL_REF_DB_NAME || undefined, }); - console.log(` - ${console_colors_1.default.FgGreen}Success:${console_colors_1.default.Reset} Databases created Successfully!`); + if (!dbSchemaDataResponse.payload || + Array.isArray(dbSchemaDataResponse.payload)) { + console.log("DSQL_KEY+DSQL_REF_DB_NAME => Error in fetching DB schema"); + console.log(dbSchemaDataResponse); + process.exit(); + } + let fetchedDbSchemaObject = dbSchemaDataResponse.payload; + if (DSQL_DB_NAME) + fetchedDbSchemaObject.dbFullName = DSQL_DB_NAME; + schemaData = [fetchedDbSchemaObject]; + } + else if (DSQL_KEY) { + const dbSchemaDataResponse = await datasquirel.getSchema({ + key: DSQL_KEY, + database: DSQL_REF_DB_NAME || undefined, + }); + if (!dbSchemaDataResponse.payload || + !Array.isArray(dbSchemaDataResponse.payload)) { + console.log("DSQL_KEY => Error in fetching DB schema"); + console.log(dbSchemaDataResponse); + process.exit(); + } + let fetchedDbSchemaObject = dbSchemaDataResponse.payload; + // fetchedDbSchemaObject.forEach((db, index) => { + // db.dbFullName = db.dbFullName?.replace(/^datasquirel_user_\d+_/, ""); + // }); + schemaData = fetchedDbSchemaObject; + } + else if (fs.existsSync(dbSchemaLocalFilePath)) { + schemaData = [ + JSON.parse(fs.readFileSync(dbSchemaLocalFilePath, "utf8")), + ]; + } + else { + console.log("No source for DB Schema. Please provide a local `dsql.schema.json` file, or provide `DSQL_KEY` and `DSQL_REF_DB_NAME` environment variables."); + process.exit(); + } + if (!schemaData) { + console.log("No schema found"); + process.exit(); + } + if (DSQL_FULL_SYNC === null || DSQL_FULL_SYNC === void 0 ? void 0 : DSQL_FULL_SYNC.match(/true/i)) { + fs.writeFileSync(dbSchemaLocalFilePath, JSON.stringify(schemaData[0], null, 4), "utf8"); + } + console.log(` - ${colors.FgBlue}Info:${colors.Reset} Now generating and mapping databases ...`); + await createDbFromSchema({ + dbSchemaData: schemaData, }); + console.log(` - ${colors.FgGreen}Success:${colors.Reset} Databases created Successfully!`); } let interval; -if (fs_1.default.existsSync(dbSchemaLocalFilePath) && !(DSQL_KEY === null || DSQL_KEY === void 0 ? void 0 : DSQL_KEY.match(/....../))) { - fs_1.default.watchFile(dbSchemaLocalFilePath, { interval: 1000 }, (curr, prev) => { - console.log(` - ${console_colors_1.default.FgBlue}Info:${console_colors_1.default.Reset} Syncing Databases Locally ...`); +if (fs.existsSync(dbSchemaLocalFilePath) && !(DSQL_KEY === null || DSQL_KEY === void 0 ? void 0 : DSQL_KEY.match(/....../))) { + fs.watchFile(dbSchemaLocalFilePath, { interval: 1000 }, (curr, prev) => { + console.log(` - ${colors.FgBlue}Info:${colors.Reset} Syncing Databases Locally ...`); run(); }); } else if (DSQL_KEY === null || DSQL_KEY === void 0 ? void 0 : DSQL_KEY.match(/....../)) { interval = setInterval(() => { - console.log(` - ${console_colors_1.default.FgMagenta}Info:${console_colors_1.default.Reset} Syncing Databases from the cloud ...`); + console.log(` - ${colors.FgMagenta}Info:${colors.Reset} Syncing Databases from the cloud ...`); run(); }, 20000); } diff --git a/dist/engine/dump.js b/dist/engine/dump.js index da8e3b3..c213187 100644 --- a/dist/engine/dump.js +++ b/dist/engine/dump.js @@ -1,14 +1,9 @@ #! /usr/bin/env node -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; var _a, _b, _c; -Object.defineProperty(exports, "__esModule", { value: true }); -const path_1 = __importDefault(require("path")); -const child_process_1 = require("child_process"); +import path from "path"; +import { execSync } from "child_process"; require("dotenv").config({ - path: path_1.default.resolve(process.cwd(), ".env"), + path: path.resolve(process.cwd(), ".env"), }); const mysqlPath = ((_a = process.platform) === null || _a === void 0 ? void 0 : _a.match(/win/i)) ? "'" + "C:\\Program Files\\MySQL\\MySQL Server 8.0\\bin\\mysql.exe" + "'" @@ -39,7 +34,7 @@ try { cwd: process.cwd(), }; // if (process.platform.match(/win/i)) execSyncOptions.shell = "bash.exe"; - const dump = (0, child_process_1.execSync)(`${mysqlPath} -u ${DSQL_USER} -p${DSQL_PASS} ${dbName} < ${dumpFilePath}`, execSyncOptions); + const dump = execSync(`${mysqlPath} -u ${DSQL_USER} -p${DSQL_PASS} ${dbName} < ${dumpFilePath}`, execSyncOptions); console.log("Dumped successfully", dump.toString()); //////////////////////////////////////// //////////////////////////////////////// diff --git a/dist/engine/schema-to-typedef.js b/dist/engine/schema-to-typedef.js index b8bd0a0..b0e087e 100644 --- a/dist/engine/schema-to-typedef.js +++ b/dist/engine/schema-to-typedef.js @@ -1,26 +1,12 @@ #! /usr/bin/env node -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs_1 = __importDefault(require("fs")); -const __1 = __importDefault(require("..")); -const util_1 = require("util"); -const db_schema_to_type_1 = __importDefault(require("../package-shared/functions/dsql/db-schema-to-type")); -const path_1 = __importDefault(require("path")); -const debug_log_1 = __importDefault(require("../package-shared/utils/logging/debug-log")); -const parse_env_1 = __importDefault(require("../package-shared/utils/parse-env")); -const args = (0, util_1.parseArgs)({ +import fs from "fs"; +import datasquirel from ".."; +import { parseArgs } from "util"; +import dbSchemaToType from "../package-shared/functions/dsql/db-schema-to-type"; +import path from "path"; +import debugLog from "../package-shared/utils/logging/debug-log"; +import parseEnv from "../package-shared/utils/parse-env"; +const args = parseArgs({ args: process.argv, options: { apiKey: { @@ -55,12 +41,12 @@ const args = (0, util_1.parseArgs)({ }); let appendedEnv = {}; if (args.values.envfile && typeof args.values.envfile == "string") { - const finalEnvPath = path_1.default.resolve(process.cwd(), args.values.envfile); - if (fs_1.default.existsSync(finalEnvPath)) { - const parsedEnv = (0, parse_env_1.default)(finalEnvPath); - appendedEnv = parsedEnv || {}; + const finalEnvPath = path.resolve(process.cwd(), args.values.envfile); + if (fs.existsSync(finalEnvPath)) { + const parsedEnv = parseEnv(finalEnvPath); + appendedEnv = (parsedEnv || {}); if (args.values.debug) { - (0, debug_log_1.default)({ + debugLog({ log: appendedEnv, label: "Appended env", title: "Schema to Typedef", @@ -71,25 +57,25 @@ if (args.values.envfile && typeof args.values.envfile == "string") { } const finalEnv = Object.assign(Object.assign({}, process.env), appendedEnv); process.env = Object.assign(Object.assign({}, process.env), appendedEnv); -(() => __awaiter(void 0, void 0, void 0, function* () { +(async () => { try { const key = args.values.apiKey || finalEnv["DSQL_FULL_ACCESS_API_KEY"]; const database = args.values.database || finalEnv["DSQL_DB_NAME"]; const user_id = args.values.userid || finalEnv["DSQL_API_USER_ID"] || "1"; if (args.values.debug) { - (0, debug_log_1.default)({ + debugLog({ log: args.values, label: "Arguments", title: "Schema to Typedef", addTime: true, }); - (0, debug_log_1.default)({ + debugLog({ log: process.env.DSQL_FULL_ACCESS_API_KEY, label: "process.env.DSQL_FULL_ACCESS_API_KEY", title: "Schema to Typedef", addTime: true, }); - (0, debug_log_1.default)({ + debugLog({ log: process.env.DSQL_DB_NAME, label: "process.env.DSQL_DB_NAME", title: "Schema to Typedef", @@ -104,7 +90,7 @@ process.env = Object.assign(Object.assign({}, process.env), appendedEnv); throw new Error("Outfile are required"); if (!user_id || typeof user_id !== "string") throw new Error("Outfile are required"); - const schema = yield __1.default.getSchema({ + const schema = await datasquirel.getSchema({ key, database, user_id, @@ -112,7 +98,7 @@ process.env = Object.assign(Object.assign({}, process.env), appendedEnv); }); const dbSchema = schema.payload; if (args.values.debug) { - (0, debug_log_1.default)({ + debugLog({ log: schema, label: "schema", title: "Schema to Typedef", @@ -121,16 +107,16 @@ process.env = Object.assign(Object.assign({}, process.env), appendedEnv); } if (!dbSchema) throw new Error("No schema found"); - const definitions = (0, db_schema_to_type_1.default)({ dbSchema }); - const finalOutfile = path_1.default.resolve(process.cwd(), 
args.values.outfile); - const ourfileDir = path_1.default.dirname(finalOutfile); - if (!fs_1.default.existsSync(ourfileDir)) { - fs_1.default.mkdirSync(ourfileDir, { recursive: true }); + const definitions = dbSchemaToType({ dbSchema }); + const finalOutfile = path.resolve(process.cwd(), args.values.outfile); + const ourfileDir = path.dirname(finalOutfile); + if (!fs.existsSync(ourfileDir)) { + fs.mkdirSync(ourfileDir, { recursive: true }); } - fs_1.default.writeFileSync(finalOutfile, (definitions === null || definitions === void 0 ? void 0 : definitions.join("\n\n")) || "", "utf-8"); + fs.writeFileSync(finalOutfile, (definitions === null || definitions === void 0 ? void 0 : definitions.join("\n\n")) || "", "utf-8"); } catch (error) { - (0, debug_log_1.default)({ + debugLog({ log: error.message, label: "Error", title: "Schema to Typedef", @@ -139,4 +125,4 @@ process.env = Object.assign(Object.assign({}, process.env), appendedEnv); }); process.exit(1); } -}))(); +})(); diff --git a/dist/index.d.ts b/dist/index.d.ts index 4144ef4..4fc808d 100644 --- a/dist/index.d.ts +++ b/dist/index.d.ts @@ -9,9 +9,6 @@ declare global { import get from "./package-shared/actions/get"; import post from "./package-shared/actions/post"; import getSchema from "./package-shared/actions/get-schema"; -import uploadImage from "./package-shared/actions/upload-image"; -import uploadFile from "./package-shared/actions/upload-file"; -import deleteFile from "./package-shared/actions/delete-file"; import createUser from "./package-shared/actions/users/add-user"; import updateUser from "./package-shared/actions/users/update-user"; import loginUser from "./package-shared/actions/users/login-user"; @@ -44,6 +41,23 @@ import parseEnv from "./package-shared/utils/parse-env"; * Main Export */ declare const datasquirel: { + /** + * API Actions + */ + api: { + crud: { + get: typeof import("./package-shared/api/crud/get").default; + insert: typeof import("./package-shared/api/crud/post").default; + update: typeof import("./package-shared/api/crud/put").default; + delete: typeof import("./package-shared/api/crud/delete").default; + options: () => Promise; + }; + media: { + get: typeof import("./package-shared/api/media/get").default; + add: typeof import("./package-shared/api/media/post").default; + delete: typeof import("./package-shared/api/media/delete").default; + }; + }; /** * Get Action */ @@ -52,11 +66,6 @@ declare const datasquirel: { * Post Action */ post: typeof post; - media: { - uploadImage: typeof uploadImage; - uploadFile: typeof uploadFile; - deleteFile: typeof deleteFile; - }; user: { createUser: typeof createUser; deleteUser: typeof deleteUser; diff --git a/dist/index.js b/dist/index.js index 72b74f5..21db350 100644 --- a/dist/index.js +++ b/dist/index.js @@ -1,117 +1,113 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const get_1 = __importDefault(require("./package-shared/actions/get")); -const post_1 = __importDefault(require("./package-shared/actions/post")); -const get_schema_1 = __importDefault(require("./package-shared/actions/get-schema")); -const upload_image_1 = __importDefault(require("./package-shared/actions/upload-image")); -const upload_file_1 = __importDefault(require("./package-shared/actions/upload-file")); -const delete_file_1 = __importDefault(require("./package-shared/actions/delete-file")); -const add_user_1 = __importDefault(require("./package-shared/actions/users/add-user")); -const update_user_1 = __importDefault(require("./package-shared/actions/users/update-user")); -const login_user_1 = __importDefault(require("./package-shared/actions/users/login-user")); -const send_email_code_1 = __importDefault(require("./package-shared/actions/users/send-email-code")); -const logout_user_1 = __importDefault(require("./package-shared/actions/users/logout-user")); -const user_auth_1 = __importDefault(require("./package-shared/actions/users/user-auth")); -const reauth_user_1 = __importDefault(require("./package-shared/actions/users/reauth-user")); -const get_user_1 = __importDefault(require("./package-shared/actions/users/get-user")); -const google_auth_1 = __importDefault(require("./package-shared/actions/users/social/google-auth")); -const github_auth_1 = __importDefault(require("./package-shared/actions/users/social/github-auth")); -const get_token_1 = __importDefault(require("./package-shared/actions/users/get-token")); -const validate_token_1 = __importDefault(require("./package-shared/actions/users/validate-token")); -const client_1 = __importDefault(require("./client")); -const sql_generator_1 = __importDefault(require("./package-shared/functions/dsql/sql/sql-generator")); -const sql_insert_generator_1 = __importDefault(require("./package-shared/functions/dsql/sql/sql-insert-generator")); -const sql_delete_generator_1 = __importDefault(require("./package-shared/functions/dsql/sql/sql-delete-generator")); -const trim_sql_1 = __importDefault(require("./package-shared/utils/trim-sql")); -const parseCookies_1 = __importDefault(require("./package-shared/utils/backend/parseCookies")); -const conn_db_handler_1 = __importDefault(require("./package-shared/utils/db/conn-db-handler")); -const encrypt_1 = __importDefault(require("./package-shared/functions/dsql/encrypt")); -const decrypt_1 = __importDefault(require("./package-shared/functions/dsql/decrypt")); -const hashPassword_1 = __importDefault(require("./package-shared/functions/dsql/hashPassword")); -const validate_temp_email_code_1 = __importDefault(require("./package-shared/actions/users/validate-temp-email-code")); -const delete_user_1 = __importDefault(require("./package-shared/actions/users/delete-user")); -const crud_1 = __importDefault(require("./package-shared/utils/data-fetching/crud")); -const method_crud_1 = __importDefault(require("./package-shared/utils/data-fetching/method-crud")); -const debug_log_1 = __importDefault(require("./package-shared/utils/logging/debug-log")); -const parse_env_1 = __importDefault(require("./package-shared/utils/parse-env")); +import get from "./package-shared/actions/get"; +import post from "./package-shared/actions/post"; +import getSchema from "./package-shared/actions/get-schema"; +import createUser from "./package-shared/actions/users/add-user"; +import updateUser from 
"./package-shared/actions/users/update-user"; +import loginUser from "./package-shared/actions/users/login-user"; +import sendEmailCode from "./package-shared/actions/users/send-email-code"; +import logoutUser from "./package-shared/actions/users/logout-user"; +import userAuth from "./package-shared/actions/users/user-auth"; +import reAuthUser from "./package-shared/actions/users/reauth-user"; +import getUser from "./package-shared/actions/users/get-user"; +import loginWithGoogle from "./package-shared/actions/users/social/google-auth"; +import loginWithGithub from "./package-shared/actions/users/social/github-auth"; +import getToken from "./package-shared/actions/users/get-token"; +import validateToken from "./package-shared/actions/users/validate-token"; +import datasquirelClient from "./client"; +import sqlGenerator from "./package-shared/functions/dsql/sql/sql-generator"; +import sqlInsertGenerator from "./package-shared/functions/dsql/sql/sql-insert-generator"; +import sqlDeleteGenerator from "./package-shared/functions/dsql/sql/sql-delete-generator"; +import trimSql from "./package-shared/utils/trim-sql"; +import parseCookies from "./package-shared/utils/backend/parseCookies"; +import connDbHandler from "./package-shared/utils/db/conn-db-handler"; +import encrypt from "./package-shared/functions/dsql/encrypt"; +import decrypt from "./package-shared/functions/dsql/decrypt"; +import hashPassword from "./package-shared/functions/dsql/hashPassword"; +import validateTempEmailCode from "./package-shared/actions/users/validate-temp-email-code"; +import deleteUser from "./package-shared/actions/users/delete-user"; +import dsqlCrud from "./package-shared/utils/data-fetching/crud"; +import dsqlMethodCrud from "./package-shared/utils/data-fetching/method-crud"; +import debugLog from "./package-shared/utils/logging/debug-log"; +import parseEnv from "./package-shared/utils/parse-env"; +import crud from "./package-shared/api/crud"; +import media from "./package-shared/api/media"; /** * User Functions Object */ const user = { - createUser: add_user_1.default, - deleteUser: delete_user_1.default, - loginUser: login_user_1.default, - sendEmailCode: send_email_code_1.default, - logoutUser: logout_user_1.default, - userAuth: user_auth_1.default, - reAuthUser: reauth_user_1.default, - updateUser: update_user_1.default, - getUser: get_user_1.default, - getToken: get_token_1.default, - validateToken: validate_token_1.default, - validateTempEmailCode: validate_temp_email_code_1.default, + createUser: createUser, + deleteUser, + loginUser: loginUser, + sendEmailCode: sendEmailCode, + logoutUser: logoutUser, + userAuth: userAuth, + reAuthUser: reAuthUser, + updateUser: updateUser, + getUser: getUser, + getToken: getToken, + validateToken: validateToken, + validateTempEmailCode, social: { - loginWithGoogle: google_auth_1.default, - loginWithGithub: github_auth_1.default, + loginWithGoogle: loginWithGoogle, + loginWithGithub: loginWithGithub, }, }; /** - * Media Functions Object + * API Functions Object */ -const media = { - uploadImage: upload_image_1.default, - uploadFile: upload_file_1.default, - deleteFile: delete_file_1.default, +const api = { + crud, + media, }; /** * SQL Utils */ const sql = { - sqlGenerator: sql_generator_1.default, - sqlInsertGenerator: sql_insert_generator_1.default, - sqlDeleteGenerator: sql_delete_generator_1.default, - trim: trim_sql_1.default, + sqlGenerator, + sqlInsertGenerator, + sqlDeleteGenerator, + trim: trimSql, }; /** * Main Export */ const datasquirel = { + /** + * API 
Actions + */ + api, /** * Get Action */ - get: get_1.default, + get, /** * Post Action */ - post: post_1.default, - media, + post, user, - getSchema: get_schema_1.default, - client: client_1.default, + getSchema, + client: datasquirelClient, sql, utils: { crypto: { - encrypt: encrypt_1.default, - decrypt: decrypt_1.default, - hashPassword: hashPassword_1.default, + encrypt, + decrypt, + hashPassword, }, - parseCookies: parseCookies_1.default, - connDbHandler: conn_db_handler_1.default, - debugLog: debug_log_1.default, - parseEnv: parse_env_1.default, + parseCookies, + connDbHandler, + debugLog, + parseEnv, }, /** * Run Crud actions `get`, `insert`, `update`, `delete` * @note *Requires global variables `DSQL_USE_LOCAL` and `DSQL_DB_CONN` */ - crud: crud_1.default, + crud: dsqlCrud, /** * Run Crud based on request Methods `GET`, `POST`, `PUT`, `PATCH` * @note *Requires global variables `DSQL_USE_LOCAL` and `DSQL_DB_CONN` */ - methodCrud: method_crud_1.default, + methodCrud: dsqlMethodCrud, }; -exports.default = datasquirel; +export default datasquirel; diff --git a/dist/package-shared/actions/delete-file.js b/dist/package-shared/actions/delete-file.js index 0b8259d..7a0d79e 100644 --- a/dist/package-shared/actions/delete-file.js +++ b/dist/package-shared/actions/delete-file.js @@ -1,76 +1,59 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = deleteFile; -const grab_host_names_1 = __importDefault(require("../utils/grab-host-names")); +import grabHostNames from "../utils/grab-host-names"; /** * # Delete File via API */ -function deleteFile(_a) { - return __awaiter(this, arguments, void 0, function* ({ key, url, user_id, }) { - const grabedHostNames = (0, grab_host_names_1.default)(); - const { host, port, scheme } = grabedHostNames; - try { +export default async function deleteFile({ key, url, user_id, }) { + const grabedHostNames = grabHostNames(); + const { host, port, scheme } = grabedHostNames; + try { + /** + * Make https request + * + * @description make a request to datasquirel.com + */ + const httpResponse = await new Promise((resolve, reject) => { + const reqPayload = JSON.stringify({ url: url }); + const httpsRequest = scheme.request({ + method: "POST", + headers: { + "Content-Type": "application/json", + "Content-Length": Buffer.from(reqPayload).length, + Authorization: key || + process.env.DSQL_FULL_ACCESS_API_KEY || + process.env.DSQL_API_KEY, + }, + port, + hostname: host, + path: `/api/query/${user_id || grabedHostNames.user_id}/delete-file`, + }, /** - * Make https request + * Callback Function * - * @description make a request to datasquirel.com + * @description https request callback */ - const httpResponse = yield new Promise((resolve, reject) => { - const reqPayload = JSON.stringify({ url: url }); - const httpsRequest = scheme.request({ - method: "POST", - headers: { - "Content-Type": "application/json", - "Content-Length": Buffer.from(reqPayload).length, - Authorization: key || - process.env.DSQL_FULL_ACCESS_API_KEY || - process.env.DSQL_API_KEY, - }, - port, - hostname: host, - path: `/api/query/${user_id || grabedHostNames.user_id}/delete-file`, - }, - /** - * Callback Function - * - * @description https request callback - */ - (response) => { - var str = ""; - response.on("data", function (chunk) { - str += chunk; - }); - response.on("end", function () { - resolve(JSON.parse(str)); - }); - response.on("error", (err) => { - reject(err); - }); + (response) => { + var str = ""; + response.on("data", function (chunk) { + str += chunk; + }); + response.on("end", function () { + resolve(JSON.parse(str)); + }); + response.on("error", (err) => { + reject(err); }); - httpsRequest.write(reqPayload); - httpsRequest.end(); }); - return httpResponse; - } - catch ( /** @type {*} */error) { - console.log("Error deleting file: ", error.message); - return { - success: false, - payload: null, - msg: error.message, - }; - } - }); + httpsRequest.write(reqPayload); + httpsRequest.end(); + }); + return httpResponse; + } + catch ( /** @type {*} */error) { + console.log("Error deleting file: ", error.message); + return { + success: false, + payload: null, + msg: error.message, + }; + } } diff --git a/dist/package-shared/actions/get-csrf-header-name.js b/dist/package-shared/actions/get-csrf-header-name.js index 14780b6..abd3000 100644 --- a/dist/package-shared/actions/get-csrf-header-name.js +++ b/dist/package-shared/actions/get-csrf-header-name.js @@ -1,6 +1,3 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = getCsrfHeaderName; -function getCsrfHeaderName() { +export default function getCsrfHeaderName() { return "x-dsql-csrf-key"; } diff --git a/dist/package-shared/actions/get-schema.js b/dist/package-shared/actions/get-schema.js index 24f8b53..38f3fcb 100644 --- 
a/dist/package-shared/actions/get-schema.js +++ b/dist/package-shared/actions/get-schema.js @@ -1,69 +1,52 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = getSchema; -const grab_host_names_1 = __importDefault(require("../utils/grab-host-names")); +import grabHostNames from "../utils/grab-host-names"; /** * # Get Schema for Database, table, or field * */ -function getSchema(_a) { - return __awaiter(this, arguments, void 0, function* ({ key, database, field, table, user_id, env, }) { - const grabedHostNames = (0, grab_host_names_1.default)({ env }); - const { host, port, scheme } = grabedHostNames; +export default async function getSchema({ key, database, field, table, user_id, env, }) { + const grabedHostNames = grabHostNames({ env }); + const { host, port, scheme } = grabedHostNames; + /** + * Make https request + * + * @description make a request to datasquirel.com + */ + const httpResponse = await new Promise((resolve, reject) => { + const queryObject = { database, field, table }; + let query = Object.keys(queryObject) + .filter((k) => queryObject[k]) + .map((k) => `${k}=${queryObject[k]}`) + .join("&"); + scheme + .request({ + method: "GET", + headers: { + "Content-Type": "application/json", + Authorization: key || + process.env.DSQL_FULL_ACCESS_API_KEY || + process.env.DSQL_API_KEY, + }, + port, + hostname: host, + path: `/api/query/${user_id || grabedHostNames.user_id}/get-schema` + ((query === null || query === void 0 ? void 0 : query.match(/./)) ? `?${query}` : ""), + }, /** - * Make https request + * Callback Function * - * @description make a request to datasquirel.com + * @description https request callback */ - const httpResponse = yield new Promise((resolve, reject) => { - const queryObject = { database, field, table }; - let query = Object.keys(queryObject) - .filter((k) => queryObject[k]) - .map((k) => `${k}=${queryObject[k]}`) - .join("&"); - scheme - .request({ - method: "GET", - headers: { - "Content-Type": "application/json", - Authorization: key || - process.env.DSQL_FULL_ACCESS_API_KEY || - process.env.DSQL_API_KEY, - }, - port, - hostname: host, - path: `/api/query/${user_id || grabedHostNames.user_id}/get-schema` + ((query === null || query === void 0 ? void 0 : query.match(/./)) ? 
`?${query}` : ""), - }, - /** - * Callback Function - * - * @description https request callback - */ - (response) => { - var str = ""; - response.on("data", function (chunk) { - str += chunk; - }); - response.on("end", function () { - resolve(JSON.parse(str)); - }); - response.on("error", (err) => { - resolve(null); - }); - }) - .end(); - }); - return httpResponse; + (response) => { + var str = ""; + response.on("data", function (chunk) { + str += chunk; + }); + response.on("end", function () { + resolve(JSON.parse(str)); + }); + response.on("error", (err) => { + resolve(null); + }); + }) + .end(); }); + return httpResponse; } diff --git a/dist/package-shared/actions/get.js b/dist/package-shared/actions/get.js index 4677481..5205285 100644 --- a/dist/package-shared/actions/get.js +++ b/dist/package-shared/actions/get.js @@ -1,132 +1,115 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = get; -const path_1 = __importDefault(require("path")); -const fs_1 = __importDefault(require("fs")); -const grab_host_names_1 = __importDefault(require("../utils/grab-host-names")); -const get_1 = __importDefault(require("../functions/api/query/get")); -const serialize_query_1 = __importDefault(require("../utils/serialize-query")); -const grab_query_and_values_1 = __importDefault(require("../utils/grab-query-and-values")); -const debug_log_1 = __importDefault(require("../utils/logging/debug-log")); +import path from "path"; +import fs from "fs"; +import grabHostNames from "../utils/grab-host-names"; +import apiGet from "../functions/api/query/get"; +import serializeQuery from "../utils/serialize-query"; +import apiGetGrabQueryAndValues from "../utils/grab-query-and-values"; +import debugLog from "../utils/logging/debug-log"; /** * # Make a get request to Datasquirel API */ -function get(_a) { - return __awaiter(this, arguments, void 0, function* ({ key, db, query, queryValues, tableName, user_id, debug, forceLocal, }) { - const grabedHostNames = (0, grab_host_names_1.default)(); - const { host, port, scheme } = grabedHostNames; - function debugFn(log, label) { - (0, debug_log_1.default)({ log, addTime: true, title: "apiGet", label }); +export default async function get({ key, db, query, queryValues, tableName, user_id, debug, forceLocal, }) { + const grabedHostNames = grabHostNames(); + const { host, port, scheme } = grabedHostNames; + function debugFn(log, label) { + debugLog({ log, addTime: true, title: "apiGet", label }); + } + /** + * Check for local DB settings + * + * @description Look for local db settings in `.env` file and by pass the http request if available + */ + const { DSQL_DB_NAME } = process.env; + if ((DSQL_DB_NAME === null || DSQL_DB_NAME === void 0 ? 
void 0 : DSQL_DB_NAME.match(/./)) && global.DSQL_USE_LOCAL) { + let dbSchema; + try { + const localDbSchemaPath = path.resolve(process.cwd(), "dsql.schema.json"); + dbSchema = JSON.parse(fs.readFileSync(localDbSchemaPath, "utf8")); } - /** - * Check for local DB settings - * - * @description Look for local db settings in `.env` file and by pass the http request if available - */ - const { DSQL_DB_NAME } = process.env; - if ((DSQL_DB_NAME === null || DSQL_DB_NAME === void 0 ? void 0 : DSQL_DB_NAME.match(/./)) && global.DSQL_USE_LOCAL) { - let dbSchema; - try { - const localDbSchemaPath = path_1.default.resolve(process.cwd(), "dsql.schema.json"); - dbSchema = JSON.parse(fs_1.default.readFileSync(localDbSchemaPath, "utf8")); - } - catch (error) { } - if (debug) { - debugFn("Running Locally ..."); - } - return yield (0, get_1.default)({ - dbFullName: DSQL_DB_NAME, - query, - queryValues, - tableName, - dbSchema, - debug, - forceLocal, - }); + catch (error) { } + if (debug) { + debugFn("Running Locally ..."); } - /** - * Make https request - * - * @description make a request to datasquirel.com - */ - const httpResponse = yield new Promise((resolve, reject) => { - const queryAndValues = (0, grab_query_and_values_1.default)({ - query, - values: queryValues, - }); - const queryObject = { - db: process.env.DSQL_API_DB_NAME || String(db), - query: queryAndValues.query, - queryValues: queryAndValues.valuesString, - tableName, - debug, - }; - if (debug) { - debugFn(queryObject, "queryObject"); - } - const queryString = (0, serialize_query_1.default)(Object.assign({}, queryObject)); - if (debug) { - debugFn(queryString, "queryString"); - } - let path = `/api/query/${user_id || grabedHostNames.user_id}/get${queryString}`; - if (debug) { - debugFn(path, "path"); - } - const requestObject = { - method: "GET", - headers: { - "Content-Type": "application/json", - Authorization: key || - process.env.DSQL_READ_ONLY_API_KEY || - process.env.DSQL_FULL_ACCESS_API_KEY || - process.env.DSQL_API_KEY, - }, - port, - hostname: host, - path, - }; - scheme - .request(requestObject, - /** - * Callback Function - * - * @description https request callback - */ - (response) => { - var str = ""; - response.on("data", function (chunk) { - str += chunk; - }); - response.on("end", function () { - try { - resolve(JSON.parse(str)); - } - catch ( /** @type {any} */error) { - reject({ - error: error.message, - result: str, - }); - } - }); - response.on("error", (err) => { - console.log("DSQL get Error,", err.message); - resolve(null); - }); - }) - .end(); + return await apiGet({ + dbFullName: DSQL_DB_NAME, + query, + queryValues, + tableName, + dbSchema, + debug, + forceLocal, }); - return httpResponse; + } + /** + * Make https request + * + * @description make a request to datasquirel.com + */ + const httpResponse = await new Promise((resolve, reject) => { + const queryAndValues = apiGetGrabQueryAndValues({ + query, + values: queryValues, + }); + const queryObject = { + db: process.env.DSQL_API_DB_NAME || String(db), + query: queryAndValues.query, + queryValues: queryAndValues.valuesString, + tableName, + debug, + }; + if (debug) { + debugFn(queryObject, "queryObject"); + } + const queryString = serializeQuery(Object.assign({}, queryObject)); + if (debug) { + debugFn(queryString, "queryString"); + } + let path = `/api/query/${user_id || grabedHostNames.user_id}/get${queryString}`; + if (debug) { + debugFn(path, "path"); + } + const requestObject = { + method: "GET", + headers: { + "Content-Type": "application/json", + 
Authorization: key || + process.env.DSQL_READ_ONLY_API_KEY || + process.env.DSQL_FULL_ACCESS_API_KEY || + process.env.DSQL_API_KEY, + }, + port, + hostname: host, + path, + }; + scheme + .request(requestObject, + /** + * Callback Function + * + * @description https request callback + */ + (response) => { + var str = ""; + response.on("data", function (chunk) { + str += chunk; + }); + response.on("end", function () { + try { + resolve(JSON.parse(str)); + } + catch ( /** @type {any} */error) { + reject({ + error: error.message, + result: str, + }); + } + }); + response.on("error", (err) => { + console.log("DSQL get Error,", err.message); + resolve(null); + }); + }) + .end(); }); + return httpResponse; } diff --git a/dist/package-shared/actions/post.js b/dist/package-shared/actions/post.js index 0337540..d502a15 100644 --- a/dist/package-shared/actions/post.js +++ b/dist/package-shared/actions/post.js @@ -1,149 +1,132 @@ -"use strict"; // @ts-check -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = post; -const path_1 = __importDefault(require("path")); -const fs_1 = __importDefault(require("fs")); -const grab_host_names_1 = __importDefault(require("../utils/grab-host-names")); -const post_1 = __importDefault(require("../functions/api/query/post")); -const debug_log_1 = __importDefault(require("../utils/logging/debug-log")); +import path from "path"; +import fs from "fs"; +import grabHostNames from "../utils/grab-host-names"; +import apiPost from "../functions/api/query/post"; +import debugLog from "../utils/logging/debug-log"; /** * # Make a post request to Datasquirel API */ -function post(_a) { - return __awaiter(this, arguments, void 0, function* ({ key, query, queryValues, database, tableName, user_id, forceLocal, debug, }) { - const grabedHostNames = (0, grab_host_names_1.default)(); - const { host, port, scheme } = grabedHostNames; +export default async function post({ key, query, queryValues, database, tableName, user_id, forceLocal, debug, }) { + const grabedHostNames = grabHostNames(); + const { host, port, scheme } = grabedHostNames; + if (debug) { + debugLog({ + log: grabedHostNames, + addTime: true, + label: "grabedHostNames", + }); + } + /** + * Check for local DB settings + * + * @description Look for local db settings in `.env` file and by pass the http request if available + */ + const { DSQL_DB_HOST, DSQL_DB_USERNAME, DSQL_DB_PASSWORD, DSQL_DB_NAME } = process.env; + if ((DSQL_DB_HOST === null || DSQL_DB_HOST === void 0 ? void 0 : DSQL_DB_HOST.match(/./)) && + (DSQL_DB_USERNAME === null || DSQL_DB_USERNAME === void 0 ? void 0 : DSQL_DB_USERNAME.match(/./)) && + (DSQL_DB_PASSWORD === null || DSQL_DB_PASSWORD === void 0 ? 
void 0 : DSQL_DB_PASSWORD.match(/./)) && + (DSQL_DB_NAME === null || DSQL_DB_NAME === void 0 ? void 0 : DSQL_DB_NAME.match(/./)) && + global.DSQL_USE_LOCAL) { + /** @type {import("../types").DSQL_DatabaseSchemaType | undefined} */ + let dbSchema; + try { + const localDbSchemaPath = path.resolve(process.cwd(), "dsql.schema.json"); + dbSchema = JSON.parse(fs.readFileSync(localDbSchemaPath, "utf8")); + } + catch (error) { } if (debug) { - (0, debug_log_1.default)({ - log: grabedHostNames, + debugLog({ + log: "Using Local DB ...", addTime: true, - label: "grabedHostNames", }); } - /** - * Check for local DB settings - * - * @description Look for local db settings in `.env` file and by pass the http request if available - */ - const { DSQL_DB_HOST, DSQL_DB_USERNAME, DSQL_DB_PASSWORD, DSQL_DB_NAME } = process.env; - if ((DSQL_DB_HOST === null || DSQL_DB_HOST === void 0 ? void 0 : DSQL_DB_HOST.match(/./)) && - (DSQL_DB_USERNAME === null || DSQL_DB_USERNAME === void 0 ? void 0 : DSQL_DB_USERNAME.match(/./)) && - (DSQL_DB_PASSWORD === null || DSQL_DB_PASSWORD === void 0 ? void 0 : DSQL_DB_PASSWORD.match(/./)) && - (DSQL_DB_NAME === null || DSQL_DB_NAME === void 0 ? void 0 : DSQL_DB_NAME.match(/./)) && - global.DSQL_USE_LOCAL) { - /** @type {import("../types").DSQL_DatabaseSchemaType | undefined} */ - let dbSchema; - try { - const localDbSchemaPath = path_1.default.resolve(process.cwd(), "dsql.schema.json"); - dbSchema = JSON.parse(fs_1.default.readFileSync(localDbSchemaPath, "utf8")); - } - catch (error) { } - if (debug) { - (0, debug_log_1.default)({ - log: "Using Local DB ...", - addTime: true, - }); - } - return yield (0, post_1.default)({ - dbFullName: DSQL_DB_NAME, - query, - dbSchema, - queryValues, - tableName, - forceLocal, - debug, - }); + return await apiPost({ + dbFullName: database || DSQL_DB_NAME, + query, + dbSchema, + queryValues, + tableName, + forceLocal, + debug, + }); + } + /** + * Make https request + * + * @description make a request to datasquirel.com + */ + const httpResponse = await new Promise((resolve, reject) => { + var _a; + const reqPayloadString = JSON.stringify({ + query, + queryValues, + database: process.env.DSQL_API_DB_NAME || database, + tableName: tableName ? tableName : null, + }).replace(/\n|\r|\n\r/gm, ""); + try { + JSON.parse(reqPayloadString); } + catch (error) { + (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `Error Parsing HTTP response for post action`, error); + return { + success: false, + payload: null, + error: "Query object is invalid. Please Check query data values", + }; + } + const reqPayload = reqPayloadString; + const requPath = `/api/query/${user_id || grabedHostNames.user_id}/post`; + const httpsRequest = scheme.request({ + method: "POST", + headers: { + "Content-Type": "application/json", + "Content-Length": Buffer.from(reqPayload).length, + Authorization: key || + process.env.DSQL_FULL_ACCESS_API_KEY || + process.env.DSQL_API_KEY, + }, + port, + hostname: host, + path: requPath, + }, /** - * Make https request + * Callback Function * - * @description make a request to datasquirel.com + * @description https request callback */ - const httpResponse = yield new Promise((resolve, reject) => { - var _a; - const reqPayloadString = JSON.stringify({ - query, - queryValues, - database: process.env.DSQL_API_DB_NAME || database, - tableName: tableName ? 
tableName : null, - }).replace(/\n|\r|\n\r/gm, ""); - try { - JSON.parse(reqPayloadString); - } - catch (error) { - (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `Error Parsing HTTP response for post action`, error); - return { - success: false, - payload: null, - error: "Query object is invalid. Please Check query data values", - }; - } - const reqPayload = reqPayloadString; - const requPath = `/api/query/${user_id || grabedHostNames.user_id}/post`; - const httpsRequest = scheme.request({ - method: "POST", - headers: { - "Content-Type": "application/json", - "Content-Length": Buffer.from(reqPayload).length, - Authorization: key || - process.env.DSQL_FULL_ACCESS_API_KEY || - process.env.DSQL_API_KEY, - }, - port, - hostname: host, - path: requPath, - }, - /** - * Callback Function - * - * @description https request callback - */ - (response) => { - var str = ""; - response.on("data", function (chunk) { - str += chunk; - }); - response.on("end", function () { - try { - resolve(JSON.parse(str)); - } - catch (error) { - console.log("Route ERROR:", error.message); - resolve({ - success: false, - payload: null, - error: error.message, - errPayload: str, - }); - } - }); - response.on("error", (err) => { + (response) => { + var str = ""; + response.on("data", function (chunk) { + str += chunk; + }); + response.on("end", function () { + try { + resolve(JSON.parse(str)); + } + catch (error) { + console.log("Route ERROR:", error.message); resolve({ success: false, payload: null, - error: err.message, + error: error.message, + errPayload: str, }); + } + }); + response.on("error", (err) => { + resolve({ + success: false, + payload: null, + error: err.message, }); }); - httpsRequest.write(reqPayload); - httpsRequest.on("error", (error) => { - console.log("HTTPS request ERROR =>", error); - }); - httpsRequest.end(); }); - return httpResponse; + httpsRequest.write(reqPayload); + httpsRequest.on("error", (error) => { + console.log("HTTPS request ERROR =>", error); + }); + httpsRequest.end(); }); + return httpResponse; } diff --git a/dist/package-shared/actions/upload-file.js b/dist/package-shared/actions/upload-file.js index df4d8d1..1466059 100644 --- a/dist/package-shared/actions/upload-file.js +++ b/dist/package-shared/actions/upload-file.js @@ -1,78 +1,61 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = uploadImage; -const grab_host_names_1 = __importDefault(require("../utils/grab-host-names")); +import grabHostNames from "../utils/grab-host-names"; /** * # Upload File via API */ -function uploadImage(_a) { - return __awaiter(this, arguments, void 0, function* ({ key, payload, user_id, useDefault, }) { - var _b; - const grabedHostNames = (0, grab_host_names_1.default)({ useDefault }); - const { host, port, scheme } = grabedHostNames; - try { +export default async function uploadImage({ key, payload, user_id, useDefault, }) { + var _a; + const grabedHostNames = grabHostNames({ useDefault }); + const { host, port, scheme } = grabedHostNames; + try { + /** + * Make https request + * + * @description make a request to datasquirel.com + */ + const httpResponse = await new Promise((resolve, reject) => { + const reqPayload = JSON.stringify(payload); + const httpsRequest = scheme.request({ + method: "POST", + headers: { + "Content-Type": "application/json", + "Content-Length": Buffer.from(reqPayload).length, + Authorization: key || + process.env.DSQL_FULL_ACCESS_API_KEY || + process.env.DSQL_API_KEY, + }, + port, + hostname: host, + path: `/api/query/${user_id || grabedHostNames.user_id}/add-file`, + }, /** - * Make https request + * Callback Function * - * @description make a request to datasquirel.com + * @description https request callback */ - const httpResponse = yield new Promise((resolve, reject) => { - const reqPayload = JSON.stringify(payload); - const httpsRequest = scheme.request({ - method: "POST", - headers: { - "Content-Type": "application/json", - "Content-Length": Buffer.from(reqPayload).length, - Authorization: key || - process.env.DSQL_FULL_ACCESS_API_KEY || - process.env.DSQL_API_KEY, - }, - port, - hostname: host, - path: `/api/query/${user_id || grabedHostNames.user_id}/add-file`, - }, - /** - * Callback Function - * - * @description https request callback - */ - (response) => { - var str = ""; - response.on("data", function (chunk) { - str += chunk; - }); - response.on("end", function () { - resolve(JSON.parse(str)); - }); - response.on("error", (err) => { - reject(err); - }); + (response) => { + var str = ""; + response.on("data", function (chunk) { + str += chunk; + }); + response.on("end", function () { + resolve(JSON.parse(str)); + }); + response.on("error", (err) => { + reject(err); }); - httpsRequest.write(reqPayload); - httpsRequest.end(); }); - return httpResponse; - } - catch (error) { - console.log("Error in uploading file: ", error.message); - (_b = global.ERROR_CALLBACK) === null || _b === void 0 ? void 0 : _b.call(global, `Error Uploading File`, error); - return { - success: false, - payload: null, - msg: error.message, - }; - } - }); + httpsRequest.write(reqPayload); + httpsRequest.end(); + }); + return httpResponse; + } + catch (error) { + console.log("Error in uploading file: ", error.message); + (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? 
void 0 : _a.call(global, `Error Uploading File`, error); + return { + success: false, + payload: null, + msg: error.message, + }; + } } diff --git a/dist/package-shared/actions/upload-image.js b/dist/package-shared/actions/upload-image.js index 5bd9326..9b9b8d4 100644 --- a/dist/package-shared/actions/upload-image.js +++ b/dist/package-shared/actions/upload-image.js @@ -1,78 +1,61 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = uploadImage; -const grab_host_names_1 = __importDefault(require("../utils/grab-host-names")); +import grabHostNames from "../utils/grab-host-names"; /** * # Upload Image via API */ -function uploadImage(_a) { - return __awaiter(this, arguments, void 0, function* ({ key, payload, user_id, useDefault, }) { - var _b; - const grabedHostNames = (0, grab_host_names_1.default)({ useDefault }); - const { host, port, scheme } = grabedHostNames; - try { +export default async function uploadImage({ key, payload, user_id, useDefault, }) { + var _a; + const grabedHostNames = grabHostNames({ useDefault }); + const { host, port, scheme } = grabedHostNames; + try { + /** + * Make https request + * + * @description make a request to datasquirel.com + */ + const httpResponse = await new Promise((resolve, reject) => { + const reqPayload = JSON.stringify(payload); + const httpsRequest = scheme.request({ + method: "POST", + headers: { + "Content-Type": "application/json", + "Content-Length": Buffer.from(reqPayload).length, + Authorization: key || + process.env.DSQL_FULL_ACCESS_API_KEY || + process.env.DSQL_API_KEY, + }, + port, + hostname: host, + path: `/api/query/${user_id || grabedHostNames.user_id}/add-image`, + }, /** - * Make https request + * Callback Function * - * @description make a request to datasquirel.com + * @description https request callback */ - const httpResponse = yield new Promise((resolve, reject) => { - const reqPayload = JSON.stringify(payload); - const httpsRequest = scheme.request({ - method: "POST", - headers: { - "Content-Type": "application/json", - "Content-Length": Buffer.from(reqPayload).length, - Authorization: key || - process.env.DSQL_FULL_ACCESS_API_KEY || - process.env.DSQL_API_KEY, - }, - port, - hostname: host, - path: `/api/query/${user_id || grabedHostNames.user_id}/add-image`, - }, - /** - * Callback Function - * - * @description https request callback - */ - (response) => { - var str = ""; - response.on("data", function (chunk) { - str += chunk; - }); - response.on("end", function () { - resolve(JSON.parse(str)); - }); - response.on("error", (err) => { - reject(err); - }); + (response) => { + var str = ""; + response.on("data", function (chunk) { + str += chunk; + }); + response.on("end", function () { + 
resolve(JSON.parse(str)); + }); + response.on("error", (err) => { + reject(err); }); - httpsRequest.write(reqPayload); - httpsRequest.end(); }); - return httpResponse; - } - catch (error) { - console.log("Error in uploading image: ", error.message); - (_b = global.ERROR_CALLBACK) === null || _b === void 0 ? void 0 : _b.call(global, `Error Uploading Image`, error); - return { - success: false, - payload: null, - msg: error.message, - }; - } - }); + httpsRequest.write(reqPayload); + httpsRequest.end(); + }); + return httpResponse; + } + catch (error) { + console.log("Error in uploading image: ", error.message); + (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `Error Uploading Image`, error); + return { + success: false, + payload: null, + msg: error.message, + }; + } } diff --git a/dist/package-shared/actions/users/add-user.js b/dist/package-shared/actions/users/add-user.js index 96df201..b2e4d61 100644 --- a/dist/package-shared/actions/users/add-user.js +++ b/dist/package-shared/actions/users/add-user.js @@ -1,98 +1,81 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = addUser; -const path_1 = __importDefault(require("path")); -const fs_1 = __importDefault(require("fs")); -const grab_host_names_1 = __importDefault(require("../../utils/grab-host-names")); -const api_create_user_1 = __importDefault(require("../../functions/api/users/api-create-user")); +import path from "path"; +import fs from "fs"; +import grabHostNames from "../../utils/grab-host-names"; +import apiCreateUser from "../../functions/api/users/api-create-user"; /** * # Add User to Database */ -function addUser(_a) { - return __awaiter(this, arguments, void 0, function* ({ key, payload, database, encryptionKey, user_id, apiUserId, }) { - /** - * Check for local DB settings - * - * @description Look for local db settings in `.env` file and by pass the http request if available - */ - const { DSQL_DB_HOST, DSQL_DB_USERNAME, DSQL_DB_PASSWORD, DSQL_DB_NAME, DSQL_API_USER_ID, } = process.env; - const grabedHostNames = (0, grab_host_names_1.default)(); - const { host, port, scheme } = grabedHostNames; - if ((DSQL_DB_HOST === null || DSQL_DB_HOST === void 0 ? void 0 : DSQL_DB_HOST.match(/./)) && - (DSQL_DB_USERNAME === null || DSQL_DB_USERNAME === void 0 ? void 0 : DSQL_DB_USERNAME.match(/./)) && - (DSQL_DB_PASSWORD === null || DSQL_DB_PASSWORD === void 0 ? void 0 : DSQL_DB_PASSWORD.match(/./)) && - (DSQL_DB_NAME === null || DSQL_DB_NAME === void 0 ? 
void 0 : DSQL_DB_NAME.match(/./)) && - global.DSQL_USE_LOCAL) { - /** @type {import("../../types").DSQL_DatabaseSchemaType | undefined} */ - let dbSchema; - try { - const localDbSchemaPath = path_1.default.resolve(process.cwd(), "dsql.schema.json"); - dbSchema = JSON.parse(fs_1.default.readFileSync(localDbSchemaPath, "utf8")); - } - catch (error) { } - return yield (0, api_create_user_1.default)({ - database: DSQL_DB_NAME, - encryptionKey, - payload, - userId: apiUserId, - }); +export default async function addUser({ key, payload, database, encryptionKey, user_id, apiUserId, }) { + /** + * Check for local DB settings + * + * @description Look for local db settings in `.env` file and by pass the http request if available + */ + const { DSQL_DB_HOST, DSQL_DB_USERNAME, DSQL_DB_PASSWORD, DSQL_DB_NAME, DSQL_API_USER_ID, } = process.env; + const grabedHostNames = grabHostNames(); + const { host, port, scheme } = grabedHostNames; + if ((DSQL_DB_HOST === null || DSQL_DB_HOST === void 0 ? void 0 : DSQL_DB_HOST.match(/./)) && + (DSQL_DB_USERNAME === null || DSQL_DB_USERNAME === void 0 ? void 0 : DSQL_DB_USERNAME.match(/./)) && + (DSQL_DB_PASSWORD === null || DSQL_DB_PASSWORD === void 0 ? void 0 : DSQL_DB_PASSWORD.match(/./)) && + (DSQL_DB_NAME === null || DSQL_DB_NAME === void 0 ? void 0 : DSQL_DB_NAME.match(/./)) && + global.DSQL_USE_LOCAL) { + /** @type {import("../../types").DSQL_DatabaseSchemaType | undefined} */ + let dbSchema; + try { + const localDbSchemaPath = path.resolve(process.cwd(), "dsql.schema.json"); + dbSchema = JSON.parse(fs.readFileSync(localDbSchemaPath, "utf8")); } - /** - * Make https request - * - * @description make a request to datasquirel.com - */ - const httpResponse = yield new Promise((resolve, reject) => { - const reqPayload = JSON.stringify({ - payload, - database, - encryptionKey, - }); - const httpsRequest = scheme.request({ - method: "POST", - headers: { - "Content-Type": "application/json", - "Content-Length": Buffer.from(reqPayload).length, - Authorization: key || - process.env.DSQL_FULL_ACCESS_API_KEY || - process.env.DSQL_API_KEY, - }, - port, - hostname: host, - path: `/api/user/${user_id || grabedHostNames.user_id}/add-user`, - }, - /** - * Callback Function - * - * @description https request callback - */ - (response) => { - var str = ""; - response.on("data", function (chunk) { - str += chunk; - }); - response.on("end", function () { - resolve(JSON.parse(str)); - }); - response.on("error", (err) => { - reject(err); - }); - }); - httpsRequest.write(reqPayload); - httpsRequest.end(); + catch (error) { } + return await apiCreateUser({ + database: DSQL_DB_NAME, + encryptionKey, + payload, + userId: apiUserId, }); - return httpResponse; + } + /** + * Make https request + * + * @description make a request to datasquirel.com + */ + const httpResponse = await new Promise((resolve, reject) => { + const reqPayload = JSON.stringify({ + payload, + database, + encryptionKey, + }); + const httpsRequest = scheme.request({ + method: "POST", + headers: { + "Content-Type": "application/json", + "Content-Length": Buffer.from(reqPayload).length, + Authorization: key || + process.env.DSQL_FULL_ACCESS_API_KEY || + process.env.DSQL_API_KEY, + }, + port, + hostname: host, + path: `/api/user/${user_id || grabedHostNames.user_id}/add-user`, + }, + /** + * Callback Function + * + * @description https request callback + */ + (response) => { + var str = ""; + response.on("data", function (chunk) { + str += chunk; + }); + response.on("end", function () { + resolve(JSON.parse(str)); + }); 
+ response.on("error", (err) => { + reject(err); + }); + }); + httpsRequest.write(reqPayload); + httpsRequest.end(); }); + return httpResponse; } diff --git a/dist/package-shared/actions/users/delete-user.js b/dist/package-shared/actions/users/delete-user.js index e0b0dda..dc70dd2 100644 --- a/dist/package-shared/actions/users/delete-user.js +++ b/dist/package-shared/actions/users/delete-user.js @@ -1,95 +1,78 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = deleteUser; -const path_1 = __importDefault(require("path")); -const fs_1 = __importDefault(require("fs")); -const grab_host_names_1 = __importDefault(require("../../utils/grab-host-names")); -const api_delete_user_1 = __importDefault(require("../../functions/api/users/api-delete-user")); +import path from "path"; +import fs from "fs"; +import grabHostNames from "../../utils/grab-host-names"; +import apiDeleteUser from "../../functions/api/users/api-delete-user"; /** * # Update User */ -function deleteUser(_a) { - return __awaiter(this, arguments, void 0, function* ({ key, database, user_id, deletedUserId, }) { - /** - * Check for local DB settings - * - * @description Look for local db settings in `.env` file and by pass the http request if available - */ - const { DSQL_DB_HOST, DSQL_DB_USERNAME, DSQL_DB_PASSWORD, DSQL_DB_NAME } = process.env; - const grabedHostNames = (0, grab_host_names_1.default)(); - const { host, port, scheme } = grabedHostNames; - if ((DSQL_DB_HOST === null || DSQL_DB_HOST === void 0 ? void 0 : DSQL_DB_HOST.match(/./)) && - (DSQL_DB_USERNAME === null || DSQL_DB_USERNAME === void 0 ? void 0 : DSQL_DB_USERNAME.match(/./)) && - (DSQL_DB_PASSWORD === null || DSQL_DB_PASSWORD === void 0 ? void 0 : DSQL_DB_PASSWORD.match(/./)) && - (DSQL_DB_NAME === null || DSQL_DB_NAME === void 0 ? 
void 0 : DSQL_DB_NAME.match(/./)) && - global.DSQL_USE_LOCAL) { - /** @type {import("../../types").DSQL_DatabaseSchemaType | undefined} */ - let dbSchema; - try { - const localDbSchemaPath = path_1.default.resolve(process.cwd(), "dsql.schema.json"); - dbSchema = JSON.parse(fs_1.default.readFileSync(localDbSchemaPath, "utf8")); - } - catch (error) { } - return yield (0, api_delete_user_1.default)({ - dbFullName: DSQL_DB_NAME, - deletedUserId, - }); +export default async function deleteUser({ key, database, user_id, deletedUserId, }) { + /** + * Check for local DB settings + * + * @description Look for local db settings in `.env` file and by pass the http request if available + */ + const { DSQL_DB_HOST, DSQL_DB_USERNAME, DSQL_DB_PASSWORD, DSQL_DB_NAME } = process.env; + const grabedHostNames = grabHostNames(); + const { host, port, scheme } = grabedHostNames; + if ((DSQL_DB_HOST === null || DSQL_DB_HOST === void 0 ? void 0 : DSQL_DB_HOST.match(/./)) && + (DSQL_DB_USERNAME === null || DSQL_DB_USERNAME === void 0 ? void 0 : DSQL_DB_USERNAME.match(/./)) && + (DSQL_DB_PASSWORD === null || DSQL_DB_PASSWORD === void 0 ? void 0 : DSQL_DB_PASSWORD.match(/./)) && + (DSQL_DB_NAME === null || DSQL_DB_NAME === void 0 ? void 0 : DSQL_DB_NAME.match(/./)) && + global.DSQL_USE_LOCAL) { + /** @type {import("../../types").DSQL_DatabaseSchemaType | undefined} */ + let dbSchema; + try { + const localDbSchemaPath = path.resolve(process.cwd(), "dsql.schema.json"); + dbSchema = JSON.parse(fs.readFileSync(localDbSchemaPath, "utf8")); } + catch (error) { } + return await apiDeleteUser({ + dbFullName: DSQL_DB_NAME, + deletedUserId, + }); + } + /** + * Make https request + * + * @description make a request to datasquirel.com + */ + const httpResponse = (await new Promise((resolve, reject) => { + const reqPayload = JSON.stringify({ + database, + deletedUserId, + }); + const httpsRequest = scheme.request({ + method: "POST", + headers: { + "Content-Type": "application/json", + "Content-Length": Buffer.from(reqPayload).length, + Authorization: process.env.DSQL_FULL_ACCESS_API_KEY || + process.env.DSQL_API_KEY || + key, + }, + port, + hostname: host, + path: `/api/user/${user_id || grabedHostNames.user_id}/delete-user`, + }, /** - * Make https request + * Callback Function * - * @description make a request to datasquirel.com + * @description https request callback */ - const httpResponse = (yield new Promise((resolve, reject) => { - const reqPayload = JSON.stringify({ - database, - deletedUserId, + (response) => { + var str = ""; + response.on("data", function (chunk) { + str += chunk; }); - const httpsRequest = scheme.request({ - method: "POST", - headers: { - "Content-Type": "application/json", - "Content-Length": Buffer.from(reqPayload).length, - Authorization: process.env.DSQL_FULL_ACCESS_API_KEY || - process.env.DSQL_API_KEY || - key, - }, - port, - hostname: host, - path: `/api/user/${user_id || grabedHostNames.user_id}/delete-user`, - }, - /** - * Callback Function - * - * @description https request callback - */ - (response) => { - var str = ""; - response.on("data", function (chunk) { - str += chunk; - }); - response.on("end", function () { - resolve(JSON.parse(str)); - }); - response.on("error", (err) => { - reject(err); - }); + response.on("end", function () { + resolve(JSON.parse(str)); }); - httpsRequest.write(reqPayload); - httpsRequest.end(); - })); - return httpResponse; - }); + response.on("error", (err) => { + reject(err); + }); + }); + httpsRequest.write(reqPayload); + httpsRequest.end(); + })); + 
return httpResponse; } diff --git a/dist/package-shared/actions/users/get-token.js b/dist/package-shared/actions/users/get-token.js index 49bf723..7448549 100644 --- a/dist/package-shared/actions/users/get-token.js +++ b/dist/package-shared/actions/users/get-token.js @@ -1,19 +1,13 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = getToken; -const decrypt_1 = __importDefault(require("../../functions/dsql/decrypt")); -const get_auth_cookie_names_1 = __importDefault(require("../../functions/backend/cookies/get-auth-cookie-names")); -const parseCookies_1 = __importDefault(require("../../utils/backend/parseCookies")); +import decrypt from "../../functions/dsql/decrypt"; +import getAuthCookieNames from "../../functions/backend/cookies/get-auth-cookie-names"; +import parseCookies from "../../utils/backend/parseCookies"; /** * Get just the access token for user * ============================================================================== * @description This Function takes in a request object and returns a user token * string and csrf token string */ -function getToken({ request, encryptionKey, encryptionSalt, cookieString, }) { +export default function getToken({ request, encryptionKey, encryptionSalt, cookieString, }) { var _a; try { /** @@ -21,8 +15,8 @@ function getToken({ request, encryptionKey, encryptionSalt, cookieString, }) { * * @description Grab the payload */ - const cookies = (0, parseCookies_1.default)({ request, cookieString }); - const keynames = (0, get_auth_cookie_names_1.default)(); + const cookies = parseCookies({ request, cookieString }); + const keynames = getAuthCookieNames(); const authKeyName = keynames.keyCookieName; const csrfName = keynames.csrfCookieName; const key = cookies[authKeyName]; @@ -32,7 +26,7 @@ function getToken({ request, encryptionKey, encryptionSalt, cookieString, }) { * * @description Grab the payload */ - let userPayload = (0, decrypt_1.default)({ + let userPayload = decrypt({ encryptedString: key, encryptionKey, encryptionSalt, diff --git a/dist/package-shared/actions/users/get-user.js b/dist/package-shared/actions/users/get-user.js index e0f8b11..21b1426 100644 --- a/dist/package-shared/actions/users/get-user.js +++ b/dist/package-shared/actions/users/get-user.js @@ -1,120 +1,103 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = getUser; -const path_1 = __importDefault(require("path")); -const fs_1 = __importDefault(require("fs")); -const grab_host_names_1 = __importDefault(require("../../utils/grab-host-names")); -const api_get_user_1 = __importDefault(require("../../functions/api/users/api-get-user")); +import path from "path"; +import fs from "fs"; +import grabHostNames from "../../utils/grab-host-names"; +import apiGetUser from "../../functions/api/users/api-get-user"; /** * # Get User */ -function getUser(_a) { - return __awaiter(this, arguments, void 0, function* ({ key, userId, database, fields, apiUserId, }) { - /** - * Initialize - */ - const defaultFields = [ - "id", - "first_name", - "last_name", - "email", - "username", - "image", - "image_thumbnail", - "verification_status", - "date_created", - "date_created_code", - "date_created_timestamp", - "date_updated", - "date_updated_code", - "date_updated_timestamp", - ]; - const updatedFields = fields && fields[0] ? [...defaultFields, ...fields] : defaultFields; - const reqPayload = JSON.stringify({ - userId, - database, - fields: [...new Set(updatedFields)], - }); - const grabedHostNames = (0, grab_host_names_1.default)(); - const { host, port, scheme } = grabedHostNames; - /** - * Check for local DB settings - * - * @description Look for local db settings in `.env` file and by pass the http request if available - */ - const { DSQL_DB_HOST, DSQL_DB_USERNAME, DSQL_DB_PASSWORD, DSQL_DB_NAME } = process.env; - if ((DSQL_DB_HOST === null || DSQL_DB_HOST === void 0 ? void 0 : DSQL_DB_HOST.match(/./)) && - (DSQL_DB_USERNAME === null || DSQL_DB_USERNAME === void 0 ? void 0 : DSQL_DB_USERNAME.match(/./)) && - (DSQL_DB_PASSWORD === null || DSQL_DB_PASSWORD === void 0 ? void 0 : DSQL_DB_PASSWORD.match(/./)) && - (DSQL_DB_NAME === null || DSQL_DB_NAME === void 0 ? 
void 0 : DSQL_DB_NAME.match(/./)) && - global.DSQL_USE_LOCAL) { - /** @type {import("../../types").DSQL_DatabaseSchemaType | undefined} */ - let dbSchema; - try { - const localDbSchemaPath = path_1.default.resolve(process.cwd(), "dsql.schema.json"); - dbSchema = JSON.parse(fs_1.default.readFileSync(localDbSchemaPath, "utf8")); - } - catch (error) { } - return yield (0, api_get_user_1.default)({ - userId, - fields: [...new Set(updatedFields)], - dbFullName: DSQL_DB_NAME, - }); - } - /** - * Make https request - * - * @description make a request to datasquirel.com - */ - const httpResponse = yield new Promise((resolve, reject) => { - const httpsRequest = scheme.request({ - method: "POST", - headers: { - "Content-Type": "application/json", - "Content-Length": Buffer.from(reqPayload).length, - Authorization: key || - process.env.DSQL_FULL_ACCESS_API_KEY || - process.env.DSQL_API_KEY, - }, - port, - hostname: host, - path: `/api/user/${apiUserId || grabedHostNames.user_id}/get-user`, - }, - /** - * Callback Function - * - * @description https request callback - */ - (response) => { - var str = ""; - response.on("data", function (chunk) { - str += chunk; - }); - response.on("end", function () { - resolve(JSON.parse(str)); - }); - response.on("error", (err) => { - reject(err); - }); - }); - httpsRequest.write(reqPayload); - httpsRequest.end(); - }); - /** ********************************************** */ - /** ********************************************** */ - /** ********************************************** */ - return httpResponse; +export default async function getUser({ key, userId, database, fields, apiUserId, }) { + /** + * Initialize + */ + const defaultFields = [ + "id", + "first_name", + "last_name", + "email", + "username", + "image", + "image_thumbnail", + "verification_status", + "date_created", + "date_created_code", + "date_created_timestamp", + "date_updated", + "date_updated_code", + "date_updated_timestamp", + ]; + const updatedFields = fields && fields[0] ? [...defaultFields, ...fields] : defaultFields; + const reqPayload = JSON.stringify({ + userId, + database, + fields: [...new Set(updatedFields)], }); + const grabedHostNames = grabHostNames(); + const { host, port, scheme } = grabedHostNames; + /** + * Check for local DB settings + * + * @description Look for local db settings in `.env` file and by pass the http request if available + */ + const { DSQL_DB_HOST, DSQL_DB_USERNAME, DSQL_DB_PASSWORD, DSQL_DB_NAME } = process.env; + if ((DSQL_DB_HOST === null || DSQL_DB_HOST === void 0 ? void 0 : DSQL_DB_HOST.match(/./)) && + (DSQL_DB_USERNAME === null || DSQL_DB_USERNAME === void 0 ? void 0 : DSQL_DB_USERNAME.match(/./)) && + (DSQL_DB_PASSWORD === null || DSQL_DB_PASSWORD === void 0 ? void 0 : DSQL_DB_PASSWORD.match(/./)) && + (DSQL_DB_NAME === null || DSQL_DB_NAME === void 0 ? 
void 0 : DSQL_DB_NAME.match(/./)) && + global.DSQL_USE_LOCAL) { + /** @type {import("../../types").DSQL_DatabaseSchemaType | undefined} */ + let dbSchema; + try { + const localDbSchemaPath = path.resolve(process.cwd(), "dsql.schema.json"); + dbSchema = JSON.parse(fs.readFileSync(localDbSchemaPath, "utf8")); + } + catch (error) { } + return await apiGetUser({ + userId, + fields: [...new Set(updatedFields)], + dbFullName: DSQL_DB_NAME, + }); + } + /** + * Make https request + * + * @description make a request to datasquirel.com + */ + const httpResponse = await new Promise((resolve, reject) => { + const httpsRequest = scheme.request({ + method: "POST", + headers: { + "Content-Type": "application/json", + "Content-Length": Buffer.from(reqPayload).length, + Authorization: key || + process.env.DSQL_FULL_ACCESS_API_KEY || + process.env.DSQL_API_KEY, + }, + port, + hostname: host, + path: `/api/user/${apiUserId || grabedHostNames.user_id}/get-user`, + }, + /** + * Callback Function + * + * @description https request callback + */ + (response) => { + var str = ""; + response.on("data", function (chunk) { + str += chunk; + }); + response.on("end", function () { + resolve(JSON.parse(str)); + }); + response.on("error", (err) => { + reject(err); + }); + }); + httpsRequest.write(reqPayload); + httpsRequest.end(); + }); + /** ********************************************** */ + /** ********************************************** */ + /** ********************************************** */ + return httpResponse; } diff --git a/dist/package-shared/actions/users/login-user.d.ts b/dist/package-shared/actions/users/login-user.d.ts index df2bb75..a792be5 100644 --- a/dist/package-shared/actions/users/login-user.d.ts +++ b/dist/package-shared/actions/users/login-user.d.ts @@ -1,37 +1,5 @@ -import http from "http"; -import { APILoginFunctionReturn } from "../../types"; -type Param = { - key?: string; - database: string; - payload: { - email?: string; - username?: string; - password?: string; - }; - additionalFields?: string[]; - request?: http.IncomingMessage & { - [s: string]: any; - }; - response?: http.ServerResponse & { - [s: string]: any; - }; - encryptionKey?: string; - encryptionSalt?: string; - email_login?: boolean; - email_login_code?: string; - temp_code_field?: string; - token?: boolean; - user_id?: string | number; - skipPassword?: boolean; - debug?: boolean; - skipWriteAuthFile?: boolean; - apiUserID?: string | number; - dbUserId?: string | number; - cleanupTokens?: boolean; - secureCookie?: boolean; -}; +import { APILoginFunctionReturn, LoginUserParam } from "../../types"; /** * # Login A user */ -export default function loginUser({ key, payload, database, additionalFields, response, encryptionKey, encryptionSalt, email_login, email_login_code, temp_code_field, token, user_id, skipPassword, apiUserID, skipWriteAuthFile, dbUserId, debug, cleanupTokens, secureCookie, request, }: Param): Promise; -export {}; +export default function loginUser({ key, payload, database, additionalFields, response, encryptionKey, encryptionSalt, email_login, email_login_code, temp_code_field, token, user_id, skipPassword, apiUserID, skipWriteAuthFile, dbUserId, debug, cleanupTokens, secureCookie, request, }: LoginUserParam): Promise; diff --git a/dist/package-shared/actions/users/login-user.js b/dist/package-shared/actions/users/login-user.js index a472269..814a366 100644 --- a/dist/package-shared/actions/users/login-user.js +++ b/dist/package-shared/actions/users/login-user.js @@ -1,200 +1,183 @@ -"use strict"; -var 
__awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = loginUser; -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const encrypt_1 = __importDefault(require("../../functions/dsql/encrypt")); -const grab_host_names_1 = __importDefault(require("../../utils/grab-host-names")); -const api_login_1 = __importDefault(require("../../functions/api/users/api-login")); -const get_auth_cookie_names_1 = __importDefault(require("../../functions/backend/cookies/get-auth-cookie-names")); -const write_auth_files_1 = require("../../functions/backend/auth/write-auth-files"); -const debug_log_1 = __importDefault(require("../../utils/logging/debug-log")); -const grab_cookie_expirt_date_1 = __importDefault(require("../../utils/grab-cookie-expirt-date")); +import fs from "fs"; +import path from "path"; +import encrypt from "../../functions/dsql/encrypt"; +import grabHostNames from "../../utils/grab-host-names"; +import apiLoginUser from "../../functions/api/users/api-login"; +import getAuthCookieNames from "../../functions/backend/cookies/get-auth-cookie-names"; +import { writeAuthFile } from "../../functions/backend/auth/write-auth-files"; +import debugLog from "../../utils/logging/debug-log"; +import grabCookieExpiryDate from "../../utils/grab-cookie-expirt-date"; /** * # Login A user */ -function loginUser(_a) { - return __awaiter(this, arguments, void 0, function* ({ key, payload, database, additionalFields, response, encryptionKey, encryptionSalt, email_login, email_login_code, temp_code_field, token, user_id, skipPassword, apiUserID, skipWriteAuthFile, dbUserId, debug, cleanupTokens, secureCookie, request, }) { - var _b, _c, _d; - const grabedHostNames = (0, grab_host_names_1.default)({ userId: user_id || apiUserID }); - const { host, port, scheme } = grabedHostNames; - const COOKIE_EXPIRY_DATE = (0, grab_cookie_expirt_date_1.default)(); - const defaultTempLoginFieldName = "temp_login_code"; - const emailLoginTempCodeFieldName = email_login +export default async function loginUser({ key, payload, database, additionalFields, response, encryptionKey, encryptionSalt, email_login, email_login_code, temp_code_field, token, user_id, skipPassword, apiUserID, skipWriteAuthFile, dbUserId, debug, cleanupTokens, secureCookie, request, }) { + var _a, _b, _c; + const grabedHostNames = grabHostNames({ userId: user_id || apiUserID }); + const { host, port, scheme } = grabedHostNames; + const COOKIE_EXPIRY_DATE = grabCookieExpiryDate(); + const defaultTempLoginFieldName = "temp_login_code"; + const emailLoginTempCodeFieldName = email_login + ? temp_code_field ? temp_code_field - ? 
temp_code_field - : defaultTempLoginFieldName - : undefined; - const finalEncryptionKey = encryptionKey || process.env.DSQL_ENCRYPTION_PASSWORD; - const finalEncryptionSalt = encryptionSalt || process.env.DSQL_ENCRYPTION_SALT; - function debugFn(log, label) { - (0, debug_log_1.default)({ log, addTime: true, title: "loginUser", label }); - } - if (!(finalEncryptionKey === null || finalEncryptionKey === void 0 ? void 0 : finalEncryptionKey.match(/.{8,}/))) { - console.log("Encryption key is invalid"); - return { - success: false, - payload: null, - msg: "Encryption key is invalid", - }; - } - if (!(finalEncryptionSalt === null || finalEncryptionSalt === void 0 ? void 0 : finalEncryptionSalt.match(/.{8,}/))) { - console.log("Encryption salt is invalid"); - return { - success: false, - payload: null, - msg: "Encryption salt is invalid", - }; - } - /** - * Check required fields - * - * @description Check required fields - */ - // const isEmailValid = await validateEmail({ email: payload.email }); - // if (!payload.email) { - // return { - // success: false, - // payload: null, - // msg: isEmailValid.message, - // }; - // } - /** - * Initialize HTTP response variable - */ - let httpResponse = { + : defaultTempLoginFieldName + : undefined; + const finalEncryptionKey = encryptionKey || process.env.DSQL_ENCRYPTION_PASSWORD; + const finalEncryptionSalt = encryptionSalt || process.env.DSQL_ENCRYPTION_SALT; + function debugFn(log, label) { + debugLog({ log, addTime: true, title: "loginUser", label }); + } + if (!(finalEncryptionKey === null || finalEncryptionKey === void 0 ? void 0 : finalEncryptionKey.match(/.{8,}/))) { + console.log("Encryption key is invalid"); + return { success: false, + payload: null, + msg: "Encryption key is invalid", }; - /** - * Check for local DB settings - * - * @description Look for local db settings in `.env` file and by pass the http request if available - */ - const { DSQL_DB_HOST, DSQL_DB_USERNAME, DSQL_DB_PASSWORD, DSQL_DB_NAME } = process.env; - if ((DSQL_DB_HOST === null || DSQL_DB_HOST === void 0 ? void 0 : DSQL_DB_HOST.match(/./)) && - (DSQL_DB_USERNAME === null || DSQL_DB_USERNAME === void 0 ? void 0 : DSQL_DB_USERNAME.match(/./)) && - (DSQL_DB_PASSWORD === null || DSQL_DB_PASSWORD === void 0 ? void 0 : DSQL_DB_PASSWORD.match(/./)) && - (DSQL_DB_NAME === null || DSQL_DB_NAME === void 0 ? void 0 : DSQL_DB_NAME.match(/./)) && - global.DSQL_USE_LOCAL) { - let dbSchema; - try { - const localDbSchemaPath = path_1.default.resolve(process.cwd(), "dsql.schema.json"); - dbSchema = JSON.parse(fs_1.default.readFileSync(localDbSchemaPath, "utf8")); - } - catch (error) { } - httpResponse = yield (0, api_login_1.default)({ - database: process.env.DSQL_DB_NAME || "", - email: payload.email, - username: payload.username, - password: payload.password, - skipPassword, + } + if (!(finalEncryptionSalt === null || finalEncryptionSalt === void 0 ? 
void 0 : finalEncryptionSalt.match(/.{8,}/))) { + console.log("Encryption salt is invalid"); + return { + success: false, + payload: null, + msg: "Encryption salt is invalid", + }; + } + /** + * Check required fields + * + * @description Check required fields + */ + // const isEmailValid = await validateEmail({ email: payload.email }); + // if (!payload.email) { + // return { + // success: false, + // payload: null, + // msg: isEmailValid.message, + // }; + // } + /** + * Initialize HTTP response variable + */ + let httpResponse = { + success: false, + }; + /** + * Check for local DB settings + * + * @description Look for local db settings in `.env` file and by pass the http request if available + */ + const { DSQL_DB_HOST, DSQL_DB_USERNAME, DSQL_DB_PASSWORD, DSQL_DB_NAME } = process.env; + if ((DSQL_DB_HOST === null || DSQL_DB_HOST === void 0 ? void 0 : DSQL_DB_HOST.match(/./)) && + (DSQL_DB_USERNAME === null || DSQL_DB_USERNAME === void 0 ? void 0 : DSQL_DB_USERNAME.match(/./)) && + (DSQL_DB_PASSWORD === null || DSQL_DB_PASSWORD === void 0 ? void 0 : DSQL_DB_PASSWORD.match(/./)) && + (DSQL_DB_NAME === null || DSQL_DB_NAME === void 0 ? void 0 : DSQL_DB_NAME.match(/./)) && + global.DSQL_USE_LOCAL) { + let dbSchema; + try { + const localDbSchemaPath = path.resolve(process.cwd(), "dsql.schema.json"); + dbSchema = JSON.parse(fs.readFileSync(localDbSchemaPath, "utf8")); + } + catch (error) { } + httpResponse = await apiLoginUser({ + database: database || process.env.DSQL_DB_NAME || "", + email: payload.email, + username: payload.username, + password: payload.password, + skipPassword, + encryptionKey: finalEncryptionKey, + additionalFields, + email_login, + email_login_code, + email_login_field: emailLoginTempCodeFieldName, + token, + dbUserId, + debug, + }); + } + else { + httpResponse = await new Promise((resolve, reject) => { + const reqPayload = { encryptionKey: finalEncryptionKey, + payload, + database, additionalFields, email_login, email_login_code, email_login_field: emailLoginTempCodeFieldName, token, - dbUserId, - debug, - }); - } - else { - httpResponse = yield new Promise((resolve, reject) => { - const reqPayload = { - encryptionKey: finalEncryptionKey, - payload, - database, - additionalFields, - email_login, - email_login_code, - email_login_field: emailLoginTempCodeFieldName, - token, - skipPassword: skipPassword, - dbUserId: dbUserId || 0, - }; - const reqPayloadJSON = JSON.stringify(reqPayload); - const httpsRequest = scheme.request({ - method: "POST", - headers: { - "Content-Type": "application/json", - "Content-Length": Buffer.from(reqPayloadJSON).length, - Authorization: key || - process.env.DSQL_FULL_ACCESS_API_KEY || - process.env.DSQL_API_KEY, - }, - port, - hostname: host, - path: `/api/user/${user_id || grabedHostNames.user_id}/login-user`, - }, (res) => { - var str = ""; - res.on("data", function (chunk) { - str += chunk; - }); - res.on("end", function () { - resolve(JSON.parse(str)); - }); - res.on("error", (err) => { - reject(err); - }); + skipPassword: skipPassword, + dbUserId: dbUserId || 0, + }; + const reqPayloadJSON = JSON.stringify(reqPayload); + const httpsRequest = scheme.request({ + method: "POST", + headers: { + "Content-Type": "application/json", + "Content-Length": Buffer.from(reqPayloadJSON).length, + Authorization: key || + process.env.DSQL_FULL_ACCESS_API_KEY || + process.env.DSQL_API_KEY, + }, + port, + hostname: host, + path: `/api/user/${user_id || grabedHostNames.user_id}/login-user`, + }, (res) => { + var str = ""; + res.on("data", function 
(chunk) { + str += chunk; + }); + res.on("end", function () { + resolve(JSON.parse(str)); + }); + res.on("error", (err) => { + reject(err); }); - httpsRequest.write(reqPayloadJSON); - httpsRequest.end(); }); + httpsRequest.write(reqPayloadJSON); + httpsRequest.end(); + }); + } + if (debug) { + debugFn(httpResponse, "httpResponse"); + } + if (httpResponse === null || httpResponse === void 0 ? void 0 : httpResponse.success) { + let encryptedPayload = encrypt({ + data: JSON.stringify(httpResponse.payload), + encryptionKey: finalEncryptionKey, + encryptionSalt: finalEncryptionSalt, + }); + try { + if (token && encryptedPayload) + httpResponse["token"] = encryptedPayload; } + catch (error) { + (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `Login User HTTP Response Error`, error); + } + const cookieNames = getAuthCookieNames({ + database, + userId: grabedHostNames.user_id, + }); + if (httpResponse.csrf && !skipWriteAuthFile) { + writeAuthFile(httpResponse.csrf, JSON.stringify(httpResponse.payload), cleanupTokens && ((_b = httpResponse.payload) === null || _b === void 0 ? void 0 : _b.id) + ? { userId: httpResponse.payload.id } + : undefined); + } + httpResponse["cookieNames"] = cookieNames; + httpResponse["key"] = String(encryptedPayload); + const authKeyName = cookieNames.keyCookieName; + const csrfName = cookieNames.csrfCookieName; if (debug) { - debugFn(httpResponse, "httpResponse"); + debugFn(authKeyName, "authKeyName"); + debugFn(csrfName, "csrfName"); + debugFn(encryptedPayload, "encryptedPayload"); } - if (httpResponse === null || httpResponse === void 0 ? void 0 : httpResponse.success) { - let encryptedPayload = (0, encrypt_1.default)({ - data: JSON.stringify(httpResponse.payload), - encryptionKey: finalEncryptionKey, - encryptionSalt: finalEncryptionSalt, - }); - try { - if (token && encryptedPayload) - httpResponse["token"] = encryptedPayload; - } - catch (error) { - (_b = global.ERROR_CALLBACK) === null || _b === void 0 ? void 0 : _b.call(global, `Login User HTTP Response Error`, error); - } - const cookieNames = (0, get_auth_cookie_names_1.default)({ - database, - userId: grabedHostNames.user_id, - }); - if (httpResponse.csrf && !skipWriteAuthFile) { - (0, write_auth_files_1.writeAuthFile)(httpResponse.csrf, JSON.stringify(httpResponse.payload), cleanupTokens && ((_c = httpResponse.payload) === null || _c === void 0 ? void 0 : _c.id) - ? { userId: httpResponse.payload.id } - : undefined); - } - httpResponse["cookieNames"] = cookieNames; - httpResponse["key"] = String(encryptedPayload); - const authKeyName = cookieNames.keyCookieName; - const csrfName = cookieNames.csrfCookieName; - if (debug) { - debugFn(authKeyName, "authKeyName"); - debugFn(csrfName, "csrfName"); - debugFn(encryptedPayload, "encryptedPayload"); - } - response === null || response === void 0 ? void 0 : response.setHeader("Set-Cookie", [ - `${authKeyName}=${encryptedPayload};samesite=strict;path=/;HttpOnly=true;Expires=${COOKIE_EXPIRY_DATE}${secureCookie ? ";Secure=true" : ""}`, - `${csrfName}=${(_d = httpResponse.payload) === null || _d === void 0 ? void 0 : _d.csrf_k};samesite=strict;path=/;HttpOnly=true;Expires=${COOKIE_EXPIRY_DATE}`, - ]); - if (debug) { - debugFn("Response Sent!"); - } + response === null || response === void 0 ? void 0 : response.setHeader("Set-Cookie", [ + `${authKeyName}=${encryptedPayload};samesite=strict;path=/;HttpOnly=true;Expires=${COOKIE_EXPIRY_DATE}${secureCookie ? 
";Secure=true" : ""}`, + `${csrfName}=${(_c = httpResponse.payload) === null || _c === void 0 ? void 0 : _c.csrf_k};samesite=strict;path=/;HttpOnly=true;Expires=${COOKIE_EXPIRY_DATE}`, + ]); + if (debug) { + debugFn("Response Sent!"); } - return httpResponse; - }); + } + return httpResponse; } diff --git a/dist/package-shared/actions/users/logout-user.js b/dist/package-shared/actions/users/logout-user.js index e895823..13d3696 100644 --- a/dist/package-shared/actions/users/logout-user.js +++ b/dist/package-shared/actions/users/logout-user.js @@ -1,20 +1,14 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = logoutUser; -const get_auth_cookie_names_1 = __importDefault(require("../../functions/backend/cookies/get-auth-cookie-names")); -const decrypt_1 = __importDefault(require("../../functions/dsql/decrypt")); -const ejson_1 = __importDefault(require("../../utils/ejson")); -const write_auth_files_1 = require("../../functions/backend/auth/write-auth-files"); -const parseCookies_1 = __importDefault(require("../../utils/backend/parseCookies")); -const grab_host_names_1 = __importDefault(require("../../utils/grab-host-names")); -const debug_log_1 = __importDefault(require("../../utils/logging/debug-log")); +import getAuthCookieNames from "../../functions/backend/cookies/get-auth-cookie-names"; +import decrypt from "../../functions/dsql/decrypt"; +import EJSON from "../../utils/ejson"; +import { deleteAuthFile } from "../../functions/backend/auth/write-auth-files"; +import parseCookies from "../../utils/backend/parseCookies"; +import grabHostNames from "../../utils/grab-host-names"; +import debugLog from "../../utils/logging/debug-log"; /** * # Logout user */ -function logoutUser({ response, database, dsqlUserId, encryptedUserString, request, cookieString, debug, }) { +export default function logoutUser({ response, database, dsqlUserId, encryptedUserString, request, cookieString, debug, }) { var _a; /** * Check Encryption Keys @@ -22,13 +16,13 @@ function logoutUser({ response, database, dsqlUserId, encryptedUserString, reque * @description Check Encryption Keys */ try { - const { user_id } = (0, grab_host_names_1.default)({ userId: dsqlUserId }); - const cookieNames = (0, get_auth_cookie_names_1.default)({ + const { user_id } = grabHostNames({ userId: dsqlUserId }); + const cookieNames = getAuthCookieNames({ database, userId: user_id, }); function debugFn(log, label) { - (0, debug_log_1.default)({ log, addTime: true, title: "logoutUser", label }); + debugLog({ log, addTime: true, title: "logoutUser", label }); } if (debug) { debugFn(cookieNames, "cookieNames"); @@ -39,16 +33,16 @@ function logoutUser({ response, database, dsqlUserId, encryptedUserString, reque const decryptedUserJSON = (() => { try { if (request) { - const cookiesObject = (0, parseCookies_1.default)({ + const cookiesObject = parseCookies({ request, cookieString, }); - return (0, decrypt_1.default)({ + return decrypt({ encryptedString: cookiesObject[authKeyName], }); } else if (encryptedUserString) { - return (0, decrypt_1.default)({ + return decrypt({ encryptedString: encryptedUserString, }); } @@ -66,7 +60,7 @@ function logoutUser({ response, database, dsqlUserId, encryptedUserString, reque } if (!decryptedUserJSON) throw new Error("Invalid User"); - const userObject = ejson_1.default.parse(decryptedUserJSON); + const userObject = 
EJSON.parse(decryptedUserJSON); if (!(userObject === null || userObject === void 0 ? void 0 : userObject.csrf_k)) throw new Error("Invalid User. Please check key"); response === null || response === void 0 ? void 0 : response.setHeader("Set-Cookie", [ @@ -75,7 +69,7 @@ function logoutUser({ response, database, dsqlUserId, encryptedUserString, reque `${oneTimeCodeName}=null;max-age=0`, ]); const csrf = userObject.csrf_k; - (0, write_auth_files_1.deleteAuthFile)(csrf); + deleteAuthFile(csrf); return { success: true, msg: "User Logged Out", diff --git a/dist/package-shared/actions/users/reauth-user.js b/dist/package-shared/actions/users/reauth-user.js index dcdae5b..5b88114 100644 --- a/dist/package-shared/actions/users/reauth-user.js +++ b/dist/package-shared/actions/users/reauth-user.js @@ -1,179 +1,162 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = reauthUser; -const user_auth_1 = __importDefault(require("./user-auth")); -const grab_host_names_1 = __importDefault(require("../../utils/grab-host-names")); -const login_user_1 = __importDefault(require("./login-user")); +import userAuth from "./user-auth"; +import grabHostNames from "../../utils/grab-host-names"; +import loginUser from "./login-user"; /** * # Reauthorize User */ -function reauthUser(_a) { - return __awaiter(this, arguments, void 0, function* ({ key, database, response, request, level, encryptionKey, encryptionSalt, additionalFields, encryptedUserString, user_id, secureCookie, }) { - var _b; - /** - * Check Encryption Keys - * - * @description Check Encryption Keys - */ - const grabedHostNames = (0, grab_host_names_1.default)(); - // const { host, port, scheme } = grabedHostNames; - // const COOKIE_EXPIRY_DATE = grabCookieExpiryDate(); - const finalEncryptionKey = encryptionKey || process.env.DSQL_ENCRYPTION_PASSWORD; - const finalEncryptionSalt = encryptionSalt || process.env.DSQL_ENCRYPTION_SALT; - const existingUser = (0, user_auth_1.default)({ - database, - encryptionKey: finalEncryptionKey, - encryptionSalt: finalEncryptionSalt, - level, - request, - encryptedUserString, - }); - if (!((_b = existingUser === null || existingUser === void 0 ? void 0 : existingUser.payload) === null || _b === void 0 ? 
void 0 : _b.id)) { - return { - success: false, - payload: null, - msg: "Cookie Credentials Invalid", - }; - } - return yield (0, login_user_1.default)({ - database: database || "", - payload: { - email: existingUser.payload.email, - }, - additionalFields, - skipPassword: true, - response, - request, - user_id, - secureCookie, - key, - }); - /** - * Initialize HTTP response variable - */ - let httpResponse; - /** - * Check for local DB settings - * - * @description Look for local db settings in `.env` file and by pass the http request if available - */ - // const { DSQL_DB_HOST, DSQL_DB_USERNAME, DSQL_DB_PASSWORD, DSQL_DB_NAME } = - // process.env; - // if ( - // DSQL_DB_HOST?.match(/./) && - // DSQL_DB_USERNAME?.match(/./) && - // DSQL_DB_PASSWORD?.match(/./) && - // DSQL_DB_NAME?.match(/./) && - // global.DSQL_USE_LOCAL - // ) { - // let dbSchema: import("../../types").DSQL_DatabaseSchemaType | undefined; - // try { - // const localDbSchemaPath = path.resolve( - // process.cwd(), - // "dsql.schema.json" - // ); - // dbSchema = JSON.parse(fs.readFileSync(localDbSchemaPath, "utf8")); - // } catch (error) {} - // httpResponse = await apiReauthUser({ - // existingUser: existingUser.payload, - // additionalFields, - // }); - // } else { - // /** - // * Make https request - // * - // * @description make a request to datasquirel.com - // */ - // httpResponse = (await new Promise((resolve, reject) => { - // const reqPayload = JSON.stringify({ - // existingUser: existingUser.payload, - // database, - // additionalFields, - // }); - // const httpsRequest = scheme.request( - // { - // method: "POST", - // headers: { - // "Content-Type": "application/json", - // "Content-Length": Buffer.from(reqPayload).length, - // Authorization: - // key || - // process.env.DSQL_FULL_ACCESS_API_KEY || - // process.env.DSQL_API_KEY, - // }, - // port, - // hostname: host, - // path: `/api/user/${ - // user_id || grabedHostNames.user_id - // }/reauth-user`, - // }, - // /** - // * Callback Function - // * - // * @description https request callback - // */ - // (response) => { - // var str = ""; - // response.on("data", function (chunk) { - // str += chunk; - // }); - // response.on("end", function () { - // resolve(JSON.parse(str)); - // }); - // response.on("error", (err) => { - // reject(err); - // }); - // } - // ); - // httpsRequest.write(reqPayload); - // httpsRequest.end(); - // })) as APILoginFunctionReturn; - // } - // /** - // * Make https request - // * - // * @description make a request to datasquirel.com - // */ - // if (httpResponse?.success) { - // let encryptedPayload = encrypt({ - // data: JSON.stringify(httpResponse.payload), - // encryptionKey: finalEncryptionKey, - // encryptionSalt: finalEncryptionSalt, - // }); - // const cookieNames = getAuthCookieNames({ - // database, - // userId: user_id || grabedHostNames.user_id, - // }); - // httpResponse["cookieNames"] = cookieNames; - // httpResponse["key"] = String(encryptedPayload); - // const authKeyName = cookieNames.keyCookieName; - // const csrfName = cookieNames.csrfCookieName; - // response?.setHeader("Set-Cookie", [ - // `${authKeyName}=${encryptedPayload};samesite=strict;path=/;HttpOnly=true;Expires=${COOKIE_EXPIRY_DATE}${ - // secureCookie ? 
";Secure=true" : "" - // }`, - // `${csrfName}=${httpResponse.payload?.csrf_k};samesite=strict;path=/;HttpOnly=true;Expires=${COOKIE_EXPIRY_DATE}`, - // ]); - // if (httpResponse.csrf) { - // deleteAuthFile(String(existingUser.payload.csrf_k)); - // writeAuthFile( - // httpResponse.csrf, - // JSON.stringify(httpResponse.payload) - // ); - // } - // } - // return httpResponse; +export default async function reauthUser({ key, database, response, request, level, encryptionKey, encryptionSalt, additionalFields, encryptedUserString, user_id, secureCookie, }) { + var _a; + /** + * Check Encryption Keys + * + * @description Check Encryption Keys + */ + const grabedHostNames = grabHostNames(); + // const { host, port, scheme } = grabedHostNames; + // const COOKIE_EXPIRY_DATE = grabCookieExpiryDate(); + const finalEncryptionKey = encryptionKey || process.env.DSQL_ENCRYPTION_PASSWORD; + const finalEncryptionSalt = encryptionSalt || process.env.DSQL_ENCRYPTION_SALT; + const existingUser = userAuth({ + database, + encryptionKey: finalEncryptionKey, + encryptionSalt: finalEncryptionSalt, + level, + request, + encryptedUserString, }); + if (!((_a = existingUser === null || existingUser === void 0 ? void 0 : existingUser.payload) === null || _a === void 0 ? void 0 : _a.id)) { + return { + success: false, + payload: null, + msg: "Cookie Credentials Invalid", + }; + } + return await loginUser({ + database: database || "", + payload: { + email: existingUser.payload.email, + }, + additionalFields, + skipPassword: true, + response, + request, + user_id, + secureCookie, + key, + }); + /** + * Initialize HTTP response variable + */ + let httpResponse; + /** + * Check for local DB settings + * + * @description Look for local db settings in `.env` file and by pass the http request if available + */ + // const { DSQL_DB_HOST, DSQL_DB_USERNAME, DSQL_DB_PASSWORD, DSQL_DB_NAME } = + // process.env; + // if ( + // DSQL_DB_HOST?.match(/./) && + // DSQL_DB_USERNAME?.match(/./) && + // DSQL_DB_PASSWORD?.match(/./) && + // DSQL_DB_NAME?.match(/./) && + // global.DSQL_USE_LOCAL + // ) { + // let dbSchema: import("../../types").DSQL_DatabaseSchemaType | undefined; + // try { + // const localDbSchemaPath = path.resolve( + // process.cwd(), + // "dsql.schema.json" + // ); + // dbSchema = JSON.parse(fs.readFileSync(localDbSchemaPath, "utf8")); + // } catch (error) {} + // httpResponse = await apiReauthUser({ + // existingUser: existingUser.payload, + // additionalFields, + // }); + // } else { + // /** + // * Make https request + // * + // * @description make a request to datasquirel.com + // */ + // httpResponse = (await new Promise((resolve, reject) => { + // const reqPayload = JSON.stringify({ + // existingUser: existingUser.payload, + // database, + // additionalFields, + // }); + // const httpsRequest = scheme.request( + // { + // method: "POST", + // headers: { + // "Content-Type": "application/json", + // "Content-Length": Buffer.from(reqPayload).length, + // Authorization: + // key || + // process.env.DSQL_FULL_ACCESS_API_KEY || + // process.env.DSQL_API_KEY, + // }, + // port, + // hostname: host, + // path: `/api/user/${ + // user_id || grabedHostNames.user_id + // }/reauth-user`, + // }, + // /** + // * Callback Function + // * + // * @description https request callback + // */ + // (response) => { + // var str = ""; + // response.on("data", function (chunk) { + // str += chunk; + // }); + // response.on("end", function () { + // resolve(JSON.parse(str)); + // }); + // response.on("error", (err) => { + // 
reject(err); + // }); + // } + // ); + // httpsRequest.write(reqPayload); + // httpsRequest.end(); + // })) as APILoginFunctionReturn; + // } + // /** + // * Make https request + // * + // * @description make a request to datasquirel.com + // */ + // if (httpResponse?.success) { + // let encryptedPayload = encrypt({ + // data: JSON.stringify(httpResponse.payload), + // encryptionKey: finalEncryptionKey, + // encryptionSalt: finalEncryptionSalt, + // }); + // const cookieNames = getAuthCookieNames({ + // database, + // userId: user_id || grabedHostNames.user_id, + // }); + // httpResponse["cookieNames"] = cookieNames; + // httpResponse["key"] = String(encryptedPayload); + // const authKeyName = cookieNames.keyCookieName; + // const csrfName = cookieNames.csrfCookieName; + // response?.setHeader("Set-Cookie", [ + // `${authKeyName}=${encryptedPayload};samesite=strict;path=/;HttpOnly=true;Expires=${COOKIE_EXPIRY_DATE}${ + // secureCookie ? ";Secure=true" : "" + // }`, + // `${csrfName}=${httpResponse.payload?.csrf_k};samesite=strict;path=/;HttpOnly=true;Expires=${COOKIE_EXPIRY_DATE}`, + // ]); + // if (httpResponse.csrf) { + // deleteAuthFile(String(existingUser.payload.csrf_k)); + // writeAuthFile( + // httpResponse.csrf, + // JSON.stringify(httpResponse.payload) + // ); + // } + // } + // return httpResponse; } diff --git a/dist/package-shared/actions/users/send-email-code.d.ts b/dist/package-shared/actions/users/send-email-code.d.ts index 209d639..d9c0ed0 100644 --- a/dist/package-shared/actions/users/send-email-code.d.ts +++ b/dist/package-shared/actions/users/send-email-code.d.ts @@ -19,5 +19,5 @@ type Param = { /** * # Send Email Code to a User */ -export default function sendEmailCode({ key, email, database, temp_code_field_name, mail_domain, mail_password, mail_username, mail_port, sender, user_id, response, extraCookies, }: Param): Promise; +export default function sendEmailCode(params: Param): Promise; export {}; diff --git a/dist/package-shared/actions/users/send-email-code.js b/dist/package-shared/actions/users/send-email-code.js index a57e568..2c20d4b 100644 --- a/dist/package-shared/actions/users/send-email-code.js +++ b/dist/package-shared/actions/users/send-email-code.js @@ -1,120 +1,104 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = sendEmailCode; -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const grab_host_names_1 = __importDefault(require("../../utils/grab-host-names")); -const api_send_email_code_1 = __importDefault(require("../../functions/api/users/api-send-email-code")); +import fs from "fs"; +import path from "path"; +import grabHostNames from "../../utils/grab-host-names"; +import apiSendEmailCode from "../../functions/api/users/api-send-email-code"; /** * # Send Email Code to a User */ -function sendEmailCode(_a) { - return __awaiter(this, arguments, void 0, function* ({ key, email, database, temp_code_field_name, mail_domain, mail_password, mail_username, mail_port, sender, user_id, response, extraCookies, }) { - const grabedHostNames = (0, grab_host_names_1.default)(); - const { host, port, scheme } = grabedHostNames; - const defaultTempLoginFieldName = "temp_login_code"; - const emailLoginTempCodeFieldName = temp_code_field_name - ? temp_code_field_name - : defaultTempLoginFieldName; - const emailHtml = `

Please use this code to login\n{{code}}\nPlease note that this code expires after 15 minutes

`; +export default async function sendEmailCode(params) { + const { key, email, database, temp_code_field_name, mail_domain, mail_password, mail_username, mail_port, sender, user_id, response, extraCookies, } = params; + const grabedHostNames = grabHostNames(); + const { host, port, scheme } = grabedHostNames; + const defaultTempLoginFieldName = "temp_login_code"; + const emailLoginTempCodeFieldName = temp_code_field_name + ? temp_code_field_name + : defaultTempLoginFieldName; + const emailHtml = `

Please use this code to login\n{{code}}\nPlease note that this code expires after 15 minutes

`; + /** + * Check for local DB settings + * + * @description Look for local db settings in `.env` file and by pass the http request if available + */ + const { DSQL_DB_HOST, DSQL_DB_USERNAME, DSQL_DB_PASSWORD, DSQL_DB_NAME } = process.env; + if ((DSQL_DB_HOST === null || DSQL_DB_HOST === void 0 ? void 0 : DSQL_DB_HOST.match(/./)) && + (DSQL_DB_USERNAME === null || DSQL_DB_USERNAME === void 0 ? void 0 : DSQL_DB_USERNAME.match(/./)) && + (DSQL_DB_PASSWORD === null || DSQL_DB_PASSWORD === void 0 ? void 0 : DSQL_DB_PASSWORD.match(/./)) && + (DSQL_DB_NAME === null || DSQL_DB_NAME === void 0 ? void 0 : DSQL_DB_NAME.match(/./)) && + global.DSQL_USE_LOCAL) { + /** @type {import("../../types").DSQL_DatabaseSchemaType | undefined} */ + let dbSchema; + try { + const localDbSchemaPath = path.resolve(process.cwd(), "dsql.schema.json"); + dbSchema = JSON.parse(fs.readFileSync(localDbSchemaPath, "utf8")); + } + catch (error) { } + return await apiSendEmailCode({ + database: DSQL_DB_NAME, + email, + email_login_field: emailLoginTempCodeFieldName, + html: emailHtml, + mail_domain, + mail_password, + mail_port, + mail_username, + sender, + response, + extraCookies, + }); + } + else { /** - * Check for local DB settings + * Make https request * - * @description Look for local db settings in `.env` file and by pass the http request if available + * @description make a request to datasquirel.com + * + * @type {import("../../types").SendOneTimeCodeEmailResponse} */ - const { DSQL_DB_HOST, DSQL_DB_USERNAME, DSQL_DB_PASSWORD, DSQL_DB_NAME } = process.env; - if ((DSQL_DB_HOST === null || DSQL_DB_HOST === void 0 ? void 0 : DSQL_DB_HOST.match(/./)) && - (DSQL_DB_USERNAME === null || DSQL_DB_USERNAME === void 0 ? void 0 : DSQL_DB_USERNAME.match(/./)) && - (DSQL_DB_PASSWORD === null || DSQL_DB_PASSWORD === void 0 ? void 0 : DSQL_DB_PASSWORD.match(/./)) && - (DSQL_DB_NAME === null || DSQL_DB_NAME === void 0 ? 
void 0 : DSQL_DB_NAME.match(/./)) && - global.DSQL_USE_LOCAL) { - /** @type {import("../../types").DSQL_DatabaseSchemaType | undefined} */ - let dbSchema; - try { - const localDbSchemaPath = path_1.default.resolve(process.cwd(), "dsql.schema.json"); - dbSchema = JSON.parse(fs_1.default.readFileSync(localDbSchemaPath, "utf8")); - } - catch (error) { } - return yield (0, api_send_email_code_1.default)({ - database: DSQL_DB_NAME, + const httpResponse = await new Promise((resolve, reject) => { + const reqPayload = JSON.stringify({ email, + database, email_login_field: emailLoginTempCodeFieldName, - html: emailHtml, mail_domain, mail_password, - mail_port, mail_username, + mail_port, sender, - response, - extraCookies, + html: emailHtml, }); - } - else { + const httpsRequest = scheme.request({ + method: "POST", + headers: { + "Content-Type": "application/json", + "Content-Length": Buffer.from(reqPayload).length, + Authorization: key || + process.env.DSQL_FULL_ACCESS_API_KEY || + process.env.DSQL_API_KEY, + }, + port, + hostname: host, + path: `/api/user/${user_id || grabedHostNames.user_id}/send-email-code`, + }, /** - * Make https request + * Callback Function * - * @description make a request to datasquirel.com - * - * @type {import("../../types").SendOneTimeCodeEmailResponse} + * @description https request callback */ - const httpResponse = yield new Promise((resolve, reject) => { - const reqPayload = JSON.stringify({ - email, - database, - email_login_field: emailLoginTempCodeFieldName, - mail_domain, - mail_password, - mail_username, - mail_port, - sender, - html: emailHtml, + (res) => { + var str = ""; + res.on("data", function (chunk) { + str += chunk; }); - const httpsRequest = scheme.request({ - method: "POST", - headers: { - "Content-Type": "application/json", - "Content-Length": Buffer.from(reqPayload).length, - Authorization: key || - process.env.DSQL_FULL_ACCESS_API_KEY || - process.env.DSQL_API_KEY, - }, - port, - hostname: host, - path: `/api/user/${user_id || grabedHostNames.user_id}/send-email-code`, - }, - /** - * Callback Function - * - * @description https request callback - */ - (res) => { - var str = ""; - res.on("data", function (chunk) { - str += chunk; - }); - res.on("end", function () { - resolve(JSON.parse(str)); - }); - res.on("error", (err) => { - reject(err); - }); + res.on("end", function () { + resolve(JSON.parse(str)); + }); + res.on("error", (err) => { + reject(err); }); - httpsRequest.write(reqPayload); - httpsRequest.end(); }); - return httpResponse; - } - }); + httpsRequest.write(reqPayload); + httpsRequest.end(); + }); + return httpResponse; + } } diff --git a/dist/package-shared/actions/users/social/github-auth.js b/dist/package-shared/actions/users/social/github-auth.js index 60945e0..616375a 100644 --- a/dist/package-shared/actions/users/social/github-auth.js +++ b/dist/package-shared/actions/users/social/github-auth.js @@ -1,173 +1,156 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = githubAuth; -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const encrypt_1 = __importDefault(require("../../../functions/dsql/encrypt")); -const grab_host_names_1 = __importDefault(require("../../../utils/grab-host-names")); -const api_github_login_1 = __importDefault(require("../../../functions/api/users/social/api-github-login")); -const grab_cookie_expirt_date_1 = __importDefault(require("../../../utils/grab-cookie-expirt-date")); +import fs from "fs"; +import path from "path"; +import encrypt from "../../../functions/dsql/encrypt"; +import grabHostNames from "../../../utils/grab-host-names"; +import apiGithubLogin from "../../../functions/api/users/social/api-github-login"; +import grabCookieExpiryDate from "../../../utils/grab-cookie-expirt-date"; /** * # SERVER FUNCTION: Login with google Function */ -function githubAuth(_a) { - return __awaiter(this, arguments, void 0, function* ({ key, code, email, database, clientId, clientSecret, response, encryptionKey, encryptionSalt, additionalFields, user_id, additionalData, secureCookie, }) { - /** - * Check inputs - * - * @description Check inputs - */ - const grabedHostNames = (0, grab_host_names_1.default)(); - const { host, port, scheme } = grabedHostNames; - const COOKIE_EXPIRY_DATE = (0, grab_cookie_expirt_date_1.default)(); - if (!code || (code === null || code === void 0 ? void 0 : code.match(/ /))) { - return { - success: false, - user: null, - msg: "Please enter Github Access Token", - }; +export default async function githubAuth({ key, code, email, database, clientId, clientSecret, response, encryptionKey, encryptionSalt, additionalFields, user_id, additionalData, secureCookie, }) { + /** + * Check inputs + * + * @description Check inputs + */ + const grabedHostNames = grabHostNames(); + const { host, port, scheme } = grabedHostNames; + const COOKIE_EXPIRY_DATE = grabCookieExpiryDate(); + if (!code || (code === null || code === void 0 ? void 0 : code.match(/ /))) { + return { + success: false, + user: null, + msg: "Please enter Github Access Token", + }; + } + if (!database || (database === null || database === void 0 ? void 0 : database.match(/ /))) { + return { + success: false, + user: null, + msg: "Please provide database slug name you want to access", + }; + } + if (!clientId || (clientId === null || clientId === void 0 ? void 0 : clientId.match(/ /))) { + return { + success: false, + user: null, + msg: "Please enter Github OAUTH client ID", + }; + } + /** + * Initialize HTTP response variable + */ + let httpResponse; + /** + * Check for local DB settings + * + * @description Look for local db settings in `.env` file and by pass the http request if available + */ + const { DSQL_DB_HOST, DSQL_DB_USERNAME, DSQL_DB_PASSWORD, DSQL_DB_NAME, DSQL_KEY, DSQL_REF_DB_NAME, DSQL_FULL_SYNC, } = process.env; + if ((DSQL_DB_HOST === null || DSQL_DB_HOST === void 0 ? void 0 : DSQL_DB_HOST.match(/./)) && + (DSQL_DB_USERNAME === null || DSQL_DB_USERNAME === void 0 ? void 0 : DSQL_DB_USERNAME.match(/./)) && + (DSQL_DB_PASSWORD === null || DSQL_DB_PASSWORD === void 0 ? 
void 0 : DSQL_DB_PASSWORD.match(/./)) && + (DSQL_DB_NAME === null || DSQL_DB_NAME === void 0 ? void 0 : DSQL_DB_NAME.match(/./))) { + /** @type {import("../../../types").DSQL_DatabaseSchemaType | undefined | undefined} */ + let dbSchema; + try { + const localDbSchemaPath = path.resolve(process.cwd(), "dsql.schema.json"); + dbSchema = JSON.parse(fs.readFileSync(localDbSchemaPath, "utf8")); } - if (!database || (database === null || database === void 0 ? void 0 : database.match(/ /))) { - return { - success: false, - user: null, - msg: "Please provide database slug name you want to access", - }; - } - if (!clientId || (clientId === null || clientId === void 0 ? void 0 : clientId.match(/ /))) { - return { - success: false, - user: null, - msg: "Please enter Github OAUTH client ID", - }; - } - /** - * Initialize HTTP response variable - */ - let httpResponse; - /** - * Check for local DB settings - * - * @description Look for local db settings in `.env` file and by pass the http request if available - */ - const { DSQL_DB_HOST, DSQL_DB_USERNAME, DSQL_DB_PASSWORD, DSQL_DB_NAME, DSQL_KEY, DSQL_REF_DB_NAME, DSQL_FULL_SYNC, } = process.env; - if ((DSQL_DB_HOST === null || DSQL_DB_HOST === void 0 ? void 0 : DSQL_DB_HOST.match(/./)) && - (DSQL_DB_USERNAME === null || DSQL_DB_USERNAME === void 0 ? void 0 : DSQL_DB_USERNAME.match(/./)) && - (DSQL_DB_PASSWORD === null || DSQL_DB_PASSWORD === void 0 ? void 0 : DSQL_DB_PASSWORD.match(/./)) && - (DSQL_DB_NAME === null || DSQL_DB_NAME === void 0 ? void 0 : DSQL_DB_NAME.match(/./))) { - /** @type {import("../../../types").DSQL_DatabaseSchemaType | undefined | undefined} */ - let dbSchema; - try { - const localDbSchemaPath = path_1.default.resolve(process.cwd(), "dsql.schema.json"); - dbSchema = JSON.parse(fs_1.default.readFileSync(localDbSchemaPath, "utf8")); - } - catch (error) { } - httpResponse = yield (0, api_github_login_1.default)({ - code, - email: email || undefined, - clientId, - clientSecret, - additionalFields, - database: DSQL_DB_NAME, - additionalData, - }); - } - else { - /** - * Make https request - * - * @description make a request to datasquirel.com - * @type {FunctionReturn} - Https response object - */ - httpResponse = (yield new Promise((resolve, reject) => { - const reqPayload = JSON.stringify({ - code, - email, - clientId, - clientSecret, - database, - additionalFields, - additionalData, - }); - const httpsRequest = scheme.request({ - method: "POST", - headers: { - "Content-Type": "application/json", - "Content-Length": Buffer.from(reqPayload).length, - Authorization: key || - process.env.DSQL_FULL_ACCESS_API_KEY || - process.env.DSQL_API_KEY, - }, - port, - hostname: host, - path: `/api/user/${user_id || grabedHostNames.user_id}/github-login`, - }, - /** - * Callback Function - * - * @description https request callback - */ - (response) => { - var str = ""; - response.on("data", function (chunk) { - str += chunk; - }); - response.on("end", function () { - var _a; - try { - resolve(JSON.parse(str)); - } - catch (error) { - (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? 
void 0 : _a.call(global, `Github Auth Error`, error); - resolve({ - success: false, - user: null, - msg: "Something went wrong", - }); - } - }); - response.on("error", (err) => { - reject(err); - }); - }); - httpsRequest.write(reqPayload); - httpsRequest.end(); - })); - } - //////////////////////////////////////// - //////////////////////////////////////// - //////////////////////////////////////// + catch (error) { } + httpResponse = await apiGithubLogin({ + code, + email: email || undefined, + clientId, + clientSecret, + additionalFields, + database: DSQL_DB_NAME, + additionalData, + }); + } + else { /** * Make https request * * @description make a request to datasquirel.com + * @type {FunctionReturn} - Https response object */ - if ((httpResponse === null || httpResponse === void 0 ? void 0 : httpResponse.success) && (httpResponse === null || httpResponse === void 0 ? void 0 : httpResponse.user)) { - let encryptedPayload = (0, encrypt_1.default)({ - data: JSON.stringify(httpResponse.user), - encryptionKey, - encryptionSalt, + httpResponse = (await new Promise((resolve, reject) => { + const reqPayload = JSON.stringify({ + code, + email, + clientId, + clientSecret, + database, + additionalFields, + additionalData, }); - const { user, dsqlUserId } = httpResponse; - const authKeyName = `datasquirel_${dsqlUserId}_${database}_auth_key`; - const csrfName = `datasquirel_${dsqlUserId}_${database}_csrf`; - response.setHeader("Set-Cookie", [ - `${authKeyName}=${encryptedPayload};samesite=strict;path=/;HttpOnly=true;Expires=${COOKIE_EXPIRY_DATE}${secureCookie ? ";Secure=true" : ""}`, - `${csrfName}=${user.csrf_k};samesite=strict;path=/;HttpOnly=true;Expires=${COOKIE_EXPIRY_DATE}`, - ]); - } - return httpResponse; - }); + const httpsRequest = scheme.request({ + method: "POST", + headers: { + "Content-Type": "application/json", + "Content-Length": Buffer.from(reqPayload).length, + Authorization: key || + process.env.DSQL_FULL_ACCESS_API_KEY || + process.env.DSQL_API_KEY, + }, + port, + hostname: host, + path: `/api/user/${user_id || grabedHostNames.user_id}/github-login`, + }, + /** + * Callback Function + * + * @description https request callback + */ + (response) => { + var str = ""; + response.on("data", function (chunk) { + str += chunk; + }); + response.on("end", function () { + var _a; + try { + resolve(JSON.parse(str)); + } + catch (error) { + (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `Github Auth Error`, error); + resolve({ + success: false, + user: null, + msg: "Something went wrong", + }); + } + }); + response.on("error", (err) => { + reject(err); + }); + }); + httpsRequest.write(reqPayload); + httpsRequest.end(); + })); + } + //////////////////////////////////////// + //////////////////////////////////////// + //////////////////////////////////////// + /** + * Make https request + * + * @description make a request to datasquirel.com + */ + if ((httpResponse === null || httpResponse === void 0 ? void 0 : httpResponse.success) && (httpResponse === null || httpResponse === void 0 ? 
void 0 : httpResponse.user)) { + let encryptedPayload = encrypt({ + data: JSON.stringify(httpResponse.user), + encryptionKey, + encryptionSalt, + }); + const { user, dsqlUserId } = httpResponse; + const authKeyName = `datasquirel_${dsqlUserId}_${database}_auth_key`; + const csrfName = `datasquirel_${dsqlUserId}_${database}_csrf`; + response.setHeader("Set-Cookie", [ + `${authKeyName}=${encryptedPayload};samesite=strict;path=/;HttpOnly=true;Expires=${COOKIE_EXPIRY_DATE}${secureCookie ? ";Secure=true" : ""}`, + `${csrfName}=${user.csrf_k};samesite=strict;path=/;HttpOnly=true;Expires=${COOKIE_EXPIRY_DATE}`, + ]); + } + return httpResponse; } diff --git a/dist/package-shared/actions/users/social/google-auth.js b/dist/package-shared/actions/users/social/google-auth.js index 5e5371f..0d93d58 100644 --- a/dist/package-shared/actions/users/social/google-auth.js +++ b/dist/package-shared/actions/users/social/google-auth.js @@ -1,161 +1,144 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = googleAuth; -const encrypt_1 = __importDefault(require("../../../functions/dsql/encrypt")); -const grab_host_names_1 = __importDefault(require("../../../utils/grab-host-names")); -const api_google_login_1 = __importDefault(require("../../../functions/api/users/social/api-google-login")); -const get_auth_cookie_names_1 = __importDefault(require("../../../functions/backend/cookies/get-auth-cookie-names")); -const write_auth_files_1 = require("../../../functions/backend/auth/write-auth-files"); -const grab_cookie_expirt_date_1 = __importDefault(require("../../../utils/grab-cookie-expirt-date")); +import encrypt from "../../../functions/dsql/encrypt"; +import grabHostNames from "../../../utils/grab-host-names"; +import apiGoogleLogin from "../../../functions/api/users/social/api-google-login"; +import getAuthCookieNames from "../../../functions/backend/cookies/get-auth-cookie-names"; +import { writeAuthFile } from "../../../functions/backend/auth/write-auth-files"; +import grabCookieExpiryDate from "../../../utils/grab-cookie-expirt-date"; /** * # SERVER FUNCTION: Login with google Function */ -function googleAuth(_a) { - return __awaiter(this, arguments, void 0, function* ({ key, token, database, response, encryptionKey, encryptionSalt, additionalFields, additionalData, apiUserID, debug, secureCookie, loginOnly, }) { - var _b; - const grabedHostNames = (0, grab_host_names_1.default)({ - userId: apiUserID || process.env.DSQL_API_USER_ID, - }); - const { host, port, scheme, user_id } = grabedHostNames; - const COOKIE_EXPIRY_DATE = (0, grab_cookie_expirt_date_1.default)(); - const finalEncryptionKey = encryptionKey || process.env.DSQL_ENCRYPTION_PASSWORD; - const finalEncryptionSalt = 
encryptionSalt || process.env.DSQL_ENCRYPTION_SALT; - if (!(finalEncryptionKey === null || finalEncryptionKey === void 0 ? void 0 : finalEncryptionKey.match(/.{8,}/))) { - console.log("Encryption key is invalid"); - return { - success: false, - payload: null, - msg: "Encryption key is invalid", - }; - } - if (!(finalEncryptionSalt === null || finalEncryptionSalt === void 0 ? void 0 : finalEncryptionSalt.match(/.{8,}/))) { - console.log("Encryption salt is invalid"); - return { - success: false, - payload: null, - msg: "Encryption salt is invalid", - }; - } - /** - * Check inputs - * - * @description Check inputs - */ - if (!token || (token === null || token === void 0 ? void 0 : token.match(/ /))) { - return { - success: false, - payload: null, - msg: "Please enter Google Access Token", - }; - } - /** - * Initialize HTTP response variable - */ - let httpResponse = { +export default async function googleAuth({ key, token, database, response, encryptionKey, encryptionSalt, additionalFields, additionalData, apiUserID, debug, secureCookie, loginOnly, }) { + var _a; + const grabedHostNames = grabHostNames({ + userId: apiUserID || process.env.DSQL_API_USER_ID, + }); + const { host, port, scheme, user_id } = grabedHostNames; + const COOKIE_EXPIRY_DATE = grabCookieExpiryDate(); + const finalEncryptionKey = encryptionKey || process.env.DSQL_ENCRYPTION_PASSWORD; + const finalEncryptionSalt = encryptionSalt || process.env.DSQL_ENCRYPTION_SALT; + if (!(finalEncryptionKey === null || finalEncryptionKey === void 0 ? void 0 : finalEncryptionKey.match(/.{8,}/))) { + console.log("Encryption key is invalid"); + return { success: false, + payload: null, + msg: "Encryption key is invalid", }; - /** - * Check for local DB settings - * - * @description Look for local db settings in `.env` file and by pass the http request if available - */ - const { DSQL_DB_HOST, DSQL_DB_USERNAME, DSQL_DB_PASSWORD, DSQL_DB_NAME } = process.env; - if ((DSQL_DB_HOST === null || DSQL_DB_HOST === void 0 ? void 0 : DSQL_DB_HOST.match(/./)) && - (DSQL_DB_USERNAME === null || DSQL_DB_USERNAME === void 0 ? void 0 : DSQL_DB_USERNAME.match(/./)) && - (DSQL_DB_PASSWORD === null || DSQL_DB_PASSWORD === void 0 ? void 0 : DSQL_DB_PASSWORD.match(/./)) && - (DSQL_DB_NAME === null || DSQL_DB_NAME === void 0 ? void 0 : DSQL_DB_NAME.match(/./)) && - global.DSQL_USE_LOCAL) { - if (debug) { - console.log(`Google login with Local Paradigm ...`); - } - httpResponse = yield (0, api_google_login_1.default)({ + } + if (!(finalEncryptionSalt === null || finalEncryptionSalt === void 0 ? void 0 : finalEncryptionSalt.match(/.{8,}/))) { + console.log("Encryption salt is invalid"); + return { + success: false, + payload: null, + msg: "Encryption salt is invalid", + }; + } + /** + * Check inputs + * + * @description Check inputs + */ + if (!token || (token === null || token === void 0 ? void 0 : token.match(/ /))) { + return { + success: false, + payload: null, + msg: "Please enter Google Access Token", + }; + } + /** + * Initialize HTTP response variable + */ + let httpResponse = { + success: false, + }; + /** + * Check for local DB settings + * + * @description Look for local db settings in `.env` file and by pass the http request if available + */ + const { DSQL_DB_HOST, DSQL_DB_USERNAME, DSQL_DB_PASSWORD, DSQL_DB_NAME } = process.env; + if ((DSQL_DB_HOST === null || DSQL_DB_HOST === void 0 ? void 0 : DSQL_DB_HOST.match(/./)) && + (DSQL_DB_USERNAME === null || DSQL_DB_USERNAME === void 0 ? 
void 0 : DSQL_DB_USERNAME.match(/./)) && + (DSQL_DB_PASSWORD === null || DSQL_DB_PASSWORD === void 0 ? void 0 : DSQL_DB_PASSWORD.match(/./)) && + (DSQL_DB_NAME === null || DSQL_DB_NAME === void 0 ? void 0 : DSQL_DB_NAME.match(/./)) && + global.DSQL_USE_LOCAL) { + if (debug) { + console.log(`Google login with Local Paradigm ...`); + } + httpResponse = await apiGoogleLogin({ + token, + additionalFields, + additionalData, + debug, + }); + } + else { + httpResponse = await new Promise((resolve, reject) => { + const reqPayload = JSON.stringify({ token, + database, additionalFields, additionalData, - debug, }); - } - else { - httpResponse = yield new Promise((resolve, reject) => { - const reqPayload = JSON.stringify({ - token, - database, - additionalFields, - additionalData, + const httpsRequest = scheme.request({ + method: "POST", + headers: { + "Content-Type": "application/json", + "Content-Length": Buffer.from(reqPayload).length, + Authorization: key || + process.env.DSQL_FULL_ACCESS_API_KEY || + process.env.DSQL_API_KEY, + }, + port, + hostname: host, + path: `/api/user/${apiUserID || grabedHostNames.user_id}/google-login`, + }, + /** + * Callback Function + * + * @description https request callback + */ + (response) => { + var str = ""; + response.on("data", function (chunk) { + str += chunk; }); - const httpsRequest = scheme.request({ - method: "POST", - headers: { - "Content-Type": "application/json", - "Content-Length": Buffer.from(reqPayload).length, - Authorization: key || - process.env.DSQL_FULL_ACCESS_API_KEY || - process.env.DSQL_API_KEY, - }, - port, - hostname: host, - path: `/api/user/${apiUserID || grabedHostNames.user_id}/google-login`, - }, - /** - * Callback Function - * - * @description https request callback - */ - (response) => { - var str = ""; - response.on("data", function (chunk) { - str += chunk; - }); - response.on("end", function () { - resolve(JSON.parse(str)); - }); - response.on("error", (err) => { - reject(err); - }); + response.on("end", function () { + resolve(JSON.parse(str)); + }); + response.on("error", (err) => { + reject(err); }); - httpsRequest.write(reqPayload); - httpsRequest.end(); }); + httpsRequest.write(reqPayload); + httpsRequest.end(); + }); + } + /** + * Make https request + * + * @description make a request to datasquirel.com + */ + if ((httpResponse === null || httpResponse === void 0 ? void 0 : httpResponse.success) && (httpResponse === null || httpResponse === void 0 ? void 0 : httpResponse.payload)) { + let encryptedPayload = encrypt({ + data: JSON.stringify(httpResponse.payload), + encryptionKey: finalEncryptionKey, + encryptionSalt: finalEncryptionSalt, + }); + const cookieNames = getAuthCookieNames({ + database, + userId: user_id, + }); + if (httpResponse.csrf) { + writeAuthFile(httpResponse.csrf, JSON.stringify(httpResponse.payload)); } - /** - * Make https request - * - * @description make a request to datasquirel.com - */ - if ((httpResponse === null || httpResponse === void 0 ? void 0 : httpResponse.success) && (httpResponse === null || httpResponse === void 0 ? 
void 0 : httpResponse.payload)) { - let encryptedPayload = (0, encrypt_1.default)({ - data: JSON.stringify(httpResponse.payload), - encryptionKey: finalEncryptionKey, - encryptionSalt: finalEncryptionSalt, - }); - const cookieNames = (0, get_auth_cookie_names_1.default)({ - database, - userId: user_id, - }); - if (httpResponse.csrf) { - (0, write_auth_files_1.writeAuthFile)(httpResponse.csrf, JSON.stringify(httpResponse.payload)); - } - httpResponse["cookieNames"] = cookieNames; - httpResponse["key"] = String(encryptedPayload); - const authKeyName = cookieNames.keyCookieName; - const csrfName = cookieNames.csrfCookieName; - response === null || response === void 0 ? void 0 : response.setHeader("Set-Cookie", [ - `${authKeyName}=${encryptedPayload};samesite=strict;path=/;HttpOnly=true;;Expires=${COOKIE_EXPIRY_DATE}${secureCookie ? ";Secure=true" : ""}`, - `${csrfName}=${(_b = httpResponse.payload) === null || _b === void 0 ? void 0 : _b.csrf_k};samesite=strict;path=/;HttpOnly=true;;Expires=${COOKIE_EXPIRY_DATE}`, - ]); - } - return httpResponse; - }); + httpResponse["cookieNames"] = cookieNames; + httpResponse["key"] = String(encryptedPayload); + const authKeyName = cookieNames.keyCookieName; + const csrfName = cookieNames.csrfCookieName; + response === null || response === void 0 ? void 0 : response.setHeader("Set-Cookie", [ + `${authKeyName}=${encryptedPayload};samesite=strict;path=/;HttpOnly=true;;Expires=${COOKIE_EXPIRY_DATE}${secureCookie ? ";Secure=true" : ""}`, + `${csrfName}=${(_a = httpResponse.payload) === null || _a === void 0 ? void 0 : _a.csrf_k};samesite=strict;path=/;HttpOnly=true;;Expires=${COOKIE_EXPIRY_DATE}`, + ]); + } + return httpResponse; } diff --git a/dist/package-shared/actions/users/update-user.js b/dist/package-shared/actions/users/update-user.js index 9b15d92..2a47d2e 100644 --- a/dist/package-shared/actions/users/update-user.js +++ b/dist/package-shared/actions/users/update-user.js @@ -1,98 +1,81 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = updateUser; -const path_1 = __importDefault(require("path")); -const fs_1 = __importDefault(require("fs")); -const grab_host_names_1 = __importDefault(require("../../utils/grab-host-names")); -const api_update_user_1 = __importDefault(require("../../functions/api/users/api-update-user")); +import path from "path"; +import fs from "fs"; +import grabHostNames from "../../utils/grab-host-names"; +import apiUpdateUser from "../../functions/api/users/api-update-user"; /** * # Update User */ -function updateUser(_a) { - return __awaiter(this, arguments, void 0, function* ({ key, payload, database, user_id, updatedUserId, }) { - /** - * Check for local DB settings - * - * @description Look for local db settings in `.env` file and by pass the http request if available - */ - const { DSQL_DB_HOST, DSQL_DB_USERNAME, DSQL_DB_PASSWORD, DSQL_DB_NAME } = process.env; - const grabedHostNames = (0, grab_host_names_1.default)(); - const { host, port, scheme } = grabedHostNames; - if ((DSQL_DB_HOST === null || DSQL_DB_HOST === void 0 ? void 0 : DSQL_DB_HOST.match(/./)) && - (DSQL_DB_USERNAME === null || DSQL_DB_USERNAME === void 0 ? void 0 : DSQL_DB_USERNAME.match(/./)) && - (DSQL_DB_PASSWORD === null || DSQL_DB_PASSWORD === void 0 ? void 0 : DSQL_DB_PASSWORD.match(/./)) && - (DSQL_DB_NAME === null || DSQL_DB_NAME === void 0 ? void 0 : DSQL_DB_NAME.match(/./)) && - global.DSQL_USE_LOCAL) { - /** @type {import("../../types").DSQL_DatabaseSchemaType | undefined} */ - let dbSchema; - try { - const localDbSchemaPath = path_1.default.resolve(process.cwd(), "dsql.schema.json"); - dbSchema = JSON.parse(fs_1.default.readFileSync(localDbSchemaPath, "utf8")); - } - catch (error) { } - return yield (0, api_update_user_1.default)({ - payload: payload, - dbFullName: DSQL_DB_NAME, - updatedUserId, - dbSchema, - }); +export default async function updateUser({ key, payload, database, user_id, updatedUserId, }) { + /** + * Check for local DB settings + * + * @description Look for local db settings in `.env` file and by pass the http request if available + */ + const { DSQL_DB_HOST, DSQL_DB_USERNAME, DSQL_DB_PASSWORD, DSQL_DB_NAME } = process.env; + const grabedHostNames = grabHostNames(); + const { host, port, scheme } = grabedHostNames; + if ((DSQL_DB_HOST === null || DSQL_DB_HOST === void 0 ? void 0 : DSQL_DB_HOST.match(/./)) && + (DSQL_DB_USERNAME === null || DSQL_DB_USERNAME === void 0 ? void 0 : DSQL_DB_USERNAME.match(/./)) && + (DSQL_DB_PASSWORD === null || DSQL_DB_PASSWORD === void 0 ? void 0 : DSQL_DB_PASSWORD.match(/./)) && + (DSQL_DB_NAME === null || DSQL_DB_NAME === void 0 ? 
void 0 : DSQL_DB_NAME.match(/./)) && + global.DSQL_USE_LOCAL) { + /** @type {import("../../types").DSQL_DatabaseSchemaType | undefined} */ + let dbSchema; + try { + const localDbSchemaPath = path.resolve(process.cwd(), "dsql.schema.json"); + dbSchema = JSON.parse(fs.readFileSync(localDbSchemaPath, "utf8")); } - /** - * Make https request - * - * @description make a request to datasquirel.com - */ - const httpResponse = yield new Promise((resolve, reject) => { - const reqPayload = JSON.stringify({ - payload, - database, - updatedUserId, - }); - const httpsRequest = scheme.request({ - method: "POST", - headers: { - "Content-Type": "application/json", - "Content-Length": Buffer.from(reqPayload).length, - Authorization: process.env.DSQL_FULL_ACCESS_API_KEY || - process.env.DSQL_API_KEY || - key, - }, - port, - hostname: host, - path: `/api/user/${user_id || grabedHostNames.user_id}/update-user`, - }, - /** - * Callback Function - * - * @description https request callback - */ - (response) => { - var str = ""; - response.on("data", function (chunk) { - str += chunk; - }); - response.on("end", function () { - resolve(JSON.parse(str)); - }); - response.on("error", (err) => { - reject(err); - }); - }); - httpsRequest.write(reqPayload); - httpsRequest.end(); + catch (error) { } + return await apiUpdateUser({ + payload: payload, + dbFullName: DSQL_DB_NAME, + updatedUserId, + dbSchema, }); - return httpResponse; + } + /** + * Make https request + * + * @description make a request to datasquirel.com + */ + const httpResponse = await new Promise((resolve, reject) => { + const reqPayload = JSON.stringify({ + payload, + database, + updatedUserId, + }); + const httpsRequest = scheme.request({ + method: "POST", + headers: { + "Content-Type": "application/json", + "Content-Length": Buffer.from(reqPayload).length, + Authorization: process.env.DSQL_FULL_ACCESS_API_KEY || + process.env.DSQL_API_KEY || + key, + }, + port, + hostname: host, + path: `/api/user/${user_id || grabedHostNames.user_id}/update-user`, + }, + /** + * Callback Function + * + * @description https request callback + */ + (response) => { + var str = ""; + response.on("data", function (chunk) { + str += chunk; + }); + response.on("end", function () { + resolve(JSON.parse(str)); + }); + response.on("error", (err) => { + reject(err); + }); + }); + httpsRequest.write(reqPayload); + httpsRequest.end(); }); + return httpResponse; } diff --git a/dist/package-shared/actions/users/user-auth.js b/dist/package-shared/actions/users/user-auth.js index 4710960..2c6d124 100644 --- a/dist/package-shared/actions/users/user-auth.js +++ b/dist/package-shared/actions/users/user-auth.js @@ -1,16 +1,10 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = userAuth; -const decrypt_1 = __importDefault(require("../../functions/dsql/decrypt")); -const get_auth_cookie_names_1 = __importDefault(require("../../functions/backend/cookies/get-auth-cookie-names")); -const write_auth_files_1 = require("../../functions/backend/auth/write-auth-files"); -const parseCookies_1 = __importDefault(require("../../utils/backend/parseCookies")); -const get_csrf_header_name_1 = __importDefault(require("../../actions/get-csrf-header-name")); -const grab_host_names_1 = __importDefault(require("../../utils/grab-host-names")); -const debug_log_1 = __importDefault(require("../../utils/logging/debug-log")); +import decrypt from "../../functions/dsql/decrypt"; +import getAuthCookieNames from "../../functions/backend/cookies/get-auth-cookie-names"; +import { checkAuthFile } from "../../functions/backend/auth/write-auth-files"; +import parseCookies from "../../utils/backend/parseCookies"; +import getCsrfHeaderName from "../../actions/get-csrf-header-name"; +import grabHostNames from "../../utils/grab-host-names"; +import debugLog from "../../utils/logging/debug-log"; const minuteInMilliseconds = 60000; const hourInMilliseconds = minuteInMilliseconds * 60; const dayInMilliseconds = hourInMilliseconds * 24; @@ -23,28 +17,28 @@ const yearInMilliseconds = dayInMilliseconds * 365; * @description This Function takes in a request object and returns a user object * with the user's data */ -function userAuth({ request, req, encryptionKey, encryptionSalt, level, database, dsqlUserId, encryptedUserString, expiry = weekInMilliseconds, cookieString, csrfHeaderName, debug, skipFileCheck, }) { +export default function userAuth({ request, req, encryptionKey, encryptionSalt, level, database, dsqlUserId, encryptedUserString, expiry = weekInMilliseconds, cookieString, csrfHeaderName, debug, skipFileCheck, }) { var _a; try { const finalRequest = req || request; - const { user_id } = (0, grab_host_names_1.default)({ userId: dsqlUserId }); - const cookies = (0, parseCookies_1.default)({ + const { user_id } = grabHostNames({ userId: dsqlUserId }); + const cookies = parseCookies({ request: finalRequest, cookieString, }); if (debug) { - (0, debug_log_1.default)({ + debugLog({ log: cookies, addTime: true, label: "userAuth:cookies", }); } - const keyNames = (0, get_auth_cookie_names_1.default)({ + const keyNames = getAuthCookieNames({ userId: user_id, database: database || process.env.DSQL_DB_NAME, }); if (debug) { - (0, debug_log_1.default)({ + debugLog({ log: keyNames, addTime: true, label: "userAuth:keyNames", @@ -54,7 +48,7 @@ function userAuth({ request, req, encryptionKey, encryptionSalt, level, database ? 
encryptedUserString : cookies[keyNames.keyCookieName]; if (debug) { - (0, debug_log_1.default)({ + debugLog({ log: key, addTime: true, label: "userAuth:key", @@ -65,13 +59,13 @@ function userAuth({ request, req, encryptionKey, encryptionSalt, level, database * * @description Grab the payload */ - let userPayloadJSON = (0, decrypt_1.default)({ + let userPayloadJSON = decrypt({ encryptedString: key, encryptionKey, encryptionSalt, }); if (debug) { - (0, debug_log_1.default)({ + debugLog({ log: userPayloadJSON, addTime: true, label: "userAuth:userPayloadJSON", @@ -92,7 +86,7 @@ function userAuth({ request, req, encryptionKey, encryptionSalt, level, database } let userObject = JSON.parse(userPayloadJSON); if (debug) { - (0, debug_log_1.default)({ + debugLog({ log: userObject, addTime: true, label: "userAuth:userObject", @@ -106,7 +100,7 @@ function userAuth({ request, req, encryptionKey, encryptionSalt, level, database cookieNames: keyNames, }; } - if (!skipFileCheck && !(0, write_auth_files_1.checkAuthFile)(userObject.csrf_k)) { + if (!skipFileCheck && !checkAuthFile(userObject.csrf_k)) { return { success: false, payload: null, @@ -120,7 +114,7 @@ function userAuth({ request, req, encryptionKey, encryptionSalt, level, database * @description Grab the payload */ if ((level === null || level === void 0 ? void 0 : level.match(/deep/i)) && finalRequest) { - const finalCsrfHeaderName = csrfHeaderName || (0, get_csrf_header_name_1.default)(); + const finalCsrfHeaderName = csrfHeaderName || getCsrfHeaderName(); if (finalRequest.headers[finalCsrfHeaderName] !== userObject.csrf_k) { return { success: false, diff --git a/dist/package-shared/actions/users/validate-temp-email-code.js b/dist/package-shared/actions/users/validate-temp-email-code.js index a801451..2c8421f 100644 --- a/dist/package-shared/actions/users/validate-temp-email-code.js +++ b/dist/package-shared/actions/users/validate-temp-email-code.js @@ -1,49 +1,32 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = validateTempEmailCode; -const get_auth_cookie_names_1 = __importDefault(require("../../functions/backend/cookies/get-auth-cookie-names")); -const parseCookies_1 = __importDefault(require("../../utils/backend/parseCookies")); -const decrypt_1 = __importDefault(require("../../functions/dsql/decrypt")); -const ejson_1 = __importDefault(require("../../utils/ejson")); +import getAuthCookieNames from "../../functions/backend/cookies/get-auth-cookie-names"; +import parseCookies from "../../utils/backend/parseCookies"; +import decrypt from "../../functions/dsql/decrypt"; +import EJSON from "../../utils/ejson"; /** * # Verify the temp email code sent to the user's email address */ -function validateTempEmailCode(_a) { - return __awaiter(this, arguments, void 0, function* ({ request, email, cookieString, }) { - var _b; - try { - const keyNames = (0, get_auth_cookie_names_1.default)(); - const oneTimeCodeCookieName = keyNames.oneTimeCodeName; - const cookies = (0, parseCookies_1.default)({ request, cookieString }); - const encryptedOneTimeCode = cookies[oneTimeCodeCookieName]; - const encryptedPayload = (0, decrypt_1.default)({ - encryptedString: encryptedOneTimeCode, - }); - const payload = ejson_1.default.parse(encryptedPayload); - if ((payload === null || payload === void 0 ? void 0 : payload.email) && !email) { - return payload; - } - if ((payload === null || payload === void 0 ? void 0 : payload.email) && payload.email === email) { - return payload; - } - return null; +export default async function validateTempEmailCode({ request, email, cookieString, }) { + var _a; + try { + const keyNames = getAuthCookieNames(); + const oneTimeCodeCookieName = keyNames.oneTimeCodeName; + const cookies = parseCookies({ request, cookieString }); + const encryptedOneTimeCode = cookies[oneTimeCodeCookieName]; + const encryptedPayload = decrypt({ + encryptedString: encryptedOneTimeCode, + }); + const payload = EJSON.parse(encryptedPayload); + if ((payload === null || payload === void 0 ? void 0 : payload.email) && !email) { + return payload; } - catch (error) { - (_b = global.ERROR_CALLBACK) === null || _b === void 0 ? void 0 : _b.call(global, `Validate Temp Email Code Error`, error); - console.log("validateTempEmailCode error:", error.message); - return null; + if ((payload === null || payload === void 0 ? void 0 : payload.email) && payload.email === email) { + return payload; } - }); + return null; + } + catch (error) { + (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `Validate Temp Email Code Error`, error); + console.log("validateTempEmailCode error:", error.message); + return null; + } } diff --git a/dist/package-shared/actions/users/validate-token.js b/dist/package-shared/actions/users/validate-token.js index ec18a3d..2834776 100644 --- a/dist/package-shared/actions/users/validate-token.js +++ b/dist/package-shared/actions/users/validate-token.js @@ -1,16 +1,10 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = validateToken; -const decrypt_1 = __importDefault(require("../../functions/dsql/decrypt")); +import decrypt from "../../functions/dsql/decrypt"; /** * Validate Token * ====================================== * @description This Function takes in a encrypted token and returns a user object */ -function validateToken({ token, encryptionKey, encryptionSalt, }) { +export default function validateToken({ token, encryptionKey, encryptionSalt, }) { var _a; try { /** @@ -24,7 +18,7 @@ function validateToken({ token, encryptionKey, encryptionSalt, }) { * * @description Grab the payload */ - let userPayload = (0, decrypt_1.default)({ + let userPayload = decrypt({ encryptedString: key, encryptionKey, encryptionSalt, diff --git a/dist/package-shared/api/crud/delete.d.ts b/dist/package-shared/api/crud/delete.d.ts new file mode 100644 index 0000000..dfef902 --- /dev/null +++ b/dist/package-shared/api/crud/delete.d.ts @@ -0,0 +1,21 @@ +import { SQLDeleteData } from "../../types"; +type Params = { + dbName: string; + tableName: string; + deleteSpec?: T & { + deleteKeyValues?: SQLDeleteData[]; + }; + targetID?: string | number; +}; +export default function apiCrudDELETE({ dbName, tableName, deleteSpec, targetID }: Params): Promise>; +export {}; diff --git a/dist/package-shared/api/crud/delete.js b/dist/package-shared/api/crud/delete.js new file mode 100644 index 0000000..741702d --- /dev/null +++ b/dist/package-shared/api/crud/delete.js @@ -0,0 +1,14 @@ +import path from "path"; +import queryDSQLAPI from "../../functions/api/query-dsql-api"; +import grabAPIBasePath from "../../utils/grab-api-base-path"; +export default async function apiCrudDELETE({ dbName, tableName, deleteSpec, targetID }) { + const basePath = grabAPIBasePath({ paradigm: "crud" }); + const finalID = typeof targetID === "number" ? String(targetID) : targetID; + const finalPath = path.join(basePath, dbName, tableName, finalID || ""); + const GET_RES = await queryDSQLAPI({ + method: "DELETE", + path: finalPath, + body: deleteSpec, + }); + return GET_RES; +} diff --git a/dist/package-shared/api/crud/get.d.ts b/dist/package-shared/api/crud/get.d.ts new file mode 100644 index 0000000..e4665f5 --- /dev/null +++ b/dist/package-shared/api/crud/get.d.ts @@ -0,0 +1,17 @@ +import { APIResponseObject, DsqlCrudQueryObject } from "../../types"; +type Params = { + dbName: string; + tableName: string; + query?: DsqlCrudQueryObject; + targetId?: string | number; +}; +export default function apiCrudGET({ dbName, tableName, query, targetId, }: Params): Promise; +export {}; diff --git a/dist/package-shared/api/crud/get.js b/dist/package-shared/api/crud/get.js new file mode 100644 index 0000000..6763df9 --- /dev/null +++ b/dist/package-shared/api/crud/get.js @@ -0,0 +1,14 @@ +import path from "path"; +import queryDSQLAPI from "../../functions/api/query-dsql-api"; +import grabAPIBasePath from "../../utils/grab-api-base-path"; +export default async function apiCrudGET({ dbName, tableName, query, targetId, }) { + const basePath = grabAPIBasePath({ paradigm: "crud" }); + const finalID = typeof targetId === "number" ? 
String(targetId) : targetId; + const finalPath = path.join(basePath, dbName, tableName, finalID || ""); + const GET_RES = await queryDSQLAPI({ + method: "GET", + path: finalPath, + query, + }); + return GET_RES; +} diff --git a/dist/package-shared/api/crud/index.d.ts b/dist/package-shared/api/crud/index.d.ts new file mode 100644 index 0000000..59a8324 --- /dev/null +++ b/dist/package-shared/api/crud/index.d.ts @@ -0,0 +1,12 @@ +import apiCrudGET from "./get"; +import apiCrudPOST from "./post"; +import apiCrudPUT from "./put"; +import apiCrudDELETE from "./delete"; +declare const crud: { + get: typeof apiCrudGET; + insert: typeof apiCrudPOST; + update: typeof apiCrudPUT; + delete: typeof apiCrudDELETE; + options: () => Promise; +}; +export default crud; diff --git a/dist/package-shared/api/crud/index.js b/dist/package-shared/api/crud/index.js new file mode 100644 index 0000000..ea90647 --- /dev/null +++ b/dist/package-shared/api/crud/index.js @@ -0,0 +1,12 @@ +import apiCrudGET from "./get"; +import apiCrudPOST from "./post"; +import apiCrudPUT from "./put"; +import apiCrudDELETE from "./delete"; +const crud = { + get: apiCrudGET, + insert: apiCrudPOST, + update: apiCrudPUT, + delete: apiCrudDELETE, + options: async () => { }, +}; +export default crud; diff --git a/dist/package-shared/api/crud/post.d.ts b/dist/package-shared/api/crud/post.d.ts new file mode 100644 index 0000000..f3d4659 --- /dev/null +++ b/dist/package-shared/api/crud/post.d.ts @@ -0,0 +1,16 @@ +import { APIResponseObject } from "../../types"; +export type APICrudPostParams = { + dbName: string; + tableName: string; + body: T; + update?: boolean; +}; +export default function apiCrudPOST({ dbName, tableName, body, update, }: APICrudPostParams): Promise; diff --git a/dist/package-shared/api/crud/post.js b/dist/package-shared/api/crud/post.js new file mode 100644 index 0000000..3e08889 --- /dev/null +++ b/dist/package-shared/api/crud/post.js @@ -0,0 +1,19 @@ +import path from "path"; +import queryDSQLAPI from "../../functions/api/query-dsql-api"; +import grabAPIBasePath from "../../utils/grab-api-base-path"; +export default async function apiCrudPOST({ dbName, tableName, body, update, }) { + const basePath = grabAPIBasePath({ paradigm: "crud" }); + const passedID = body.id; + const finalID = update + ? typeof passedID === "number" + ? String(passedID) + : passedID + : undefined; + const finalPath = path.join(basePath, dbName, tableName, finalID || ""); + const GET_RES = await queryDSQLAPI({ + method: update ? 
"PUT" : "POST", + path: finalPath, + body, + }); + return GET_RES; +} diff --git a/dist/package-shared/api/crud/put.d.ts b/dist/package-shared/api/crud/put.d.ts new file mode 100644 index 0000000..3090b0c --- /dev/null +++ b/dist/package-shared/api/crud/put.d.ts @@ -0,0 +1,14 @@ +import { APICrudPostParams } from "./post"; +type Params = Omit, "update"> & { + targetID: string | number; +}; +export default function apiCrudPUT({ dbName, tableName, body, targetID }: Params): Promise; +export {}; diff --git a/dist/package-shared/api/crud/put.js b/dist/package-shared/api/crud/put.js new file mode 100644 index 0000000..056a053 --- /dev/null +++ b/dist/package-shared/api/crud/put.js @@ -0,0 +1,13 @@ +import apiCrudPOST from "./post"; +export default async function apiCrudPUT({ dbName, tableName, body, targetID }) { + const updatedBody = Object.assign({}, body); + if (targetID) { + updatedBody["id"] = targetID; + } + return await apiCrudPOST({ + dbName, + tableName, + body: updatedBody, + update: true, + }); +} diff --git a/dist/package-shared/api/media/delete.d.ts b/dist/package-shared/api/media/delete.d.ts new file mode 100644 index 0000000..d355aa0 --- /dev/null +++ b/dist/package-shared/api/media/delete.d.ts @@ -0,0 +1,5 @@ +import { APIResponseObject } from "../../types"; +import { DSQL_DATASQUIREL_USER_MEDIA } from "../../types/dsql"; +export default function apiMediaDELETE(params: { + mediaID?: string | number; +}): Promise>; diff --git a/dist/package-shared/api/media/delete.js b/dist/package-shared/api/media/delete.js new file mode 100644 index 0000000..dc9ab13 --- /dev/null +++ b/dist/package-shared/api/media/delete.js @@ -0,0 +1,17 @@ +import queryDSQLAPI from "../../functions/api/query-dsql-api"; +import path from "path"; +import grabAPIBasePath from "../../utils/grab-api-base-path"; +export default async function apiMediaDELETE(params) { + const basePath = grabAPIBasePath({ paradigm: "media" }); + const mediaID = params.mediaID + ? typeof params.mediaID === "number" + ? String(params.mediaID) + : params.mediaID + : undefined; + const finalPath = path.join(basePath, mediaID || ""); + const DELETE_MEDIA_RES = await queryDSQLAPI({ + method: "DELETE", + path: finalPath, + }); + return DELETE_MEDIA_RES; +} diff --git a/dist/package-shared/api/media/get.d.ts b/dist/package-shared/api/media/get.d.ts new file mode 100644 index 0000000..8437929 --- /dev/null +++ b/dist/package-shared/api/media/get.d.ts @@ -0,0 +1,3 @@ +import { APIGetMediaParams, APIResponseObject } from "../../types"; +import { DSQL_DATASQUIREL_USER_MEDIA } from "../../types/dsql"; +export default function apiMediaGET(params: APIGetMediaParams): Promise>; diff --git a/dist/package-shared/api/media/get.js b/dist/package-shared/api/media/get.js new file mode 100644 index 0000000..02a4a6d --- /dev/null +++ b/dist/package-shared/api/media/get.js @@ -0,0 +1,18 @@ +import queryDSQLAPI from "../../functions/api/query-dsql-api"; +import path from "path"; +import grabAPIBasePath from "../../utils/grab-api-base-path"; +export default async function apiMediaGET(params) { + const basePath = grabAPIBasePath({ paradigm: "media" }); + const mediaID = params.mediaID + ? typeof params.mediaID === "number" + ? 
String(params.mediaID) + : params.mediaID + : undefined; + const finalPath = path.join(basePath, mediaID || ""); + const GET_MEDIA_RES = await queryDSQLAPI({ + method: "GET", + path: finalPath, + query: params, + }); + return GET_MEDIA_RES; +} diff --git a/dist/package-shared/api/media/index.d.ts b/dist/package-shared/api/media/index.d.ts new file mode 100644 index 0000000..d30b322 --- /dev/null +++ b/dist/package-shared/api/media/index.d.ts @@ -0,0 +1,9 @@ +import apiMediaGET from "./get"; +import apiMediaPOST from "./post"; +import apiMediaDELETE from "./delete"; +declare const media: { + get: typeof apiMediaGET; + add: typeof apiMediaPOST; + delete: typeof apiMediaDELETE; +}; +export default media; diff --git a/dist/package-shared/api/media/index.js b/dist/package-shared/api/media/index.js new file mode 100644 index 0000000..08dc010 --- /dev/null +++ b/dist/package-shared/api/media/index.js @@ -0,0 +1,9 @@ +import apiMediaGET from "./get"; +import apiMediaPOST from "./post"; +import apiMediaDELETE from "./delete"; +const media = { + get: apiMediaGET, + add: apiMediaPOST, + delete: apiMediaDELETE, +}; +export default media; diff --git a/dist/package-shared/api/media/post.d.ts b/dist/package-shared/api/media/post.d.ts new file mode 100644 index 0000000..a224f85 --- /dev/null +++ b/dist/package-shared/api/media/post.d.ts @@ -0,0 +1,3 @@ +import { AddMediaAPIBody, APIResponseObject } from "../../types"; +import { DSQL_DATASQUIREL_USER_MEDIA } from "../../types/dsql"; +export default function apiMediaPOST(params: AddMediaAPIBody): Promise>; diff --git a/dist/package-shared/api/media/post.js b/dist/package-shared/api/media/post.js new file mode 100644 index 0000000..0f0cb08 --- /dev/null +++ b/dist/package-shared/api/media/post.js @@ -0,0 +1,11 @@ +import queryDSQLAPI from "../../functions/api/query-dsql-api"; +import grabAPIBasePath from "../../utils/grab-api-base-path"; +export default async function apiMediaPOST(params) { + const basePath = grabAPIBasePath({ paradigm: "media" }); + const POST_MEDIA_RES = await queryDSQLAPI({ + method: "POST", + path: basePath, + body: params, + }); + return POST_MEDIA_RES; +} diff --git a/dist/package-shared/api/user/index.d.ts b/dist/package-shared/api/user/index.d.ts new file mode 100644 index 0000000..ebda72b --- /dev/null +++ b/dist/package-shared/api/user/index.d.ts @@ -0,0 +1,2 @@ +declare const user: {}; +export default user; diff --git a/dist/package-shared/api/user/index.js b/dist/package-shared/api/user/index.js new file mode 100644 index 0000000..1056d92 --- /dev/null +++ b/dist/package-shared/api/user/index.js @@ -0,0 +1,2 @@ +const user = {}; +export default user; diff --git a/dist/package-shared/data/data-types.d.ts b/dist/package-shared/data/data-types.d.ts new file mode 100644 index 0000000..46e53c3 --- /dev/null +++ b/dist/package-shared/data/data-types.d.ts @@ -0,0 +1,87 @@ +declare const DataTypes: readonly [{ + readonly title: "VARCHAR"; + readonly name: "VARCHAR"; + readonly value: "0-255"; + readonly argument: true; + readonly description: "Varchar is simply letters and numbers within the range 0 - 255"; + readonly maxValue: 255; +}, { + readonly title: "TINYINT"; + readonly name: "TINYINT"; + readonly value: "0-100"; + readonly description: "TINYINT means Integers: 0 to 100"; + readonly maxValue: 127; +}, { + readonly title: "SMALLINT"; + readonly name: "SMALLINT"; + readonly value: "0-255"; + readonly description: "SMALLINT means Integers: 0 to 240933"; + readonly maxValue: 32767; +}, { + readonly title: "MEDIUMINT"; + readonly 
name: "MEDIUMINT"; + readonly value: "0-255"; + readonly description: "MEDIUMINT means Integers: 0 to 1245568545560"; + readonly maxValue: 8388607; +}, { + readonly title: "INT"; + readonly name: "INT"; + readonly value: "0-255"; + readonly description: "INT means Integers: 0 to 12560"; + readonly maxValue: 2147483647; +}, { + readonly title: "BIGINT"; + readonly name: "BIGINT"; + readonly value: "0-255"; + readonly description: "BIGINT means Integers: 0 to 1245569056767568545560"; + readonly maxValue: 2e+63; +}, { + readonly title: "TINYTEXT"; + readonly name: "TINYTEXT"; + readonly value: "0-255"; + readonly description: "Text with 255 max characters"; + readonly maxValue: 127; +}, { + readonly title: "TEXT"; + readonly name: "TEXT"; + readonly value: "0-100"; + readonly description: "MEDIUMTEXT is just text with max length 16,777,215"; +}, { + readonly title: "MEDIUMTEXT"; + readonly name: "MEDIUMTEXT"; + readonly value: "0-255"; + readonly description: "MEDIUMTEXT is just text with max length 16,777,215"; +}, { + readonly title: "LONGTEXT"; + readonly name: "LONGTEXT"; + readonly value: "0-255"; + readonly description: "LONGTEXT is just text with max length 4,294,967,295"; +}, { + readonly title: "DECIMAL"; + readonly name: "DECIMAL"; + readonly description: "Numbers with decimals"; + readonly integer: "1-100"; + readonly decimals: "1-4"; +}, { + readonly title: "FLOAT"; + readonly name: "FLOAT"; + readonly description: "Numbers with decimals"; + readonly integer: "1-100"; + readonly decimals: "1-4"; +}, { + readonly title: "DOUBLE"; + readonly name: "DOUBLE"; + readonly description: "Numbers with decimals"; + readonly integer: "1-100"; + readonly decimals: "1-4"; +}, { + readonly title: "UUID"; + readonly name: "UUID"; + readonly valueLiteral: "UUID()"; + readonly description: "A Unique ID"; +}, { + readonly title: "TIMESTAMP"; + readonly name: "TIMESTAMP"; + readonly description: "Time Stamp"; +}]; +export default DataTypes; diff --git a/dist/package-shared/data/data-types.js b/dist/package-shared/data/data-types.js new file mode 100644 index 0000000..4bd3476 --- /dev/null +++ b/dist/package-shared/data/data-types.js @@ -0,0 +1,103 @@ +const DataTypes = [ + { + title: "VARCHAR", + name: "VARCHAR", + value: "0-255", + argument: true, + description: "Varchar is simply letters and numbers within the range 0 - 255", + maxValue: 255, + }, + { + title: "TINYINT", + name: "TINYINT", + value: "0-100", + description: "TINYINT means Integers: 0 to 100", + maxValue: 127, + }, + { + title: "SMALLINT", + name: "SMALLINT", + value: "0-255", + description: "SMALLINT means Integers: 0 to 240933", + maxValue: 32767, + }, + { + title: "MEDIUMINT", + name: "MEDIUMINT", + value: "0-255", + description: "MEDIUMINT means Integers: 0 to 1245568545560", + maxValue: 8388607, + }, + { + title: "INT", + name: "INT", + value: "0-255", + description: "INT means Integers: 0 to 12560", + maxValue: 2147483647, + }, + { + title: "BIGINT", + name: "BIGINT", + value: "0-255", + description: "BIGINT means Integers: 0 to 1245569056767568545560", + maxValue: 2e63, + }, + { + title: "TINYTEXT", + name: "TINYTEXT", + value: "0-255", + description: "Text with 255 max characters", + maxValue: 127, + }, + { + title: "TEXT", + name: "TEXT", + value: "0-100", + description: "MEDIUMTEXT is just text with max length 16,777,215", + }, + { + title: "MEDIUMTEXT", + name: "MEDIUMTEXT", + value: "0-255", + description: "MEDIUMTEXT is just text with max length 16,777,215", + }, + { + title: "LONGTEXT", + name: "LONGTEXT", + value: 
"0-255", + description: "LONGTEXT is just text with max length 4,294,967,295", + }, + { + title: "DECIMAL", + name: "DECIMAL", + description: "Numbers with decimals", + integer: "1-100", + decimals: "1-4", + }, + { + title: "FLOAT", + name: "FLOAT", + description: "Numbers with decimals", + integer: "1-100", + decimals: "1-4", + }, + { + title: "DOUBLE", + name: "DOUBLE", + description: "Numbers with decimals", + integer: "1-100", + decimals: "1-4", + }, + { + title: "UUID", + name: "UUID", + valueLiteral: "UUID()", + description: "A Unique ID", + }, + { + title: "TIMESTAMP", + name: "TIMESTAMP", + description: "Time Stamp", + }, +]; +export default DataTypes; diff --git a/dist/package-shared/data/data-types.ts b/dist/package-shared/data/data-types.ts new file mode 100644 index 0000000..bad6714 --- /dev/null +++ b/dist/package-shared/data/data-types.ts @@ -0,0 +1,105 @@ +const DataTypes = [ + { + title: "VARCHAR", + name: "VARCHAR", + value: "0-255", + argument: true, + description: + "Varchar is simply letters and numbers within the range 0 - 255", + maxValue: 255, + }, + { + title: "TINYINT", + name: "TINYINT", + value: "0-100", + description: "TINYINT means Integers: 0 to 100", + maxValue: 127, + }, + { + title: "SMALLINT", + name: "SMALLINT", + value: "0-255", + description: "SMALLINT means Integers: 0 to 240933", + maxValue: 32767, + }, + { + title: "MEDIUMINT", + name: "MEDIUMINT", + value: "0-255", + description: "MEDIUMINT means Integers: 0 to 1245568545560", + maxValue: 8388607, + }, + { + title: "INT", + name: "INT", + value: "0-255", + description: "INT means Integers: 0 to 12560", + maxValue: 2147483647, + }, + { + title: "BIGINT", + name: "BIGINT", + value: "0-255", + description: "BIGINT means Integers: 0 to 1245569056767568545560", + maxValue: 2e63, + }, + { + title: "TINYTEXT", + name: "TINYTEXT", + value: "0-255", + description: "Text with 255 max characters", + maxValue: 127, + }, + { + title: "TEXT", + name: "TEXT", + value: "0-100", + description: "MEDIUMTEXT is just text with max length 16,777,215", + }, + { + title: "MEDIUMTEXT", + name: "MEDIUMTEXT", + value: "0-255", + description: "MEDIUMTEXT is just text with max length 16,777,215", + }, + { + title: "LONGTEXT", + name: "LONGTEXT", + value: "0-255", + description: "LONGTEXT is just text with max length 4,294,967,295", + }, + { + title: "DECIMAL", + name: "DECIMAL", + description: "Numbers with decimals", + integer: "1-100", + decimals: "1-4", + }, + { + title: "FLOAT", + name: "FLOAT", + description: "Numbers with decimals", + integer: "1-100", + decimals: "1-4", + }, + { + title: "DOUBLE", + name: "DOUBLE", + description: "Numbers with decimals", + integer: "1-100", + decimals: "1-4", + }, + { + title: "UUID", + name: "UUID", + valueLiteral: "UUID()", + description: "A Unique ID", + }, + { + title: "TIMESTAMP", + name: "TIMESTAMP", + description: "Time Stamp", + }, +] as const; + +export default DataTypes; diff --git a/dist/package-shared/data/dataTypes.json b/dist/package-shared/data/dataTypes.json index 2207d5a..c410934 100644 --- a/dist/package-shared/data/dataTypes.json +++ b/dist/package-shared/data/dataTypes.json @@ -88,6 +88,14 @@ "integer": "1-100", "decimals": "1-4" }, + { + "title": "OPTIONS", + "name": "VARCHAR", + "value": "250", + "argument": true, + "description": "This is a custom field which is a varchar under the hood", + "maxValue": 255 + }, { "title": "UUID", "name": "UUID", diff --git a/dist/package-shared/dict/app-names.d.ts b/dist/package-shared/dict/app-names.d.ts new file mode 100644 index 
0000000..10a48bd --- /dev/null +++ b/dist/package-shared/dict/app-names.d.ts @@ -0,0 +1,8 @@ +export declare const AppNames: { + readonly MaxScaleUserName: "dsql_maxscale_user"; + readonly ReplicaUserName: "dsql_replication_user"; + readonly DsqlDbPrefix: "datasquirel_user_"; + readonly PrivateMediaProceedureName: "dsql_UpdateUserMedia"; + readonly PrivateMediaInsertTriggerName: "dsql_trg_user_private_folders_insert"; + readonly PrivateMediaDeleteTriggerName: "dsql_trg_user_private_folders_delete"; +}; diff --git a/dist/package-shared/dict/app-names.js b/dist/package-shared/dict/app-names.js new file mode 100644 index 0000000..e020bee --- /dev/null +++ b/dist/package-shared/dict/app-names.js @@ -0,0 +1,8 @@ +export const AppNames = { + MaxScaleUserName: "dsql_maxscale_user", + ReplicaUserName: "dsql_replication_user", + DsqlDbPrefix: "datasquirel_user_", + PrivateMediaProceedureName: "dsql_UpdateUserMedia", + PrivateMediaInsertTriggerName: "dsql_trg_user_private_folders_insert", + PrivateMediaDeleteTriggerName: "dsql_trg_user_private_folders_delete", +}; diff --git a/dist/package-shared/dict/cookie-names.d.ts b/dist/package-shared/dict/cookie-names.d.ts new file mode 100644 index 0000000..9f7416b --- /dev/null +++ b/dist/package-shared/dict/cookie-names.d.ts @@ -0,0 +1,5 @@ +export declare const CookieNames: { + readonly OneTimeLoginEmail: "dsql-one-time-login-email"; + readonly DelegatedUserId: "dsql-delegated-user-id"; + readonly DelegatedDatabase: "dsql-delegated-database"; +}; diff --git a/dist/package-shared/dict/cookie-names.js b/dist/package-shared/dict/cookie-names.js new file mode 100644 index 0000000..b17dcac --- /dev/null +++ b/dist/package-shared/dict/cookie-names.js @@ -0,0 +1,5 @@ +export const CookieNames = { + OneTimeLoginEmail: "dsql-one-time-login-email", + DelegatedUserId: "dsql-delegated-user-id", + DelegatedDatabase: "dsql-delegated-database", +}; diff --git a/dist/package-shared/dict/local-storage-dict.d.ts b/dist/package-shared/dict/local-storage-dict.d.ts new file mode 100644 index 0000000..6283419 --- /dev/null +++ b/dist/package-shared/dict/local-storage-dict.d.ts @@ -0,0 +1,7 @@ +export declare const LocalStorageDict: { + OneTimeEmail: string; + User: string; + CSRF: string; + CurrentQueue: string; + DiskUsage: string; +}; diff --git a/dist/package-shared/dict/local-storage-dict.js b/dist/package-shared/dict/local-storage-dict.js new file mode 100644 index 0000000..a7b36cc --- /dev/null +++ b/dist/package-shared/dict/local-storage-dict.js @@ -0,0 +1,8 @@ +import getCsrfHeaderName from "../actions/get-csrf-header-name"; +export const LocalStorageDict = { + OneTimeEmail: "dsql-one-time-login-email", + User: "user", + CSRF: getCsrfHeaderName(), + CurrentQueue: "current_queue", + DiskUsage: "disk_usage", +}; diff --git a/dist/package-shared/dict/resource-limits.d.ts b/dist/package-shared/dict/resource-limits.d.ts new file mode 100644 index 0000000..6c3a4ac --- /dev/null +++ b/dist/package-shared/dict/resource-limits.d.ts @@ -0,0 +1,6 @@ +declare const ResourceLimits: { + readonly user_databases: 20; + readonly table_entries: 20; + readonly general: 20; +}; +export default ResourceLimits; diff --git a/dist/package-shared/dict/resource-limits.js b/dist/package-shared/dict/resource-limits.js new file mode 100644 index 0000000..c963351 --- /dev/null +++ b/dist/package-shared/dict/resource-limits.js @@ -0,0 +1,6 @@ +const ResourceLimits = { + user_databases: 20, + table_entries: 20, + general: 20, +}; +export default ResourceLimits; diff --git 
a/dist/package-shared/functions/api/query-dsql-api.d.ts b/dist/package-shared/functions/api/query-dsql-api.d.ts new file mode 100644 index 0000000..ba49e20 --- /dev/null +++ b/dist/package-shared/functions/api/query-dsql-api.d.ts @@ -0,0 +1,20 @@ +import { APIResponseObject, DataCrudRequestMethods, DataCrudRequestMethodsLowerCase } from "../../types"; +type Param = { + key?: string; + body?: T; + query?: T; + useDefault?: boolean; + path: string; + method?: (typeof DataCrudRequestMethods)[number] | (typeof DataCrudRequestMethodsLowerCase)[number]; +}; +/** + * # Query DSQL API + */ +export default function queryDSQLAPI({ key, body, query, useDefault, path: passedPath, method, }: Param): Promise>; +export {}; diff --git a/dist/package-shared/functions/api/query-dsql-api.js b/dist/package-shared/functions/api/query-dsql-api.js new file mode 100644 index 0000000..6cba258 --- /dev/null +++ b/dist/package-shared/functions/api/query-dsql-api.js @@ -0,0 +1,73 @@ +import path from "path"; +import grabHostNames from "../../utils/grab-host-names"; +import serializeQuery from "../../utils/serialize-query"; +/** + * # Query DSQL API + */ +export default async function queryDSQLAPI({ key, body, query, useDefault, path: passedPath, method, }) { + const grabedHostNames = grabHostNames({ useDefault }); + const { host, port, scheme } = grabedHostNames; + try { + /** + * Make https request + * + * @description make a request to datasquirel.com + */ + const httpResponse = await new Promise((resolve, reject) => { + const reqPayload = body ? JSON.stringify(body) : undefined; + let headers = { + "Content-Type": "application/json", + Authorization: key || + (!method || method == "GET" || method == "get" + ? process.env.DSQL_READ_ONLY_API_KEY + : undefined) || + process.env.DSQL_FULL_ACCESS_API_KEY || + process.env.DSQL_API_KEY, + }; + if (reqPayload) { + headers["Content-Length"] = Buffer.from(reqPayload).length; + } + let finalPath = path.join("/", passedPath); + if (query) { + const queryString = serializeQuery(query); + finalPath += `${queryString}`; + } + const httpsRequest = scheme.request({ + method: method || "GET", + headers, + port, + hostname: host, + path: finalPath, + }, + /** + * Callback Function + * + * @description https request callback + */ + (response) => { + var str = ""; + response.on("data", function (chunk) { + str += chunk; + }); + response.on("end", function () { + resolve(JSON.parse(str)); + }); + response.on("error", (err) => { + reject(err); + }); + }); + if (reqPayload) { + httpsRequest.write(reqPayload); + } + httpsRequest.end(); + }); + return httpResponse; + } + catch (error) { + return { + success: false, + payload: undefined, + msg: error.message, + }; + } +} diff --git a/dist/package-shared/functions/api/query/get.js b/dist/package-shared/functions/api/query/get.js index 7328880..378a453 100644 --- a/dist/package-shared/functions/api/query/get.js +++ b/dist/package-shared/functions/api/query/get.js @@ -1,94 +1,74 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = apiGet; -const lodash_1 = __importDefault(require("lodash")); -const serverError_1 = __importDefault(require("../../backend/serverError")); -const runQuery_1 = __importDefault(require("../../backend/db/runQuery")); -const grab_query_and_values_1 = __importDefault(require("../../../utils/grab-query-and-values")); +import _ from "lodash"; +import serverError from "../../backend/serverError"; +import runQuery from "../../backend/db/runQuery"; +import apiGetGrabQueryAndValues from "../../../utils/grab-query-and-values"; /** * # Get Function FOr API */ -function apiGet(_a) { - return __awaiter(this, arguments, void 0, function* ({ query, dbFullName, queryValues, tableName, dbSchema, debug, dbContext, forceLocal, }) { - var _b, _c; - const queryAndValues = (0, grab_query_and_values_1.default)({ - query, - values: queryValues, - }); - if (typeof query == "string" && query.match(/^alter|^delete|^create/i)) { - return { success: false, msg: "Wrong Input." }; - } - let results; - try { - let { result, error } = yield (0, runQuery_1.default)({ - dbFullName: dbFullName, - query: queryAndValues.query, - queryValuesArray: queryAndValues.values, - readOnly: true, - dbSchema, - tableName, - dbContext, - debug, - forceLocal, - }); - if (debug && global.DSQL_USE_LOCAL) { - console.log("apiGet:result", result); - console.log("apiGet:error", error); - } - let tableSchema; - if (dbSchema) { - const targetTable = (_b = dbSchema.tables) === null || _b === void 0 ? void 0 : _b.find((table) => table.tableName === tableName); - if (targetTable) { - const clonedTargetTable = lodash_1.default.cloneDeep(targetTable); - delete clonedTargetTable.childTable; - delete clonedTargetTable.childTableDbFullName; - delete clonedTargetTable.childTableName; - delete clonedTargetTable.childrenTables; - delete clonedTargetTable.updateData; - delete clonedTargetTable.tableNameOld; - delete clonedTargetTable.indexes; - tableSchema = clonedTargetTable; - } - } - if (error) - throw error; - if (result.error) - throw new Error(result.error); - results = result; - const resObject = { - success: true, - payload: results, - schema: tableName && tableSchema ? tableSchema : undefined, - }; - return resObject; - } - catch (error) { - (0, serverError_1.default)({ - component: "/api/query/get/lines-85-94", - message: error.message, - }); - (_c = global.ERROR_CALLBACK) === null || _c === void 0 ? void 0 : _c.call(global, `API Get Error`, error); - if (debug && global.DSQL_USE_LOCAL) { - console.log("apiGet:error", error.message); - console.log("queryAndValues", queryAndValues); - } - return { - success: false, - payload: null, - error: error.message, - }; - } +export default async function apiGet({ query, dbFullName, queryValues, tableName, dbSchema, debug, dbContext, forceLocal, }) { + var _a, _b; + const queryAndValues = apiGetGrabQueryAndValues({ + query, + values: queryValues, }); + if (typeof query == "string" && query.match(/^alter|^delete|^create/i)) { + return { success: false, msg: "Wrong Input." 
}; + } + let results; + try { + let { result, error } = await runQuery({ + dbFullName: dbFullName, + query: queryAndValues.query, + queryValuesArray: queryAndValues.values, + readOnly: true, + dbSchema, + tableName, + dbContext, + debug, + forceLocal, + }); + if (debug && global.DSQL_USE_LOCAL) { + console.log("apiGet:result", result); + console.log("apiGet:error", error); + } + let tableSchema; + if (dbSchema) { + const targetTable = (_a = dbSchema.tables) === null || _a === void 0 ? void 0 : _a.find((table) => table.tableName === tableName); + if (targetTable) { + const clonedTargetTable = _.cloneDeep(targetTable); + delete clonedTargetTable.childTable; + delete clonedTargetTable.childrenTables; + delete clonedTargetTable.updateData; + delete clonedTargetTable.indexes; + tableSchema = clonedTargetTable; + } + } + if (error) + throw error; + if (result.error) + throw new Error(result.error); + results = result; + const resObject = { + success: true, + payload: results, + schema: tableName && tableSchema ? tableSchema : undefined, + }; + return resObject; + } + catch (error) { + serverError({ + component: "/api/query/get/lines-85-94", + message: error.message, + }); + (_b = global.ERROR_CALLBACK) === null || _b === void 0 ? void 0 : _b.call(global, `API Get Error`, error); + if (debug && global.DSQL_USE_LOCAL) { + console.log("apiGet:error", error.message); + console.log("queryAndValues", queryAndValues); + } + return { + success: false, + payload: null, + error: error.message, + }; + } } diff --git a/dist/package-shared/functions/api/query/post.js b/dist/package-shared/functions/api/query/post.js index 3114aa3..01842b7 100644 --- a/dist/package-shared/functions/api/query/post.js +++ b/dist/package-shared/functions/api/query/post.js @@ -1,100 +1,80 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = apiPost; -const lodash_1 = __importDefault(require("lodash")); -const serverError_1 = __importDefault(require("../../backend/serverError")); -const runQuery_1 = __importDefault(require("../../backend/db/runQuery")); -const debug_log_1 = __importDefault(require("../../../utils/logging/debug-log")); +import _ from "lodash"; +import serverError from "../../backend/serverError"; +import runQuery from "../../backend/db/runQuery"; +import debugLog from "../../../utils/logging/debug-log"; /** * # Post Function For API */ -function apiPost(_a) { - return __awaiter(this, arguments, void 0, function* ({ query, dbFullName, queryValues, tableName, dbSchema, dbContext, forceLocal, debug, }) { - var _b, _c; - if (typeof query === "string" && (query === null || query === void 0 ? 
void 0 : query.match(/^create |^alter |^drop /i))) { - return { success: false, msg: "Wrong Input" }; - } - if (typeof query === "object" && - ((_b = query === null || query === void 0 ? void 0 : query.action) === null || _b === void 0 ? void 0 : _b.match(/^create |^alter |^drop /i))) { - return { success: false, msg: "Wrong Input" }; - } - let results; - /** - * Create new user folder and file - * - * @description Create new user folder and file - */ - try { - let { result, error } = yield (0, runQuery_1.default)({ - dbFullName: dbFullName, - query: query, - dbSchema: dbSchema, - queryValuesArray: queryValues, - tableName, - dbContext, - forceLocal, - debug, +export default async function apiPost({ query, dbFullName, queryValues, tableName, dbSchema, dbContext, forceLocal, debug, }) { + var _a, _b; + if (typeof query === "string" && (query === null || query === void 0 ? void 0 : query.match(/^create |^alter |^drop /i))) { + return { success: false, msg: "Wrong Input" }; + } + if (typeof query === "object" && + ((_a = query === null || query === void 0 ? void 0 : query.action) === null || _a === void 0 ? void 0 : _a.match(/^create |^alter |^drop /i))) { + return { success: false, msg: "Wrong Input" }; + } + let results; + /** + * Create new user folder and file + * + * @description Create new user folder and file + */ + try { + let { result, error } = await runQuery({ + dbFullName, + query, + dbSchema, + queryValuesArray: queryValues, + tableName, + dbContext, + forceLocal, + debug, + }); + if (debug) { + debugLog({ + log: result, + addTime: true, + label: "result", }); - if (debug) { - (0, debug_log_1.default)({ - log: result, - addTime: true, - label: "result", - }); - (0, debug_log_1.default)({ - log: query, - addTime: true, - label: "query", - }); - } - results = result; - if (error) - throw new Error(error); - let tableSchema; - if (dbSchema) { - const targetTable = dbSchema.tables.find((table) => table.tableName === tableName); - if (targetTable) { - const clonedTargetTable = lodash_1.default.cloneDeep(targetTable); - delete clonedTargetTable.childTable; - delete clonedTargetTable.childTableDbFullName; - delete clonedTargetTable.childTableName; - delete clonedTargetTable.childrenTables; - delete clonedTargetTable.updateData; - delete clonedTargetTable.tableNameOld; - delete clonedTargetTable.indexes; - tableSchema = clonedTargetTable; - } - } - return { - success: true, - payload: results, - error: error, - schema: tableName && tableSchema ? tableSchema : undefined, - }; - } - catch (error) { - (0, serverError_1.default)({ - component: "/api/query/post/lines-132-142", - message: error.message, + debugLog({ + log: query, + addTime: true, + label: "query", }); - (_c = global.ERROR_CALLBACK) === null || _c === void 0 ? void 0 : _c.call(global, `API Post Error`, error); - return { - success: false, - payload: results, - error: error.message, - }; } - }); + results = result; + if (error) + throw new Error(error); + let tableSchema; + if (dbSchema) { + const targetTable = dbSchema.tables.find((table) => table.tableName === tableName); + if (targetTable) { + const clonedTargetTable = _.cloneDeep(targetTable); + delete clonedTargetTable.childTable; + delete clonedTargetTable.childrenTables; + delete clonedTargetTable.updateData; + delete clonedTargetTable.indexes; + tableSchema = clonedTargetTable; + } + } + return { + success: true, + payload: results, + error: error, + schema: tableName && tableSchema ? 
tableSchema : undefined, + }; + } + catch (error) { + serverError({ + component: "/api/query/post/lines-132-142", + message: error.message, + }); + (_b = global.ERROR_CALLBACK) === null || _b === void 0 ? void 0 : _b.call(global, `API Post Error`, error); + return { + success: false, + payload: results, + error: error.message, + }; + } } diff --git a/dist/package-shared/functions/api/social-login/facebookLogin.js b/dist/package-shared/functions/api/social-login/facebookLogin.js index 32b5e94..0be54fd 100644 --- a/dist/package-shared/functions/api/social-login/facebookLogin.js +++ b/dist/package-shared/functions/api/social-login/facebookLogin.js @@ -1,35 +1,19 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = facebookLogin; -const DB_HANDLER_1 = __importDefault(require("../../../utils/backend/global-db/DB_HANDLER")); -const serverError_1 = __importDefault(require("../../backend/serverError")); -const hashPassword_1 = __importDefault(require("../../dsql/hashPassword")); +import DB_HANDLER from "../../../utils/backend/global-db/DB_HANDLER"; +import serverError from "../../backend/serverError"; +import hashPassword from "../../dsql/hashPassword"; /** * # Facebook Login */ -function facebookLogin(_a) { - return __awaiter(this, arguments, void 0, function* ({ usertype, body, }) { - try { - const foundUser = yield (0, DB_HANDLER_1.default)(`SELECT * FROM users WHERE email='${body.facebookUserEmail}' AND social_login='1'`); - if (foundUser && foundUser[0]) { - return foundUser[0]; - } - let socialHashedPassword = (0, hashPassword_1.default)({ - password: body.facebookUserId, - }); - let newUser = yield (0, DB_HANDLER_1.default)(`INSERT INTO ${usertype} ( +export default async function facebookLogin({ usertype, body, }) { + try { + const foundUser = await DB_HANDLER(`SELECT * FROM users WHERE email='${body.facebookUserEmail}' AND social_login='1'`); + if (foundUser && foundUser[0]) { + return foundUser[0]; + } + let socialHashedPassword = hashPassword({ + password: body.facebookUserId, + }); + let newUser = await DB_HANDLER(`INSERT INTO ${usertype} ( first_name, last_name, social_platform, @@ -49,8 +33,8 @@ function facebookLogin(_a) { '${body.facebookUserLastName}', 'facebook', 'facebook_${body.facebookUserEmail - ? body.facebookUserEmail.replace(/@.*/, "") - : body.facebookUserFirstName.toLowerCase()}', + ? 
body.facebookUserEmail.replace(/@.*/, "") + : body.facebookUserFirstName.toLowerCase()}', '${body.facebookUserEmail}', '${body.facebookUserImage}', '${body.facebookUserImage}', @@ -62,17 +46,16 @@ function facebookLogin(_a) { '${Date()}', '${Date.now()}' )`); - const newFoundUser = yield (0, DB_HANDLER_1.default)(`SELECT * FROM ${usertype} WHERE id='${newUser.insertId}'`); - } - catch ( /** @type {any} */error) { - (0, serverError_1.default)({ - component: "functions/backend/facebookLogin", - message: error.message, - }); - } - return { - isFacebookAuthValid: false, - newFoundUser: null, - }; - }); + const newFoundUser = await DB_HANDLER(`SELECT * FROM ${usertype} WHERE id='${newUser.insertId}'`); + } + catch ( /** @type {any} */error) { + serverError({ + component: "functions/backend/facebookLogin", + message: error.message, + }); + } + return { + isFacebookAuthValid: false, + newFoundUser: null, + }; } diff --git a/dist/package-shared/functions/api/social-login/githubLogin.js b/dist/package-shared/functions/api/social-login/githubLogin.js index ef5a828..14e7b38 100644 --- a/dist/package-shared/functions/api/social-login/githubLogin.js +++ b/dist/package-shared/functions/api/social-login/githubLogin.js @@ -1,62 +1,45 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = githubLogin; -const DB_HANDLER_1 = __importDefault(require("../../../utils/backend/global-db/DB_HANDLER")); -const httpsRequest_1 = __importDefault(require("../../backend/httpsRequest")); +import DB_HANDLER from "../../../utils/backend/global-db/DB_HANDLER"; +import httpsRequest from "../../backend/httpsRequest"; /** * # Login/signup a github user */ -function githubLogin(_a) { - return __awaiter(this, arguments, void 0, function* ({ code, clientId, clientSecret, }) { - let gitHubUser; - try { - const response = yield (0, httpsRequest_1.default)({ - method: "POST", - hostname: "github.com", - path: `/login/oauth/access_token?client_id=${clientId}&client_secret=${clientSecret}&code=${code}`, - headers: { - Accept: "application/json", - "User-Agent": "*", - }, - scheme: "https", - }); - const accessTokenObject = JSON.parse(response); - if (!(accessTokenObject === null || accessTokenObject === void 0 ? void 0 : accessTokenObject.access_token)) { - return gitHubUser; - } - const userDataResponse = yield (0, httpsRequest_1.default)({ - method: "GET", - hostname: "api.github.com", - path: "/user", - headers: { - Authorization: `Bearer ${accessTokenObject.access_token}`, - "User-Agent": "*", - }, - scheme: "https", - }); - gitHubUser = JSON.parse(userDataResponse); - if (!(gitHubUser === null || gitHubUser === void 0 ? 
void 0 : gitHubUser.email) && gitHubUser) { - const existingGithubUser = yield (0, DB_HANDLER_1.default)(`SELECT email FROM users WHERE social_login='1' AND social_platform='github' AND social_id='${gitHubUser.id}'`); - if (existingGithubUser && existingGithubUser[0]) { - gitHubUser.email = existingGithubUser[0].email; - } +export default async function githubLogin({ code, clientId, clientSecret, }) { + let gitHubUser; + try { + const response = await httpsRequest({ + method: "POST", + hostname: "github.com", + path: `/login/oauth/access_token?client_id=${clientId}&client_secret=${clientSecret}&code=${code}`, + headers: { + Accept: "application/json", + "User-Agent": "*", + }, + scheme: "https", + }); + const accessTokenObject = JSON.parse(response); + if (!(accessTokenObject === null || accessTokenObject === void 0 ? void 0 : accessTokenObject.access_token)) { + return gitHubUser; + } + const userDataResponse = await httpsRequest({ + method: "GET", + hostname: "api.github.com", + path: "/user", + headers: { + Authorization: `Bearer ${accessTokenObject.access_token}`, + "User-Agent": "*", + }, + scheme: "https", + }); + gitHubUser = JSON.parse(userDataResponse); + if (!(gitHubUser === null || gitHubUser === void 0 ? void 0 : gitHubUser.email) && gitHubUser) { + const existingGithubUser = await DB_HANDLER(`SELECT email FROM users WHERE social_login='1' AND social_platform='github' AND social_id='${gitHubUser.id}'`); + if (existingGithubUser && existingGithubUser[0]) { + gitHubUser.email = existingGithubUser[0].email; } } - catch ( /** @type {any} */error) { - console.log("ERROR in githubLogin.ts backend function =>", error.message); - } - return gitHubUser; - }); + } + catch ( /** @type {any} */error) { + console.log("ERROR in githubLogin.ts backend function =>", error.message); + } + return gitHubUser; } diff --git a/dist/package-shared/functions/api/social-login/googleLogin.js b/dist/package-shared/functions/api/social-login/googleLogin.js index 6f53154..4d5caa9 100644 --- a/dist/package-shared/functions/api/social-login/googleLogin.js +++ b/dist/package-shared/functions/api/social-login/googleLogin.js @@ -1,82 +1,66 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = googleLogin; -const google_auth_library_1 = require("google-auth-library"); -const serverError_1 = __importDefault(require("../../backend/serverError")); -const DB_HANDLER_1 = __importDefault(require("../../../utils/backend/global-db/DB_HANDLER")); -const hashPassword_1 = __importDefault(require("../../dsql/hashPassword")); +import { OAuth2Client } from "google-auth-library"; +import serverError from "../../backend/serverError"; +import DB_HANDLER from "../../../utils/backend/global-db/DB_HANDLER"; +import hashPassword from "../../dsql/hashPassword"; /** * # Google Login */ -function googleLogin(_a) { - return __awaiter(this, arguments, void 0, function* ({ usertype, foundUser, isSocialValidated, isUserValid, reqBody, serverRes, loginFailureReason, }) { - var _b, _c; - const client = new google_auth_library_1.OAuth2Client(process.env.DSQL_GOOGLE_CLIENT_ID); - let isGoogleAuthValid = false; - let newFoundUser = null; +export default async function googleLogin({ usertype, foundUser, isSocialValidated, isUserValid, reqBody, serverRes, loginFailureReason, }) { + var _a, _b; + const client = new OAuth2Client(process.env.DSQL_GOOGLE_CLIENT_ID); + let isGoogleAuthValid = false; + let newFoundUser = null; + //////////////////////////////////////////////// + //////////////////////////////////////////////// + //////////////////////////////////////////////// + try { + const ticket = await client.verifyIdToken({ + idToken: reqBody.token, + audience: process.env.DSQL_GOOGLE_CLIENT_ID, // Specify the CLIENT_ID of the app that accesses the backend + // Or, if multiple clients access the backend: + //[CLIENT_ID_1, CLIENT_ID_2, CLIENT_ID_3] + }); + const payload = ticket.getPayload(); + const userid = payload === null || payload === void 0 ? void 0 : payload["sub"]; + if (!payload) + throw new Error("Google login failed. Credentials invalid"); + isUserValid = Boolean(payload.email_verified); + if (!isUserValid || !payload || !payload.email_verified) + return; + serverRes.isUserValid = payload.email_verified; + isSocialValidated = payload.email_verified; + isGoogleAuthValid = payload.email_verified; + ////// If request specified a G Suite domain: + ////// const domain = payload['hd']; + let socialHashedPassword = hashPassword({ + password: payload.at_hash || "", + }); //////////////////////////////////////////////// //////////////////////////////////////////////// //////////////////////////////////////////////// - try { - const ticket = yield client.verifyIdToken({ - idToken: reqBody.token, - audience: process.env.DSQL_GOOGLE_CLIENT_ID, // Specify the CLIENT_ID of the app that accesses the backend - // Or, if multiple clients access the backend: - //[CLIENT_ID_1, CLIENT_ID_2, CLIENT_ID_3] - }); - const payload = ticket.getPayload(); - const userid = payload === null || payload === void 0 ? void 0 : payload["sub"]; - if (!payload) - throw new Error("Google login failed. 
Credentials invalid"); - isUserValid = Boolean(payload.email_verified); - if (!isUserValid || !payload || !payload.email_verified) - return; - serverRes.isUserValid = payload.email_verified; - isSocialValidated = payload.email_verified; - isGoogleAuthValid = payload.email_verified; - ////// If request specified a G Suite domain: - ////// const domain = payload['hd']; - let socialHashedPassword = (0, hashPassword_1.default)({ - password: payload.at_hash || "", - }); - //////////////////////////////////////////////// - //////////////////////////////////////////////// - //////////////////////////////////////////////// - let existinEmail = yield (0, DB_HANDLER_1.default)(`SELECT * FROM ${usertype} WHERE email='${payload.email}' AND social_login!='1' AND social_platform!='google'`); - if (existinEmail && existinEmail[0]) { - loginFailureReason = "Email Exists Already"; - isGoogleAuthValid = false; - return { - isGoogleAuthValid: isGoogleAuthValid, - newFoundUser: newFoundUser, - loginFailureReason: loginFailureReason, - }; - } - //////////////////////////////////////// - foundUser = yield (0, DB_HANDLER_1.default)(`SELECT * FROM ${usertype} WHERE email='${payload.email}' AND social_login='1' AND social_platform='google'`); - if (foundUser && foundUser[0]) { - newFoundUser = foundUser; - return { - isGoogleAuthValid: isGoogleAuthValid, - newFoundUser: newFoundUser, - }; - } - //////////////////////////////////////////////// - //////////////////////////////////////////////// - //////////////////////////////////////////////// - let newUser = yield (0, DB_HANDLER_1.default)(`INSERT INTO ${usertype} ( + let existinEmail = await DB_HANDLER(`SELECT * FROM ${usertype} WHERE email='${payload.email}' AND social_login!='1' AND social_platform!='google'`); + if (existinEmail && existinEmail[0]) { + loginFailureReason = "Email Exists Already"; + isGoogleAuthValid = false; + return { + isGoogleAuthValid: isGoogleAuthValid, + newFoundUser: newFoundUser, + loginFailureReason: loginFailureReason, + }; + } + //////////////////////////////////////// + foundUser = await DB_HANDLER(`SELECT * FROM ${usertype} WHERE email='${payload.email}' AND social_login='1' AND social_platform='google'`); + if (foundUser && foundUser[0]) { + newFoundUser = foundUser; + return { + isGoogleAuthValid: isGoogleAuthValid, + newFoundUser: newFoundUser, + }; + } + //////////////////////////////////////////////// + //////////////////////////////////////////////// + //////////////////////////////////////////////// + let newUser = await DB_HANDLER(`INSERT INTO ${usertype} ( first_name, last_name, social_platform, @@ -95,7 +79,7 @@ function googleLogin(_a) { '${payload.given_name}', '${payload.family_name}', 'google', - 'google_${(_b = payload.email) === null || _b === void 0 ? void 0 : _b.replace(/@.*/, "")}', + 'google_${(_a = payload.email) === null || _a === void 0 ? void 0 : _a.replace(/@.*/, "")}', '${payload.sub}', '${payload.email}', '${payload.picture}', @@ -107,18 +91,17 @@ function googleLogin(_a) { '${Date()}', '${Date.now()}' )`); - newFoundUser = yield (0, DB_HANDLER_1.default)(`SELECT * FROM ${usertype} WHERE id='${newUser.insertId}'`); - } - catch (error) { - (0, serverError_1.default)({ - component: "googleLogin", - message: error.message, - }); - (_c = global.ERROR_CALLBACK) === null || _c === void 0 ? 
void 0 : _c.call(global, `Google Login Error`, error); - loginFailureReason = error; - isUserValid = false; - isSocialValidated = false; - } - return { isGoogleAuthValid: isGoogleAuthValid, newFoundUser: newFoundUser }; - }); + newFoundUser = await DB_HANDLER(`SELECT * FROM ${usertype} WHERE id='${newUser.insertId}'`); + } + catch (error) { + serverError({ + component: "googleLogin", + message: error.message, + }); + (_b = global.ERROR_CALLBACK) === null || _b === void 0 ? void 0 : _b.call(global, `Google Login Error`, error); + loginFailureReason = error; + isUserValid = false; + isSocialValidated = false; + } + return { isGoogleAuthValid: isGoogleAuthValid, newFoundUser: newFoundUser }; } diff --git a/dist/package-shared/functions/api/social-login/handleSocialDb.js b/dist/package-shared/functions/api/social-login/handleSocialDb.js index 3b93e62..328794e 100644 --- a/dist/package-shared/functions/api/social-login/handleSocialDb.js +++ b/dist/package-shared/functions/api/social-login/handleSocialDb.js @@ -1,217 +1,201 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = handleSocialDb; -const fs_1 = __importDefault(require("fs")); -const handleNodemailer_1 = __importDefault(require("../../backend/handleNodemailer")); -const path_1 = __importDefault(require("path")); -const addMariadbUser_1 = __importDefault(require("../../backend/addMariadbUser")); -const varDatabaseDbHandler_1 = __importDefault(require("../../backend/varDatabaseDbHandler")); -const encrypt_1 = __importDefault(require("../../dsql/encrypt")); -const addDbEntry_1 = __importDefault(require("../../backend/db/addDbEntry")); -const loginSocialUser_1 = __importDefault(require("./loginSocialUser")); +import fs from "fs"; +import handleNodemailer from "../../backend/handleNodemailer"; +import path from "path"; +import addMariadbUser from "../../backend/addMariadbUser"; +import varDatabaseDbHandler from "../../backend/varDatabaseDbHandler"; +import encrypt from "../../dsql/encrypt"; +import addDbEntry from "../../backend/db/addDbEntry"; +import loginSocialUser from "./loginSocialUser"; +import grabDirNames from "../../../utils/backend/names/grab-dir-names"; /** * # Handle Social DB */ -function handleSocialDb(_a) { - return __awaiter(this, arguments, void 0, function* ({ database, email, social_platform, payload, invitation, supEmail, additionalFields, debug, loginOnly, }) { - var _b; - try { - const finalDbName = global.DSQL_USE_LOCAL - ? undefined - : database - ? database - : "datasquirel"; - const dbAppend = global.DSQL_USE_LOCAL ? "" : `${finalDbName}.`; - const existingSocialUserQUery = `SELECT * FROM ${dbAppend}users WHERE email = ? AND social_login='1' AND social_platform = ? 
`; - const existingSocialUserValues = [email, social_platform]; - if (debug) { - console.log("handleSocialDb:existingSocialUserQUery", existingSocialUserQUery); - console.log("handleSocialDb:existingSocialUserValues", existingSocialUserValues); - } - let existingSocialUser = yield (0, varDatabaseDbHandler_1.default)({ - database: finalDbName, - queryString: existingSocialUserQUery, - queryValuesArray: existingSocialUserValues, - debug, - }); - if (debug) { - console.log("handleSocialDb:existingSocialUser", existingSocialUser); - } - if (existingSocialUser === null || existingSocialUser === void 0 ? void 0 : existingSocialUser[0]) { - return yield (0, loginSocialUser_1.default)({ - user: existingSocialUser[0], - social_platform, - invitation, - database: finalDbName, - additionalFields, - debug, - }); - } - else if (loginOnly) { - return { - success: false, - payload: null, - msg: "User Does not Exist", - }; - } - const finalEmail = email ? email : supEmail ? supEmail : null; - if (!finalEmail) { - return { - success: false, - payload: null, - msg: "No Email Present", - }; - } - const existingEmailOnlyQuery = `SELECT * FROM ${dbAppend}users WHERE email='${finalEmail}'`; - if (debug) { - console.log("handleSocialDb:existingEmailOnlyQuery", existingEmailOnlyQuery); - } - let existingEmailOnly = yield (0, varDatabaseDbHandler_1.default)({ - database: finalDbName, - queryString: existingEmailOnlyQuery, - debug, - }); - if (debug) { - console.log("handleSocialDb:existingEmailOnly", existingEmailOnly); - } - if (existingEmailOnly === null || existingEmailOnly === void 0 ? void 0 : existingEmailOnly[0]) { - return yield (0, loginSocialUser_1.default)({ - user: existingEmailOnly[0], - social_platform, - invitation, - database: finalDbName, - additionalFields, - debug, - }); - } - else if (loginOnly) { - return { - success: false, - payload: null, - msg: "Social Account Creation Not allowed", - }; - } - const socialHashedPassword = (0, encrypt_1.default)({ - data: email, - }); - const data = { - social_login: "1", - verification_status: supEmail ? "0" : "1", - password: socialHashedPassword, - }; - Object.keys(payload).forEach((key) => { - data[key] = payload[key]; - }); - const newUser = yield (0, addDbEntry_1.default)({ - dbContext: finalDbName ? "Dsql User" : undefined, - paradigm: finalDbName ? "Full Access" : undefined, - dbFullName: finalDbName, - tableName: "users", - duplicateColumnName: "email", - duplicateColumnValue: finalEmail, - data: Object.assign(Object.assign({}, data), { email: finalEmail }), - }); - if (newUser === null || newUser === void 0 ? void 0 : newUser.insertId) { - if (!database) { - /** - * Add a Mariadb User for this User - */ - yield (0, addMariadbUser_1.default)({ userId: newUser.insertId }); - } - const newUserQueriedQuery = `SELECT * FROM ${dbAppend}users WHERE id='${newUser.insertId}'`; - const newUserQueried = yield (0, varDatabaseDbHandler_1.default)({ - database: finalDbName, - queryString: newUserQueriedQuery, - debug, - }); - if (!newUserQueried || !newUserQueried[0]) - return { - success: false, - payload: null, - msg: "User Insertion Failed!", - }; - if (supEmail && (database === null || database === void 0 ? 
void 0 : database.match(/^datasquirel$/))) { - /** - * Send email Verification - * - * @description Send verification email to newly created agent - */ - let generatedToken = (0, encrypt_1.default)({ - data: JSON.stringify({ - id: newUser.insertId, - email: supEmail, - dateCode: Date.now(), - }), - }); - (0, handleNodemailer_1.default)({ - to: supEmail, - subject: "Verify Email Address", - text: "Please click the link to verify your email address", - html: fs_1.default - .readFileSync("./email/send-email-verification-link.html", "utf8") - .replace(/{{host}}/, process.env.DSQL_HOST || "") - .replace(/{{token}}/, generatedToken || ""), - }).then(() => { }); - } - const STATIC_ROOT = process.env.DSQL_STATIC_SERVER_DIR; - if (!STATIC_ROOT) { - console.log("Static File ENV not Found!"); - return { - success: false, - payload: null, - msg: "Static File ENV not Found!", - }; - } - /** - * Create new user folder and file - * - * @description Create new user folder and file - */ - if (!database || (database === null || database === void 0 ? void 0 : database.match(/^datasquirel$/))) { - let newUserSchemaFolderPath = `${process.env.DSQL_USER_DB_SCHEMA_PATH}/user-${newUser.insertId}`; - let newUserMediaFolderPath = path_1.default.join(STATIC_ROOT, `images/user-images/user-${newUser.insertId}`); - fs_1.default.mkdirSync(newUserSchemaFolderPath); - fs_1.default.mkdirSync(newUserMediaFolderPath); - fs_1.default.writeFileSync(`${newUserSchemaFolderPath}/main.json`, JSON.stringify([]), "utf8"); - } - return yield (0, loginSocialUser_1.default)({ - user: newUserQueried[0], - social_platform, - invitation, - database: finalDbName, - additionalFields, - debug, - }); - } - else { - console.log("Social User Failed to insert in 'handleSocialDb.ts' backend function =>", newUser); - return { - success: false, - payload: null, - msg: "Social User Failed to insert in 'handleSocialDb.ts' backend function", - }; - } +export default async function handleSocialDb({ database, email, social_platform, payload, invitation, supEmail, additionalFields, debug, loginOnly, }) { + var _a, _b; + try { + const finalDbName = global.DSQL_USE_LOCAL + ? undefined + : database + ? database + : "datasquirel"; + const dbAppend = global.DSQL_USE_LOCAL ? "" : `${finalDbName}.`; + const existingSocialUserQUery = `SELECT * FROM ${dbAppend}users WHERE email = ? AND social_login='1' AND social_platform = ? `; + const existingSocialUserValues = [email, social_platform]; + if (debug) { + console.log("handleSocialDb:existingSocialUserQUery", existingSocialUserQUery); + console.log("handleSocialDb:existingSocialUserValues", existingSocialUserValues); } - catch (error) { - console.log("ERROR in 'handleSocialDb.ts' backend function =>", error.message); - (_b = global.ERROR_CALLBACK) === null || _b === void 0 ? void 0 : _b.call(global, `Handle Social DB Error`, error); + let existingSocialUser = await varDatabaseDbHandler({ + database: finalDbName, + queryString: existingSocialUserQUery, + queryValuesArray: existingSocialUserValues, + debug, + }); + if (debug) { + console.log("handleSocialDb:existingSocialUser", existingSocialUser); + } + if (existingSocialUser === null || existingSocialUser === void 0 ? void 0 : existingSocialUser[0]) { + return await loginSocialUser({ + user: existingSocialUser[0], + social_platform, + invitation, + database: finalDbName, + additionalFields, + debug, + }); + } + else if (loginOnly) { return { success: false, payload: null, - msg: error.message, + msg: "User Does not Exist", }; } - }); + const finalEmail = email ? 
email : supEmail ? supEmail : null; + if (!finalEmail) { + return { + success: false, + payload: null, + msg: "No Email Present", + }; + } + const existingEmailOnlyQuery = `SELECT * FROM ${dbAppend}users WHERE email='${finalEmail}'`; + if (debug) { + console.log("handleSocialDb:existingEmailOnlyQuery", existingEmailOnlyQuery); + } + let existingEmailOnly = await varDatabaseDbHandler({ + database: finalDbName, + queryString: existingEmailOnlyQuery, + debug, + }); + if (debug) { + console.log("handleSocialDb:existingEmailOnly", existingEmailOnly); + } + if (existingEmailOnly === null || existingEmailOnly === void 0 ? void 0 : existingEmailOnly[0]) { + return await loginSocialUser({ + user: existingEmailOnly[0], + social_platform, + invitation, + database: finalDbName, + additionalFields, + debug, + }); + } + else if (loginOnly) { + return { + success: false, + payload: null, + msg: "Social Account Creation Not allowed", + }; + } + const socialHashedPassword = encrypt({ + data: email, + }); + const data = { + social_login: "1", + verification_status: supEmail ? "0" : "1", + password: socialHashedPassword, + }; + Object.keys(payload).forEach((key) => { + data[key] = payload[key]; + }); + const newUser = await addDbEntry({ + dbContext: finalDbName ? "Dsql User" : undefined, + paradigm: finalDbName ? "Full Access" : undefined, + dbFullName: finalDbName, + tableName: "users", + duplicateColumnName: "email", + duplicateColumnValue: finalEmail, + data: Object.assign(Object.assign({}, data), { email: finalEmail }), + }); + if ((_a = newUser === null || newUser === void 0 ? void 0 : newUser.payload) === null || _a === void 0 ? void 0 : _a.insertId) { + if (!database) { + /** + * Add a Mariadb User for this User + */ + await addMariadbUser({ userId: newUser.payload.insertId }); + } + const newUserQueriedQuery = `SELECT * FROM ${dbAppend}users WHERE id='${newUser.payload.insertId}'`; + const newUserQueried = await varDatabaseDbHandler({ + database: finalDbName, + queryString: newUserQueriedQuery, + debug, + }); + if (!newUserQueried || !newUserQueried[0]) + return { + success: false, + payload: null, + msg: "User Insertion Failed!", + }; + if (supEmail && (database === null || database === void 0 ? void 0 : database.match(/^datasquirel$/))) { + /** + * Send email Verification + * + * @description Send verification email to newly created agent + */ + let generatedToken = encrypt({ + data: JSON.stringify({ + id: newUser.payload.insertId, + email: supEmail, + dateCode: Date.now(), + }), + }); + handleNodemailer({ + to: supEmail, + subject: "Verify Email Address", + text: "Please click the link to verify your email address", + html: fs + .readFileSync("./email/send-email-verification-link.html", "utf8") + .replace(/{{host}}/, process.env.DSQL_HOST || "") + .replace(/{{token}}/, generatedToken || ""), + }).then(() => { }); + } + const { STATIC_ROOT } = grabDirNames(); + if (!STATIC_ROOT) { + console.log("Static File ENV not Found!"); + return { + success: false, + payload: null, + msg: "Static File ENV not Found!", + }; + } + /** + * Create new user folder and file + * + * @description Create new user folder and file + */ + if (!database || (database === null || database === void 0 ? 
void 0 : database.match(/^datasquirel$/))) { + let newUserSchemaFolderPath = `${process.env.DSQL_USER_DB_SCHEMA_PATH}/user-${newUser.payload.insertId}`; + let newUserMediaFolderPath = path.join(STATIC_ROOT, `images/user-images/user-${newUser.payload.insertId}`); + fs.mkdirSync(newUserSchemaFolderPath); + fs.mkdirSync(newUserMediaFolderPath); + fs.writeFileSync(`${newUserSchemaFolderPath}/main.json`, JSON.stringify([]), "utf8"); + } + return await loginSocialUser({ + user: newUserQueried[0], + social_platform, + invitation, + database: finalDbName, + additionalFields, + debug, + }); + } + else { + console.log("Social User Failed to insert in 'handleSocialDb.ts' backend function =>", newUser); + return { + success: false, + payload: null, + msg: "Social User Failed to insert in 'handleSocialDb.ts' backend function", + }; + } + } + catch (error) { + console.log("ERROR in 'handleSocialDb.ts' backend function =>", error.message); + (_b = global.ERROR_CALLBACK) === null || _b === void 0 ? void 0 : _b.call(global, `Handle Social DB Error`, error); + return { + success: false, + payload: null, + msg: error.message, + }; + } } diff --git a/dist/package-shared/functions/api/social-login/loginSocialUser.js b/dist/package-shared/functions/api/social-login/loginSocialUser.js index e68f7cf..89b78bb 100644 --- a/dist/package-shared/functions/api/social-login/loginSocialUser.js +++ b/dist/package-shared/functions/api/social-login/loginSocialUser.js @@ -1,81 +1,64 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = loginSocialUser; -const addAdminUserOnLogin_1 = __importDefault(require("../../backend/addAdminUserOnLogin")); -const varDatabaseDbHandler_1 = __importDefault(require("../../backend/varDatabaseDbHandler")); +import addAdminUserOnLogin from "../../backend/addAdminUserOnLogin"; +import varDatabaseDbHandler from "../../backend/varDatabaseDbHandler"; /** * Function to login social user * ============================================================================== * @description This function logs in the user after 'handleSocialDb' function finishes * the user creation or confirmation process */ -function loginSocialUser(_a) { - return __awaiter(this, arguments, void 0, function* ({ user, social_platform, invitation, database, additionalFields, debug, }) { - const finalDbName = database ? database : "datasquirel"; - const dbAppend = database ? 
`\`${finalDbName}\`.` : ""; - const foundUserQuery = `SELECT * FROM ${dbAppend}\`users\` WHERE email=?`; - const foundUserValues = [user.email]; - const foundUser = yield (0, varDatabaseDbHandler_1.default)({ - database: finalDbName, - queryString: foundUserQuery, - queryValuesArray: foundUserValues, - debug, - }); - if (!(foundUser === null || foundUser === void 0 ? void 0 : foundUser[0])) - return { - success: false, - payload: null, - msg: "Couldn't find Social User.", - }; - let csrfKey = Math.random().toString(36).substring(2) + - "-" + - Math.random().toString(36).substring(2); - let userPayload = { - id: foundUser[0].id, - uuid: foundUser[0].uuid, - first_name: foundUser[0].first_name, - last_name: foundUser[0].last_name, - username: foundUser[0].username, - user_type: foundUser[0].user_type, - email: foundUser[0].email, - social_id: foundUser[0].social_id, - image: foundUser[0].image, - image_thumbnail: foundUser[0].image_thumbnail, - verification_status: foundUser[0].verification_status, - social_login: foundUser[0].social_login, - social_platform: foundUser[0].social_platform, - csrf_k: csrfKey, - logged_in_status: true, - date: Date.now(), - }; - if (additionalFields === null || additionalFields === void 0 ? void 0 : additionalFields[0]) { - additionalFields.forEach((key) => { - userPayload[key] = foundUser[0][key]; - }); - } - if (invitation && (!database || (database === null || database === void 0 ? void 0 : database.match(/^datasquirel$/)))) { - (0, addAdminUserOnLogin_1.default)({ - query: invitation, - user: userPayload, - }); - } - let result = { - success: true, - payload: userPayload, - csrf: csrfKey, - }; - return result; +export default async function loginSocialUser({ user, social_platform, invitation, database, additionalFields, debug, }) { + const finalDbName = database ? database : "datasquirel"; + const dbAppend = database ? `\`${finalDbName}\`.` : ""; + const foundUserQuery = `SELECT * FROM ${dbAppend}\`users\` WHERE email=?`; + const foundUserValues = [user.email]; + const foundUser = await varDatabaseDbHandler({ + database: finalDbName, + queryString: foundUserQuery, + queryValuesArray: foundUserValues, + debug, }); + if (!(foundUser === null || foundUser === void 0 ? void 0 : foundUser[0])) + return { + success: false, + payload: null, + msg: "Couldn't find Social User.", + }; + let csrfKey = Math.random().toString(36).substring(2) + + "-" + + Math.random().toString(36).substring(2); + let userPayload = { + id: foundUser[0].id, + uuid: foundUser[0].uuid, + first_name: foundUser[0].first_name, + last_name: foundUser[0].last_name, + username: foundUser[0].username, + user_type: foundUser[0].user_type, + email: foundUser[0].email, + social_id: foundUser[0].social_id, + image: foundUser[0].image, + image_thumbnail: foundUser[0].image_thumbnail, + verification_status: foundUser[0].verification_status, + social_login: foundUser[0].social_login, + social_platform: foundUser[0].social_platform, + csrf_k: csrfKey, + logged_in_status: true, + date: Date.now(), + }; + if (additionalFields === null || additionalFields === void 0 ? void 0 : additionalFields[0]) { + additionalFields.forEach((key) => { + userPayload[key] = foundUser[0][key]; + }); + } + if (invitation && (!database || (database === null || database === void 0 ? 
void 0 : database.match(/^datasquirel$/)))) { + addAdminUserOnLogin({ + query: invitation, + user: userPayload, + }); + } + let result = { + success: true, + payload: userPayload, + csrf: csrfKey, + }; + return result; } diff --git a/dist/package-shared/functions/api/users/api-create-user.d.ts b/dist/package-shared/functions/api/users/api-create-user.d.ts index 472c090..cead025 100644 --- a/dist/package-shared/functions/api/users/api-create-user.d.ts +++ b/dist/package-shared/functions/api/users/api-create-user.d.ts @@ -20,6 +20,6 @@ export default function apiCreateUser({ encryptionKey, payload, database, userId } | { success: boolean; msg: string; - sqlResult: import("../../../types").PostInsertReturn | null; + sqlResult: import("../../../types").APIResponseObject; payload: null; }>; diff --git a/dist/package-shared/functions/api/users/api-create-user.js b/dist/package-shared/functions/api/users/api-create-user.js index d6dbc70..b3cdf05 100644 --- a/dist/package-shared/functions/api/users/api-create-user.js +++ b/dist/package-shared/functions/api/users/api-create-user.js @@ -1,143 +1,140 @@ -"use strict"; -// @ts-check -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = apiCreateUser; -const addUsersTableToDb_1 = __importDefault(require("../../backend/addUsersTableToDb")); -const addDbEntry_1 = __importDefault(require("../../backend/db/addDbEntry")); -const updateUsersTableSchema_1 = __importDefault(require("../../backend/updateUsersTableSchema")); -const varDatabaseDbHandler_1 = __importDefault(require("../../backend/varDatabaseDbHandler")); -const hashPassword_1 = __importDefault(require("../../dsql/hashPassword")); -const validate_email_1 = __importDefault(require("../../email/fns/validate-email")); +import { findDbNameInSchemaDir } from "../../../shell/createDbFromSchema/grab-required-database-schemas"; +import addUsersTableToDb from "../../backend/addUsersTableToDb"; +import addDbEntry from "../../backend/db/addDbEntry"; +import updateUsersTableSchema from "../../backend/updateUsersTableSchema"; +import varDatabaseDbHandler from "../../backend/varDatabaseDbHandler"; +import hashPassword from "../../dsql/hashPassword"; +import validateEmail from "../../email/fns/validate-email"; /** * # API Create User */ -function apiCreateUser(_a) { - return __awaiter(this, arguments, void 0, function* ({ encryptionKey, payload, database, userId, }) { - const dbFullName = database; - const API_USER_ID = userId || process.env.DSQL_API_USER_ID; - const finalEncryptionKey = encryptionKey || process.env.DSQL_ENCRYPTION_PASSWORD; - if (!finalEncryptionKey) { - return { - success: false, - msg: "No encryption key provided", - payload: null, - }; - } - if (!(finalEncryptionKey === null || finalEncryptionKey === void 0 ? void 0 : finalEncryptionKey.match(/.{8,}/))) { - return { - success: false, - msg: "Encryption key must be at least 8 characters long", - payload: null, - }; - } - const hashedPassword = (0, hashPassword_1.default)({ - encryptionKey: finalEncryptionKey, - password: String(payload.password), +export default async function apiCreateUser({ encryptionKey, payload, database, userId, }) { + var _a; + const dbFullName = database; + const API_USER_ID = userId || process.env.DSQL_API_USER_ID; + const finalEncryptionKey = encryptionKey || process.env.DSQL_ENCRYPTION_PASSWORD; + if (!finalEncryptionKey) { + return { + success: false, + msg: "No encryption key provided", + payload: null, + }; + } + if (!(finalEncryptionKey === null || finalEncryptionKey === void 0 ? void 0 : finalEncryptionKey.match(/.{8,}/))) { + return { + success: false, + msg: "Encryption key must be at least 8 characters long", + payload: null, + }; + } + const targetDbSchema = findDbNameInSchemaDir({ + dbName: dbFullName, + userId, + }); + if (!(targetDbSchema === null || targetDbSchema === void 0 ? void 0 : targetDbSchema.id)) { + return { + success: false, + msg: "targetDbSchema not found", + payload: null, + }; + } + const hashedPassword = hashPassword({ + encryptionKey: finalEncryptionKey, + password: String(payload.password), + }); + payload.password = hashedPassword; + const fieldsQuery = `SHOW COLUMNS FROM ${dbFullName}.users`; + let fields = await varDatabaseDbHandler({ + queryString: fieldsQuery, + database: dbFullName, + }); + if (!(fields === null || fields === void 0 ? 
void 0 : fields[0])) { + const newTable = await addUsersTableToDb({ + userId: Number(API_USER_ID), + database: dbFullName, + payload: payload, + dbId: targetDbSchema.id, }); - payload.password = hashedPassword; - const fieldsQuery = `SHOW COLUMNS FROM ${dbFullName}.users`; - let fields = yield (0, varDatabaseDbHandler_1.default)({ + fields = await varDatabaseDbHandler({ queryString: fieldsQuery, database: dbFullName, }); - if (!(fields === null || fields === void 0 ? void 0 : fields[0])) { - const newTable = yield (0, addUsersTableToDb_1.default)({ + } + if (!(fields === null || fields === void 0 ? void 0 : fields[0])) { + return { + success: false, + msg: "Could not create users table", + }; + } + const fieldsTitles = fields.map((fieldObject) => fieldObject.Field); + let invalidField = null; + for (let i = 0; i < Object.keys(payload).length; i++) { + const key = Object.keys(payload)[i]; + if (!fieldsTitles.includes(key)) { + await updateUsersTableSchema({ userId: Number(API_USER_ID), database: dbFullName, - payload: payload, - }); - fields = yield (0, varDatabaseDbHandler_1.default)({ - queryString: fieldsQuery, - database: dbFullName, + newPayload: { + [key]: payload[key], + }, + dbId: targetDbSchema.id, }); } - if (!(fields === null || fields === void 0 ? void 0 : fields[0])) { - return { - success: false, - msg: "Could not create users table", - }; - } - const fieldsTitles = fields.map((fieldObject) => fieldObject.Field); - let invalidField = null; - for (let i = 0; i < Object.keys(payload).length; i++) { - const key = Object.keys(payload)[i]; - if (!fieldsTitles.includes(key)) { - yield (0, updateUsersTableSchema_1.default)({ - userId: Number(API_USER_ID), - database: dbFullName, - newPayload: { - [key]: payload[key], - }, - }); - } - } - if (invalidField) { - return { - success: false, - msg: `${invalidField} is not a valid field!`, - }; - } - const existingUserQuery = `SELECT * FROM ${dbFullName}.users WHERE email = ?${payload.username ? " OR username = ?" : ""}`; - const existingUserValues = payload.username - ? [payload.email, payload.username] - : [payload.email]; - const existingUser = yield (0, varDatabaseDbHandler_1.default)({ - queryString: existingUserQuery, - queryValuesArray: existingUserValues, + } + if (invalidField) { + return { + success: false, + msg: `${invalidField} is not a valid field!`, + }; + } + const existingUserQuery = `SELECT * FROM ${dbFullName}.users WHERE email = ?${payload.username ? " OR username = ?" : ""}`; + const existingUserValues = payload.username + ? [payload.email, payload.username] + : [payload.email]; + const existingUser = await varDatabaseDbHandler({ + queryString: existingUserQuery, + queryValuesArray: existingUserValues, + database: dbFullName, + }); + if (existingUser === null || existingUser === void 0 ? void 0 : existingUser[0]) { + return { + success: false, + msg: "User Already Exists", + payload: null, + }; + } + const isEmailValid = await validateEmail({ email: payload.email }); + if (!isEmailValid.isValid) { + return { + success: false, + msg: isEmailValid.message, + payload: null, + }; + } + const addUser = await addDbEntry({ + dbFullName: dbFullName, + tableName: "users", + data: Object.assign(Object.assign({}, payload), { image: process.env.DSQL_DEFAULT_USER_IMAGE || + "/images/user-preset.png", image_thumbnail: process.env.DSQL_DEFAULT_USER_IMAGE || + "/images/user-preset-thumbnail.png" }), + }); + if ((_a = addUser === null || addUser === void 0 ? void 0 : addUser.payload) === null || _a === void 0 ? 
void 0 : _a.insertId) { + const newlyAddedUserQuery = `SELECT id,uuid,first_name,last_name,email,username,image,image_thumbnail,verification_status FROM ${dbFullName}.users WHERE id='${addUser.payload.insertId}'`; + const newlyAddedUser = await varDatabaseDbHandler({ + queryString: newlyAddedUserQuery, database: dbFullName, }); - if (existingUser === null || existingUser === void 0 ? void 0 : existingUser[0]) { - return { - success: false, - msg: "User Already Exists", - payload: null, - }; - } - const isEmailValid = yield (0, validate_email_1.default)({ email: payload.email }); - if (!isEmailValid.isValid) { - return { - success: false, - msg: isEmailValid.message, - payload: null, - }; - } - const addUser = yield (0, addDbEntry_1.default)({ - dbFullName: dbFullName, - tableName: "users", - data: Object.assign(Object.assign({}, payload), { image: process.env.DSQL_DEFAULT_USER_IMAGE || - "/images/user-preset.png", image_thumbnail: process.env.DSQL_DEFAULT_USER_IMAGE || - "/images/user-preset-thumbnail.png" }), - }); - if (addUser === null || addUser === void 0 ? void 0 : addUser.insertId) { - const newlyAddedUserQuery = `SELECT id,uuid,first_name,last_name,email,username,image,image_thumbnail,verification_status FROM ${dbFullName}.users WHERE id='${addUser.insertId}'`; - const newlyAddedUser = yield (0, varDatabaseDbHandler_1.default)({ - queryString: newlyAddedUserQuery, - database: dbFullName, - }); - return { - success: true, - payload: newlyAddedUser[0], - }; - } - else { - return { - success: false, - msg: "Could not create user", - sqlResult: addUser, - payload: null, - }; - } - }); + return { + success: true, + payload: newlyAddedUser[0], + }; + } + else { + return { + success: false, + msg: "Could not create user", + sqlResult: addUser, + payload: null, + }; + } } diff --git a/dist/package-shared/functions/api/users/api-delete-user.js b/dist/package-shared/functions/api/users/api-delete-user.js index 5dd5095..4ff018a 100644 --- a/dist/package-shared/functions/api/users/api-delete-user.js +++ b/dist/package-shared/functions/api/users/api-delete-user.js @@ -1,48 +1,31 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = apiDeleteUser; -const deleteDbEntry_1 = __importDefault(require("../../backend/db/deleteDbEntry")); -const varDatabaseDbHandler_1 = __importDefault(require("../../backend/varDatabaseDbHandler")); +import deleteDbEntry from "../../backend/db/deleteDbEntry"; +import varDatabaseDbHandler from "../../backend/varDatabaseDbHandler"; /** * # Update API User Function */ -function apiDeleteUser(_a) { - return __awaiter(this, arguments, void 0, function* ({ dbFullName, deletedUserId, }) { - const existingUserQuery = `SELECT * FROM ${dbFullName}.users WHERE id = ?`; - const existingUserValues = [deletedUserId]; - const existingUser = yield (0, varDatabaseDbHandler_1.default)({ - queryString: existingUserQuery, - queryValuesArray: existingUserValues, - database: dbFullName, - }); - if (!(existingUser === null || existingUser === void 0 ? void 0 : existingUser[0])) { - return { - success: false, - msg: "User not found", - }; - } - const deleteUser = yield (0, deleteDbEntry_1.default)({ - dbContext: "Dsql User", - dbFullName, - tableName: "users", - identifierColumnName: "id", - identifierValue: deletedUserId, - }); - return { - success: true, - result: deleteUser, - }; +export default async function apiDeleteUser({ dbFullName, deletedUserId, }) { + const existingUserQuery = `SELECT * FROM ${dbFullName}.users WHERE id = ?`; + const existingUserValues = [deletedUserId]; + const existingUser = await varDatabaseDbHandler({ + queryString: existingUserQuery, + queryValuesArray: existingUserValues, + database: dbFullName, }); + if (!(existingUser === null || existingUser === void 0 ? void 0 : existingUser[0])) { + return { + success: false, + msg: "User not found", + }; + } + const deleteUser = await deleteDbEntry({ + dbContext: "Dsql User", + dbFullName, + tableName: "users", + identifierColumnName: "id", + identifierValue: deletedUserId, + }); + return { + success: true, + result: deleteUser, + }; } diff --git a/dist/package-shared/functions/api/users/api-get-user.js b/dist/package-shared/functions/api/users/api-get-user.js index ab21b65..84c57e3 100644 --- a/dist/package-shared/functions/api/users/api-get-user.js +++ b/dist/package-shared/functions/api/users/api-get-user.js @@ -1,41 +1,24 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = apiGetUser; -const varDatabaseDbHandler_1 = __importDefault(require("../../backend/varDatabaseDbHandler")); +import varDatabaseDbHandler from "../../backend/varDatabaseDbHandler"; /** * # API Get User */ -function apiGetUser(_a) { - return __awaiter(this, arguments, void 0, function* ({ fields, dbFullName, userId, }) { - const finalDbName = dbFullName.replace(/[^a-z0-9_]/g, ""); - const query = `SELECT ${fields.join(",")} FROM ${finalDbName}.users WHERE id=?`; - const API_USER_ID = userId || process.env.DSQL_API_USER_ID; - let foundUser = yield (0, varDatabaseDbHandler_1.default)({ - queryString: query, - queryValuesArray: [API_USER_ID], - database: finalDbName, - }); - if (!foundUser || !foundUser[0]) { - return { - success: false, - payload: null, - }; - } - return { - success: true, - payload: foundUser[0], - }; +export default async function apiGetUser({ fields, dbFullName, userId, }) { + const finalDbName = dbFullName.replace(/[^a-z0-9_]/g, ""); + const query = `SELECT ${fields.join(",")} FROM ${finalDbName}.users WHERE id=?`; + const API_USER_ID = userId || process.env.DSQL_API_USER_ID; + let foundUser = await varDatabaseDbHandler({ + queryString: query, + queryValuesArray: [API_USER_ID], + database: finalDbName, }); + if (!foundUser || !foundUser[0]) { + return { + success: false, + payload: null, + }; + } + return { + success: true, + payload: foundUser[0], + }; } diff --git a/dist/package-shared/functions/api/users/api-login.js b/dist/package-shared/functions/api/users/api-login.js index 5f97c56..5c87a2f 100644 --- a/dist/package-shared/functions/api/users/api-login.js +++ b/dist/package-shared/functions/api/users/api-login.js @@ -1,162 +1,154 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = apiLoginUser; -const grab_db_full_name_1 = __importDefault(require("../../../utils/grab-db-full-name")); -const varDatabaseDbHandler_1 = __importDefault(require("../../backend/varDatabaseDbHandler")); -const hashPassword_1 = __importDefault(require("../../dsql/hashPassword")); +import grabDbFullName from "../../../utils/grab-db-full-name"; +import varDatabaseDbHandler from "../../backend/varDatabaseDbHandler"; +import hashPassword from "../../dsql/hashPassword"; /** * # API Login */ -function apiLoginUser(_a) { - return __awaiter(this, arguments, void 0, function* ({ encryptionKey, email, username, password, database, additionalFields, email_login, email_login_code, email_login_field, skipPassword, social, dbUserId, debug, }) { - const dbFullName = (0, grab_db_full_name_1.default)({ dbName: database, userId: dbUserId }); - const dbAppend = global.DSQL_USE_LOCAL ? "" : `${dbFullName}.`; - /** - * Check input validity - * - * @description Check input validity - */ - if ((email === null || email === void 0 ? void 0 : email.match(/ /)) || - (username && (username === null || username === void 0 ? void 0 : username.match(/ /))) || - (password && (password === null || password === void 0 ? void 0 : password.match(/ /)))) { - return { - success: false, - msg: "Invalid Email/Password format", - }; - } - /** - * Password hash - * - * @description Password hash - */ - let hashedPassword = password - ? (0, hashPassword_1.default)({ - encryptionKey: encryptionKey, - password: password, - }) - : null; +export default async function apiLoginUser({ encryptionKey, email, username, password, database, additionalFields, email_login, email_login_code, email_login_field, skipPassword, social, dbUserId, debug, }) { + const dbFullName = grabDbFullName({ dbName: database, userId: dbUserId }); + if (!dbFullName) { + console.log(`Database Full Name couldn't be grabbed`); + return { + success: false, + msg: `Database Full Name couldn't be grabbed`, + }; + } + const dbAppend = global.DSQL_USE_LOCAL ? "" : `${dbFullName}.`; + /** + * Check input validity + * + * @description Check input validity + */ + if ((email === null || email === void 0 ? void 0 : email.match(/ /)) || + (username && (username === null || username === void 0 ? void 0 : username.match(/ /))) || + (password && (password === null || password === void 0 ? void 0 : password.match(/ /)))) { + return { + success: false, + msg: "Invalid Email/Password format", + }; + } + /** + * Password hash + * + * @description Password hash + */ + let hashedPassword = password + ? hashPassword({ + encryptionKey: encryptionKey, + password: password, + }) + : null; + if (debug) { + console.log("apiLoginUser:database:", dbFullName); + console.log("apiLoginUser:Finding User ..."); + } + let foundUser = await varDatabaseDbHandler({ + queryString: `SELECT * FROM ${dbAppend}users WHERE email = ? OR username = ?`, + queryValuesArray: [email, username], + database: dbFullName, + debug, + }); + if (debug) { + console.log("apiLoginUser:foundUser:", foundUser); + } + if ((!foundUser || !foundUser[0]) && !social) + return { + success: false, + payload: null, + msg: "No user found", + }; + let isPasswordCorrect = false; + if (debug) { + console.log("apiLoginUser:isPasswordCorrect:", isPasswordCorrect); + } + if ((foundUser === null || foundUser === void 0 ? 
void 0 : foundUser[0]) && !email_login && skipPassword) { + isPasswordCorrect = true; + } + else if ((foundUser === null || foundUser === void 0 ? void 0 : foundUser[0]) && !email_login) { if (debug) { - console.log("apiLoginUser:database:", dbFullName); - console.log("apiLoginUser:Finding User ..."); + console.log("apiLoginUser:hashedPassword:", hashedPassword); + console.log("apiLoginUser:foundUser[0].password:", foundUser[0].password); } - let foundUser = yield (0, varDatabaseDbHandler_1.default)({ - queryString: `SELECT * FROM ${dbAppend}users WHERE email = ? OR username = ?`, + isPasswordCorrect = hashedPassword === foundUser[0].password; + } + else if (foundUser && + foundUser[0] && + email_login && + email_login_code && + email_login_field) { + const tempCode = foundUser[0][email_login_field]; + if (debug) { + console.log("apiLoginUser:tempCode:", tempCode); + } + if (!tempCode) + throw new Error("No code Found!"); + const tempCodeArray = tempCode.split("-"); + const [code, codeDate] = tempCodeArray; + const millisecond15mins = 1000 * 60 * 15; + if (Date.now() - Number(codeDate) > millisecond15mins) { + throw new Error("Code Expired"); + } + isPasswordCorrect = code === email_login_code; + } + let socialUserValid = false; + if (!isPasswordCorrect && !socialUserValid) { + return { + success: false, + msg: "Wrong password, no social login validity", + payload: null, + }; + } + if (debug) { + console.log("apiLoginUser:isPasswordCorrect:", isPasswordCorrect); + console.log("apiLoginUser:email_login:", email_login); + } + if (isPasswordCorrect && email_login) { + const resetTempCode = await varDatabaseDbHandler({ + queryString: `UPDATE ${dbAppend}users SET ${email_login_field} = '' WHERE email = ? OR username = ?`, queryValuesArray: [email, username], database: dbFullName, - debug, }); - if (debug) { - console.log("apiLoginUser:foundUser:", foundUser); - } - if ((!foundUser || !foundUser[0]) && !social) - return { - success: false, - payload: null, - msg: "No user found", - }; - let isPasswordCorrect = false; - if (debug) { - console.log("apiLoginUser:isPasswordCorrect:", isPasswordCorrect); - } - if ((foundUser === null || foundUser === void 0 ? void 0 : foundUser[0]) && !email_login && skipPassword) { - isPasswordCorrect = true; - } - else if ((foundUser === null || foundUser === void 0 ? 
void 0 : foundUser[0]) && !email_login) { - if (debug) { - console.log("apiLoginUser:hashedPassword:", hashedPassword); - console.log("apiLoginUser:foundUser[0].password:", foundUser[0].password); - } - isPasswordCorrect = hashedPassword === foundUser[0].password; - } - else if (foundUser && - foundUser[0] && - email_login && - email_login_code && - email_login_field) { - const tempCode = foundUser[0][email_login_field]; - if (debug) { - console.log("apiLoginUser:tempCode:", tempCode); - } - if (!tempCode) - throw new Error("No code Found!"); - const tempCodeArray = tempCode.split("-"); - const [code, codeDate] = tempCodeArray; - const millisecond15mins = 1000 * 60 * 15; - if (Date.now() - Number(codeDate) > millisecond15mins) { - throw new Error("Code Expired"); - } - isPasswordCorrect = code === email_login_code; - } - let socialUserValid = false; - if (!isPasswordCorrect && !socialUserValid) { - return { - success: false, - msg: "Wrong password, no social login validity", - payload: null, - }; - } - if (debug) { - console.log("apiLoginUser:isPasswordCorrect:", isPasswordCorrect); - console.log("apiLoginUser:email_login:", email_login); - } - if (isPasswordCorrect && email_login) { - const resetTempCode = yield (0, varDatabaseDbHandler_1.default)({ - queryString: `UPDATE ${dbAppend}users SET ${email_login_field} = '' WHERE email = ? OR username = ?`, - queryValuesArray: [email, username], - database: dbFullName, - }); - } - let csrfKey = Math.random().toString(36).substring(2) + - "-" + - Math.random().toString(36).substring(2); - let userPayload = { - id: foundUser[0].id, - first_name: foundUser[0].first_name, - last_name: foundUser[0].last_name, - username: foundUser[0].username, - email: foundUser[0].email, - phone: foundUser[0].phone, - social_id: foundUser[0].social_id, - image: foundUser[0].image, - image_thumbnail: foundUser[0].image_thumbnail, - verification_status: foundUser[0].verification_status, - social_login: foundUser[0].social_login, - social_platform: foundUser[0].social_platform, - csrf_k: csrfKey, - more_data: foundUser[0].more_user_data, - logged_in_status: true, - date: Date.now(), - }; - if (debug) { - console.log("apiLoginUser:userPayload:", userPayload); - console.log("apiLoginUser:Sending Response Object ..."); - } - const resposeObject = { - success: true, - msg: "Login Successful", - payload: userPayload, - userId: foundUser[0].id, - csrf: csrfKey, - }; - if (additionalFields && - Array.isArray(additionalFields) && - additionalFields.length > 0) { - additionalFields.forEach((key) => { - userPayload[key] = foundUser[0][key]; - }); - } - return resposeObject; - }); + } + let csrfKey = Math.random().toString(36).substring(2) + + "-" + + Math.random().toString(36).substring(2); + let userPayload = { + id: foundUser[0].id, + uid: foundUser[0].uid, + uuid: foundUser[0].uuid, + first_name: foundUser[0].first_name, + last_name: foundUser[0].last_name, + username: foundUser[0].username, + email: foundUser[0].email, + phone: foundUser[0].phone, + social_id: foundUser[0].social_id, + image: foundUser[0].image, + image_thumbnail: foundUser[0].image_thumbnail, + verification_status: foundUser[0].verification_status, + social_login: foundUser[0].social_login, + social_platform: foundUser[0].social_platform, + csrf_k: csrfKey, + more_data: foundUser[0].more_user_data, + logged_in_status: true, + date: Date.now(), + }; + if (debug) { + console.log("apiLoginUser:userPayload:", userPayload); + console.log("apiLoginUser:Sending Response Object ..."); + } + const resposeObject = { 
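        // Descriptive note on the object assembled below: `payload` carries the user
        // fields gathered in `userPayload` above (plus any `additionalFields` copied in
        // before returning), `csrf` holds the per-session CSRF key, and `userId` is the
        // id of the matched `users` row.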
+ success: true, + msg: "Login Successful", + payload: userPayload, + userId: foundUser[0].id, + csrf: csrfKey, + }; + if (additionalFields && + Array.isArray(additionalFields) && + additionalFields.length > 0) { + additionalFields.forEach((key) => { + userPayload[key] = foundUser[0][key]; + }); + } + return resposeObject; } diff --git a/dist/package-shared/functions/api/users/api-reauth-user.js b/dist/package-shared/functions/api/users/api-reauth-user.js index e26d31e..3ee8f79 100644 --- a/dist/package-shared/functions/api/users/api-reauth-user.js +++ b/dist/package-shared/functions/api/users/api-reauth-user.js @@ -1,75 +1,58 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = apiReauthUser; -const varDatabaseDbHandler_1 = __importDefault(require("../../backend/varDatabaseDbHandler")); +import varDatabaseDbHandler from "../../backend/varDatabaseDbHandler"; /** * # Re-authenticate API user */ -function apiReauthUser(_a) { - return __awaiter(this, arguments, void 0, function* ({ existingUser, database, additionalFields, }) { - const dbAppend = global.DSQL_USE_LOCAL - ? "" - : database - ? `${database}.` - : ""; - let foundUser = (existingUser === null || existingUser === void 0 ? void 0 : existingUser.id) && existingUser.id.toString().match(/./) - ? yield (0, varDatabaseDbHandler_1.default)({ - queryString: `SELECT * FROM ${dbAppend}users WHERE id=?`, - queryValuesArray: [existingUser.id.toString()], - database, - }) - : null; - if (!foundUser || !foundUser[0]) - return { - success: false, - payload: null, - msg: "No user found", - }; - let csrfKey = Math.random().toString(36).substring(2) + - "-" + - Math.random().toString(36).substring(2); - let userPayload = { - id: foundUser[0].id, - first_name: foundUser[0].first_name, - last_name: foundUser[0].last_name, - username: foundUser[0].username, - email: foundUser[0].email, - phone: foundUser[0].phone, - social_id: foundUser[0].social_id, - image: foundUser[0].image, - image_thumbnail: foundUser[0].image_thumbnail, - verification_status: foundUser[0].verification_status, - social_login: foundUser[0].social_login, - social_platform: foundUser[0].social_platform, - csrf_k: csrfKey, - more_data: foundUser[0].more_user_data, - logged_in_status: true, - date: Date.now(), - }; - if (additionalFields && - Array.isArray(additionalFields) && - additionalFields.length > 0) { - additionalFields.forEach((key) => { - userPayload[key] = foundUser[0][key]; - }); - } +export default async function apiReauthUser({ existingUser, database, additionalFields, }) { + const dbAppend = global.DSQL_USE_LOCAL + ? "" + : database + ? `${database}.` + : ""; + let foundUser = (existingUser === null || existingUser === void 0 ? 
void 0 : existingUser.id) && existingUser.id.toString().match(/./) + ? await varDatabaseDbHandler({ + queryString: `SELECT * FROM ${dbAppend}users WHERE id=?`, + queryValuesArray: [existingUser.id.toString()], + database, + }) + : null; + if (!foundUser || !foundUser[0]) return { - success: true, - msg: "Login Successful", - payload: userPayload, - csrf: csrfKey, + success: false, + payload: null, + msg: "No user found", }; - }); + let csrfKey = Math.random().toString(36).substring(2) + + "-" + + Math.random().toString(36).substring(2); + let userPayload = { + id: foundUser[0].id, + first_name: foundUser[0].first_name, + last_name: foundUser[0].last_name, + username: foundUser[0].username, + email: foundUser[0].email, + phone: foundUser[0].phone, + social_id: foundUser[0].social_id, + image: foundUser[0].image, + image_thumbnail: foundUser[0].image_thumbnail, + verification_status: foundUser[0].verification_status, + social_login: foundUser[0].social_login, + social_platform: foundUser[0].social_platform, + csrf_k: csrfKey, + more_data: foundUser[0].more_user_data, + logged_in_status: true, + date: Date.now(), + }; + if (additionalFields && + Array.isArray(additionalFields) && + additionalFields.length > 0) { + additionalFields.forEach((key) => { + userPayload[key] = foundUser[0][key]; + }); + } + return { + success: true, + msg: "Login Successful", + payload: userPayload, + csrf: csrfKey, + }; } diff --git a/dist/package-shared/functions/api/users/api-send-email-code.js b/dist/package-shared/functions/api/users/api-send-email-code.js index 9de591f..57e0dfd 100644 --- a/dist/package-shared/functions/api/users/api-send-email-code.js +++ b/dist/package-shared/functions/api/users/api-send-email-code.js @@ -1,132 +1,115 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = apiSendEmailCode; -const varDatabaseDbHandler_1 = __importDefault(require("../../backend/varDatabaseDbHandler")); -const nodemailer_1 = __importDefault(require("nodemailer")); -const get_auth_cookie_names_1 = __importDefault(require("../../backend/cookies/get-auth-cookie-names")); -const encrypt_1 = __importDefault(require("../../dsql/encrypt")); -const serialize_cookies_1 = __importDefault(require("../../../utils/serialize-cookies")); +import varDatabaseDbHandler from "../../backend/varDatabaseDbHandler"; +import nodemailer from "nodemailer"; +import getAuthCookieNames from "../../backend/cookies/get-auth-cookie-names"; +import encrypt from "../../dsql/encrypt"; +import serializeCookies from "../../../utils/serialize-cookies"; /** * # Send Email Login Code */ -function apiSendEmailCode(_a) { - return __awaiter(this, arguments, void 0, function* ({ email, database, email_login_field, mail_domain, mail_port, sender, mail_username, mail_password, html, response, extraCookies, }) { - if (email === null || email === void 0 ? void 0 : email.match(/ /)) { - return { - success: false, - msg: "Invalid Email/Password format", - }; +export default async function apiSendEmailCode({ email, database, email_login_field, mail_domain, mail_port, sender, mail_username, mail_password, html, response, extraCookies, }) { + if (email === null || email === void 0 ? void 0 : email.match(/ /)) { + return { + success: false, + msg: "Invalid Email/Password format", + }; + } + const createdAt = Date.now(); + const foundUserQuery = `SELECT * FROM ${database}.users WHERE email = ?`; + const foundUserValues = [email]; + let foundUser = await varDatabaseDbHandler({ + queryString: foundUserQuery, + queryValuesArray: foundUserValues, + database, + }); + //////////////////////////////////////// + //////////////////////////////////////// + //////////////////////////////////////// + if (!foundUser || !foundUser[0]) { + return { + success: false, + msg: "No user found", + }; + } + function generateCode() { + const chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"; + let code = ""; + for (let i = 0; i < 8; i++) { + code += chars[Math.floor(Math.random() * chars.length)]; } - const createdAt = Date.now(); - const foundUserQuery = `SELECT * FROM ${database}.users WHERE email = ?`; - const foundUserValues = [email]; - let foundUser = yield (0, varDatabaseDbHandler_1.default)({ - queryString: foundUserQuery, - queryValuesArray: foundUserValues, + return code; + } + if ((foundUser === null || foundUser === void 0 ? void 0 : foundUser[0]) && email_login_field) { + const tempCode = generateCode(); + let transporter = nodemailer.createTransport({ + host: mail_domain || process.env.DSQL_MAIL_HOST, + port: mail_port + ? mail_port + : process.env.DSQL_MAIL_PORT + ? Number(process.env.DSQL_MAIL_PORT) + : 465, + secure: true, + auth: { + user: mail_username || process.env.DSQL_MAIL_EMAIL, + pass: mail_password || process.env.DSQL_MAIL_PASSWORD, + }, + }); + let mailObject = {}; + mailObject["from"] = `"Datasquirel SSO" <${sender || "support@datasquirel.com"}>`; + mailObject["sender"] = sender || "support@datasquirel.com"; + mailObject["to"] = email; + mailObject["subject"] = "One Time Login Code"; + mailObject["html"] = html.replace(/{{code}}/, tempCode); + const info = await transporter.sendMail(mailObject); + if (!(info === null || info === void 0 ? 
void 0 : info.accepted)) + throw new Error("Mail not Sent!"); + const setTempCodeQuery = `UPDATE ${database}.users SET ${email_login_field} = ? WHERE email = ?`; + const setTempCodeValues = [tempCode + `-${createdAt}`, email]; + let setTempCode = await varDatabaseDbHandler({ + queryString: setTempCodeQuery, + queryValuesArray: setTempCodeValues, database, }); - //////////////////////////////////////// - //////////////////////////////////////// - //////////////////////////////////////// - if (!foundUser || !foundUser[0]) { - return { - success: false, - msg: "No user found", - }; - } - function generateCode() { - const chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"; - let code = ""; - for (let i = 0; i < 8; i++) { - code += chars[Math.floor(Math.random() * chars.length)]; + /** @type {import("../../../types").SendOneTimeCodeEmailResponse} */ + const resObject = { + success: true, + code: tempCode, + email: email, + createdAt, + msg: "Success", + }; + if (response) { + const cookieKeyNames = getAuthCookieNames(); + const oneTimeCodeCookieName = cookieKeyNames.oneTimeCodeName; + const encryptedPayload = encrypt({ + data: JSON.stringify(resObject), + }); + if (!encryptedPayload) { + throw new Error("apiSendEmailCode Error: Failed to encrypt payload"); } - return code; - } - if ((foundUser === null || foundUser === void 0 ? void 0 : foundUser[0]) && email_login_field) { - const tempCode = generateCode(); - let transporter = nodemailer_1.default.createTransport({ - host: mail_domain || process.env.DSQL_MAIL_HOST, - port: mail_port - ? mail_port - : process.env.DSQL_MAIL_PORT - ? Number(process.env.DSQL_MAIL_PORT) - : 465, + /** @type {import("../../../../package-shared/types").CookieObject} */ + const oneTimeCookieObject = { + name: oneTimeCodeCookieName, + value: encryptedPayload, + sameSite: "Strict", + path: "/", + httpOnly: true, secure: true, - auth: { - user: mail_username || process.env.DSQL_MAIL_EMAIL, - pass: mail_password || process.env.DSQL_MAIL_PASSWORD, - }, - }); - let mailObject = {}; - mailObject["from"] = `"Datasquirel SSO" <${sender || "support@datasquirel.com"}>`; - mailObject["sender"] = sender || "support@datasquirel.com"; - mailObject["to"] = email; - mailObject["subject"] = "One Time Login Code"; - mailObject["html"] = html.replace(/{{code}}/, tempCode); - const info = yield transporter.sendMail(mailObject); - if (!(info === null || info === void 0 ? void 0 : info.accepted)) - throw new Error("Mail not Sent!"); - const setTempCodeQuery = `UPDATE ${database}.users SET ${email_login_field} = ? 
WHERE email = ?`; - const setTempCodeValues = [tempCode + `-${createdAt}`, email]; - let setTempCode = yield (0, varDatabaseDbHandler_1.default)({ - queryString: setTempCodeQuery, - queryValuesArray: setTempCodeValues, - database, - }); - /** @type {import("../../../types").SendOneTimeCodeEmailResponse} */ - const resObject = { - success: true, - code: tempCode, - email: email, - createdAt, - msg: "Success", }; - if (response) { - const cookieKeyNames = (0, get_auth_cookie_names_1.default)(); - const oneTimeCodeCookieName = cookieKeyNames.oneTimeCodeName; - const encryptedPayload = (0, encrypt_1.default)({ - data: JSON.stringify(resObject), - }); - if (!encryptedPayload) { - throw new Error("apiSendEmailCode Error: Failed to encrypt payload"); - } - /** @type {import("../../../../package-shared/types").CookieObject} */ - const oneTimeCookieObject = { - name: oneTimeCodeCookieName, - value: encryptedPayload, - sameSite: "Strict", - path: "/", - httpOnly: true, - secure: true, - }; - /** @type {import("../../../../package-shared/types").CookieObject[]} */ - const cookiesObjectArray = extraCookies - ? [...extraCookies, oneTimeCookieObject] - : [oneTimeCookieObject]; - const serializedCookies = (0, serialize_cookies_1.default)({ - cookies: cookiesObjectArray, - }); - response.setHeader("Set-Cookie", serializedCookies); - } - return resObject; + /** @type {import("../../../../package-shared/types").CookieObject[]} */ + const cookiesObjectArray = extraCookies + ? [...extraCookies, oneTimeCookieObject] + : [oneTimeCookieObject]; + const serializedCookies = serializeCookies({ + cookies: cookiesObjectArray, + }); + response.setHeader("Set-Cookie", serializedCookies); } - else { - return { - success: false, - msg: "Invalid Email/Password format", - }; - } - }); + return resObject; + } + else { + return { + success: false, + msg: "Invalid Email/Password format", + }; + } } diff --git a/dist/package-shared/functions/api/users/api-update-user.js b/dist/package-shared/functions/api/users/api-update-user.js index 1960a57..a0cb570 100644 --- a/dist/package-shared/functions/api/users/api-update-user.js +++ b/dist/package-shared/functions/api/users/api-update-user.js @@ -1,81 +1,64 @@ -"use strict"; // @ts-check -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = apiUpdateUser; -const updateDbEntry_1 = __importDefault(require("../../backend/db/updateDbEntry")); -const encrypt_1 = __importDefault(require("../../dsql/encrypt")); -const hashPassword_1 = __importDefault(require("../../dsql/hashPassword")); -const varDatabaseDbHandler_1 = __importDefault(require("../../backend/varDatabaseDbHandler")); +import updateDbEntry from "../../backend/db/updateDbEntry"; +import encrypt from "../../dsql/encrypt"; +import hashPassword from "../../dsql/hashPassword"; +import varDatabaseDbHandler from "../../backend/varDatabaseDbHandler"; /** * # Update API User Function */ -function apiUpdateUser(_a) { - return __awaiter(this, arguments, void 0, function* ({ payload, dbFullName, updatedUserId, dbSchema, }) { - const existingUserQuery = `SELECT * FROM ${dbFullName}.users WHERE id = ?`; - const existingUserValues = [updatedUserId]; - const existingUser = yield (0, varDatabaseDbHandler_1.default)({ - queryString: existingUserQuery, - queryValuesArray: existingUserValues, - database: dbFullName, - }); - if (!(existingUser === null || existingUser === void 0 ? void 0 : existingUser[0])) { - return { - success: false, - msg: "User not found", - }; - } - const data = (() => { - const reqBodyKeys = Object.keys(payload); - const targetTableSchema = (() => { - var _a; - try { - const targetDatabaseSchema = (_a = dbSchema === null || dbSchema === void 0 ? void 0 : dbSchema.tables) === null || _a === void 0 ? void 0 : _a.find((tbl) => tbl.tableName == "users"); - return targetDatabaseSchema; - } - catch (error) { - return undefined; - } - })(); - /** @type {any} */ - const finalData = {}; - reqBodyKeys.forEach((key) => { - var _a; - const targetFieldSchema = (_a = targetTableSchema === null || targetTableSchema === void 0 ? void 0 : targetTableSchema.fields) === null || _a === void 0 ? void 0 : _a.find((field) => field.fieldName == key); - if (key === null || key === void 0 ? void 0 : key.match(/^date_|^id$|^uuid$/)) - return; - let value = payload[key]; - if (targetFieldSchema === null || targetFieldSchema === void 0 ? void 0 : targetFieldSchema.encrypted) { - value = (0, encrypt_1.default)({ data: value }); - } - finalData[key] = value; - }); - if (finalData.password && typeof finalData.password == "string") { - finalData.password = (0, hashPassword_1.default)({ password: finalData.password }); - } - return finalData; - })(); - const updateUser = yield (0, updateDbEntry_1.default)({ - dbFullName, - tableName: "users", - identifierColumnName: "id", - identifierValue: updatedUserId, - data: data, - }); - return { - success: true, - payload: updateUser, - }; +export default async function apiUpdateUser({ payload, dbFullName, updatedUserId, dbSchema, }) { + const existingUserQuery = `SELECT * FROM ${dbFullName}.users WHERE id = ?`; + const existingUserValues = [updatedUserId]; + const existingUser = await varDatabaseDbHandler({ + queryString: existingUserQuery, + queryValuesArray: existingUserValues, + database: dbFullName, }); + if (!(existingUser === null || existingUser === void 0 ? void 0 : existingUser[0])) { + return { + success: false, + msg: "User not found", + }; + } + const data = (() => { + const reqBodyKeys = Object.keys(payload); + const targetTableSchema = (() => { + var _a; + try { + const targetDatabaseSchema = (_a = dbSchema === null || dbSchema === void 0 ? void 0 : dbSchema.tables) === null || _a === void 0 ? 
void 0 : _a.find((tbl) => tbl.tableName == "users"); + return targetDatabaseSchema; + } + catch (error) { + return undefined; + } + })(); + /** @type {any} */ + const finalData = {}; + reqBodyKeys.forEach((key) => { + var _a; + const targetFieldSchema = (_a = targetTableSchema === null || targetTableSchema === void 0 ? void 0 : targetTableSchema.fields) === null || _a === void 0 ? void 0 : _a.find((field) => field.fieldName == key); + if (key === null || key === void 0 ? void 0 : key.match(/^date_|^id$|^uuid$/)) + return; + let value = payload[key]; + if (targetFieldSchema === null || targetFieldSchema === void 0 ? void 0 : targetFieldSchema.encrypted) { + value = encrypt({ data: value }); + } + finalData[key] = value; + }); + if (finalData.password && typeof finalData.password == "string") { + finalData.password = hashPassword({ password: finalData.password }); + } + return finalData; + })(); + const updateUser = await updateDbEntry({ + dbFullName, + tableName: "users", + identifierColumnName: "id", + identifierValue: updatedUserId, + data: data, + }); + return { + success: true, + payload: updateUser, + }; } diff --git a/dist/package-shared/functions/api/users/reset-password/(utils)/encrypt-url.js b/dist/package-shared/functions/api/users/reset-password/(utils)/encrypt-url.js index ba4607d..7428f08 100644 --- a/dist/package-shared/functions/api/users/reset-password/(utils)/encrypt-url.js +++ b/dist/package-shared/functions/api/users/reset-password/(utils)/encrypt-url.js @@ -1,18 +1,12 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = encryptReserPasswordUrl; -const ejson_1 = __importDefault(require("../../../../../utils/ejson")); -const encrypt_1 = __importDefault(require("../../../../dsql/encrypt")); -function encryptReserPasswordUrl({ email, encryptionKey, encryptionSalt, }) { +import EJSON from "../../../../../utils/ejson"; +import encrypt from "../../../../dsql/encrypt"; +export default function encryptReserPasswordUrl({ email, encryptionKey, encryptionSalt, }) { const encryptObject = { email, createdAt: Date.now(), }; - const encryptStr = (0, encrypt_1.default)({ - data: ejson_1.default.stringify(encryptObject), + const encryptStr = encrypt({ + data: EJSON.stringify(encryptObject), encryptionKey, encryptionSalt, }); diff --git a/dist/package-shared/functions/api/users/reset-password/api-send-reset-password-link.js b/dist/package-shared/functions/api/users/reset-password/api-send-reset-password-link.js index aee50b5..10627fe 100644 --- a/dist/package-shared/functions/api/users/reset-password/api-send-reset-password-link.js +++ b/dist/package-shared/functions/api/users/reset-password/api-send-reset-password-link.js @@ -1,52 +1,36 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = apiSendResetPasswordLink; -const grab_db_full_name_1 = __importDefault(require("../../../../utils/grab-db-full-name")); -const varDatabaseDbHandler_1 = __importDefault(require("../../../backend/varDatabaseDbHandler")); +import grabDbFullName from "../../../../utils/grab-db-full-name"; +import varDatabaseDbHandler from "../../../backend/varDatabaseDbHandler"; /** * # API Login */ -function apiSendResetPasswordLink(_a) { - return __awaiter(this, arguments, void 0, function* ({ database, email, dbUserId, debug, }) { - const dbFullName = (0, grab_db_full_name_1.default)({ dbName: database, userId: dbUserId }); - /** - * Check input validity - * - * @description Check input validity - */ - if (email === null || email === void 0 ? void 0 : email.match(/ /)) { - return { - success: false, - msg: "Invalid Email/Password format", - }; - } - let foundUser = yield (0, varDatabaseDbHandler_1.default)({ - queryString: `SELECT * FROM ${dbFullName}.users WHERE email = ? OR username = ?`, - queryValuesArray: [email, email], - database: dbFullName, - debug, - }); - if (debug) { - console.log("apiSendResetPassword:foundUser:", foundUser); - } - const targetUser = foundUser === null || foundUser === void 0 ? void 0 : foundUser[0]; - if (!targetUser) - return { - success: false, - msg: "No user found", - }; - return { success: true }; +export default async function apiSendResetPasswordLink({ database, email, dbUserId, debug, }) { + const dbFullName = grabDbFullName({ dbName: database, userId: dbUserId }); + if (!dbFullName) { + return { + success: false, + msg: `Couldn't get database full name`, + }; + } + if (email === null || email === void 0 ? void 0 : email.match(/ /)) { + return { + success: false, + msg: "Invalid Email/Password format", + }; + } + let foundUser = await varDatabaseDbHandler({ + queryString: `SELECT * FROM ${dbFullName}.users WHERE email = ? OR username = ?`, + queryValuesArray: [email, email], + database: dbFullName, + debug, }); + if (debug) { + console.log("apiSendResetPassword:foundUser:", foundUser); + } + const targetUser = foundUser === null || foundUser === void 0 ? void 0 : foundUser[0]; + if (!targetUser) + return { + success: false, + msg: "No user found", + }; + return { success: true }; } diff --git a/dist/package-shared/functions/api/users/social/api-github-login.js b/dist/package-shared/functions/api/users/social/api-github-login.js index f3cfd8d..d95ef29 100644 --- a/dist/package-shared/functions/api/users/social/api-github-login.js +++ b/dist/package-shared/functions/api/users/social/api-github-login.js @@ -1,88 +1,71 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = apiGithubLogin; -const handleSocialDb_1 = __importDefault(require("../../social-login/handleSocialDb")); -const githubLogin_1 = __importDefault(require("../../social-login/githubLogin")); -const camelJoinedtoCamelSpace_1 = __importDefault(require("../../../../utils/camelJoinedtoCamelSpace")); +import handleSocialDb from "../../social-login/handleSocialDb"; +import githubLogin from "../../social-login/githubLogin"; +import camelJoinedtoCamelSpace from "../../../../utils/camelJoinedtoCamelSpace"; /** * # API Login with Github */ -function apiGithubLogin(_a) { - return __awaiter(this, arguments, void 0, function* ({ code, clientId, clientSecret, database, additionalFields, email, additionalData, }) { - if (!code || !clientId || !clientSecret || !database) { - return { - success: false, - msg: "Missing query params", - }; - } - if (typeof code !== "string" || - typeof clientId !== "string" || - typeof clientSecret !== "string" || - typeof database !== "string") { - return { - success: false, - msg: "Wrong Parameters", - }; - } - /** - * Create new user folder and file - * - * @description Create new user folder and file - */ - const gitHubUser = yield (0, githubLogin_1.default)({ - code: code, - clientId: clientId, - clientSecret: clientSecret, - }); - if (!gitHubUser) { - return { - success: false, - msg: "No github user returned", - }; - } - const socialId = gitHubUser.name || gitHubUser.id || gitHubUser.login; - const targetName = gitHubUser.name || gitHubUser.login; - const nameArray = (targetName === null || targetName === void 0 ? void 0 : targetName.match(/ /)) - ? targetName === null || targetName === void 0 ? void 0 : targetName.split(" ") - : (targetName === null || targetName === void 0 ? void 0 : targetName.match(/\-/)) - ? targetName === null || targetName === void 0 ? 
void 0 : targetName.split("-") - : [targetName]; - let payload = { - email: gitHubUser.email, - first_name: (0, camelJoinedtoCamelSpace_1.default)(nameArray[0]), - last_name: (0, camelJoinedtoCamelSpace_1.default)(nameArray[1]), - social_id: socialId, - social_platform: "github", - image: gitHubUser.avatar_url, - image_thumbnail: gitHubUser.avatar_url, - username: "github-user-" + socialId, +export default async function apiGithubLogin({ code, clientId, clientSecret, database, additionalFields, email, additionalData, }) { + if (!code || !clientId || !clientSecret || !database) { + return { + success: false, + msg: "Missing query params", }; - if (additionalData) { - payload = Object.assign(Object.assign({}, payload), additionalData); - } - const loggedInGithubUser = yield (0, handleSocialDb_1.default)({ - database, - email: gitHubUser.email, - payload, - social_platform: "github", - supEmail: email, - additionalFields, - }); - //////////////////////////////////////////////// - //////////////////////////////////////////////// - //////////////////////////////////////////////// - return Object.assign({}, loggedInGithubUser); + } + if (typeof code !== "string" || + typeof clientId !== "string" || + typeof clientSecret !== "string" || + typeof database !== "string") { + return { + success: false, + msg: "Wrong Parameters", + }; + } + /** + * Create new user folder and file + * + * @description Create new user folder and file + */ + const gitHubUser = await githubLogin({ + code: code, + clientId: clientId, + clientSecret: clientSecret, }); + if (!gitHubUser) { + return { + success: false, + msg: "No github user returned", + }; + } + const socialId = gitHubUser.name || gitHubUser.id || gitHubUser.login; + const targetName = gitHubUser.name || gitHubUser.login; + const nameArray = (targetName === null || targetName === void 0 ? void 0 : targetName.match(/ /)) + ? targetName === null || targetName === void 0 ? void 0 : targetName.split(" ") + : (targetName === null || targetName === void 0 ? void 0 : targetName.match(/\-/)) + ? targetName === null || targetName === void 0 ? void 0 : targetName.split("-") + : [targetName]; + let payload = { + email: gitHubUser.email, + first_name: camelJoinedtoCamelSpace(nameArray[0]), + last_name: camelJoinedtoCamelSpace(nameArray[1]), + social_id: socialId, + social_platform: "github", + image: gitHubUser.avatar_url, + image_thumbnail: gitHubUser.avatar_url, + username: "github-user-" + socialId, + }; + if (additionalData) { + payload = Object.assign(Object.assign({}, payload), additionalData); + } + const loggedInGithubUser = await handleSocialDb({ + database, + email: gitHubUser.email, + payload, + social_platform: "github", + supEmail: email, + additionalFields, + }); + //////////////////////////////////////////////// + //////////////////////////////////////////////// + //////////////////////////////////////////////// + return Object.assign({}, loggedInGithubUser); } diff --git a/dist/package-shared/functions/api/users/social/api-google-login.js b/dist/package-shared/functions/api/users/social/api-google-login.js index 35d7775..da54e5b 100644 --- a/dist/package-shared/functions/api/users/social/api-google-login.js +++ b/dist/package-shared/functions/api/users/social/api-google-login.js @@ -1,89 +1,72 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = apiGoogleLogin; -const https_1 = __importDefault(require("https")); -const handleSocialDb_1 = __importDefault(require("../../social-login/handleSocialDb")); -const ejson_1 = __importDefault(require("../../../../utils/ejson")); +import https from "https"; +import handleSocialDb from "../../social-login/handleSocialDb"; +import EJSON from "../../../../utils/ejson"; /** * # API google login */ -function apiGoogleLogin(_a) { - return __awaiter(this, arguments, void 0, function* ({ token, database, additionalFields, additionalData, debug, loginOnly, }) { - try { - const gUser = yield new Promise((resolve, reject) => { - https_1.default - .request({ - method: "GET", - hostname: "www.googleapis.com", - path: "/oauth2/v3/userinfo", - headers: { - Authorization: `Bearer ${token}`, - }, - }, (res) => { - let data = ""; - res.on("data", (chunk) => { - data += chunk; - }); - res.on("end", () => { - resolve(ejson_1.default.parse(data)); - }); - }) - .end(); - }); - if (!(gUser === null || gUser === void 0 ? void 0 : gUser.email_verified)) - throw new Error("No Google User."); - /** - * Create new user folder and file - * - * @description Create new user folder and file - */ - const { given_name, family_name, email, sub, picture } = gUser; - let payloadObject = { - email: email, - first_name: given_name, - last_name: family_name, - social_id: sub, - social_platform: "google", - image: picture, - image_thumbnail: picture, - username: `google-user-${sub}`, - }; - if (additionalData) { - payloadObject = Object.assign(Object.assign({}, payloadObject), additionalData); - } - const loggedInGoogleUser = yield (0, handleSocialDb_1.default)({ - database, - email: email || "", - payload: payloadObject, - social_platform: "google", - additionalFields, - debug, - loginOnly, - }); - //////////////////////////////////////// - //////////////////////////////////////// - //////////////////////////////////////// - return Object.assign({}, loggedInGoogleUser); +export default async function apiGoogleLogin({ token, database, additionalFields, additionalData, debug, loginOnly, }) { + try { + const gUser = await new Promise((resolve, reject) => { + https + .request({ + method: "GET", + hostname: "www.googleapis.com", + path: "/oauth2/v3/userinfo", + headers: { + Authorization: `Bearer ${token}`, + }, + }, (res) => { + let data = ""; + res.on("data", (chunk) => { + data += chunk; + }); + res.on("end", () => { + resolve(EJSON.parse(data)); + }); + }) + .end(); + }); + if (!(gUser === null || gUser === void 0 ? 
void 0 : gUser.email_verified)) + throw new Error("No Google User."); + /** + * Create new user folder and file + * + * @description Create new user folder and file + */ + const { given_name, family_name, email, sub, picture } = gUser; + let payloadObject = { + email: email, + first_name: given_name, + last_name: family_name, + social_id: sub, + social_platform: "google", + image: picture, + image_thumbnail: picture, + username: `google-user-${sub}`, + }; + if (additionalData) { + payloadObject = Object.assign(Object.assign({}, payloadObject), additionalData); } - catch ( /** @type {any} */error) { - console.log(`api-google-login.ts ERROR: ${error.message}`); - return { - success: false, - payload: undefined, - msg: error.message, - }; - } - }); + const loggedInGoogleUser = await handleSocialDb({ + database, + email: email || "", + payload: payloadObject, + social_platform: "google", + additionalFields, + debug, + loginOnly, + }); + //////////////////////////////////////// + //////////////////////////////////////// + //////////////////////////////////////// + return Object.assign({}, loggedInGoogleUser); + } + catch ( /** @type {any} */error) { + console.log(`api-google-login.ts ERROR: ${error.message}`); + return { + success: false, + payload: undefined, + msg: error.message, + }; + } } diff --git a/dist/package-shared/functions/backend/addAdminUserOnLogin.js b/dist/package-shared/functions/backend/addAdminUserOnLogin.js index fa68467..3a76f85 100644 --- a/dist/package-shared/functions/backend/addAdminUserOnLogin.js +++ b/dist/package-shared/functions/backend/addAdminUserOnLogin.js @@ -1,22 +1,7 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = addAdminUserOnLogin; -const serverError_1 = __importDefault(require("./serverError")); -const DB_HANDLER_1 = __importDefault(require("../../utils/backend/global-db/DB_HANDLER")); -const addDbEntry_1 = __importDefault(require("./db/addDbEntry")); -const LOCAL_DB_HANDLER_1 = __importDefault(require("../../utils/backend/global-db/LOCAL_DB_HANDLER")); +import serverError from "./serverError"; +import DB_HANDLER from "../../utils/backend/global-db/DB_HANDLER"; +import addDbEntry from "./db/addDbEntry"; +import LOCAL_DB_HANDLER from "../../utils/backend/global-db/LOCAL_DB_HANDLER"; /** * Add Admin User on Login * ============================================================================== @@ -25,88 +10,86 @@ const LOCAL_DB_HANDLER_1 = __importDefault(require("../../utils/backend/global-d * admin user. 
This fires when the invited user has been logged in or a new account * has been created for the invited user */ -function addAdminUserOnLogin(_a) { - return __awaiter(this, arguments, void 0, function* ({ query, user, }) { - var _b; - try { - const finalDbHandler = global.DSQL_USE_LOCAL - ? LOCAL_DB_HANDLER_1.default - : DB_HANDLER_1.default; - const { invite, database_access, priviledge, email } = query; - const lastInviteTimeQuery = `SELECT date_created_code FROM invitations WHERE inviting_user_id=? AND invited_user_email=?`; - const lastInviteTimeValues = [invite, email]; - const lastInviteTimeArray = yield finalDbHandler(lastInviteTimeQuery, lastInviteTimeValues); - if (!lastInviteTimeArray || !lastInviteTimeArray[0]) { - throw new Error("No Invitation Found"); +export default async function addAdminUserOnLogin({ query, user, }) { + var _a; + try { + const finalDbHandler = global.DSQL_USE_LOCAL + ? LOCAL_DB_HANDLER + : DB_HANDLER; + const { invite, database_access, priviledge, email } = query; + const lastInviteTimeQuery = `SELECT date_created_code FROM invitations WHERE inviting_user_id=? AND invited_user_email=?`; + const lastInviteTimeValues = [invite, email]; + const lastInviteTimeArray = await finalDbHandler(lastInviteTimeQuery, lastInviteTimeValues); + if (!lastInviteTimeArray || !lastInviteTimeArray[0]) { + throw new Error("No Invitation Found"); + } + //////////////////////////////////////////////// + //////////////////////////////////////////////// + //////////////////////////////////////////////// + const invitingUserDbQuery = `SELECT first_name,last_name,email FROM users WHERE id=?`; + const invitingUserDbValues = [invite]; + const invitingUserDb = await finalDbHandler(invitingUserDbQuery, invitingUserDbValues); + if (invitingUserDb === null || invitingUserDb === void 0 ? void 0 : invitingUserDb[0]) { + const existingUserUser = await finalDbHandler(`SELECT email FROM user_users WHERE user_id=? AND invited_user_id=? AND user_type='admin' AND email=?`, [invite, user.id, email]); + if (existingUserUser === null || existingUserUser === void 0 ? void 0 : existingUserUser[0]) { + console.log("User already added"); } - //////////////////////////////////////////////// - //////////////////////////////////////////////// - //////////////////////////////////////////////// - const invitingUserDbQuery = `SELECT first_name,last_name,email FROM users WHERE id=?`; - const invitingUserDbValues = [invite]; - const invitingUserDb = yield finalDbHandler(invitingUserDbQuery, invitingUserDbValues); - if (invitingUserDb === null || invitingUserDb === void 0 ? void 0 : invitingUserDb[0]) { - const existingUserUser = yield finalDbHandler(`SELECT email FROM user_users WHERE user_id=? AND invited_user_id=? AND user_type='admin' AND email=?`, [invite, user.id, email]); - if (existingUserUser === null || existingUserUser === void 0 ? 
void 0 : existingUserUser[0]) { - console.log("User already added"); - } - else { - (0, addDbEntry_1.default)({ - dbFullName: "datasquirel", - tableName: "user_users", - data: { - user_id: invite, - invited_user_id: user.id, - database_access: database_access, - first_name: user.first_name, - last_name: user.last_name, - phone: user.phone, - email: user.email, - username: user.username, - user_type: "admin", - user_priviledge: priviledge, - image: user.image, - image_thumbnail: user.image_thumbnail, - }, - }); - //////////////////////////////////////////////// - //////////////////////////////////////////////// - //////////////////////////////////////////////// - const dbTableData = yield finalDbHandler(`SELECT db_tables_data FROM invitations WHERE inviting_user_id=? AND invited_user_email=?`, [invite, email]); - const clearEntries = yield finalDbHandler(`DELETE FROM delegated_user_tables WHERE root_user_id=? AND delegated_user_id=?`, [invite, user.id]); - //////////////////////////////////////////////// - //////////////////////////////////////////////// - //////////////////////////////////////////////// - if (dbTableData && dbTableData[0]) { - const dbTableEntries = dbTableData[0].db_tables_data.split("|"); - for (let i = 0; i < dbTableEntries.length; i++) { - const dbTableEntry = dbTableEntries[i]; - const dbTableEntryArray = dbTableEntry.split("-"); - const [db_slug, table_slug] = dbTableEntryArray; - const newEntry = yield (0, addDbEntry_1.default)({ - dbFullName: "datasquirel", - tableName: "delegated_user_tables", - data: { - delegated_user_id: user.id, - root_user_id: invite, - database: db_slug, - table: table_slug, - priviledge: priviledge, - }, - }); - } + else { + addDbEntry({ + dbFullName: "datasquirel", + tableName: "user_users", + data: { + user_id: invite, + invited_user_id: user.id, + database_access: database_access, + first_name: user.first_name, + last_name: user.last_name, + phone: user.phone, + email: user.email, + username: user.username, + user_type: "admin", + user_priviledge: priviledge, + image: user.image, + image_thumbnail: user.image_thumbnail, + }, + }); + //////////////////////////////////////////////// + //////////////////////////////////////////////// + //////////////////////////////////////////////// + const dbTableData = await finalDbHandler(`SELECT db_tables_data FROM invitations WHERE inviting_user_id=? AND invited_user_email=?`, [invite, email]); + const clearEntries = await finalDbHandler(`DELETE FROM delegated_user_tables WHERE root_user_id=? AND delegated_user_id=?`, [invite, user.id]); + //////////////////////////////////////////////// + //////////////////////////////////////////////// + //////////////////////////////////////////////// + if (dbTableData && dbTableData[0]) { + const dbTableEntries = dbTableData[0].db_tables_data.split("|"); + for (let i = 0; i < dbTableEntries.length; i++) { + const dbTableEntry = dbTableEntries[i]; + const dbTableEntryArray = dbTableEntry.split("-"); + const [db_slug, table_slug] = dbTableEntryArray; + const newEntry = await addDbEntry({ + dbFullName: "datasquirel", + tableName: "delegated_user_tables", + data: { + delegated_user_id: user.id, + root_user_id: invite, + database: db_slug, + table: table_slug, + priviledge: priviledge, + }, + }); } } - const inviteAccepted = yield finalDbHandler(`UPDATE invitations SET invitation_status='Accepted' WHERE inviting_user_id=? 
AND invited_user_email=?`, [invite, email]); } + const inviteAccepted = await finalDbHandler(`UPDATE invitations SET invitation_status='Accepted' WHERE inviting_user_id=? AND invited_user_email=?`, [invite, email]); } - catch (error) { - (_b = global.ERROR_CALLBACK) === null || _b === void 0 ? void 0 : _b.call(global, `Add Admin User On Login Error`, error); - (0, serverError_1.default)({ - component: "addAdminUserOnLogin", - message: error.message, - user: user, - }); - } - }); + } + catch (error) { + (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `Add Admin User On Login Error`, error); + serverError({ + component: "addAdminUserOnLogin", + message: error.message, + user: user, + }); + } } diff --git a/dist/package-shared/functions/backend/addMariadbUser.js b/dist/package-shared/functions/backend/addMariadbUser.js index ce2793b..f4dc50b 100644 --- a/dist/package-shared/functions/backend/addMariadbUser.js +++ b/dist/package-shared/functions/backend/addMariadbUser.js @@ -1,70 +1,54 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = addMariadbUser; -const generate_password_1 = __importDefault(require("generate-password")); -const DB_HANDLER_1 = __importDefault(require("../../utils/backend/global-db/DB_HANDLER")); -const NO_DB_HANDLER_1 = __importDefault(require("../../utils/backend/global-db/NO_DB_HANDLER")); -const addDbEntry_1 = __importDefault(require("./db/addDbEntry")); -const encrypt_1 = __importDefault(require("../dsql/encrypt")); -const LOCAL_DB_HANDLER_1 = __importDefault(require("../../utils/backend/global-db/LOCAL_DB_HANDLER")); +import generator from "generate-password"; +import DB_HANDLER from "../../utils/backend/global-db/DB_HANDLER"; +import NO_DB_HANDLER from "../../utils/backend/global-db/NO_DB_HANDLER"; +import addDbEntry from "./db/addDbEntry"; +import encrypt from "../dsql/encrypt"; +import LOCAL_DB_HANDLER from "../../utils/backend/global-db/LOCAL_DB_HANDLER"; +import grabSQLKeyName from "../../utils/grab-sql-key-name"; /** * # Add Mariadb User */ -function addMariadbUser(_a) { - return __awaiter(this, arguments, void 0, function* ({ userId }) { - try { - const defaultMariadbUserHost = process.env.DSQL_DB_HOST || "127.0.0.1"; - const username = `dsql_user_${userId}`; - const password = generate_password_1.default.generate({ - length: 16, - numbers: true, - symbols: true, - uppercase: true, - exclude: "*#.'`\"", - }); - const encryptedPassword = (0, encrypt_1.default)({ data: password }); - const createMariadbUsersQuery = `CREATE USER IF NOT EXISTS '${username}'@'127.0.0.1' IDENTIFIED BY '${password}'`; - if (global.DSQL_USE_LOCAL) { - yield (0, LOCAL_DB_HANDLER_1.default)(createMariadbUsersQuery); - } - else { - yield (0, NO_DB_HANDLER_1.default)(createMariadbUsersQuery); - } - const updateUserQuery = `UPDATE users SET mariadb_user = ?, mariadb_host = '127.0.0.1', mariadb_pass = ? WHERE id = ?`; - const updateUserValues = [username, encryptedPassword, userId]; - const updateUser = global.DSQL_USE_LOCAL - ? yield (0, LOCAL_DB_HANDLER_1.default)(updateUserQuery, updateUserValues) - : yield (0, DB_HANDLER_1.default)(updateUserQuery, updateUserValues); - const addMariadbUser = yield (0, addDbEntry_1.default)({ - tableName: "mariadb_users", - data: { - user_id: userId, - username, - host: defaultMariadbUserHost, - password: encryptedPassword, - primary: "1", - grants: '[{"database":"*","table":"*","privileges":["ALL"]}]', - }, - dbContext: "Master", - }); - console.log(`User ${userId} SQL credentials successfully added.`); +export default async function addMariadbUser({ userId }) { + try { + const defaultMariadbUserHost = process.env.DSQL_DB_HOST || "127.0.0.1"; + const username = grabSQLKeyName({ type: "user", userId }); + const password = generator.generate({ + length: 16, + numbers: true, + symbols: true, + uppercase: true, + exclude: "*#.'`\"", + }); + const encryptedPassword = encrypt({ data: password }); + const createMariadbUsersQuery = `CREATE USER IF NOT EXISTS '${username}'@'127.0.0.1' IDENTIFIED BY '${password}'`; + if (global.DSQL_USE_LOCAL) { + await LOCAL_DB_HANDLER(createMariadbUsersQuery); } - catch ( /** @type {any} */error) { - console.log(`Error in adding SQL user in 'addMariadbUser' function =>`, error.message); + else { + await NO_DB_HANDLER(createMariadbUsersQuery); } - }); + const updateUserQuery = `UPDATE users SET mariadb_user = ?, mariadb_host = '127.0.0.1', mariadb_pass = ? 
WHERE id = ?`; + const updateUserValues = [username, encryptedPassword, userId]; + const updateUser = global.DSQL_USE_LOCAL + ? await LOCAL_DB_HANDLER(updateUserQuery, updateUserValues) + : await DB_HANDLER(updateUserQuery, updateUserValues); + const addMariadbUser = await addDbEntry({ + tableName: "mariadb_users", + data: { + user_id: userId, + username, + host: defaultMariadbUserHost, + password: encryptedPassword, + primary: "1", + grants: '[{"database":"*","table":"*","privileges":["ALL"]}]', + }, + dbContext: "Master", + }); + console.log(`User ${userId} SQL credentials successfully added.`); + } + catch ( /** @type {any} */error) { + console.log(`Error in adding SQL user in 'addMariadbUser' function =>`, error.message); + } } //////////////////////////////////////////////// //////////////////////////////////////////////// diff --git a/dist/package-shared/functions/backend/addUsersTableToDb.d.ts b/dist/package-shared/functions/backend/addUsersTableToDb.d.ts index b46a28c..922ae55 100644 --- a/dist/package-shared/functions/backend/addUsersTableToDb.d.ts +++ b/dist/package-shared/functions/backend/addUsersTableToDb.d.ts @@ -4,9 +4,10 @@ type Param = { payload?: { [s: string]: any; }; + dbId: string | number; }; /** * # Add User Table to Database */ -export default function addUsersTableToDb({ userId, database, payload, }: Param): Promise; +export default function addUsersTableToDb({ userId, database, payload, dbId, }: Param): Promise; export {}; diff --git a/dist/package-shared/functions/backend/addUsersTableToDb.js b/dist/package-shared/functions/backend/addUsersTableToDb.js index 573e61c..99ca312 100644 --- a/dist/package-shared/functions/backend/addUsersTableToDb.js +++ b/dist/package-shared/functions/backend/addUsersTableToDb.js @@ -1,81 +1,63 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = addUsersTableToDb; -const serverError_1 = __importDefault(require("./serverError")); -const DB_HANDLER_1 = __importDefault(require("../../utils/backend/global-db/DB_HANDLER")); -const grabUserSchemaData_1 = __importDefault(require("./grabUserSchemaData")); -const setUserSchemaData_1 = __importDefault(require("./setUserSchemaData")); -const addDbEntry_1 = __importDefault(require("./db/addDbEntry")); -const createDbFromSchema_1 = __importDefault(require("../../shell/createDbFromSchema")); -const LOCAL_DB_HANDLER_1 = __importDefault(require("../../utils/backend/global-db/LOCAL_DB_HANDLER")); -const grabNewUsersTableSchema_1 = __importDefault(require("./grabNewUsersTableSchema")); +import serverError from "./serverError"; +import DB_HANDLER from "../../utils/backend/global-db/DB_HANDLER"; +import addDbEntry from "./db/addDbEntry"; +import createDbFromSchema from "../../shell/createDbFromSchema"; +import LOCAL_DB_HANDLER from "../../utils/backend/global-db/LOCAL_DB_HANDLER"; +import grabNewUsersTableSchema from "./grabNewUsersTableSchema"; +import { grabPrimaryRequiredDbSchema, writeUpdatedDbSchema, } from "../../shell/createDbFromSchema/grab-required-database-schemas"; /** * # Add User Table to Database */ -function addUsersTableToDb(_a) { - return __awaiter(this, arguments, void 0, function* ({ userId, database, payload, }) { - try { - const dbFullName = database; - const userPreset = (0, grabNewUsersTableSchema_1.default)({ payload }); - if (!userPreset) - throw new Error("Couldn't Get User Preset!"); - const userSchemaData = (0, grabUserSchemaData_1.default)({ userId }); - if (!userSchemaData) - throw new Error("User schema data not found!"); - let targetDatabase = userSchemaData.find((db) => db.dbFullName === database); - if (!targetDatabase) { - throw new Error("Couldn't Find Target Database!"); - } - let existingTableIndex = targetDatabase === null || targetDatabase === void 0 ? void 0 : targetDatabase.tables.findIndex((table) => table.tableName === "users"); - if (typeof existingTableIndex == "number" && existingTableIndex > 0) { - targetDatabase.tables[existingTableIndex] = userPreset; - } - else { - targetDatabase.tables.push(userPreset); - } - (0, setUserSchemaData_1.default)({ schemaData: userSchemaData, userId }); - const targetDb = global.DSQL_USE_LOCAL - ? yield (0, LOCAL_DB_HANDLER_1.default)(`SELECT id FROM user_databases WHERE user_id=? AND db_slug=?`, [userId, database]) - : yield (0, DB_HANDLER_1.default)(`SELECT id FROM user_databases WHERE user_id=? AND db_slug=?`, [userId, database]); - if (targetDb === null || targetDb === void 0 ? 
void 0 : targetDb[0]) { - const newTableEntry = yield (0, addDbEntry_1.default)({ - dbFullName: "datasquirel", - tableName: "user_database_tables", - data: { - user_id: userId, - db_id: targetDb[0].id, - db_slug: targetDatabase.dbSlug, - table_name: "Users", - table_slug: "users", - }, - }); - } - const dbShellUpdate = yield (0, createDbFromSchema_1.default)({ - userId, - targetDatabase: dbFullName, - }); - return `Done!`; +export default async function addUsersTableToDb({ userId, database, payload, dbId, }) { + try { + const dbFullName = database; + const userPreset = grabNewUsersTableSchema({ payload }); + if (!userPreset) + throw new Error("Couldn't Get User Preset!"); + let targetDatabase = grabPrimaryRequiredDbSchema({ + dbId, + userId, + }); + if (!targetDatabase) { + throw new Error("Couldn't Find Target Database!"); } - catch ( /** @type {any} */error) { - console.log(`addUsersTableToDb.ts ERROR: ${error.message}`); - (0, serverError_1.default)({ - component: "addUsersTableToDb", - message: error.message, - user: { id: userId }, - }); - return error.message; + let existingTableIndex = targetDatabase === null || targetDatabase === void 0 ? void 0 : targetDatabase.tables.findIndex((table) => table.tableName === "users"); + if (typeof existingTableIndex == "number" && existingTableIndex > 0) { + targetDatabase.tables[existingTableIndex] = userPreset; } - }); + else { + targetDatabase.tables.push(userPreset); + } + writeUpdatedDbSchema({ dbSchema: targetDatabase, userId }); + const targetDb = global.DSQL_USE_LOCAL + ? await LOCAL_DB_HANDLER(`SELECT id FROM user_databases WHERE user_id=? AND db_slug=?`, [userId, database]) + : await DB_HANDLER(`SELECT id FROM user_databases WHERE user_id=? AND db_slug=?`, [userId, database]); + if (targetDb === null || targetDb === void 0 ? void 0 : targetDb[0]) { + const newTableEntry = await addDbEntry({ + dbFullName: "datasquirel", + tableName: "user_database_tables", + data: { + user_id: userId, + db_id: targetDb[0].id, + db_slug: targetDatabase.dbSlug, + table_name: "Users", + table_slug: "users", + }, + }); + } + const dbShellUpdate = await createDbFromSchema({ + userId, + targetDatabase: dbFullName, + }); + return `Done!`; + } + catch ( /** @type {any} */error) { + console.log(`addUsersTableToDb.ts ERROR: ${error.message}`); + serverError({ + component: "addUsersTableToDb", + message: error.message, + user: { id: userId }, + }); + return error.message; + } } diff --git a/dist/package-shared/functions/backend/api-cred.d.ts b/dist/package-shared/functions/backend/api-cred.d.ts index c88266c..0e2349c 100644 --- a/dist/package-shared/functions/backend/api-cred.d.ts +++ b/dist/package-shared/functions/backend/api-cred.d.ts @@ -1,6 +1,4 @@ -import { CheckApiCredentialsFn } from "../../types"; +export {}; /** * # Grap API Credentials */ -declare const grabApiCred: CheckApiCredentialsFn; -export default grabApiCred; diff --git a/dist/package-shared/functions/backend/api-cred.js b/dist/package-shared/functions/backend/api-cred.js index 4134ec2..d39a6e2 100644 --- a/dist/package-shared/functions/backend/api-cred.js +++ b/dist/package-shared/functions/backend/api-cred.js @@ -1,49 +1,47 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs_1 = __importDefault(require("fs")); -const decrypt_1 = __importDefault(require("../dsql/decrypt")); +export {}; /** * # Grap API Credentials */ -const grabApiCred = ({ key, database, table, user_id, media, }) => { - var _a, _b; - if (!key) - return null; - if (!user_id) - return null; - try { - const allowedKeysPath = process.env.DSQL_API_KEYS_PATH; - if (!allowedKeysPath) - throw new Error("process.env.DSQL_API_KEYS_PATH variable not found"); - const ApiJSON = (0, decrypt_1.default)({ encryptedString: key }); - const ApiObject = JSON.parse(ApiJSON || ""); - const isApiKeyValid = fs_1.default.existsSync(`${allowedKeysPath}/${ApiObject.sign}`); - if (String(ApiObject.user_id) !== String(user_id)) - return null; - if (!isApiKeyValid) - return null; - if (!ApiObject.target_database) - return ApiObject; - if (media) - return ApiObject; - if (!database && ApiObject.target_database) - return null; - const isDatabaseAllowed = (_a = ApiObject.target_database) === null || _a === void 0 ? void 0 : _a.split(",").includes(String(database)); - if (isDatabaseAllowed && !ApiObject.target_table) - return ApiObject; - if (isDatabaseAllowed && !table && ApiObject.target_table) - return null; - const isTableAllowed = (_b = ApiObject.target_table) === null || _b === void 0 ? void 0 : _b.split(",").includes(String(table)); - if (isTableAllowed) - return ApiObject; - return null; - } - catch (error) { - console.log(`api-cred ERROR: ${error.message}`); - return { error: `api-cred ERROR: ${error.message}` }; - } -}; -exports.default = grabApiCred; +// const grabApiCred: CheckApiCredentialsFn = ({ +// key, +// database, +// table, +// user_id, +// media, +// }) => { +// if (!key) return null; +// if (!user_id) return null; +// try { +// const allowedKeysPath = process.env.DSQL_API_KEYS_PATH; +// if (!allowedKeysPath) +// throw new Error( +// "process.env.DSQL_API_KEYS_PATH variable not found" +// ); +// const ApiJSON = decrypt({ encryptedString: key }); +// const ApiObject: import("../../types").ApiKeyObject = JSON.parse( +// ApiJSON || "" +// ); +// const isApiKeyValid = fs.existsSync( +// `${allowedKeysPath}/${ApiObject.sign}` +// ); +// if (String(ApiObject.user_id) !== String(user_id)) return null; +// if (!isApiKeyValid) return null; +// if (!ApiObject.target_database) return ApiObject; +// if (media) return ApiObject; +// if (!database && ApiObject.target_database) return null; +// const isDatabaseAllowed = ApiObject.target_database +// ?.split(",") +// .includes(String(database)); +// if (isDatabaseAllowed && !ApiObject.target_table) return ApiObject; +// if (isDatabaseAllowed && !table && ApiObject.target_table) return null; +// const isTableAllowed = ApiObject.target_table +// ?.split(",") +// .includes(String(table)); +// if (isTableAllowed) return ApiObject; +// return null; +// } catch (error: any) { +// console.log(`api-cred ERROR: ${error.message}`); +// return { error: `api-cred ERROR: ${error.message}` }; +// } +// }; +// export default grabApiCred; diff --git a/dist/package-shared/functions/backend/auth/write-auth-files.js b/dist/package-shared/functions/backend/auth/write-auth-files.js index 6a70118..f34026d 100644 --- a/dist/package-shared/functions/backend/auth/write-auth-files.js +++ b/dist/package-shared/functions/backend/auth/write-auth-files.js @@ -1,33 +1,26 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.checkAuthFile = exports.deleteAuthFile = exports.getAuthFile = exports.cleanupUserAuthFiles = exports.writeAuthFile = exports.initAuthFiles = exports.grabAuthDirs = void 0; -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const ejson_1 = __importDefault(require("../../../utils/ejson")); -const debug_log_1 = __importDefault(require("../../../utils/logging/debug-log")); +import fs from "fs"; +import path from "path"; +import EJSON from "../../../utils/ejson"; +import debugLog from "../../../utils/logging/debug-log"; function debugFn(log, label) { - (0, debug_log_1.default)({ log, addTime: true, title: "write-auth-files", label }); + debugLog({ log, addTime: true, title: "write-auth-files", label }); } -const grabAuthDirs = () => { +export const grabAuthDirs = () => { const DSQL_AUTH_DIR = process.env.DSQL_AUTH_DIR; const ROOT_DIR = (DSQL_AUTH_DIR === null || DSQL_AUTH_DIR === void 0 ? void 0 : DSQL_AUTH_DIR.match(/./)) ? DSQL_AUTH_DIR - : path_1.default.resolve(process.cwd(), "./.tmp"); - const AUTH_DIR = path_1.default.join(ROOT_DIR, "logins"); + : path.resolve(process.cwd(), "./.tmp"); + const AUTH_DIR = path.join(ROOT_DIR, "logins"); return { root: ROOT_DIR, auth: AUTH_DIR }; }; -exports.grabAuthDirs = grabAuthDirs; -const initAuthFiles = () => { +export const initAuthFiles = () => { var _a; try { - const authDirs = (0, exports.grabAuthDirs)(); - if (!fs_1.default.existsSync(authDirs.root)) - fs_1.default.mkdirSync(authDirs.root, { recursive: true }); - if (!fs_1.default.existsSync(authDirs.auth)) - fs_1.default.mkdirSync(authDirs.auth, { recursive: true }); + const authDirs = grabAuthDirs(); + if (!fs.existsSync(authDirs.root)) + fs.mkdirSync(authDirs.root, { recursive: true }); + if (!fs.existsSync(authDirs.auth)) + fs.mkdirSync(authDirs.auth, { recursive: true }); return true; } catch (error) { @@ -36,18 +29,17 @@ const initAuthFiles = () => { return false; } }; -exports.initAuthFiles = initAuthFiles; /** * # Write Auth Files */ -const writeAuthFile = (name, data, cleanup) => { - (0, exports.initAuthFiles)(); +export const writeAuthFile = (name, data, cleanup) => { + initAuthFiles(); try { - const { auth } = (0, exports.grabAuthDirs)(); + const { auth } = grabAuthDirs(); if (cleanup) { - (0, exports.cleanupUserAuthFiles)(cleanup.userId); + cleanupUserAuthFiles(cleanup.userId); } - fs_1.default.writeFileSync(path_1.default.join(auth, name), data); + fs.writeFileSync(path.join(auth, name), data); return true; } catch (error) { @@ -55,22 +47,21 @@ const writeAuthFile = (name, data, cleanup) => { return false; } }; -exports.writeAuthFile = writeAuthFile; /** * # Clean up User Auth Files */ -const cleanupUserAuthFiles = (userId) => { - (0, exports.initAuthFiles)(); +export const cleanupUserAuthFiles = (userId) => { + initAuthFiles(); try { - const { auth } = (0, exports.grabAuthDirs)(); - const loginFiles = fs_1.default.readdirSync(auth); + const { auth } = grabAuthDirs(); + const loginFiles = fs.readdirSync(auth); for (let i = 0; i < loginFiles.length; i++) { const loginFile = loginFiles[i]; - const loginFilePath = path_1.default.join(auth, loginFile); + const loginFilePath = path.join(auth, loginFile); try { - const authPayload = ejson_1.default.parse(fs_1.default.readFileSync(loginFilePath, "utf-8")); + const authPayload = EJSON.parse(fs.readFileSync(loginFilePath, "utf-8")); if (authPayload.id == userId) { - 
fs_1.default.unlinkSync(loginFilePath); + fs.unlinkSync(loginFilePath); } } catch (error) { } @@ -82,42 +73,39 @@ const cleanupUserAuthFiles = (userId) => { return false; } }; -exports.cleanupUserAuthFiles = cleanupUserAuthFiles; /** * # Get Auth Files */ -const getAuthFile = (name) => { +export const getAuthFile = (name) => { try { - const authFilePath = path_1.default.join((0, exports.grabAuthDirs)().auth, name); - return fs_1.default.readFileSync(authFilePath, "utf-8"); + const authFilePath = path.join(grabAuthDirs().auth, name); + return fs.readFileSync(authFilePath, "utf-8"); } catch (error) { console.log(`Error getting Auth File: ${error.message}`); return null; } }; -exports.getAuthFile = getAuthFile; /** * # Delete Auth Files * @param {string} name */ -const deleteAuthFile = (name) => { +export const deleteAuthFile = (name) => { try { - return fs_1.default.rmSync(path_1.default.join((0, exports.grabAuthDirs)().auth, name)); + return fs.rmSync(path.join(grabAuthDirs().auth, name)); } catch (error) { console.log(`Error deleting Auth File: ${error.message}`); return null; } }; -exports.deleteAuthFile = deleteAuthFile; /** * # Delete Auth Files * @param {string} name */ -const checkAuthFile = (name) => { +export const checkAuthFile = (name) => { try { - return fs_1.default.existsSync(path_1.default.join((0, exports.grabAuthDirs)().auth, name)); + return fs.existsSync(path.join(grabAuthDirs().auth, name)); return true; } catch (error) { @@ -125,4 +113,3 @@ const checkAuthFile = (name) => { return false; } }; -exports.checkAuthFile = checkAuthFile; diff --git a/dist/package-shared/functions/backend/cookies/get-auth-cookie-names.js b/dist/package-shared/functions/backend/cookies/get-auth-cookie-names.js index 7c68391..f2a9ee3 100644 --- a/dist/package-shared/functions/backend/cookies/get-auth-cookie-names.js +++ b/dist/package-shared/functions/backend/cookies/get-auth-cookie-names.js @@ -1,21 +1,16 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = getAuthCookieNames; -const get_csrf_header_name_1 = __importDefault(require("../../../actions/get-csrf-header-name")); +import getCsrfHeaderName from "../../../actions/get-csrf-header-name"; +import { AppNames } from "../../../dict/app-names"; /** * # Grab Auth Cookie Names */ -function getAuthCookieNames(params) { +export default function getAuthCookieNames(params) { var _a, _b; const cookiesPrefix = process.env.DSQL_COOKIES_PREFIX || "dsql_"; const cookiesKeyName = process.env.DSQL_COOKIES_KEY_NAME || "key"; - const cookiesCSRFName = (0, get_csrf_header_name_1.default)(); + const cookiesCSRFName = getCsrfHeaderName(); const cookieOneTimeCodeName = process.env.DSQL_COOKIES_ONE_TIME_CODE_NAME || "one-time-code"; - const targetDatabase = ((_a = params === null || params === void 0 ? void 0 : params.database) === null || _a === void 0 ? void 0 : _a.replace(/^datasquirel_user_\d+_/, "")) || - ((_b = process.env.DSQL_DB_NAME) === null || _b === void 0 ? void 0 : _b.replace(/^datasquirel_user_\d+_/, "")); + const targetDatabase = ((_a = params === null || params === void 0 ? void 0 : params.database) === null || _a === void 0 ? void 0 : _a.replace(new RegExp(`^${AppNames["DsqlDbPrefix"]}\\d+_`), "")) || + ((_b = process.env.DSQL_DB_NAME) === null || _b === void 0 ? 
void 0 : _b.replace(new RegExp(`^${AppNames["DsqlDbPrefix"]}\\d+_`), "")); let keyCookieName = cookiesPrefix; if (params === null || params === void 0 ? void 0 : params.userId) keyCookieName += `user_${params.userId}_`; diff --git a/dist/package-shared/functions/backend/createDbSchemaFromDb.d.ts b/dist/package-shared/functions/backend/createDbSchemaFromDb.d.ts index 4b6c94b..2a55d38 100644 --- a/dist/package-shared/functions/backend/createDbSchemaFromDb.d.ts +++ b/dist/package-shared/functions/backend/createDbSchemaFromDb.d.ts @@ -2,6 +2,7 @@ import { DSQL_DATASQUIREL_USER_DATABASES } from "../../types/dsql"; type Params = { userId: number | string; database: DSQL_DATASQUIREL_USER_DATABASES; + dbId?: string | number; }; -export default function createDbSchemaFromDb({ userId, database, }: Params): Promise; +export default function createDbSchemaFromDb({ userId, database, dbId, }: Params): Promise; export {}; diff --git a/dist/package-shared/functions/backend/createDbSchemaFromDb.js b/dist/package-shared/functions/backend/createDbSchemaFromDb.js index f9bcf60..755b9d5 100644 --- a/dist/package-shared/functions/backend/createDbSchemaFromDb.js +++ b/dist/package-shared/functions/backend/createDbSchemaFromDb.js @@ -1,132 +1,120 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = createDbSchemaFromDb; -const varDatabaseDbHandler_1 = __importDefault(require("../../functions/backend/varDatabaseDbHandler")); -const grabUserSchemaData_1 = __importDefault(require("../../functions/backend/grabUserSchemaData")); -const setUserSchemaData_1 = __importDefault(require("../../functions/backend/setUserSchemaData")); -const addDbEntry_1 = __importDefault(require("../../functions/backend/db/addDbEntry")); -const slugToCamelTitle_1 = __importDefault(require("../../shell/utils/slugToCamelTitle")); -function createDbSchemaFromDb(_a) { - return __awaiter(this, arguments, void 0, function* ({ userId, database, }) { - var _b, _c, _d, _e, _f, _g; - try { - if (!userId) { - console.log("No user Id provided"); - return; - } - const userSchemaData = (0, grabUserSchemaData_1.default)({ userId }); - if (!userSchemaData) - throw new Error("User schema data not found!"); - const targetDb = userSchemaData.filter((dbObject) => dbObject.dbFullName === database.db_full_name)[0]; - const existingTables = yield (0, varDatabaseDbHandler_1.default)({ - database: database.db_full_name, - queryString: `SHOW TABLES FROM ${database.db_full_name}`, +import varDatabaseDbHandler from "../../functions/backend/varDatabaseDbHandler"; +import addDbEntry from "../../functions/backend/db/addDbEntry"; +import slugToCamelTitle from "../../shell/utils/slugToCamelTitle"; +import grabDSQLSchemaIndexComment from "../../shell/utils/grab-dsql-schema-index-comment"; +import { grabPrimaryRequiredDbSchema, writeUpdatedDbSchema, } from "../../shell/createDbFromSchema/grab-required-database-schemas"; +import _n from "../../utils/numberfy"; +import dataTypeParser from "../../utils/db/schema/data-type-parser"; +import dataTypeConstructor from "../../utils/db/schema/data-type-constructor"; +export default async function createDbSchemaFromDb({ userId, database, dbId, }) { + var _a, _b, _c, _d, _e, _f; + try { + if (!userId) { + console.log("No user Id provided"); + return; + } + const targetDb = grabPrimaryRequiredDbSchema({ + userId, + dbId: database.db_schema_id || dbId, + }); + if (!targetDb) + throw new Error(`Target Db not found!`); + const existingTables = await varDatabaseDbHandler({ + database: database.db_full_name, + queryString: `SHOW TABLES FROM ${database.db_full_name}`, + }); + if (!existingTables) + throw new Error("No Existing Tables"); + for (let i = 0; i < existingTables.length; i++) { + const table = existingTables[i]; + const tableName = Object.values(table)[0]; + const tableInsert = await addDbEntry({ + dbFullName: "datasquirel", + tableName: "user_database_tables", + data: { + user_id: _n(userId), + db_id: database.id, + db_slug: database.db_slug, + table_name: slugToCamelTitle(tableName) || undefined, + table_slug: tableName, + }, }); - if (!existingTables) - throw new Error("No Existing Tables"); - for (let i = 0; i < existingTables.length; i++) { - const table = existingTables[i]; - const tableName = Object.values(table)[0]; - const tableInsert = yield (0, addDbEntry_1.default)({ - dbFullName: "datasquirel", - tableName: "user_database_tables", - data: { - user_id: userId, - db_id: database.id, - db_slug: database.db_slug, - table_name: (0, slugToCamelTitle_1.default)(tableName), - table_slug: tableName, - }, - }); - const tableObject = { - tableName: tableName, - tableFullName: (0, slugToCamelTitle_1.default)(tableName) || "", - fields: [], - indexes: [], - }; - const tableColumns = yield (0, 
varDatabaseDbHandler_1.default)({ - database: database.db_full_name, - queryString: `SHOW COLUMNS FROM ${database.db_full_name}.${tableName}`, - }); - if (tableColumns) { - for (let k = 0; k < tableColumns.length; k++) { - const tableColumn = tableColumns[k]; - const { Field, Type, Null, Key, Default, Extra } = tableColumn; - const fieldObject = { - fieldName: Field, - dataType: Type.toUpperCase(), - }; - if (Null === null || Null === void 0 ? void 0 : Null.match(/^no$/i)) - fieldObject.notNullValue = true; - if (Key === null || Key === void 0 ? void 0 : Key.match(/^pri$/i)) - fieldObject.primaryKey = true; - if ((_b = Default === null || Default === void 0 ? void 0 : Default.toString()) === null || _b === void 0 ? void 0 : _b.match(/./)) - fieldObject.defaultValue = Default; - if ((_c = Default === null || Default === void 0 ? void 0 : Default.toString()) === null || _c === void 0 ? void 0 : _c.match(/timestamp/i)) { - delete fieldObject.defaultValue; - fieldObject.defaultValueLiteral = Default; - } - if ((_d = Extra === null || Extra === void 0 ? void 0 : Extra.toString()) === null || _d === void 0 ? void 0 : _d.match(/auto_increment/i)) - fieldObject.autoIncrement = true; - tableObject.fields.push(fieldObject); + const tableObject = { + tableName: tableName, + fields: [], + indexes: [], + }; + const tableColumns = await varDatabaseDbHandler({ + database: database.db_full_name, + queryString: `SHOW COLUMNS FROM ${database.db_full_name}.${tableName}`, + }); + if (tableColumns) { + for (let k = 0; k < tableColumns.length; k++) { + const tableColumn = tableColumns[k]; + const { Field, Type, Null, Key, Default, Extra } = tableColumn; + const parsedDataType = dataTypeParser(Type.toUpperCase()); + const fieldObject = { + fieldName: Field, + dataType: dataTypeConstructor(parsedDataType.type, parsedDataType.limit, parsedDataType.decimal), + }; + if (Null === null || Null === void 0 ? void 0 : Null.match(/^no$/i)) + fieldObject.notNullValue = true; + if (Key === null || Key === void 0 ? void 0 : Key.match(/^pri$/i)) + fieldObject.primaryKey = true; + if ((_a = Default === null || Default === void 0 ? void 0 : Default.toString()) === null || _a === void 0 ? void 0 : _a.match(/./)) + fieldObject.defaultValue = Default; + if ((_b = Default === null || Default === void 0 ? void 0 : Default.toString()) === null || _b === void 0 ? void 0 : _b.match(/timestamp/i)) { + delete fieldObject.defaultValue; + fieldObject.defaultValueLiteral = Default; } + if ((_c = Extra === null || Extra === void 0 ? void 0 : Extra.toString()) === null || _c === void 0 ? void 0 : _c.match(/auto_increment/i)) + fieldObject.autoIncrement = true; + tableObject.fields.push(fieldObject); } - const tableIndexes = yield (0, varDatabaseDbHandler_1.default)({ - database: database.db_full_name, - queryString: `SHOW INDEXES FROM ${database.db_full_name}.${tableName}`, - }); - if (tableIndexes) { - for (let m = 0; m < tableIndexes.length; m++) { - const indexObject = tableIndexes[m]; - const { Table, Key_name, Column_name, Null, Index_type, Index_comment, } = indexObject; - if (!(Index_comment === null || Index_comment === void 0 ? void 0 : Index_comment.match(/^schema_index$/))) - continue; - const indexNewObject = { - indexType: (Index_type === null || Index_type === void 0 ? void 0 : Index_type.match(/fulltext/i)) - ? "fullText" - : "regular", - indexName: Key_name, - indexTableFields: [], - }; - const targetTableFieldObject = tableColumns === null || tableColumns === void 0 ? 
void 0 : tableColumns.filter((col) => col.Field === Column_name)[0]; - const existingIndexField = (_e = tableObject.indexes) === null || _e === void 0 ? void 0 : _e.filter((indx) => indx.indexName == Key_name); - if (existingIndexField && existingIndexField[0]) { - (_f = existingIndexField[0].indexTableFields) === null || _f === void 0 ? void 0 : _f.push({ + } + const tableIndexes = await varDatabaseDbHandler({ + database: database.db_full_name, + queryString: `SHOW INDEXES FROM ${database.db_full_name}.${tableName}`, + }); + if (tableIndexes) { + for (let m = 0; m < tableIndexes.length; m++) { + const indexObject = tableIndexes[m]; + const { Table, Key_name, Column_name, Null, Index_type, Index_comment, } = indexObject; + if (!(Index_comment === null || Index_comment === void 0 ? void 0 : Index_comment.match(new RegExp(grabDSQLSchemaIndexComment())))) + continue; + const indexNewObject = { + indexType: (Index_type === null || Index_type === void 0 ? void 0 : Index_type.match(/fulltext/i)) + ? "full_text" + : "regular", + indexName: Key_name, + indexTableFields: [], + }; + const targetTableFieldObject = tableColumns === null || tableColumns === void 0 ? void 0 : tableColumns.filter((col) => col.Field === Column_name)[0]; + const existingIndexField = (_d = tableObject.indexes) === null || _d === void 0 ? void 0 : _d.filter((indx) => indx.indexName == Key_name); + if (existingIndexField && existingIndexField[0]) { + (_e = existingIndexField[0].indexTableFields) === null || _e === void 0 ? void 0 : _e.push({ + value: Column_name, + dataType: targetTableFieldObject.Type.toUpperCase(), + }); + } + else { + indexNewObject.indexTableFields = [ + { value: Column_name, dataType: targetTableFieldObject.Type.toUpperCase(), - }); - } - else { - indexNewObject.indexTableFields = [ - { - value: Column_name, - dataType: targetTableFieldObject.Type.toUpperCase(), - }, - ]; - (_g = tableObject.indexes) === null || _g === void 0 ? void 0 : _g.push(indexNewObject); - } + }, + ]; + (_f = tableObject.indexes) === null || _f === void 0 ? void 0 : _f.push(indexNewObject); } } - targetDb.tables.push(tableObject); } - (0, setUserSchemaData_1.default)({ schemaData: userSchemaData, userId }); - return true; + targetDb.tables.push(tableObject); } - catch (error) { - console.log(error); - return false; - } - }); + writeUpdatedDbSchema({ dbSchema: targetDb, userId }); + return true; + } + catch (error) { + console.log(error); + return false; + } } diff --git a/dist/package-shared/functions/backend/db/addDbEntry.d.ts b/dist/package-shared/functions/backend/db/addDbEntry.d.ts index 71f604a..77d71ef 100644 --- a/dist/package-shared/functions/backend/db/addDbEntry.d.ts +++ b/dist/package-shared/functions/backend/db/addDbEntry.d.ts @@ -1,16 +1,21 @@ import { DbContextsArray } from "./runQuery"; -import { PostInsertReturn } from "../../../types"; -type Param = { +} = any, K extends string = string> = { dbContext?: (typeof DbContextsArray)[number]; paradigm?: "Read Only" | "Full Access"; dbFullName?: string; - tableName: string; - data: T; - tableSchema?: import("../../../types").DSQL_TableSchemaType; - duplicateColumnName?: string; - duplicateColumnValue?: string; + tableName: K; + data?: T; + batchData?: T[]; + tableSchema?: DSQL_TableSchemaType; + duplicateColumnName?: keyof T; + duplicateColumnValue?: string | number; + /** + * Update Entry if a duplicate is found. 
+ * Requires `duplicateColumnName` and `duplicateColumnValue` parameters + */ update?: boolean; encryptionKey?: string; encryptionSalt?: string; @@ -22,5 +27,4 @@ type Param({ dbContext, paradigm, dbFullName, tableName, data, tableSchema, duplicateColumnName, duplicateColumnValue, update, encryptionKey, encryptionSalt, forceLocal, debug, }: Param): Promise; -export {}; +} = any, K extends string = string>({ dbContext, paradigm, dbFullName, tableName, data, batchData, tableSchema, duplicateColumnName, duplicateColumnValue, update, encryptionKey, encryptionSalt, forceLocal, debug, }: AddDbEntryParam): Promise>; diff --git a/dist/package-shared/functions/backend/db/addDbEntry.js b/dist/package-shared/functions/backend/db/addDbEntry.js index 20a1c0c..3571a47 100644 --- a/dist/package-shared/functions/backend/db/addDbEntry.js +++ b/dist/package-shared/functions/backend/db/addDbEntry.js @@ -1,100 +1,89 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = addDbEntry; -const sanitize_html_1 = __importDefault(require("sanitize-html")); -const sanitizeHtmlOptions_1 = __importDefault(require("../html/sanitizeHtmlOptions")); -const updateDbEntry_1 = __importDefault(require("./updateDbEntry")); -const encrypt_1 = __importDefault(require("../../dsql/encrypt")); -const conn_db_handler_1 = __importDefault(require("../../../utils/db/conn-db-handler")); -const check_if_is_master_1 = __importDefault(require("../../../utils/check-if-is-master")); -const debug_log_1 = __importDefault(require("../../../utils/logging/debug-log")); +import sanitizeHtml from "sanitize-html"; +import sanitizeHtmlOptions from "../html/sanitizeHtmlOptions"; +import updateDbEntry from "./updateDbEntry"; +import _ from "lodash"; +import encrypt from "../../dsql/encrypt"; +import connDbHandler from "../../../utils/db/conn-db-handler"; +import checkIfIsMaster from "../../../utils/check-if-is-master"; +import debugLog from "../../../utils/logging/debug-log"; +import purgeDefaultFields from "../../../utils/purge-default-fields"; /** * Add a db Entry Function */ -function addDbEntry(_a) { - return __awaiter(this, arguments, void 0, function* ({ dbContext, paradigm, dbFullName, tableName, data, tableSchema, duplicateColumnName, duplicateColumnValue, update, encryptionKey, encryptionSalt, forceLocal, debug, }) { - var _b, _c, _d; - const isMaster = forceLocal - ? 
true - : (0, check_if_is_master_1.default)({ dbContext, dbFullName }); - if (debug) { - (0, debug_log_1.default)({ - log: isMaster, - addTime: true, - label: "isMaster", +export default async function addDbEntry({ dbContext, paradigm, dbFullName, tableName, data, batchData, tableSchema, duplicateColumnName, duplicateColumnValue, update, encryptionKey, encryptionSalt, forceLocal, debug, }) { + const isMaster = forceLocal + ? true + : checkIfIsMaster({ dbContext, dbFullName }); + if (debug) { + debugLog({ + log: isMaster, + addTime: true, + label: "isMaster", + }); + } + const DB_CONN = isMaster + ? global.DSQL_DB_CONN + : global.DSQL_FULL_ACCESS_DB_CONN || global.DSQL_DB_CONN; + const DB_RO_CONN = isMaster + ? global.DSQL_DB_CONN + : global.DSQL_READ_ONLY_DB_CONN || global.DSQL_DB_CONN; + let newData = _.cloneDeep(data); + if (newData) { + newData = purgeDefaultFields(newData); + } + let newBatchData = _.cloneDeep(batchData); + if (newBatchData) { + newBatchData = purgeDefaultFields(newBatchData); + } + if (duplicateColumnName && + typeof duplicateColumnName === "string" && + newData) { + const checkDuplicateQuery = `SELECT * FROM ${isMaster ? "" : `\`${dbFullName}\`.`}\`${tableName}\` WHERE \`${duplicateColumnName}\`=?`; + const duplicateValue = await connDbHandler(DB_RO_CONN, checkDuplicateQuery, [duplicateColumnValue]); + if ((duplicateValue === null || duplicateValue === void 0 ? void 0 : duplicateValue[0]) && !update) { + return { + success: false, + payload: undefined, + msg: "Duplicate entry found", + }; + } + else if ((duplicateValue === null || duplicateValue === void 0 ? void 0 : duplicateValue[0]) && update) { + return await updateDbEntry({ + dbContext, + dbFullName, + tableName, + data: newData, + tableSchema, + encryptionKey, + encryptionSalt, + identifierColumnName: duplicateColumnName, + identifierValue: duplicateColumnValue || "", }); } - const DB_CONN = isMaster - ? global.DSQL_DB_CONN - : global.DSQL_FULL_ACCESS_DB_CONN || global.DSQL_DB_CONN; - const DB_RO_CONN = isMaster - ? global.DSQL_DB_CONN - : global.DSQL_READ_ONLY_DB_CONN || global.DSQL_DB_CONN; - if (data === null || data === void 0 ? void 0 : data["date_created_timestamp"]) - delete data["date_created_timestamp"]; - if (data === null || data === void 0 ? void 0 : data["date_updated_timestamp"]) - delete data["date_updated_timestamp"]; - if (data === null || data === void 0 ? void 0 : data["date_updated"]) - delete data["date_updated"]; - if (data === null || data === void 0 ? void 0 : data["date_updated_code"]) - delete data["date_updated_code"]; - if (data === null || data === void 0 ? void 0 : data["date_created"]) - delete data["date_created"]; - if (data === null || data === void 0 ? void 0 : data["date_created_code"]) - delete data["date_created_code"]; - if (duplicateColumnName && typeof duplicateColumnName === "string") { - const checkDuplicateQuery = `SELECT * FROM ${isMaster ? "" : `\`${dbFullName}\`.`}\`${tableName}\` WHERE \`${duplicateColumnName}\`=?`; - const duplicateValue = yield (0, conn_db_handler_1.default)(DB_RO_CONN, checkDuplicateQuery, [duplicateColumnValue]); - if ((duplicateValue === null || duplicateValue === void 0 ? 
void 0 : duplicateValue[0]) && !update) { - return null; - } - else if (duplicateValue && duplicateValue[0] && update) { - return yield (0, updateDbEntry_1.default)({ - dbContext, - dbFullName, - tableName, - data, - tableSchema, - encryptionKey, - encryptionSalt, - identifierColumnName: duplicateColumnName, - identifierValue: duplicateColumnValue || "", - }); - } - } + } + function generateQuery(data) { + var _a, _b, _c; const dataKeys = Object.keys(data); let insertKeysArray = []; let insertValuesArray = []; for (let i = 0; i < dataKeys.length; i++) { try { const dataKey = dataKeys[i]; - let value = data === null || data === void 0 ? void 0 : data[dataKey]; + let value = data[dataKey]; const targetFieldSchemaArray = tableSchema - ? (_b = tableSchema === null || tableSchema === void 0 ? void 0 : tableSchema.fields) === null || _b === void 0 ? void 0 : _b.filter((field) => field.fieldName == dataKey) + ? (_a = tableSchema === null || tableSchema === void 0 ? void 0 : tableSchema.fields) === null || _a === void 0 ? void 0 : _a.filter((field) => field.fieldName == dataKey) : null; const targetFieldSchema = targetFieldSchemaArray && targetFieldSchemaArray[0] ? targetFieldSchemaArray[0] : null; if (value == null || value == undefined) continue; - if (((_c = targetFieldSchema === null || targetFieldSchema === void 0 ? void 0 : targetFieldSchema.dataType) === null || _c === void 0 ? void 0 : _c.match(/int$/i)) && + if (((_b = targetFieldSchema === null || targetFieldSchema === void 0 ? void 0 : targetFieldSchema.dataType) === null || _b === void 0 ? void 0 : _b.match(/int$/i)) && typeof value == "string" && !(value === null || value === void 0 ? void 0 : value.match(/./))) continue; if (targetFieldSchema === null || targetFieldSchema === void 0 ? void 0 : targetFieldSchema.encrypted) { - value = (0, encrypt_1.default)({ + value = encrypt({ data: value, encryptionKey, encryptionSalt, @@ -102,8 +91,9 @@ function addDbEntry(_a) { console.log("DSQL: Encrypted value =>", value); } const htmlRegex = /<[^>]+>/g; - if ((targetFieldSchema === null || targetFieldSchema === void 0 ? void 0 : targetFieldSchema.richText) || String(value).match(htmlRegex)) { - value = (0, sanitize_html_1.default)(value, sanitizeHtmlOptions_1.default); + if ((targetFieldSchema === null || targetFieldSchema === void 0 ? void 0 : targetFieldSchema.richText) || + String(value).match(htmlRegex)) { + value = sanitizeHtml(value, sanitizeHtmlOptions); } if (targetFieldSchema === null || targetFieldSchema === void 0 ? void 0 : targetFieldSchema.pattern) { const pattern = new RegExp(targetFieldSchema.pattern, targetFieldSchema.patternFlags || ""); @@ -125,55 +115,74 @@ function addDbEntry(_a) { } catch (error) { console.log("DSQL: Error in parsing data keys =>", error.message); - (_d = global.ERROR_CALLBACK) === null || _d === void 0 ? void 0 : _d.call(global, `Error parsing Data Keys`, error); + (_c = global.ERROR_CALLBACK) === null || _c === void 0 ? void 0 : _c.call(global, `Error parsing Data Keys`, error); continue; } } - if (!(data === null || data === void 0 ? void 0 : data["date_created"])) { - insertKeysArray.push("`date_created`"); - insertValuesArray.push(Date()); - } - if (!(data === null || data === void 0 ? void 0 : data["date_created_code"])) { - insertKeysArray.push("`date_created_code`"); - insertValuesArray.push(Date.now()); - } - if (!(data === null || data === void 0 ? 
void 0 : data["date_updated"])) { - insertKeysArray.push("`date_updated`"); - insertValuesArray.push(Date()); - } - if (!(data === null || data === void 0 ? void 0 : data["date_updated_code"])) { - insertKeysArray.push("`date_updated_code`"); - insertValuesArray.push(Date.now()); - } - const query = `INSERT INTO ${isMaster ? "" : `\`${dbFullName}\`.`}\`${tableName}\` (${insertKeysArray.join(",")}) VALUES (${insertValuesArray - .map(() => "?") - .join(",")})`; + insertKeysArray.push("`date_created`"); + insertValuesArray.push(Date()); + insertKeysArray.push("`date_created_code`"); + insertValuesArray.push(Date.now()); + insertKeysArray.push("`date_updated`"); + insertValuesArray.push(Date()); + insertKeysArray.push("`date_updated_code`"); + insertValuesArray.push(Date.now()); const queryValuesArray = insertValuesArray; - if (debug) { - (0, debug_log_1.default)({ - log: DB_CONN === null || DB_CONN === void 0 ? void 0 : DB_CONN.getConfig(), - addTime: true, - label: "DB_CONN Config", - }); - (0, debug_log_1.default)({ - log: query, - addTime: true, - label: "query", - }); - (0, debug_log_1.default)({ - log: queryValuesArray, - addTime: true, - label: "queryValuesArray", - }); + return { queryValuesArray, insertValuesArray, insertKeysArray }; + } + if (newData) { + const { insertKeysArray, insertValuesArray, queryValuesArray } = generateQuery(newData); + const query = `INSERT INTO ${isMaster && !dbFullName ? "" : `\`${dbFullName}\`.`}\`${tableName}\` (${insertKeysArray.join(",")}) VALUES (${insertValuesArray.map(() => "?").join(",")})`; + const newInsert = await connDbHandler(DB_CONN, query, queryValuesArray, debug); + return { + success: Boolean(newInsert === null || newInsert === void 0 ? void 0 : newInsert.insertId), + payload: newInsert, + queryObject: { + sql: query, + params: queryValuesArray, + }, + }; + } + else if (newBatchData) { + let batchInsertKeysArray; + let batchInsertValuesArray = []; + let batchQueryValuesArray = []; + for (let i = 0; i < newBatchData.length; i++) { + const singleBatchData = newBatchData[i]; + const { insertKeysArray, insertValuesArray, queryValuesArray } = generateQuery(singleBatchData); + if (!batchInsertKeysArray) { + batchInsertKeysArray = insertKeysArray; + } + batchInsertValuesArray.push(insertValuesArray); + batchQueryValuesArray.push(queryValuesArray); } - const newInsert = yield (0, conn_db_handler_1.default)(DB_CONN, query, queryValuesArray, debug); + const query = `INSERT INTO ${isMaster && !dbFullName ? "" : `\`${dbFullName}\`.`}\`${tableName}\` (${batchInsertKeysArray === null || batchInsertKeysArray === void 0 ? void 0 : batchInsertKeysArray.join(",")}) VALUES ${batchInsertValuesArray + .map((vl) => `(${vl.map(() => "?").join(",")})`) + .join(",")}`; + console.log("query", query); + console.log("batchQueryValuesArray", batchQueryValuesArray); + const newInsert = await connDbHandler(DB_CONN, query, batchQueryValuesArray.flat(), debug); if (debug) { - (0, debug_log_1.default)({ + debugLog({ log: newInsert, addTime: true, label: "newInsert", }); } - return newInsert; - }); + return { + success: Boolean(newInsert === null || newInsert === void 0 ? 
void 0 : newInsert.insertId), + payload: newInsert, + queryObject: { + sql: query, + params: batchQueryValuesArray.flat(), + }, + }; + } + else { + return { + success: false, + payload: undefined, + msg: "No data provided", + }; + } } diff --git a/dist/package-shared/functions/backend/db/deleteDbEntry.d.ts b/dist/package-shared/functions/backend/db/deleteDbEntry.d.ts index c7e4e73..b1b20c7 100644 --- a/dist/package-shared/functions/backend/db/deleteDbEntry.d.ts +++ b/dist/package-shared/functions/backend/db/deleteDbEntry.d.ts @@ -1,10 +1,13 @@ +import { DSQL_TableSchemaType, PostInsertReturn } from "../../../types"; import { DbContextsArray } from "./runQuery"; -type Param = { +type Param = { dbContext?: (typeof DbContextsArray)[number]; - dbFullName: string; - tableName: string; - tableSchema?: import("../../../types").DSQL_TableSchemaType; - identifierColumnName: string; + dbFullName?: string; + tableName: K; + tableSchema?: DSQL_TableSchemaType; + identifierColumnName: keyof T; identifierValue: string | number; forceLocal?: boolean; }; @@ -12,5 +15,7 @@ type Param = { * # Delete DB Entry Function * @description */ -export default function deleteDbEntry({ dbContext, dbFullName, tableName, identifierColumnName, identifierValue, forceLocal, }: Param): Promise; +export default function deleteDbEntry({ dbContext, dbFullName, tableName, identifierColumnName, identifierValue, forceLocal, }: Param): Promise; export {}; diff --git a/dist/package-shared/functions/backend/db/deleteDbEntry.js b/dist/package-shared/functions/backend/db/deleteDbEntry.js index 650ca6a..fd6c43a 100644 --- a/dist/package-shared/functions/backend/db/deleteDbEntry.js +++ b/dist/package-shared/functions/backend/db/deleteDbEntry.js @@ -1,54 +1,34 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = deleteDbEntry; -const check_if_is_master_1 = __importDefault(require("../../../utils/check-if-is-master")); -const conn_db_handler_1 = __importDefault(require("../../../utils/db/conn-db-handler")); +import checkIfIsMaster from "../../../utils/check-if-is-master"; +import connDbHandler from "../../../utils/db/conn-db-handler"; /** * # Delete DB Entry Function * @description */ -function deleteDbEntry(_a) { - return __awaiter(this, arguments, void 0, function* ({ dbContext, dbFullName, tableName, identifierColumnName, identifierValue, forceLocal, }) { - var _b; - try { - const isMaster = forceLocal - ? true - : (0, check_if_is_master_1.default)({ dbContext, dbFullName }); - const DB_CONN = isMaster - ? global.DSQL_DB_CONN - : global.DSQL_FULL_ACCESS_DB_CONN || global.DSQL_DB_CONN; - const DB_RO_CONN = isMaster - ? 
global.DSQL_DB_CONN - : global.DSQL_READ_ONLY_DB_CONN || global.DSQL_DB_CONN; - /** - * Execution - * - * @description - */ - const query = `DELETE FROM ${isMaster ? "" : `\`${dbFullName}\`.`}\`${tableName}\` WHERE \`${identifierColumnName}\`=?`; - const deletedEntry = yield (0, conn_db_handler_1.default)(DB_CONN, query, [ - identifierValue, - ]); - /** - * Return statement - */ - return deletedEntry; - } - catch (error) { - (_b = global.ERROR_CALLBACK) === null || _b === void 0 ? void 0 : _b.call(global, `Error Deleting Entry`, error); - return null; - } - }); +export default async function deleteDbEntry({ dbContext, dbFullName, tableName, identifierColumnName, identifierValue, forceLocal, }) { + var _a; + try { + const isMaster = forceLocal + ? true + : checkIfIsMaster({ dbContext, dbFullName }); + const DB_CONN = isMaster + ? global.DSQL_DB_CONN + : global.DSQL_FULL_ACCESS_DB_CONN || global.DSQL_DB_CONN; + /** + * Execution + * + * @description + */ + const query = `DELETE FROM ${isMaster && !dbFullName ? "" : `\`${dbFullName}\`.`}\`${tableName}\` WHERE \`${identifierColumnName.toString()}\`=?`; + const deletedEntry = await connDbHandler(DB_CONN, query, [ + identifierValue, + ]); + /** + * Return statement + */ + return deletedEntry; + } + catch (error) { + (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `Error Deleting Entry`, error); + return null; + } } diff --git a/dist/package-shared/functions/backend/db/pathTraversalCheck.js b/dist/package-shared/functions/backend/db/pathTraversalCheck.js index 66ca645..aa3f8a6 100644 --- a/dist/package-shared/functions/backend/db/pathTraversalCheck.js +++ b/dist/package-shared/functions/backend/db/pathTraversalCheck.js @@ -1,10 +1,7 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = pathTraversalCheck; /** * # Path Traversal Check * @returns {string} */ -function pathTraversalCheck(text) { +export default function pathTraversalCheck(text) { return text.toString().replace(/\//g, ""); } diff --git a/dist/package-shared/functions/backend/db/runQuery.js b/dist/package-shared/functions/backend/db/runQuery.js index 0a43ef5..76e52a3 100644 --- a/dist/package-shared/functions/backend/db/runQuery.js +++ b/dist/package-shared/functions/backend/db/runQuery.js @@ -1,155 +1,137 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.DbContextsArray = void 0; -exports.default = runQuery; -const fullAccessDbHandler_1 = __importDefault(require("../fullAccessDbHandler")); -const varReadOnlyDatabaseDbHandler_1 = __importDefault(require("../varReadOnlyDatabaseDbHandler")); -const serverError_1 = __importDefault(require("../serverError")); -const addDbEntry_1 = __importDefault(require("./addDbEntry")); -const updateDbEntry_1 = __importDefault(require("./updateDbEntry")); -const deleteDbEntry_1 = __importDefault(require("./deleteDbEntry")); -const trim_sql_1 = __importDefault(require("../../../utils/trim-sql")); -exports.DbContextsArray = ["Master", "Dsql User"]; +import fullAccessDbHandler from "../fullAccessDbHandler"; +import varReadOnlyDatabaseDbHandler from "../varReadOnlyDatabaseDbHandler"; +import serverError from "../serverError"; +import addDbEntry from "./addDbEntry"; +import updateDbEntry from "./updateDbEntry"; +import deleteDbEntry from "./deleteDbEntry"; +import trimSql from "../../../utils/trim-sql"; +export const DbContextsArray = ["Master", "Dsql User"]; /** * # Run DSQL users queries */ -function runQuery(_a) { - return __awaiter(this, arguments, void 0, function* ({ dbFullName, query, readOnly, dbSchema, queryValuesArray, tableName, debug, dbContext, forceLocal, }) { - /** - * Declare variables - * - * @description Declare "results" variable - */ - let result; - let error; - let tableSchema; - if (dbSchema) { - try { - const table = tableName - ? tableName - : typeof query == "string" - ? null - : query - ? query === null || query === void 0 ? void 0 : query.table - : null; - if (!table) - throw new Error("No table name provided"); - tableSchema = dbSchema.tables.filter((tb) => (tb === null || tb === void 0 ? void 0 : tb.tableName) === table)[0]; - } - catch (_err) { - // console.log("ERROR getting tableSchema: ", _err.message); - } - } - /** - * Declare variables - * - * @description Declare "results" variable - */ +export default async function runQuery({ dbFullName, query, readOnly, dbSchema, queryValuesArray, tableName, debug, dbContext, forceLocal, }) { + /** + * Declare variables + * + * @description Declare "results" variable + */ + let result; + let error; + let tableSchema; + if (dbSchema) { try { - if (typeof query === "string") { - const formattedQuery = (0, trim_sql_1.default)(query); - if (debug && global.DSQL_USE_LOCAL) { - console.log("runQuery:formattedQuery", formattedQuery); - } - /** - * Input Validation - * - * @description Input Validation - */ - if (readOnly && formattedQuery.match(/^alter|^delete|^create/i)) { - throw new Error("Wrong Input!"); - } - if (readOnly) { - result = yield (0, varReadOnlyDatabaseDbHandler_1.default)({ - queryString: formattedQuery, - queryValuesArray: queryValuesArray === null || queryValuesArray === void 0 ? void 0 : queryValuesArray.map((vl) => String(vl)), - tableSchema, - forceLocal, - }); - } - else { - result = yield (0, fullAccessDbHandler_1.default)({ - queryString: formattedQuery, - queryValuesArray: queryValuesArray === null || queryValuesArray === void 0 ? 
void 0 : queryValuesArray.map((vl) => String(vl)), - tableSchema, - forceLocal, - }); - } - } - else if (typeof query === "object") { - /** - * Declare variables - * - * @description Declare "results" variable - */ - const { data, action, table, identifierColumnName, identifierValue, update, duplicateColumnName, duplicateColumnValue, } = query; - switch (action.toLowerCase()) { - case "insert": - result = yield (0, addDbEntry_1.default)({ - dbContext, - dbFullName: dbFullName, - tableName: table, - data: data, - update, - duplicateColumnName, - duplicateColumnValue, - tableSchema, - debug, - }); - if (!(result === null || result === void 0 ? void 0 : result.insertId)) { - error = "Couldn't insert data"; - } - break; - case "update": - result = yield (0, updateDbEntry_1.default)({ - dbContext, - dbFullName: dbFullName, - tableName: table, - data: data, - identifierColumnName, - identifierValue, - tableSchema, - }); - break; - case "delete": - result = yield (0, deleteDbEntry_1.default)({ - dbContext, - dbFullName: dbFullName, - tableName: table, - identifierColumnName, - identifierValue, - tableSchema, - }); - break; - default: - result = null; - break; - } - } + const table = tableName + ? tableName + : typeof query == "string" + ? null + : query + ? query === null || query === void 0 ? void 0 : query.table + : null; + if (!table) + throw new Error("No table name provided"); + tableSchema = dbSchema.tables.filter((tb) => (tb === null || tb === void 0 ? void 0 : tb.tableName) === table)[0]; } - catch (err) { - (0, serverError_1.default)({ - component: "functions/backend/runQuery", - message: err.message, - }); + catch (_err) { + // console.log("ERROR getting tableSchema: ", _err.message); + } + } + /** + * Declare variables + * + * @description Declare "results" variable + */ + try { + if (typeof query === "string") { + const formattedQuery = trimSql(query); if (debug && global.DSQL_USE_LOCAL) { - console.log("runQuery:error", err.message); + console.log("runQuery:formattedQuery", formattedQuery); + } + /** + * Input Validation + * + * @description Input Validation + */ + if (readOnly && formattedQuery.match(/^alter|^delete|^create/i)) { + throw new Error("Wrong Input!"); + } + if (readOnly) { + result = await varReadOnlyDatabaseDbHandler({ + queryString: formattedQuery, + queryValuesArray: queryValuesArray === null || queryValuesArray === void 0 ? void 0 : queryValuesArray.map((vl) => String(vl)), + tableSchema, + forceLocal, + }); + } + else { + result = await fullAccessDbHandler({ + queryString: formattedQuery, + queryValuesArray: queryValuesArray === null || queryValuesArray === void 0 ? void 0 : queryValuesArray.map((vl) => String(vl)), + tableSchema, + forceLocal, + }); } - result = null; - error = err.message; } - return { result, error }; - }); + else if (typeof query === "object") { + /** + * Declare variables + * + * @description Declare "results" variable + */ + const { data, action, table, identifierColumnName, identifierValue, update, duplicateColumnName, duplicateColumnValue, } = query; + switch (action.toLowerCase()) { + case "insert": + result = await addDbEntry({ + dbContext, + dbFullName, + tableName: table, + data: data, + update, + duplicateColumnName, + duplicateColumnValue, + tableSchema, + debug, + }); + if (!(result === null || result === void 0 ? 
void 0 : result.insertId)) { + error = "Couldn't insert data"; + } + break; + case "update": + result = await updateDbEntry({ + dbContext, + dbFullName, + tableName: table, + data: data, + identifierColumnName, + identifierValue, + tableSchema, + }); + break; + case "delete": + result = await deleteDbEntry({ + dbContext, + dbFullName, + tableName: table, + identifierColumnName, + identifierValue, + tableSchema, + }); + break; + default: + result = null; + break; + } + } + } + catch (err) { + serverError({ + component: "functions/backend/runQuery", + message: err.message, + }); + if (debug && global.DSQL_USE_LOCAL) { + console.log("runQuery:error", err.message); + } + result = null; + error = err.message; + } + return { result, error }; } diff --git a/dist/package-shared/functions/backend/db/sanitizeSql.js b/dist/package-shared/functions/backend/db/sanitizeSql.js index d6ca0a6..0ff3527 100644 --- a/dist/package-shared/functions/backend/db/sanitizeSql.js +++ b/dist/package-shared/functions/backend/db/sanitizeSql.js @@ -1,9 +1,4 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const lodash_1 = __importDefault(require("lodash")); +import _ from "lodash"; /** * Sanitize SQL function * ============================================================================== @@ -89,7 +84,7 @@ function sanitizeObjects(object, spaces) { * @returns {string[]|number[]|object[]} */ function sanitizeArrays(array, spaces) { - let arrayUpdated = lodash_1.default.cloneDeep(array); + let arrayUpdated = _.cloneDeep(array); arrayUpdated.forEach((item, index) => { const value = item; if (!value) { @@ -108,4 +103,4 @@ function sanitizeArrays(array, spaces) { }); return arrayUpdated; } -exports.default = sanitizeSql; +export default sanitizeSql; diff --git a/dist/package-shared/functions/backend/db/updateDbEntry.d.ts b/dist/package-shared/functions/backend/db/updateDbEntry.d.ts index a1ae138..2c0aad5 100644 --- a/dist/package-shared/functions/backend/db/updateDbEntry.d.ts +++ b/dist/package-shared/functions/backend/db/updateDbEntry.d.ts @@ -1,5 +1,5 @@ import { DbContextsArray } from "./runQuery"; -import { PostInsertReturn } from "../../../types"; +import { APIResponseObject, DSQL_TableSchemaType, PostInsertReturn } from "../../../types"; type Param = { @@ -8,8 +8,8 @@ type Param({ dbContext, dbFullName, tableName, data, tableSchema, identifierColumnName, identifierValue, encryptionKey, encryptionSalt, forceLocal, }: Param): Promise; +} = any>({ dbContext, dbFullName, tableName, data, tableSchema, identifierColumnName, identifierValue, encryptionKey, encryptionSalt, forceLocal, }: Param): Promise>; export {}; diff --git a/dist/package-shared/functions/backend/db/updateDbEntry.js b/dist/package-shared/functions/backend/db/updateDbEntry.js index d11348f..5821a11 100644 --- a/dist/package-shared/functions/backend/db/updateDbEntry.js +++ b/dist/package-shared/functions/backend/db/updateDbEntry.js @@ -1,129 +1,127 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = updateDbEntry; -const sanitize_html_1 = __importDefault(require("sanitize-html")); -const sanitizeHtmlOptions_1 = __importDefault(require("../html/sanitizeHtmlOptions")); -const encrypt_1 = __importDefault(require("../../dsql/encrypt")); -const check_if_is_master_1 = __importDefault(require("../../../utils/check-if-is-master")); -const conn_db_handler_1 = __importDefault(require("../../../utils/db/conn-db-handler")); +import sanitizeHtml from "sanitize-html"; +import sanitizeHtmlOptions from "../html/sanitizeHtmlOptions"; +import encrypt from "../../dsql/encrypt"; +import checkIfIsMaster from "../../../utils/check-if-is-master"; +import connDbHandler from "../../../utils/db/conn-db-handler"; +import _ from "lodash"; +import purgeDefaultFields from "../../../utils/purge-default-fields"; /** * # Update DB Function * @description */ -function updateDbEntry(_a) { - return __awaiter(this, arguments, void 0, function* ({ dbContext, dbFullName, tableName, data, tableSchema, identifierColumnName, identifierValue, encryptionKey, encryptionSalt, forceLocal, }) { - var _b; - /** - * Check if data is valid - */ - if (!data || !Object.keys(data).length) - return null; - const isMaster = forceLocal - ? true - : (0, check_if_is_master_1.default)({ dbContext, dbFullName }); - const DB_CONN = isMaster - ? global.DSQL_DB_CONN - : global.DSQL_FULL_ACCESS_DB_CONN || global.DSQL_DB_CONN; - //////////////////////////////////////// - //////////////////////////////////////// - //////////////////////////////////////// - /** - * Declare variables - * - * @description Declare "results" variable - */ - const dataKeys = Object.keys(data); - let updateKeyValueArray = []; - let updateValues = []; - for (let i = 0; i < dataKeys.length; i++) { - try { - const dataKey = dataKeys[i]; - // @ts-ignore - let value = data[dataKey]; - const targetFieldSchemaArray = tableSchema - ? (_b = tableSchema === null || tableSchema === void 0 ? void 0 : tableSchema.fields) === null || _b === void 0 ? void 0 : _b.filter((field) => field.fieldName === dataKey) - : null; - const targetFieldSchema = targetFieldSchemaArray && targetFieldSchemaArray[0] - ? targetFieldSchemaArray[0] - : null; - if (value == null || value == undefined) - continue; - const htmlRegex = /<[^>]+>/g; - if ((targetFieldSchema === null || targetFieldSchema === void 0 ? void 0 : targetFieldSchema.richText) || String(value).match(htmlRegex)) { - value = (0, sanitize_html_1.default)(value, sanitizeHtmlOptions_1.default); - } - if (targetFieldSchema === null || targetFieldSchema === void 0 ? 
void 0 : targetFieldSchema.encrypted) { - value = (0, encrypt_1.default)({ - data: value, - encryptionKey, - encryptionSalt, - }); - } - if (typeof value === "object") { - value = JSON.stringify(value); - } - if (targetFieldSchema === null || targetFieldSchema === void 0 ? void 0 : targetFieldSchema.pattern) { - const pattern = new RegExp(targetFieldSchema.pattern, targetFieldSchema.patternFlags || ""); - if (!pattern.test(value)) { - console.log("DSQL: Pattern not matched =>", value); - value = ""; - } - } - if (typeof value === "string" && value.match(/^null$/i)) { - value = { - toSqlString: function () { - return "NULL"; - }, - }; - } - if (typeof value === "string" && !value.match(/./i)) { - value = { - toSqlString: function () { - return "NULL"; - }, - }; - } - updateKeyValueArray.push(`\`${dataKey}\`=?`); - if (typeof value == "number") { - updateValues.push(String(value)); - } - else { - updateValues.push(value); - } - //////////////////////////////////////// - //////////////////////////////////////// - } - catch ( /** @type {any} */error) { - //////////////////////////////////////// - //////////////////////////////////////// - console.log("DSQL: Error in parsing data keys in update function =>", error.message); +export default async function updateDbEntry({ dbContext, dbFullName, tableName, data, tableSchema, identifierColumnName, identifierValue, encryptionKey, encryptionSalt, forceLocal, }) { + var _a; + /** + * Check if data is valid + */ + if (!data || !Object.keys(data).length) { + return { + success: false, + payload: undefined, + msg: "No data provided", + }; + } + const isMaster = forceLocal + ? true + : checkIfIsMaster({ dbContext, dbFullName }); + const DB_CONN = isMaster + ? global.DSQL_DB_CONN + : global.DSQL_FULL_ACCESS_DB_CONN || global.DSQL_DB_CONN; + //////////////////////////////////////// + //////////////////////////////////////// + //////////////////////////////////////// + let newData = _.cloneDeep(data); + newData = purgeDefaultFields(newData); + /** + * Declare variables + * + * @description Declare "results" variable + */ + const dataKeys = Object.keys(newData); + let updateKeyValueArray = []; + let updateValues = []; + for (let i = 0; i < dataKeys.length; i++) { + try { + const dataKey = dataKeys[i]; + let value = newData[dataKey]; + const targetFieldSchemaArray = tableSchema + ? (_a = tableSchema === null || tableSchema === void 0 ? void 0 : tableSchema.fields) === null || _a === void 0 ? void 0 : _a.filter((field) => field.fieldName === dataKey) + : null; + const targetFieldSchema = targetFieldSchemaArray && targetFieldSchemaArray[0] + ? targetFieldSchemaArray[0] + : null; + if (value == null || value == undefined) continue; + const htmlRegex = /<[^>]+>/g; + if ((targetFieldSchema === null || targetFieldSchema === void 0 ? void 0 : targetFieldSchema.richText) || String(value).match(htmlRegex)) { + value = sanitizeHtml(value, sanitizeHtmlOptions); } + if (targetFieldSchema === null || targetFieldSchema === void 0 ? void 0 : targetFieldSchema.encrypted) { + value = encrypt({ + data: value, + encryptionKey, + encryptionSalt, + }); + } + if (typeof value === "object") { + value = JSON.stringify(value); + } + if (targetFieldSchema === null || targetFieldSchema === void 0 ? 
void 0 : targetFieldSchema.pattern) { + const pattern = new RegExp(targetFieldSchema.pattern, targetFieldSchema.patternFlags || ""); + if (!pattern.test(value)) { + console.log("DSQL: Pattern not matched =>", value); + value = ""; + } + } + if (typeof value === "string" && value.match(/^null$/i)) { + value = { + toSqlString: function () { + return "NULL"; + }, + }; + } + if (typeof value === "string" && !value.match(/./i)) { + value = { + toSqlString: function () { + return "NULL"; + }, + }; + } + updateKeyValueArray.push(`\`${dataKey}\`=?`); + if (typeof value == "number") { + updateValues.push(String(value)); + } + else { + updateValues.push(value); + } + //////////////////////////////////////// + //////////////////////////////////////// } - //////////////////////////////////////// - //////////////////////////////////////// - updateKeyValueArray.push(`date_updated='${Date()}'`); - updateKeyValueArray.push(`date_updated_code='${Date.now()}'`); - //////////////////////////////////////// - //////////////////////////////////////// - const query = `UPDATE ${isMaster ? "" : `\`${dbFullName}\`.`}\`${tableName}\` SET ${updateKeyValueArray.join(",")} WHERE \`${identifierColumnName}\`=?`; - updateValues.push(identifierValue); - const updatedEntry = yield (0, conn_db_handler_1.default)(DB_CONN, query, updateValues); - /** - * Return statement - */ - return updatedEntry; - }); + catch ( /** @type {any} */error) { + //////////////////////////////////////// + //////////////////////////////////////// + console.log("DSQL: Error in parsing data keys in update function =>", error.message); + continue; + } + } + //////////////////////////////////////// + //////////////////////////////////////// + updateKeyValueArray.push(`date_updated='${Date()}'`); + updateKeyValueArray.push(`date_updated_code='${Date.now()}'`); + //////////////////////////////////////// + //////////////////////////////////////// + const query = `UPDATE ${isMaster && !dbFullName ? "" : `\`${dbFullName}\`.`}\`${tableName}\` SET ${updateKeyValueArray.join(",")} WHERE \`${identifierColumnName}\`=?`; + updateValues.push(identifierValue); + const updatedEntry = await connDbHandler(DB_CONN, query, updateValues); + /** + * Return statement + */ + return { + success: Boolean(updatedEntry === null || updatedEntry === void 0 ? void 0 : updatedEntry.affectedRows), + payload: updatedEntry, + queryObject: { + sql: query, + params: updateValues, + }, + }; } diff --git a/dist/package-shared/functions/backend/dbHandler.d.ts b/dist/package-shared/functions/backend/dbHandler.d.ts index 7bfe147..b11d4b9 100644 --- a/dist/package-shared/functions/backend/dbHandler.d.ts +++ b/dist/package-shared/functions/backend/dbHandler.d.ts @@ -1,4 +1,11 @@ +type Param = { + query: string; + values?: string[] | object; + noErrorLogs?: boolean; +}; /** * # Main DB Handler Function + * @requires DSQL_DB_CONN - Gobal Variable for Datasquirel Database */ -export default function dbHandler(...args: any[]): Promise; +export default function dbHandler({ query, values, noErrorLogs, }: Param): Promise; +export {}; diff --git a/dist/package-shared/functions/backend/dbHandler.js b/dist/package-shared/functions/backend/dbHandler.js index 393a8a7..0e182eb 100644 --- a/dist/package-shared/functions/backend/dbHandler.js +++ b/dist/package-shared/functions/backend/dbHandler.js @@ -1,74 +1,47 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = dbHandler; -const fs_1 = __importDefault(require("fs")); -const serverError_1 = __importDefault(require("./serverError")); -const grab_dsql_connection_1 = __importDefault(require("../../utils/grab-dsql-connection")); -const path_1 = __importDefault(require("path")); +import fs from "fs"; +import path from "path"; +import grabDSQLConnection from "../../utils/grab-dsql-connection"; /** * # Main DB Handler Function + * @requires DSQL_DB_CONN - Gobal Variable for Datasquirel Database */ -function dbHandler(...args) { - return __awaiter(this, void 0, void 0, function* () { - var _a, _b; - ((_a = process.env.NODE_ENV) === null || _a === void 0 ? void 0 : _a.match(/dev/)) && - fs_1.default.appendFileSync("./.tmp/sqlQuery.sql", args[0] + "\n" + Date() + "\n\n\n", "utf8"); - const CONNECTION = (0, grab_dsql_connection_1.default)(); - let results; - /** - * Fetch from db - * - * @description Fetch data from db if no cache - */ - try { - results = yield new Promise((resolve, reject) => { - CONNECTION.query(...args, (error, result, fields) => { - if (error) { - resolve({ error: error.message }); - } - else { - resolve(result); - } - }); - }); - } - catch (error) { - const tmpFolder = path_1.default.resolve(process.cwd(), "./.tmp"); - if (!fs_1.default.existsSync(tmpFolder)) - fs_1.default.mkdirSync(tmpFolder, { recursive: true }); - fs_1.default.appendFileSync(path_1.default.resolve(tmpFolder, "./dbErrorLogs.txt"), JSON.stringify(error, null, 4) + "\n" + Date() + "\n\n\n", "utf8"); - results = null; - (_b = global.ERROR_CALLBACK) === null || _b === void 0 ? void 0 : _b.call(global, `DB Handler Error`, error); - (0, serverError_1.default)({ - component: "dbHandler", - message: error.message, - }); - } - finally { - yield (CONNECTION === null || CONNECTION === void 0 ? void 0 : CONNECTION.end()); - } - /** - * Return results - * - * @description Return results add to cache if "req" param is passed - */ - if (results) { - return JSON.parse(JSON.stringify(results)); +export default async function dbHandler({ query, values, noErrorLogs, }) { + var _a; + const CONNECTION = grabDSQLConnection(); + let results; + try { + if (query && values) { + results = await CONNECTION.query(query, values); } else { + results = await CONNECTION.query(query); + } + } + catch (error) { + if (!noErrorLogs) { + (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? 
void 0 : _a.call(global, `DB Handler Error...`, error); + } + if (process.env.FIRST_RUN) { return null; } - }); + if (!noErrorLogs) { + console.log("ERROR in dbHandler =>", error.message); + console.log(error); + console.log(CONNECTION.config()); + const tmpFolder = path.resolve(process.cwd(), "./.tmp"); + if (!fs.existsSync(tmpFolder)) + fs.mkdirSync(tmpFolder, { recursive: true }); + fs.appendFileSync(path.resolve(tmpFolder, "./dbErrorLogs.txt"), JSON.stringify(error, null, 4) + "\n" + Date() + "\n\n\n", "utf8"); + } + results = null; + } + finally { + await (CONNECTION === null || CONNECTION === void 0 ? void 0 : CONNECTION.end()); + } + if (results) { + return JSON.parse(JSON.stringify(results)); + } + else { + return null; + } } diff --git a/dist/package-shared/functions/backend/defaultFieldsRegexp.d.ts b/dist/package-shared/functions/backend/defaultFieldsRegexp.d.ts deleted file mode 100644 index f9e698e..0000000 --- a/dist/package-shared/functions/backend/defaultFieldsRegexp.d.ts +++ /dev/null @@ -1,7 +0,0 @@ -/** - * Regular expression to match default fields - * - * @description Regular expression to match default fields - */ -declare const defaultFieldsRegexp: RegExp; -export default defaultFieldsRegexp; diff --git a/dist/package-shared/functions/backend/defaultFieldsRegexp.js b/dist/package-shared/functions/backend/defaultFieldsRegexp.js deleted file mode 100644 index df09adf..0000000 --- a/dist/package-shared/functions/backend/defaultFieldsRegexp.js +++ /dev/null @@ -1,9 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -/** - * Regular expression to match default fields - * - * @description Regular expression to match default fields - */ -const defaultFieldsRegexp = /^id$|^uuid$|^date_created$|^date_created_code$|^date_created_timestamp$|^date_updated$|^date_updated_code$|^date_updated_timestamp$/; -exports.default = defaultFieldsRegexp; diff --git a/dist/package-shared/functions/backend/fullAccessDbHandler.js b/dist/package-shared/functions/backend/fullAccessDbHandler.js index 926d577..3833c93 100644 --- a/dist/package-shared/functions/backend/fullAccessDbHandler.js +++ b/dist/package-shared/functions/backend/fullAccessDbHandler.js @@ -1,78 +1,61 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = fullAccessDbHandler; -const conn_db_handler_1 = __importDefault(require("../../utils/db/conn-db-handler")); -const parseDbResults_1 = __importDefault(require("./parseDbResults")); -const serverError_1 = __importDefault(require("./serverError")); +import connDbHandler from "../../utils/db/conn-db-handler"; +import parseDbResults from "./parseDbResults"; +import serverError from "./serverError"; /** * # Full Access Db Handler */ -function fullAccessDbHandler(_a) { - return __awaiter(this, arguments, void 0, function* ({ queryString, tableSchema, queryValuesArray, forceLocal, }) { - var _b; +export default async function fullAccessDbHandler({ queryString, tableSchema, queryValuesArray, forceLocal, }) { + var _a; + /** + * Declare variables + * + * @description Declare "results" variable + */ + let results; + const DB_CONN = forceLocal + ? global.DSQL_DB_CONN + : global.DSQL_FULL_ACCESS_DB_CONN || global.DSQL_DB_CONN; + /** + * Fetch from db + * + * @description Fetch data from db if no cache + */ + try { + results = await connDbHandler(DB_CONN, queryString, queryValuesArray); + //////////////////////////////////////// + } + catch (error) { + //////////////////////////////////////// + (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `Full Access DB Handler Error`, error); + serverError({ + component: "fullAccessDbHandler", + message: error.message, + }); /** - * Declare variables - * - * @description Declare "results" variable + * Return error */ - let results; - const DB_CONN = forceLocal - ? global.DSQL_DB_CONN - : global.DSQL_FULL_ACCESS_DB_CONN || global.DSQL_DB_CONN; - /** - * Fetch from db - * - * @description Fetch data from db if no cache - */ - try { - results = yield (0, conn_db_handler_1.default)(DB_CONN, queryString, queryValuesArray); - //////////////////////////////////////// - } - catch (error) { - //////////////////////////////////////// - (_b = global.ERROR_CALLBACK) === null || _b === void 0 ? void 0 : _b.call(global, `Full Access DB Handler Error`, error); - (0, serverError_1.default)({ - component: "fullAccessDbHandler", - message: error.message, - }); - /** - * Return error - */ - return error.message; - } - finally { - DB_CONN === null || DB_CONN === void 0 ? void 0 : DB_CONN.end(); - } - /** - * Return results - * - * @description Return results add to cache if "req" param is passed - */ - if (results && tableSchema) { - const unparsedResults = results; - const parsedResults = yield (0, parseDbResults_1.default)({ - unparsedResults: unparsedResults, - tableSchema: tableSchema, - }); - return parsedResults; - } - else if (results) { - return results; - } - else { - return null; - } - }); + return error.message; + } + finally { + DB_CONN === null || DB_CONN === void 0 ? 
void 0 : DB_CONN.end(); + } + /** + * Return results + * + * @description Return results add to cache if "req" param is passed + */ + if (results && tableSchema) { + const unparsedResults = results; + const parsedResults = await parseDbResults({ + unparsedResults: unparsedResults, + tableSchema: tableSchema, + }); + return parsedResults; + } + else if (results) { + return results; + } + else { + return null; + } } diff --git a/dist/package-shared/functions/backend/grab-mariadb-main-user-for-user.d.ts b/dist/package-shared/functions/backend/grab-mariadb-main-user-for-user.d.ts new file mode 100644 index 0000000..7a2b814 --- /dev/null +++ b/dist/package-shared/functions/backend/grab-mariadb-main-user-for-user.d.ts @@ -0,0 +1,12 @@ +import { UserType } from "../../types"; +type Params = { + user: UserType; +}; +type Return = { + fullName?: string; + host?: string; + username?: string; + password?: string; +}; +export default function grabMariadbMainUserForUser({ user, }: Params): Promise; +export {}; diff --git a/dist/package-shared/functions/backend/grab-mariadb-main-user-for-user.js b/dist/package-shared/functions/backend/grab-mariadb-main-user-for-user.js new file mode 100644 index 0000000..56f425c --- /dev/null +++ b/dist/package-shared/functions/backend/grab-mariadb-main-user-for-user.js @@ -0,0 +1,41 @@ +import dbHandler from "./dbHandler"; +import dsqlCrud from "../../utils/data-fetching/crud"; +import decrypt from "../dsql/decrypt"; +import createUserSQLUser from "../../utils/create-user-sql-user"; +import grabUserMainSqlUserName from "../../utils/grab-user-main-sql-user-name"; +export default async function grabMariadbMainUserForUser({ user, }) { + var _a; + const { fullName, host, username: mariaDBUsername, webHost, } = grabUserMainSqlUserName({ user }); + const existingWebAppUser = (await dbHandler({ + query: `SELECT * FROM mysql.user WHERE user=? AND host=?`, + values: [mariaDBUsername, webHost], + })); + if (!(existingWebAppUser === null || existingWebAppUser === void 0 ? void 0 : existingWebAppUser[0])) { + return await createUserSQLUser(user); + } + else { + const existingUserRecord = await dsqlCrud({ + action: "get", + table: "users", + query: { + query: { + id: { + value: String(user.id), + }, + }, + }, + }); + const targetUser = (_a = existingUserRecord === null || existingUserRecord === void 0 ? void 0 : existingUserRecord.payload) === null || _a === void 0 ? void 0 : _a[0]; + if (!(targetUser === null || targetUser === void 0 ? void 0 : targetUser.id)) { + return {}; + } + return { + fullName, + host, + username: mariaDBUsername, + password: decrypt({ + encryptedString: targetUser.mariadb_pass || "", + }), + }; + } +} diff --git a/dist/package-shared/functions/backend/grabNewUsersTableSchema.js b/dist/package-shared/functions/backend/grabNewUsersTableSchema.js index 2eaa1e1..40d6e9f 100644 --- a/dist/package-shared/functions/backend/grabNewUsersTableSchema.js +++ b/dist/package-shared/functions/backend/grabNewUsersTableSchema.js @@ -1,20 +1,14 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = grabNewUsersTableSchema; -const grabSchemaFieldsFromData_1 = __importDefault(require("./grabSchemaFieldsFromData")); -const serverError_1 = __importDefault(require("./serverError")); +import grabSchemaFieldsFromData from "./grabSchemaFieldsFromData"; +import serverError from "./serverError"; /** * # Add User Table to Database */ -function grabNewUsersTableSchema(params) { +export default function grabNewUsersTableSchema(params) { try { const userPreset = require("../../data/presets/users.json"); const defaultFields = require("../../data/defaultFields.json"); const supplementalFields = (params === null || params === void 0 ? void 0 : params.payload) - ? (0, grabSchemaFieldsFromData_1.default)({ + ? grabSchemaFieldsFromData({ data: params === null || params === void 0 ? void 0 : params.payload, excludeData: defaultFields, excludeFields: userPreset.fields, @@ -33,7 +27,7 @@ function grabNewUsersTableSchema(params) { } catch ( /** @type {any} */error) { console.log(`grabNewUsersTableSchema.ts ERROR: ${error.message}`); - (0, serverError_1.default)({ + serverError({ component: "grabNewUsersTableSchema", message: error.message, }); diff --git a/dist/package-shared/functions/backend/grabSchemaFieldsFromData.js b/dist/package-shared/functions/backend/grabSchemaFieldsFromData.js index a948bb7..9aedbeb 100644 --- a/dist/package-shared/functions/backend/grabSchemaFieldsFromData.js +++ b/dist/package-shared/functions/backend/grabSchemaFieldsFromData.js @@ -1,14 +1,8 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = grabSchemaFieldsFromData; -const serverError_1 = __importDefault(require("./serverError")); +import serverError from "./serverError"; /** * # Add User Table to Database */ -function grabSchemaFieldsFromData({ data, fields, excludeData, excludeFields, }) { +export default function grabSchemaFieldsFromData({ data, fields, excludeData, excludeFields, }) { var _a; try { /** @type {DSQL_FieldSchemaType[]} */ @@ -55,7 +49,7 @@ function grabSchemaFieldsFromData({ data, fields, excludeData, excludeFields, }) } catch ( /** @type {any} */error) { console.log(`grabSchemaFieldsFromData.ts ERROR: ${error.message}`); - (0, serverError_1.default)({ + serverError({ component: "grabSchemaFieldsFromData.ts", message: error.message, }); diff --git a/dist/package-shared/functions/backend/grabUserSchemaData.d.ts b/dist/package-shared/functions/backend/grabUserSchemaData.d.ts deleted file mode 100644 index c1e7702..0000000 --- a/dist/package-shared/functions/backend/grabUserSchemaData.d.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { DSQL_DatabaseSchemaType } from "../../types"; -type Params = { - userId?: string | number; -}; -/** - * # Grab User Schema Data - */ -export default function grabUserSchemaData({ userId, }: Params): DSQL_DatabaseSchemaType[] | null; -export {}; diff --git a/dist/package-shared/functions/backend/grabUserSchemaData.js b/dist/package-shared/functions/backend/grabUserSchemaData.js deleted file mode 100644 index 28162b3..0000000 --- a/dist/package-shared/functions/backend/grabUserSchemaData.js +++ /dev/null @@ -1,30 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = grabUserSchemaData; -const serverError_1 = __importDefault(require("./serverError")); -const fs_1 = __importDefault(require("fs")); -const grab_dir_names_1 = __importDefault(require("../../utils/backend/names/grab-dir-names")); -const ejson_1 = __importDefault(require("../../utils/ejson")); -/** - * # Grab User Schema Data - */ -function grabUserSchemaData({ userId, }) { - var _a; - try { - const { userSchemaMainJSONFilePath } = (0, grab_dir_names_1.default)({ userId }); - const schemaJSON = fs_1.default.readFileSync(userSchemaMainJSONFilePath || "", "utf-8"); - const schemaObj = ejson_1.default.parse(schemaJSON); - return schemaObj; - } - catch (error) { - (0, serverError_1.default)({ - component: "grabUserSchemaData", - message: error.message, - }); - (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `Error Grabbing User Schema Data`, error); - return null; - } -} diff --git a/dist/package-shared/functions/backend/handleNodemailer.js b/dist/package-shared/functions/backend/handleNodemailer.js index 9d067fe..817d4f0 100644 --- a/dist/package-shared/functions/backend/handleNodemailer.js +++ b/dist/package-shared/functions/backend/handleNodemailer.js @@ -1,67 +1,50 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = handleNodemailer; -const fs_1 = __importDefault(require("fs")); -const lodash_1 = __importDefault(require("lodash")); -const nodemailer_1 = __importDefault(require("nodemailer")); +import fs from "fs"; +import _ from "lodash"; +import nodemailer from "nodemailer"; /** * # Handle mails With Nodemailer */ -function handleNodemailer(params) { - return __awaiter(this, void 0, void 0, function* () { - var _a, _b; - if (!process.env.DSQL_MAIL_HOST || - !process.env.DSQL_MAIL_EMAIL || - !process.env.DSQL_MAIL_PASSWORD) { - return undefined; - } - let transporter = nodemailer_1.default.createTransport(Object.assign({ host: process.env.DSQL_MAIL_HOST, port: 465, secure: true, auth: { - user: process.env.DSQL_MAIL_EMAIL, - pass: process.env.DSQL_MAIL_PASSWORD, - } }, params.options)); - const sender = (() => { - var _a; - if ((_a = params.alias) === null || _a === void 0 ? void 0 : _a.match(/support/i)) - return process.env.DSQL_MAIL_EMAIL; - return process.env.DSQL_MAIL_EMAIL; - })(); - const mailRootPath = process.env.DSQL_MAIL_ROOT || "./email/index.html"; - let mailRoot = fs_1.default.existsSync(mailRootPath) - ? fs_1.default.readFileSync(mailRootPath, "utf8") - : undefined; - let finalHtml = mailRoot - ? mailRoot - .replace(/{{email_body}}/, ((_a = params.html) === null || _a === void 0 ? 
void 0 : _a.toString()) || "") - .replace(/{{issue_date}}/, Date().substring(0, 24)) - : (_b = params.html) === null || _b === void 0 ? void 0 : _b.toString(); - try { - let mailObject = {}; - mailObject["from"] = `"${params.senderName || "Datasquirel"}" <${sender}>`; - mailObject["sender"] = sender; - if (params.alias) - mailObject["replyTo"] = sender; - mailObject["to"] = params.to; - mailObject["subject"] = params.subject; - mailObject["text"] = params.text; - mailObject["html"] = finalHtml; - let info = yield transporter.sendMail(Object.assign(Object.assign({}, lodash_1.default.omit(mailObject, ["alias", "senderName", "options"])), mailObject)); - return info; - } - catch (error) { - console.log("ERROR in handleNodemailer Function =>", error.message); - } +export default async function handleNodemailer(params) { + var _a, _b; + if (!process.env.DSQL_MAIL_HOST || + !process.env.DSQL_MAIL_EMAIL || + !process.env.DSQL_MAIL_PASSWORD) { return undefined; - }); + } + let transporter = nodemailer.createTransport(Object.assign({ host: process.env.DSQL_MAIL_HOST, port: 465, secure: true, auth: { + user: process.env.DSQL_MAIL_EMAIL, + pass: process.env.DSQL_MAIL_PASSWORD, + } }, params.options)); + const sender = (() => { + var _a; + if ((_a = params.alias) === null || _a === void 0 ? void 0 : _a.match(/support/i)) + return process.env.DSQL_MAIL_EMAIL; + return process.env.DSQL_MAIL_EMAIL; + })(); + const mailRootPath = process.env.DSQL_MAIL_ROOT || "./email/index.html"; + let mailRoot = fs.existsSync(mailRootPath) + ? fs.readFileSync(mailRootPath, "utf8") + : undefined; + let finalHtml = mailRoot + ? mailRoot + .replace(/{{email_body}}/, ((_a = params.html) === null || _a === void 0 ? void 0 : _a.toString()) || "") + .replace(/{{issue_date}}/, Date().substring(0, 24)) + : (_b = params.html) === null || _b === void 0 ? void 0 : _b.toString(); + try { + let mailObject = {}; + mailObject["from"] = `"${params.senderName || "Datasquirel"}" <${sender}>`; + mailObject["sender"] = sender; + if (params.alias) + mailObject["replyTo"] = sender; + mailObject["to"] = params.to; + mailObject["subject"] = params.subject; + mailObject["text"] = params.text; + mailObject["html"] = finalHtml; + let info = await transporter.sendMail(Object.assign(Object.assign({}, _.omit(mailObject, ["alias", "senderName", "options"])), mailObject)); + return info; + } + catch (error) { + console.log("ERROR in handleNodemailer Function =>", error.message); + } + return undefined; } diff --git a/dist/package-shared/functions/backend/html/sanitizeHtmlOptions.js b/dist/package-shared/functions/backend/html/sanitizeHtmlOptions.js index f1ecdfd..083df32 100644 --- a/dist/package-shared/functions/backend/html/sanitizeHtmlOptions.js +++ b/dist/package-shared/functions/backend/html/sanitizeHtmlOptions.js @@ -1,5 +1,3 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); const sanitizeHtmlOptions = { allowedTags: [ "b", @@ -31,4 +29,4 @@ const sanitizeHtmlOptions = { "*": ["style", "class", "title"], }, }; -exports.default = sanitizeHtmlOptions; +export default sanitizeHtmlOptions; diff --git a/dist/package-shared/functions/backend/httpsRequest.js b/dist/package-shared/functions/backend/httpsRequest.js index 53d4abd..65db696 100644 --- a/dist/package-shared/functions/backend/httpsRequest.js +++ b/dist/package-shared/functions/backend/httpsRequest.js @@ -1,19 +1,13 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = httpsRequest; -const https_1 = __importDefault(require("https")); -const http_1 = __importDefault(require("http")); -const url_1 = require("url"); +import https from "https"; +import http from "http"; +import { URL } from "url"; /** * # Make Https Request */ -function httpsRequest({ url, method, hostname, path, headers, body, port, scheme, }) { +export default function httpsRequest({ url, method, hostname, path, headers, body, port, scheme, }) { var _a; const reqPayloadString = body ? JSON.stringify(body) : null; - const PARSED_URL = url ? new url_1.URL(url) : null; + const PARSED_URL = url ? new URL(url) : null; //////////////////////////////////////////////// //////////////////////////////////////////////// //////////////////////////////////////////////// @@ -49,10 +43,10 @@ function httpsRequest({ url, method, hostname, path, headers, body, port, scheme return new Promise((res, rej) => { var _a; const httpsRequest = ((scheme === null || scheme === void 0 ? void 0 : scheme.match(/https/i)) - ? https_1.default + ? https : ((_a = PARSED_URL === null || PARSED_URL === void 0 ? void 0 : PARSED_URL.protocol) === null || _a === void 0 ? void 0 : _a.match(/https/i)) - ? https_1.default - : http_1.default).request( + ? https + : http).request( /* ====== Request Options object ====== */ requestOptions, //////////////////////////////////////////////// diff --git a/dist/package-shared/functions/backend/noDatabaseDbHandler.js b/dist/package-shared/functions/backend/noDatabaseDbHandler.js index b129744..e0d831c 100644 --- a/dist/package-shared/functions/backend/noDatabaseDbHandler.js +++ b/dist/package-shared/functions/backend/noDatabaseDbHandler.js @@ -1,64 +1,47 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = noDatabaseDbHandler; -const fs_1 = __importDefault(require("fs")); -const serverError_1 = __importDefault(require("./serverError")); -const NO_DB_HANDLER_1 = __importDefault(require("../../utils/backend/global-db/NO_DB_HANDLER")); +import fs from "fs"; +import serverError from "./serverError"; +import NO_DB_HANDLER from "../../utils/backend/global-db/NO_DB_HANDLER"; /** * # No Database DB Handler */ -function noDatabaseDbHandler(queryString) { - return __awaiter(this, void 0, void 0, function* () { - var _a; - ((_a = process.env.NODE_ENV) === null || _a === void 0 ? 
void 0 : _a.match(/dev/)) && - fs_1.default.appendFileSync("./.tmp/sqlQuery.sql", queryString + "\n" + Date() + "\n\n\n", "utf8"); - /** - * Declare variables - * - * @description Declare "results" variable - */ - let results; - /** - * Fetch from db - * - * @description Fetch data from db if no cache - */ - try { - /** ********************* Run Query */ - results = yield (0, NO_DB_HANDLER_1.default)(queryString); - //////////////////////////////////////// - //////////////////////////////////////// - //////////////////////////////////////// - } - catch ( /** @type {any} */error) { - (0, serverError_1.default)({ - component: "noDatabaseDbHandler", - message: error.message, - }); - console.log("ERROR in noDatabaseDbHandler =>", error.message); - } - /** - * Return results - * - * @description Return results add to cache if "req" param is passed - */ - if (results) { - return results; - } - else { - return null; - } - }); +export default async function noDatabaseDbHandler(queryString) { + var _a; + ((_a = process.env.NODE_ENV) === null || _a === void 0 ? void 0 : _a.match(/dev/)) && + fs.appendFileSync("./.tmp/sqlQuery.sql", queryString + "\n" + Date() + "\n\n\n", "utf8"); + /** + * Declare variables + * + * @description Declare "results" variable + */ + let results; + /** + * Fetch from db + * + * @description Fetch data from db if no cache + */ + try { + /** ********************* Run Query */ + results = await NO_DB_HANDLER(queryString); + //////////////////////////////////////// + //////////////////////////////////////// + //////////////////////////////////////// + } + catch ( /** @type {any} */error) { + serverError({ + component: "noDatabaseDbHandler", + message: error.message, + }); + console.log("ERROR in noDatabaseDbHandler =>", error.message); + } + /** + * Return results + * + * @description Return results add to cache if "req" param is passed + */ + if (results) { + return results; + } + else { + return null; + } } diff --git a/dist/package-shared/functions/backend/parseDbResults.js b/dist/package-shared/functions/backend/parseDbResults.js index 6ce9a41..3ef61a1 100644 --- a/dist/package-shared/functions/backend/parseDbResults.js +++ b/dist/package-shared/functions/backend/parseDbResults.js @@ -1,21 +1,6 @@ -"use strict"; // @ts-check -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = parseDbResults; -const decrypt_1 = __importDefault(require("../dsql/decrypt")); -const defaultFieldsRegexp_1 = __importDefault(require("./defaultFieldsRegexp")); +import decrypt from "../dsql/decrypt"; +import defaultFieldsRegexp from "../dsql/default-fields-regexp"; /** * Parse Database results * ============================================================================== @@ -23,54 +8,52 @@ const defaultFieldsRegexp_1 = __importDefault(require("./defaultFieldsRegexp")); * function, decrypts encrypted fields, and returns an updated array with no encrypted * fields */ -function parseDbResults(_a) { - return __awaiter(this, arguments, void 0, function* ({ unparsedResults, tableSchema, }) { +export default async function parseDbResults({ unparsedResults, tableSchema, }) { + /** + * Declare variables + * + * @description Declare "results" variable + */ + let parsedResults = []; + try { /** * Declare variables * * @description Declare "results" variable */ - let parsedResults = []; - try { - /** - * Declare variables - * - * @description Declare "results" variable - */ - for (let pr = 0; pr < unparsedResults.length; pr++) { - let result = unparsedResults[pr]; - let resultFieldNames = Object.keys(result); - for (let i = 0; i < resultFieldNames.length; i++) { - const resultFieldName = resultFieldNames[i]; - let resultFieldSchema = tableSchema === null || tableSchema === void 0 ? void 0 : tableSchema.fields[i]; - if (resultFieldName === null || resultFieldName === void 0 ? void 0 : resultFieldName.match(defaultFieldsRegexp_1.default)) { - continue; - } - let value = result[resultFieldName]; - if (typeof value !== "number" && !value) { - // parsedResults.push(result); - continue; - } - if (resultFieldSchema === null || resultFieldSchema === void 0 ? void 0 : resultFieldSchema.encrypted) { - if (value === null || value === void 0 ? void 0 : value.match(/./)) { - result[resultFieldName] = (0, decrypt_1.default)({ - encryptedString: value, - }); - } + for (let pr = 0; pr < unparsedResults.length; pr++) { + let result = unparsedResults[pr]; + let resultFieldNames = Object.keys(result); + for (let i = 0; i < resultFieldNames.length; i++) { + const resultFieldName = resultFieldNames[i]; + let resultFieldSchema = tableSchema === null || tableSchema === void 0 ? void 0 : tableSchema.fields[i]; + if (resultFieldName === null || resultFieldName === void 0 ? void 0 : resultFieldName.match(defaultFieldsRegexp)) { + continue; + } + let value = result[resultFieldName]; + if (typeof value !== "number" && !value) { + // parsedResults.push(result); + continue; + } + if (resultFieldSchema === null || resultFieldSchema === void 0 ? void 0 : resultFieldSchema.encrypted) { + if (value === null || value === void 0 ? 
void 0 : value.match(/./)) { + result[resultFieldName] = decrypt({ + encryptedString: value, + }); } } - parsedResults.push(result); } - /** - * Declare variables - * - * @description Declare "results" variable - */ - return parsedResults; + parsedResults.push(result); } - catch ( /** @type {any} */error) { - console.log("ERROR in parseDbResults Function =>", error.message); - return unparsedResults; - } - }); + /** + * Declare variables + * + * @description Declare "results" variable + */ + return parsedResults; + } + catch ( /** @type {any} */error) { + console.log("ERROR in parseDbResults Function =>", error.message); + return unparsedResults; + } } diff --git a/dist/package-shared/functions/backend/queues/add-queue.d.ts b/dist/package-shared/functions/backend/queues/add-queue.d.ts index d14f152..f121c8c 100644 --- a/dist/package-shared/functions/backend/queues/add-queue.d.ts +++ b/dist/package-shared/functions/backend/queues/add-queue.d.ts @@ -4,5 +4,5 @@ type Param = { userId: string | number; dummy?: boolean; }; -export default function addQueue({ queue, userId, dummy }: Param): Promise; +export default function addQueue({ queue, userId, dummy }: Param): Promise; export {}; diff --git a/dist/package-shared/functions/backend/queues/add-queue.js b/dist/package-shared/functions/backend/queues/add-queue.js index 9a805c6..d1b86d8 100644 --- a/dist/package-shared/functions/backend/queues/add-queue.js +++ b/dist/package-shared/functions/backend/queues/add-queue.js @@ -1,46 +1,29 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = addQueue; -const get_queue_1 = __importDefault(require("./get-queue")); -const crud_1 = __importDefault(require("../../../utils/data-fetching/crud")); -const numberfy_1 = __importDefault(require("../../../utils/numberfy")); -function addQueue(_a) { - return __awaiter(this, arguments, void 0, function* ({ queue, userId, dummy }) { - const tableName = "process_queue"; - const existingQueueRes = dummy - ? undefined - : (yield (0, get_queue_1.default)({ +import getQueue from "./get-queue"; +import dsqlCrud from "../../../utils/data-fetching/crud"; +import numberfy from "../../../utils/numberfy"; +export default async function addQueue({ queue, userId, dummy }) { + const tableName = "process_queue"; + const existingQueueRes = dummy + ? undefined + : (await getQueue({ + query: { query: { - query: { - user_id: { - value: String(userId), - }, - job_type: { - value: String(queue.job_type), - }, + user_id: { + value: String(userId), + }, + job_type: { + value: String(queue.job_type), }, }, - })); - const existingQueue = existingQueueRes === null || existingQueueRes === void 0 ? 
void 0 : existingQueueRes[0]; - if ((existingQueue === null || existingQueue === void 0 ? void 0 : existingQueue.id) && !dummy) - return undefined; - const addQueueRes = yield (0, crud_1.default)({ - action: "insert", - table: tableName, - data: Object.assign(Object.assign({}, queue), { user_id: (0, numberfy_1.default)(userId) }), - }); - return addQueueRes; + }, + })); + const existingQueue = existingQueueRes === null || existingQueueRes === void 0 ? void 0 : existingQueueRes[0]; + if ((existingQueue === null || existingQueue === void 0 ? void 0 : existingQueue.id) && !dummy) + return undefined; + const addQueueRes = await dsqlCrud({ + action: "insert", + table: tableName, + data: Object.assign(Object.assign({}, queue), { user_id: numberfy(userId) }), }); + return addQueueRes; } diff --git a/dist/package-shared/functions/backend/queues/delete-queue.js b/dist/package-shared/functions/backend/queues/delete-queue.js index 37bfb34..fe85f98 100644 --- a/dist/package-shared/functions/backend/queues/delete-queue.js +++ b/dist/package-shared/functions/backend/queues/delete-queue.js @@ -1,31 +1,14 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = deleteQueue; -const crud_1 = __importDefault(require("../../../utils/data-fetching/crud")); -const get_queue_1 = __importDefault(require("./get-queue")); -function deleteQueue(_a) { - return __awaiter(this, arguments, void 0, function* ({ queueId, userId }) { - const tableName = "process_queue"; - const existingQueue = (yield (0, get_queue_1.default)({ userId, queueId })); - if (!(existingQueue === null || existingQueue === void 0 ? void 0 : existingQueue.id)) - return false; - const deleteQueueRes = yield (0, crud_1.default)({ - action: "delete", - table: tableName, - targetId: existingQueue.id, - }); - return Boolean(deleteQueueRes === null || deleteQueueRes === void 0 ? void 0 : deleteQueueRes.success); +import dsqlCrud from "../../../utils/data-fetching/crud"; +import getQueue from "./get-queue"; +export default async function deleteQueue({ queueId, userId }) { + const tableName = "process_queue"; + const existingQueue = (await getQueue({ userId, queueId })); + if (!(existingQueue === null || existingQueue === void 0 ? void 0 : existingQueue.id)) + return false; + const deleteQueueRes = await dsqlCrud({ + action: "delete", + table: tableName, + targetId: existingQueue.id, }); + return Boolean(deleteQueueRes === null || deleteQueueRes === void 0 ? 
void 0 : deleteQueueRes.success); } diff --git a/dist/package-shared/functions/backend/queues/get-queue.js b/dist/package-shared/functions/backend/queues/get-queue.js index cc355d0..92f0738 100644 --- a/dist/package-shared/functions/backend/queues/get-queue.js +++ b/dist/package-shared/functions/backend/queues/get-queue.js @@ -1,35 +1,18 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = getQueue; -const crud_1 = __importDefault(require("../../../utils/data-fetching/crud")); -function getQueue(_a) { - return __awaiter(this, arguments, void 0, function* ({ queueId, userId, query, single, }) { - const tableName = "process_queue"; - let queryQuery = {}; - if (queueId) { - queryQuery = Object.assign(Object.assign({}, queryQuery), { id: { value: String(queueId) } }); - } - if (userId) { - queryQuery = Object.assign(Object.assign({}, queryQuery), { user_id: { value: String(userId) } }); - } - const getQueue = yield (0, crud_1.default)({ - action: "get", - table: tableName, - query: Object.assign(Object.assign({}, query), { query: Object.assign(Object.assign({}, query === null || query === void 0 ? void 0 : query.query), queryQuery) }), - }); - const queuePayload = getQueue === null || getQueue === void 0 ? void 0 : getQueue.payload; - return queueId || single ? queuePayload === null || queuePayload === void 0 ? void 0 : queuePayload[0] : queuePayload; +import dsqlCrud from "../../../utils/data-fetching/crud"; +export default async function getQueue({ queueId, userId, query, single, }) { + const tableName = "process_queue"; + let queryQuery = {}; + if (queueId) { + queryQuery = Object.assign(Object.assign({}, queryQuery), { id: { value: String(queueId) } }); + } + if (userId) { + queryQuery = Object.assign(Object.assign({}, queryQuery), { user_id: { value: String(userId) } }); + } + const getQueue = await dsqlCrud({ + action: "get", + table: tableName, + query: Object.assign(Object.assign({}, query), { query: Object.assign(Object.assign({}, query === null || query === void 0 ? void 0 : query.query), queryQuery) }), }); + const queuePayload = getQueue === null || getQueue === void 0 ? void 0 : getQueue.payload; + return queueId || single ? queuePayload === null || queuePayload === void 0 ? 
void 0 : queuePayload[0] : queuePayload; } diff --git a/dist/package-shared/functions/backend/queues/update-queue.js b/dist/package-shared/functions/backend/queues/update-queue.js index fad00b6..8780741 100644 --- a/dist/package-shared/functions/backend/queues/update-queue.js +++ b/dist/package-shared/functions/backend/queues/update-queue.js @@ -1,28 +1,11 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = updateQueue; -const crud_1 = __importDefault(require("../../../utils/data-fetching/crud")); -function updateQueue(_a) { - return __awaiter(this, arguments, void 0, function* ({ queueId, queue }) { - const tableName = "process_queue"; - const updateQueueRes = yield (0, crud_1.default)({ - action: "update", - table: tableName, - targetId: queueId, - data: queue, - }); - return Boolean(updateQueueRes === null || updateQueueRes === void 0 ? void 0 : updateQueueRes.success); +import dsqlCrud from "../../../utils/data-fetching/crud"; +export default async function updateQueue({ queueId, queue }) { + const tableName = "process_queue"; + const updateQueueRes = await dsqlCrud({ + action: "update", + table: tableName, + targetId: queueId, + data: queue, }); + return Boolean(updateQueueRes === null || updateQueueRes === void 0 ? void 0 : updateQueueRes.success); } diff --git a/dist/package-shared/functions/backend/serverError.js b/dist/package-shared/functions/backend/serverError.js index 2bc28a4..924f9c0 100644 --- a/dist/package-shared/functions/backend/serverError.js +++ b/dist/package-shared/functions/backend/serverError.js @@ -1,76 +1,59 @@ -"use strict"; // @ts-check -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = serverError; -const fs_1 = __importDefault(require("fs")); +import fs from "fs"; /** * # Server Error */ -function serverError(_a) { - return __awaiter(this, arguments, void 0, function* ({ user, message, component, noMail, req, }) { - const date = new Date(); - const reqIp = (() => { - if (!req) - return null; - try { - const forwarded = req.headers["x-forwarded-for"]; - const realIp = req.headers["x-real-ip"]; - const cloudflareIp = req.headers["cf-connecting-ip"]; - // Convert forwarded IPs to string and get the first IP if multiple exist - const forwardedIp = Array.isArray(forwarded) - ? forwarded[0] - : forwarded === null || forwarded === void 0 ? void 0 : forwarded.split(",")[0]; - const clientIp = cloudflareIp || - forwardedIp || - realIp || - req.socket.remoteAddress; - if (!clientIp) - return null; - return String(clientIp); - } - catch (error) { - return null; - } - })(); +export default async function serverError({ user, message, component, noMail, req, }) { + const date = new Date(); + const reqIp = (() => { + if (!req) + return null; try { - let log = `🚀 SERVER ERROR ===========================\nError Message: ${message}\nComponent: ${component}`; - if ((user === null || user === void 0 ? void 0 : user.id) && (user === null || user === void 0 ? void 0 : user.first_name) && (user === null || user === void 0 ? void 0 : user.last_name) && (user === null || user === void 0 ? void 0 : user.email)) { - log += `\nUser Id: ${user === null || user === void 0 ? void 0 : user.id}\nUser Name: ${user === null || user === void 0 ? void 0 : user.first_name} ${user === null || user === void 0 ? void 0 : user.last_name}\nUser Email: ${user === null || user === void 0 ? void 0 : user.email}`; - } - if (req === null || req === void 0 ? void 0 : req.url) { - log += `\nURL: ${req.url}`; - } - if (req === null || req === void 0 ? void 0 : req.body) { - log += `\nRequest Body: ${JSON.stringify(req.body, null, 4)}`; - } - if (reqIp) { - log += `\nIP: ${reqIp}`; - } - log += `\nDate: ${date.toDateString()}`; - log += "\n========================================"; - if (!fs_1.default.existsSync(`./.tmp/error.log`)) { - fs_1.default.writeFileSync(`./.tmp/error.log`, "", "utf-8"); - } - const initialText = fs_1.default.readFileSync(`./.tmp/error.log`, "utf-8"); - fs_1.default.writeFileSync(`./.tmp/error.log`, log); - fs_1.default.appendFileSync(`./.tmp/error.log`, `\n\n\n\n\n${initialText}`); + const forwarded = req.headers["x-forwarded-for"]; + const realIp = req.headers["x-real-ip"]; + const cloudflareIp = req.headers["cf-connecting-ip"]; + // Convert forwarded IPs to string and get the first IP if multiple exist + const forwardedIp = Array.isArray(forwarded) + ? forwarded[0] + : forwarded === null || forwarded === void 0 ? void 0 : forwarded.split(",")[0]; + const clientIp = cloudflareIp || + forwardedIp || + realIp || + req.socket.remoteAddress; + if (!clientIp) + return null; + return String(clientIp); } - catch ( /** @type {any} */error) { - console.log("Server Error Reporting Error:", error.message); + catch (error) { + return null; } - }); + })(); + try { + let log = `🚀 SERVER ERROR ===========================\nError Message: ${message}\nComponent: ${component}`; + if ((user === null || user === void 0 ? void 0 : user.id) && (user === null || user === void 0 ? void 0 : user.first_name) && (user === null || user === void 0 ? void 0 : user.last_name) && (user === null || user === void 0 ? 
void 0 : user.email)) { + log += `\nUser Id: ${user === null || user === void 0 ? void 0 : user.id}\nUser Name: ${user === null || user === void 0 ? void 0 : user.first_name} ${user === null || user === void 0 ? void 0 : user.last_name}\nUser Email: ${user === null || user === void 0 ? void 0 : user.email}`; + } + if (req === null || req === void 0 ? void 0 : req.url) { + log += `\nURL: ${req.url}`; + } + if (req === null || req === void 0 ? void 0 : req.body) { + log += `\nRequest Body: ${JSON.stringify(req.body, null, 4)}`; + } + if (reqIp) { + log += `\nIP: ${reqIp}`; + } + log += `\nDate: ${date.toDateString()}`; + log += "\n========================================"; + if (!fs.existsSync(`./.tmp/error.log`)) { + fs.writeFileSync(`./.tmp/error.log`, "", "utf-8"); + } + const initialText = fs.readFileSync(`./.tmp/error.log`, "utf-8"); + fs.writeFileSync(`./.tmp/error.log`, log); + fs.appendFileSync(`./.tmp/error.log`, `\n\n\n\n\n${initialText}`); + } + catch ( /** @type {any} */error) { + console.log("Server Error Reporting Error:", error.message); + } } //////////////////////////////////////// //////////////////////////////////////// diff --git a/dist/package-shared/functions/backend/setUserSchemaData.d.ts b/dist/package-shared/functions/backend/setUserSchemaData.d.ts deleted file mode 100644 index 38fd4fb..0000000 --- a/dist/package-shared/functions/backend/setUserSchemaData.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { DSQL_DatabaseSchemaType } from "../../types"; -type Param = { - userId: string | number; - schemaData: DSQL_DatabaseSchemaType[]; -}; -/** - * # Set User Schema Data - */ -export default function setUserSchemaData({ userId, schemaData, }: Param): boolean; -export {}; diff --git a/dist/package-shared/functions/backend/setUserSchemaData.js b/dist/package-shared/functions/backend/setUserSchemaData.js deleted file mode 100644 index f573ee7..0000000 --- a/dist/package-shared/functions/backend/setUserSchemaData.js +++ /dev/null @@ -1,31 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = setUserSchemaData; -const serverError_1 = __importDefault(require("./serverError")); -const fs_1 = __importDefault(require("fs")); -const grab_dir_names_1 = __importDefault(require("../../utils/backend/names/grab-dir-names")); -/** - * # Set User Schema Data - */ -function setUserSchemaData({ userId, schemaData, }) { - var _a; - try { - const { userSchemaMainJSONFilePath } = (0, grab_dir_names_1.default)({ userId }); - if (!userSchemaMainJSONFilePath) { - throw new Error(`No User Schema JSON found!`); - } - fs_1.default.writeFileSync(userSchemaMainJSONFilePath, JSON.stringify(schemaData), "utf8"); - return true; - } - catch (error) { - (0, serverError_1.default)({ - component: "/functions/backend/setUserSchemaData", - message: error.message, - }); - (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? 
void 0 : _a.call(global, `Error Setting User Schema`, error); - return false; - } -} diff --git a/dist/package-shared/functions/backend/su-db-handler.d.ts b/dist/package-shared/functions/backend/su-db-handler.d.ts new file mode 100644 index 0000000..165d22c --- /dev/null +++ b/dist/package-shared/functions/backend/su-db-handler.d.ts @@ -0,0 +1,9 @@ +import { UserType } from "../../types"; +type Params = { + query?: string; + values?: any[]; + database?: string; + user: UserType; +}; +export default function suDbHandler({ query, database, user, values, }: Params): Promise; +export {}; diff --git a/dist/package-shared/functions/backend/su-db-handler.js b/dist/package-shared/functions/backend/su-db-handler.js new file mode 100644 index 0000000..d8c0340 --- /dev/null +++ b/dist/package-shared/functions/backend/su-db-handler.js @@ -0,0 +1,15 @@ +import mysql from "serverless-mysql"; +import connDbHandler from "../../utils/db/conn-db-handler"; +export default async function suDbHandler({ query, database, user, values, }) { + const connection = mysql({ + config: { + host: process.env.DSQL_DB_HOST, + user: process.env.DSQL_DB_USERNAME, + password: process.env.DSQL_DB_PASSWORD, + database: database, + charset: "utf8mb4", + }, + }); + const results = await connDbHandler(connection, query); + return results; +} diff --git a/dist/package-shared/functions/backend/suSocketAuth.js b/dist/package-shared/functions/backend/suSocketAuth.js index 00b8001..5894ab9 100644 --- a/dist/package-shared/functions/backend/suSocketAuth.js +++ b/dist/package-shared/functions/backend/suSocketAuth.js @@ -1,47 +1,30 @@ -"use strict"; // @ts-check -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = default_1; -const parseCookies_1 = __importDefault(require("../../utils/backend/parseCookies")); -const decrypt_1 = __importDefault(require("../dsql/decrypt")); -const get_auth_cookie_names_1 = __importDefault(require("./cookies/get-auth-cookie-names")); -function default_1(req) { - return __awaiter(this, void 0, void 0, function* () { - const { keyCookieName, csrfCookieName } = (0, get_auth_cookie_names_1.default)(); - const suKeyName = `${keyCookieName}_su`; - const cookies = (0, parseCookies_1.default)({ request: req }); - if (!(cookies === null || cookies === void 0 ? 
void 0 : cookies[suKeyName])) { - return null; - } - /** ********************* Grab the payload */ - let userPayload = (0, decrypt_1.default)({ - encryptedString: cookies[suKeyName], - }); - /** ********************* Return if no payload */ - if (!userPayload) - return null; - /** ********************* Parse the payload */ - let userObject = JSON.parse(userPayload); - if (userObject.password !== process.env.DSQL_USER_KEY) - return null; - if (userObject.authKey !== process.env.DSQL_SPECIAL_KEY) - return null; - //////////////////////////////////////// - //////////////////////////////////////// - //////////////////////////////////////// - /** ********************* return user object */ - return userObject; +import parseCookies from "../../utils/backend/parseCookies"; +import decrypt from "../dsql/decrypt"; +import getAuthCookieNames from "./cookies/get-auth-cookie-names"; +export default async function (req) { + const { keyCookieName, csrfCookieName } = getAuthCookieNames(); + const suKeyName = `${keyCookieName}_su`; + const cookies = parseCookies({ request: req }); + if (!(cookies === null || cookies === void 0 ? void 0 : cookies[suKeyName])) { + return null; + } + /** ********************* Grab the payload */ + let userPayload = decrypt({ + encryptedString: cookies[suKeyName], }); + /** ********************* Return if no payload */ + if (!userPayload) + return null; + /** ********************* Parse the payload */ + let userObject = JSON.parse(userPayload); + if (userObject.password !== process.env.DSQL_USER_KEY) + return null; + if (userObject.authKey !== process.env.DSQL_SPECIAL_KEY) + return null; + //////////////////////////////////////// + //////////////////////////////////////// + //////////////////////////////////////// + /** ********************* return user object */ + return userObject; } diff --git a/dist/package-shared/functions/backend/updateUsersTableSchema.d.ts b/dist/package-shared/functions/backend/updateUsersTableSchema.d.ts index 896c89e..594533d 100644 --- a/dist/package-shared/functions/backend/updateUsersTableSchema.d.ts +++ b/dist/package-shared/functions/backend/updateUsersTableSchema.d.ts @@ -5,9 +5,10 @@ type Param = { newPayload?: { [s: string]: any; }; + dbId: string | number; }; /** * # Add User Table to Database */ -export default function updateUsersTableSchema({ userId, database, newFields, newPayload, }: Param): Promise; +export default function updateUsersTableSchema({ userId, database, newFields, newPayload, dbId, }: Param): Promise; export {}; diff --git a/dist/package-shared/functions/backend/updateUsersTableSchema.js b/dist/package-shared/functions/backend/updateUsersTableSchema.js index 8a7099e..112208c 100644 --- a/dist/package-shared/functions/backend/updateUsersTableSchema.js +++ b/dist/package-shared/functions/backend/updateUsersTableSchema.js @@ -1,64 +1,46 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = updateUsersTableSchema; -const serverError_1 = __importDefault(require("./serverError")); -const grabUserSchemaData_1 = __importDefault(require("./grabUserSchemaData")); -const setUserSchemaData_1 = __importDefault(require("./setUserSchemaData")); -const createDbFromSchema_1 = __importDefault(require("../../shell/createDbFromSchema")); -const grabSchemaFieldsFromData_1 = __importDefault(require("./grabSchemaFieldsFromData")); +import serverError from "./serverError"; +import createDbFromSchema from "../../shell/createDbFromSchema"; +import grabSchemaFieldsFromData from "./grabSchemaFieldsFromData"; +import { grabPrimaryRequiredDbSchema, writeUpdatedDbSchema, } from "../../shell/createDbFromSchema/grab-required-database-schemas"; /** * # Add User Table to Database */ -function updateUsersTableSchema(_a) { - return __awaiter(this, arguments, void 0, function* ({ userId, database, newFields, newPayload, }) { - var _b, _c, _d; - try { - const dbFullName = database; - const userSchemaData = (0, grabUserSchemaData_1.default)({ userId }); - if (!userSchemaData) - throw new Error("User schema data not found!"); - let targetDatabaseIndex = userSchemaData.findIndex((db) => db.dbFullName === database); - if (targetDatabaseIndex < 0) { - throw new Error("Couldn't Find Target Database!"); - } - let existingTableIndex = (_b = userSchemaData[targetDatabaseIndex]) === null || _b === void 0 ? void 0 : _b.tables.findIndex((table) => table.tableName === "users"); - const usersTable = userSchemaData[targetDatabaseIndex].tables[existingTableIndex]; - if (!((_c = usersTable === null || usersTable === void 0 ? void 0 : usersTable.fields) === null || _c === void 0 ? void 0 : _c[0])) - throw new Error("Users Table Not Found!"); - const additionalFields = (0, grabSchemaFieldsFromData_1.default)({ - fields: newFields, - data: newPayload, - }); - const spliceStartIndex = usersTable.fields.findIndex((field) => field.fieldName === "date_created"); - const finalSpliceStartIndex = spliceStartIndex >= 0 ? spliceStartIndex : 0; - usersTable.fields.splice(finalSpliceStartIndex, 0, ...additionalFields); - (0, setUserSchemaData_1.default)({ schemaData: userSchemaData, userId }); - const dbShellUpdate = yield (0, createDbFromSchema_1.default)({ - userId, - targetDatabase: dbFullName, - }); - return `Done!`; +export default async function updateUsersTableSchema({ userId, database, newFields, newPayload, dbId, }) { + var _a, _b; + try { + const dbFullName = database; + let targetDatabase = grabPrimaryRequiredDbSchema({ + dbId, + userId, + }); + if (!targetDatabase) { + throw new Error("Couldn't Find Target Database!"); } - catch (error) { - (_d = global.ERROR_CALLBACK) === null || _d === void 0 ? void 0 : _d.call(global, `Error Updating User Table Schema`, error); - (0, serverError_1.default)({ - component: "addUsersTableToDb", - message: error.message, - user: { id: userId }, - }); - return error.message; - } - }); + let existingTableIndex = targetDatabase === null || targetDatabase === void 0 ? 
void 0 : targetDatabase.tables.findIndex((table) => table.tableName === "users"); + const usersTable = targetDatabase.tables[existingTableIndex]; + if (!((_a = usersTable === null || usersTable === void 0 ? void 0 : usersTable.fields) === null || _a === void 0 ? void 0 : _a[0])) + throw new Error("Users Table Not Found!"); + const additionalFields = grabSchemaFieldsFromData({ + fields: newFields, + data: newPayload, + }); + const spliceStartIndex = usersTable.fields.findIndex((field) => field.fieldName === "date_created"); + const finalSpliceStartIndex = spliceStartIndex >= 0 ? spliceStartIndex : 0; + usersTable.fields.splice(finalSpliceStartIndex, 0, ...additionalFields); + writeUpdatedDbSchema({ dbSchema: targetDatabase, userId }); + const dbShellUpdate = await createDbFromSchema({ + userId, + targetDatabase: dbFullName, + }); + return `Done!`; + } + catch (error) { + (_b = global.ERROR_CALLBACK) === null || _b === void 0 ? void 0 : _b.call(global, `Error Updating User Table Schema`, error); + serverError({ + component: "addUsersTableToDb", + message: error.message, + user: { id: userId }, + }); + return error.message; + } } diff --git a/dist/package-shared/functions/backend/user-db-handler.d.ts b/dist/package-shared/functions/backend/user-db-handler.d.ts new file mode 100644 index 0000000..28183d0 --- /dev/null +++ b/dist/package-shared/functions/backend/user-db-handler.d.ts @@ -0,0 +1,11 @@ +import { DSQL_TableSchemaType, UserType } from "../../types"; +type Params = { + query?: string; + values?: any[]; + database?: string; + tableSchema?: DSQL_TableSchemaType; + debug?: boolean; + user: UserType; +}; +export default function userDbHandler({ query, user, database, debug, tableSchema, values, }: Params): Promise; +export {}; diff --git a/dist/package-shared/functions/backend/user-db-handler.js b/dist/package-shared/functions/backend/user-db-handler.js new file mode 100644 index 0000000..407ef45 --- /dev/null +++ b/dist/package-shared/functions/backend/user-db-handler.js @@ -0,0 +1,17 @@ +import mysql from "serverless-mysql"; +import grabMariadbMainUserForUser from "./grab-mariadb-main-user-for-user"; +import connDbHandler from "../../utils/db/conn-db-handler"; +export default async function userDbHandler({ query, user, database, debug, tableSchema, values, }) { + const { fullName, host, username, password } = await grabMariadbMainUserForUser({ user }); + const connection = mysql({ + config: { + host, + user: username, + password: password, + database: database, + charset: "utf8mb4", + }, + }); + const results = await connDbHandler(connection, query); + return results; +} diff --git a/dist/package-shared/functions/backend/varDatabaseDbHandler.d.ts b/dist/package-shared/functions/backend/varDatabaseDbHandler.d.ts index 07da850..4c977ac 100644 --- a/dist/package-shared/functions/backend/varDatabaseDbHandler.d.ts +++ b/dist/package-shared/functions/backend/varDatabaseDbHandler.d.ts @@ -1,8 +1,9 @@ +import { DSQL_TableSchemaType } from "../../types"; type Param = { queryString: string; queryValuesArray?: any[]; database?: string; - tableSchema?: import("../../types").DSQL_TableSchemaType; + tableSchema?: DSQL_TableSchemaType; debug?: boolean; }; /** diff --git a/dist/package-shared/functions/backend/varDatabaseDbHandler.js b/dist/package-shared/functions/backend/varDatabaseDbHandler.js index 2eface8..234d919 100644 --- a/dist/package-shared/functions/backend/varDatabaseDbHandler.js +++ b/dist/package-shared/functions/backend/varDatabaseDbHandler.js @@ -1,66 +1,49 @@ -"use strict"; -var 
__awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = varDatabaseDbHandler; -const parseDbResults_1 = __importDefault(require("./parseDbResults")); -const serverError_1 = __importDefault(require("./serverError")); -const grab_dsql_connection_1 = __importDefault(require("../../utils/grab-dsql-connection")); -const conn_db_handler_1 = __importDefault(require("../../utils/db/conn-db-handler")); +import parseDbResults from "./parseDbResults"; +import serverError from "./serverError"; +import grabDSQLConnection from "../../utils/grab-dsql-connection"; +import connDbHandler from "../../utils/db/conn-db-handler"; /** * # DB handler for specific database */ -function varDatabaseDbHandler(_a) { - return __awaiter(this, arguments, void 0, function* ({ queryString, queryValuesArray, database, tableSchema, debug, }) { - var _b; - let CONNECTION = (0, grab_dsql_connection_1.default)({ fa: true }); - if (global.DSQL_USE_LOCAL) - CONNECTION = (0, grab_dsql_connection_1.default)({ local: true }); - if (database === null || database === void 0 ? void 0 : database.match(/^datasquirel$/)) - CONNECTION = (0, grab_dsql_connection_1.default)(); - if (debug) { - console.log(`varDatabaseDbHandler:query:`, queryString); - console.log(`varDatabaseDbHandler:values:`, queryValuesArray); - console.log(`varDatabaseDbHandler:CONNECTION:`, CONNECTION.getConfig()); +export default async function varDatabaseDbHandler({ queryString, queryValuesArray, database, tableSchema, debug, }) { + var _a; + let CONNECTION = grabDSQLConnection({ fa: true }); + if (global.DSQL_USE_LOCAL) + CONNECTION = grabDSQLConnection({ local: true }); + if (database === null || database === void 0 ? 
void 0 : database.match(/^datasquirel$/)) + CONNECTION = grabDSQLConnection(); + if (debug) { + console.log(`varDatabaseDbHandler:query:`, queryString); + console.log(`varDatabaseDbHandler:values:`, queryValuesArray); + console.log(`varDatabaseDbHandler:CONNECTION:`, CONNECTION.getConfig()); + } + let results = await connDbHandler(CONNECTION, queryString, queryValuesArray); + if (debug) { + console.log(`varDatabaseDbHandler:results:`, results); + } + if (results && tableSchema) { + try { + const unparsedResults = results; + const parsedResults = await parseDbResults({ + unparsedResults: unparsedResults, + tableSchema: tableSchema, + }); + return parsedResults; } - let results = yield (0, conn_db_handler_1.default)(CONNECTION, queryString, queryValuesArray); - if (debug) { - console.log(`varDatabaseDbHandler:results:`, results); - } - if (results && tableSchema) { - try { - const unparsedResults = results; - const parsedResults = yield (0, parseDbResults_1.default)({ - unparsedResults: unparsedResults, - tableSchema: tableSchema, - }); - return parsedResults; - } - catch (error) { - console.log("\x1b[31mvarDatabaseDbHandler ERROR\x1b[0m =>", database, error); - (_b = global.ERROR_CALLBACK) === null || _b === void 0 ? void 0 : _b.call(global, `VarDb Handler Error`, error); - (0, serverError_1.default)({ - component: "varDatabaseDbHandler/lines-52-53", - message: error.message, - }); - return null; - } - } - else if (results) { - return results; - } - else { + catch (error) { + console.log("\x1b[31mvarDatabaseDbHandler ERROR\x1b[0m =>", database, error); + (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `VarDb Handler Error`, error); + serverError({ + component: "varDatabaseDbHandler/lines-52-53", + message: error.message, + }); return null; } - }); + } + else if (results) { + return results; + } + else { + return null; + } } diff --git a/dist/package-shared/functions/backend/varReadOnlyDatabaseDbHandler.js b/dist/package-shared/functions/backend/varReadOnlyDatabaseDbHandler.js index f71c407..26fb9fa 100644 --- a/dist/package-shared/functions/backend/varReadOnlyDatabaseDbHandler.js +++ b/dist/package-shared/functions/backend/varReadOnlyDatabaseDbHandler.js @@ -1,57 +1,40 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = varReadOnlyDatabaseDbHandler; -const serverError_1 = __importDefault(require("./serverError")); -const parseDbResults_1 = __importDefault(require("./parseDbResults")); -const conn_db_handler_1 = __importDefault(require("../../utils/db/conn-db-handler")); +import serverError from "./serverError"; +import parseDbResults from "./parseDbResults"; +import connDbHandler from "../../utils/db/conn-db-handler"; /** * # Read Only Db Handler with Varaibles * @returns */ -function varReadOnlyDatabaseDbHandler(_a) { - return __awaiter(this, arguments, void 0, function* ({ queryString, queryValuesArray, tableSchema, forceLocal, }) { - var _b; - let results; - const DB_CONN = forceLocal - ? global.DSQL_DB_CONN - : global.DSQL_READ_ONLY_DB_CONN || global.DSQL_DB_CONN; - try { - results = yield (0, conn_db_handler_1.default)(DB_CONN, queryString, queryValuesArray); - } - catch (error) { - (0, serverError_1.default)({ - component: "varReadOnlyDatabaseDbHandler", - message: error.message, - noMail: true, - }); - (_b = global.ERROR_CALLBACK) === null || _b === void 0 ? void 0 : _b.call(global, `VarReadOnlyDB Handler Error`, error); - return error.message; - } - finally { - DB_CONN === null || DB_CONN === void 0 ? void 0 : DB_CONN.end(); - } - if (results) { - const unparsedResults = results; - const parsedResults = yield (0, parseDbResults_1.default)({ - unparsedResults: unparsedResults, - tableSchema: tableSchema, - }); - return parsedResults; - } - else { - return null; - } - }); +export default async function varReadOnlyDatabaseDbHandler({ queryString, queryValuesArray, tableSchema, forceLocal, }) { + var _a; + let results; + const DB_CONN = forceLocal + ? global.DSQL_DB_CONN + : global.DSQL_READ_ONLY_DB_CONN || global.DSQL_DB_CONN; + try { + results = await connDbHandler(DB_CONN, queryString, queryValuesArray); + } + catch (error) { + serverError({ + component: "varReadOnlyDatabaseDbHandler", + message: error.message, + noMail: true, + }); + (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `VarReadOnlyDB Handler Error`, error); + return error.message; + } + finally { + DB_CONN === null || DB_CONN === void 0 ? void 0 : DB_CONN.end(); + } + if (results) { + const unparsedResults = results; + const parsedResults = await parseDbResults({ + unparsedResults: unparsedResults, + tableSchema: tableSchema, + }); + return parsedResults; + } + else { + return null; + } } diff --git a/dist/package-shared/functions/dsql/db-schema-to-type.js b/dist/package-shared/functions/dsql/db-schema-to-type.js index e726e13..2042d72 100644 --- a/dist/package-shared/functions/dsql/db-schema-to-type.js +++ b/dist/package-shared/functions/dsql/db-schema-to-type.js @@ -1,25 +1,18 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = dbSchemaToType; -const fs_1 = __importDefault(require("fs")); -const grab_dir_names_1 = __importDefault(require("../../utils/backend/names/grab-dir-names")); -const lodash_1 = __importDefault(require("lodash")); -const ejson_1 = __importDefault(require("../../utils/ejson")); -const generate_type_definitions_1 = __importDefault(require("./generate-type-definitions")); -const path_1 = __importDefault(require("path")); -function dbSchemaToType(params) { +import fs from "fs"; +import grabDirNames from "../../utils/backend/names/grab-dir-names"; +import _ from "lodash"; +import EJSON from "../../utils/ejson"; +import generateTypeDefinition from "./generate-type-definitions"; +import { AppNames } from "../../dict/app-names"; +export default function dbSchemaToType(params) { var _a, _b; let datasquirelSchema; - const defaultTableFieldsJSONFilePath = path_1.default.resolve(__dirname, "../../data/defaultFields.json"); + const { mainShemaJSONFilePath, defaultTableFieldsJSONFilePath } = grabDirNames(); if (params === null || params === void 0 ? void 0 : params.dbSchema) { datasquirelSchema = params.dbSchema; } else { - const { mainShemaJSONFilePath } = (0, grab_dir_names_1.default)(); - const mainSchema = ejson_1.default.parse(fs_1.default.readFileSync(mainShemaJSONFilePath, "utf-8")); + const mainSchema = EJSON.parse(fs.readFileSync(mainShemaJSONFilePath, "utf-8")); datasquirelSchema = mainSchema.find((sch) => sch.dbFullName == "datasquirel"); } if (!datasquirelSchema) @@ -27,10 +20,10 @@ function dbSchemaToType(params) { let tableNames = `export const DsqlTables = [\n${datasquirelSchema.tables .map((tbl) => ` "${tbl.tableName}",`) .join("\n")}\n] as const`; - const defaultFields = ejson_1.default.parse(fs_1.default.readFileSync(defaultTableFieldsJSONFilePath, "utf-8")); + const defaultFields = EJSON.parse(fs.readFileSync(defaultTableFieldsJSONFilePath, "utf-8")); const dbTablesSchemas = datasquirelSchema.tables.map((tblSchm) => { - let newDefaultFields = lodash_1.default.cloneDeep(defaultFields); - return Object.assign(Object.assign({}, tblSchm), { fields: (params === null || params === void 0 ? void 0 : params.dbSchema) + let newDefaultFields = _.cloneDeep(defaultFields); + return Object.assign(Object.assign({}, tblSchm), { fields: tblSchm.fields.find((fld) => fld.fieldName == "id") ? tblSchm.fields : [ newDefaultFields.shift(), @@ -40,9 +33,9 @@ function dbSchemaToType(params) { ] }); }); const defDbName = (_b = (datasquirelSchema.dbName || - ((_a = datasquirelSchema.dbFullName) === null || _a === void 0 ? void 0 : _a.replace(/datasquirel_user_\d+_/, "")))) === null || _b === void 0 ? void 0 : _b.toUpperCase().replace(/ /g, "_"); + ((_a = datasquirelSchema.dbFullName) === null || _a === void 0 ? void 0 : _a.replace(new RegExp(`${AppNames["DsqlDbPrefix"]}\\d+_`), "")))) === null || _b === void 0 ? 
void 0 : _b.toUpperCase().replace(/ /g, "_"); const schemas = dbTablesSchemas - .map((table) => (0, generate_type_definitions_1.default)({ + .map((table) => generateTypeDefinition({ paradigm: "TypeScript", table, typeDefName: `DSQL_${defDbName}_${table.tableName.toUpperCase()}`, diff --git a/dist/package-shared/functions/dsql/decrypt.js b/dist/package-shared/functions/dsql/decrypt.js index becf4fe..2df37f9 100644 --- a/dist/package-shared/functions/dsql/decrypt.js +++ b/dist/package-shared/functions/dsql/decrypt.js @@ -1,23 +1,17 @@ -"use strict"; // @ts-check -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = decrypt; -const crypto_1 = require("crypto"); -const buffer_1 = require("buffer"); -const grab_keys_1 = __importDefault(require("../../utils/grab-keys")); +import { scryptSync, createDecipheriv } from "crypto"; +import { Buffer } from "buffer"; +import grabKeys from "../../utils/grab-keys"; /** * # Decrypt Function */ -function decrypt({ encryptedString, encryptionKey, encryptionSalt, }) { +export default function decrypt({ encryptedString, encryptionKey, encryptionSalt, }) { var _a; if (!(encryptedString === null || encryptedString === void 0 ? void 0 : encryptedString.match(/./))) { console.log("Encrypted string is invalid"); return encryptedString; } - const { key: encrptKey, salt, keyLen, algorithm, bufferAllocSize, } = (0, grab_keys_1.default)({ encryptionKey }); + const { key: encrptKey, salt, keyLen, algorithm, bufferAllocSize, } = grabKeys({ encryptionKey }); if (!(encrptKey === null || encrptKey === void 0 ? void 0 : encrptKey.match(/.{8,}/))) { console.log("Decrption key is invalid"); return encryptedString; @@ -26,9 +20,9 @@ function decrypt({ encryptedString, encryptionKey, encryptionSalt, }) { console.log("Decrption salt is invalid"); return encryptedString; } - let key = (0, crypto_1.scryptSync)(encrptKey, salt, keyLen); - let iv = buffer_1.Buffer.alloc(bufferAllocSize, 0); - const decipher = (0, crypto_1.createDecipheriv)(algorithm, key, iv); + let key = scryptSync(encrptKey, salt, keyLen); + let iv = Buffer.alloc(bufferAllocSize, 0); + const decipher = createDecipheriv(algorithm, key, iv); try { let decrypted = decipher.update(encryptedString, "hex", "utf8"); decrypted += decipher.final("utf8"); @@ -36,6 +30,7 @@ function decrypt({ encryptedString, encryptionKey, encryptionSalt, }) { } catch (error) { console.log("Error in decrypting =>", error.message); + console.log("encryptedString =>", encryptedString); (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? 
void 0 : _a.call(global, `Error Decrypting data`, error); return encryptedString; } diff --git a/dist/package-shared/functions/dsql/default-fields-regexp.d.ts b/dist/package-shared/functions/dsql/default-fields-regexp.d.ts index 9a1f5b4..f9e698e 100644 --- a/dist/package-shared/functions/dsql/default-fields-regexp.d.ts +++ b/dist/package-shared/functions/dsql/default-fields-regexp.d.ts @@ -1,7 +1,7 @@ /** - * Check for user in local storage + * Regular expression to match default fields * - * @description Preventdefault, declare variables + * @description Regular expression to match default fields */ declare const defaultFieldsRegexp: RegExp; export default defaultFieldsRegexp; diff --git a/dist/package-shared/functions/dsql/default-fields-regexp.js b/dist/package-shared/functions/dsql/default-fields-regexp.js index 239e5bd..18373b5 100644 --- a/dist/package-shared/functions/dsql/default-fields-regexp.js +++ b/dist/package-shared/functions/dsql/default-fields-regexp.js @@ -1,13 +1,7 @@ -"use strict"; -// @ts-check -Object.defineProperty(exports, "__esModule", { value: true }); /** - * Check for user in local storage + * Regular expression to match default fields * - * @description Preventdefault, declare variables + * @description Regular expression to match default fields */ -const defaultFieldsRegexp = /^id$|^uuid$|^date_created$|^date_created_code$|^date_created_timestamp$|^date_updated$|^date_updated_code$|^date_updated_timestamp$/; -//////////////////////////////////////// -//////////////////////////////////////// -//////////////////////////////////////// -exports.default = defaultFieldsRegexp; +const defaultFieldsRegexp = /^id$|^uuid$|^uid$|^date_created$|^date_created_code$|^date_created_timestamp$|^date_updated$|^date_updated_code$|^date_updated_timestamp$/; +export default defaultFieldsRegexp; diff --git a/dist/package-shared/functions/dsql/encrypt.js b/dist/package-shared/functions/dsql/encrypt.js index 55efc1a..51fc737 100644 --- a/dist/package-shared/functions/dsql/encrypt.js +++ b/dist/package-shared/functions/dsql/encrypt.js @@ -1,23 +1,17 @@ -"use strict"; // @ts-check -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = encrypt; -const crypto_1 = require("crypto"); -const buffer_1 = require("buffer"); -const grab_keys_1 = __importDefault(require("../../utils/grab-keys")); +import { scryptSync, createCipheriv } from "crypto"; +import { Buffer } from "buffer"; +import grabKeys from "../../utils/grab-keys"; /** * # Encrypt String */ -function encrypt({ data, encryptionKey, encryptionSalt, }) { +export default function encrypt({ data, encryptionKey, encryptionSalt, }) { var _a; if (!(data === null || data === void 0 ? void 0 : data.match(/./))) { console.log("Encryption string is invalid"); return data; } - const { key: encrptKey, salt, keyLen, algorithm, bufferAllocSize, } = (0, grab_keys_1.default)({ encryptionKey }); + const { key: encrptKey, salt, keyLen, algorithm, bufferAllocSize, } = grabKeys({ encryptionKey }); if (!(encrptKey === null || encrptKey === void 0 ? 
void 0 : encrptKey.match(/.{8,}/))) { console.log("Encryption key is invalid"); return data; @@ -27,9 +21,9 @@ function encrypt({ data, encryptionKey, encryptionSalt, }) { return data; } const password = encrptKey; - let key = (0, crypto_1.scryptSync)(password, salt, keyLen); - let iv = buffer_1.Buffer.alloc(bufferAllocSize, 0); - const cipher = (0, crypto_1.createCipheriv)(algorithm, key, iv); + let key = scryptSync(password, salt, keyLen); + let iv = Buffer.alloc(bufferAllocSize, 0); + const cipher = createCipheriv(algorithm, key, iv); try { let encrypted = cipher.update(data, "utf8", "hex"); encrypted += cipher.final("hex"); diff --git a/dist/package-shared/functions/dsql/generate-type-definitions.d.ts b/dist/package-shared/functions/dsql/generate-type-definitions.d.ts index 5560cb9..4fa84f6 100644 --- a/dist/package-shared/functions/dsql/generate-type-definitions.d.ts +++ b/dist/package-shared/functions/dsql/generate-type-definitions.d.ts @@ -6,6 +6,7 @@ type Param = { typeDefName?: string; allValuesOptional?: boolean; addExport?: boolean; + dbName?: string; }; -export default function generateTypeDefinition({ paradigm, table, query, typeDefName, allValuesOptional, addExport, }: Param): string | null; +export default function generateTypeDefinition({ paradigm, table, query, typeDefName, allValuesOptional, addExport, dbName, }: Param): string | null; export {}; diff --git a/dist/package-shared/functions/dsql/generate-type-definitions.js b/dist/package-shared/functions/dsql/generate-type-definitions.js index 18d2438..f789350 100644 --- a/dist/package-shared/functions/dsql/generate-type-definitions.js +++ b/dist/package-shared/functions/dsql/generate-type-definitions.js @@ -1,21 +1,30 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = generateTypeDefinition; -const default_fields_regexp_1 = __importDefault(require("./default-fields-regexp")); -function generateTypeDefinition({ paradigm, table, query, typeDefName, allValuesOptional, addExport, }) { +import defaultFieldsRegexp from "./default-fields-regexp"; +export default function generateTypeDefinition({ paradigm, table, query, typeDefName, allValuesOptional, addExport, dbName, }) { let typeDefinition = ``; try { - const tdName = typeDefName || - `DSQL_${query.single}_${query.single_table}`.toUpperCase(); + const tdName = typeDefName + ? typeDefName + : dbName + ? `DSQL_${dbName}_${table.tableName}`.toUpperCase() + : `DSQL_${query.single}_${query.single_table}`.toUpperCase(); const fields = table.fields; - function typeMap(type) { - if (type === null || type === void 0 ? void 0 : type.match(/int/i)) { + function typeMap(schemaType) { + var _a, _b; + if (schemaType.options && schemaType.options.length > 0) { + return schemaType.options + .map((opt) => { + var _a; + return ((_a = schemaType.dataType) === null || _a === void 0 ? void 0 : _a.match(/int/i)) || + typeof opt == "number" + ? `${opt}` + : `"${opt}"`; + }) + .join(" | "); + } + if ((_a = schemaType.dataType) === null || _a === void 0 ? void 0 : _a.match(/int/i)) { return "number"; } - if (type === null || type === void 0 ? void 0 : type.match(/text|varchar|timestamp/i)) { + if ((_b = schemaType.dataType) === null || _b === void 0 ? 
void 0 : _b.match(/text|varchar|timestamp/i)) { return "string"; } return "string"; @@ -28,13 +37,13 @@ function generateTypeDefinition({ paradigm, table, query, typeDefName, allValues var _a; const nullValue = allValuesOptional ? "?" - : field.nullValue + : ((_a = field.fieldName) === null || _a === void 0 ? void 0 : _a.match(defaultFieldsRegexp)) ? "?" - : ((_a = field.fieldName) === null || _a === void 0 ? void 0 : _a.match(default_fields_regexp_1.default)) - ? "?" - : ""; - typesArrayTypeScript.push(` ${field.fieldName}${nullValue}: ${typeMap(field.dataType || "")};`); - typesArrayJavascript.push(` * @property {${typeMap(field.dataType || "")}${nullValue}} ${field.fieldName}`); + : field.notNullValue + ? "" + : "?"; + typesArrayTypeScript.push(` ${field.fieldName}${nullValue}: ${typeMap(field)};`); + typesArrayJavascript.push(` * @property {${typeMap(field)}${nullValue}} ${field.fieldName}`); }); typesArrayTypeScript.push(`}`); typesArrayJavascript.push(` */`); diff --git a/dist/package-shared/functions/dsql/hashPassword.js b/dist/package-shared/functions/dsql/hashPassword.js index 2c7ac9b..2a8f2f6 100644 --- a/dist/package-shared/functions/dsql/hashPassword.js +++ b/dist/package-shared/functions/dsql/hashPassword.js @@ -1,20 +1,14 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = hashPassword; -const crypto_1 = require("crypto"); -const grab_keys_1 = __importDefault(require("../../utils/grab-keys")); +import { createHmac } from "crypto"; +import grabKeys from "../../utils/grab-keys"; /** * # Hash password Function */ -function hashPassword({ password, encryptionKey, }) { - const { key } = (0, grab_keys_1.default)({ encryptionKey }); +export default function hashPassword({ password, encryptionKey, }) { + const { key } = grabKeys({ encryptionKey }); if (!(key === null || key === void 0 ? 
void 0 : key.match(/.{8,}/))) { throw new Error("Encryption key is invalid"); } - const hmac = (0, crypto_1.createHmac)("sha512", key); + const hmac = createHmac("sha512", key); hmac.update(password); let hashed = hmac.digest("base64"); return hashed; diff --git a/dist/package-shared/functions/dsql/sql/sql-delete-generator.d.ts b/dist/package-shared/functions/dsql/sql/sql-delete-generator.d.ts index b423204..1eeb462 100644 --- a/dist/package-shared/functions/dsql/sql/sql-delete-generator.d.ts +++ b/dist/package-shared/functions/dsql/sql/sql-delete-generator.d.ts @@ -1,3 +1,4 @@ +import { SQLDeleteGeneratorParams } from "../../../types"; interface SQLDeleteGenReturn { query: string; values: string[]; @@ -5,9 +6,5 @@ interface SQLDeleteGenReturn { /** * # SQL Delete Generator */ -export default function sqlDeleteGenerator({ tableName, data, dbFullName, }: { - data: any; - tableName: string; - dbFullName?: string; -}): SQLDeleteGenReturn | undefined; +export default function sqlDeleteGenerator({ tableName, deleteKeyValues, dbFullName, data, }: SQLDeleteGeneratorParams): SQLDeleteGenReturn | undefined; export {}; diff --git a/dist/package-shared/functions/dsql/sql/sql-delete-generator.js b/dist/package-shared/functions/dsql/sql/sql-delete-generator.js index cadaaf8..ea23df9 100644 --- a/dist/package-shared/functions/dsql/sql/sql-delete-generator.js +++ b/dist/package-shared/functions/dsql/sql/sql-delete-generator.js @@ -1,26 +1,47 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = sqlDeleteGenerator; +import sqlEqualityParser from "../../../utils/sql-equality-parser"; /** * # SQL Delete Generator */ -function sqlDeleteGenerator({ tableName, data, dbFullName, }) { +export default function sqlDeleteGenerator({ tableName, deleteKeyValues, dbFullName, data, }) { const finalDbName = dbFullName ? `${dbFullName}.` : ""; try { let queryStr = `DELETE FROM ${finalDbName}${tableName}`; let deleteBatch = []; let queryArr = []; - Object.keys(data).forEach((ky) => { - deleteBatch.push(`${ky}=?`); - queryArr.push(data[ky]); - }); + if (data) { + Object.keys(data).forEach((ky) => { + let value = data[ky]; + const parsedValue = typeof value == "number" ? String(value) : value; + if (!parsedValue) + return; + if (parsedValue.match(/%/)) { + deleteBatch.push(`${ky} LIKE ?`); + queryArr.push(parsedValue); + } + else { + deleteBatch.push(`${ky}=?`); + queryArr.push(parsedValue); + } + }); + } + else if (deleteKeyValues) { + deleteKeyValues.forEach((ky) => { + let value = ky.value; + const parsedValue = typeof value == "number" ? 
String(value) : value; + if (!parsedValue) + return; + const operator = sqlEqualityParser(ky.operator || "EQUAL"); + deleteBatch.push(`${ky.key} ${operator} ?`); + queryArr.push(parsedValue); + }); + } queryStr += ` WHERE ${deleteBatch.join(" AND ")}`; return { query: queryStr, values: queryArr, }; } - catch ( /** @type {any} */error) { + catch (error) { console.log(`SQL delete gen ERROR: ${error.message}`); return undefined; } diff --git a/dist/package-shared/functions/dsql/sql/sql-gen-operator-gen.d.ts b/dist/package-shared/functions/dsql/sql/sql-gen-operator-gen.d.ts new file mode 100644 index 0000000..91bf69b --- /dev/null +++ b/dist/package-shared/functions/dsql/sql/sql-gen-operator-gen.d.ts @@ -0,0 +1,12 @@ +import { ServerQueryEqualities } from "../../../types"; +type Params = { + fieldName: string; + value?: string; + equality: (typeof ServerQueryEqualities)[number]; +}; +/** + * # SQL Gen Operator Gen + * @description Generates an SQL operator for node module `mysql` or `serverless-mysql` + */ +export default function sqlGenOperatorGen({ fieldName, value, equality, }: Params): string; +export {}; diff --git a/dist/package-shared/functions/dsql/sql/sql-gen-operator-gen.js b/dist/package-shared/functions/dsql/sql/sql-gen-operator-gen.js new file mode 100644 index 0000000..9b42ebd --- /dev/null +++ b/dist/package-shared/functions/dsql/sql/sql-gen-operator-gen.js @@ -0,0 +1,50 @@ +import sqlEqualityParser from "../../../utils/sql-equality-parser"; +/** + * # SQL Gen Operator Gen + * @description Generates an SQL operator for node module `mysql` or `serverless-mysql` + */ +export default function sqlGenOperatorGen({ fieldName, value, equality, }) { + if (value) { + if (equality == "LIKE") { + return `LOWER(${fieldName}) LIKE LOWER('%${value}%')`; + } + else if (equality == "LIKE_RAW") { + return `LOWER(${fieldName}) LIKE LOWER('${value}')`; + } + else if (equality == "NOT LIKE") { + return `LOWER(${fieldName}) NOT LIKE LOWER('%${value}%')`; + } + else if (equality == "NOT LIKE_RAW") { + return `LOWER(${fieldName}) NOT LIKE LOWER('${value}')`; + } + else if (equality == "REGEXP") { + return `LOWER(${fieldName}) REGEXP LOWER('${value}')`; + } + else if (equality == "FULLTEXT") { + return `MATCH(${fieldName}) AGAINST('${value}' IN BOOLEAN MODE)`; + } + else if (equality == "NOT EQUAL") { + return `${fieldName} != ${value}`; + } + else if (equality) { + return `${fieldName} ${sqlEqualityParser(equality)} ${value}`; + } + else { + return `${fieldName} = ${value}`; + } + } + else { + if (equality == "IS NULL") { + return `${fieldName} IS NULL`; + } + else if (equality == "IS NOT NULL") { + return `${fieldName} IS NOT NULL`; + } + else if (equality) { + return `${fieldName} ${sqlEqualityParser(equality)} ?`; + } + else { + return `${fieldName} = ?`; + } + } +} diff --git a/dist/package-shared/functions/dsql/sql/sql-generator.js b/dist/package-shared/functions/dsql/sql/sql-generator.js index 2a0d661..e98f33a 100644 --- a/dist/package-shared/functions/dsql/sql/sql-generator.js +++ b/dist/package-shared/functions/dsql/sql/sql-generator.js @@ -1,11 +1,10 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = sqlGenerator; +import sqlEqualityParser from "../../../utils/sql-equality-parser"; /** * # SQL Query Generator * @description Generates an SQL Query for node module `mysql` or `serverless-mysql` */ -function sqlGenerator({ tableName, genObject, dbFullName, count }) { +export default function sqlGenerator({ tableName, genObject, dbFullName, count 
}) { + var _a; const finalQuery = (genObject === null || genObject === void 0 ? void 0 : genObject.query) ? genObject.query : undefined; const queryKeys = finalQuery ? Object.keys(finalQuery) : undefined; const sqlSearhValues = []; @@ -33,19 +32,37 @@ function sqlGenerator({ tableName, genObject, dbFullName, count }) { else if (typeof queryObj.value == "string" || typeof queryObj.value == "number") { const valueParsed = String(queryObj.value); + const operator = sqlEqualityParser(queryObj.equality || "EQUAL"); if (queryObj.equality == "LIKE") { str = `LOWER(${finalFieldName}) LIKE LOWER('%${valueParsed}%')`; } + else if (queryObj.equality == "LIKE_RAW") { + str = `LOWER(${finalFieldName}) LIKE LOWER(?)`; + sqlSearhValues.push(valueParsed); + } + else if (queryObj.equality == "NOT LIKE") { + str = `LOWER(${finalFieldName}) NOT LIKE LOWER('%${valueParsed}%')`; + } + else if (queryObj.equality == "NOT LIKE_RAW") { + str = `LOWER(${finalFieldName}) NOT LIKE LOWER(?)`; + sqlSearhValues.push(valueParsed); + } else if (queryObj.equality == "REGEXP") { - str = `${finalFieldName} REGEXP '${valueParsed}'`; + str = `LOWER(${finalFieldName}) REGEXP LOWER(?)`; + sqlSearhValues.push(valueParsed); } else if (queryObj.equality == "FULLTEXT") { - str = `MATCH(${finalFieldName}) AGAINST('${valueParsed}' IN BOOLEAN MODE)`; + str = `MATCH(${finalFieldName}) AGAINST(? IN BOOLEAN MODE)`; + sqlSearhValues.push(valueParsed); } else if (queryObj.equality == "NOT EQUAL") { str = `${finalFieldName} != ?`; sqlSearhValues.push(valueParsed); } + else if (queryObj.equality) { + str = `${finalFieldName} ${operator} ?`; + sqlSearhValues.push(valueParsed); + } else { sqlSearhValues.push(valueParsed); } @@ -120,7 +137,7 @@ function sqlGenerator({ tableName, genObject, dbFullName, count }) { } else if ((_a = genObject === null || genObject === void 0 ? void 0 : genObject.selectFields) === null || _a === void 0 ? void 0 : _a[0]) { if (genObject.join) { - str += ` ${(_b = genObject.selectFields) === null || _b === void 0 ? void 0 : _b.map((fld) => `${finalDbName}${tableName}.${fld}`).join(",")}`; + str += ` ${(_b = genObject.selectFields) === null || _b === void 0 ? void 0 : _b.map((fld) => `${finalDbName}${tableName}.${String(fld)}`).join(",")}`; } else { str += ` ${(_c = genObject.selectFields) === null || _c === void 0 ? void 0 : _c.join(",")}`; @@ -207,10 +224,16 @@ function sqlGenerator({ tableName, genObject, dbFullName, count }) { const stringOperator = (genObject === null || genObject === void 0 ? void 0 : genObject.searchOperator) || "AND"; queryString += ` WHERE ${sqlSearhString.join(` ${stringOperator} `)}`; } - if ((genObject === null || genObject === void 0 ? void 0 : genObject.order) && !count) + if ((_a = genObject === null || genObject === void 0 ? void 0 : genObject.group) === null || _a === void 0 ? void 0 : _a[0]) { + queryString += ` GROUP BY ${genObject.group + .map((g) => `\`${g.toString()}\``) + .join(",")}`; + } + if ((genObject === null || genObject === void 0 ? void 0 : genObject.order) && !count) { queryString += ` ORDER BY ${genObject.join ? `${finalDbName}${tableName}.${String(genObject.order.field)}` : String(genObject.order.field)} ${genObject.order.strategy}`; + } if ((genObject === null || genObject === void 0 ? void 0 : genObject.limit) && !count) queryString += ` LIMIT ${genObject.limit}`; if ((genObject === null || genObject === void 0 ? 
void 0 : genObject.offset) && !count) diff --git a/dist/package-shared/functions/dsql/sql/sql-insert-generator.js b/dist/package-shared/functions/dsql/sql/sql-insert-generator.js index 12543e2..dd0f38e 100644 --- a/dist/package-shared/functions/dsql/sql/sql-insert-generator.js +++ b/dist/package-shared/functions/dsql/sql/sql-insert-generator.js @@ -1,11 +1,8 @@ -"use strict"; // @ts-check -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = sqlInsertGenerator; /** * # SQL Insert Generator */ -function sqlInsertGenerator({ tableName, data, dbFullName, }) { +export default function sqlInsertGenerator({ tableName, data, dbFullName, }) { const finalDbName = dbFullName ? `${dbFullName}.` : ""; try { if (Array.isArray(data) && (data === null || data === void 0 ? void 0 : data[0])) { diff --git a/dist/package-shared/shell/resetSQLCredentials.d.ts b/dist/package-shared/functions/dsql/sync-databases/index.d.ts similarity index 100% rename from dist/package-shared/shell/resetSQLCredentials.d.ts rename to dist/package-shared/functions/dsql/sync-databases/index.d.ts diff --git a/dist/package-shared/functions/dsql/sync-databases/index.js b/dist/package-shared/functions/dsql/sync-databases/index.js new file mode 100644 index 0000000..c2ac534 --- /dev/null +++ b/dist/package-shared/functions/dsql/sync-databases/index.js @@ -0,0 +1,82 @@ +import mysql from "mysql"; +import { exec } from "child_process"; +import { promisify } from "util"; +function getConnection(config) { + return mysql.createConnection(config); +} +function getMasterStatus(config) { + return new Promise((resolve, reject) => { + const connection = getConnection(config); + connection.query("SHOW MASTER STATUS", (error, results) => { + connection.end(); + if (error) + reject(error); + else + resolve(results[0]); + }); + }); +} +async function syncDatabases() { + const config = { + host: "localhost", + user: "root", + password: "your_password", + }; + let lastPosition = null; // Track last synced position + while (true) { + try { + // Get current master status + const { File, Position } = await getMasterStatus(config); + // Determine start position (use lastPosition or 4 if first run) + const startPosition = lastPosition !== null ? 
lastPosition + 1 : 4; + if (startPosition >= Position) { + await new Promise((resolve) => setTimeout(resolve, 5000)); // Wait 5 seconds if no new changes + continue; + } + // Execute mysqlbinlog to get changes + const execPromise = promisify(exec); + const { stdout } = await execPromise(`mysqlbinlog --database=db_master ${File} --start-position=${startPosition} --stop-position=${Position}`); + if (stdout) { + const connection = getConnection(Object.assign(Object.assign({}, config), { database: "db_slave" })); + return new Promise((resolve, reject) => { + connection.query(stdout, (error) => { + connection.end(); + if (error) + reject(error); + else { + lastPosition = Position; + console.log(`Synced up to position ${Position} at ${new Date().toISOString()}`); + resolve(null); + } + }); + }); + } + } + catch (error) { + console.error("Sync error:", error); + } + await new Promise((resolve) => setTimeout(resolve, 5000)); // Check every 5 seconds + } +} +// Initialize db_slave with db_master data +async function initializeSlave() { + const config = { + host: "localhost", + user: "root", + password: "your_password", + }; + try { + await promisify(exec)(`mysqldump -u ${config.user} -p${config.password} db_master > db_master_backup.sql`); + await promisify(exec)(`mysql -u ${config.user} -p${config.password} db_slave < db_master_backup.sql`); + console.log("Slave initialized with master data"); + } + catch (error) { + console.error("Initialization error:", error); + } +} +// Run the sync process +async function main() { + await initializeSlave(); + await syncDatabases(); +} +main().catch(console.error); diff --git a/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/create-duplicate-tables-triggers.d.ts b/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/create-duplicate-tables-triggers.d.ts new file mode 100644 index 0000000..b1b3154 --- /dev/null +++ b/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/create-duplicate-tables-triggers.d.ts @@ -0,0 +1,2 @@ +type Params = {}; +declare function createDuplicateTablesTriggers({}: Params): void; diff --git a/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/create-duplicate-tables-triggers.js b/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/create-duplicate-tables-triggers.js new file mode 100644 index 0000000..befea82 --- /dev/null +++ b/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/create-duplicate-tables-triggers.js @@ -0,0 +1,2 @@ +"use strict"; +function createDuplicateTablesTriggers({}) { } diff --git a/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/grab-trigger-name.d.ts b/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/grab-trigger-name.d.ts new file mode 100644 index 0000000..9b7914c --- /dev/null +++ b/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/grab-trigger-name.d.ts @@ -0,0 +1,9 @@ +export declare const TriggerParadigms: readonly ["sync_tables", "sync_dbs"]; +type Params = { + userId?: string | number; + paradigm: (typeof TriggerParadigms)[number]; + dbId?: string | number; + tableName?: string; +}; +export default function grabTriggerName({ userId, paradigm, dbId, tableName, }: Params): string; +export {}; diff --git a/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/grab-trigger-name.js b/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/grab-trigger-name.js new file mode 100644 index 0000000..90f7787 --- /dev/null +++ 
b/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/grab-trigger-name.js @@ -0,0 +1,11 @@ +export const TriggerParadigms = ["sync_tables", "sync_dbs"]; +export default function grabTriggerName({ userId, paradigm, dbId, tableName, }) { + let triggerName = `dsql_trig_${paradigm}`; + if (userId) + triggerName += `_${userId}`; + if (dbId) + triggerName += `_${dbId}`; + if (tableName) + triggerName += `_${tableName}`; + return triggerName; +} diff --git a/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/stored-proceedure-sql-gen.d.ts b/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/stored-proceedure-sql-gen.d.ts new file mode 100644 index 0000000..5548d31 --- /dev/null +++ b/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/stored-proceedure-sql-gen.d.ts @@ -0,0 +1,20 @@ +import { DSQL_DatabaseSchemaType, DSQL_TableSchemaType } from "../../../types"; +declare const TriggerTypes: readonly [{ + readonly name: "after_insert"; + readonly value: "INSERT"; +}, { + readonly name: "after_update"; + readonly value: "UPDATE"; +}, { + readonly name: "after_delete"; + readonly value: "DELETE"; +}]; +export type TriggerSQLGenParams = { + type: (typeof TriggerTypes)[number]; + srcDbSchema: DSQL_DatabaseSchemaType; + srcTableSchema: DSQL_TableSchemaType; + content: string; + proceedureName: string; +}; +export default function triggerSQLGen({ type, srcDbSchema, srcTableSchema, content, proceedureName, }: TriggerSQLGenParams): string; +export {}; diff --git a/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/stored-proceedure-sql-gen.js b/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/stored-proceedure-sql-gen.js new file mode 100644 index 0000000..7a938e6 --- /dev/null +++ b/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/stored-proceedure-sql-gen.js @@ -0,0 +1,23 @@ +const TriggerTypes = [ + { + name: "after_insert", + value: "INSERT", + }, + { + name: "after_update", + value: "UPDATE", + }, + { + name: "after_delete", + value: "DELETE", + }, +]; +export default function triggerSQLGen({ type, srcDbSchema, srcTableSchema, content, proceedureName, }) { + let sql = `DELIMITER //\n`; + sql += `CREATE PROCEDURE ${proceedureName}`; + sql += `\nBEGIN`; + sql += ` ${content}`; + sql += `\nEND //`; + sql += `\nDELIMITER\n`; + return sql; +} diff --git a/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/table-replication-trigger-sql-gen.d.ts b/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/table-replication-trigger-sql-gen.d.ts new file mode 100644 index 0000000..4b48535 --- /dev/null +++ b/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/table-replication-trigger-sql-gen.d.ts @@ -0,0 +1,8 @@ +import { DSQL_DatabaseSchemaType, DSQL_TableSchemaType } from "../../../types"; +import { TriggerSQLGenParams } from "./trigger-sql-gen"; +type Params = TriggerSQLGenParams & { + dstDbSchema: DSQL_DatabaseSchemaType; + dstTableSchema: DSQL_TableSchemaType; +}; +export default function tableReplicationTriggerSQLGen({ type, dstDbSchema, dstTableSchema, srcDbSchema, srcTableSchema, userId, paradigm, }: Params): string; +export {}; diff --git a/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/table-replication-trigger-sql-gen.js b/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/table-replication-trigger-sql-gen.js new file mode 100644 index 0000000..0a6a26f --- /dev/null +++ 
b/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/table-replication-trigger-sql-gen.js @@ -0,0 +1,32 @@ +import triggerSQLGen from "./trigger-sql-gen"; +export default function tableReplicationTriggerSQLGen({ type, dstDbSchema, dstTableSchema, srcDbSchema, srcTableSchema, userId, paradigm, }) { + let sql = `CREATE TRIGGER`; + const srcColumns = srcTableSchema.fields + .map((fld) => fld.fieldName) + .filter((fld) => typeof fld == "string"); + const dstColumns = dstTableSchema.fields + .map((fld) => fld.fieldName) + .filter((fld) => typeof fld == "string"); + if (type.name == "after_insert") { + sql += ` INSERT INTO ${dstDbSchema.dbFullName}.${dstTableSchema.tableName}`; + sql += ` (${dstColumns.join(",")})`; + sql += ` VALUES (${dstColumns.map((c) => `NEW.${c}`).join(",")})`; + } + else if (type.name == "after_update") { + sql += ` UPDATE ${dstDbSchema.dbFullName}.${dstTableSchema.tableName}`; + sql += ` SET ${dstColumns.map((c) => `${c}=NEW.${c}`).join(",")}`; + sql += ` WHERE id = NEW.id`; + } + else if (type.name == "after_delete") { + sql += ` DELETE FROM ${dstDbSchema.dbFullName}.${dstTableSchema.tableName}`; + sql += ` WHERE id = OLD.id`; + } + return triggerSQLGen({ + content: sql, + srcDbSchema, + srcTableSchema, + type, + paradigm, + userId, + }); +} diff --git a/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/trigger-sql-gen.d.ts b/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/trigger-sql-gen.d.ts new file mode 100644 index 0000000..fb3d21c --- /dev/null +++ b/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/trigger-sql-gen.d.ts @@ -0,0 +1,22 @@ +import { DSQL_DatabaseSchemaType, DSQL_TableSchemaType } from "../../../types"; +import { TriggerParadigms } from "./grab-trigger-name"; +declare const TriggerTypes: readonly [{ + readonly name: "after_insert"; + readonly value: "INSERT"; +}, { + readonly name: "after_update"; + readonly value: "UPDATE"; +}, { + readonly name: "after_delete"; + readonly value: "DELETE"; +}]; +export type TriggerSQLGenParams = { + type: (typeof TriggerTypes)[number]; + srcDbSchema: DSQL_DatabaseSchemaType; + srcTableSchema: DSQL_TableSchemaType; + content: string; + userId?: string | number; + paradigm: (typeof TriggerParadigms)[number]; +}; +export default function triggerSQLGen({ type, srcDbSchema, srcTableSchema, content, userId, paradigm, }: TriggerSQLGenParams): string; +export {}; diff --git a/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/trigger-sql-gen.js b/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/trigger-sql-gen.js new file mode 100644 index 0000000..5245599 --- /dev/null +++ b/dist/package-shared/functions/dsql/triggers-and-stored-proceedures/trigger-sql-gen.js @@ -0,0 +1,30 @@ +import grabTriggerName from "./grab-trigger-name"; +const TriggerTypes = [ + { + name: "after_insert", + value: "INSERT", + }, + { + name: "after_update", + value: "UPDATE", + }, + { + name: "after_delete", + value: "DELETE", + }, +]; +export default function triggerSQLGen({ type, srcDbSchema, srcTableSchema, content, userId, paradigm, }) { + let sql = `CREATE TRIGGER`; + let triggerName = grabTriggerName({ + paradigm, + dbId: srcDbSchema.id, + tableName: srcTableSchema.tableName, + userId, + }); + sql += ` ${triggerName}`; + sql += ` AFTER ${type.value} ON ${srcTableSchema.tableName}`; + sql += ` FOR EACH ROW BEGIN`; + sql += ` ${content}`; + sql += ` END`; + return sql; +} diff --git a/dist/package-shared/functions/email/fns/validate-email.js 
b/dist/package-shared/functions/email/fns/validate-email.js index a9ee328..14590fa 100644 --- a/dist/package-shared/functions/email/fns/validate-email.js +++ b/dist/package-shared/functions/email/fns/validate-email.js @@ -1,55 +1,38 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = validateEmail; -const handleNodemailer_1 = __importDefault(require("../../backend/handleNodemailer")); -const email_mx_lookup_1 = __importDefault(require("../verification/email-mx-lookup")); -const email_regex_test_1 = __importDefault(require("../verification/email-regex-test")); -function validateEmail(_a) { - return __awaiter(this, arguments, void 0, function* ({ email, welcomeEmailOptions, }) { - var _b; - if (!email) { - return { - isValid: false, - message: "Email is required.", - }; - } - if (!(0, email_regex_test_1.default)(email)) { - return { - isValid: false, - message: "Invalid email format.", - }; - } - const checkEmailMxRecords = yield (0, email_mx_lookup_1.default)(email); - if (!checkEmailMxRecords) { - return { - isValid: false, - message: "Email domain does not have valid MX records.", - }; - } - if (welcomeEmailOptions) { - const welcomeEmail = yield (0, handleNodemailer_1.default)(welcomeEmailOptions); - if (!((_b = welcomeEmail === null || welcomeEmail === void 0 ? void 0 : welcomeEmail.accepted) === null || _b === void 0 ? void 0 : _b[0])) { - return { - isValid: false, - message: "Email verification failed.", - }; - } - } +import handleNodemailer from "../../backend/handleNodemailer"; +import emailMxLookup from "../verification/email-mx-lookup"; +import emailRegexCheck from "../verification/email-regex-test"; +export default async function validateEmail({ email, welcomeEmailOptions, }) { + var _a; + if (!email) { return { - isValid: true, - message: "Email is valid.", + isValid: false, + message: "Email is required.", }; - }); + } + if (!emailRegexCheck(email)) { + return { + isValid: false, + message: "Invalid email format.", + }; + } + const checkEmailMxRecords = await emailMxLookup(email); + if (!checkEmailMxRecords) { + return { + isValid: false, + message: "Email domain does not have valid MX records.", + }; + } + if (welcomeEmailOptions) { + const welcomeEmail = await handleNodemailer(welcomeEmailOptions); + if (!((_a = welcomeEmail === null || welcomeEmail === void 0 ? void 0 : welcomeEmail.accepted) === null || _a === void 0 ? 
void 0 : _a[0])) { + return { + isValid: false, + message: "Email verification failed.", + }; + } + } + return { + isValid: true, + message: "Email is valid.", + }; } diff --git a/dist/package-shared/functions/email/verification/email-mx-lookup.js b/dist/package-shared/functions/email/verification/email-mx-lookup.js index de82543..6ad264d 100644 --- a/dist/package-shared/functions/email/verification/email-mx-lookup.js +++ b/dist/package-shared/functions/email/verification/email-mx-lookup.js @@ -1,22 +1,16 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = emailMxLookup; -const dns_1 = __importDefault(require("dns")); -const debug_log_1 = __importDefault(require("../../../utils/logging/debug-log")); -function emailMxLookup(email, debug) { +import dns from "dns"; +import debugLog from "../../../utils/logging/debug-log"; +export default function emailMxLookup(email, debug) { return new Promise((resolve, reject) => { if (!email) { resolve(false); return; } const domain = email.split("@")[1]; - dns_1.default.resolveMx(domain, (err, addresses) => { + dns.resolveMx(domain, (err, addresses) => { if (err || !addresses.length) { if (debug) { - (0, debug_log_1.default)({ + debugLog({ log: (err === null || err === void 0 ? void 0 : err.message) || "No MX records found", addTime: true, label: "Email MX Lookup", @@ -27,7 +21,7 @@ function emailMxLookup(email, debug) { } else { if (debug) { - (0, debug_log_1.default)({ + debugLog({ log: addresses, addTime: true, label: "MX Records", diff --git a/dist/package-shared/functions/email/verification/email-regex-test.js b/dist/package-shared/functions/email/verification/email-regex-test.js index d954198..431bdf1 100644 --- a/dist/package-shared/functions/email/verification/email-regex-test.js +++ b/dist/package-shared/functions/email/verification/email-regex-test.js @@ -1,7 +1,4 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = emailRegexCheck; -function emailRegexCheck(email) { +export default function emailRegexCheck(email) { const regex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/; return regex.test(email); } diff --git a/dist/package-shared/functions/email/verification/smtp-verification.js b/dist/package-shared/functions/email/verification/smtp-verification.js index 75b3c0e..7232d0c 100644 --- a/dist/package-shared/functions/email/verification/smtp-verification.js +++ b/dist/package-shared/functions/email/verification/smtp-verification.js @@ -1,22 +1,16 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = verifyEmailSMTP; -const net_1 = __importDefault(require("net")); -const dns_1 = __importDefault(require("dns")); -function verifyEmailSMTP(email) { +import net from "net"; +import dns from "dns"; +export default function verifyEmailSMTP(email) { return new Promise((resolve, reject) => { const domain = email.split("@")[1]; - dns_1.default.resolveMx(domain, (err, addresses) => { + dns.resolveMx(domain, (err, addresses) => { if (err || !addresses.length) { console.log("Invalid email domain."); return; } const mxServer = addresses[0].exchange; console.log(`Connecting to ${mxServer} to verify email...`); - const client = net_1.default.createConnection(25, mxServer); + const client = net.createConnection(25, mxServer); client.on("connect", () => { console.log("Connected to SMTP server."); client.write("HELO example.com\r\n"); diff --git a/dist/package-shared/functions/web-app/db/grab-user-resource/index.d.ts b/dist/package-shared/functions/web-app/db/grab-user-resource/index.d.ts new file mode 100644 index 0000000..bfa377b --- /dev/null +++ b/dist/package-shared/functions/web-app/db/grab-user-resource/index.d.ts @@ -0,0 +1,29 @@ +import { DsqlCrudQueryObject } from "../../../../types"; +import { DsqlTables } from "../../../../types/dsql"; +export type GrabUserResourceParams = { + query?: DsqlCrudQueryObject; + userId?: string | number; + tableName: (typeof DsqlTables)[number]; + count?: boolean; + countOnly?: boolean; + noLimit?: boolean; + isSuperUser?: boolean; + targetID?: string | number; +}; +export default function dbGrabUserResource(params: GrabUserResourceParams): Promise<{ + batch: T[] | null; + single: T | null; + debug: { + queryObject: { + sql?: string; + params?: string[]; + } | undefined; + error: any; + msg: string | undefined; + }; + count: number; +}>; diff --git a/dist/package-shared/functions/web-app/db/grab-user-resource/index.js b/dist/package-shared/functions/web-app/db/grab-user-resource/index.js new file mode 100644 index 0000000..26710ed --- /dev/null +++ b/dist/package-shared/functions/web-app/db/grab-user-resource/index.js @@ -0,0 +1,24 @@ +import dsqlCrud from "../../../../utils/data-fetching/crud"; +import query from "./query"; +import _n from "../../../../utils/numberfy"; +export default async function dbGrabUserResource(params) { + let queryObject = query(params); + let result = await dsqlCrud({ + action: "get", + table: params.tableName, + query: queryObject, + count: params.count, + countOnly: params.countOnly, + }); + const payload = result === null || result === void 0 ? void 0 : result.payload; + return { + batch: payload || null, + single: (payload === null || payload === void 0 ? void 0 : payload[0]) || null, + debug: { + queryObject: result === null || result === void 0 ? void 0 : result.queryObject, + error: result === null || result === void 0 ? void 0 : result.error, + msg: result === null || result === void 0 ? void 0 : result.msg, + }, + count: _n(result === null || result === void 0 ? 
void 0 : result.count), + }; +} diff --git a/dist/package-shared/functions/web-app/db/grab-user-resource/query.d.ts b/dist/package-shared/functions/web-app/db/grab-user-resource/query.d.ts new file mode 100644 index 0000000..2d0396f --- /dev/null +++ b/dist/package-shared/functions/web-app/db/grab-user-resource/query.d.ts @@ -0,0 +1,16 @@ +import { GrabUserResourceParams } from "."; +export default function (params?: GrabUserResourceParams): import("../../../../types").ServerQueryParam<{ + [k: string]: any; +}> & { + query?: import("../../../../types").ServerQueryQueryObject<{ + [k: string]: any; + }> | undefined; +} & import("../../../../types").ServerQueryParam & { + query?: import("../../../../types").ServerQueryQueryObject | undefined; +} & import("../../../../types").ServerQueryParam<{ + [k: string]: any; +}> & { + query?: import("../../../../types").ServerQueryQueryObject<{ + [k: string]: any; + }> | undefined; +}; diff --git a/dist/package-shared/functions/web-app/db/grab-user-resource/query.js b/dist/package-shared/functions/web-app/db/grab-user-resource/query.js new file mode 100644 index 0000000..fd9e952 --- /dev/null +++ b/dist/package-shared/functions/web-app/db/grab-user-resource/query.js @@ -0,0 +1,32 @@ +import _ from "lodash"; +import ResourceLimits from "../../../../dict/resource-limits"; +import _n from "../../../../utils/numberfy"; +export default function (params) { + let queryObject = { + limit: (params === null || params === void 0 ? void 0 : params.noLimit) ? undefined : ResourceLimits["general"], + order: { + field: "id", + strategy: "DESC", + }, + }; + if (params === null || params === void 0 ? void 0 : params.targetID) { + const targetIDQuery = { + query: { + id: { + value: _n(params.targetID).toString(), + }, + }, + }; + queryObject = _.merge(queryObject, targetIDQuery); + } + let queryFixedObject = (params === null || params === void 0 ? void 0 : params.isSuperUser) + ? {} + : { + query: { + user_id: { + value: String((params === null || params === void 0 ? void 0 : params.userId) || 0), + }, + }, + }; + return _.merge(queryObject, params === null || params === void 0 ? 
void 0 : params.query, queryFixedObject); +} diff --git a/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-creation.d.ts b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-creation.d.ts new file mode 100644 index 0000000..ffbd10d --- /dev/null +++ b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-creation.d.ts @@ -0,0 +1,19 @@ +import { DSQL_DATASQUIREL_MARIADB_USERS } from "../../../types/dsql"; +import { UserType } from "../../../types"; +type Params = { + user: UserType; + existingRecord?: DSQL_DATASQUIREL_MARIADB_USERS | null; + updatedRecord: DSQL_DATASQUIREL_MARIADB_USERS; +}; +type Return = { + msg?: string; + success?: boolean; +}; +export default function handleMariadbUserCreation({ user, existingRecord, updatedRecord, }: Params): Promise; +type CreateNewUserParams = { + username?: string; + host?: string; + password?: string; +}; +export declare function createNewSQLUser({ host, password, username, }: CreateNewUserParams): Promise; +export {}; diff --git a/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-creation.js b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-creation.js new file mode 100644 index 0000000..3fb9a96 --- /dev/null +++ b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-creation.js @@ -0,0 +1,52 @@ +import dbHandler from "../../backend/dbHandler"; +import normalizeText from "../../../utils/normalize-text"; +import decrypt from "../../dsql/decrypt"; +export default async function handleMariadbUserCreation({ user, existingRecord, updatedRecord, }) { + const parsedPassword = decrypt({ + encryptedString: (updatedRecord === null || updatedRecord === void 0 ? void 0 : updatedRecord.password) || "", + }); + if ((existingRecord === null || existingRecord === void 0 ? void 0 : existingRecord.id) && (updatedRecord === null || updatedRecord === void 0 ? void 0 : updatedRecord.id)) { + if (existingRecord.username !== updatedRecord.username || + existingRecord.host !== updatedRecord.host) { + const renameSQLUser = await dbHandler({ + query: normalizeText(` + RENAME USER '${existingRecord.username}'@'${existingRecord.host}' \ + TO '${updatedRecord.username}'@'${updatedRecord.host}' + `), + }); + if (!renameSQLUser) { + await createNewSQLUser({ + host: updatedRecord.host, + password: parsedPassword, + username: updatedRecord.username, + }); + } + } + const updateSQLUser = await dbHandler({ + query: normalizeText(` + ALTER USER '${updatedRecord.username}'@'${updatedRecord.host}' \ + IDENTIFIED BY '${parsedPassword}' + `), + }); + if (!updateSQLUser) { + await createNewSQLUser({ + host: updatedRecord.host, + password: parsedPassword, + username: updatedRecord.username, + }); + } + } + else if (!(existingRecord === null || existingRecord === void 0 ? void 0 : existingRecord.id) && (updatedRecord === null || updatedRecord === void 0 ? 
void 0 : updatedRecord.id)) { + await createNewSQLUser({ + host: updatedRecord.host, + password: parsedPassword, + username: updatedRecord.username, + }); + } + return { success: true }; +} +export async function createNewSQLUser({ host, password, username, }) { + return await dbHandler({ + query: `CREATE USER IF NOT EXISTS '${username}'@'${host}' IDENTIFIED BY '${password}'`, + }); +} diff --git a/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-cleanup-records.d.ts b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-cleanup-records.d.ts new file mode 100644 index 0000000..8ff9ea0 --- /dev/null +++ b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-cleanup-records.d.ts @@ -0,0 +1,12 @@ +import { DSQL_DATASQUIREL_MARIADB_USERS } from "../../../types/dsql"; +import { UserType } from "../../../types"; +type Params = { + user: UserType; + updatedRecord: DSQL_DATASQUIREL_MARIADB_USERS; +}; +type Return = { + msg?: string; + success?: boolean; +}; +export default function handleMariadbUserGrantsForDatabasesCleanUpRecords({ user, updatedRecord, }: Params): Promise; +export {}; diff --git a/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-cleanup-records.js b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-cleanup-records.js new file mode 100644 index 0000000..3cf42af --- /dev/null +++ b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-cleanup-records.js @@ -0,0 +1,31 @@ +import dsqlCrud from "../../../utils/data-fetching/crud"; +export default async function handleMariadbUserGrantsForDatabasesCleanUpRecords({ user, updatedRecord, }) { + /** + * # Clean up Records + */ + await dsqlCrud({ + action: "delete", + table: "mariadb_user_databases", + deleteData: { + user_id: user.id, + mariadb_user_id: updatedRecord.id, + }, + }); + await dsqlCrud({ + action: "delete", + table: "mariadb_user_privileges", + deleteData: { + user_id: user.id, + mariadb_user_id: updatedRecord.id, + }, + }); + await dsqlCrud({ + action: "delete", + table: "mariadb_user_tables", + deleteData: { + user_id: user.id, + mariadb_user_id: updatedRecord.id, + }, + }); + return { success: true }; +} diff --git a/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-recreate-grants.d.ts b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-recreate-grants.d.ts new file mode 100644 index 0000000..fdb149c --- /dev/null +++ b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-recreate-grants.d.ts @@ -0,0 +1,13 @@ +import { DSQL_DATASQUIREL_MARIADB_USERS } from "../../../types/dsql"; +import { DatabaseScopedAccessObject, UserType } from "../../../types"; +type Params = { + currentAccessedDatabase: DatabaseScopedAccessObject; + user: UserType; + updatedRecord: DSQL_DATASQUIREL_MARIADB_USERS; +}; +type Return = { + msg?: string; + success?: boolean; +}; +export default function handleMariadbUserGrantsForDatabasesRecreateGrants({ currentAccessedDatabase, user, updatedRecord, }: Params): Promise; +export {}; diff --git a/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-recreate-grants.js b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-recreate-grants.js new file mode 100644 index 
0000000..6ffb16f --- /dev/null +++ b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-recreate-grants.js @@ -0,0 +1,68 @@ +import { UserSQLPermissions, } from "../../../types"; +import grabDbFullName from "../../../utils/grab-db-full-name"; +import dbHandler from "../../backend/dbHandler"; +import normalizeText from "../../../utils/normalize-text"; +export default async function handleMariadbUserGrantsForDatabasesRecreateGrants({ currentAccessedDatabase, user, updatedRecord, }) { + const { accessedDatabase, dbSlug, allGrants, allTables, grants, tables } = currentAccessedDatabase; + const dbFullName = grabDbFullName({ + user, + dbName: dbSlug, + }); + if (allGrants && allTables) { + const grantAllPrivileges = await dbHandler({ + query: normalizeText(` + GRANT ALL PRIVILEGES ON \`${dbFullName}\`.* TO \ + '${updatedRecord.username}'@'${updatedRecord.host}' + `), + }); + return { success: true }; + } + if (allGrants && (tables === null || tables === void 0 ? void 0 : tables[0])) { + for (let t = 0; t < tables.length; t++) { + const table = tables[t]; + // queries.push( + // normalizeText(` + // GRANT ALL PRIVILEGES ON \`${dbFullName}\`.\`${table.tableSlug}\` \ + // TO '${updatedRecord.username}'@'${updatedRecord.host}' + // `) + // ); + const grantAllPrivilegesToTables = await dbHandler({ + query: normalizeText(` + GRANT ALL PRIVILEGES ON \`${dbFullName}\`.\`${table.tableSlug}\` \ + TO '${updatedRecord.username}'@'${updatedRecord.host}' + `), + }); + } + return { success: true }; + } + if (grants === null || grants === void 0 ? void 0 : grants[0]) { + const isGrantsInvalid = grants.find((g) => !UserSQLPermissions.includes(g)); + if (isGrantsInvalid) { + return { msg: `grants is/are invalid!` }; + } + if (tables === null || tables === void 0 ?
void 0 : tables[0]) { + for (let t = 0; t < tables.length; t++) { + const table = tables[t]; + const grantSpecificPrivilegesToTables = await dbHandler({ + query: normalizeText(` + GRANT ${grants.join(",")} ON \ + \`${dbFullName}\`.\`${table.tableSlug}\` TO \ + '${updatedRecord.username}'@'${updatedRecord.host}' + `), + }); + } + return { success: true }; + } + else { + const grantSpecificPrivilegesToAllTables = await dbHandler({ + query: normalizeText(` + GRANT ${grants.join(",")} ON \ + \`${dbFullName}\`.* TO \ + '${updatedRecord.username}'@'${updatedRecord.host}' + `), + }); + return { success: true }; + } + } + return { success: true }; +} diff --git a/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-recreate-records.d.ts b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-recreate-records.d.ts new file mode 100644 index 0000000..b6e2757 --- /dev/null +++ b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-recreate-records.d.ts @@ -0,0 +1,13 @@ +import { DSQL_DATASQUIREL_MARIADB_USERS } from "../../../types/dsql"; +import { DatabaseScopedAccessObject, UserType } from "../../../types"; +type Params = { + currentAccessedDatabase: DatabaseScopedAccessObject; + user: UserType; + updatedRecord: DSQL_DATASQUIREL_MARIADB_USERS; +}; +type Return = { + msg?: string; + success?: boolean; +}; +export default function handleMariadbUserGrantsForDatabasesRecreateRecordsForDatabase({ currentAccessedDatabase, user, updatedRecord, }: Params): Promise; +export {}; diff --git a/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-recreate-records.js b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-recreate-records.js new file mode 100644 index 0000000..ea3143a --- /dev/null +++ b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-recreate-records.js @@ -0,0 +1,60 @@ +import { UserSQLPermissions, } from "../../../types"; +import dsqlCrud from "../../../utils/data-fetching/crud"; +import _n from "../../../utils/numberfy"; +export default async function handleMariadbUserGrantsForDatabasesRecreateRecordsForDatabase({ currentAccessedDatabase, user, updatedRecord, }) { + const { accessedDatabase, dbSlug, allGrants, allTables, grants, tables } = currentAccessedDatabase; + const insertSQLDbRecord = await dsqlCrud({ + action: "insert", + table: "mariadb_user_databases", + data: { + all_privileges: allGrants ? 1 : 0, + all_tables: allTables ? 1 : 0, + db_id: _n(accessedDatabase.dbId), + db_slug: accessedDatabase.dbSlug, + db_schema_id: _n(accessedDatabase.dbSchemaId), + user_id: user.id, + mariadb_user_id: updatedRecord.id, + }, + }); + if (tables === null || tables === void 0 ? void 0 : tables[0]) { + for (let t = 0; t < tables.length; t++) { + const table = tables[t]; + const insertTable = await dsqlCrud({ + action: "insert", + table: "mariadb_user_tables", + data: { + all_privileges: allGrants ? 1 : 0, + all_fields: 1, + user_id: user.id, + mariadb_user_id: updatedRecord.id, + table_slug: table.tableSlug, + db_id: _n(table.dbId), + db_slug: table.dbSlug, + db_schema_id: _n(table.dbSchemaId), + }, + }); + } + } + if (grants === null || grants === void 0 ?
void 0 : grants[0]) { + const isGrantsInvalid = grants.find((g) => !UserSQLPermissions.includes(g)); + if (isGrantsInvalid) { + return { msg: `grants is/are invalid!` }; + } + for (let t = 0; t < grants.length; t++) { + const grant = grants[t]; + await dsqlCrud({ + action: "insert", + table: "mariadb_user_privileges", + data: { + user_id: user.id, + mariadb_user_id: updatedRecord.id, + privilege: grant, + db_id: _n(accessedDatabase.dbId), + db_slug: accessedDatabase.dbSlug, + db_schema_id: _n(accessedDatabase.dbSchemaId), + }, + }); + } + } + return { success: true }; +} diff --git a/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases.d.ts b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases.d.ts new file mode 100644 index 0000000..af14728 --- /dev/null +++ b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases.d.ts @@ -0,0 +1,13 @@ +import { DSQL_DATASQUIREL_MARIADB_USERS } from "../../../types/dsql"; +import { DatabaseScopedAccessObject, UserType } from "../../../types"; +type Params = { + accessedDatabases: DatabaseScopedAccessObject[]; + user: UserType; + updatedRecord: DSQL_DATASQUIREL_MARIADB_USERS; +}; +type Return = { + msg?: string; + success?: boolean; +}; +export default function handleMariadbUserGrantsForDatabases({ accessedDatabases, user, updatedRecord, }: Params): Promise; +export {}; diff --git a/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases.js b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases.js new file mode 100644 index 0000000..8121c03 --- /dev/null +++ b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases.js @@ -0,0 +1,20 @@ +import handleMariadbUserGrantsForDatabasesRecreateRecordsForDatabase from "./handle-mariadb-user-grants-for-databases-recreate-records"; +import handleMariadbUserGrantsForDatabasesRecreateGrants from "./handle-mariadb-user-grants-for-databases-recreate-grants"; +export default async function handleMariadbUserGrantsForDatabases({ accessedDatabases, user, updatedRecord, }) { + /** + * # Recreate Records + */ + for (let i = 0; i < accessedDatabases.length; i++) { + await handleMariadbUserGrantsForDatabasesRecreateRecordsForDatabase({ + currentAccessedDatabase: accessedDatabases[i], + updatedRecord, + user, + }); + await handleMariadbUserGrantsForDatabasesRecreateGrants({ + currentAccessedDatabase: accessedDatabases[i], + updatedRecord, + user, + }); + } + return { success: true }; +} diff --git a/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants.d.ts b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants.d.ts new file mode 100644 index 0000000..f7a84e3 --- /dev/null +++ b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants.d.ts @@ -0,0 +1,12 @@ +import { AddUpdateMariadbUserAPIReqBody, UserType } from "../../../types"; +import { DSQL_DATASQUIREL_MARIADB_USERS } from "../../../types/dsql"; +type Params = AddUpdateMariadbUserAPIReqBody & { + user: UserType; + updatedRecord: DSQL_DATASQUIREL_MARIADB_USERS; +}; +type Return = { + msg?: string; + success?: boolean; +}; +export default function handleMariadbUserGrants({ accessedDatabases, grants, isAllDbsAccess, isAllGrants, user, updatedRecord, }: Params): Promise; +export {}; diff --git a/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants.js
b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants.js new file mode 100644 index 0000000..08278c5 --- /dev/null +++ b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants.js @@ -0,0 +1,62 @@ +import { UserSQLPermissions, } from "../../../types"; +import dsqlCrud from "../../../utils/data-fetching/crud"; +import grabDbNames from "../../../utils/grab-db-names"; +import normalizeText from "../../../utils/normalize-text"; +import dbHandler from "../../backend/dbHandler"; +import handleMariadbUserGrantsForDatabases from "./handle-mariadb-user-grants-for-databases"; +import revokeAllExistingGrants from "./revoke-all-existing-grants"; +export default async function handleMariadbUserGrants({ accessedDatabases, grants, isAllDbsAccess, isAllGrants, user, updatedRecord, }) { + const { userDbPrefix } = grabDbNames({ user }); + /** + * # Revoke All Existing Grants + */ + await revokeAllExistingGrants({ updatedRecord, user }); + /** + * # Recreate Grants + */ + if (isAllGrants && isAllDbsAccess) { + const grantAllPrivileges = await dbHandler({ + query: normalizeText(` + GRANT ALL PRIVILEGES ON \ + \`${userDbPrefix.replace(/\_/g, "\\_")}%\`.* TO \ + '${updatedRecord.username}'@'${updatedRecord.host}' + `), + }); + return { success: true }; + } + if (isAllDbsAccess && grants) { + const isGrantsInvalid = grants.find((g) => !UserSQLPermissions.includes(g)); + if (isGrantsInvalid) { + return { msg: `grants is/are invalid!` }; + } + const grantQuery = normalizeText(` + GRANT ${grants.join(",")} ON \`${userDbPrefix}%\`.* TO \ + '${updatedRecord.username}'@'${updatedRecord.host}' + `); + const grantSpecificPrivilegesToAllDbs = await dbHandler({ + query: grantQuery, + }); + for (let t = 0; t < grants.length; t++) { + const grant = grants[t]; + const addGrant = await dsqlCrud({ + action: "insert", + table: "mariadb_user_privileges", + data: { + user_id: user.id, + mariadb_user_id: updatedRecord.id, + privilege: grant, + }, + }); + } + return { success: true }; + } + if (accessedDatabases === null || accessedDatabases === void 0 ?
void 0 : accessedDatabases[0]) { + const res = await handleMariadbUserGrantsForDatabases({ + accessedDatabases, + updatedRecord, + user, + }); + return res; + } + return {}; +} diff --git a/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-record.d.ts b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-record.d.ts new file mode 100644 index 0000000..bce2ddc --- /dev/null +++ b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-record.d.ts @@ -0,0 +1,12 @@ +import { AddUpdateMariadbUserAPIReqBody, UserType } from "../../../types"; +import { DSQL_DATASQUIREL_MARIADB_USERS } from "../../../types/dsql"; +type Params = AddUpdateMariadbUserAPIReqBody & { + user: UserType; +}; +type Return = { + existingRecord?: DSQL_DATASQUIREL_MARIADB_USERS | null; + updatedRecord?: DSQL_DATASQUIREL_MARIADB_USERS | null; + msg?: string; +}; +export default function handleMariadbUserRecord({ mariadbUser, accessedDatabases, grants, isAllDbsAccess, isAllGrants, user, }: Params): Promise; +export {}; diff --git a/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-record.js b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-record.js new file mode 100644 index 0000000..2edd842 --- /dev/null +++ b/dist/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-record.js @@ -0,0 +1,54 @@ +import grabSQLUserName from "../../../utils/grab-sql-user-name"; +import addDbEntry from "../../backend/db/addDbEntry"; +import encrypt from "../../dsql/encrypt"; +import dbGrabUserResource from "../db/grab-user-resource"; +export default async function handleMariadbUserRecord({ mariadbUser, accessedDatabases, grants, isAllDbsAccess, isAllGrants, user, }) { + var _a, _b, _c; + const { name: finalMariadbUserName } = grabSQLUserName({ + name: mariadbUser.username, + user, + }); + const finalPassword = (_a = mariadbUser.password) === null || _a === void 0 ? void 0 : _a.replace(/ /g, ""); + if (!finalPassword) + return { msg: `Couldn't get password` }; + const encryptedFinalPassword = encrypt({ data: finalPassword }); + const finalHost = (_b = mariadbUser.host) === null || _b === void 0 ? void 0 : _b.replace(/ /g, ""); + const newMariadbUser = { + password: encryptedFinalPassword || undefined, + username: finalMariadbUserName, + all_databases: isAllDbsAccess ? 1 : 0, + all_grants: isAllGrants ? 1 : 0, + host: finalHost, + user_id: user.id, + }; + let { single: existingRecord } = await dbGrabUserResource({ + tableName: "mariadb_users", + userId: user.id, + query: { + query: { + id: { + value: String(mariadbUser.id || 0), + }, + }, + }, + }); + const record = await addDbEntry({ + tableName: "mariadb_users", + data: newMariadbUser, + update: true, + duplicateColumnName: "id", + duplicateColumnValue: ((existingRecord === null || existingRecord === void 0 ? void 0 : existingRecord.id) || 0).toString(), + }); + let { single: updatedRecord } = await dbGrabUserResource({ + tableName: "mariadb_users", + userId: user.id, + query: { + query: { + id: { + value: String((existingRecord === null || existingRecord === void 0 ? void 0 : existingRecord.id) || ((_c = record === null || record === void 0 ? void 0 : record.payload) === null || _c === void 0 ? 
void 0 : _c.insertId) || 0), + }, + }, + }, + }); + return { existingRecord, updatedRecord }; +} diff --git a/dist/package-shared/functions/web-app/mariadb-user/revoke-all-existing-grants.d.ts b/dist/package-shared/functions/web-app/mariadb-user/revoke-all-existing-grants.d.ts new file mode 100644 index 0000000..1fefee8 --- /dev/null +++ b/dist/package-shared/functions/web-app/mariadb-user/revoke-all-existing-grants.d.ts @@ -0,0 +1,12 @@ +import { UserType } from "../../../types"; +import { DSQL_DATASQUIREL_MARIADB_USERS } from "../../../types/dsql"; +type Params = { + user: UserType; + updatedRecord: DSQL_DATASQUIREL_MARIADB_USERS; +}; +type Return = { + msg?: string; + success?: boolean; +}; +export default function revokeAllExistingGrants({ user, updatedRecord, }: Params): Promise; +export {}; diff --git a/dist/package-shared/functions/web-app/mariadb-user/revoke-all-existing-grants.js b/dist/package-shared/functions/web-app/mariadb-user/revoke-all-existing-grants.js new file mode 100644 index 0000000..0067727 --- /dev/null +++ b/dist/package-shared/functions/web-app/mariadb-user/revoke-all-existing-grants.js @@ -0,0 +1,45 @@ +import grabDbNames from "../../../utils/grab-db-names"; +import normalizeText from "../../../utils/normalize-text"; +import dbHandler from "../../backend/dbHandler"; +import decrypt from "../../dsql/decrypt"; +import { createNewSQLUser } from "./handle-mariadb-user-creation"; +export default async function revokeAllExistingGrants({ user, updatedRecord, }) { + const { userDbPrefix } = grabDbNames({ user }); + const parsedPassword = decrypt({ + encryptedString: (updatedRecord === null || updatedRecord === void 0 ? void 0 : updatedRecord.password) || "", + }); + const revokeAllPrivileges = await dbHandler({ + query: normalizeText(` + REVOKE ALL PRIVILEGES ON *.* FROM '${updatedRecord.username}'@'${updatedRecord.host}' + `), + }); + if (!revokeAllPrivileges) { + await createNewSQLUser({ + host: updatedRecord.host, + password: parsedPassword, + username: updatedRecord.username, + }); + } + const revokeGrantOption = await dbHandler({ + query: normalizeText(` + REVOKE GRANT OPTION ON *.* FROM '${updatedRecord.username}'@'${updatedRecord.host}' + `), + }); + const userGrants = (await dbHandler({ + query: `SHOW GRANTS FOR '${updatedRecord.username}'@'${updatedRecord.host}'`, + })); + for (let i = 0; i < userGrants.length; i++) { + const grantObject = userGrants[i]; + const grant = grantObject === null || grantObject === void 0 ? void 0 : grantObject[Object.keys(grantObject)[0]]; + if (!(grant === null || grant === void 0 ? void 0 : grant.match(/GRANT USAGE .* IDENTIFIED BY PASSWORD/))) { + const revokeGrantText = grant + .replace(/GRANT/, "REVOKE") + .replace(/ TO /, " FROM "); + const revokePrivilege = await dbHandler({ query: revokeGrantText }); + } + } + const flushPrivileges = await dbHandler({ + query: `FLUSH PRIVILEGES`, + }); + return { success: true }; +} diff --git a/dist/package-shared/shell/checkDb.js b/dist/package-shared/shell/checkDb.js index a7ac02e..013630b 100644 --- a/dist/package-shared/shell/checkDb.js +++ b/dist/package-shared/shell/checkDb.js @@ -1,19 +1,5 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); require("dotenv").config({ path: "./../.env" }); -const grab_dsql_connection_1 = __importDefault(require("../utils/grab-dsql-connection")); +import grabDSQLConnection from "../utils/grab-dsql-connection"; /** * # Main DB Handler Function * @async @@ -25,11 +11,11 @@ const grab_dsql_connection_1 = __importDefault(require("../utils/grab-dsql-conne * * @returns {Promise} */ -(() => __awaiter(void 0, void 0, void 0, function* () { +(async () => { var _a; - const CONNECTION = (0, grab_dsql_connection_1.default)(); + const CONNECTION = grabDSQLConnection(); try { - const result = yield CONNECTION.query("SELECT id,first_name,last_name FROM users LIMIT 3"); + const result = await CONNECTION.query("SELECT id,first_name,last_name FROM users LIMIT 3"); console.log("Connection Query Success =>", result); } catch (error) { @@ -40,4 +26,4 @@ const grab_dsql_connection_1 = __importDefault(require("../utils/grab-dsql-conne CONNECTION === null || CONNECTION === void 0 ? void 0 : CONNECTION.end(); process.exit(); } -}))(); +})(); diff --git a/dist/package-shared/shell/createDbFromSchema/check-db-record.d.ts b/dist/package-shared/shell/createDbFromSchema/check-db-record.d.ts index 040e60e..8ce0b2e 100644 --- a/dist/package-shared/shell/createDbFromSchema/check-db-record.d.ts +++ b/dist/package-shared/shell/createDbFromSchema/check-db-record.d.ts @@ -3,10 +3,11 @@ import { DSQL_DATASQUIREL_USER_DATABASES } from "../../types/dsql"; type Param = { userId?: number | string | null; dbSchema: DSQL_DatabaseSchemaType; + isMain?: boolean; }; /** * # Create database from Schema Function * @requires DSQL_DB_CONN - Gobal Variable for Datasquirel Database */ -export default function checkDbRecordCreateDbSchema({ userId, dbSchema, }: Param): Promise; +export default function checkDbRecordCreateDbSchema({ userId, dbSchema, isMain, }: Param): Promise; export {}; diff --git a/dist/package-shared/shell/createDbFromSchema/check-db-record.js b/dist/package-shared/shell/createDbFromSchema/check-db-record.js index 9125338..d0e14ca 100644 --- a/dist/package-shared/shell/createDbFromSchema/check-db-record.js +++ b/dist/package-shared/shell/createDbFromSchema/check-db-record.js @@ -1,66 +1,62 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = checkDbRecordCreateDbSchema; -const varDatabaseDbHandler_1 = __importDefault(require("../utils/varDatabaseDbHandler")); -const numberfy_1 = __importDefault(require("../../utils/numberfy")); -const addDbEntry_1 = __importDefault(require("../../functions/backend/db/addDbEntry")); +import varDatabaseDbHandler from "../utils/varDatabaseDbHandler"; +import numberfy from "../../utils/numberfy"; +import addDbEntry from "../../functions/backend/db/addDbEntry"; +import updateDbEntry from "../../functions/backend/db/updateDbEntry"; /** * # Create database from Schema Function * @requires DSQL_DB_CONN - Gobal Variable for Datasquirel Database */ -function checkDbRecordCreateDbSchema(_a) { - return __awaiter(this, arguments, void 0, function* ({ userId, dbSchema, }) { - var _b; - try { - const { dbFullName, dbName, dbSlug, dbDescription, dbImage, childDatabase, childDatabaseDbFullName, } = dbSchema; - let recordedDbEntryArray = userId - ? yield (0, varDatabaseDbHandler_1.default)({ +export default async function checkDbRecordCreateDbSchema({ userId, dbSchema, isMain, }) { + var _a, _b; + if (isMain) + return undefined; + try { + const { dbFullName, dbName, dbSlug, dbDescription, dbImage, childDatabase, childDatabaseDbId, id, } = dbSchema; + let recordedDbEntryArray = userId + ? await varDatabaseDbHandler({ + queryString: `SELECT * FROM datasquirel.user_databases WHERE db_full_name = ?`, + queryValuesArray: [dbFullName || "NULL"], + }) + : undefined; + let recordedDbEntry = recordedDbEntryArray === null || recordedDbEntryArray === void 0 ? void 0 : recordedDbEntryArray[0]; + const newDbEntryObj = { + user_id: numberfy(userId), + db_name: dbName, + db_slug: dbSlug, + db_full_name: dbFullName, + db_description: dbDescription, + db_image: dbImage, + active_clone: childDatabase ? 1 : undefined, + db_schema_id: numberfy(id), + active_clone_parent_db_id: numberfy(childDatabaseDbId), + }; + if (!(recordedDbEntry === null || recordedDbEntry === void 0 ? void 0 : recordedDbEntry.id) && userId) { + const newDbEntry = await addDbEntry({ + data: newDbEntryObj, + tableName: "user_databases", + forceLocal: true, + }); + if ((_a = newDbEntry.payload) === null || _a === void 0 ? void 0 : _a.insertId) { + recordedDbEntryArray = await varDatabaseDbHandler({ queryString: `SELECT * FROM datasquirel.user_databases WHERE db_full_name = ?`, queryValuesArray: [dbFullName || "NULL"], - }) - : undefined; - let recordedDbEntry = recordedDbEntryArray === null || recordedDbEntryArray === void 0 ? void 0 : recordedDbEntryArray[0]; - if (!(recordedDbEntry === null || recordedDbEntry === void 0 ? void 0 : recordedDbEntry.id) && userId) { - const newDbEntryObj = { - user_id: (0, numberfy_1.default)(userId), - db_name: dbName, - db_slug: dbSlug, - db_full_name: dbFullName, - db_description: dbDescription, - db_image: dbImage, - active_clone: childDatabase ? 
1 : undefined, - active_clone_parent_db: childDatabaseDbFullName, - }; - const newDbEntry = (yield (0, addDbEntry_1.default)({ - data: newDbEntryObj, - tableName: "user_databases", - forceLocal: true, - })); - if (newDbEntry.insertId) { - recordedDbEntryArray = yield (0, varDatabaseDbHandler_1.default)({ - queryString: `SELECT * FROM datasquirel.user_databases WHERE db_full_name = ?`, - queryValuesArray: [dbFullName || "NULL"], - }); - recordedDbEntry = recordedDbEntryArray === null || recordedDbEntryArray === void 0 ? void 0 : recordedDbEntryArray[0]; - } + }); + recordedDbEntry = recordedDbEntryArray === null || recordedDbEntryArray === void 0 ? void 0 : recordedDbEntryArray[0]; } - return recordedDbEntry; } - catch (error) { - (_b = global.ERROR_CALLBACK) === null || _b === void 0 ? void 0 : _b.call(global, `Error Checking DB Record on Creating Schema`, error); - return undefined; + else if (recordedDbEntry === null || recordedDbEntry === void 0 ? void 0 : recordedDbEntry.id) { + await updateDbEntry({ + data: newDbEntryObj, + tableName: "user_databases", + forceLocal: true, + identifierColumnName: "id", + identifierValue: String(recordedDbEntry.id), + }); } - }); + return recordedDbEntry; + } + catch (error) { + (_b = global.ERROR_CALLBACK) === null || _b === void 0 ? void 0 : _b.call(global, `Error Checking DB Record on Creating Schema`, error); + return undefined; + } } diff --git a/dist/package-shared/shell/createDbFromSchema/check-table-record.js b/dist/package-shared/shell/createDbFromSchema/check-table-record.js index 01d6b93..a2bc3e9 100644 --- a/dist/package-shared/shell/createDbFromSchema/check-table-record.js +++ b/dist/package-shared/shell/createDbFromSchema/check-table-record.js @@ -1,94 +1,75 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = checkTableRecordCreateDbSchema; -const varDatabaseDbHandler_1 = __importDefault(require("../utils/varDatabaseDbHandler")); -const sql_generator_1 = __importDefault(require("../../functions/dsql/sql/sql-generator")); -const numberfy_1 = __importDefault(require("../../utils/numberfy")); -const addDbEntry_1 = __importDefault(require("../../functions/backend/db/addDbEntry")); +import varDatabaseDbHandler from "../utils/varDatabaseDbHandler"; +import sqlGenerator from "../../functions/dsql/sql/sql-generator"; +import numberfy from "../../utils/numberfy"; +import addDbEntry from "../../functions/backend/db/addDbEntry"; /** * # Create database from Schema Function * @requires DSQL_DB_CONN - Gobal Variable for Datasquirel Database */ -function checkTableRecordCreateDbSchema(_a) { - return __awaiter(this, arguments, void 0, function* ({ userId, tableSchema, dbSchema, dbRecord, dbFullName, }) { - var _b; - if (!tableSchema) - return undefined; - try { - const queryObj = (0, sql_generator_1.default)({ - tableName: "user_database_tables", - genObject: { - query: { - db_id: { - value: String(dbRecord === null || dbRecord === void 0 ? void 0 : dbRecord.id), - }, - table_slug: { - value: tableSchema.tableName, - }, - user_id: { - value: String(userId), - }, +export default async function checkTableRecordCreateDbSchema({ userId, tableSchema, dbSchema, dbRecord, dbFullName, }) { + var _a, _b; + if (!tableSchema) + return undefined; + try { + const queryObj = sqlGenerator({ + tableName: "user_database_tables", + genObject: { + query: { + db_id: { + value: String(dbRecord === null || dbRecord === void 0 ? void 0 : dbRecord.id), + }, + table_slug: { + value: tableSchema.tableName, + }, + user_id: { + value: String(userId), }, }, - dbFullName: "datasquirel", - }); - let recordedTableEntryArray = userId - ? yield (0, varDatabaseDbHandler_1.default)({ - queryString: (queryObj === null || queryObj === void 0 ? void 0 : queryObj.string) || "", - queryValuesArray: queryObj === null || queryObj === void 0 ? void 0 : queryObj.values, - }) - : undefined; - let recordedTableEntry = recordedTableEntryArray === null || recordedTableEntryArray === void 0 ? void 0 : recordedTableEntryArray[0]; - if (!(recordedTableEntry === null || recordedTableEntry === void 0 ? void 0 : recordedTableEntry.id) && userId) { - const newTableInsertObject = { - user_id: (0, numberfy_1.default)(userId), - db_id: dbRecord === null || dbRecord === void 0 ? void 0 : dbRecord.id, - db_slug: dbRecord === null || dbRecord === void 0 ? void 0 : dbRecord.db_slug, - table_name: tableSchema.tableFullName, - table_slug: tableSchema.tableName, - }; - if ((tableSchema === null || tableSchema === void 0 ? void 0 : tableSchema.childTable) && tableSchema.childTableName) { - const parentDb = dbSchema.find((db) => db.dbFullName == tableSchema.childTableDbFullName); - const parentDbTable = parentDb === null || parentDb === void 0 ? 
void 0 : parentDb.tables.find((tbl) => tbl.tableName == tableSchema.childTableName); - if (parentDb && parentDbTable) { - newTableInsertObject["child_table"] = 1; - newTableInsertObject["child_table_parent_database"] = - parentDb.dbFullName; - newTableInsertObject["child_table_parent_table"] = - parentDbTable.tableName; - } - } - const newTableRecordEntry = (yield (0, addDbEntry_1.default)({ - data: newTableInsertObject, - tableName: "user_database_tables", - dbContext: "Master", - forceLocal: true, - })); - if (newTableRecordEntry.insertId) { - recordedTableEntryArray = yield (0, varDatabaseDbHandler_1.default)({ - queryString: (queryObj === null || queryObj === void 0 ? void 0 : queryObj.string) || "", - queryValuesArray: queryObj === null || queryObj === void 0 ? void 0 : queryObj.values, - }); - recordedTableEntry = recordedTableEntryArray === null || recordedTableEntryArray === void 0 ? void 0 : recordedTableEntryArray[0]; + }, + dbFullName: "datasquirel", + }); + let recordedTableEntryArray = userId + ? await varDatabaseDbHandler({ + queryString: (queryObj === null || queryObj === void 0 ? void 0 : queryObj.string) || "", + queryValuesArray: queryObj === null || queryObj === void 0 ? void 0 : queryObj.values, + }) + : undefined; + let recordedTableEntry = recordedTableEntryArray === null || recordedTableEntryArray === void 0 ? void 0 : recordedTableEntryArray[0]; + if (!(recordedTableEntry === null || recordedTableEntry === void 0 ? void 0 : recordedTableEntry.id) && userId) { + const newTableInsertObject = { + user_id: numberfy(userId), + db_id: dbRecord === null || dbRecord === void 0 ? void 0 : dbRecord.id, + db_slug: dbRecord === null || dbRecord === void 0 ? void 0 : dbRecord.db_slug, + table_slug: tableSchema.tableName, + }; + if ((tableSchema === null || tableSchema === void 0 ? void 0 : tableSchema.childTable) && tableSchema.childTableId) { + const parentDb = dbSchema.find((db) => db.id == tableSchema.childTableDbId); + const parentDbTable = parentDb === null || parentDb === void 0 ? void 0 : parentDb.tables.find((tbl) => tbl.id == tableSchema.childTableId); + if (parentDb && parentDbTable) { + newTableInsertObject["child_table"] = 1; + newTableInsertObject["child_table_parent_database_schema_id"] = numberfy(parentDb.id); + newTableInsertObject["child_table_parent_table_schema_id"] = + numberfy(parentDbTable.id); } } - return recordedTableEntry; + const newTableRecordEntry = await addDbEntry({ + data: newTableInsertObject, + tableName: "user_database_tables", + dbContext: "Master", + forceLocal: true, + }); + if ((_a = newTableRecordEntry.payload) === null || _a === void 0 ? void 0 : _a.insertId) { + recordedTableEntryArray = await varDatabaseDbHandler({ + queryString: (queryObj === null || queryObj === void 0 ? void 0 : queryObj.string) || "", + queryValuesArray: queryObj === null || queryObj === void 0 ? void 0 : queryObj.values, + }); + recordedTableEntry = recordedTableEntryArray === null || recordedTableEntryArray === void 0 ? void 0 : recordedTableEntryArray[0]; + } } - catch (error) { - (_b = global.ERROR_CALLBACK) === null || _b === void 0 ? void 0 : _b.call(global, `Error Checking Table Record on Creating Schema`, error); - return undefined; - } - }); + return recordedTableEntry; + } + catch (error) { + (_b = global.ERROR_CALLBACK) === null || _b === void 0 ? 
void 0 : _b.call(global, `Error Checking Table Record on Creating Schema`, error); + return undefined; + } } diff --git a/dist/package-shared/shell/createDbFromSchema/grab-required-database-schemas.d.ts b/dist/package-shared/shell/createDbFromSchema/grab-required-database-schemas.d.ts new file mode 100644 index 0000000..4b2f018 --- /dev/null +++ b/dist/package-shared/shell/createDbFromSchema/grab-required-database-schemas.d.ts @@ -0,0 +1,24 @@ +import { DSQL_DatabaseSchemaType } from "../../types"; +type Params = { + userId?: string | number | null; + dbId?: string | number; + dbSlug?: string; +}; +export default function grabRequiredDatabaseSchemas(params: Params): DSQL_DatabaseSchemaType[] | undefined; +export declare function grabPrimaryRequiredDbSchema({ userId, dbId, dbSlug }: Params): DSQL_DatabaseSchemaType | undefined; +export declare function findDbNameInSchemaDir({ userId, dbName, }: { + userId?: string | number; + dbName?: string; +}): DSQL_DatabaseSchemaType | undefined; +type UpdateDbSchemaParam = { + dbSchema: DSQL_DatabaseSchemaType; + userId?: string | number | null; +}; +export declare function writeUpdatedDbSchema({ dbSchema, userId, }: UpdateDbSchemaParam): { + success?: boolean; + dbSchemaId?: string | number; +}; +export declare function deleteDbSchema({ dbSchema, userId }: UpdateDbSchemaParam): void; +export declare function findTargetDbSchemaFromMainSchema(schemas: DSQL_DatabaseSchemaType[], dbFullName?: string, dbId?: string | number): DSQL_DatabaseSchemaType | undefined; +export declare function grabLatestDbSchemaID(userSchemaDir: string): number; +export {}; diff --git a/dist/package-shared/shell/createDbFromSchema/grab-required-database-schemas.js b/dist/package-shared/shell/createDbFromSchema/grab-required-database-schemas.js new file mode 100644 index 0000000..3ec9d8d --- /dev/null +++ b/dist/package-shared/shell/createDbFromSchema/grab-required-database-schemas.js @@ -0,0 +1,218 @@ +import fs from "fs"; +import path from "path"; +import grabDirNames from "../../utils/backend/names/grab-dir-names"; +import EJSON from "../../utils/ejson"; +import numberfy from "../../utils/numberfy"; +import uniqueByKey from "../../utils/unique-by-key"; +export default function grabRequiredDatabaseSchemas(params) { + const primaryDbSchema = grabPrimaryRequiredDbSchema(params); + if (!primaryDbSchema) + return undefined; + let relatedDatabases = []; + const childrenDatabases = primaryDbSchema.childrenDatabases || []; + const childrenTables = primaryDbSchema.tables + .map((tbl) => { + return tbl.childrenTables || []; + }) + .flat() || []; + for (let i = 0; i < childrenDatabases.length; i++) { + const childDb = childrenDatabases[i]; + const childDbSchema = grabPrimaryRequiredDbSchema({ + userId: params.userId, + dbId: childDb.dbId, + }); + if (!(childDbSchema === null || childDbSchema === void 0 ? void 0 : childDbSchema.dbSlug)) + continue; + relatedDatabases.push(childDbSchema); + } + for (let i = 0; i < childrenTables.length; i++) { + const childTbl = childrenTables[i]; + const childTableDbSchema = grabPrimaryRequiredDbSchema({ + userId: params.userId, + dbId: childTbl.dbId, + }); + if (!(childTableDbSchema === null || childTableDbSchema === void 0 ? 
void 0 : childTableDbSchema.dbSlug)) + continue; + relatedDatabases.push(childTableDbSchema); + } + return uniqueByKey([primaryDbSchema, ...relatedDatabases], "dbFullName"); +} +export function grabPrimaryRequiredDbSchema({ userId, dbId, dbSlug }) { + let finalDbId = dbId; + if (!finalDbId && userId && dbSlug) { + const searchedDb = findDbNameInSchemaDir({ dbName: dbSlug, userId }); + if (searchedDb === null || searchedDb === void 0 ? void 0 : searchedDb.id) { + finalDbId = searchedDb.id; + } + } + if (!finalDbId) { + return undefined; + } + const { targetUserPrivateDir, oldSchemasDir } = grabDirNames({ + userId, + }); + const finalSchemaDir = targetUserPrivateDir || oldSchemasDir; + if (!finalSchemaDir) { + console.log(`finalSchemaDir not found!`); + return undefined; + } + if (finalDbId) { + const dbIdSchema = path.resolve(finalSchemaDir, `${finalDbId}.json`); + if (fs.existsSync(dbIdSchema)) { + const dbIdSchemaObject = EJSON.parse(fs.readFileSync(dbIdSchema, "utf-8")); + return dbIdSchemaObject; + } + } + const dbSchemasFiles = fs.readdirSync(finalSchemaDir); + let targetDbSchema; + try { + for (let i = 0; i < dbSchemasFiles.length; i++) { + const fileOrPath = dbSchemasFiles[i]; + if (!fileOrPath.endsWith(`.json`)) + continue; + if (!fileOrPath.match(/^\d+.json/)) + continue; + const targetFileJSONPath = path.join(finalSchemaDir, fileOrPath); + const targetSchema = EJSON.parse(fs.readFileSync(targetFileJSONPath, "utf-8")); + if (targetSchema && finalDbId && (targetSchema === null || targetSchema === void 0 ? void 0 : targetSchema.id) == finalDbId) { + targetDbSchema = targetSchema; + } + } + } + catch (error) { } + if (targetDbSchema) { + return targetDbSchema; + } + // else if ( dbFullName) { + // let existingSchemaInMainJSON = findTargetDbSchemaFromMainSchema( + // dbFullName + // ); + // const nextID = grabLatestDbSchemaID(finalSchemaDir); + // if (existingSchemaInMainJSON) { + // existingSchemaInMainJSON.id = nextID; + // fs.writeFileSync( + // path.join(finalSchemaDir, `${nextID}.json`), + // EJSON.stringify(existingSchemaInMainJSON) || "[]" + // ); + // return existingSchemaInMainJSON; + // } + // } + console.log(`userSchemaDir not found!`); + console.log(`userId`, userId); + return undefined; +} +export function findDbNameInSchemaDir({ userId, dbName, }) { + if (!userId) { + console.log(`userId not provided!`); + return undefined; + } + if (!dbName) { + console.log(`dbName not provided!`); + return undefined; + } + const { targetUserPrivateDir } = grabDirNames({ + userId, + }); + if (!targetUserPrivateDir) { + console.log(`targetUserPrivateDir not found!`); + return undefined; + } + const dbSchemasFiles = fs.readdirSync(targetUserPrivateDir); + let targetDbSchema; + try { + for (let i = 0; i < dbSchemasFiles.length; i++) { + const fileOrPath = dbSchemasFiles[i]; + if (!fileOrPath.endsWith(`.json`)) + continue; + if (!fileOrPath.match(/^\d+.json/)) + continue; + const targetFileJSONPath = path.join(targetUserPrivateDir, fileOrPath); + const targetSchema = EJSON.parse(fs.readFileSync(targetFileJSONPath, "utf-8")); + if (!targetSchema) + continue; + if (targetSchema.dbFullName == dbName || + targetSchema.dbSlug == dbName) { + targetDbSchema = targetSchema; + return targetSchema; + } + } + } + catch (error) { } + return targetDbSchema; +} +export function writeUpdatedDbSchema({ dbSchema, userId, }) { + const { targetUserPrivateDir } = grabDirNames({ + userId, + }); + if (!targetUserPrivateDir) { + console.log(`user ${userId} has no targetUserPrivateDir`); + return {}; + } + if 
(dbSchema.id) { + const dbIdSchemaPath = path.join(targetUserPrivateDir, `${dbSchema.id}.json`); + fs.writeFileSync(dbIdSchemaPath, EJSON.stringify(dbSchema) || "[]"); + return { success: true }; + } + else { + const nextID = grabLatestDbSchemaID(targetUserPrivateDir); + dbSchema.id = nextID; + fs.writeFileSync(path.join(targetUserPrivateDir, `${nextID}.json`), EJSON.stringify(dbSchema) || "[]"); + return { success: true, dbSchemaId: nextID }; + } +} +export function deleteDbSchema({ dbSchema, userId }) { + const { targetUserPrivateDir, userSchemaMainJSONFilePath } = grabDirNames({ + userId, + }); + if (!targetUserPrivateDir) + return; + const targetDbSchema = grabPrimaryRequiredDbSchema({ + dbId: dbSchema.id, + userId, + }); + const schemaFile = path.join(targetUserPrivateDir, `${targetDbSchema === null || targetDbSchema === void 0 ? void 0 : targetDbSchema.id}.json`); + try { + fs.unlinkSync(schemaFile); + } + catch (error) { } + if (userSchemaMainJSONFilePath && + fs.existsSync(userSchemaMainJSONFilePath)) { + try { + let allDbSchemas = EJSON.parse(fs.readFileSync(userSchemaMainJSONFilePath, "utf-8")); + if (allDbSchemas === null || allDbSchemas === void 0 ? void 0 : allDbSchemas[0]) { + for (let i = 0; i < allDbSchemas.length; i++) { + const dbSch = allDbSchemas[i]; + if (dbSch.dbFullName == dbSchema.dbFullName || + dbSch.id == dbSchema.id) { + allDbSchemas.splice(i, 1); + } + } + fs.writeFileSync(userSchemaMainJSONFilePath, EJSON.stringify(allDbSchemas) || "[]"); + } + } + catch (error) { } + } +} +export function findTargetDbSchemaFromMainSchema(schemas, dbFullName, dbId) { + const targetDbSchema = schemas.find((sch) => sch.dbFullName == dbFullName || (dbId && sch.id == dbId)); + return targetDbSchema; +} +export function grabLatestDbSchemaID(userSchemaDir) { + const dbSchemasFiles = fs.readdirSync(userSchemaDir); + const dbNumbers = dbSchemasFiles + .filter((dbSch) => { + if (!dbSch.endsWith(`.json`)) + return false; + if (dbSch.match(/^\d+\.json/)) + return true; + return false; + }) + .map((dbSch) => numberfy(dbSch.replace(/[^0-9]/g, ""))); + if (dbNumbers[0]) + return ((dbNumbers + .sort((a, b) => { + return a - b; + }) + .pop() || 0) + 1); + return 1; +} diff --git a/dist/package-shared/shell/createDbFromSchema/handle-indexes.js b/dist/package-shared/shell/createDbFromSchema/handle-indexes.js index e351c6f..847960f 100644 --- a/dist/package-shared/shell/createDbFromSchema/handle-indexes.js +++ b/dist/package-shared/shell/createDbFromSchema/handle-indexes.js @@ -1,57 +1,37 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = handleIndexescreateDbFromSchema; -const varDatabaseDbHandler_1 = __importDefault(require("../utils/varDatabaseDbHandler")); +import varDatabaseDbHandler from "../utils/varDatabaseDbHandler"; +import grabDSQLSchemaIndexComment from "../utils/grab-dsql-schema-index-comment"; /** * Handle DATASQUIREL Table Indexes * =================================================== * @description Iterate through each datasquirel schema * table index(if available), and perform operations */ -function handleIndexescreateDbFromSchema(_a) { - return __awaiter(this, arguments, void 0, function* ({ dbFullName, tableName, indexes, }) { - var _b; - for (let g = 0; g < indexes.length; g++) { - const { indexType, indexName, indexTableFields, alias } = indexes[g]; - if (!(alias === null || alias === void 0 ? void 0 : alias.match(/./))) - continue; - /** - * @description Check for existing Index in MYSQL db - */ - try { - /** - * @type {import("../../types").DSQL_MYSQL_SHOW_INDEXES_Type[]} - * @description All indexes from MYSQL db - */ // @ts-ignore - const allExistingIndexes = yield (0, varDatabaseDbHandler_1.default)({ - queryString: `SHOW INDEXES FROM \`${dbFullName}\`.\`${tableName}\``, - }); - const existingKeyInDb = allExistingIndexes.filter((indexObject) => indexObject.Key_name === alias); - if (!existingKeyInDb[0]) - throw new Error("This Index Does not Exist"); - } - catch (error) { - (_b = global.ERROR_CALLBACK) === null || _b === void 0 ? void 0 : _b.call(global, `Error Handling Indexes on Creating Schema`, error); - /** - * @description Create new index if determined that it - * doesn't exist in MYSQL db - */ - yield (0, varDatabaseDbHandler_1.default)({ - queryString: `CREATE${(indexType === null || indexType === void 0 ? void 0 : indexType.match(/fullText/i)) ? " FULLTEXT" : ""} INDEX \`${alias}\` ON \`${dbFullName}\`.\`${tableName}\`(${indexTableFields === null || indexTableFields === void 0 ? void 0 : indexTableFields.map((nm) => nm.value).map((nm) => `\`${nm}\``).join(",")}) COMMENT 'schema_index'`, - }); - } +export default async function handleIndexescreateDbFromSchema({ dbFullName, tableName, indexes, }) { + const allExistingIndexes = await varDatabaseDbHandler({ + queryString: `SHOW INDEXES FROM \`${dbFullName}\`.\`${tableName}\``, + }); + for (let g = 0; g < indexes.length; g++) { + const { indexType, indexName, indexTableFields, alias } = indexes[g]; + if (!(alias === null || alias === void 0 ? void 0 : alias.match(/./))) + continue; + /** + * @description Check for existing Index in MYSQL db + */ + try { + const existingKeyInDb = allExistingIndexes.filter((indexObject) => indexObject.Key_name === alias); + if (!existingKeyInDb[0]) + throw new Error("This Index Does not Exist"); } + catch (error) { + /** + * @description Create new index if determined that it + * doesn't exist in MYSQL db + */ + const queryString = `CREATE${indexType == "full_text" ? " FULLTEXT" : ""} INDEX \`${alias}\` ON \`${dbFullName}\`.\`${tableName}\`(${indexTableFields === null || indexTableFields === void 0 ? 
void 0 : indexTableFields.map((nm) => nm.value).map((nm) => `\`${nm}\``).join(",")}) COMMENT '${grabDSQLSchemaIndexComment()} ${indexName}'`; + const addIndex = await varDatabaseDbHandler({ queryString }); + } + } + const allExistingIndexesAfterUpdate = await varDatabaseDbHandler({ + queryString: `SHOW INDEXES FROM \`${dbFullName}\`.\`${tableName}\``, }); } diff --git a/dist/package-shared/shell/createDbFromSchema/index.d.ts b/dist/package-shared/shell/createDbFromSchema/index.d.ts index a350e69..2125217 100644 --- a/dist/package-shared/shell/createDbFromSchema/index.d.ts +++ b/dist/package-shared/shell/createDbFromSchema/index.d.ts @@ -1,11 +1,14 @@ +import { DSQL_DatabaseSchemaType } from "../../types"; type Param = { userId?: number | string | null; targetDatabase?: string; - dbSchemaData?: import("../../types").DSQL_DatabaseSchemaType[]; + dbSchemaData?: DSQL_DatabaseSchemaType[]; + targetTable?: string; + dbId?: string | number; }; /** * # Create database from Schema Function * @requires DSQL_DB_CONN - Gobal Variable for Datasquirel Database */ -export default function createDbFromSchema({ userId, targetDatabase, dbSchemaData, }: Param): Promise; +export default function createDbFromSchema({ userId, targetDatabase, dbSchemaData, targetTable, dbId, }: Param): Promise; export {}; diff --git a/dist/package-shared/shell/createDbFromSchema/index.js b/dist/package-shared/shell/createDbFromSchema/index.js index f896ab1..40f9837 100644 --- a/dist/package-shared/shell/createDbFromSchema/index.js +++ b/dist/package-shared/shell/createDbFromSchema/index.js @@ -1,46 +1,35 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = createDbFromSchema; -const fs_1 = __importDefault(require("fs")); -const noDatabaseDbHandler_1 = __importDefault(require("../utils/noDatabaseDbHandler")); -const varDatabaseDbHandler_1 = __importDefault(require("../utils/varDatabaseDbHandler")); -const createTable_1 = __importDefault(require("../utils/createTable")); -const updateTable_1 = __importDefault(require("../utils/updateTable")); -const dbHandler_1 = __importDefault(require("../utils/dbHandler")); -const ejson_1 = __importDefault(require("../../utils/ejson")); -const grab_dir_names_1 = __importDefault(require("../../utils/backend/names/grab-dir-names")); -const check_db_record_1 = __importDefault(require("./check-db-record")); -const check_table_record_1 = __importDefault(require("./check-table-record")); -const handle_indexes_1 = __importDefault(require("./handle-indexes")); +import noDatabaseDbHandler from "../utils/noDatabaseDbHandler"; +import varDatabaseDbHandler from "../utils/varDatabaseDbHandler"; +import createTable from "../utils/createTable"; +import updateTable from "../utils/updateTable"; +import grabDirNames from "../../utils/backend/names/grab-dir-names"; +import checkDbRecordCreateDbSchema from "./check-db-record"; +import handleIndexescreateDbFromSchema from "./handle-indexes"; +import grabRequiredDatabaseSchemas, { grabPrimaryRequiredDbSchema, } from "./grab-required-database-schemas"; +import dbHandler from "../../functions/backend/dbHandler"; /** * # Create database from Schema Function * @requires DSQL_DB_CONN - Gobal Variable for Datasquirel Database */ -function createDbFromSchema(_a) { - return __awaiter(this, arguments, void 0, function* ({ userId, targetDatabase, dbSchemaData, }) { - var _b, _c; - const { userSchemaMainJSONFilePath, mainShemaJSONFilePath } = (0, grab_dir_names_1.default)({ +export default async function createDbFromSchema({ userId, targetDatabase, dbSchemaData, targetTable, dbId, }) { + var _a, _b; + try { + const { userSchemaMainJSONFilePath } = grabDirNames({ userId, }); - const schemaPath = userSchemaMainJSONFilePath || mainShemaJSONFilePath; - const dbSchema = dbSchemaData || - ejson_1.default.parse(fs_1.default.readFileSync(schemaPath, "utf8")); + let dbSchema = dbSchemaData + ? dbSchemaData + : dbId + ? grabRequiredDatabaseSchemas({ + dbId, + userId, + }) + : undefined; if (!dbSchema) { console.log("Schema Not Found!"); return false; } + const isMain = !userSchemaMainJSONFilePath; for (let i = 0; i < dbSchema.length; i++) { const database = dbSchema[i]; const { dbFullName, tables, dbSlug, childrenDatabases } = database; @@ -49,14 +38,16 @@ function createDbFromSchema(_a) { if (targetDatabase && dbFullName != targetDatabase) { continue; } - const dbCheck = yield (0, noDatabaseDbHandler_1.default)(`SELECT SCHEMA_NAME AS dbFullName FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = '${dbFullName}'`); - if (!((_b = dbCheck === null || dbCheck === void 0 ? void 0 : dbCheck[0]) === null || _b === void 0 ? void 0 : _b.dbFullName)) { - const newDatabase = yield (0, noDatabaseDbHandler_1.default)(`CREATE DATABASE IF NOT EXISTS \`${dbFullName}\` CHARACTER SET utf8mb4 COLLATE utf8mb4_bin`); + console.log(`Handling database => ${dbFullName}`); + const dbCheck = await noDatabaseDbHandler(`SELECT SCHEMA_NAME AS dbFullName FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = '${dbFullName}'`); + if (!((_a = dbCheck === null || dbCheck === void 0 ? 
void 0 : dbCheck[0]) === null || _a === void 0 ? void 0 : _a.dbFullName)) { + const newDatabase = await noDatabaseDbHandler(`CREATE DATABASE IF NOT EXISTS \`${dbFullName}\` CHARACTER SET utf8mb4 COLLATE utf8mb4_bin`); } - const allTables = yield (0, noDatabaseDbHandler_1.default)(`SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA='${dbFullName}'`); - let recordedDbEntry = yield (0, check_db_record_1.default)({ + const allTables = await noDatabaseDbHandler(`SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA='${dbFullName}'`); + let recordedDbEntry = await checkDbRecordCreateDbSchema({ dbSchema: database, userId, + isMain, }); for (let tb = 0; tb < allTables.length; tb++) { const { TABLE_NAME } = allTables[tb]; @@ -67,28 +58,40 @@ function createDbFromSchema(_a) { * or the table name has been recently changed */ if (!targetTableSchema) { - const oldTable = tables.find((_table) => _table.tableNameOld && - _table.tableNameOld === TABLE_NAME); - /** - * @description Check if this table has been recently renamed. Rename - * table id true. Drop table if false - */ - if (oldTable) { - console.log("Renaming Table"); - yield (0, varDatabaseDbHandler_1.default)({ - queryString: `RENAME TABLE \`${dbFullName}\`.\`${oldTable.tableNameOld}\` TO \`${oldTable.tableName}\``, - }); - } - else { - console.log(`Dropping Table from ${dbFullName}`); - yield (0, varDatabaseDbHandler_1.default)({ - queryString: `DROP TABLE \`${dbFullName}\`.\`${TABLE_NAME}\``, - }); - const deleteTableEntry = yield (0, dbHandler_1.default)({ - query: `DELETE FROM datasquirel.user_database_tables WHERE user_id = ? AND db_slug = ? AND table_slug = ?`, - values: [userId, dbSlug, TABLE_NAME], - }); - } + console.log(`Dropping Table ${TABLE_NAME} from ${dbFullName}`); + await varDatabaseDbHandler({ + queryString: `DROP TABLE \`${dbFullName}\`.\`${TABLE_NAME}\``, + }); + const deleteTableEntry = await dbHandler({ + query: `DELETE FROM datasquirel.user_database_tables WHERE user_id = ? AND db_slug = ? AND table_slug = ?`, + values: [userId, dbSlug, TABLE_NAME], + }); + // const oldTable = tables.find( + // (_table) => + // _table.tableNameOld && + // _table.tableNameOld === TABLE_NAME + // ); + // /** + // * @description Check if this table has been recently renamed. Rename + // * table id true. Drop table if false + // */ + // if (oldTable) { + // console.log("Renaming Table"); + // await varDatabaseDbHandler({ + // queryString: `RENAME TABLE \`${dbFullName}\`.\`${oldTable.tableNameOld}\` TO \`${oldTable.tableName}\``, + // }); + // } else { + // console.log( + // `Dropping Table ${TABLE_NAME} from ${dbFullName}` + // ); + // await varDatabaseDbHandler({ + // queryString: `DROP TABLE \`${dbFullName}\`.\`${TABLE_NAME}\``, + // }); + // const deleteTableEntry = await dbHandler({ + // query: `DELETE FROM datasquirel.user_database_tables WHERE user_id = ? AND db_slug = ? 
AND table_slug = ?`, + // values: [userId, dbSlug, TABLE_NAME], + // }); + // } } } /** @@ -97,56 +100,64 @@ function createDbFromSchema(_a) { for (let t = 0; t < tables.length; t++) { const table = tables[t]; const { tableName, fields, indexes } = table; + if (targetTable && tableName !== targetTable) + continue; + console.log(`Handling table => ${tableName}`); /** * @description Check if table exists * @type {any} */ - const tableCheck = yield (0, varDatabaseDbHandler_1.default)({ + const tableCheck = await varDatabaseDbHandler({ queryString: ` - SELECT EXISTS ( - SELECT - TABLE_NAME - FROM - information_schema.TABLES - WHERE - TABLE_SCHEMA = ? AND - TABLE_NAME = ? + SELECT EXISTS ( + SELECT + TABLE_NAME + FROM + information_schema.TABLES + WHERE + TABLE_SCHEMA = ? AND + TABLE_NAME = ? ) AS tableExists`, queryValuesArray: [dbFullName, table.tableName], }); - //////////////////////////////////////// - if (tableCheck && ((_c = tableCheck[0]) === null || _c === void 0 ? void 0 : _c.tableExists) > 0) { + if (tableCheck && ((_b = tableCheck[0]) === null || _b === void 0 ? void 0 : _b.tableExists) > 0) { /** * @description Update table if table exists */ - const updateExistingTable = yield (0, updateTable_1.default)({ + const updateExistingTable = await updateTable({ dbFullName: dbFullName, tableName: tableName, - tableNameFull: table.tableFullName, - tableInfoArray: fields, + tableFields: fields, userId, - dbSchema, + dbSchema: database, tableIndexes: indexes, - tableIndex: t, - childDb: database.childDatabase || undefined, recordedDbEntry, tableSchema: table, + isMain, }); if (table.childrenTables && table.childrenTables[0]) { for (let ch = 0; ch < table.childrenTables.length; ch++) { const childTable = table.childrenTables[ch]; - const updateExistingChildTable = yield (0, updateTable_1.default)({ - dbFullName: childTable.dbNameFull, - tableName: childTable.tableName, - tableNameFull: childTable.tableNameFull, - tableInfoArray: fields, + const childTableParentDbSchema = grabPrimaryRequiredDbSchema({ + dbId: childTable.dbId, userId, - dbSchema, - tableIndexes: indexes, + }); + if (!(childTableParentDbSchema === null || childTableParentDbSchema === void 0 ? void 0 : childTableParentDbSchema.dbFullName)) + continue; + const childTableSchema = childTableParentDbSchema.tables.find((tbl) => tbl.id == childTable.tableId); + if (!childTableSchema) + continue; + const updateExistingChildTable = await updateTable({ + dbFullName: childTableParentDbSchema.dbFullName, + tableName: childTableSchema.tableName, + tableFields: childTableSchema.fields, + userId, + dbSchema: childTableParentDbSchema, + tableIndexes: childTableSchema.indexes, clone: true, - childDb: database.childDatabase || undefined, recordedDbEntry, tableSchema: table, + isMain, }); } } @@ -155,12 +166,13 @@ function createDbFromSchema(_a) { /** * @description Create new Table if table doesnt exist */ - const createNewTable = yield (0, createTable_1.default)({ + const createNewTable = await createTable({ tableName: tableName, tableInfoArray: fields, dbFullName: dbFullName, tableSchema: table, recordedDbEntry, + isMain, }); /** * Handle DATASQUIREL Table Indexes @@ -169,20 +181,13 @@ function createDbFromSchema(_a) { * table index(if available), and perform operations */ if (indexes === null || indexes === void 0 ? 
void 0 : indexes[0]) { - (0, handle_indexes_1.default)({ + handleIndexescreateDbFromSchema({ dbFullName, indexes, tableName, }); } } - const tableRecord = yield (0, check_table_record_1.default)({ - dbFullName, - dbSchema, - tableSchema: table, - dbRecord: recordedDbEntry, - userId, - }); } /** * @description Check all children databases @@ -191,14 +196,20 @@ function createDbFromSchema(_a) { for (let ch = 0; ch < childrenDatabases.length; ch++) { const childDb = childrenDatabases[ch]; const { dbId } = childDb; - const targetDatabase = dbSchema.find((dbSch) => dbSch.id == dbId); - yield createDbFromSchema({ - userId, - targetDatabase: targetDatabase === null || targetDatabase === void 0 ? void 0 : targetDatabase.dbFullName, - }); + const targetDatabase = dbSchema.find((dbSch) => dbSch.childDatabaseDbId == dbId); + if (targetDatabase === null || targetDatabase === void 0 ? void 0 : targetDatabase.id) { + await createDbFromSchema({ + userId, + dbId: targetDatabase === null || targetDatabase === void 0 ? void 0 : targetDatabase.id, + }); + } } } } return true; - }); + } + catch (error) { + console.log(`createDbFromSchema ERROR => ${error.message}`); + return false; + } } diff --git a/dist/package-shared/shell/deploy.js b/dist/package-shared/shell/deploy.js index 2abfbc7..7564176 100644 --- a/dist/package-shared/shell/deploy.js +++ b/dist/package-shared/shell/deploy.js @@ -1,15 +1,3 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", { value: true }); -function deploy() { - return __awaiter(this, void 0, void 0, function* () { }); -} +async function deploy() { } deploy(); +export {}; diff --git a/dist/package-shared/shell/encodingUpdate.js b/dist/package-shared/shell/encodingUpdate.js index c50ffb7..2806b7d 100644 --- a/dist/package-shared/shell/encodingUpdate.js +++ b/dist/package-shared/shell/encodingUpdate.js @@ -1,19 +1,5 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); require("dotenv").config({ path: "./../.env" }); -const varDatabaseDbHandler_1 = __importDefault(require("../functions/backend/varDatabaseDbHandler")); +import varDatabaseDbHandler from "../functions/backend/varDatabaseDbHandler"; /** ****************************************************************************** */ /** ****************************************************************************** */ /** ****************************************************************************** */ @@ -25,22 +11,22 @@ const varDatabaseDbHandler_1 = __importDefault(require("../functions/backend/var * * @description Grab Schema */ -(0, varDatabaseDbHandler_1.default)({ +varDatabaseDbHandler({ queryString: `SELECT user_database_tables.*,user_databases.db_full_name FROM user_database_tables JOIN user_databases ON user_database_tables.db_id=user_databases.id`, database: "datasquirel", -}).then((tables) => __awaiter(void 0, void 0, void 0, function* () { +}).then(async (tables) => { for (let i = 0; i < tables.length; i++) { const table = tables[i]; const { id, user_id, db_id, db_full_name, table_name, table_slug, table_description, } = table; - const tableInfo = yield (0, varDatabaseDbHandler_1.default)({ + const tableInfo = await varDatabaseDbHandler({ queryString: `SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA='${db_full_name}' AND TABLE_NAME='${table_slug}'`, }); - const updateDbCharset = yield (0, varDatabaseDbHandler_1.default)({ + const updateDbCharset = await varDatabaseDbHandler({ queryString: `ALTER DATABASE ${db_full_name} CHARACTER SET = utf8mb4 COLLATE = utf8mb4_bin;`, }); - const updateEncoding = yield (0, varDatabaseDbHandler_1.default)({ + const updateEncoding = await varDatabaseDbHandler({ queryString: `ALTER TABLE \`${db_full_name}\`.\`${table_slug}\` CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin`, }); } process.exit(); -})); +}); diff --git a/dist/package-shared/shell/functions/jsonToBase64.js b/dist/package-shared/shell/functions/jsonToBase64.js index c44ae99..3154405 100644 --- a/dist/package-shared/shell/functions/jsonToBase64.js +++ b/dist/package-shared/shell/functions/jsonToBase64.js @@ -1,10 +1,5 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const jsonFile = path_1.default.resolve(__dirname, "../../jsonData/userPriviledges.json"); -const base64File = Buffer.from(fs_1.default.readFileSync(jsonFile, "utf8")).toString("base64"); +import fs from "fs"; +import path from "path"; +const jsonFile = path.resolve(__dirname, "../../jsonData/userPriviledges.json"); +const base64File = Buffer.from(fs.readFileSync(jsonFile, "utf8")).toString("base64"); console.log(base64File); diff --git a/dist/package-shared/shell/grantFullPriviledges.js b/dist/package-shared/shell/grantFullPriviledges.js index 9a54b44..b5cb853 100644 --- a/dist/package-shared/shell/grantFullPriviledges.js +++ b/dist/package-shared/shell/grantFullPriviledges.js @@ -1,55 +1,34 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); require("dotenv").config({ path: "./../.env" }); -const serverError_1 = __importDefault(require("../functions/backend/serverError")); -const noDatabaseDbHandler_1 = __importDefault(require("./utils/noDatabaseDbHandler")); +import { AppNames } from "../dict/app-names"; +import serverError from "../functions/backend/serverError"; +import noDatabaseDbHandler from "./utils/noDatabaseDbHandler"; /** * # Create Database From Schema */ -function grantFullPrivileges(_a) { - return __awaiter(this, arguments, void 0, function* ({ userId }) { - /** - * Grab Schema - * - * @description Grab Schema - */ - try { - const defaultMariadbUserHost = process.env.DSQL_DB_HOST || "127.0.0.1"; - const allDatabases = yield (0, noDatabaseDbHandler_1.default)(`SHOW DATABASES`); - const datasquirelUserDatabases = allDatabases.filter((/** @type {any} */ database) => database.Database.match(/datasquirel_user_/)); - for (let i = 0; i < datasquirelUserDatabases.length; i++) { - const datasquirelUserDatabase = datasquirelUserDatabases[i]; - const { Database } = datasquirelUserDatabase; - // const grantDbPriviledges = await noDatabaseDbHandler( - // `GRANT ALL PRIVILEGES ON ${Database}.* TO '${process.env.DSQL_DB_FULL_ACCESS_USERNAME}'@'%' WITH GRANT OPTION` - // ); - // const grantRead = await noDatabaseDbHandler( - // `GRANT SELECT ON ${Database}.* TO '${process.env.DSQL_DB_READ_ONLY_USERNAME}'@'%'` - // ); - } - const flushPriviledged = yield (0, noDatabaseDbHandler_1.default)(`FLUSH PRIVILEGES`); +async function grantFullPrivileges({ userId }) { + /** + * Grab Schema + * + * @description Grab Schema + */ + try { + const defaultMariadbUserHost = process.env.DSQL_DB_HOST || "127.0.0.1"; + const allDatabases = await noDatabaseDbHandler(`SHOW DATABASES`); + const datasquirelUserDatabases = allDatabases.filter((database) => database.Database.match(new RegExp(`^${AppNames["DsqlDbPrefix"]}`))); + for (let i = 0; i < datasquirelUserDatabases.length; i++) { + const datasquirelUserDatabase = datasquirelUserDatabases[i]; + const { Database } = datasquirelUserDatabase; } - catch ( /** @type {any} */error) { - (0, serverError_1.default)({ - component: "shell/grantDbPriviledges/main-catch-error", - message: error.message, - user: { id: userId }, - }); - } - process.exit(); - }); + const flushPriviledged = await noDatabaseDbHandler(`FLUSH PRIVILEGES`); + } + catch (error) { + serverError({ + component: "shell/grantDbPriviledges/main-catch-error", + message: error.message, + user: { id: userId }, + }); + } + process.exit(); } const userArg = process.argv[process.argv.indexOf("--user")]; const externalUser = process.argv[process.argv.indexOf("--user") + 1]; diff --git a/dist/package-shared/shell/lessWatch.js b/dist/package-shared/shell/lessWatch.js index bfcd42a..ebda906 100644 --- a/dist/package-shared/shell/lessWatch.js +++ 
b/dist/package-shared/shell/lessWatch.js @@ -1,10 +1,5 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs_1 = __importDefault(require("fs")); -const child_process_1 = require("child_process"); +import fs from "fs"; +import { exec } from "child_process"; require("dotenv").config({ path: "./../.env" }); const sourceFile = process.argv.indexOf("--src") >= 0 ? process.argv[process.argv.indexOf("--src") + 1] @@ -21,7 +16,7 @@ if (!sourceFiles || !dstFiles) { for (let i = 0; i < sourceFiles.length; i++) { const srcFolder = sourceFiles[i]; const dstFile = dstFiles[i]; - fs_1.default.watch(srcFolder, { recursive: true }, (evtType, prev) => { + fs.watch(srcFolder, { recursive: true }, (evtType, prev) => { if ((prev === null || prev === void 0 ? void 0 : prev.match(/\(/)) || (prev === null || prev === void 0 ? void 0 : prev.match(/\.(j|t)s$/i))) { return; } @@ -38,13 +33,13 @@ for (let i = 0; i < sourceFiles.length; i++) { finalSrcPath = `${srcFolder}/${targetPathFull}`; finalDstPath = targetDstFilePath; } - (0, child_process_1.exec)(`lessc ${finalSrcPath} ${(finalDstPath === null || finalDstPath === void 0 ? void 0 : finalDstPath.match(/\.css$/)) + exec(`lessc ${finalSrcPath} ${(finalDstPath === null || finalDstPath === void 0 ? void 0 : finalDstPath.match(/\.css$/)) ? finalDstPath : finalDstPath.replace(/\/$/, "") + "/_main.css"}`, (error, stdout, stderr) => { if (error) { console.log("ERROR =>", error.message); if (!(evtType === null || evtType === void 0 ? void 0 : evtType.match(/change/i)) && (prev === null || prev === void 0 ? void 0 : prev.match(/\[/))) { - fs_1.default.unlinkSync(finalDstPath); + fs.unlinkSync(finalDstPath); } return; } diff --git a/dist/package-shared/shell/mariadb-users/handleGrants.js b/dist/package-shared/shell/mariadb-users/handleGrants.js index 94de0e7..ceb0da0 100644 --- a/dist/package-shared/shell/mariadb-users/handleGrants.js +++ b/dist/package-shared/shell/mariadb-users/handleGrants.js @@ -1,74 +1,57 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = handleGrants; -const noDatabaseDbHandler_1 = __importDefault(require("../utils/noDatabaseDbHandler")); +import noDatabaseDbHandler from "../utils/noDatabaseDbHandler"; /** * # Handle Grants for Users */ -function handleGrants(_a) { - return __awaiter(this, arguments, void 0, function* ({ username, host, grants, userId, }) { - var _b; - let success = false; - console.log(`Handling Grants for User =>`, username, host); - if (!username) { - console.log(`No username provided.`); - return success; - } - if (!host) { - console.log(`No Host provided. \x1b[35m\`--host\`\x1b[0m flag is required`); - return success; - } - if (!grants) { - console.log(`No grants Array provided.`); - return success; - } - try { - const existingUser = yield (0, noDatabaseDbHandler_1.default)(`SELECT * FROM mysql.user WHERE User = '${username}' AND Host = '${host}'`); - const isUserExisting = Boolean((_b = existingUser === null || existingUser === void 0 ? void 0 : existingUser[0]) === null || _b === void 0 ? void 0 : _b.User); - if (isUserExisting) { - const userGrants = yield (0, noDatabaseDbHandler_1.default)(`SHOW GRANTS FOR '${username}'@'${host}'`); - for (let i = 0; i < userGrants.length; i++) { - const grantObject = userGrants[i]; - const grant = grantObject === null || grantObject === void 0 ? void 0 : grantObject[Object.keys(grantObject)[0]]; - if (grant === null || grant === void 0 ? void 0 : grant.match(/GRANT .* PRIVILEGES ON .* TO/)) { - const revokeGrantText = grant - .replace(/GRANT/, "REVOKE") - .replace(/ TO /, " FROM "); - const revokePrivilege = yield (0, noDatabaseDbHandler_1.default)(revokeGrantText); - } - } - const grantsArray = grants; - for (let i = 0; i < grantsArray.length; i++) { - const grantObject = grantsArray[i]; - const { database, table, privileges } = grantObject; - const tableText = table == "*" ? "*" : `\`${table}\``; - const databaseText = database == "*" - ? `\`${process.env.DSQL_USER_DB_PREFIX}${userId}_%\`` - : `\`${database}\``; - const privilegesText = privileges.includes("ALL") - ? "ALL PRIVILEGES" - : privileges.join(", "); - const grantText = `GRANT ${privilegesText} ON ${databaseText}.${tableText} TO '${username}'@'${host}'`; - const grantPriviledge = yield (0, noDatabaseDbHandler_1.default)(grantText); +export default async function handleGrants({ username, host, grants, userId, }) { + var _a; + let success = false; + console.log(`Handling Grants for User =>`, username, host); + if (!username) { + console.log(`No username provided.`); + return success; + } + if (!host) { + console.log(`No Host provided. \x1b[35m\`--host\`\x1b[0m flag is required`); + return success; + } + if (!grants) { + console.log(`No grants Array provided.`); + return success; + } + try { + const existingUser = await noDatabaseDbHandler(`SELECT * FROM mysql.user WHERE User = '${username}' AND Host = '${host}'`); + const isUserExisting = Boolean((_a = existingUser === null || existingUser === void 0 ? void 0 : existingUser[0]) === null || _a === void 0 ? void 0 : _a.User); + if (isUserExisting) { + const userGrants = await noDatabaseDbHandler(`SHOW GRANTS FOR '${username}'@'${host}'`); + for (let i = 0; i < userGrants.length; i++) { + const grantObject = userGrants[i]; + const grant = grantObject === null || grantObject === void 0 ? void 0 : grantObject[Object.keys(grantObject)[0]]; + if (grant === null || grant === void 0 ? 
void 0 : grant.match(/GRANT .* PRIVILEGES ON .* TO/)) { + const revokeGrantText = grant + .replace(/GRANT/, "REVOKE") + .replace(/ TO /, " FROM "); + const revokePrivilege = await noDatabaseDbHandler(revokeGrantText); } } - success = true; + const grantsArray = grants; + for (let i = 0; i < grantsArray.length; i++) { + const grantObject = grantsArray[i]; + const { database, table, privileges } = grantObject; + const tableText = table == "*" ? "*" : `\`${table}\``; + const databaseText = database == "*" + ? `\`${process.env.DSQL_USER_DB_PREFIX}${userId}_%\`` + : `\`${database}\``; + const privilegesText = privileges.includes("ALL") + ? "ALL PRIVILEGES" + : privileges.join(", "); + const grantText = `GRANT ${privilegesText} ON ${databaseText}.${tableText} TO '${username}'@'${host}'`; + const grantPriviledge = await noDatabaseDbHandler(grantText); + } } - catch ( /** @type {any} */error) { - console.log(`Error in adding SQL user =>`, error.message); - } - return success; - }); + success = true; + } + catch ( /** @type {any} */error) { + console.log(`Error in adding SQL user =>`, error.message); + } + return success; } diff --git a/dist/package-shared/shell/mariadb-users/refreshUsersAndGrants.d.ts b/dist/package-shared/shell/mariadb-users/refreshUsersAndGrants.d.ts deleted file mode 100644 index 99d4596..0000000 --- a/dist/package-shared/shell/mariadb-users/refreshUsersAndGrants.d.ts +++ /dev/null @@ -1,11 +0,0 @@ -type Param = { - userId?: number | string; - mariadbUserHost?: string; - mariadbUsername?: string; - sqlUserID?: string | number; -}; -/** - * # Refresh Mariadb User Grants - */ -export default function refreshUsersAndGrants({ userId, mariadbUserHost, mariadbUsername, sqlUserID, }: Param): Promise; -export {}; diff --git a/dist/package-shared/shell/mariadb-users/refreshUsersAndGrants.js b/dist/package-shared/shell/mariadb-users/refreshUsersAndGrants.js deleted file mode 100644 index db6b01f..0000000 --- a/dist/package-shared/shell/mariadb-users/refreshUsersAndGrants.js +++ /dev/null @@ -1,110 +0,0 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = refreshUsersAndGrants; -const generate_password_1 = __importDefault(require("generate-password")); -const noDatabaseDbHandler_1 = __importDefault(require("../utils/noDatabaseDbHandler")); -const dbHandler_1 = __importDefault(require("../utils/dbHandler")); -const handleGrants_1 = __importDefault(require("./handleGrants")); -const encrypt_1 = __importDefault(require("../../functions/dsql/encrypt")); -const decrypt_1 = __importDefault(require("../../functions/dsql/decrypt")); -/** - * # Refresh Mariadb User Grants - */ -function refreshUsersAndGrants(_a) { - return __awaiter(this, arguments, void 0, function* ({ userId, mariadbUserHost, mariadbUsername, sqlUserID, }) { - var _b, _c, _d; - const mariadbUsers = (yield (0, dbHandler_1.default)({ - query: `SELECT * FROM mariadb_users`, - })); - if (!(mariadbUsers === null || mariadbUsers === void 0 ? void 0 : mariadbUsers[0])) { - return; - } - const isRootUser = userId - ? userId == Number(process.env.DSQL_SU_USER_ID) - : false; - const isWildcardHost = mariadbUserHost == "%"; - if (isWildcardHost && !isRootUser) { - return; - } - for (let i = 0; i < mariadbUsers.length; i++) { - const mariadbUser = mariadbUsers[i]; - if (!mariadbUser) - continue; - if (userId && mariadbUser.user_id != userId) - continue; - if (sqlUserID && mariadbUser.id != sqlUserID) - continue; - try { - const { username, password, host, user_id } = mariadbUser; - const existingUser = yield (0, noDatabaseDbHandler_1.default)(`SELECT * FROM mysql.user WHERE User = '${username}' AND Host = '${host}'`); - const isUserExisting = Boolean((_b = existingUser === null || existingUser === void 0 ? void 0 : existingUser[0]) === null || _b === void 0 ? void 0 : _b.User); - const isPrimary = ((_c = String(mariadbUser.primary)) === null || _c === void 0 ? void 0 : _c.match(/1/)) - ? true - : false; - const dsqlPassword = (mariadbUser === null || mariadbUser === void 0 ? void 0 : mariadbUser.password) - ? (0, decrypt_1.default)({ encryptedString: mariadbUser.password }) - : isUserExisting && password - ? (0, decrypt_1.default)({ encryptedString: password }) - : generate_password_1.default.generate({ - length: 16, - numbers: true, - symbols: true, - uppercase: true, - exclude: "*#.'`\"", - }); - const encryptedPassword = (mariadbUser === null || mariadbUser === void 0 ? void 0 : mariadbUser.password) - ? mariadbUser.password - : isUserExisting - ? password - : (0, encrypt_1.default)({ data: dsqlPassword }); - if (!isUserExisting) { - if (isWildcardHost) { - const _existingUsers = (yield (0, noDatabaseDbHandler_1.default)(`SELECT * FROM mysql.user WHERE user='${mariadbUsername}'`)); - for (let i = 0; i < _existingUsers.length; i++) { - const exUsr = _existingUsers[i]; - yield (0, noDatabaseDbHandler_1.default)(`DROP USER '${exUsr.User}'@'${exUsr.Host}'`); - } - } - const createNewUser = yield (0, noDatabaseDbHandler_1.default)(`CREATE USER IF NOT EXISTS '${mariadbUsername}'@'${mariadbUserHost}' IDENTIFIED BY '${dsqlPassword}'`); - } - if (isPrimary) { - const updateUser = yield (0, dbHandler_1.default)({ - query: `UPDATE users SET mariadb_user = ?, mariadb_host = ?, mariadb_pass = ? 
WHERE id = ?`, - values: [ - mariadbUsername, - mariadbUserHost, - encryptedPassword, - user_id, - ], - }); - } - const isGrantHandled = yield (0, handleGrants_1.default)({ - username: mariadbUser.username, - host: mariadbUser.host, - grants: mariadbUser.grants && typeof mariadbUser.grants == "string" - ? JSON.parse(mariadbUser.grants) - : [], - userId: String(user_id), - }); - if (!isGrantHandled) { - console.log(`Error in handling grants for user ${mariadbUser.username}@${mariadbUser.host}`); - } - } - catch (error) { - (_d = global.ERROR_CALLBACK) === null || _d === void 0 ? void 0 : _d.call(global, `Error Refreshing MariaDB Users and Grants`, error); - } - } - }); -} diff --git a/dist/package-shared/shell/mariadb-users/resetSQLPasswords.js b/dist/package-shared/shell/mariadb-users/resetSQLPasswords.js index 711250e..00aed47 100644 --- a/dist/package-shared/shell/mariadb-users/resetSQLPasswords.js +++ b/dist/package-shared/shell/mariadb-users/resetSQLPasswords.js @@ -1,71 +1,56 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); require("dotenv").config({ path: "../../.env" }); -const generate_password_1 = __importDefault(require("generate-password")); -const noDatabaseDbHandler_1 = __importDefault(require("../utils/noDatabaseDbHandler")); -const dbHandler_1 = __importDefault(require("../utils/dbHandler")); -const encrypt_1 = __importDefault(require("../../functions/dsql/encrypt")); +import generator from "generate-password"; +import noDatabaseDbHandler from "../utils/noDatabaseDbHandler"; +import dbHandler from "../../functions/backend/dbHandler"; +import encrypt from "../../functions/dsql/encrypt"; +import grabSQLKeyName from "../../utils/grab-sql-key-name"; /** * # Reset SQL Passwords */ -function resetSQLCredentialsPasswords() { - return __awaiter(this, void 0, void 0, function* () { - var _a; - const users = (yield (0, dbHandler_1.default)({ - query: `SELECT * FROM users`, - })); - if (!users) { - process.exit(); - } - for (let i = 0; i < users.length; i++) { - const user = users[i]; - if (!user) - continue; - try { - const maridbUsers = (yield (0, dbHandler_1.default)({ - query: `SELECT * FROM mysql.user WHERE User = 'dsql_user_${user.id}'`, - })); - for (let j = 0; j < maridbUsers.length; j++) { - const { User, Host } = maridbUsers[j]; - const password = generate_password_1.default.generate({ - length: 16, - numbers: true, - symbols: true, - uppercase: true, - exclude: "*#.'`\"", - }); - const encryptedPassword = (0, encrypt_1.default)({ - data: password, - encryptionKey: process.env.DSQL_ENCRYPTION_PASSWORD, - encryptionSalt: process.env.DSQL_ENCRYPTION_SALT, - }); - yield (0, noDatabaseDbHandler_1.default)(`SET PASSWORD FOR '${User}'@'${Host}' = PASSWORD('${password}')`); 
- if (user.mariadb_user == User && user.mariadb_host == Host) { - const updateUser = yield (0, dbHandler_1.default)({ - query: `UPDATE users SET mariadb_pass = ? WHERE id = ?`, - values: [encryptedPassword, user.id], - }); - } - console.log(`User ${user.id}: ${user.first_name} ${user.last_name} Password Updated successfully added.`); - } - } - catch (error) { - (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `Error Resetting MariaDB User Password`, error); - } - } +async function resetSQLCredentialsPasswords() { + var _a; + const users = (await dbHandler({ + query: `SELECT * FROM users`, + })); + if (!users) { process.exit(); - }); + } + for (let i = 0; i < users.length; i++) { + const user = users[i]; + if (!user) + continue; + try { + const maridbUsers = (await dbHandler({ + query: `SELECT * FROM mysql.user WHERE User = '${grabSQLKeyName({ type: "user", userId: user.id })}'`, + })); + for (let j = 0; j < maridbUsers.length; j++) { + const { User, Host } = maridbUsers[j]; + const password = generator.generate({ + length: 16, + numbers: true, + symbols: true, + uppercase: true, + exclude: "*#.'`\"", + }); + const encryptedPassword = encrypt({ + data: password, + encryptionKey: process.env.DSQL_ENCRYPTION_PASSWORD, + encryptionSalt: process.env.DSQL_ENCRYPTION_SALT, + }); + await noDatabaseDbHandler(`SET PASSWORD FOR '${User}'@'${Host}' = PASSWORD('${password}')`); + if (user.mariadb_user == User && user.mariadb_host == Host) { + const updateUser = await dbHandler({ + query: `UPDATE users SET mariadb_pass = ? WHERE id = ?`, + values: [encryptedPassword, user.id], + }); + } + console.log(`User ${user.id}: ${user.first_name} ${user.last_name} Password Updated successfully added.`); + } + } + catch (error) { + (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `Error Resetting MariaDB User Password`, error); + } + } + process.exit(); } resetSQLCredentialsPasswords(); diff --git a/dist/package-shared/shell/mariadb-users/users/create-user.js b/dist/package-shared/shell/mariadb-users/users/create-user.js index dc51bce..f8b982e 100644 --- a/dist/package-shared/shell/mariadb-users/users/create-user.js +++ b/dist/package-shared/shell/mariadb-users/users/create-user.js @@ -1,136 +1,121 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const path_1 = __importDefault(require("path")); +import path from "path"; require("dotenv").config({ path: "../../../.env" }); -const fs_1 = __importDefault(require("fs")); -const child_process_1 = require("child_process"); -const ejson_1 = __importDefault(require("../../../utils/ejson")); -const DB_HANDLER_1 = __importDefault(require("../../../utils/backend/global-db/DB_HANDLER")); -const addDbEntry_1 = __importDefault(require("../../../functions/backend/db/addDbEntry")); -const addMariadbUser_1 = __importDefault(require("../../../functions/backend/addMariadbUser")); -const updateDbEntry_1 = __importDefault(require("../../../functions/backend/db/updateDbEntry")); -const hashPassword_1 = __importDefault(require("../../../functions/dsql/hashPassword")); +import fs from "fs"; +import { execSync } from "child_process"; +import EJSON from "../../../utils/ejson"; +import DB_HANDLER from "../../../utils/backend/global-db/DB_HANDLER"; +import addDbEntry from "../../../functions/backend/db/addDbEntry"; +import addMariadbUser from "../../../functions/backend/addMariadbUser"; +import updateDbEntry from "../../../functions/backend/db/updateDbEntry"; +import hashPassword from "../../../functions/dsql/hashPassword"; +import grabDirNames from "../../../utils/backend/names/grab-dir-names"; const tmpDir = process.argv[process.argv.length - 1]; /** * # Create New User */ -function createUser() { - return __awaiter(this, void 0, void 0, function* () { - var _a; +async function createUser() { + var _a, _b; + /** + * Validate Form + * + * @description Check if request body is valid + */ + try { + const isTmpDir = Boolean(tmpDir === null || tmpDir === void 0 ? void 0 : tmpDir.match(/\.json$/)); + const targetPath = isTmpDir + ? path.resolve(process.cwd(), tmpDir) + : path.resolve(__dirname, "./new-user.json"); + const userObj = EJSON.parse(fs.readFileSync(targetPath, "utf-8")); + if (typeof userObj !== "object" || Array.isArray(userObj)) + throw new Error("User Object Invalid!"); + const ROOT_DIR = path.resolve(__dirname, "../../../"); /** * Validate Form * * @description Check if request body is valid */ - try { - const isTmpDir = Boolean(tmpDir === null || tmpDir === void 0 ? void 0 : tmpDir.match(/\.json$/)); - const targetPath = isTmpDir - ? path_1.default.resolve(process.cwd(), tmpDir) - : path_1.default.resolve(__dirname, "./new-user.json"); - const userObj = ejson_1.default.parse(fs_1.default.readFileSync(targetPath, "utf-8")); - if (typeof userObj !== "object" || Array.isArray(userObj)) - throw new Error("User Object Invalid!"); - const ROOT_DIR = path_1.default.resolve(__dirname, "../../../"); - /** - * Validate Form - * - * @description Check if request body is valid - */ - const first_name = userObj.first_name; - const last_name = userObj.last_name; - const email = userObj.email; - const password = userObj.password; - const username = userObj.username; - if (!(email === null || email === void 0 ? void 0 : email.match(/.*@.*\..*/))) - return false; - if (!(first_name === null || first_name === void 0 ? void 0 : first_name.match(/^[a-zA-Z]+$/)) || - !(last_name === null || last_name === void 0 ? void 0 : last_name.match(/^[a-zA-Z]+$/))) - return false; - if (password === null || password === void 0 ? void 0 : password.match(/ /)) - return false; - if (username === null || username === void 0 ? 
void 0 : username.match(/ /)) - return false; - let hashedPassword = (0, hashPassword_1.default)({ - encryptionKey: process.env.DSQL_ENCRYPTION_PASSWORD || "", - password: password, - }); - let existingUser = yield (0, DB_HANDLER_1.default)(`SELECT * FROM users WHERE email='${email}'`); - if (existingUser === null || existingUser === void 0 ? void 0 : existingUser[0]) { - console.log("User Exists"); - return false; - } - const newUser = yield (0, addDbEntry_1.default)({ - dbFullName: "datasquirel", - tableName: "users", - data: Object.assign(Object.assign({}, userObj), { password: hashedPassword }), - }); - if (!(newUser === null || newUser === void 0 ? void 0 : newUser.insertId)) - return false; - /** - * Add a Mariadb User for this User - */ - yield (0, addMariadbUser_1.default)({ userId: newUser.insertId }); - const STATIC_ROOT = process.env.DSQL_STATIC_SERVER_DIR; - if (!STATIC_ROOT) { - console.log("Static File ENV not Found!"); - throw new Error("No Static Path"); - } - /** - * Create new user folder and file - * - * @description Create new user folder and file - */ - let newUserSchemaFolderPath = `${process.env.DSQL_USER_DB_SCHEMA_PATH}/user-${newUser.insertId}`; - let newUserMediaFolderPath = path_1.default.join(STATIC_ROOT, `images/user-images/user-${newUser.insertId}`); - fs_1.default.mkdirSync(newUserSchemaFolderPath, { recursive: true }); - fs_1.default.mkdirSync(newUserMediaFolderPath, { recursive: true }); - fs_1.default.writeFileSync(`${newUserSchemaFolderPath}/main.json`, JSON.stringify([]), "utf8"); - const imageBasePath = path_1.default.join(STATIC_ROOT, `images/user-images/user-${newUser.insertId}`); - if (!fs_1.default.existsSync(imageBasePath)) { - fs_1.default.mkdirSync(imageBasePath, { recursive: true }); - } - let imagePath = path_1.default.join(STATIC_ROOT, `images/user-images/user-${newUser.insertId}/user-${newUser.insertId}-profile.jpg`); - let imageThumbnailPath = path_1.default.join(STATIC_ROOT, `images/user-images/user-${newUser.insertId}/user-${newUser.insertId}-profile-thumbnail.jpg`); - let prodImageUrl = imagePath.replace(STATIC_ROOT, process.env.DSQL_STATIC_HOST || ""); - let prodImageThumbnailUrl = imageThumbnailPath.replace(STATIC_ROOT, process.env.DSQL_STATIC_HOST || ""); - fs_1.default.copyFileSync(path_1.default.join(ROOT_DIR, "/public/images/user-preset.png"), imagePath); - fs_1.default.copyFileSync(path_1.default.join(ROOT_DIR, "/public/images/user-preset-thumbnail.png"), imageThumbnailPath); - (0, child_process_1.execSync)(`chmod 644 ${imagePath} ${imageThumbnailPath}`); - const updateImages = yield (0, updateDbEntry_1.default)({ - dbFullName: "datasquirel", - tableName: "users", - identifierColumnName: "id", - identifierValue: newUser.insertId, - data: { - image: prodImageUrl, - image_thumbnail: prodImageThumbnailUrl, - }, - }); - if (isTmpDir) { - try { - fs_1.default.unlinkSync(path_1.default.resolve(process.cwd(), tmpDir)); - } - catch (error) { } - } - return true; - } - catch (error) { - (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `Error Creating User`, error); + const first_name = userObj.first_name; + const last_name = userObj.last_name; + const email = userObj.email; + const password = userObj.password; + const username = userObj.username; + if (!(email === null || email === void 0 ? void 0 : email.match(/.*@.*\..*/))) + return false; + if (!(first_name === null || first_name === void 0 ? void 0 : first_name.match(/^[a-zA-Z]+$/)) || + !(last_name === null || last_name === void 0 ? 
void 0 : last_name.match(/^[a-zA-Z]+$/))) + return false; + if (password === null || password === void 0 ? void 0 : password.match(/ /)) + return false; + if (username === null || username === void 0 ? void 0 : username.match(/ /)) + return false; + let hashedPassword = hashPassword({ + encryptionKey: process.env.DSQL_ENCRYPTION_PASSWORD || "", + password: password, + }); + let existingUser = await DB_HANDLER(`SELECT * FROM users WHERE email='${email}'`); + if (existingUser === null || existingUser === void 0 ? void 0 : existingUser[0]) { + console.log("User Exists"); return false; } - }); + const newUser = await addDbEntry({ + dbFullName: "datasquirel", + tableName: "users", + data: Object.assign(Object.assign({}, userObj), { password: hashedPassword }), + }); + if (!((_a = newUser === null || newUser === void 0 ? void 0 : newUser.payload) === null || _a === void 0 ? void 0 : _a.insertId)) + return false; + /** + * Add a Mariadb User for this User + */ + await addMariadbUser({ userId: newUser.payload.insertId }); + const { STATIC_ROOT } = grabDirNames(); + if (!STATIC_ROOT) { + console.log("Static File ENV not Found!"); + throw new Error("No Static Path"); + } + /** + * Create new user folder and file + * + * @description Create new user folder and file + */ + let newUserSchemaFolderPath = `${process.env.DSQL_USER_DB_SCHEMA_PATH}/user-${newUser.payload.insertId}`; + let newUserMediaFolderPath = path.join(STATIC_ROOT, `images/user-images/user-${newUser.payload.insertId}`); + fs.mkdirSync(newUserSchemaFolderPath, { recursive: true }); + fs.mkdirSync(newUserMediaFolderPath, { recursive: true }); + fs.writeFileSync(`${newUserSchemaFolderPath}/main.json`, JSON.stringify([]), "utf8"); + const imageBasePath = path.join(STATIC_ROOT, `images/user-images/user-${newUser.payload.insertId}`); + if (!fs.existsSync(imageBasePath)) { + fs.mkdirSync(imageBasePath, { recursive: true }); + } + let imagePath = path.join(STATIC_ROOT, `images/user-images/user-${newUser.payload.insertId}/user-${newUser.payload.insertId}-profile.jpg`); + let imageThumbnailPath = path.join(STATIC_ROOT, `images/user-images/user-${newUser.payload.insertId}/user-${newUser.payload.insertId}-profile-thumbnail.jpg`); + let prodImageUrl = imagePath.replace(STATIC_ROOT, process.env.DSQL_STATIC_HOST || ""); + let prodImageThumbnailUrl = imageThumbnailPath.replace(STATIC_ROOT, process.env.DSQL_STATIC_HOST || ""); + fs.copyFileSync(path.join(ROOT_DIR, "/public/images/user-preset.png"), imagePath); + fs.copyFileSync(path.join(ROOT_DIR, "/public/images/user-preset-thumbnail.png"), imageThumbnailPath); + execSync(`chmod 644 ${imagePath} ${imageThumbnailPath}`); + const updateImages = await updateDbEntry({ + dbFullName: "datasquirel", + tableName: "users", + identifierColumnName: "id", + identifierValue: newUser.payload.insertId, + data: { + image: prodImageUrl, + image_thumbnail: prodImageThumbnailUrl, + }, + }); + if (isTmpDir) { + try { + fs.unlinkSync(path.resolve(process.cwd(), tmpDir)); + } + catch (error) { } + } + return true; + } + catch (error) { + (_b = global.ERROR_CALLBACK) === null || _b === void 0 ? 
void 0 : _b.call(global, `Error Creating User`, error); + return false; + } } createUser().then((res) => { if (res) { diff --git a/dist/package-shared/shell/mariadb-users/users/update-user.js b/dist/package-shared/shell/mariadb-users/users/update-user.js index 8996d0a..cb20ada 100644 --- a/dist/package-shared/shell/mariadb-users/users/update-user.js +++ b/dist/package-shared/shell/mariadb-users/users/update-user.js @@ -1,75 +1,59 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const path_1 = __importDefault(require("path")); +import path from "path"; require("dotenv").config({ path: "../../../.env" }); -const fs_1 = __importDefault(require("fs")); -const ejson_1 = __importDefault(require("../../../utils/ejson")); -const hashPassword_1 = __importDefault(require("../../../functions/dsql/hashPassword")); -const updateDbEntry_1 = __importDefault(require("../../../functions/backend/db/updateDbEntry")); +import fs from "fs"; +import EJSON from "../../../utils/ejson"; +import hashPassword from "../../../functions/dsql/hashPassword"; +import updateDbEntry from "../../../functions/backend/db/updateDbEntry"; const tmpDir = process.argv[process.argv.length - 1]; /** * # Create New User */ -function createUser() { - return __awaiter(this, void 0, void 0, function* () { - var _a; - /** - * Validate Form - * - * @description Check if request body is valid - */ - try { - const isTmpDir = Boolean(tmpDir === null || tmpDir === void 0 ? void 0 : tmpDir.match(/\.json$/)); - const targetPath = isTmpDir - ? path_1.default.resolve(process.cwd(), tmpDir) - : path_1.default.resolve(__dirname, "./update-user.json"); - const updateUserObj = ejson_1.default.parse(fs_1.default.readFileSync(targetPath, "utf-8")); - if (typeof updateUserObj !== "object" || Array.isArray(updateUserObj)) - throw new Error("Update User Object Invalid!"); - let hashedPassword = updateUserObj.password - ? (0, hashPassword_1.default)({ - encryptionKey: process.env.DSQL_ENCRYPTION_PASSWORD || "", - password: updateUserObj.password, - }) - : undefined; - let updatePayload = Object.assign({}, updateUserObj); - if (hashedPassword) { - updatePayload["password"] = hashedPassword; - } - const newUser = yield (0, updateDbEntry_1.default)({ - dbFullName: "datasquirel", - tableName: "users", - data: Object.assign(Object.assign({}, updatePayload), { id: undefined }), - identifierColumnName: "id", - identifierValue: updatePayload.id, - }); - if (!(newUser === null || newUser === void 0 ? 
void 0 : newUser.affectedRows)) - return false; - if (isTmpDir) { - try { - fs_1.default.unlinkSync(path_1.default.resolve(process.cwd(), tmpDir)); - } - catch (error) { } - } - return true; +async function createUser() { + var _a; + /** + * Validate Form + * + * @description Check if request body is valid + */ + try { + const isTmpDir = Boolean(tmpDir === null || tmpDir === void 0 ? void 0 : tmpDir.match(/\.json$/)); + const targetPath = isTmpDir + ? path.resolve(process.cwd(), tmpDir) + : path.resolve(__dirname, "./update-user.json"); + const updateUserObj = EJSON.parse(fs.readFileSync(targetPath, "utf-8")); + if (typeof updateUserObj !== "object" || Array.isArray(updateUserObj)) + throw new Error("Update User Object Invalid!"); + let hashedPassword = updateUserObj.password + ? hashPassword({ + encryptionKey: process.env.DSQL_ENCRYPTION_PASSWORD || "", + password: updateUserObj.password, + }) + : undefined; + let updatePayload = Object.assign({}, updateUserObj); + if (hashedPassword) { + updatePayload["password"] = hashedPassword; } - catch (error) { - (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `Error Updating User`, error); + const newUser = await updateDbEntry({ + dbFullName: "datasquirel", + tableName: "users", + data: Object.assign(Object.assign({}, updatePayload), { id: undefined }), + identifierColumnName: "id", + identifierValue: updatePayload.id, + }); + if (!(newUser === null || newUser === void 0 ? void 0 : newUser.affectedRows)) return false; + if (isTmpDir) { + try { + fs.unlinkSync(path.resolve(process.cwd(), tmpDir)); + } + catch (error) { } } - }); + return true; + } + catch (error) { + (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `Error Updating User`, error); + return false; + } } createUser().then((res) => { if (res) { diff --git a/dist/package-shared/shell/readImage.js b/dist/package-shared/shell/readImage.js index 0693d08..4e0f855 100644 --- a/dist/package-shared/shell/readImage.js +++ b/dist/package-shared/shell/readImage.js @@ -1,7 +1,2 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs_1 = __importDefault(require("fs")); -const imageBase64 = fs_1.default.readFileSync("./../public/images/unique-tokens-icon.png", "base64"); +import fs from "fs"; +const imageBase64 = fs.readFileSync("./../public/images/unique-tokens-icon.png", "base64"); diff --git a/dist/package-shared/shell/recoverMainJsonFromDb.js b/dist/package-shared/shell/recoverMainJsonFromDb.js index a88da32..dd8258e 100644 --- a/dist/package-shared/shell/recoverMainJsonFromDb.js +++ b/dist/package-shared/shell/recoverMainJsonFromDb.js @@ -1,21 +1,7 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs_1 = __importDefault(require("fs")); +import fs from "fs"; require("dotenv").config({ path: "./../.env" }); -const varDatabaseDbHandler_1 = __importDefault(require("../functions/backend/varDatabaseDbHandler")); -const DB_HANDLER_1 = __importDefault(require("../utils/backend/global-db/DB_HANDLER")); +import varDatabaseDbHandler from "../functions/backend/varDatabaseDbHandler"; +import DB_HANDLER from "../utils/backend/global-db/DB_HANDLER"; const userId = process.argv.indexOf("--userId") >= 0 ? process.argv[process.argv.indexOf("--userId") + 1] : null; @@ -24,63 +10,61 @@ const userId = process.argv.indexOf("--userId") >= 0 * * @description Grab Schema */ -function recoverMainJsonFromDb() { - return __awaiter(this, void 0, void 0, function* () { - if (!userId) { - console.log("No user Id provided"); - return; - } - const databases = yield (0, DB_HANDLER_1.default)(`SELECT * FROM user_databases WHERE user_id='${userId}'`); - const dbWrite = []; - for (let i = 0; i < databases.length; i++) { - const { id, db_name, db_slug, db_full_name, db_image, db_description } = databases[i]; - const dbObject = { - dbName: db_name, - dbSlug: db_slug, - dbFullName: db_full_name, - dbDescription: db_description, - dbImage: db_image, - tables: [], +async function recoverMainJsonFromDb() { + if (!userId) { + console.log("No user Id provided"); + return; + } + const databases = await DB_HANDLER(`SELECT * FROM user_databases WHERE user_id='${userId}'`); + const dbWrite = []; + for (let i = 0; i < databases.length; i++) { + const { id, db_name, db_slug, db_full_name, db_image, db_description } = databases[i]; + const dbObject = { + dbName: db_name, + dbSlug: db_slug, + dbFullName: db_full_name, + dbDescription: db_description, + dbImage: db_image, + tables: [], + }; + const tables = await DB_HANDLER(`SELECT * FROM user_database_tables WHERE user_id='${userId}' AND db_id='${id}'`); + for (let j = 0; j < tables.length; j++) { + const { table_name, table_slug, table_description } = tables[j]; + const tableObject = { + tableName: table_slug, + tableFullName: table_name, + fields: [], + indexes: [], }; - const tables = yield (0, DB_HANDLER_1.default)(`SELECT * FROM user_database_tables WHERE user_id='${userId}' AND db_id='${id}'`); - for (let j = 0; j < tables.length; j++) { - const { table_name, table_slug, table_description } = tables[j]; - const tableObject = { - tableName: table_slug, - tableFullName: table_name, - fields: [], - indexes: [], + const tableFields = await varDatabaseDbHandler({ + database: db_full_name, + queryString: `SHOW COLUMNS FROM ${db_full_name}.${table_slug}`, + }); + for (let k = 0; k < tableFields.length; k++) { + const { Field, Type, Null, Default, Key } = tableFields[k]; + const fieldObject = { + fieldName: Field, + dataType: Type.toUpperCase(), }; - const tableFields = yield (0, varDatabaseDbHandler_1.default)({ - database: db_full_name, - queryString: `SHOW COLUMNS FROM ${db_full_name}.${table_slug}`, - }); - for (let k = 0; k < tableFields.length; k++) { - const { Field, Type, Null, Default, Key } = tableFields[k]; - const fieldObject = { - fieldName: Field, - dataType: Type.toUpperCase(), - }; - if ((Default === null || 
Default === void 0 ? void 0 : Default.match(/./)) && !(Default === null || Default === void 0 ? void 0 : Default.match(/timestamp/i))) - fieldObject["defaultValue"] = Default; - if (Key === null || Key === void 0 ? void 0 : Key.match(/pri/i)) { - fieldObject["primaryKey"] = true; - fieldObject["autoIncrement"] = true; - } - if (Default === null || Default === void 0 ? void 0 : Default.match(/timestamp/i)) - fieldObject["defaultValueLiteral"] = Default; - if (Null === null || Null === void 0 ? void 0 : Null.match(/yes/i)) - fieldObject["nullValue"] = true; - if (Null === null || Null === void 0 ? void 0 : Null.match(/no/i)) - fieldObject["notNullValue"] = true; - tableObject.fields.push(fieldObject); + if ((Default === null || Default === void 0 ? void 0 : Default.match(/./)) && !(Default === null || Default === void 0 ? void 0 : Default.match(/timestamp/i))) + fieldObject["defaultValue"] = Default; + if (Key === null || Key === void 0 ? void 0 : Key.match(/pri/i)) { + fieldObject["primaryKey"] = true; + fieldObject["autoIncrement"] = true; } - dbObject.tables.push(tableObject); + if (Default === null || Default === void 0 ? void 0 : Default.match(/timestamp/i)) + fieldObject["defaultValueLiteral"] = Default; + if (Null === null || Null === void 0 ? void 0 : Null.match(/yes/i)) + fieldObject["nullValue"] = true; + if (Null === null || Null === void 0 ? void 0 : Null.match(/no/i)) + fieldObject["notNullValue"] = true; + tableObject.fields.push(fieldObject); } - dbWrite.push(dbObject); + dbObject.tables.push(tableObject); } - fs_1.default.writeFileSync(`${String(process.env.DSQL_USER_DB_SCHEMA_PATH)}/user-${userId}/main.json`, JSON.stringify(dbWrite, null, 4), "utf-8"); - process.exit(); - }); + dbWrite.push(dbObject); + } + fs.writeFileSync(`${String(process.env.DSQL_USER_DB_SCHEMA_PATH)}/user-${userId}/main.json`, JSON.stringify(dbWrite, null, 4), "utf-8"); + process.exit(); } recoverMainJsonFromDb(); diff --git a/dist/package-shared/shell/resetSQLCredentials.js b/dist/package-shared/shell/resetSQLCredentials.js deleted file mode 100644 index 5946530..0000000 --- a/dist/package-shared/shell/resetSQLCredentials.js +++ /dev/null @@ -1,74 +0,0 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -require("dotenv").config({ path: "./../.env" }); -const generate_password_1 = __importDefault(require("generate-password")); -const noDatabaseDbHandler_1 = __importDefault(require("./utils/noDatabaseDbHandler")); -const dbHandler_1 = __importDefault(require("./utils/dbHandler")); -const encrypt_1 = __importDefault(require("../functions/dsql/encrypt")); -/** - * Create database from Schema Function - * ============================================================================== - * @param {object} params - Single object params - * @param {number|string|null} params.userId - User ID or null - */ -function resetSQLCredentials() { - return __awaiter(this, void 0, void 0, function* () { - var _a; - const users = (yield (0, dbHandler_1.default)({ - query: `SELECT * FROM users`, - })); - if (!users) { - process.exit(); - } - for (let i = 0; i < users.length; i++) { - const user = users[i]; - if (!user) - continue; - const defaultMariadbUserHost = process.env.DSQL_DB_HOST || "127.0.0.1"; - try { - const username = `dsql_user_${user.id}`; - const password = generate_password_1.default.generate({ - length: 16, - numbers: true, - symbols: true, - uppercase: true, - exclude: "*#.'`\"", - }); - const encryptedPassword = (0, encrypt_1.default)({ data: password }); - yield (0, noDatabaseDbHandler_1.default)(`DROP USER IF EXISTS '${username}'@'%'`); - yield (0, noDatabaseDbHandler_1.default)(`DROP USER IF EXISTS '${username}'@'${defaultMariadbUserHost}'`); - yield (0, noDatabaseDbHandler_1.default)(`CREATE USER IF NOT EXISTS '${username}'@'${defaultMariadbUserHost}' IDENTIFIED BY '${password}'`); - yield (0, noDatabaseDbHandler_1.default)(`GRANT ALL PRIVILEGES ON \`datasquirel_user_${user.id}_%\`.* TO '${username}'@'${defaultMariadbUserHost}'`); - yield (0, noDatabaseDbHandler_1.default)(`FLUSH PRIVILEGES`); - const updateUser = yield (0, dbHandler_1.default)({ - query: `UPDATE users SET mariadb_user = ?, mariadb_host = ?, mariadb_pass = ? WHERE id = ?`, - values: [ - username, - defaultMariadbUserHost, - encryptedPassword, - user.id, - ], - }); - console.log(`User ${user.id}: ${user.first_name} ${user.last_name} SQL credentials successfully added.`); - } - catch (error) { - (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `Error Resetting SQL credentials`, error); - console.log(`Error in adding SQL user =>`, error.message); - } - } - process.exit(); - }); -} -resetSQLCredentials(); diff --git a/dist/package-shared/shell/resetSQLCredentialsPasswords.js b/dist/package-shared/shell/resetSQLCredentialsPasswords.js index cf983cc..0fdac4e 100644 --- a/dist/package-shared/shell/resetSQLCredentialsPasswords.js +++ b/dist/package-shared/shell/resetSQLCredentialsPasswords.js @@ -1,61 +1,46 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); require("dotenv").config({ path: "./../.env" }); -const generate_password_1 = __importDefault(require("generate-password")); -const noDatabaseDbHandler_1 = __importDefault(require("./utils/noDatabaseDbHandler")); -const dbHandler_1 = __importDefault(require("./utils/dbHandler")); -const encrypt_1 = __importDefault(require("../functions/dsql/encrypt")); +import generator from "generate-password"; +import noDatabaseDbHandler from "./utils/noDatabaseDbHandler"; +import dbHandler from "../functions/backend/dbHandler"; +import encrypt from "../functions/dsql/encrypt"; +import grabSQLKeyName from "../utils/grab-sql-key-name"; /** * # Create database from Schema Function */ -function resetSQLCredentialsPasswords() { - return __awaiter(this, void 0, void 0, function* () { - var _a; - const users = (yield (0, dbHandler_1.default)({ - query: `SELECT * FROM users`, - })); - if (!users) { - process.exit(); - } - for (let i = 0; i < users.length; i++) { - const user = users[i]; - if (!user) - continue; - const defaultMariadbUserHost = process.env.DSQL_DB_HOST || "127.0.0.1"; - try { - const username = `dsql_user_${user.id}`; - const password = generate_password_1.default.generate({ - length: 16, - numbers: true, - symbols: true, - uppercase: true, - exclude: "*#.'`\"", - }); - const encryptedPassword = (0, encrypt_1.default)({ data: password }); - yield (0, noDatabaseDbHandler_1.default)(`SET PASSWORD FOR '${username}'@'${defaultMariadbUserHost}' = PASSWORD('${password}')`); - const updateUser = yield (0, dbHandler_1.default)({ - query: `UPDATE users SET mariadb_pass = ? WHERE id = ?`, - values: [encryptedPassword, user.id], - }); - console.log(`User ${user.id}: ${user.first_name} ${user.last_name} Password Updated successfully added.`); - } - catch (error) { - (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `Error Updating MariaDB User`, error); - } - } +async function resetSQLCredentialsPasswords() { + var _a; + const users = (await dbHandler({ + query: `SELECT * FROM users`, + })); + if (!users) { process.exit(); - }); + } + for (let i = 0; i < users.length; i++) { + const user = users[i]; + if (!user) + continue; + const defaultMariadbUserHost = process.env.DSQL_DB_HOST || "127.0.0.1"; + try { + const username = grabSQLKeyName({ type: "user", userId: user.id }); + const password = generator.generate({ + length: 16, + numbers: true, + symbols: true, + uppercase: true, + exclude: "*#.'`\"", + }); + const encryptedPassword = encrypt({ data: password }); + await noDatabaseDbHandler(`SET PASSWORD FOR '${username}'@'${defaultMariadbUserHost}' = PASSWORD('${password}')`); + const updateUser = await dbHandler({ + query: `UPDATE users SET mariadb_pass = ? WHERE id = ?`, + values: [encryptedPassword, user.id], + }); + console.log(`User ${user.id}: ${user.first_name} ${user.last_name} Password Updated successfully added.`); + } + catch (error) { + (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? 
void 0 : _a.call(global, `Error Updating MariaDB User`, error); + } + } + process.exit(); } resetSQLCredentialsPasswords(); diff --git a/dist/package-shared/shell/scripts/convert-js-to-ts.js b/dist/package-shared/shell/scripts/convert-js-to-ts.js index 9137347..0f32034 100644 --- a/dist/package-shared/shell/scripts/convert-js-to-ts.js +++ b/dist/package-shared/shell/scripts/convert-js-to-ts.js @@ -1,19 +1,14 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const path_1 = __importDefault(require("path")); -const fs_1 = __importDefault(require("fs")); -const rootDir = path_1.default.resolve(__dirname, "../../../"); +import path from "path"; +import fs from "fs"; +const rootDir = path.resolve(__dirname, "../../../"); const ignorePattern = /\/\.git\/|\/\.next\/|\/\.dist\/|node_modules|\/\.local_dist\/|\/\.tmp\/|\/types\/|\.config\.js|\/public\//; function transformJsToTs(dir) { var _a; - const dirContent = fs_1.default.readdirSync(dir); + const dirContent = fs.readdirSync(dir); for (let i = 0; i < dirContent.length; i++) { const fileFolder = dirContent[i]; - const fullFileFolderPath = path_1.default.join(dir, fileFolder); - const stat = fs_1.default.statSync(fullFileFolderPath); + const fullFileFolderPath = path.join(dir, fileFolder); + const stat = fs.statSync(fullFileFolderPath); if (stat.isDirectory()) { transformJsToTs(fullFileFolderPath); continue; @@ -31,8 +26,8 @@ function transformJsToTs(dir) { console.log(newFilePath); console.log("\n/////////////////////////////////////////"); console.log("/////////////////////////////////////////\n"); - fs_1.default.copyFileSync(fullFileFolderPath, newFilePath); - fs_1.default.unlinkSync(fullFileFolderPath); + fs.copyFileSync(fullFileFolderPath, newFilePath); + fs.unlinkSync(fullFileFolderPath); } } } diff --git a/dist/package-shared/shell/setSQLCredentials.js b/dist/package-shared/shell/setSQLCredentials.js index 7db358c..2321757 100644 --- a/dist/package-shared/shell/setSQLCredentials.js +++ b/dist/package-shared/shell/setSQLCredentials.js @@ -1,22 +1,9 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); require("dotenv").config({ path: "./../.env" }); -const generate_password_1 = __importDefault(require("generate-password")); -const noDatabaseDbHandler_1 = __importDefault(require("./utils/noDatabaseDbHandler")); -const dbHandler_1 = __importDefault(require("./utils/dbHandler")); -const encrypt_1 = __importDefault(require("../functions/dsql/encrypt")); +import generator from "generate-password"; +import noDatabaseDbHandler from "./utils/noDatabaseDbHandler"; +import dbHandler from "../functions/backend/dbHandler"; +import encrypt from "../functions/dsql/encrypt"; +import grabSQLKeyName from "../utils/grab-sql-key-name"; /** ****************************************************************************** */ /** ****************************************************************************** */ /** ****************************************************************************** */ @@ -26,45 +13,43 @@ const encrypt_1 = __importDefault(require("../functions/dsql/encrypt")); /** * # Set SQL Credentials */ -function setSQLCredentials() { - return __awaiter(this, void 0, void 0, function* () { - var _a; - const users = (yield (0, dbHandler_1.default)({ - query: `SELECT * FROM users`, - })); - if (!users) { - process.exit(); +async function setSQLCredentials() { + var _a; + const users = (await dbHandler({ + query: `SELECT * FROM users`, + })); + if (!users) { + process.exit(); + } + for (let i = 0; i < users.length; i++) { + const user = users[i]; + if (!user) + continue; + if (user.mariadb_user && user.mariadb_pass) { + continue; } - for (let i = 0; i < users.length; i++) { - const user = users[i]; - if (!user) - continue; - if (user.mariadb_user && user.mariadb_pass) { - continue; - } - try { - const username = `dsql_user_${user.id}`; - const password = generate_password_1.default.generate({ - length: 16, - numbers: true, - symbols: true, - uppercase: true, - exclude: "*#.'`\"", - }); - const encryptedPassword = (0, encrypt_1.default)({ data: password }); - yield (0, noDatabaseDbHandler_1.default)(`CREATE USER IF NOT EXISTS '${username}'@'127.0.0.1' IDENTIFIED BY '${password}'`); - yield (0, noDatabaseDbHandler_1.default)(`GRANT ALL PRIVILEGES ON \`datasquirel\\_user\\_${user.id}\\_%\`.* TO '${username}'@'127.0.0.1'`); - yield (0, noDatabaseDbHandler_1.default)(`FLUSH PRIVILEGES`); - const updateUser = yield (0, dbHandler_1.default)({ - query: `UPDATE users SET mariadb_user = ?, mariadb_host = '127.0.0.1' mariadb_pass = ? WHERE id = ?`, - values: [username, encryptedPassword, user.id], - }); - console.log(`User ${user.id}: ${user.first_name} ${user.last_name} SQL credentials successfully added.`); - } - catch (error) { - (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? 
void 0 : _a.call(global, `Error Setting SQL credentials`, error); - } + try { + const username = grabSQLKeyName({ type: "user", userId: user.id }); + const password = generator.generate({ + length: 16, + numbers: true, + symbols: true, + uppercase: true, + exclude: "*#.'`\"", + }); + const encryptedPassword = encrypt({ data: password }); + await noDatabaseDbHandler(`CREATE USER IF NOT EXISTS '${username}'@'127.0.0.1' IDENTIFIED BY '${password}'`); + await noDatabaseDbHandler(`GRANT ALL PRIVILEGES ON \`datasquirel\\_user\\_${user.id}\\_%\`.* TO '${username}'@'127.0.0.1'`); + await noDatabaseDbHandler(`FLUSH PRIVILEGES`); + const updateUser = await dbHandler({ + query: `UPDATE users SET mariadb_user = ?, mariadb_host = '127.0.0.1' mariadb_pass = ? WHERE id = ?`, + values: [username, encryptedPassword, user.id], + }); + console.log(`User ${user.id}: ${user.first_name} ${user.last_name} SQL credentials successfully added.`); } - }); + catch (error) { + (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `Error Setting SQL credentials`, error); + } + } } setSQLCredentials(); diff --git a/dist/package-shared/shell/tailwindWatch.js b/dist/package-shared/shell/tailwindWatch.js index 468871d..3f843bb 100644 --- a/dist/package-shared/shell/tailwindWatch.js +++ b/dist/package-shared/shell/tailwindWatch.js @@ -1,10 +1,5 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs_1 = __importDefault(require("fs")); -const child_process_1 = require("child_process"); +import fs from "fs"; +import { exec } from "child_process"; require("dotenv").config({ path: "./../.env" }); const sourceFile = process.argv.indexOf("--src") >= 0 ? process.argv[process.argv.indexOf("--src") + 1] @@ -19,8 +14,8 @@ const destinationFile = process.argv.indexOf("--dst") >= 0 /** ****************************************************************************** */ /** ****************************************************************************** */ console.log("Running Tailwind CSS compiler ..."); -fs_1.default.watch("./../", (curr, prev) => { - (0, child_process_1.exec)(`bunx tailwindcss -i ./tailwind/main.css -o ./styles/tailwind.css`, (error, stdout, stderr) => { +fs.watch("./../", (curr, prev) => { + exec(`bunx tailwindcss -i ./tailwind/main.css -o ./styles/tailwind.css`, (error, stdout, stderr) => { if (error) { console.log("ERROR =>", error.message); return; diff --git a/dist/package-shared/shell/test-external-server.js b/dist/package-shared/shell/test-external-server.js index b234c94..a3cf3de 100644 --- a/dist/package-shared/shell/test-external-server.js +++ b/dist/package-shared/shell/test-external-server.js @@ -1,19 +1,5 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); require("dotenv").config({ path: "./.env" }); -const grab_dsql_connection_1 = __importDefault(require("../utils/grab-dsql-connection")); +import grabDSQLConnection from "../utils/grab-dsql-connection"; /** * # Main DB Handler Function * @async @@ -25,16 +11,16 @@ const grab_dsql_connection_1 = __importDefault(require("../utils/grab-dsql-conne * * @returns {Promise} */ -(() => __awaiter(void 0, void 0, void 0, function* () { +(async () => { var _a; - const CONNECTION = (0, grab_dsql_connection_1.default)({ noDb: true }); + const CONNECTION = grabDSQLConnection({ noDb: true }); /** * Switch Database * * @description If a database is provided, switch to it */ try { - const result = yield CONNECTION.query("SHOW DATABASES"); + const result = await CONNECTION.query("SHOW DATABASES"); const parsedResults = JSON.parse(JSON.stringify(result)); console.log("parsedResults =>", parsedResults); } @@ -45,4 +31,4 @@ const grab_dsql_connection_1 = __importDefault(require("../utils/grab-dsql-conne CONNECTION === null || CONNECTION === void 0 ? void 0 : CONNECTION.end(); process.exit(); } -}))(); +})(); diff --git a/dist/package-shared/shell/testSQLEscape.d.ts b/dist/package-shared/shell/testSQLEscape.d.ts index 4d08c82..ebef584 100644 --- a/dist/package-shared/shell/testSQLEscape.d.ts +++ b/dist/package-shared/shell/testSQLEscape.d.ts @@ -1,9 +1,3 @@ -/** ****************************************************************************** */ -/** ****************************************************************************** */ -/** ****************************************************************************** */ -/** ****************************************************************************** */ -/** ****************************************************************************** */ -/** ****************************************************************************** */ /** * # Test SQL Escape */ diff --git a/dist/package-shared/shell/testSQLEscape.js b/dist/package-shared/shell/testSQLEscape.js index f0b7eec..202ce56 100644 --- a/dist/package-shared/shell/testSQLEscape.js +++ b/dist/package-shared/shell/testSQLEscape.js @@ -1,80 +1,54 @@ -"use strict"; -// @ts-check -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = testSQLEscape; -//////////////////////////////////////// -//////////////////////////////////////// -//////////////////////////////////////// require("dotenv").config({ path: "./../.env" }); -const generate_password_1 = __importDefault(require("generate-password")); -const noDatabaseDbHandler_1 = __importDefault(require("./utils/noDatabaseDbHandler")); -const dbHandler_1 = __importDefault(require("./utils/dbHandler")); -const encrypt_1 = __importDefault(require("../functions/dsql/encrypt")); -/** ****************************************************************************** */ -/** ****************************************************************************** */ -/** ****************************************************************************** */ -/** ****************************************************************************** */ -/** ****************************************************************************** */ -/** ****************************************************************************** */ +import generator from "generate-password"; +import noDatabaseDbHandler from "./utils/noDatabaseDbHandler"; +import dbHandler from "../functions/backend/dbHandler"; +import encrypt from "../functions/dsql/encrypt"; +import grabSQLKeyName from "../utils/grab-sql-key-name"; /** * # Test SQL Escape */ -function testSQLEscape() { - return __awaiter(this, void 0, void 0, function* () { - var _a; - const users = (yield (0, dbHandler_1.default)({ - query: `SELECT * FROM users`, - })); - if (!users) { - process.exit(); - } - for (let i = 0; i < users.length; i++) { - const user = users[i]; - if (!user) - continue; - const defaultMariadbUserHost = process.env.DSQL_DB_HOST || "127.0.0.1"; - try { - const username = `dsql_user_${user.id}`; - const password = generate_password_1.default.generate({ - length: 16, - numbers: true, - symbols: true, - uppercase: true, - exclude: "*#.'`\"", - }); - const encryptedPassword = (0, encrypt_1.default)({ data: password }); - yield (0, noDatabaseDbHandler_1.default)(`DROP USER '${username}'@'${defaultMariadbUserHost}'`); - yield (0, noDatabaseDbHandler_1.default)(`CREATE USER IF NOT EXISTS '${username}'@'${defaultMariadbUserHost}' IDENTIFIED BY '${password}'`); - yield (0, noDatabaseDbHandler_1.default)(`GRANT ALL PRIVILEGES ON \`datasquirel\\_user\\_${user.id}\\_%\`.* TO '${username}'@'${defaultMariadbUserHost}'`); - yield (0, noDatabaseDbHandler_1.default)(`FLUSH PRIVILEGES`); - const updateUser = yield (0, dbHandler_1.default)({ - query: `UPDATE users SET mariadb_user = ?, mariadb_host = ? mariadb_pass = ? WHERE id = ?`, - values: [ - username, - defaultMariadbUserHost, - encryptedPassword, - user.id, - ], - }); - console.log(`User ${user.id}: ${user.first_name} ${user.last_name} SQL credentials successfully added.`); - } - catch (error) { - (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? 
void 0 : _a.call(global, `Error Testing SQL Escape`, error); - } - } +export default async function testSQLEscape() { + var _a; + const users = (await dbHandler({ + query: `SELECT * FROM users`, + })); + if (!users) { process.exit(); - }); + } + for (let i = 0; i < users.length; i++) { + const user = users[i]; + if (!user) + continue; + const defaultMariadbUserHost = process.env.DSQL_DB_HOST || "127.0.0.1"; + try { + const username = grabSQLKeyName({ type: "user", userId: user.id }); + const password = generator.generate({ + length: 16, + numbers: true, + symbols: true, + uppercase: true, + exclude: "*#.'`\"", + }); + const encryptedPassword = encrypt({ data: password }); + await noDatabaseDbHandler(`DROP USER '${username}'@'${defaultMariadbUserHost}'`); + await noDatabaseDbHandler(`CREATE USER IF NOT EXISTS '${username}'@'${defaultMariadbUserHost}' IDENTIFIED BY '${password}'`); + await noDatabaseDbHandler(`GRANT ALL PRIVILEGES ON \`datasquirel\\_user\\_${user.id}\\_%\`.* TO '${username}'@'${defaultMariadbUserHost}'`); + await noDatabaseDbHandler(`FLUSH PRIVILEGES`); + const updateUser = await dbHandler({ + query: `UPDATE users SET mariadb_user = ?, mariadb_host = ? mariadb_pass = ? WHERE id = ?`, + values: [ + username, + defaultMariadbUserHost, + encryptedPassword, + user.id, + ], + }); + console.log(`User ${user.id}: ${user.first_name} ${user.last_name} SQL credentials successfully added.`); + } + catch (error) { + (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `Error Testing SQL Escape`, error); + } + } + process.exit(); } testSQLEscape(); diff --git a/dist/package-shared/shell/updateChildrenTablesOnDb.js b/dist/package-shared/shell/updateChildrenTablesOnDb.js index 8d5512b..fc9992a 100644 --- a/dist/package-shared/shell/updateChildrenTablesOnDb.js +++ b/dist/package-shared/shell/updateChildrenTablesOnDb.js @@ -1,57 +1,41 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const DB_HANDLER_1 = __importDefault(require("../utils/backend/global-db/DB_HANDLER")); -const fs_1 = __importDefault(require("fs")); +import DB_HANDLER from "../utils/backend/global-db/DB_HANDLER"; +import fs from "fs"; require("dotenv").config({ path: "./../.env" }); -function updateChildrenTablesOnDb() { - return __awaiter(this, void 0, void 0, function* () { - var _a; - /** - * Grab Schema - * - * @description Grab Schema - */ - try { - const rootDir = String(process.env.DSQL_USER_DB_SCHEMA_PATH); - const userFolders = fs_1.default.readdirSync(rootDir); - for (let i = 0; i < userFolders.length; i++) { - const folder = userFolders[i]; - const userId = folder.replace(/user-/, ""); - const databases = JSON.parse(fs_1.default.readFileSync(`${rootDir}/${folder}/main.json`, "utf-8")); - for (let j = 0; j < databases.length; j++) { - const db = databases[j]; - const dbTables = db.tables; - for (let k = 0; k < dbTables.length; k++) { - const table = dbTables[k]; - if (table === null || table === void 0 ? void 0 : table.childTable) { - const originTableName = table.childTableName; - const originDbName = table.childTableDbFullName; - const WHERE_CLAUSE = `WHERE user_id='${userId}' AND db_slug='${db.dbSlug}' AND table_slug='${table.tableName}'`; - const existingTableInDb = yield (0, DB_HANDLER_1.default)(`SELECT * FROM user_database_tables ${WHERE_CLAUSE}`); - if (existingTableInDb && existingTableInDb[0]) { - const updateChildrenTablesInfo = yield (0, DB_HANDLER_1.default)(`UPDATE user_database_tables SET child_table='1',child_table_parent_database='${originDbName}',child_table_parent_table='${originTableName}' WHERE id='${existingTableInDb[0].id}'`); - } +async function updateChildrenTablesOnDb() { + var _a; + /** + * Grab Schema + * + * @description Grab Schema + */ + try { + const rootDir = String(process.env.DSQL_USER_DB_SCHEMA_PATH); + const userFolders = fs.readdirSync(rootDir); + for (let i = 0; i < userFolders.length; i++) { + const folder = userFolders[i]; + const userId = folder.replace(/user-/, ""); + const databases = JSON.parse(fs.readFileSync(`${rootDir}/${folder}/main.json`, "utf-8")); + for (let j = 0; j < databases.length; j++) { + const db = databases[j]; + const dbTables = db.tables; + for (let k = 0; k < dbTables.length; k++) { + const table = dbTables[k]; + if (table === null || table === void 0 ? void 0 : table.childTable) { + const originTableName = table.childTableName; + const originDbName = table.childTableDbFullName; + const WHERE_CLAUSE = `WHERE user_id='${userId}' AND db_slug='${db.dbSlug}' AND table_slug='${table.tableName}'`; + const existingTableInDb = await DB_HANDLER(`SELECT * FROM user_database_tables ${WHERE_CLAUSE}`); + if (existingTableInDb && existingTableInDb[0]) { + const updateChildrenTablesInfo = await DB_HANDLER(`UPDATE user_database_tables SET child_table='1',child_table_parent_database='${originDbName}',child_table_parent_table='${originTableName}' WHERE id='${existingTableInDb[0].id}'`); } } } } } - catch (error) { - (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `Error Updating Children Tables on DB`, error); - } - process.exit(); - }); + } + catch (error) { + (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? 
void 0 : _a.call(global, `Error Updating Children Tables on DB`, error); + } + process.exit(); } updateChildrenTablesOnDb(); diff --git a/dist/package-shared/shell/updateDateTimestamps.js b/dist/package-shared/shell/updateDateTimestamps.js index 3a1d1e7..53ed2e3 100644 --- a/dist/package-shared/shell/updateDateTimestamps.js +++ b/dist/package-shared/shell/updateDateTimestamps.js @@ -1,41 +1,27 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); require("dotenv").config({ path: "./../.env" }); -const varDatabaseDbHandler_1 = __importDefault(require("../functions/backend/varDatabaseDbHandler")); +import varDatabaseDbHandler from "../functions/backend/varDatabaseDbHandler"; /** * Grab Schema * * @description Grab Schema */ -(0, varDatabaseDbHandler_1.default)({ +varDatabaseDbHandler({ queryString: `SELECT user_database_tables.*,user_databases.db_full_name FROM user_database_tables JOIN user_databases ON user_database_tables.db_id=user_databases.id`, database: "datasquirel", -}).then((tables) => __awaiter(void 0, void 0, void 0, function* () { +}).then(async (tables) => { for (let i = 0; i < tables.length; i++) { const table = tables[i]; const { id, user_id, db_id, db_full_name, table_name, table_slug, table_description, } = table; - const tableInfo = yield (0, varDatabaseDbHandler_1.default)({ + const tableInfo = await varDatabaseDbHandler({ queryString: `SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA='${db_full_name}' AND TABLE_NAME='${table_slug}'`, }); - const updateCreationDateTimestamp = yield (0, varDatabaseDbHandler_1.default)({ + const updateCreationDateTimestamp = await varDatabaseDbHandler({ queryString: `ALTER TABLE \`${db_full_name}\`.\`${table_slug}\` MODIFY COLUMN date_created_timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP`, }); - const updateDateTimestamp = yield (0, varDatabaseDbHandler_1.default)({ + const updateDateTimestamp = await varDatabaseDbHandler({ queryString: `ALTER TABLE \`${db_full_name}\`.\`${table_slug}\` MODIFY COLUMN date_updated_timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP`, }); console.log("Date Updated Column updated"); } process.exit(); -})); +}); diff --git a/dist/package-shared/shell/updateDbSlugsForTableRecords.js b/dist/package-shared/shell/updateDbSlugsForTableRecords.js index 16778aa..540aab5 100644 --- a/dist/package-shared/shell/updateDbSlugsForTableRecords.js +++ b/dist/package-shared/shell/updateDbSlugsForTableRecords.js @@ -1,21 +1,7 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); require("dotenv").config({ path: "./../.env" }); -const serverError_1 = __importDefault(require("../functions/backend/serverError")); -const varDatabaseDbHandler_1 = __importDefault(require("./utils/varDatabaseDbHandler")); -const DB_HANDLER_1 = __importDefault(require("../utils/backend/global-db/DB_HANDLER")); +import serverError from "../functions/backend/serverError"; +import varDatabaseDbHandler from "./utils/varDatabaseDbHandler"; +import DB_HANDLER from "../utils/backend/global-db/DB_HANDLER"; /** ****************************************************************************** */ /** ****************************************************************************** */ /** ****************************************************************************** */ @@ -27,9 +13,9 @@ const DB_HANDLER_1 = __importDefault(require("../utils/backend/global-db/DB_HAND * * @description Grab Schema */ -(0, varDatabaseDbHandler_1.default)({ +varDatabaseDbHandler({ queryString: `SELECT DISTINCT db_id FROM datasquirel.user_database_tables`, -}).then((tables) => __awaiter(void 0, void 0, void 0, function* () { +}).then(async (tables) => { // console.log(tables); // process.exit(); var _a; @@ -37,11 +23,11 @@ const DB_HANDLER_1 = __importDefault(require("../utils/backend/global-db/DB_HAND const table = tables[i]; try { const { db_id } = table; - const dbSlug = yield (0, DB_HANDLER_1.default)(`SELECT db_slug FROM user_databases WHERE id='${db_id}'`); - const updateTableSlug = yield (0, DB_HANDLER_1.default)(`UPDATE user_database_tables SET db_slug='${dbSlug[0].db_slug}' WHERE db_id='${db_id}'`); + const dbSlug = await DB_HANDLER(`SELECT db_slug FROM user_databases WHERE id='${db_id}'`); + const updateTableSlug = await DB_HANDLER(`UPDATE user_database_tables SET db_slug='${dbSlug[0].db_slug}' WHERE db_id='${db_id}'`); } catch (error) { - (0, serverError_1.default)({ + serverError({ component: "shell/updateDbSlugsForTableRecords/main-catch-error", message: error.message, user: {}, @@ -50,4 +36,4 @@ const DB_HANDLER_1 = __importDefault(require("../utils/backend/global-db/DB_HAND } } process.exit(); -})); +}); diff --git a/dist/package-shared/shell/updateSSLUsers.js b/dist/package-shared/shell/updateSSLUsers.js index e54707a..69ff9e6 100644 --- a/dist/package-shared/shell/updateSSLUsers.js +++ b/dist/package-shared/shell/updateSSLUsers.js @@ -1,19 +1,6 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); require("dotenv").config({ path: "./../.env" }); -const grab_dsql_connection_1 = __importDefault(require("../utils/grab-dsql-connection")); +import grabDSQLConnection from "../utils/grab-dsql-connection"; +import grabSQLKeyName from "../utils/grab-sql-key-name"; /** * # Main DB Handler Function * @async @@ -25,24 +12,24 @@ const grab_dsql_connection_1 = __importDefault(require("../utils/grab-dsql-conne * * @returns {Promise} */ -(() => __awaiter(void 0, void 0, void 0, function* () { +(async () => { var _a, _b; - const CONNECTION = (0, grab_dsql_connection_1.default)(); + const CONNECTION = grabDSQLConnection(); try { - const result = yield CONNECTION.query("SELECT user,host,ssl_type FROM mysql.user"); + const result = await CONNECTION.query("SELECT user,host,ssl_type FROM mysql.user"); const parsedResults = JSON.parse(JSON.stringify(result)); for (let i = 0; i < parsedResults.length; i++) { const user = parsedResults[i]; if (user.User !== process.env.DSQL_DB_READ_ONLY_USERNAME || user.User !== process.env.DSQL_DB_FULL_ACCESS_USERNAME || - !((_a = user.User) === null || _a === void 0 ? void 0 : _a.match(/dsql_user_.*/i))) { + !((_a = user.User) === null || _a === void 0 ? void 0 : _a.match(new RegExp(grabSQLKeyName({ type: "user" }))))) { continue; } const { User, Host, ssl_type } = user; if (ssl_type === "ANY") { continue; } - const addUserSSL = yield CONNECTION.query(`ALTER USER '${User}'@'${Host}'`); + const addUserSSL = await CONNECTION.query(`ALTER USER '${User}'@'${Host}'`); console.log(`addUserSSL => ${User}@${Host}`, addUserSSL); } } @@ -53,4 +40,4 @@ const grab_dsql_connection_1 = __importDefault(require("../utils/grab-dsql-conne CONNECTION.end(); process.exit(); } -}))(); +})(); diff --git a/dist/package-shared/shell/utils/camelJoinedtoCamelSpace.js b/dist/package-shared/shell/utils/camelJoinedtoCamelSpace.js index 239bfc5..d3d3f7b 100644 --- a/dist/package-shared/shell/utils/camelJoinedtoCamelSpace.js +++ b/dist/package-shared/shell/utils/camelJoinedtoCamelSpace.js @@ -1,13 +1,10 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = camelJoinedtoCamelSpace; /** * Convert Camel Joined Text to Camel Spaced Text * ============================================================================== * @description this function takes a camel cased text without spaces, and returns * a camel-case-spaced text */ -function camelJoinedtoCamelSpace(text) { +export default function camelJoinedtoCamelSpace(text) { if (!(text === null || text === void 0 ? 
void 0 : text.match(/./))) { return ""; } diff --git a/dist/package-shared/shell/utils/create-table-handle-table-record.d.ts b/dist/package-shared/shell/utils/create-table-handle-table-record.d.ts new file mode 100644 index 0000000..be7b3a3 --- /dev/null +++ b/dist/package-shared/shell/utils/create-table-handle-table-record.d.ts @@ -0,0 +1,13 @@ +import { DSQL_TableSchemaType } from "../../types"; +import { DSQL_DATASQUIREL_USER_DATABASES } from "../../types/dsql"; +type Param = { + tableSchema?: DSQL_TableSchemaType; + recordedDbEntry?: DSQL_DATASQUIREL_USER_DATABASES; + update?: boolean; + isMain?: boolean; +}; +/** + * # Handle Table Record Update and Insert + */ +export default function ({ tableSchema, recordedDbEntry, update, isMain, }: Param): Promise; +export {}; diff --git a/dist/package-shared/shell/utils/create-table-handle-table-record.js b/dist/package-shared/shell/utils/create-table-handle-table-record.js new file mode 100644 index 0000000..121746b --- /dev/null +++ b/dist/package-shared/shell/utils/create-table-handle-table-record.js @@ -0,0 +1,83 @@ +import varDatabaseDbHandler from "./varDatabaseDbHandler"; +import numberfy from "../../utils/numberfy"; +import updateDbEntry from "../../functions/backend/db/updateDbEntry"; +import addDbEntry from "../../functions/backend/db/addDbEntry"; +import slugToNormalText from "../../utils/slug-to-normal-text"; +import _ from "lodash"; +/** + * # Handle Table Record Update and Insert + */ +export default async function ({ tableSchema, recordedDbEntry, update, isMain, }) { + var _a; + if (isMain) + return undefined; + let tableId; + const targetDatabase = "datasquirel"; + const targetTableName = "user_database_tables"; + if (!(tableSchema === null || tableSchema === void 0 ? void 0 : tableSchema.tableName)) { + return undefined; + } + const newTableSchema = _.cloneDeep(tableSchema); + try { + if (!recordedDbEntry) { + throw new Error("Recorded Db entry not found!"); + } + // const existingTableName = newTableSchema.tableNameOld + // ? newTableSchema.tableNameOld + // : newTableSchema.tableName; + const newTableEntry = { + user_id: recordedDbEntry.user_id, + db_id: recordedDbEntry.id, + db_slug: recordedDbEntry.db_slug, + table_name: slugToNormalText(newTableSchema.tableName), + table_slug: newTableSchema.tableName, + child_table: newTableSchema.childTable ? 1 : 0, + child_table_parent_database_schema_id: newTableSchema.childTableDbId + ? numberfy(newTableSchema.childTableDbId) + : 0, + child_table_parent_table_schema_id: newTableSchema.childTableId + ? numberfy(newTableSchema.childTableId) + : 0, + table_schema_id: newTableSchema.id + ? numberfy(newTableSchema.id) + : 0, + active_data: newTableSchema.updateData ? 1 : 0, + }; + const existingTable = await varDatabaseDbHandler({ + queryString: `SELECT * FROM ${targetDatabase}.${targetTableName} WHERE db_id = ? AND table_slug = ?`, + queryValuesArray: [ + String(recordedDbEntry.id), + String(newTableSchema.tableName), + ], + }); + const table = existingTable === null || existingTable === void 0 ? void 0 : existingTable[0]; + if (table === null || table === void 0 ? 
void 0 : table.id) { + tableId = table.id; + if (update) { + await updateDbEntry({ + data: newTableEntry, + identifierColumnName: "id", + identifierValue: table.id, + tableName: targetTableName, + dbFullName: targetDatabase, + }); + } + } + else { + const newTableEntryRes = await addDbEntry({ + data: newTableEntry, + tableName: targetTableName, + dbFullName: targetDatabase, + }); + if ((_a = newTableEntryRes === null || newTableEntryRes === void 0 ? void 0 : newTableEntryRes.payload) === null || _a === void 0 ? void 0 : _a.insertId) { + tableId = newTableEntryRes.payload.insertId; + } + } + if (newTableSchema.tableNameOld) { + } + return tableId; + } + catch (error) { + return undefined; + } +} diff --git a/dist/package-shared/shell/utils/createTable.d.ts b/dist/package-shared/shell/utils/createTable.d.ts index 8d81665..193336d 100644 --- a/dist/package-shared/shell/utils/createTable.d.ts +++ b/dist/package-shared/shell/utils/createTable.d.ts @@ -1,13 +1,15 @@ -import { DSQL_TableSchemaType } from "../../types"; +import { DSQL_FieldSchemaType, DSQL_TableSchemaType } from "../../types"; +import { DSQL_DATASQUIREL_USER_DATABASES } from "../../types/dsql"; type Param = { dbFullName: string; tableName: string; - tableInfoArray: any[]; + tableInfoArray: DSQL_FieldSchemaType[]; tableSchema?: DSQL_TableSchemaType; - recordedDbEntry?: any; + recordedDbEntry?: DSQL_DATASQUIREL_USER_DATABASES; + isMain?: boolean; }; /** * # Create Table Functions */ -export default function createTable({ dbFullName, tableName, tableInfoArray, tableSchema, recordedDbEntry, }: Param): Promise; +export default function createTable({ dbFullName, tableName, tableInfoArray, tableSchema, recordedDbEntry, isMain, }: Param): Promise; export {}; diff --git a/dist/package-shared/shell/utils/createTable.js b/dist/package-shared/shell/utils/createTable.js index 9ce84f5..9c972f9 100644 --- a/dist/package-shared/shell/utils/createTable.js +++ b/dist/package-shared/shell/utils/createTable.js @@ -1,127 +1,55 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = createTable; -const varDatabaseDbHandler_1 = __importDefault(require("./varDatabaseDbHandler")); -const generateColumnDescription_1 = __importDefault(require("./generateColumnDescription")); -const supplementTable_1 = __importDefault(require("./supplementTable")); -const dbHandler_1 = __importDefault(require("./dbHandler")); +import varDatabaseDbHandler from "./varDatabaseDbHandler"; +import generateColumnDescription from "./generateColumnDescription"; +import supplementTable from "./supplementTable"; +import handleTableForeignKey from "./handle-table-foreign-key"; +import createTableHandleTableRecord from "./create-table-handle-table-record"; /** * # Create Table Functions */ -function createTable(_a) { - return __awaiter(this, arguments, void 0, function* ({ dbFullName, tableName, tableInfoArray, tableSchema, recordedDbEntry, }) { - /** - * Format tableInfoArray - * - * @description Format tableInfoArray - */ - const finalTable = (0, supplementTable_1.default)({ tableInfoArray: tableInfoArray }); - /** - * Grab Schema - * - * @description Grab Schema - */ - const createTableQueryArray = []; - createTableQueryArray.push(`CREATE TABLE IF NOT EXISTS \`${dbFullName}\`.\`${tableName}\` (`); - //////////////////////////////////////// - //////////////////////////////////////// - //////////////////////////////////////// - try { - if (!recordedDbEntry) { - throw new Error("Recorded Db entry not found!"); - } - const existingTable = yield (0, varDatabaseDbHandler_1.default)({ - queryString: `SELECT * FROM datasquirel.user_database_tables WHERE db_id = ? AND table_slug = ?`, - queryValuesArray: [recordedDbEntry.id, tableSchema === null || tableSchema === void 0 ? void 0 : tableSchema.tableName], - }); - /** @type {import("../../types").MYSQL_user_database_tables_table_def} */ - const table = existingTable === null || existingTable === void 0 ? void 0 : existingTable[0]; - if (!(table === null || table === void 0 ? void 0 : table.id)) { - const newTableEntry = yield (0, dbHandler_1.default)({ - query: `INSERT INTO datasquirel.user_database_tables SET ?`, - values: { - user_id: recordedDbEntry.user_id, - db_id: recordedDbEntry.id, - db_slug: recordedDbEntry.db_slug, - table_name: tableSchema === null || tableSchema === void 0 ? void 0 : tableSchema.tableFullName, - table_slug: tableSchema === null || tableSchema === void 0 ? void 0 : tableSchema.tableName, - child_table: (tableSchema === null || tableSchema === void 0 ? void 0 : tableSchema.childTable) ? "1" : null, - child_table_parent_database: (tableSchema === null || tableSchema === void 0 ? void 0 : tableSchema.childTableDbFullName) || null, - child_table_parent_table: (tableSchema === null || tableSchema === void 0 ? 
void 0 : tableSchema.childTableName) || null, - date_created: Date(), - date_created_code: Date.now(), - date_updated: Date(), - date_updated_code: Date.now(), - }, - }); - } - } - catch (error) { } - //////////////////////////////////////// - //////////////////////////////////////// - //////////////////////////////////////// - let primaryKeySet = false; - /** @type {import("../../types").DSQL_FieldSchemaType[]} */ - let foreignKeys = []; - //////////////////////////////////////// - for (let i = 0; i < finalTable.length; i++) { - const column = finalTable[i]; - const { fieldName, dataType, nullValue, primaryKey, autoIncrement, defaultValue, defaultValueLiteral, foreignKey, updatedField, onUpdate, onUpdateLiteral, onDelete, onDeleteLiteral, defaultField, encrypted, json, newTempField, notNullValue, originName, plainText, pattern, patternFlags, richText, } = column; - if (foreignKey) { - foreignKeys.push(Object.assign({}, column)); - } - let { fieldEntryText, newPrimaryKeySet } = (0, generateColumnDescription_1.default)({ - columnData: column, - primaryKeySet: primaryKeySet, - }); - primaryKeySet = newPrimaryKeySet; - //////////////////////////////////////// - const comma = (() => { - if (foreignKeys[0]) - return ","; - if (i === finalTable.length - 1) - return ""; - return ","; - })(); - createTableQueryArray.push(" " + fieldEntryText + comma); - //////////////////////////////////////// - } - if (foreignKeys[0]) { - foreignKeys.forEach((foreighKey, index, array) => { - var _a, _b, _c, _d, _e; - const fieldName = foreighKey.fieldName; - const destinationTableName = (_a = foreighKey.foreignKey) === null || _a === void 0 ? void 0 : _a.destinationTableName; - const destinationTableColumnName = (_b = foreighKey.foreignKey) === null || _b === void 0 ? void 0 : _b.destinationTableColumnName; - const cascadeDelete = (_c = foreighKey.foreignKey) === null || _c === void 0 ? void 0 : _c.cascadeDelete; - const cascadeUpdate = (_d = foreighKey.foreignKey) === null || _d === void 0 ? void 0 : _d.cascadeUpdate; - const foreignKeyName = (_e = foreighKey.foreignKey) === null || _e === void 0 ? void 0 : _e.foreignKeyName; - const comma = (() => { - if (index === foreignKeys.length - 1) - return ""; - return ","; - })(); - createTableQueryArray.push(` CONSTRAINT \`${foreignKeyName}\` FOREIGN KEY (\`${fieldName}\`) REFERENCES \`${destinationTableName}\`(${destinationTableColumnName})${cascadeDelete ? " ON DELETE CASCADE" : ""}${cascadeUpdate ? 
" ON UPDATE CASCADE" : ""}${comma}`); - }); - } - //////////////////////////////////////// - createTableQueryArray.push(`) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin;`); - const createTableQuery = createTableQueryArray.join("\n"); - //////////////////////////////////////// - const newTable = yield (0, varDatabaseDbHandler_1.default)({ - queryString: createTableQuery, - }); - return newTable; +export default async function createTable({ dbFullName, tableName, tableInfoArray, tableSchema, recordedDbEntry, isMain, }) { + const finalTable = supplementTable({ tableInfoArray: tableInfoArray }); + let tableId = await createTableHandleTableRecord({ + recordedDbEntry, + tableSchema, + isMain, }); + if (!tableId && !isMain) + throw new Error(`Couldn't grab table ID`); + const createTableQueryArray = []; + createTableQueryArray.push(`CREATE TABLE IF NOT EXISTS \`${dbFullName}\`.\`${tableName}\` (`); + let primaryKeySet = false; + for (let i = 0; i < finalTable.length; i++) { + const column = finalTable[i]; + let { fieldEntryText, newPrimaryKeySet } = generateColumnDescription({ + columnData: column, + primaryKeySet: primaryKeySet, + }); + primaryKeySet = newPrimaryKeySet; + const comma = (() => { + if (i === finalTable.length - 1) + return ""; + return ","; + })(); + createTableQueryArray.push(" " + fieldEntryText + comma); + } + createTableQueryArray.push(`) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin;`); + const createTableQuery = createTableQueryArray.join("\n"); + const newTable = await varDatabaseDbHandler({ + queryString: createTableQuery, + }); + for (let i = 0; i < finalTable.length; i++) { + const column = finalTable[i]; + const { foreignKey, fieldName } = column; + if (!fieldName) + continue; + if (foreignKey) { + await handleTableForeignKey({ + dbFullName, + foreignKey, + tableName, + fieldName, + }); + } + } + return tableId; } diff --git a/dist/package-shared/shell/utils/dbHandler.d.ts b/dist/package-shared/shell/utils/dbHandler.d.ts deleted file mode 100644 index b8c16d4..0000000 --- a/dist/package-shared/shell/utils/dbHandler.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -type Param = { - query: string; - values?: string[] | object; -}; -/** - * # Main DB Handler Function - * @requires DSQL_DB_CONN - Gobal Variable for Datasquirel Database - */ -export default function dbHandler({ query, values, }: Param): Promise; -export {}; diff --git a/dist/package-shared/shell/utils/dbHandler.js b/dist/package-shared/shell/utils/dbHandler.js deleted file mode 100644 index 4b373ed..0000000 --- a/dist/package-shared/shell/utils/dbHandler.js +++ /dev/null @@ -1,60 +0,0 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = dbHandler; -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const grab_dsql_connection_1 = __importDefault(require("../../utils/grab-dsql-connection")); -/** - * # Main DB Handler Function - * @requires DSQL_DB_CONN - Gobal Variable for Datasquirel Database - */ -function dbHandler(_a) { - return __awaiter(this, arguments, void 0, function* ({ query, values, }) { - var _b; - const CONNECTION = (0, grab_dsql_connection_1.default)(); - let results; - try { - if (query && values) { - results = yield CONNECTION.query(query, values); - } - else { - results = yield CONNECTION.query(query); - } - } - catch (error) { - (_b = global.ERROR_CALLBACK) === null || _b === void 0 ? void 0 : _b.call(global, `DB Handler Error...`, error); - if (process.env.FIRST_RUN) { - return null; - } - console.log("ERROR in dbHandler =>", error.message); - console.log(error); - console.log(CONNECTION.config()); - const tmpFolder = path_1.default.resolve(process.cwd(), "./.tmp"); - if (!fs_1.default.existsSync(tmpFolder)) - fs_1.default.mkdirSync(tmpFolder, { recursive: true }); - fs_1.default.appendFileSync(path_1.default.resolve(tmpFolder, "./dbErrorLogs.txt"), JSON.stringify(error, null, 4) + "\n" + Date() + "\n\n\n", "utf8"); - results = null; - } - finally { - yield (CONNECTION === null || CONNECTION === void 0 ? void 0 : CONNECTION.end()); - } - if (results) { - return JSON.parse(JSON.stringify(results)); - } - else { - return null; - } - }); -} diff --git a/dist/package-shared/shell/utils/drop-all-foreign-keys.d.ts b/dist/package-shared/shell/utils/drop-all-foreign-keys.d.ts new file mode 100644 index 0000000..1412eac --- /dev/null +++ b/dist/package-shared/shell/utils/drop-all-foreign-keys.d.ts @@ -0,0 +1,9 @@ +type Param = { + dbFullName: string; + tableName: string; +}; +/** + * # Drop All Foreign Keys + */ +export default function dropAllForeignKeys({ dbFullName, tableName, }: Param): Promise; +export {}; diff --git a/dist/package-shared/shell/utils/drop-all-foreign-keys.js b/dist/package-shared/shell/utils/drop-all-foreign-keys.js new file mode 100644 index 0000000..6d35f6f --- /dev/null +++ b/dist/package-shared/shell/utils/drop-all-foreign-keys.js @@ -0,0 +1,35 @@ +import grabSQLKeyName from "../../utils/grab-sql-key-name"; +import varDatabaseDbHandler from "./varDatabaseDbHandler"; +/** + * # Drop All Foreign Keys + */ +export default async function dropAllForeignKeys({ dbFullName, tableName, }) { + try { + // const rows = await varDatabaseDbHandler({ + // queryString: `SELECT CONSTRAINT_NAME FROM information_schema.REFERENTIAL_CONSTRAINTS WHERE TABLE_NAME = '${tableName}' AND CONSTRAINT_SCHEMA = '${dbFullName}'`, + // }); + // console.log("rows", rows); + // console.log("dbFullName", dbFullName); + // console.log("tableName", tableName); + // for (const row of rows) { + // await varDatabaseDbHandler({ + // queryString: `ALTER TABLE \`${dbFullName}\`.\`${tableName}\` DROP FOREIGN KEY \`${row.CONSTRAINT_NAME}\` + // `, + // }); + // } + const foreignKeys = await varDatabaseDbHandler({ + queryString: `SHOW INDEXES FROM \`${dbFullName}\`.\`${tableName}\` WHERE Key_name LIKE '${grabSQLKeyName({ type: "foreign_key" })}%'`, + }); + for (const fk of foreignKeys) { + if (fk.Key_name.match(new RegExp(grabSQLKeyName({ type: "foreign_key" })))) { + await varDatabaseDbHandler({ + queryString: `ALTER TABLE \`${dbFullName}\`.\`${tableName}\` DROP INDEX 
\`${fk.Key_name}\` + `, + }); + } + } + } + catch (error) { + console.log(`dropAllForeignKeys ERROR => ${error.message}`); + } +} diff --git a/dist/package-shared/shell/utils/generateColumnDescription.d.ts b/dist/package-shared/shell/utils/generateColumnDescription.d.ts index 39e293f..3e1f977 100644 --- a/dist/package-shared/shell/utils/generateColumnDescription.d.ts +++ b/dist/package-shared/shell/utils/generateColumnDescription.d.ts @@ -1,5 +1,6 @@ +import { DSQL_FieldSchemaType } from "../../types"; type Param = { - columnData: import("../../types").DSQL_FieldSchemaType; + columnData: DSQL_FieldSchemaType; primaryKeySet?: boolean; }; type Return = { diff --git a/dist/package-shared/shell/utils/generateColumnDescription.js b/dist/package-shared/shell/utils/generateColumnDescription.js index a9c0ab7..c7fa1c0 100644 --- a/dist/package-shared/shell/utils/generateColumnDescription.js +++ b/dist/package-shared/shell/utils/generateColumnDescription.js @@ -1,19 +1,14 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = generateColumnDescription; +import dataTypeConstructor from "../../utils/db/schema/data-type-constructor"; +import dataTypeParser from "../../utils/db/schema/data-type-parser"; /** * # Generate Table Column Description */ -function generateColumnDescription({ columnData, primaryKeySet, }) { - /** - * Format tableInfoArray - * - * @description Format tableInfoArray - */ - const { fieldName, dataType, nullValue, primaryKey, autoIncrement, defaultValue, defaultValueLiteral, onUpdateLiteral, notNullValue, } = columnData; +export default function generateColumnDescription({ columnData, primaryKeySet, }) { + const { fieldName, dataType, nullValue, primaryKey, autoIncrement, defaultValue, defaultValueLiteral, onUpdateLiteral, notNullValue, unique, } = columnData; let fieldEntryText = ""; - fieldEntryText += `\`${fieldName}\` ${dataType}`; - //////////////////////////////////////// + const finalDataTypeObject = dataTypeParser(dataType); + const finalDataType = dataTypeConstructor(finalDataTypeObject.type, finalDataTypeObject.limit, finalDataTypeObject.decimal); + fieldEntryText += `\`${fieldName}\` ${finalDataType}`; if (nullValue) { fieldEntryText += " DEFAULT NULL"; } @@ -25,29 +20,29 @@ function generateColumnDescription({ columnData, primaryKeySet, }) { fieldEntryText += ` DEFAULT UUID()`; } else { - fieldEntryText += ` DEFAULT '${defaultValue}'`; + fieldEntryText += ` DEFAULT '${String(defaultValue) + .replace(/^\'|\'$/g, "") + .replace(/\'/g, "\\'")}'`; } } else if (notNullValue) { fieldEntryText += ` NOT NULL`; } - //////////////////////////////////////// if (onUpdateLiteral) { fieldEntryText += ` ON UPDATE ${onUpdateLiteral}`; } - //////////////////////////////////////// if (primaryKey && !primaryKeySet) { fieldEntryText += " PRIMARY KEY"; primaryKeySet = true; } - //////////////////////////////////////// if (autoIncrement) { fieldEntryText += " AUTO_INCREMENT"; primaryKeySet = true; } - //////////////////////////////////////// - //////////////////////////////////////// - //////////////////////////////////////// + if (unique) { + fieldEntryText += " UNIQUE"; + primaryKeySet = true; + } return { fieldEntryText, newPrimaryKeySet: primaryKeySet || false, diff --git a/dist/package-shared/shell/utils/grab-dsql-schema-index-comment.d.ts b/dist/package-shared/shell/utils/grab-dsql-schema-index-comment.d.ts new file mode 100644 index 0000000..d8f98ba --- /dev/null +++ b/dist/package-shared/shell/utils/grab-dsql-schema-index-comment.d.ts @@ 
-0,0 +1 @@ +export default function grabDSQLSchemaIndexComment(): string; diff --git a/dist/package-shared/shell/utils/grab-dsql-schema-index-comment.js b/dist/package-shared/shell/utils/grab-dsql-schema-index-comment.js new file mode 100644 index 0000000..3d3b766 --- /dev/null +++ b/dist/package-shared/shell/utils/grab-dsql-schema-index-comment.js @@ -0,0 +1,3 @@ +export default function grabDSQLSchemaIndexComment() { + return `dsql_schema_index`; +} diff --git a/dist/package-shared/shell/utils/handle-table-foreign-key.d.ts b/dist/package-shared/shell/utils/handle-table-foreign-key.d.ts new file mode 100644 index 0000000..89d5afc --- /dev/null +++ b/dist/package-shared/shell/utils/handle-table-foreign-key.d.ts @@ -0,0 +1,13 @@ +import { DSQL_ForeignKeyType } from "../../types"; +type Param = { + dbFullName: string; + tableName: string; + foreignKey: DSQL_ForeignKeyType; + fieldName: string; + errorLogs?: any[]; +}; +/** + * # Update table function + */ +export default function handleTableForeignKey({ dbFullName, tableName, foreignKey, errorLogs, fieldName, }: Param): Promise; +export {}; diff --git a/dist/package-shared/shell/utils/handle-table-foreign-key.js b/dist/package-shared/shell/utils/handle-table-foreign-key.js new file mode 100644 index 0000000..aac0d10 --- /dev/null +++ b/dist/package-shared/shell/utils/handle-table-foreign-key.js @@ -0,0 +1,24 @@ +import varDatabaseDbHandler from "./varDatabaseDbHandler"; +/** + * # Update table function + */ +export default async function handleTableForeignKey({ dbFullName, tableName, foreignKey, errorLogs, fieldName, }) { + const { destinationTableName, destinationTableColumnName, cascadeDelete, cascadeUpdate, foreignKeyName, } = foreignKey; + let finalQueryString = `ALTER TABLE \`${dbFullName}\`.\`${tableName}\``; + finalQueryString += ` ADD CONSTRAINT \`${foreignKeyName}\` FOREIGN KEY (\`${fieldName}\`)`; + finalQueryString += ` REFERENCES \`${destinationTableName}\`(\`${destinationTableColumnName}\`)`; + if (cascadeDelete) + finalQueryString += ` ON DELETE CASCADE`; + if (cascadeUpdate) + finalQueryString += ` ON UPDATE CASCADE`; + // let foreinKeyText = `ADD CONSTRAINT \`${foreignKeyName}\` FOREIGN KEY (\`${destinationTableColumnType}\`) REFERENCES \`${destinationTableName}\`(\`${destinationTableColumnName}\`)${ + // cascadeDelete ? " ON DELETE CASCADE" : "" + // }${cascadeUpdate ? " ON UPDATE CASCADE" : ""}`; + // let finalQueryString = `ALTER TABLE \`${dbFullName}\`.\`${tableName}\` ${foreinKeyText}`; + const addForeignKey = await varDatabaseDbHandler({ + queryString: finalQueryString, + }); + if (!(addForeignKey === null || addForeignKey === void 0 ? void 0 : addForeignKey.serverStatus)) { + errorLogs === null || errorLogs === void 0 ? void 0 : errorLogs.push(addForeignKey); + } +} diff --git a/dist/package-shared/shell/utils/noDatabaseDbHandler.js b/dist/package-shared/shell/utils/noDatabaseDbHandler.js index fd976f7..fdb8396 100644 --- a/dist/package-shared/shell/utils/noDatabaseDbHandler.js +++ b/dist/package-shared/shell/utils/noDatabaseDbHandler.js @@ -1,34 +1,17 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = noDatabaseDbHandler; -const dbHandler_1 = __importDefault(require("./dbHandler")); -function noDatabaseDbHandler(queryString) { - return __awaiter(this, void 0, void 0, function* () { - var _a; - let results; - try { - results = yield (0, dbHandler_1.default)({ query: queryString }); - } - catch (error) { - (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `No DB Handler Error`, error); - } - if (results) { - return results; - } - else { - return null; - } - }); +import dbHandler from "../../functions/backend/dbHandler"; +export default async function noDatabaseDbHandler(queryString) { + var _a; + let results; + try { + results = await dbHandler({ query: queryString }); + } + catch (error) { + (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `No DB Handler Error`, error); + } + if (results) { + return results; + } + else { + return null; + } } diff --git a/dist/package-shared/shell/utils/slugToCamelTitle.js b/dist/package-shared/shell/utils/slugToCamelTitle.js index 300c96f..5655743 100644 --- a/dist/package-shared/shell/utils/slugToCamelTitle.js +++ b/dist/package-shared/shell/utils/slugToCamelTitle.js @@ -1,10 +1,7 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = slugToCamelTitle; /** * # Sulg To Camel Case */ -function slugToCamelTitle(text) { +export default function slugToCamelTitle(text) { if (text) { let addArray = text.split("-").filter((item) => item !== ""); let camelArray = addArray.map((item) => { diff --git a/dist/package-shared/shell/utils/supplementTable.js b/dist/package-shared/shell/utils/supplementTable.js index c41d3dc..f8f3b8d 100644 --- a/dist/package-shared/shell/utils/supplementTable.js +++ b/dist/package-shared/shell/utils/supplementTable.js @@ -1,10 +1,7 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = supplementTable; /** * # Supplement Table */ -function supplementTable({ tableInfoArray }) { +export default function supplementTable({ tableInfoArray }) { /** * Format tableInfoArray * diff --git a/dist/package-shared/shell/utils/updateTable.d.ts b/dist/package-shared/shell/utils/updateTable.d.ts index 290cf59..80b513d 100644 --- a/dist/package-shared/shell/utils/updateTable.d.ts +++ b/dist/package-shared/shell/utils/updateTable.d.ts @@ -1,19 +1,19 @@ +import { DSQL_DatabaseSchemaType, DSQL_FieldSchemaType, DSQL_IndexSchemaType, DSQL_TableSchemaType } from "../../types"; +import { DSQL_DATASQUIREL_USER_DATABASES } from "../../types/dsql"; type Param = { dbFullName: string; tableName: string; - tableSchema: import("../../types").DSQL_TableSchemaType; - tableNameFull?: string; - tableInfoArray: import("../../types").DSQL_FieldSchemaType[]; + tableSchema: DSQL_TableSchemaType; + 
tableFields: DSQL_FieldSchemaType[]; userId?: number | string | null; - dbSchema: import("../../types").DSQL_DatabaseSchemaType[]; - tableIndexes?: import("../../types").DSQL_IndexSchemaType[]; + dbSchema: DSQL_DatabaseSchemaType; + tableIndexes?: DSQL_IndexSchemaType[]; clone?: boolean; - tableIndex?: number; - childDb?: boolean; - recordedDbEntry?: any; + recordedDbEntry?: DSQL_DATASQUIREL_USER_DATABASES; + isMain?: boolean; }; /** * # Update table function */ -export default function updateTable({ dbFullName, tableName, tableInfoArray, userId, dbSchema, tableIndexes, tableSchema, clone, childDb, tableIndex, tableNameFull, recordedDbEntry, }: Param): Promise; +export default function updateTable({ dbFullName, tableName, tableFields, userId, dbSchema, tableIndexes, tableSchema, clone, recordedDbEntry, isMain, }: Param): Promise; export {}; diff --git a/dist/package-shared/shell/utils/updateTable.js b/dist/package-shared/shell/utils/updateTable.js index 52af123..2ad40c8 100644 --- a/dist/package-shared/shell/utils/updateTable.js +++ b/dist/package-shared/shell/utils/updateTable.js @@ -1,405 +1,317 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = updateTable; -const fs_1 = __importDefault(require("fs")); -const varDatabaseDbHandler_1 = __importDefault(require("./varDatabaseDbHandler")); -const defaultFieldsRegexp = /^id$|^uuid$|^date_created$|^date_created_code$|^date_created_timestamp$|^date_updated$|^date_updated_code$|^date_updated_timestamp$/; -const generateColumnDescription_1 = __importDefault(require("./generateColumnDescription")); -const dbHandler_1 = __importDefault(require("./dbHandler")); +import varDatabaseDbHandler from "./varDatabaseDbHandler"; +import generateColumnDescription from "./generateColumnDescription"; +import handleTableForeignKey from "./handle-table-foreign-key"; +import dropAllForeignKeys from "./drop-all-foreign-keys"; +import createTableHandleTableRecord from "./create-table-handle-table-record"; +import defaultFieldsRegexp from "../../functions/dsql/default-fields-regexp"; +import handleIndexescreateDbFromSchema from "../createDbFromSchema/handle-indexes"; +import _ from "lodash"; +import { writeUpdatedDbSchema } from "../createDbFromSchema/grab-required-database-schemas"; +import normalizeText from "../../utils/normalize-text"; /** * # Update table function */ -function updateTable(_a) { - return __awaiter(this, arguments, void 0, function* ({ dbFullName, tableName, tableInfoArray, userId, dbSchema, tableIndexes, tableSchema, clone, childDb, tableIndex, tableNameFull, recordedDbEntry, }) { +export default async function updateTable({ dbFullName, tableName, tableFields, userId, dbSchema, tableIndexes, tableSchema, clone, recordedDbEntry, isMain, }) { + /** + * Initialize + * ========================================== + * @description Initial setup + */ + let errorLogs = []; + /** + * @description Initialize table info array. This value will be + * changing depending on if a field is renamed or not. + */ + let upToDateTableFieldsArray = _.cloneDeep(tableFields); + /** + * @type {string[]} + * @description Table update query string array + */ + const updateTableQueryArray = []; + /** + * @description Push the query initial value + */ + updateTableQueryArray.push(`ALTER TABLE \`${dbFullName}\`.\`${tableName}\``); + /** + * @description Grab Table Record + */ + if (!recordedDbEntry && !isMain) { + throw new Error("Recorded Db entry not found!"); + } + let tableID = await createTableHandleTableRecord({ + recordedDbEntry, + tableSchema, + update: true, + isMain, + }); + if (!tableID && !isMain) { + throw new Error("Recorded Table entry not found!"); + } + /** + * Handle Table updates + * + * @description Try to undate table, catch error if anything goes wrong + */ + try { /** - * Initialize - * ========================================== - * @description Initial setup + * Handle MYSQL Table Indexes + * =================================================== + * @description Iterate through each table index(if available) + * and perform operations */ - var _b; - /** @type {any[]} */ - let errorLogs = []; - /** - * @description Initialize table info array. This value will be - * changing depending on if a field is renamed or not. 
- */ - let upToDateTableFieldsArray = tableInfoArray; - /** - * Handle Table updates - * - * @description Try to undate table, catch error if anything goes wrong - */ - try { - /** - * @type {string[]} - * @description Table update query string array - */ - const updateTableQueryArray = []; - /** - * @type {string[]} - * @description Constriants query string array - */ - const constraintsQueryArray = []; - /** - * @description Push the query initial value - */ - updateTableQueryArray.push(`ALTER TABLE \`${dbFullName}\`.\`${tableName}\``); - if (childDb) { - try { - if (!recordedDbEntry) { - throw new Error("Recorded Db entry not found!"); - } - const existingTable = yield (0, varDatabaseDbHandler_1.default)({ - queryString: `SELECT * FROM datasquirel.user_database_tables WHERE db_id = ? AND table_slug = ?`, - queryValuesArray: [recordedDbEntry.id, tableName], - }); - /** @type {import("../../types").MYSQL_user_database_tables_table_def} */ - const table = existingTable === null || existingTable === void 0 ? void 0 : existingTable[0]; - if (!(table === null || table === void 0 ? void 0 : table.id)) { - const newTableEntry = yield (0, dbHandler_1.default)({ - query: `INSERT INTO datasquirel.user_database_tables SET ?`, - values: { - user_id: recordedDbEntry.user_id, - db_id: recordedDbEntry.id, - db_slug: recordedDbEntry.db_slug, - table_name: tableNameFull, - table_slug: tableName, - child_table: (tableSchema === null || tableSchema === void 0 ? void 0 : tableSchema.childTable) ? "1" : null, - child_table_parent_database: (tableSchema === null || tableSchema === void 0 ? void 0 : tableSchema.childTableDbFullName) || null, - child_table_parent_table: tableSchema.childTableName || null, - date_created: Date(), - date_created_code: Date.now(), - date_updated: Date(), - date_updated_code: Date.now(), - }, - }); - } - } - catch (error) { } + const allExistingIndexes = await varDatabaseDbHandler({ + queryString: `SHOW INDEXES FROM \`${dbFullName}\`.\`${tableName}\` WHERE Index_comment LIKE '%schema_index%'`, + }); + for (let f = 0; f < allExistingIndexes.length; f++) { + const { Key_name } = allExistingIndexes[f]; + try { + const existingKeyInSchema = tableIndexes === null || tableIndexes === void 0 ? void 0 : tableIndexes.find((indexObject) => indexObject.alias === Key_name); + if (!existingKeyInSchema) + throw new Error(`This Index(${Key_name}) Has been Deleted!`); } - /** - * @type {import("../../types").DSQL_MYSQL_SHOW_INDEXES_Type[]} - * @description All indexes from MYSQL db - */ // @ts-ignore - const allExistingIndexes = yield (0, varDatabaseDbHandler_1.default)({ - queryString: `SHOW INDEXES FROM \`${dbFullName}\`.\`${tableName}\``, - }); - /** - * @type {import("../../types").DSQL_MYSQL_SHOW_COLUMNS_Type[]} - * @description All columns from MYSQL db - */ // @ts-ignore - const allExistingColumns = yield (0, varDatabaseDbHandler_1.default)({ - queryString: `SHOW COLUMNS FROM \`${dbFullName}\`.\`${tableName}\``, - }); - //////////////////////////////////////// - /** - * @type {string[]} - * @description Updated column names Array - */ - const updatedColumnsArray = []; - /** - * @description Iterate through every existing column - */ - for (let e = 0; e < allExistingColumns.length; e++) { - const { Field } = allExistingColumns[e]; - if (Field.match(defaultFieldsRegexp)) - continue; + catch (error) { /** - * @description This finds out whether the fieldName corresponds with the MSQL Field name - * if the fildName doesn't match any MYSQL Field name, the field is deleted. 
+ * @description Drop Index: This happens when the MYSQL index is not + * present in the datasquirel DB schema */ - let existingEntry = upToDateTableFieldsArray.filter((column) => column.fieldName === Field || column.originName === Field); - if (existingEntry && existingEntry[0]) { - /** - * @description Check if Field name has been updated - */ - if (existingEntry[0].updatedField && - existingEntry[0].fieldName) { - updatedColumnsArray.push(existingEntry[0].fieldName); - const renameColumn = yield (0, varDatabaseDbHandler_1.default)({ - queryString: `ALTER TABLE \`${dbFullName}\`.\`${tableName}\` RENAME COLUMN \`${existingEntry[0].originName}\` TO \`${existingEntry[0].fieldName}\``, - }); - console.log(`Column Renamed from "${existingEntry[0].originName}" to "${existingEntry[0].fieldName}"`); - /** - * Update Db Schema - * =================================================== - * @description Update Db Schema after renaming column - */ - try { - const userSchemaData = dbSchema; - const targetDbIndex = userSchemaData.findIndex((db) => db.dbFullName === dbFullName); - const targetTableIndex = userSchemaData[targetDbIndex].tables.findIndex((table) => table.tableName === tableName); - const targetFieldIndex = userSchemaData[targetDbIndex].tables[targetTableIndex].fields.findIndex((field) => field.fieldName === existingEntry[0].fieldName); - delete userSchemaData[targetDbIndex].tables[targetTableIndex].fields[targetFieldIndex]["originName"]; - delete userSchemaData[targetDbIndex].tables[targetTableIndex].fields[targetFieldIndex]["updatedField"]; - /** - * @description Set New Table Fields Array - */ - upToDateTableFieldsArray = - userSchemaData[targetDbIndex].tables[targetTableIndex].fields; - fs_1.default.writeFileSync(`${String(process.env.DSQL_USER_DB_SCHEMA_PATH)}/user-${userId}/main.json`, JSON.stringify(userSchemaData), "utf8"); - } - catch ( /** @type {any} */error) { - console.log("Update table error =>", error.message); - } - //////////////////////////////////////// - } - //////////////////////////////////////// - continue; - //////////////////////////////////////// - } - else { - yield (0, varDatabaseDbHandler_1.default)({ - queryString: `ALTER TABLE \`${dbFullName}\`.\`${tableName}\` DROP COLUMN \`${Field}\``, - }); - } - } - /** - * Handle MYSQL Table Indexes - * =================================================== - * @description Iterate through each table index(if available) - * and perform operations - */ - for (let f = 0; f < allExistingIndexes.length; f++) { - const { Key_name, Index_comment } = allExistingIndexes[f]; - /** - * @description Check if this index was specifically created - * by datasquirel - */ - if (Index_comment === null || Index_comment === void 0 ? void 0 : Index_comment.match(/schema_index/)) { - try { - const existingKeyInSchema = tableIndexes === null || tableIndexes === void 0 ? void 0 : tableIndexes.filter((indexObject) => indexObject.alias === Key_name); - if (!(existingKeyInSchema === null || existingKeyInSchema === void 0 ? 
void 0 : existingKeyInSchema[0])) - throw new Error(`This Index(${Key_name}) Has been Deleted!`); - } - catch (error) { - /** - * @description Drop Index: This happens when the MYSQL index is not - * present in the datasquirel DB schema - */ - yield (0, varDatabaseDbHandler_1.default)({ - queryString: `ALTER TABLE \`${dbFullName}\`.\`${tableName}\` DROP INDEX \`${Key_name}\``, - }); - } - } - } - /** - * Handle DATASQUIREL Table Indexes - * =================================================== - * @description Iterate through each datasquirel schema - * table index(if available), and perform operations - */ - if (tableIndexes && tableIndexes[0]) { - for (let g = 0; g < tableIndexes.length; g++) { - const { indexType, indexName, indexTableFields, alias } = tableIndexes[g]; - if (!(alias === null || alias === void 0 ? void 0 : alias.match(/./))) - continue; - /** - * @description Check for existing Index in MYSQL db - */ - try { - const existingKeyInDb = allExistingIndexes.filter((indexObject) => indexObject.Key_name === alias); - if (!existingKeyInDb[0]) - throw new Error("This Index Does not Exist"); - } - catch (error) { - /** - * @description Create new index if determined that it - * doesn't exist in MYSQL db - */ - yield (0, varDatabaseDbHandler_1.default)({ - queryString: `CREATE${(indexType === null || indexType === void 0 ? void 0 : indexType.match(/fullText/i)) ? " FULLTEXT" : ""} INDEX \`${alias}\` ON \`${dbFullName}\`.\`${tableName}\`(${indexTableFields === null || indexTableFields === void 0 ? void 0 : indexTableFields.map((nm) => nm.value).map((nm) => `\`${nm}\``).join(",")}) COMMENT 'schema_index'`, - }); - } - } - } - /** - * Handle MYSQL Foreign Keys - * =================================================== - * @description Iterate through each datasquirel schema - * table index(if available), and perform operations - */ - /** - * @description All MSQL Foreign Keys - * @type {import("../../types").DSQL_MYSQL_FOREIGN_KEYS_Type[] | null} - */ - const allForeignKeys = yield (0, varDatabaseDbHandler_1.default)({ - queryString: `SELECT CONSTRAINT_NAME FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS WHERE CONSTRAINT_SCHEMA = '${dbFullName}' AND TABLE_NAME='${tableName}' AND CONSTRAINT_TYPE='FOREIGN KEY'`, - }); - if (allForeignKeys) { - for (let c = 0; c < allForeignKeys.length; c++) { - const { CONSTRAINT_NAME } = allForeignKeys[c]; - /** - * @description Skip if Key is the PRIMARY Key - */ - if (CONSTRAINT_NAME.match(/PRIMARY/)) - continue; - /** - * @description Drop all foreign Keys to avoid MYSQL errors when adding/updating - * Foreign keys - */ - const dropForeignKey = yield (0, varDatabaseDbHandler_1.default)({ - queryString: `ALTER TABLE \`${dbFullName}\`.\`${tableName}\` DROP FOREIGN KEY \`${CONSTRAINT_NAME}\``, - }); - } - } - /** - * Handle DATASQUIREL schema fields for current table - * =================================================== - * @description Iterate through each field object and - * perform operations - */ - for (let i = 0; i < upToDateTableFieldsArray.length; i++) { - const column = upToDateTableFieldsArray[i]; - const prevColumn = upToDateTableFieldsArray[i - 1]; - const nextColumn = upToDateTableFieldsArray[i + 1]; - const { fieldName, dataType, nullValue, primaryKey, autoIncrement, defaultValue, defaultValueLiteral, foreignKey, updatedField, } = column; - //////////////////////////////////////// - /** - * @description Skip default fields - */ - if (fieldName === null || fieldName === void 0 ? 
void 0 : fieldName.match(/^id$|^date_/)) - continue; - /** - * @description Skip columns that have been updated recently - */ - // if (updatedColumnsArray.includes(fieldName)) continue; - //////////////////////////////////////// - let updateText = ""; - //////////////////////////////////////// - /** @type {any} */ - let existingColumnIndex; - /** - * @description Existing MYSQL field object - */ - let existingColumn = allExistingColumns && allExistingColumns[0] - ? allExistingColumns.filter((_column, _index) => { - if (_column.Field === fieldName) { - existingColumnIndex = _index; - return true; - } - }) - : null; - /** - * @description Construct SQL text snippet for this field - */ - let { fieldEntryText } = (0, generateColumnDescription_1.default)({ - columnData: column, + await varDatabaseDbHandler({ + queryString: `ALTER TABLE \`${dbFullName}\`.\`${tableName}\` DROP INDEX \`${Key_name}\``, }); + } + } + /** + * Handle DATASQUIREL Table Indexes + * =================================================== + * @description Iterate through each datasquirel schema + * table index(if available), and perform operations + */ + if (tableIndexes && tableIndexes[0]) { + handleIndexescreateDbFromSchema({ + dbFullName, + indexes: tableIndexes, + tableName, + }); + } + /** + * Handle MYSQL Foreign Keys + * =================================================== + * @description Iterate through each datasquirel schema + * table index(if available), and perform operations + */ + const allForeignKeys = await varDatabaseDbHandler({ + queryString: `SELECT CONSTRAINT_NAME FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS WHERE CONSTRAINT_SCHEMA = '${dbFullName}' AND TABLE_NAME='${tableName}' AND CONSTRAINT_TYPE='FOREIGN KEY'`, + }); + if (allForeignKeys) { + for (let c = 0; c < allForeignKeys.length; c++) { + const { CONSTRAINT_NAME } = allForeignKeys[c]; /** - * @description Modify Column(Field) if it already exists - * in MYSQL database + * @description Skip if Key is the PRIMARY Key */ - if (existingColumn && ((_b = existingColumn[0]) === null || _b === void 0 ? void 0 : _b.Field)) { - const { Field, Type, Null, Key, Default, Extra } = existingColumn[0]; - let isColumnReordered = i < existingColumnIndex; - if (Field === fieldName && - !isColumnReordered && - (dataType === null || dataType === void 0 ? 
void 0 : dataType.toUpperCase()) === Type.toUpperCase()) { - updateText += `MODIFY COLUMN ${fieldEntryText}`; - // continue; - } - else { + if (CONSTRAINT_NAME.match(/PRIMARY/)) + continue; + /** + * @description Drop all foreign Keys to avoid MYSQL errors when adding/updating + * Foreign keys + */ + const dropForeignKey = await varDatabaseDbHandler({ + queryString: `ALTER TABLE \`${dbFullName}\`.\`${tableName}\` DROP FOREIGN KEY \`${CONSTRAINT_NAME}\``, + }); + } + } + /** + * Handle MYSQL Unique Fields + * =================================================== + * @description Find all existing unique field constraints + * and remove them + */ + const allUniqueConstraints = await varDatabaseDbHandler({ + queryString: normalizeText(`SELECT CONSTRAINT_NAME FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS \ + WHERE CONSTRAINT_SCHEMA = '${dbFullName}' AND TABLE_NAME='${tableName}' AND \ + CONSTRAINT_TYPE='UNIQUE'`), + }); + if (allUniqueConstraints) { + for (let c = 0; c < allUniqueConstraints.length; c++) { + const { CONSTRAINT_NAME } = allUniqueConstraints[c]; + const dropUniqueConstraint = await varDatabaseDbHandler({ + queryString: `ALTER TABLE \`${dbFullName}\`.\`${tableName}\` DROP INDEX \`${CONSTRAINT_NAME}\``, + }); + } + } + /** + * Handle MYSQL Columns (Fields) + * =================================================== + * @description Now handle all fields/columns + */ + let allExistingColumns = await varDatabaseDbHandler({ + queryString: `SHOW COLUMNS FROM \`${dbFullName}\`.\`${tableName}\``, + }); + /** + * @type {string[]} + * @description Updated column names Array + */ + const updatedColumnsArray = []; + /** + * @description Iterate through every existing column + */ + for (let e = 0; e < allExistingColumns.length; e++) { + const { Field } = allExistingColumns[e]; + if (Field.match(defaultFieldsRegexp)) + continue; + /** + * @description This finds out whether the fieldName corresponds with the MSQL Field name + * if the fildName doesn't match any MYSQL Field name, the field is deleted. + */ + let existingEntry = upToDateTableFieldsArray.find((column) => column.fieldName === Field || column.originName === Field); + if (existingEntry) { + /** + * @description Check if Field name has been updated + */ + if (existingEntry.updatedField && existingEntry.fieldName) { + updatedColumnsArray.push(existingEntry.fieldName); + const renameColumn = await varDatabaseDbHandler({ + queryString: `ALTER TABLE \`${dbFullName}\`.\`${tableName}\` RENAME COLUMN \`${existingEntry.originName}\` TO \`${existingEntry.fieldName}\``, + }); + console.log(`Column Renamed from "${existingEntry.originName}" to "${existingEntry.fieldName}"`); + /** + * Update Db Schema + * =================================================== + * @description Update Db Schema after renaming column + */ + try { + const updatedSchemaData = _.cloneDeep(dbSchema); + const targetTableIndex = updatedSchemaData.tables.findIndex((table) => table.tableName === tableName); + const targetFieldIndex = updatedSchemaData.tables[targetTableIndex].fields.findIndex((field) => field.fieldName === existingEntry.fieldName); + delete updatedSchemaData.tables[targetTableIndex] + .fields[targetFieldIndex]["originName"]; + delete updatedSchemaData.tables[targetTableIndex] + .fields[targetFieldIndex]["updatedField"]; + /** + * @description Set New Table Fields Array + */ + upToDateTableFieldsArray = + updatedSchemaData.tables[targetTableIndex].fields; if (userId) { - updateText += `MODIFY COLUMN ${fieldEntryText}${isColumnReordered - ? 
(prevColumn === null || prevColumn === void 0 ? void 0 : prevColumn.fieldName) - ? " AFTER `" + prevColumn.fieldName + "`" - : (nextColumn === null || nextColumn === void 0 ? void 0 : nextColumn.fieldName) - ? " BEFORE `" + nextColumn.fieldName + "`" - : "" - : ""}`; - } - else { - updateText += `MODIFY COLUMN ${fieldEntryText}`; + writeUpdatedDbSchema({ + dbSchema: updatedSchemaData, + userId, + }); } + allExistingColumns = await varDatabaseDbHandler({ + queryString: `SHOW COLUMNS FROM \`${dbFullName}\`.\`${tableName}\``, + }); } - } - else if (prevColumn && prevColumn.fieldName) { - /** - * @description Add new Column AFTER previous column, if - * previous column exists - */ - updateText += `ADD COLUMN ${fieldEntryText} AFTER \`${prevColumn.fieldName}\``; - } - else if (nextColumn && nextColumn.fieldName) { - /** - * @description Add new Column BEFORE next column, if - * next column exists - */ - updateText += `ADD COLUMN ${fieldEntryText} BEFORE \`${nextColumn.fieldName}\``; - } - else { - /** - * @description Append new column to the end of existing columns - */ - updateText += `ADD COLUMN ${fieldEntryText}`; + catch (error) { + console.log("Update table error =>", error.message); + } + //////////////////////////////////////// } //////////////////////////////////////// - /** - * @description Pust SQL code snippet to updateTableQueryArray Array - * Add a comma(,) to separate from the next snippet - */ - updateTableQueryArray.push(updateText + ","); - /** - * @description Handle foreing keys if available, and if there is no - * "clone" boolean = true - */ - if (!clone && foreignKey) { - const { destinationTableName, destinationTableColumnName, cascadeDelete, cascadeUpdate, foreignKeyName, } = foreignKey; - const foreinKeyText = `ADD CONSTRAINT \`${foreignKeyName}\` FOREIGN KEY (\`${fieldName}\`) REFERENCES \`${destinationTableName}\`(\`${destinationTableColumnName}\`)${cascadeDelete ? " ON DELETE CASCADE" : ""}${cascadeUpdate ? " ON UPDATE CASCADE" : ""}`; - // const foreinKeyText = `ADD CONSTRAINT \`${foreignKeyName}\` FOREIGN KEY (${fieldName}) REFERENCES ${destinationTableName}(${destinationTableColumnName})${cascadeDelete ? " ON DELETE CASCADE" : ""}${cascadeUpdate ? " ON UPDATE CASCADE" : ""}` + ","; - const finalQueryString = `ALTER TABLE \`${dbFullName}\`.\`${tableName}\` ${foreinKeyText}`; - const addForeignKey = yield (0, varDatabaseDbHandler_1.default)({ - queryString: finalQueryString, - }); - if (!(addForeignKey === null || addForeignKey === void 0 ? 
void 0 : addForeignKey.serverStatus)) { - errorLogs.push(addForeignKey); - } - } + continue; //////////////////////////////////////// } - /** - * @description Construct final SQL query by combning all SQL snippets in - * updateTableQueryArray Arry, and trimming the final comma(,) - */ - const updateTableQuery = updateTableQueryArray - .join(" ") - .replace(/,$/, ""); - //////////////////////////////////////// - /** - * @description Check if SQL snippets array has more than 1 entries - * This is because 1 entry means "ALTER TABLE table_name" only, without any - * Alter directives like "ADD COLUMN" or "MODIFY COLUMN" - */ - if (updateTableQueryArray.length > 1) { - const updateTable = yield (0, varDatabaseDbHandler_1.default)({ - queryString: updateTableQuery, + else { + await varDatabaseDbHandler({ + queryString: `ALTER TABLE \`${dbFullName}\`.\`${tableName}\` DROP COLUMN \`${Field}\``, }); - return updateTable; + } + } + /** + * Handle DATASQUIREL schema fields for current table + * =================================================== + * @description Iterate through each field object and + * perform operations + */ + for (let i = 0; i < upToDateTableFieldsArray.length; i++) { + const column = upToDateTableFieldsArray[i]; + // const prevColumn = upToDateTableFieldsArray[i - 1]; + // const nextColumn = upToDateTableFieldsArray[i + 1]; + const { fieldName, dataType, foreignKey } = column; + if (!fieldName) + continue; + if (defaultFieldsRegexp.test(fieldName)) + continue; + let updateText = ""; + const existingColumnIndex = allExistingColumns === null || allExistingColumns === void 0 ? void 0 : allExistingColumns.findIndex((_column, _index) => _column.Field === fieldName); + const existingColumn = existingColumnIndex >= 0 + ? allExistingColumns[existingColumnIndex] + : undefined; + let { fieldEntryText } = generateColumnDescription({ + columnData: column, + }); + /** + * @description Modify Column(Field) if it already exists + * in MYSQL database + */ + if (existingColumn === null || existingColumn === void 0 ? 
void 0 : existingColumn.Field) { + const { Field, Type } = existingColumn; + updateText += `MODIFY COLUMN ${fieldEntryText}`; + // if ( + // Field === fieldName && + // dataType?.toUpperCase() === Type.toUpperCase() + // ) { + // } else { + // updateText += `MODIFY COLUMN ${fieldEntryText}`; + // } } else { /** - * @description If only 1 SQL snippet is left in updateTableQueryArray, this - * means that no updates have been made to the table + * @description Append new column to the end of existing columns */ - return "No Changes Made to Table"; + updateText += `ADD COLUMN ${fieldEntryText}`; + } + /** + * @description Pust SQL code snippet to updateTableQueryArray Array + * Add a comma(,) to separate from the next snippet + */ + if (updateText.match(/./)) { + updateTableQueryArray.push(updateText + ","); } } - catch ( /** @type {any} */error) { - console.log('Error in "updateTable" shell function =>', error.message); - return "Error in Updating Table"; + /** + * @description Construct final SQL query by combning all SQL snippets in + * updateTableQueryArray Arry, and trimming the final comma(,) + */ + const updateTableQuery = updateTableQueryArray + .filter((q) => Boolean(q.match(/./))) + .join(" ") + .replace(/,$/, ""); + //////////////////////////////////////// + /** + * @description Check if SQL snippets array has more than 1 entries + * This is because 1 entry means "ALTER TABLE table_name" only, without any + * Alter directives like "ADD COLUMN" or "MODIFY COLUMN" + */ + if (updateTableQueryArray.length > 1) { + const updateTable = await varDatabaseDbHandler({ + queryString: updateTableQuery, + }); + /** + * # Handle Foreign Keys + */ + await dropAllForeignKeys({ dbFullName, tableName }); + for (let i = 0; i < upToDateTableFieldsArray.length; i++) { + const { fieldName, foreignKey } = upToDateTableFieldsArray[i]; + if (!clone && foreignKey && fieldName) { + await handleTableForeignKey({ + dbFullName, + errorLogs, + foreignKey, + fieldName, + tableName, + }); + } + } } - }); + else { + /** + * @description If only 1 SQL snippet is left in updateTableQueryArray, this + * means that no updates have been made to the table + */ + } + return tableID; + } + catch (error) { + console.log('Error in "updateTable" shell function =>', error.message); + return tableID; + } } diff --git a/dist/package-shared/shell/utils/varDatabaseDbHandler.js b/dist/package-shared/shell/utils/varDatabaseDbHandler.js index 1a096a9..0625eb8 100644 --- a/dist/package-shared/shell/utils/varDatabaseDbHandler.js +++ b/dist/package-shared/shell/utils/varDatabaseDbHandler.js @@ -1,62 +1,45 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = varDatabaseDbHandler; -const dbHandler_1 = __importDefault(require("./dbHandler")); +import dbHandler from "../../functions/backend/dbHandler"; /** * # DB handler for specific database */ -function varDatabaseDbHandler(_a) { - return __awaiter(this, arguments, void 0, function* ({ queryString, queryValuesArray, }) { - /** - * Declare variables - * - * @description Declare "results" variable - */ - let results; - /** - * Fetch from db - * - * @description Fetch data from db if no cache - */ - try { - if (queryString && - queryValuesArray && - Array.isArray(queryValuesArray) && - queryValuesArray[0]) { - results = yield (0, dbHandler_1.default)({ - query: queryString, - values: queryValuesArray, - }); - } - else { - results = yield (0, dbHandler_1.default)({ - query: queryString, - }); - } - //////////////////////////////////////// - //////////////////////////////////////// - //////////////////////////////////////// +export default async function varDatabaseDbHandler({ queryString, queryValuesArray, }) { + /** + * Declare variables + * + * @description Declare "results" variable + */ + let results; + /** + * Fetch from db + * + * @description Fetch data from db if no cache + */ + try { + if (queryString && + queryValuesArray && + Array.isArray(queryValuesArray) && + queryValuesArray[0]) { + results = await dbHandler({ + query: queryString, + values: queryValuesArray, + }); } - catch ( /** @type {any} */error) { - console.log("Shell Vardb Error =>", error.message); + else { + results = await dbHandler({ + query: queryString, + }); } - /** - * Return results - * - * @description Return results add to cache if "req" param is passed - */ - return results; - }); + //////////////////////////////////////// + //////////////////////////////////////// + //////////////////////////////////////// + } + catch ( /** @type {any} */error) { + console.log("Shell Vardb Error =>", error.message); + } + /** + * Return results + * + * @description Return results add to cache if "req" param is passed + */ + return results; } diff --git a/dist/package-shared/types/dsql.d.ts b/dist/package-shared/types/dsql.d.ts index 5774dd1..fac9845 100644 --- a/dist/package-shared/types/dsql.d.ts +++ b/dist/package-shared/types/dsql.d.ts @@ -1,4 +1,4 @@ -export declare const DsqlTables: readonly ["users", "mariadb_users", "api_keys", "invitations", "user_users", "delegated_user_tables", "user_databases", "user_database_tables", "user_media", "delegated_users", "unsubscribes", "notifications", "docs_pages", "docs_page_extra_links", "deleted_api_keys", "servers", "process_queue"]; +export declare const DsqlTables: readonly ["users", "mariadb_users", "mariadb_user_databases", "mariadb_user_tables", "mariadb_user_privileges", "api_keys", "api_keys_scoped_resources", "invitations", "delegated_resources", "user_databases", "user_database_tables", "user_media", "user_private_folders", "delegated_users", "unsubscribes", "notifications", "deleted_api_keys", "servers", "process_queue", "backups"]; export type DSQL_DATASQUIREL_USERS = { id?: number; uuid?: string; @@ -20,6 +20,7 @@ export type DSQL_DATASQUIREL_USERS = { mariadb_pass?: string; disk_usage_in_mb?: number; verification_status?: number; + temp_login_code?: string; date_created?: string; date_created_code?: number; date_created_timestamp?: string; @@ -35,7 +36,60 @@ export type DSQL_DATASQUIREL_MARIADB_USERS = { host?: string; password?: string; primary?: number; - 
grants?: string; + all_databases?: 0 | 1; + all_grants?: 0 | 1; + date_created?: string; + date_created_code?: number; + date_created_timestamp?: string; + date_updated?: string; + date_updated_code?: number; + date_updated_timestamp?: string; +}; +export type DSQL_DATASQUIREL_MARIADB_USER_DATABASES = { + id?: number; + uuid?: string; + user_id?: number; + mariadb_user_id?: number; + db_id?: number; + db_schema_id?: number; + db_slug?: string; + all_tables?: 0 | 1; + all_privileges?: 0 | 1; + date_created?: string; + date_created_code?: number; + date_created_timestamp?: string; + date_updated?: string; + date_updated_code?: number; + date_updated_timestamp?: string; +}; +export type DSQL_DATASQUIREL_MARIADB_USER_TABLES = { + id?: number; + uuid?: string; + user_id?: number; + mariadb_user_id?: number; + db_id?: number; + db_schema_id?: number; + db_slug?: string; + table_schema_id?: number; + table_slug?: string; + all_fields?: 0 | 1; + all_privileges?: 0 | 1; + date_created?: string; + date_created_code?: number; + date_created_timestamp?: string; + date_updated?: string; + date_updated_code?: number; + date_updated_timestamp?: string; +}; +export type DSQL_DATASQUIREL_MARIADB_USER_PRIVILEGES = { + id?: number; + uuid?: string; + user_id?: number; + mariadb_user_id?: number; + db_id?: number; + db_schema_id?: number; + db_slug?: string; + privilege?: "ALTER" | "ALTER ROUTINE" | "CREATE" | "CREATE ROUTINE" | "CREATE TEMPORARY TABLES" | "CREATE VIEW" | "DELETE" | "DROP" | "EVENT" | "EXECUTE" | "FILE" | "INDEX" | "INSERT" | "LOCK TABLES" | "PROCESS" | "REFERENCES" | "RELOAD" | "SELECT" | "SHOW VIEW" | "SUPER" | "TRIGGER" | "UPDATE" | "USAGE"; date_created?: string; date_created_code?: number; date_created_timestamp?: string; @@ -50,7 +104,9 @@ export type DSQL_DATASQUIREL_API_KEYS = { name?: string; slug?: string; key?: string; - scope?: string; + scope?: "readOnly" | "fullAccess"; + all_dbs?: 0 | 1; + media_only?: 0 | 1; csrf?: string; date_created?: string; date_created_code?: number; @@ -59,49 +115,34 @@ export type DSQL_DATASQUIREL_API_KEYS = { date_updated_code?: number; date_updated_timestamp?: string; }; +export type DSQL_DATASQUIREL_API_KEYS_SCOPED_RESOURCES = { + id?: number; + uuid?: string; + user_id?: number; + api_key_id?: number; + db_id?: number; + db_schema_id?: number; + db_slug?: string; + table_schema_id?: number; + table_slug?: string; + all_tables?: 0 | 1; + date_created?: string; + date_created_code?: number; + date_created_timestamp?: string; + date_updated?: string; + date_updated_code?: number; + date_updated_timestamp?: string; +}; export type DSQL_DATASQUIREL_INVITATIONS = { - id?: number; - uuid?: string; - inviting_user_id?: number; - invited_user_email?: string; - invitation_status?: string; - database_access?: string; - priviledge?: string; - db_tables_data?: string; - date_created?: string; - date_created_code?: number; - date_created_timestamp?: string; - date_updated?: string; - date_updated_code?: number; - date_updated_timestamp?: string; -}; -export type DSQL_DATASQUIREL_USER_USERS = { id?: number; uuid?: string; user_id?: number; invited_user_id?: number; - database?: string; + invited_user_email?: string; + invitation_status?: "pending" | "accepted" | "rejected" | "cancelled"; database_access?: string; - first_name?: string; - last_name?: string; - email?: string; - username?: string; - password?: string; - phone?: string; - user_type?: string; - user_priviledge?: string; - image?: string; - image_thumbnail?: string; - city?: string; - state?: string; 
- country?: string; - zip_code?: string; - address?: string; - social_login?: number; - social_platform?: string; - social_id?: string; - verification_status?: number; - more_user_data?: string; + db_tables_data?: string; + email_sent?: 0 | 1; date_created?: string; date_created_code?: number; date_created_timestamp?: string; @@ -109,14 +150,19 @@ export type DSQL_DATASQUIREL_USER_USERS = { date_updated_code?: number; date_updated_timestamp?: string; }; -export type DSQL_DATASQUIREL_DELEGATED_USER_TABLES = { +export type DSQL_DATASQUIREL_DELEGATED_RESOURCES = { id?: number; uuid?: string; + delegated_users_id?: number; + user_id?: number; delegated_user_id?: number; - root_user_id?: number; - database?: string; - table?: string; - priviledge?: string; + db_id?: number; + db_schema_id?: number; + db_slug?: string; + table_schema_id?: number; + table_slug?: string; + permission?: "read" | "write" | "edit" | "delete"; + all_tables?: 0 | 1; date_created?: string; date_created_code?: number; date_created_timestamp?: string; @@ -128,6 +174,7 @@ export type DSQL_DATASQUIREL_USER_DATABASES = { id?: number; uuid?: string; user_id?: number; + db_schema_id?: number; db_name?: string; db_slug?: string; db_full_name?: string; @@ -138,9 +185,11 @@ export type DSQL_DATASQUIREL_USER_DATABASES = { remote_db_full_name?: string; remote_connection_host?: string; remote_connection_key?: string; - active_clone?: number; + active_clone?: 0 | 1; active_clone_parent_db?: string; + active_clone_parent_db_id?: number; active_data?: number; + last_checked_date_code?: number; date_created?: string; date_created_code?: number; date_created_timestamp?: string; @@ -153,14 +202,16 @@ export type DSQL_DATASQUIREL_USER_DATABASE_TABLES = { uuid?: string; user_id?: number; db_id?: number; + table_schema_id?: number; db_slug?: string; table_name?: string; table_slug?: string; table_description?: string; child_table?: number; - child_table_parent_database?: string; - child_table_parent_table?: string; - active_data?: number; + child_table_parent_database_schema_id?: number; + child_table_parent_table_schema_id?: number; + active_data?: 0 | 1; + last_checked_date_code?: number; date_created?: string; date_created_code?: number; date_created_timestamp?: string; @@ -176,13 +227,29 @@ export type DSQL_DATASQUIREL_USER_MEDIA = { folder?: string; media_url?: string; media_thumbnail_url?: string; - media_path?: string; - media_thumbnail_path?: string; - media_type?: string; + media_base64?: string; + media_thumbnail_base64?: string; + media_type?: "file" | "image" | "video"; + media_stats?: string; + mime_type?: string; width?: number; height?: number; size?: number; - private?: number; + private?: 0 | 1; + private_folder?: 0 | 1; + date_created?: string; + date_created_code?: number; + date_created_timestamp?: string; + date_updated?: string; + date_updated_code?: number; + date_updated_timestamp?: string; +}; +export type DSQL_DATASQUIREL_USER_PRIVATE_FOLDERS = { + id?: number; + uuid?: string; + user_id?: number; + folder_path?: string; + child_folder?: 0 | 1; date_created?: string; date_created_code?: number; date_created_timestamp?: string; @@ -195,8 +262,6 @@ export type DSQL_DATASQUIREL_DELEGATED_USERS = { uuid?: string; user_id?: number; delegated_user_id?: number; - permissions?: string; - permission_level_code?: number; date_created?: string; date_created_code?: number; date_created_timestamp?: string; @@ -230,38 +295,6 @@ export type DSQL_DATASQUIREL_NOTIFICATIONS = { date_updated_code?: number; date_updated_timestamp?: 
string; }; -export type DSQL_DATASQUIREL_DOCS_PAGES = { - id?: number; - uuid?: string; - title?: string; - slug?: string; - description?: string; - content?: string; - text_content?: string; - level?: number; - page_order?: number; - parent_id?: number; - date_created?: string; - date_created_code?: number; - date_created_timestamp?: string; - date_updated?: string; - date_updated_code?: number; - date_updated_timestamp?: string; -}; -export type DSQL_DATASQUIREL_DOCS_PAGE_EXTRA_LINKS = { - id?: number; - uuid?: string; - docs_page_id?: number; - title?: string; - description?: string; - url?: string; - date_created?: string; - date_created_code?: number; - date_created_timestamp?: string; - date_updated?: string; - date_updated_code?: number; - date_updated_timestamp?: string; -}; export type DSQL_DATASQUIREL_DELETED_API_KEYS = { id?: number; uuid?: string; @@ -307,3 +340,14 @@ export type DSQL_DATASQUIREL_PROCESS_QUEUE = { date_updated_code?: number; date_updated_timestamp?: string; }; +export type DSQL_DATASQUIREL_BACKUPS = { + id?: number; + uuid?: string; + user_id?: number; + date_created?: string; + date_created_code?: number; + date_created_timestamp?: string; + date_updated?: string; + date_updated_code?: number; + date_updated_timestamp?: string; +}; diff --git a/dist/package-shared/types/dsql.js b/dist/package-shared/types/dsql.js index c304dfd..744cebf 100644 --- a/dist/package-shared/types/dsql.js +++ b/dist/package-shared/types/dsql.js @@ -1,22 +1,22 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.DsqlTables = void 0; -exports.DsqlTables = [ +export const DsqlTables = [ "users", "mariadb_users", + "mariadb_user_databases", + "mariadb_user_tables", + "mariadb_user_privileges", "api_keys", + "api_keys_scoped_resources", "invitations", - "user_users", - "delegated_user_tables", + "delegated_resources", "user_databases", "user_database_tables", "user_media", + "user_private_folders", "delegated_users", "unsubscribes", "notifications", - "docs_pages", - "docs_page_extra_links", "deleted_api_keys", "servers", "process_queue", + "backups", ]; diff --git a/dist/package-shared/types/index.d.ts b/dist/package-shared/types/index.d.ts index 9cbbb2c..a8b4527 100644 --- a/dist/package-shared/types/index.d.ts +++ b/dist/package-shared/types/index.d.ts @@ -1,10 +1,33 @@ import type { RequestOptions } from "https"; -import { DSQL_DATASQUIREL_PROCESS_QUEUE, DSQL_DATASQUIREL_USER_DATABASE_TABLES, DSQL_DATASQUIREL_USER_DATABASES, DSQL_DATASQUIREL_USER_MEDIA } from "./dsql"; +import { DSQL_DATASQUIREL_API_KEYS, DSQL_DATASQUIREL_API_KEYS_SCOPED_RESOURCES, DSQL_DATASQUIREL_BACKUPS, DSQL_DATASQUIREL_DELEGATED_RESOURCES, DSQL_DATASQUIREL_DELEGATED_USERS, DSQL_DATASQUIREL_INVITATIONS, DSQL_DATASQUIREL_MARIADB_USERS, DSQL_DATASQUIREL_PROCESS_QUEUE, DSQL_DATASQUIREL_USER_DATABASE_TABLES, DSQL_DATASQUIREL_USER_DATABASES, DSQL_DATASQUIREL_USER_MEDIA, DSQL_DATASQUIREL_USER_PRIVATE_FOLDERS, DSQL_DATASQUIREL_USERS } from "./dsql"; import { Editor } from "tinymce"; import sharp from "sharp"; +import DataTypes from "../data/data-types"; +import { IncomingMessage, ServerResponse } from "http"; +import { CookieNames } from "../dict/cookie-names"; export type DSQL_DatabaseFullName = string; +export type DSQL_DATASQUIREL_USER_BACKUPS_JOIN = DSQL_DATASQUIREL_BACKUPS & { + [k in (typeof UserSelectFields)[number]["alias"]]?: string; +}; +export type DSQL_DATASQUIREL_DELEGATED_DATABASES_JOIN = DSQL_DATASQUIREL_DELEGATED_RESOURCES & DSQL_DATASQUIREL_USER_DATABASES & { + [k in 
(typeof UserSelectFields)[number]["alias"]]?: string; +} & { + [k in (typeof DelegatedUserSelectFields)[number]["alias"]]?: string; +}; +export type DSQL_DATASQUIREL_DELEGATED_USERS_JOIN = DSQL_DATASQUIREL_DELEGATED_USERS & { + [k in (typeof UserSelectFields)[number]["alias"]]?: string; +} & { + [k in (typeof DelegatedUserSelectFields)[number]["alias"]]?: string; +}; +export declare const UsersOmitedFields: readonly ["password", "social_id", "verification_status", "date_created", "date_created_code", "date_created_timestamp", "date_updated", "date_updated_code", "date_updated_timestamp"]; +export type DSQL_DATASQUIREL_USERS_FILTERED = Omit; +export type DSQL_DATASQUIREL_INVITATIONS_JOIN = DSQL_DATASQUIREL_INVITATIONS & { + [k in (typeof UserSelectFields)[number]["alias"]]?: string; +} & { + [k in (typeof InvitedUserSelectFields)[number]["alias"]]?: string; +}; export interface DSQL_DatabaseSchemaType { - id?: number | string; + id?: string | number; dbName?: string; dbSlug?: string; dbFullName?: string; @@ -13,31 +36,28 @@ export interface DSQL_DatabaseSchemaType { tables: DSQL_TableSchemaType[]; childrenDatabases?: DSQL_ChildrenDatabaseObject[]; childDatabase?: boolean; - childDatabaseDbFullName?: string; + childDatabaseDbId?: string | number; updateData?: boolean; } export interface DSQL_ChildrenDatabaseObject { dbId?: string | number; - dbFullName?: string; } export interface DSQL_TableSchemaType { - id?: number | string; + id?: string | number; tableName: string; - tableFullName: string; tableDescription?: string; fields: DSQL_FieldSchemaType[]; indexes?: DSQL_IndexSchemaType[]; childrenTables?: DSQL_ChildrenTablesType[]; childTable?: boolean; updateData?: boolean; - childTableName?: string; - childTableDbFullName?: string; + childTableId?: string | number; tableNameOld?: string; + childTableDbId?: string | number; } export interface DSQL_ChildrenTablesType { - dbNameFull: string; - tableName: string; - tableNameFull?: string; + tableId?: string | number; + dbId?: string | number; } export declare const TextFieldTypesArray: readonly [{ readonly title: "Plain Text"; @@ -63,8 +83,12 @@ export declare const TextFieldTypesArray: readonly [{ }, { readonly title: "Shell"; readonly value: "shell"; +}, { + readonly title: "Code"; + readonly value: "code"; }]; export type DSQL_FieldSchemaType = { + id?: number | string; fieldName?: string; originName?: string; updatedField?: boolean; @@ -90,6 +114,9 @@ export type DSQL_FieldSchemaType = { cssFiles?: string[]; integerLength?: string | number; decimals?: string | number; + moving?: boolean; + code?: boolean; + options?: string[]; } & { [key in (typeof TextFieldTypesArray)[number]["value"]]?: boolean; }; @@ -102,8 +129,9 @@ export interface DSQL_ForeignKeyType { cascadeUpdate?: boolean; } export interface DSQL_IndexSchemaType { + id?: string | number; indexName?: string; - indexType?: string; + indexType?: (typeof IndexTypes)[number]; indexTableFields?: DSQL_IndexTableFieldType[]; alias?: string; newTempIndex?: boolean; @@ -341,7 +369,10 @@ export interface PostInsertReturn { protocol41: boolean; changedRows: number; } -export type UserType = DATASQUIREL_LoggedInUser & {}; +export type UserType = DATASQUIREL_LoggedInUser & { + isSuperUser?: boolean; + staticHost?: string; +}; export interface ApiKeyDef { name: string; scope: string; @@ -438,9 +469,9 @@ export interface MediaFolderPageContextType { export interface TablesContextType { user: UserType; database: DSQL_MYSQL_user_databases_Type; - tables: MYSQL_user_database_tables_table_def[]; - 
targetTable: MYSQL_user_database_tables_table_def | null; - setTargetTable: React.Dispatch>; + tables: DSQL_DATASQUIREL_USER_DATABASE_TABLES[]; + targetTable: DSQL_DATASQUIREL_USER_DATABASE_TABLES | null; + setTargetTable: React.Dispatch>; query: any; confirmedDelegetedUser: any; } @@ -462,9 +493,9 @@ export interface EditTableContextType { export interface SingleDatabaseContextType { user: UserType; database: DSQL_MYSQL_user_databases_Type; - tables: MYSQL_user_database_tables_table_def[]; - targetTable: MYSQL_user_database_tables_table_def | null; - setTargetTable: React.Dispatch>; + tables: DSQL_DATASQUIREL_USER_DATABASE_TABLES[]; + targetTable: DSQL_DATASQUIREL_USER_DATABASE_TABLES | null; + setTargetTable: React.Dispatch>; query: any; confirmedDelegetedUser: any; } @@ -502,30 +533,12 @@ export interface DocsAsidePageObject { parent_id?: number; level?: number; } -export interface AllUserUsersContextType { - user?: UserType; - users?: MYSQL_delegated_users_table_def[]; - targetUser?: MYSQL_user_users_table_def | undefined; - setTargetUser?: React.Dispatch>; - databases?: DSQL_MYSQL_user_databases_Type[]; - pendingInvitations?: MYSQL_invitations_table_def[]; - pendingInvitationsReceived?: any[]; - adminUsers?: any[]; - invitedAccounts?: any[]; -} export interface AddSocialLoginContextType { user?: UserType; database?: DSQL_MYSQL_user_databases_Type; query?: any; socialLogins?: SocialLoginObjectType[]; } -export interface DelegatedDbContextType { - user: UserType; - users: MYSQL_user_users_table_def[]; - targetUser: MYSQL_user_users_table_def | undefined; - setTargetUser: React.Dispatch>; - database: DSQL_MYSQL_user_databases_Type; -} export interface AddUserUserContextType { user: UserType; database: DSQL_MYSQL_user_databases_Type; @@ -533,49 +546,25 @@ export interface AddUserUserContextType { query: any; confirmedDelegetedUser: any; } -export interface UserUsersContextType { - user: UserType; - users: MYSQL_user_users_table_def[]; - targetUser?: MYSQL_user_users_table_def; - setTargetUser: React.Dispatch>; - database: DSQL_MYSQL_user_databases_Type; - table: DSQL_TableSchemaType; - dbSchemaData: DSQL_DatabaseSchemaType[]; - query: any; - confirmedDelegetedUser: any; -} -export interface DatabaseSingleUserContextType { - user: UserType; - database: DSQL_MYSQL_user_databases_Type; - singleUser: MYSQL_user_users_table_def; - table: DSQL_TableSchemaType; - dbSchemaData: DSQL_DatabaseSchemaType[]; - query: any; - confirmedDelegetedUser: any; -} -export interface SingleUserUserContextType { - user: UserType; - singleUser: MYSQL_user_users_table_def; -} export interface AddUserContextType { user: UserType; - users: MYSQL_delegated_users_table_def[]; + users: DSQL_DATASQUIREL_DELEGATED_USERS[]; databases: DSQL_MYSQL_user_databases_Type[]; query: any; } export interface MediaContextType { user: UserType; - media: MYSQL_user_media_table_def[]; - targetMedia: MYSQL_user_media_table_def | null; - setTargetMedia: React.Dispatch>; + media: DSQL_DATASQUIREL_USER_MEDIA[]; + targetMedia: DSQL_DATASQUIREL_USER_MEDIA | null; + setTargetMedia: React.Dispatch>; folders: string[]; staticHost: string; } export interface MediaSubFolderContextType { user: UserType; - media: MYSQL_user_media_table_def[]; - targetMedia: MYSQL_user_media_table_def | null; - setTargetMedia: React.Dispatch>; + media: DSQL_DATASQUIREL_USER_MEDIA[]; + targetMedia: DSQL_DATASQUIREL_USER_MEDIA | null; + setTargetMedia: React.Dispatch>; folders: string[]; query: any; folder: string; @@ -599,7 +588,7 @@ export interface 
SingleTableContextType { user: UserType; database: DSQL_MYSQL_user_databases_Type; table: DSQL_TableSchemaType; - tableRecord: MYSQL_user_database_tables_table_def; + tableRecord: DSQL_DATASQUIREL_USER_DATABASE_TABLES; tableFields: DSQL_FieldSchemaType[]; setTableFields: React.Dispatch>; tableIndexes: DSQL_IndexSchemaType[]; @@ -711,7 +700,7 @@ export interface DbConnectContextType { } export interface ImageObjectType { imageName?: string; - mimeType?: keyof sharp.FormatEnum | sharp.AvailableFormatInfo; + mimeType?: keyof sharp.FormatEnum; imageSize?: number; thumbnailSize?: number; private?: boolean; @@ -769,157 +758,17 @@ export interface UserFileObject2 { root?: string; content?: UserFileObject2[]; } -export interface MYSQL_user_users_table_def { - id?: number; - user_id?: number; - invited_user_id?: number; - database?: string; - database_access?: string; - first_name?: string; - last_name?: string; - email?: string; - username?: string; - password?: string; - phone?: string; - user_type?: string; - user_priviledge?: string; - image?: string; - image_thumbnail?: string; - city?: string; - state?: string; - country?: string; - zip_code?: string; - address?: string; - social_login?: number; - social_platform?: string; - social_id?: string; - verification_status?: number; - more_user_data?: string; - date_created?: string; - date_created_code?: number; - date_created_timestamp?: string; - date_updated?: string; - date_updated_code?: number; - date_updated_timestamp?: string; - inviteeFirstName?: string; - inviteeLastName?: string; - inviteeEmail?: string; - inviteeImage?: string; -} -export interface MYSQL_user_database_tables_table_def { - id?: number; - user_id?: number; - db_id?: number; - db_slug?: string; - table_name?: string; - table_slug?: string; - table_description?: string; - child_table?: number; - active_data?: 0 | 1; - child_table_parent_database?: string; - child_table_parent_table?: string; - date_created?: string; - date_created_code?: number; - date_created_timestamp?: string; - date_updated?: string; - date_updated_code?: number; - date_updated_timestamp?: string; -} -export interface MYSQL_user_media_table_def { - id?: number; - user_id?: number; - media_name?: string; - folder?: string; - media_url?: string; - media_thumbnail_url?: string; - media_path?: string; - media_thumbnail_path?: string; - media_type?: string; - width?: number; - height?: number; - size?: number; - private?: number; - date_created?: string; - date_created_code?: number; - date_created_timestamp?: string; - date_updated?: string; - date_updated_code?: number; - date_updated_timestamp?: string; -} -export interface MYSQL_delegated_users_table_def { - id?: number; - user_id?: number; - delegated_user_id?: number; - permissions?: string; - permission_level_code?: number; - date_created?: string; - date_created_code?: number; - date_created_timestamp?: string; - date_updated?: string; - date_updated_code?: number; - date_updated_timestamp?: string; -} -export interface MYSQL_invitations_table_def { - id?: number; - inviting_user_id?: number; - invited_user_email?: string; - invitation_status?: string; - database_access?: string; - priviledge?: string; - db_tables_data?: string; - date_created?: string; - date_created_code?: number; - date_created_timestamp?: string; - date_updated?: string; - date_updated_code?: number; - date_updated_timestamp?: string; -} -export interface MYSQL_docs_pages_table_def { - id?: number; - title?: string; - slug?: string; - description?: string; - content?: string; - 
text_content?: string; - level?: number; - page_order?: number; - parent_id?: number; - date_created?: string; - date_created_code?: number; - date_created_timestamp?: string; - date_updated?: string; - date_updated_code?: number; - date_updated_timestamp?: string; -} -export interface MYSQL_delegated_user_tables_table_def { - id?: number; - delegated_user_id?: number; - root_user_id?: number; - database?: string; - table?: string; - priviledge?: string; - date_created?: string; - date_created_code?: number; - date_created_timestamp?: string; - date_updated?: string; - date_updated_code?: number; - date_updated_timestamp?: string; -} export type ApiKeyObject = { + id: number; user_id?: string | number; full_access?: boolean; sign?: string; date_code?: number; - target_database?: string; - target_table?: string; error?: string; }; export type AddApiKeyRequestBody = { - api_key_name: string; - api_key_slug: string; - api_key_scope?: "fullAccess" | "readOnly"; - target_database?: string; - target_table?: string; + api_key: DSQL_DATASQUIREL_API_KEYS; + scoped_resources?: DSQL_DATASQUIREL_API_KEYS_SCOPED_RESOURCES[]; }; export type CheckApiCredentialsFn = (param: CheckApiCredentialsFnParam) => ApiKeyObject | null | undefined; export type CheckApiCredentialsFnParam = { @@ -931,7 +780,7 @@ export type CheckApiCredentialsFnParam = { }; export type FetchApiFn = (url: string, options?: FetchApiOptions, csrf?: boolean) => Promise; export type FetchApiOptions = RequestInit & { - method: "POST" | "GET" | "DELETE" | "PUT" | "PATCH" | "post" | "get" | "delete" | "put" | "patch"; + method: (typeof DataCrudRequestMethods)[number] | (typeof DataCrudRequestMethodsLowerCase)[number]; body?: object | string; headers?: FetchHeader; query?: { @@ -948,14 +797,14 @@ export type FetchApiReturn = { [key: string]: any; }; export declare const ServerQueryOperators: readonly ["AND", "OR"]; -export declare const ServerQueryEqualities: readonly ["EQUAL", "LIKE", "NOT EQUAL", "REGEXP", "FULLTEXT"]; +export declare const ServerQueryEqualities: readonly ["EQUAL", "LIKE", "LIKE_RAW", "NOT LIKE", "NOT LIKE_RAW", "NOT EQUAL", "REGEXP", "FULLTEXT", "IN", "NOT IN", "BETWEEN", "NOT BETWEEN", "IS NULL", "IS NOT NULL", "EXISTS", "NOT EXISTS", "GREATER THAN", "GREATER THAN OR EQUAL", "LESS THAN", "LESS THAN OR EQUAL"]; export type ServerQueryParam = { - selectFields?: string[]; - omitFields?: string[]; + selectFields?: (keyof T)[]; + omitFields?: (keyof T)[]; query?: ServerQueryQueryObject; limit?: number; page?: number; @@ -970,6 +819,7 @@ export type ServerQueryParam; existingData?: T; targetId?: string | number; - sanitize?: (data?: T) => T; + sanitize?: ({ data, batchData }: { + data?: T; + batchData?: T[]; + }) => T | T[]; debug?: boolean; }; export type DsqlCrudTransformDataFunction = ServerQueryParam & { query?: ServerQueryQueryObject; }; -export type DsqlCrudParam = { + tableName: string; + deleteKeyValues?: SQLDeleteData[]; + dbFullName?: string; + data?: any; +}; +export type SQLDeleteData = { + key: keyof T; + value: string | number | null | undefined; + operator?: (typeof ServerQueryEqualities)[number]; +}; +export type DsqlCrudParam = { action: (typeof DsqlCrudActions)[number]; - table: string; + table: K; data?: T; + batchData?: T[]; + deleteData?: T; + deleteKeyValues?: SQLDeleteData[]; targetId?: string | number; targetValue?: string | number; - targetField?: string; + targetField?: keyof T; query?: DsqlCrudQueryObject; - sanitize?: (data?: T) => T; + sanitize?: ({ data, batchData }: { + data?: T; + batchData?: T[]; 
+ }) => T | T[]; debug?: boolean; count?: boolean; countOnly?: boolean; + dbFullName?: string; + dbName?: string; }; export type ErrorCallback = (title: string, error: Error, data?: any) => void; export interface MariaDBUser { - Host: string; - User: string; - Password: string; - Select_priv: string; - Insert_priv: string; - Update_priv: string; - Delete_priv: string; - Create_priv: string; - Drop_priv: string; - Reload_priv: string; - Shutdown_priv: string; - Process_priv: string; - File_priv: string; - Grant_priv: string; - References_priv: string; - Index_priv: string; - Alter_priv: string; - Show_db_priv: string; - Super_priv: string; - Create_tmp_table_priv: string; - Lock_tables_priv: string; - Execute_priv: string; - Repl_slave_priv: string; - Repl_client_priv: string; - Create_view_priv: string; - Show_view_priv: string; - Create_routine_priv: string; - Alter_routine_priv: string; - Create_user_priv: string; - Event_priv: string; - Trigger_priv: string; - Create_tablespace_priv: string; - Delete_history_priv: string; - ssl_type: string; - ssl_cipher: string; - x509_issuer: string; - x509_subject: string; - max_questions: number; - max_updates: number; - max_connections: number; - max_user_connections: number; - plugin: string; - authentication_string: string; - password_expired: string; - is_role: string; - default_role: string; - max_statement_time: number; + Host?: string; + User?: string; + Password?: string; + Select_priv?: string; + Insert_priv?: string; + Update_priv?: string; + Delete_priv?: string; + Create_priv?: string; + Drop_priv?: string; + Reload_priv?: string; + Shutdown_priv?: string; + Process_priv?: string; + File_priv?: string; + Grant_priv?: string; + References_priv?: string; + Index_priv?: string; + Alter_priv?: string; + Show_db_priv?: string; + Super_priv?: string; + Create_tmp_table_priv?: string; + Lock_tables_priv?: string; + Execute_priv?: string; + Repl_slave_priv?: string; + Repl_client_priv?: string; + Create_view_priv?: string; + Show_view_priv?: string; + Create_routine_priv?: string; + Alter_routine_priv?: string; + Create_user_priv?: string; + Event_priv?: string; + Trigger_priv?: string; + Create_tablespace_priv?: string; + Delete_history_priv?: string; + ssl_type?: string; + ssl_cipher?: string; + x509_issuer?: string; + x509_subject?: string; + max_questions?: number; + max_updates?: number; + max_connections?: number; + max_user_connections?: number; + plugin?: string; + authentication_string?: string; + password_expired?: string; + is_role?: string; + default_role?: string; + max_statement_time?: number; } +export declare const QueryFields: readonly ["duplicate", "user_id", "delegated_user_id", "db_id", "table_id", "db_slug"]; export type PagePropsType = { user?: UserType | null; pageUrl?: string | null; - query?: any; + envObject?: { + [k in (typeof EnvKeys)[number]]?: string; + } | null; + query?: { + [k in (typeof QueryFields)[number]]?: string; + }; databases?: DSQL_DATASQUIREL_USER_DATABASES[] | null; + delegatedDatabases?: DSQL_DATASQUIREL_DELEGATED_DATABASES_JOIN[] | null; + delegatedDatabase?: DSQL_DATASQUIREL_DELEGATED_DATABASES_JOIN | null; database?: DSQL_DATASQUIREL_USER_DATABASES | null; + invitationsSent?: DSQL_DATASQUIREL_INVITATIONS_JOIN[] | null; + invitationsReceived?: DSQL_DATASQUIREL_INVITATIONS_JOIN[] | null; + invitationSent?: DSQL_DATASQUIREL_INVITATIONS_JOIN | null; + invitationReceived?: DSQL_DATASQUIREL_INVITATIONS_JOIN | null; databaseTables?: DSQL_DATASQUIREL_USER_DATABASE_TABLES[] | null; + delegatedUsers?: 
DSQL_DATASQUIREL_DELEGATED_USERS_JOIN[] | null; + delegatedUser?: DSQL_DATASQUIREL_DELEGATED_USERS_JOIN | null; + usersWhoDelegatedMe?: DSQL_DATASQUIREL_DELEGATED_USERS_JOIN[] | null; + userWhoDelegatedMe?: DSQL_DATASQUIREL_DELEGATED_USERS_JOIN | null; databaseTable?: DSQL_DATASQUIREL_USER_DATABASE_TABLES | null; + singleUser?: DSQL_DATASQUIREL_USERS_FILTERED | null; dbCount?: number | null; tableCount?: number | null; mediaCount?: number | null; apiKeysCount?: number | null; databaseSchema?: DSQL_DatabaseSchemaType | null; + clonedDatabaseSchema?: DSQL_DatabaseSchemaType | null; tableSchema?: DSQL_TableSchemaType | null; userMedia?: DSQL_DATASQUIREL_USER_MEDIA[] | null; mediaCurrentFolder?: string | null; appData?: DsqlAppData | null; staticHost?: string | null; folders?: string[] | null; + activeClonedTable?: boolean | null; + tableEntries?: DefaultEntryType[] | null; + tableEntriesCount?: number | null; + tableEntry?: DefaultEntryType | null; + apiKeys?: DSQL_DATASQUIREL_API_KEYS[] | null; + mariadbUsers?: DSQL_DATASQUIREL_MARIADB_USERS[] | null; + mariadbUser?: DSQL_DATASQUIREL_MARIADB_USERS | null; + privateFolders?: DSQL_DATASQUIREL_USER_PRIVATE_FOLDERS[] | null; + privateFolder?: DSQL_DATASQUIREL_USER_PRIVATE_FOLDERS | null; + appConfig?: SiteConfig | null; + userConfig?: SiteConfig | null; + suUsers?: DSQL_DATASQUIREL_USERS[] | null; + suUsersCount?: number | null; + suUser?: DSQL_DATASQUIREL_USERS | null; + dsqlMainSchema?: DSQL_DatabaseSchemaType | null; + suSQLUsers?: MariaDBUser[] | null; + suSQLUsersCount?: number | null; + suSQLUser?: MariaDBUser | null; + backups?: DSQL_DATASQUIREL_BACKUPS[] | null; + backup?: DSQL_DATASQUIREL_BACKUPS | null; + appBackups?: DSQL_DATASQUIREL_BACKUPS[] | null; + appBackup?: DSQL_DATASQUIREL_BACKUPS | null; + userBackups?: DSQL_DATASQUIREL_USER_BACKUPS_JOIN[] | null; + userBackup?: DSQL_DATASQUIREL_USER_BACKUPS_JOIN | null; + suDatabases?: SQLShowDatabaseObject[] | null; + suDatabase?: SQLShowDatabaseObject | null; + isSuperUserPage?: boolean; + appVersion?: (typeof AppVersions)[number]; }; export type APIResponseObject = { success: boolean; payload?: T; + payloadBase64?: string; + payloadThumbnailBase64?: string; + payloadURL?: string; + payloadThumbnailURL?: string; error?: any; msg?: string; - queryRes?: any; + queryObject?: { + sql?: string; + params?: string[]; + }; status?: number; + count?: number; + errors?: DSQLErrorObject[]; + debug?: any; + batchPayload?: any[][] | null; }; export declare const UserTypes: readonly ["su", "admin"]; export declare const SignUpParadigms: readonly [{ @@ -1485,16 +1423,17 @@ export type DockerCompose = { networks: DockerComposeNetworks; name: string; }; -export declare const DockerComposeServices: readonly ["setup", "cron", "reverse-proxy", "webapp", "websocket", "static", "db", "db-load-balancer", "post-db-setup"]; +export declare const DockerComposeServices: readonly ["setup", "cron", "reverse-proxy", "webapp", "websocket", "static", "db", "maxscale", "post-db-setup", "web-app-post-db-setup", "post-replica-db-setup", "db-replica-1", "db-replica-2", "db-cron", "web-app-post-db-setup"]; export type DockerComposeServices = { [key in (typeof DockerComposeServices)[number]]: DockerComposeServiceWithBuildObject; }; export type DockerComposeNetworks = { - datasquirel: { - driver: "bridge"; - ipam: { + [k: string]: { + driver?: "bridge"; + ipam?: { config: DockerComposeNetworkConfigObject[]; }; + external?: boolean; }; }; export type DockerComposeNetworkConfigObject = { @@ -1525,7 +1464,7 @@ export type 
DockerComposeServicesBuildObject = { dockerfile: string; }; export type DockerComposeServiceNetworkObject = { - datasquirel: { + [k: string]: { ipv4_address: string; }; }; @@ -1539,7 +1478,9 @@ export type SiteSetup = { }; }; }; -export type AppRefObject = {}; +export type AppRefObject = { + currentMariadbUser?: DSQL_DATASQUIREL_MARIADB_USERS; +}; export type DsqlAppData = { DSQL_REMOTE_SQL_HOST?: string; DSQL_SU_USER_ID?: string; @@ -1552,9 +1493,274 @@ export type DsqlAppData = { DSQL_FACEBOOK_APP_ID?: string; DSQL_GITHUB_ID?: string; }; -export declare const MediaTypes: readonly ["image", "file"]; +export declare const MediaTypes: readonly ["image", "file", "video"]; export type MediaUploadDataType = ImageObjectType & FileObjectType & { private?: boolean; + privateFolder?: boolean; + overwrite?: boolean; }; export declare const ImageMimeTypes: (keyof sharp.FormatEnum)[]; +export declare const FileMimeTypes: readonly ["pdf", "csv", "json", "sql", "xlsx", "txt", "zip", "xz", "yaml", "yml"]; +export declare const VideoMimeTypes: readonly ["mp4", "wav"]; +export declare const CurrentlyEditedFieldActions: readonly ["edit-field", "edit-index", "delete-field", "delete-index", "new-field", "new-index", "move-up", "move-down", "complete"]; +export type CurrentlyEditedTableSchemaType = { + action: (typeof CurrentlyEditedFieldActions)[number]; + field?: DSQL_FieldSchemaType; + fieldIndex?: number; + index?: DSQL_IndexSchemaType; + indexIndex?: number; + spliceIndex?: number; + elRef?: React.RefObject; +}; +export type DataTypesType = { + title: string; + name: (typeof DataTypes)[number]["name"]; + value?: string; + argument?: true; + description?: string; + maxValue?: number; +}; +export type DefaultSQLValuesLiteralObject = { + title: string; + value: string; + description?: string; + dataType: (typeof DataTypes)[number]["name"]; +}; +export declare const DefaultSQLValuesLiteral: DefaultSQLValuesLiteralObject[]; +export type ClonedTableInfo = { + dbId?: string | number; + tableId?: string | number; + keepUpdated?: boolean; + keepDataUpdated?: boolean; +}; +export type DefaultEntryType = { + id?: number; + uuid?: string; + date_created?: string; + date_created_code?: number; + date_created_timestamp?: string; + date_updated?: string; + date_updated_code?: number; + date_updated_timestamp?: string; +} & { + [k: string]: string | number | null; +}; +export declare const IndexTypes: readonly ["regular", "full_text"]; +export type LoginUserParam = { + key?: string; + database: string; + payload: { + email?: string; + username?: string; + password?: string; + }; + additionalFields?: string[]; + request?: IncomingMessage & { + [s: string]: any; + }; + response?: ServerResponse & { + [s: string]: any; + }; + encryptionKey?: string; + encryptionSalt?: string; + email_login?: boolean; + email_login_code?: string; + temp_code_field?: string; + token?: boolean; + user_id?: string | number; + skipPassword?: boolean; + debug?: boolean; + skipWriteAuthFile?: boolean; + apiUserID?: string | number; + dbUserId?: string | number; + cleanupTokens?: boolean; + secureCookie?: boolean; +}; +export declare const UserSelectFields: readonly [{ + readonly field: "first_name"; + readonly alias: "user_first_name"; +}, { + readonly field: "last_name"; + readonly alias: "user_last_name"; +}, { + readonly field: "email"; + readonly alias: "user_email"; +}, { + readonly field: "image_thumbnail"; + readonly alias: "user_image_thumbnail"; +}]; +export declare const DelegatedUserSelectFields: readonly [{ + readonly field: 
"first_name"; + readonly alias: "delegated_user_first_name"; +}, { + readonly field: "last_name"; + readonly alias: "delegated_user_last_name"; +}, { + readonly field: "email"; + readonly alias: "delegated_user_email"; +}, { + readonly field: "image_thumbnail"; + readonly alias: "delegated_user_image_thumbnail"; +}]; +export declare const InvitedUserSelectFields: readonly [{ + readonly field: "first_name"; + readonly alias: "invited_user_first_name"; +}, { + readonly field: "last_name"; + readonly alias: "invited_user_last_name"; +}, { + readonly field: "email"; + readonly alias: "invited_user_email"; +}, { + readonly field: "image_thumbnail"; + readonly alias: "invited_user_image_thumbnail"; +}]; +export type DefaultLocalResourcesHookParams = { + refresh?: number; + setLoading?: React.Dispatch>; + loadingEndTimeout?: number; + user?: UserType | null; + ready?: boolean; + query?: DsqlCrudQueryObject; +}; +export type DelegatedUserPermissionObject = { + dbId?: string | number; + tableId?: string | number; + dbSlug?: string; + tableSlug?: string; + permission?: DSQL_DATASQUIREL_DELEGATED_RESOURCES["permission"]; +}; +export type QueryObject = { + [k in (typeof QueryFields)[number]]?: string; +}; +export type CookiesObject = { + [k in (typeof CookieNames)[keyof typeof CookieNames]]?: string; +}; +export interface CookieOptions { + expires?: Date; + maxAge?: number; + path?: string; + domain?: string; + secure?: boolean; + httpOnly?: boolean; +} +export type DSQLErrorObject = { + sql?: string; + sqlValues?: any[]; + error?: string; +}; +export declare const SQLPermissions: readonly ["ALL PRIVILEGES", "ALTER", "ALTER ROUTINE", "CREATE", "CREATE ROUTINE", "CREATE TEMPORARY TABLES", "CREATE VIEW", "DELETE", "DROP", "EVENT", "EXECUTE", "FILE", "INDEX", "INSERT", "LOCK TABLES", "PROCESS", "REFERENCES", "RELOAD", "REPLICATION CLIENT", "REPLICATION SLAVE", "SELECT", "SHOW VIEW", "SUPER", "TRIGGER", "UPDATE", "USAGE"]; +export declare const UserSQLPermissions: readonly ["SELECT", "ALTER", "ALTER ROUTINE", "CREATE", "CREATE ROUTINE", "CREATE TEMPORARY TABLES", "CREATE VIEW", "DELETE", "DROP", "EVENT", "EXECUTE", "FILE", "INDEX", "INSERT", "LOCK TABLES", "PROCESS", "REFERENCES", "RELOAD", "SHOW VIEW", "SUPER", "TRIGGER", "UPDATE", "USAGE"]; +export type DatabaseScopedAccessObjectAccessedDatabase = { + dbId?: string | number; + dbSlug?: string; + dbSchemaId?: string | number; +}; +export type DatabaseScopedAccessObjectTable = { + dbId?: string | number; + dbSlug: string; + dbSchemaId?: string | number; + tableSlug?: string; + tableSchemaId?: string | number; +}; +export type DatabaseScopedAccessObject = { + accessedDatabase: DatabaseScopedAccessObjectAccessedDatabase; + dbSlug: string; + grants?: UserGrantType[]; + allGrants?: boolean; + tables?: DatabaseScopedAccessObjectTable[]; + allTables?: boolean; +}; +export type UserGrantType = (typeof UserSQLPermissions)[number]; +export type SiteConfig = { + main: SiteConfigMain; + mariadb_servers?: SiteConfigMariadbServers; + maxscale?: SiteConfigMaxscale; +}; +export type SiteConfigMain = { + max_image_width?: SiteConfigMainValue; + thumbnail_size?: SiteConfigMainValue; + sharp_image_quality?: SiteConfigMainValue; + max_backups?: SiteConfigMainValue; + max_disk_usage?: SiteConfigMainValue; +}; +export type SiteConfigMainValue = { + value: number | null; + description?: string | null; +}; +export type SiteConfigMariadbServers = { + primary: SiteConfigMariadbServer; + replicas: SiteConfigMariadbServer[]; +}; +export type SiteConfigMariadbServer = { + server_id: 
number; + ip: string; + proxy_ip?: string; + master_ip?: string; + master_port?: number; + host?: string; + port: number; + /** + * Whether this replica belongs in the + * same docker compose stack as main + */ + is_stack_replica?: boolean; + users: { + root: SiteConfigMariadbServerUser; + replication: SiteConfigMariadbServerUser; + }; +}; +export type SiteConfigMariadbServerUser = { + user: string; + pass: string; + host?: string; +}; +export type SiteConfigMaxscale = { + read_write_port: number; + read_only_port: number; + admin_port: number; +}; +export declare const APIParadigms: readonly ["crud", "media", "sql"]; +export declare const AppVersions: readonly [{ + readonly title: "Community"; + readonly value: "community"; +}, { + readonly title: "Pro"; + readonly value: "pro"; +}, { + readonly title: "Enterprise"; + readonly value: "enterprise"; +}, { + readonly title: "Full"; + readonly value: "full"; +}]; +export declare const EnvKeys: readonly ["DSQL_HOST", "NEXT_PUBLIC_DSQL_HOST", "DSQL_STATIC_HOST", "DSQL_SOCKET_DOMAIN", "DSQL_HOST_ENV", "DSQL_PORT", "DSQL_PRODUCTION_PORT", "DSQL_STATIC_SERVER_PORT", "DSQL_SITE_URL", "DSQL_REMOTE_SQL_HOST", "NEXT_PUBLIC_DSQL_REMOTE_SQL_HOST", "DSQL_DB_TARGET_IP_ADDRESS", "NEXT_PUBLIC_VERSION", "DSQL_USER_DB_PREFIX", "DSQL_USER_DELEGATED_DB_COOKIE_PREFIX", "DSQL_NETWORK_IP_PREFIX", "DSQL_NETWORK_GATEWAY", "DSQL_NETWORK_SUBNET", "DSQL_MARIADB_MASTER_HOST", "DSQL_DB_HOST", "DSQL_WEB_APP_HOST", "DSQL_DB_USERNAME", "DSQL_DB_PASSWORD", "DSQL_MARIADB_ROOT_PASSWORD", "DSQL_REPLICATION_USER_PASSWORD", "DSQL_DB_NAME", "DSQL_MARIADB_REPLICATION_PASSWORD", "DSQL_MAXSCALE_PASSWORD", "DSQL_DB_READ_ONLY_USERNAME", "DSQL_DB_READ_ONLY_PASSWORD", "DSQL_DB_FULL_ACCESS_USERNAME", "DSQL_DB_FULL_ACCESS_PASSWORD", "DSQL_DB_EXPOSED_PORT", "DSQL_ENCRYPTION_PASSWORD", "DSQL_ENCRYPTION_SALT", "DSQL_SU_USER_ID", "DSQL_SU_USER_UUID", "DSQL_SU_EMAIL", "DSQL_GOOGLE_CLIENT_ID", "NEXT_PUBLIC_DSQL_GOOGLE_CLIENT_ID", "DSQL_FACEBOOK_APP_ID", "DSQL_FACEBOOK_SECRET", "DSQL_MAIL_HOST", "DSQL_MAIL_EMAIL", "DSQL_MAIL_PASSWORD", "DSQL_TINY_MCE_API_KEY", "DSQL_GITHUB_ID", "DSQL_GITHUB_SECRET", "DSQL_GITHUB_WEBHOOK_SECRET", "DSQL_GITHUB_WEBHOOK_URL", "DSQL_DEPLOY_SERVER_PORT", "DSQL_DOCKERFILE", "DSQL_VOLUME_APP", "DSQL_VOLUME_STATIC", "DSQL_VOLUME_STATIC_CONFIGURATION_FILE", "DSQL_VOLUME_DB", "DSQL_VOLUME_DB_CONFIG", "DSQL_VOLUME_DB_SETUP", "DSQL_VOLUME_DB_SSL", "DSQL_USER_LOGIN_KEYS_PATH", "DSQL_API_KEYS_PATH", "DSQL_APP_DIR", "DSQL_DATA_DIR", "DSQL_CONTACT_EMAIL", "DSQL_SSL_DIR", "DSQL_DEPLOYMENT_NAME", "DSQL_COOKIES_PREFIX", "DSQL_COOKIES_KEY_NAME", "DSQL_WEB_APP_FAIL_COUNTS", "NODE_ARCH", "DSQL_WEBSOCKET_PORT", "DSQL_WEBSOCKET_URL", "NEXT_PUBLIC_DSQL_WEBSOCKET_URL", "S3_ACCESS_KEY_ID", "S3_SECRET_ACCESS", "DSQL_ADDITIONAL_MARIADB_SERVERS", "DSQL_ARCJET_KEY"]; +export type SQLShowDatabaseObject = { + Database?: string; +}; +export type AddUpdateMariadbUserAPIReqBody = { + mariadbUser: DSQL_DATASQUIREL_MARIADB_USERS; + grants?: UserGrantType[]; + accessedDatabases?: DatabaseScopedAccessObject[]; + isAllGrants?: boolean; + isAllDbsAccess?: boolean; +}; +export type APIGetMediaParams = { + mediaID?: string | number; + mediaName?: string; + folder?: string; + query?: DsqlCrudQueryObject; + skipBase64?: "true" | "false"; + stream?: "stream"; + thumbnail?: "true" | "false"; +}; +export type AddMediaAPIBody = { + media: MediaUploadDataType[]; + folder?: string | null; + type: (typeof MediaTypes)[number]; +}; export {}; diff --git a/dist/package-shared/types/index.js 
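An illustrative sketch (not part of the patch) of how the reshaped delete-filter types above might be used from application code. The BlogPost row type, the variable names, and the trimmed local type copies are assumptions made up for the example; the only details taken from the hunk are the SQLDeleteData fields (key, value, operator) and the widened ServerQueryEqualities operator list.

// Local, trimmed copies of the shapes declared in the hunk above, so the
// sketch compiles standalone (the real ones live in package-shared/types).
type ServerQueryEquality =
    | "EQUAL" | "LIKE" | "NOT EQUAL" | "IN" | "BETWEEN"
    | "IS NULL" | "LESS THAN" | "GREATER THAN";

type SQLDeleteData<T> = {
    key: keyof T;
    value: string | number | null | undefined;
    operator?: ServerQueryEquality;
};

// Hypothetical row shape for a user table; field names are examples only.
type BlogPost = {
    id?: number;
    title?: string;
    slug?: string;
    status?: string | null;
};

// A delete filter in the new shape: `key` is checked against the row type,
// and `operator` accepts the expanded equality set ("IN", "BETWEEN",
// "IS NULL", "LESS THAN", ...) rather than only "EQUAL"/"LIKE".
const deleteWhere: SQLDeleteData<BlogPost>[] = [
    { key: "status", value: "draft", operator: "EQUAL" },
    { key: "id", value: 120, operator: "LESS THAN" },
];

A filter like deleteWhere would then be passed as `deleteKeyValues` on a DsqlCrudParam with `action: "delete"`, instead of pointing at a single `targetId`/`targetField` pair.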
b/dist/package-shared/types/index.js index 0f01dbd..314c9b3 100644 --- a/dist/package-shared/types/index.js +++ b/dist/package-shared/types/index.js @@ -1,7 +1,15 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.ImageMimeTypes = exports.MediaTypes = exports.DockerComposeServices = exports.DatasquirelWindowEvents = exports.WebSocketEvents = exports.QueueJobTypes = exports.SignUpParadigms = exports.UserTypes = exports.DsqlCrudActions = exports.DataCrudRequestMethods = exports.ServerQueryEqualities = exports.ServerQueryOperators = exports.TextFieldTypesArray = void 0; -exports.TextFieldTypesArray = [ +export const UsersOmitedFields = [ + "password", + "social_id", + "verification_status", + "date_created", + "date_created_code", + "date_created_timestamp", + "date_updated", + "date_updated_code", + "date_updated_timestamp", +]; +export const TextFieldTypesArray = [ { title: "Plain Text", value: "plain" }, { title: "Rich Text", value: "richText" }, { title: "JSON", value: "json" }, @@ -10,19 +18,58 @@ exports.TextFieldTypesArray = [ { title: "CSS", value: "css" }, { title: "Javascript", value: "javascript" }, { title: "Shell", value: "shell" }, + { title: "Code", value: "code" }, ]; -exports.ServerQueryOperators = ["AND", "OR"]; -exports.ServerQueryEqualities = [ +export const ServerQueryOperators = ["AND", "OR"]; +export const ServerQueryEqualities = [ "EQUAL", "LIKE", + "LIKE_RAW", + "NOT LIKE", + "NOT LIKE_RAW", "NOT EQUAL", "REGEXP", "FULLTEXT", + "IN", + "NOT IN", + "BETWEEN", + "NOT BETWEEN", + "IS NULL", + "IS NOT NULL", + "EXISTS", + "NOT EXISTS", + "GREATER THAN", + "GREATER THAN OR EQUAL", + "LESS THAN", + "LESS THAN OR EQUAL", ]; -exports.DataCrudRequestMethods = ["GET", "POST", "PUT", "DELETE"]; -exports.DsqlCrudActions = ["insert", "update", "delete", "get"]; -exports.UserTypes = ["su", "admin"]; -exports.SignUpParadigms = [ +export const DataCrudRequestMethods = [ + "GET", + "POST", + "PUT", + "PATCH", + "DELETE", + "OPTIONS", +]; +export const DataCrudRequestMethodsLowerCase = [ + "get", + "post", + "put", + "patch", + "delete", + "options", +]; +export const DsqlCrudActions = ["insert", "update", "delete", "get"]; +export const QueryFields = [ + "duplicate", + "user_id", + "delegated_user_id", + "db_id", + "table_id", + "db_slug", +]; +export const UserTypes = ["su", "admin"]; +export const SignUpParadigms = [ { name: "email", }, @@ -30,8 +77,8 @@ exports.SignUpParadigms = [ name: "google", }, ]; -exports.QueueJobTypes = ["dummy", "import-database"]; -exports.WebSocketEvents = [ +export const QueueJobTypes = ["dummy", "import-database"]; +export const WebSocketEvents = [ /** * # Client Events * @description Events sent from Client to Server @@ -54,12 +101,12 @@ exports.WebSocketEvents = [ "server:queue-deleted", "server:pty-shell", ]; -exports.DatasquirelWindowEvents = [ +export const DatasquirelWindowEvents = [ "queue-started", "queue-complete", "queue-running", ]; -exports.DockerComposeServices = [ +export const DockerComposeServices = [ "setup", "cron", "reverse-proxy", @@ -67,11 +114,17 @@ exports.DockerComposeServices = [ "websocket", "static", "db", - "db-load-balancer", + "maxscale", "post-db-setup", + "web-app-post-db-setup", + "post-replica-db-setup", + "db-replica-1", + "db-replica-2", + "db-cron", + "web-app-post-db-setup", ]; -exports.MediaTypes = ["image", "file"]; -exports.ImageMimeTypes = [ +export const MediaTypes = ["image", "file", "video"]; +export const ImageMimeTypes = [ "webp", "gif", "svg", @@ -79,3 +132,247 @@ 
exports.ImageMimeTypes = [ "jpeg", "jpg", ]; +export const FileMimeTypes = [ + "pdf", + "csv", + "json", + "sql", + "xlsx", + "txt", + "zip", + "xz", + "yaml", + "yml", +]; +export const VideoMimeTypes = ["mp4", "wav"]; +export const CurrentlyEditedFieldActions = [ + "edit-field", + "edit-index", + "delete-field", + "delete-index", + "new-field", + "new-index", + "move-up", + "move-down", + "complete", +]; +export const DefaultSQLValuesLiteral = [ + { + title: "CURRENT_TIMESTAMP", + value: "CURRENT_TIMESTAMP", + description: "", + dataType: "TIMESTAMP", + }, + { + title: "UUID", + value: "UUID()", + description: "", + dataType: "UUID", + }, +]; +export const IndexTypes = ["regular", "full_text"]; +export const UserSelectFields = [ + { + field: "first_name", + alias: "user_first_name", + }, + { + field: "last_name", + alias: "user_last_name", + }, + { + field: "email", + alias: "user_email", + }, + { + field: "image_thumbnail", + alias: "user_image_thumbnail", + }, +]; +export const DelegatedUserSelectFields = [ + { + field: "first_name", + alias: "delegated_user_first_name", + }, + { + field: "last_name", + alias: "delegated_user_last_name", + }, + { + field: "email", + alias: "delegated_user_email", + }, + { + field: "image_thumbnail", + alias: "delegated_user_image_thumbnail", + }, +]; +export const InvitedUserSelectFields = [ + { + field: "first_name", + alias: "invited_user_first_name", + }, + { + field: "last_name", + alias: "invited_user_last_name", + }, + { + field: "email", + alias: "invited_user_email", + }, + { + field: "image_thumbnail", + alias: "invited_user_image_thumbnail", + }, +]; +export const SQLPermissions = [ + "ALL PRIVILEGES", + "ALTER", + "ALTER ROUTINE", + "CREATE", + "CREATE ROUTINE", + "CREATE TEMPORARY TABLES", + "CREATE VIEW", + "DELETE", + "DROP", + "EVENT", + "EXECUTE", + "FILE", + "INDEX", + "INSERT", + "LOCK TABLES", + "PROCESS", + "REFERENCES", + "RELOAD", + "REPLICATION CLIENT", + "REPLICATION SLAVE", + "SELECT", + "SHOW VIEW", + "SUPER", + "TRIGGER", + "UPDATE", + "USAGE", +]; +export const UserSQLPermissions = [ + "SELECT", + "ALTER", + "ALTER ROUTINE", + "CREATE", + "CREATE ROUTINE", + "CREATE TEMPORARY TABLES", + "CREATE VIEW", + "DELETE", + "DROP", + "EVENT", + "EXECUTE", + "FILE", + "INDEX", + "INSERT", + "LOCK TABLES", + "PROCESS", + "REFERENCES", + "RELOAD", + "SHOW VIEW", + "SUPER", + "TRIGGER", + "UPDATE", + "USAGE", +]; +export const APIParadigms = ["crud", "media", "sql"]; +export const AppVersions = [ + { + title: "Community", + value: "community", + }, + { + title: "Pro", + value: "pro", + }, + { + title: "Enterprise", + value: "enterprise", + }, + { + title: "Full", + value: "full", + }, +]; +export const EnvKeys = [ + "DSQL_HOST", + "NEXT_PUBLIC_DSQL_HOST", + "DSQL_STATIC_HOST", + "DSQL_SOCKET_DOMAIN", + "DSQL_HOST_ENV", + "DSQL_PORT", + "DSQL_PRODUCTION_PORT", + "DSQL_STATIC_SERVER_PORT", + "DSQL_SITE_URL", + "DSQL_REMOTE_SQL_HOST", + "NEXT_PUBLIC_DSQL_REMOTE_SQL_HOST", + "DSQL_DB_TARGET_IP_ADDRESS", + "NEXT_PUBLIC_VERSION", + "DSQL_USER_DB_PREFIX", + "DSQL_USER_DELEGATED_DB_COOKIE_PREFIX", + "DSQL_NETWORK_IP_PREFIX", + "DSQL_NETWORK_GATEWAY", + "DSQL_NETWORK_SUBNET", + "DSQL_MARIADB_MASTER_HOST", + "DSQL_DB_HOST", + "DSQL_WEB_APP_HOST", + "DSQL_DB_USERNAME", + "DSQL_DB_PASSWORD", + "DSQL_MARIADB_ROOT_PASSWORD", + "DSQL_REPLICATION_USER_PASSWORD", + "DSQL_DB_NAME", + "DSQL_MARIADB_REPLICATION_PASSWORD", + "DSQL_MAXSCALE_PASSWORD", + "DSQL_DB_READ_ONLY_USERNAME", + "DSQL_DB_READ_ONLY_PASSWORD", + "DSQL_DB_FULL_ACCESS_USERNAME", + 
"DSQL_DB_FULL_ACCESS_PASSWORD", + "DSQL_DB_EXPOSED_PORT", + "DSQL_ENCRYPTION_PASSWORD", + "DSQL_ENCRYPTION_SALT", + "DSQL_SU_USER_ID", + "DSQL_SU_USER_UUID", + "DSQL_SU_EMAIL", + "DSQL_GOOGLE_CLIENT_ID", + "NEXT_PUBLIC_DSQL_GOOGLE_CLIENT_ID", + "DSQL_FACEBOOK_APP_ID", + "DSQL_FACEBOOK_SECRET", + "DSQL_MAIL_HOST", + "DSQL_MAIL_EMAIL", + "DSQL_MAIL_PASSWORD", + "DSQL_TINY_MCE_API_KEY", + "DSQL_GITHUB_ID", + "DSQL_GITHUB_SECRET", + "DSQL_GITHUB_WEBHOOK_SECRET", + "DSQL_GITHUB_WEBHOOK_URL", + "DSQL_DEPLOY_SERVER_PORT", + "DSQL_DOCKERFILE", + "DSQL_VOLUME_APP", + "DSQL_VOLUME_STATIC", + "DSQL_VOLUME_STATIC_CONFIGURATION_FILE", + "DSQL_VOLUME_DB", + "DSQL_VOLUME_DB_CONFIG", + "DSQL_VOLUME_DB_SETUP", + "DSQL_VOLUME_DB_SSL", + "DSQL_USER_LOGIN_KEYS_PATH", + "DSQL_API_KEYS_PATH", + "DSQL_APP_DIR", + "DSQL_DATA_DIR", + "DSQL_CONTACT_EMAIL", + "DSQL_SSL_DIR", + "DSQL_DEPLOYMENT_NAME", + "DSQL_COOKIES_PREFIX", + "DSQL_COOKIES_KEY_NAME", + "DSQL_WEB_APP_FAIL_COUNTS", + "NODE_ARCH", + "DSQL_WEBSOCKET_PORT", + "DSQL_WEBSOCKET_URL", + "NEXT_PUBLIC_DSQL_WEBSOCKET_URL", + "S3_ACCESS_KEY_ID", + "S3_SECRET_ACCESS", + "DSQL_ADDITIONAL_MARIADB_SERVERS", + "DSQL_ARCJET_KEY", +]; diff --git a/dist/package-shared/utils/backend/config/grab-config.d.ts b/dist/package-shared/utils/backend/config/grab-config.d.ts new file mode 100644 index 0000000..1a37dc0 --- /dev/null +++ b/dist/package-shared/utils/backend/config/grab-config.d.ts @@ -0,0 +1,10 @@ +import { SiteConfig } from "../../../types"; +type Params = { + userId?: string | number; +}; +type Return = { + appConfig: SiteConfig; + userConfig: SiteConfig | null; +}; +export default function grabConfig(params?: Params): Return; +export {}; diff --git a/dist/package-shared/utils/backend/config/grab-config.js b/dist/package-shared/utils/backend/config/grab-config.js new file mode 100644 index 0000000..f84c973 --- /dev/null +++ b/dist/package-shared/utils/backend/config/grab-config.js @@ -0,0 +1,24 @@ +import fs from "fs"; +import grabDirNames from "../names/grab-dir-names"; +import EJSON from "../../ejson"; +import envsub from "../../envsub"; +export default function grabConfig(params) { + const { appConfigJSONFile, userConfigJSONFilePath } = grabDirNames({ + userId: params === null || params === void 0 ? 
void 0 : params.userId, + }); + const appConfigJSON = envsub(fs.readFileSync(appConfigJSONFile, "utf-8")); + const appConfig = EJSON.parse(appConfigJSON); + if (!userConfigJSONFilePath) { + return { appConfig, userConfig: null }; + } + if (!fs.existsSync(userConfigJSONFilePath)) { + fs.writeFileSync(userConfigJSONFilePath, JSON.stringify({ + main: {}, + }), "utf-8"); + } + const userConfigJSON = envsub(fs.readFileSync(userConfigJSONFilePath, "utf-8")); + const userConfig = (EJSON.parse(userConfigJSON) || { + main: {}, + }); + return { appConfig, userConfig }; +} diff --git a/dist/package-shared/utils/backend/config/grab-main-config.d.ts b/dist/package-shared/utils/backend/config/grab-main-config.d.ts new file mode 100644 index 0000000..2c9bb37 --- /dev/null +++ b/dist/package-shared/utils/backend/config/grab-main-config.d.ts @@ -0,0 +1,10 @@ +import { SiteConfigMain } from "../../../types"; +type Params = { + userId?: string | number; +}; +type Return = { + appMainConfig: SiteConfigMain; + userMainConfig?: SiteConfigMain; +}; +export default function grabMainConfig(params?: Params): Return; +export {}; diff --git a/dist/package-shared/utils/backend/config/grab-main-config.js b/dist/package-shared/utils/backend/config/grab-main-config.js new file mode 100644 index 0000000..65321c3 --- /dev/null +++ b/dist/package-shared/utils/backend/config/grab-main-config.js @@ -0,0 +1,6 @@ +import grabConfig from "./grab-config"; +export default function grabMainConfig(params) { + const { appConfig } = grabConfig(); + const { userConfig } = grabConfig({ userId: params === null || params === void 0 ? void 0 : params.userId }); + return { appMainConfig: appConfig.main, userMainConfig: userConfig === null || userConfig === void 0 ? void 0 : userConfig.main }; +} diff --git a/dist/package-shared/utils/backend/config/update-user-config.d.ts b/dist/package-shared/utils/backend/config/update-user-config.d.ts new file mode 100644 index 0000000..bf68909 --- /dev/null +++ b/dist/package-shared/utils/backend/config/update-user-config.d.ts @@ -0,0 +1,11 @@ +import { SiteConfig } from "../../../types"; +type Params = { + userId?: string | number; + newConfig?: SiteConfig; +}; +type Return = { + success?: boolean; + msg?: string; +}; +export default function updateUserConfig({ newConfig, userId, }: Params): Return; +export {}; diff --git a/dist/package-shared/utils/backend/config/update-user-config.js b/dist/package-shared/utils/backend/config/update-user-config.js new file mode 100644 index 0000000..d36d3b0 --- /dev/null +++ b/dist/package-shared/utils/backend/config/update-user-config.js @@ -0,0 +1,25 @@ +import fs from "fs"; +import grabDirNames from "../names/grab-dir-names"; +import grabConfig from "./grab-config"; +import _ from "lodash"; +export default function updateUserConfig({ newConfig, userId, }) { + if (!userId || !newConfig) { + return { + success: false, + msg: `UserID or newConfig not provided`, + }; + } + const { userConfigJSONFilePath } = grabDirNames({ + userId, + }); + if (!userConfigJSONFilePath || !fs.existsSync(userConfigJSONFilePath)) { + return { + success: false, + msg: `userConfigJSONFilePath not found!`, + }; + } + const { userConfig: existingUserConfig } = grabConfig({ userId }); + const updateConfig = _.merge(existingUserConfig, newConfig); + fs.writeFileSync(userConfigJSONFilePath, JSON.stringify(updateConfig), "utf-8"); + return { success: true }; +} diff --git a/dist/package-shared/utils/backend/export-mariadb-database.js b/dist/package-shared/utils/backend/export-mariadb-database.js 
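The three new config helpers above (grabConfig, grabMainConfig, updateUserConfig) form a small read-merge-write cycle over app-config.json and the per-user config.json. A minimal call-site sketch follows; the import paths, the user id, and the fallback value are assumptions for illustration, and it presumes DSQL_APP_DIR / DSQL_DATA_DIR are set so grabDirNames can resolve the files.

// Illustrative only: import specifiers assume the compiled "dist" layout
// shown in this patch; adjust them to however the package is consumed.
import grabMainConfig from "./dist/package-shared/utils/backend/config/grab-main-config";
import updateUserConfig from "./dist/package-shared/utils/backend/config/update-user-config";

const userId = 1; // hypothetical user

// App-level main config plus the per-user overlay
// (the user's config.json is created on first read if missing).
const { appMainConfig, userMainConfig } = grabMainConfig({ userId });
const maxBackups =
    userMainConfig?.max_backups?.value ??
    appMainConfig.max_backups?.value ??
    5; // fallback value assumed for the example

// Deep-merge a partial override into the user's config.json (lodash merge).
const res = updateUserConfig({
    userId,
    newConfig: { main: { max_backups: { value: maxBackups + 1 } } },
});
if (!res.success) console.log(res.msg);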
index 0010c01..7d877ec 100644 --- a/dist/package-shared/utils/backend/export-mariadb-database.js +++ b/dist/package-shared/utils/backend/export-mariadb-database.js @@ -1,13 +1,7 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = exportMariadbDatabase; -const child_process_1 = require("child_process"); -const os_1 = __importDefault(require("os")); -function exportMariadbDatabase({ dbFullName, targetFilePath, mariadbHost, mariadbPass, mariadbUser, }) { - const mysqlDumpPath = os_1.default.platform().match(/win/i) +import { execSync } from "child_process"; +import os from "os"; +export default function exportMariadbDatabase({ dbFullName, targetFilePath, mariadbHost, mariadbPass, mariadbUser, }) { + const mysqlDumpPath = os.platform().match(/win/i) ? "'" + "C:\\Program Files\\MySQL\\MySQL Server 8.0\\bin\\mysqldump.exe" + "'" @@ -19,6 +13,6 @@ function exportMariadbDatabase({ dbFullName, targetFilePath, mariadbHost, mariad let execSyncOptions = { encoding: "utf-8", }; - const dumpDb = (0, child_process_1.execSync)(cmd, execSyncOptions); + const dumpDb = execSync(cmd, execSyncOptions); return dumpDb; } diff --git a/dist/package-shared/utils/backend/global-db/DB_HANDLER.js b/dist/package-shared/utils/backend/global-db/DB_HANDLER.js index d098d54..ac1f7ff 100644 --- a/dist/package-shared/utils/backend/global-db/DB_HANDLER.js +++ b/dist/package-shared/utils/backend/global-db/DB_HANDLER.js @@ -1,42 +1,25 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = DB_HANDLER; -const grab_dsql_connection_1 = __importDefault(require("../../grab-dsql-connection")); +import grabDSQLConnection from "../../grab-dsql-connection"; /** * # DSQL user read-only DB handler * @requires DSQL_DB_CONN - Gobal Variable for Datasquirel Database */ -function DB_HANDLER(...args) { - return __awaiter(this, void 0, void 0, function* () { - var _a; - const CONNECTION = (0, grab_dsql_connection_1.default)(); - try { - if (!CONNECTION) - throw new Error("No Connection provided to DB_HANDLER function!"); - const results = yield CONNECTION.query(...args); - return JSON.parse(JSON.stringify(results)); - } - catch (error) { - (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `DB_HANDLER Error`, error); - return { - success: false, - error: error.message, - }; - } - finally { - yield (CONNECTION === null || CONNECTION === void 0 ? 
void 0 : CONNECTION.end()); - } - }); +export default async function DB_HANDLER(...args) { + var _a; + const CONNECTION = grabDSQLConnection(); + try { + if (!CONNECTION) + throw new Error("No Connection provided to DB_HANDLER function!"); + const results = await CONNECTION.query(...args); + return JSON.parse(JSON.stringify(results)); + } + catch (error) { + (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `DB_HANDLER Error`, error); + return { + success: false, + error: error.message, + }; + } + finally { + await (CONNECTION === null || CONNECTION === void 0 ? void 0 : CONNECTION.end()); + } } diff --git a/dist/package-shared/utils/backend/global-db/DSQL_USER_DB_HANDLER.js b/dist/package-shared/utils/backend/global-db/DSQL_USER_DB_HANDLER.js index 648a5cb..61ad5e4 100644 --- a/dist/package-shared/utils/backend/global-db/DSQL_USER_DB_HANDLER.js +++ b/dist/package-shared/utils/backend/global-db/DSQL_USER_DB_HANDLER.js @@ -1,38 +1,21 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = DSQL_USER_DB_HANDLER; -const conn_db_handler_1 = __importDefault(require("../../db/conn-db-handler")); -const grab_dsql_connection_1 = __importDefault(require("../../grab-dsql-connection")); +import connDbHandler from "../../db/conn-db-handler"; +import grabDSQLConnection from "../../grab-dsql-connection"; /** * # DSQL user read-only DB handler */ -function DSQL_USER_DB_HANDLER(_a) { - return __awaiter(this, arguments, void 0, function* ({ paradigm, queryString, queryValues, }) { - var _b; - const CONNECTION = paradigm == "Read Only" - ? (0, grab_dsql_connection_1.default)({ ro: true }) - : (0, grab_dsql_connection_1.default)({ fa: true }); - try { - return yield (0, conn_db_handler_1.default)(CONNECTION, queryString, queryValues); - } - catch (error) { - (_b = global.ERROR_CALLBACK) === null || _b === void 0 ? void 0 : _b.call(global, `DSQL_USER_DB_HANDLER Error`, error); - return null; - } - finally { - CONNECTION === null || CONNECTION === void 0 ? void 0 : CONNECTION.end(); - } - }); +export default async function DSQL_USER_DB_HANDLER({ paradigm, queryString, queryValues, }) { + var _a; + const CONNECTION = paradigm == "Read Only" + ? grabDSQLConnection({ ro: true }) + : grabDSQLConnection({ fa: true }); + try { + return await connDbHandler(CONNECTION, queryString, queryValues); + } + catch (error) { + (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `DSQL_USER_DB_HANDLER Error`, error); + return null; + } + finally { + CONNECTION === null || CONNECTION === void 0 ? 
void 0 : CONNECTION.end(); + } } diff --git a/dist/package-shared/utils/backend/global-db/LOCAL_DB_HANDLER.js b/dist/package-shared/utils/backend/global-db/LOCAL_DB_HANDLER.js index 61951bc..9e5786e 100644 --- a/dist/package-shared/utils/backend/global-db/LOCAL_DB_HANDLER.js +++ b/dist/package-shared/utils/backend/global-db/LOCAL_DB_HANDLER.js @@ -1,39 +1,22 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = LOCAL_DB_HANDLER; -const grab_dsql_connection_1 = __importDefault(require("../../grab-dsql-connection")); +import grabDSQLConnection from "../../grab-dsql-connection"; /** * # DSQL user read-only DB handler */ -function LOCAL_DB_HANDLER(...args) { - return __awaiter(this, void 0, void 0, function* () { - var _a; - const MASTER = (0, grab_dsql_connection_1.default)(); - try { - const results = yield MASTER.query(...args); - return JSON.parse(JSON.stringify(results)); - } - catch (error) { - (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `LOCAL_DB_HANDLER Error`, error); - return { - success: false, - error: error.message, - }; - } - finally { - yield (MASTER === null || MASTER === void 0 ? void 0 : MASTER.end()); - } - }); +export default async function LOCAL_DB_HANDLER(...args) { + var _a; + const MASTER = grabDSQLConnection(); + try { + const results = await MASTER.query(...args); + return JSON.parse(JSON.stringify(results)); + } + catch (error) { + (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `LOCAL_DB_HANDLER Error`, error); + return { + success: false, + error: error.message, + }; + } + finally { + await (MASTER === null || MASTER === void 0 ? void 0 : MASTER.end()); + } } diff --git a/dist/package-shared/utils/backend/global-db/NO_DB_HANDLER.js b/dist/package-shared/utils/backend/global-db/NO_DB_HANDLER.js index 497965e..70c0885 100644 --- a/dist/package-shared/utils/backend/global-db/NO_DB_HANDLER.js +++ b/dist/package-shared/utils/backend/global-db/NO_DB_HANDLER.js @@ -1,16 +1,10 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = NO_DB_HANDLER; -const grab_dsql_connection_1 = __importDefault(require("../../grab-dsql-connection")); +import grabDSQLConnection from "../../grab-dsql-connection"; /** * # DSQL user read-only DB handler */ -function NO_DB_HANDLER(...args) { +export default function NO_DB_HANDLER(...args) { var _a; - const CONNECTION = (0, grab_dsql_connection_1.default)(); + const CONNECTION = grabDSQLConnection(); try { return new Promise((resolve, reject) => { CONNECTION.query(...args) diff --git a/dist/package-shared/utils/backend/global-db/ROOT_DB_HANDLER.js b/dist/package-shared/utils/backend/global-db/ROOT_DB_HANDLER.js index a0fcba2..04444b9 100644 --- a/dist/package-shared/utils/backend/global-db/ROOT_DB_HANDLER.js +++ b/dist/package-shared/utils/backend/global-db/ROOT_DB_HANDLER.js @@ -1,16 +1,10 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = ROOT_DB_HANDLER; -const grab_dsql_connection_1 = __importDefault(require("../../grab-dsql-connection")); +import grabDSQLConnection from "../../grab-dsql-connection"; /** * # Root DB handler */ -function ROOT_DB_HANDLER(...args) { +export default function ROOT_DB_HANDLER(...args) { var _a; - const CONNECTION = (0, grab_dsql_connection_1.default)(); + const CONNECTION = grabDSQLConnection(); try { return new Promise((resolve, reject) => { CONNECTION.query(...args) diff --git a/dist/package-shared/utils/backend/grabDbSSL.js b/dist/package-shared/utils/backend/grabDbSSL.js index 2663703..cd5ae75 100644 --- a/dist/package-shared/utils/backend/grabDbSSL.js +++ b/dist/package-shared/utils/backend/grabDbSSL.js @@ -1,25 +1,19 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = grabDbSSL; -const fs_1 = __importDefault(require("fs")); +import fs from "fs"; /** * # Grall SSL */ -function grabDbSSL() { +export default function grabDbSSL() { const SSL_DIR = process.env.DSQL_SSL_DIR; if (!(SSL_DIR === null || SSL_DIR === void 0 ? void 0 : SSL_DIR.match(/./))) { return undefined; } const caFilePath = `${SSL_DIR}/ca-cert.pem`; - if (!fs_1.default.existsSync(caFilePath)) { + if (!fs.existsSync(caFilePath)) { console.log(`${caFilePath} does not exist`); return undefined; } return { - ca: fs_1.default.readFileSync(`${SSL_DIR}/ca-cert.pem`), + ca: fs.readFileSync(`${SSL_DIR}/ca-cert.pem`), // key: fs.readFileSync(`${SSL_DIR}/client-key.pem`), // cert: fs.readFileSync(`${SSL_DIR}/client-cert.pem`), rejectUnauthorized: false, diff --git a/dist/package-shared/utils/backend/import-mariadb-database.js b/dist/package-shared/utils/backend/import-mariadb-database.js index 27a2bbc..f6989ab 100644 --- a/dist/package-shared/utils/backend/import-mariadb-database.js +++ b/dist/package-shared/utils/backend/import-mariadb-database.js @@ -1,37 +1,20 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = importMariadbDatabase; -const child_process_1 = require("child_process"); -const os_1 = __importDefault(require("os")); -const conn_db_handler_1 = __importDefault(require("../db/conn-db-handler")); -function importMariadbDatabase(_a) { - return __awaiter(this, arguments, void 0, function* ({ dbFullName, targetFilePath, mariadbHost, mariadbPass, mariadbUser, }) { - const mysqlPath = os_1.default.platform().match(/win/i) - ? "'" + - "C:\\Program Files\\MySQL\\MySQL Server 8.0\\bin\\mysql.exe" + - "'" - : "mysql"; - const finalMariadbUser = mariadbUser || process.env.DSQL_DB_USERNAME; - const finalMariadbHost = mariadbHost || process.env.DSQL_DB_HOST; - const finalMariadbPass = mariadbPass || process.env.DSQL_DB_PASSWORD; - yield (0, conn_db_handler_1.default)(global.DSQL_DB_CONN, `CREATE DATABASE IF NOT EXISTS ${dbFullName}`); - const cmd = `${mysqlPath} -u ${finalMariadbUser} -h ${finalMariadbHost} -p"${finalMariadbPass}" ${dbFullName} < ${targetFilePath}`; - let execSyncOptions = { - encoding: "utf-8", - }; - const importDb = (0, child_process_1.execSync)(cmd, execSyncOptions); - return importDb; - }); +import { execSync } from "child_process"; +import os from "os"; +import connDbHandler from "../db/conn-db-handler"; +export default async function importMariadbDatabase({ dbFullName, targetFilePath, mariadbHost, mariadbPass, mariadbUser, }) { + const mysqlPath = os.platform().match(/win/i) + ? 
"'" + + "C:\\Program Files\\MySQL\\MySQL Server 8.0\\bin\\mysql.exe" + + "'" + : "mysql"; + const finalMariadbUser = mariadbUser || process.env.DSQL_DB_USERNAME; + const finalMariadbHost = mariadbHost || process.env.DSQL_DB_HOST; + const finalMariadbPass = mariadbPass || process.env.DSQL_DB_PASSWORD; + await connDbHandler(global.DSQL_DB_CONN, `CREATE DATABASE IF NOT EXISTS ${dbFullName}`); + const cmd = `${mysqlPath} -u ${finalMariadbUser} -h ${finalMariadbHost} -p"${finalMariadbPass}" ${dbFullName} < ${targetFilePath}`; + let execSyncOptions = { + encoding: "utf-8", + }; + const importDb = execSync(cmd, execSyncOptions); + return importDb; } diff --git a/dist/package-shared/utils/backend/names/grab-db-full-name.d.ts b/dist/package-shared/utils/backend/names/grab-db-full-name.d.ts deleted file mode 100644 index 15f4413..0000000 --- a/dist/package-shared/utils/backend/names/grab-db-full-name.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { DATASQUIREL_LoggedInUser, UserType } from "../../../types"; -type Param = { - user?: DATASQUIREL_LoggedInUser | UserType; - userId?: string | number | null; - dbSlug?: string; -}; -export default function grabUserDbFullName({ dbSlug, user, userId }: Param): string; -export {}; diff --git a/dist/package-shared/utils/backend/names/grab-db-full-name.js b/dist/package-shared/utils/backend/names/grab-db-full-name.js deleted file mode 100644 index 802b6e0..0000000 --- a/dist/package-shared/utils/backend/names/grab-db-full-name.js +++ /dev/null @@ -1,12 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = grabUserDbFullName; -function grabUserDbFullName({ dbSlug, user, userId }) { - const finalUserId = (user === null || user === void 0 ? void 0 : user.id) || userId; - if (!finalUserId || !dbSlug) - throw new Error(`Couldn't grab full DB name. 
Missing parameters finalUserId || dbSlug`); - if (dbSlug.match(/[^a-zA-Z0-9-_]/)) { - throw new Error(`Invalid Database slug`); - } - return `datasquirel_user_${finalUserId}_${dbSlug}`; -} diff --git a/dist/package-shared/utils/backend/names/grab-dir-names.d.ts b/dist/package-shared/utils/backend/names/grab-dir-names.d.ts index ad42f56..2bf99fd 100644 --- a/dist/package-shared/utils/backend/names/grab-dir-names.d.ts +++ b/dist/package-shared/utils/backend/names/grab-dir-names.d.ts @@ -3,17 +3,19 @@ type Param = { user?: DATASQUIREL_LoggedInUser | UserType; userId?: string | number | null; appDir?: string; + dataDir?: string; }; export default function grabDirNames(param?: Param): { appDir: string; - schemasDir: string; - userDirPath: string | undefined; + privateDataDir: string; + oldSchemasDir: string; + userConfigJSONFilePath: string | undefined; mainShemaJSONFilePath: string; mainDbTypeDefFile: string; tempDirName: string; defaultTableFieldsJSONFilePath: string; usersSchemaDir: string; - targetUserSchemaDir: string | undefined; + targetUserPrivateDir: string | undefined; userSchemaMainJSONFilePath: string | undefined; userPrivateMediaDir: string | undefined; userPrivateExportsDir: string | undefined; @@ -36,5 +38,32 @@ export default function grabDirNames(param?: Param): { testEnvFile: string; userPublicMediaDir: string | undefined; userTempSQLFilePath: string | undefined; + STATIC_ROOT: string; + appConfigJSONFile: string; + appConfigDir: string; + mariadbMainConfigDir: string; + mariadbMainConfigFile: string; + maxscaleConfigDir: string; + mariadbReplicaConfigDir: string; + DATA_DIR: string; + publicDir: string; + publicSSLDir: string; + appSSLDir: string; + maxscaleConfigFile: string; + mariadbReplicaConfigFile: string; + mainSSLDir: string; + mainDbDataDir: string; + replica1DbDataDir: string; + galeraConfigFile: string; + galeraReplicaConfigFile: string; + dbDockerComposeFile: string; + dbDockerComposeFileAlt: string; + mainDbGrastateDatFile: string; + appSchemaJSONFile: string; + mainBackupDir: string; + userBackupDir: string | undefined; + sqlBackupDirName: string; + schemasBackupDirName: string; + userMainShemaJSONFilePath: string | undefined; }; export {}; diff --git a/dist/package-shared/utils/backend/names/grab-dir-names.js b/dist/package-shared/utils/backend/names/grab-dir-names.js index 6c35f35..5e39253 100644 --- a/dist/package-shared/utils/backend/names/grab-dir-names.js +++ b/dist/package-shared/utils/backend/names/grab-dir-names.js @@ -1,86 +1,125 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = grabDirNames; -const path_1 = __importDefault(require("path")); -function grabDirNames(param) { +import path from "path"; +export default function grabDirNames(param) { var _a; const appDir = (param === null || param === void 0 ? void 0 : param.appDir) || process.env.DSQL_APP_DIR; - const STATIC_ROOT = process.env.DSQL_STATIC_SERVER_DIR || "/static"; + const DATA_DIR = (param === null || param === void 0 ? void 0 : param.dataDir) || process.env.DSQL_DATA_DIR || "/data"; const finalUserId = ((_a = param === null || param === void 0 ? void 0 : param.user) === null || _a === void 0 ? void 0 : _a.id) || (param === null || param === void 0 ? 
void 0 : param.userId); - const publicImagesDir = path_1.default.join(STATIC_ROOT, `images`); if (!appDir) throw new Error("Please provide the `DSQL_APP_DIR` env variable."); - const schemasDir = process.env.DSQL_DB_SCHEMA_DIR || - path_1.default.join(appDir, "jsonData", "dbSchemas"); + if (!DATA_DIR) + throw new Error("Please provide the `DATA_DIR` env variable."); + const STATIC_ROOT = path.join(DATA_DIR, "static"); + const publicImagesDir = path.join(STATIC_ROOT, `images`); + const publicDir = path.join(appDir, "public"); + const publicSSLDir = path.join(publicDir, "documents", "ssl"); + const appSSLDir = path.join(appDir, "ssl"); + const mainSSLDir = path.join(DATA_DIR, "ssl"); + const privateDataDir = path.join(DATA_DIR, "private"); + /** + * # DB Dir names + * @description Database related Directories + */ + const mainDbDataDir = path.join(DATA_DIR, "db"); + const mainDbGrastateDatFile = path.join(mainDbDataDir, "grastate.dat"); + const replica1DbDataDir = path.join(DATA_DIR, "replica-1"); + const mariadbMainConfigDir = path.join(DATA_DIR, "db-config", "main"); + const mariadbReplicaConfigDir = path.join(DATA_DIR, "db-config", "replica"); + const maxscaleConfigDir = path.join(DATA_DIR, "db-config", "maxscale"); + const mariadbMainConfigFile = path.join(mariadbMainConfigDir, "default.cnf"); + const mariadbReplicaConfigFile = path.join(mariadbReplicaConfigDir, "default.cnf"); + const galeraConfigFile = path.join(mariadbMainConfigDir, "galera.cnf"); + const galeraReplicaConfigFile = path.join(mariadbReplicaConfigDir, "galera.cnf"); + const maxscaleConfigFile = path.join(maxscaleConfigDir, "maxscale.cnf"); + /** + * # Schema Dir names + * @description + */ + const oldSchemasDir = path.join(appDir, "jsonData", "dbSchemas"); + const appSchemaJSONFile = path.join(oldSchemasDir, "1.json"); const tempDirName = ".tmp"; - if (!schemasDir) + const appConfigDir = path.join(appDir, "jsonData", "config"); + const appConfigJSONFile = path.join(appConfigDir, "app-config.json"); + if (!privateDataDir) throw new Error("Please provide the `DSQL_DB_SCHEMA_DIR` env variable."); - const pakageSharedDir = path_1.default.join(appDir, `package-shared`); - const mainDbTypeDefFile = path_1.default.join(pakageSharedDir, `types/dsql.ts`); - const mainShemaJSONFilePath = path_1.default.join(schemasDir, `main.json`); - const defaultTableFieldsJSONFilePath = path_1.default.join(pakageSharedDir, `data/defaultFields.json`); - const usersSchemaDir = path_1.default.join(schemasDir, `users`); - const targetUserSchemaDir = finalUserId - ? path_1.default.join(usersSchemaDir, `user-${finalUserId}`) + const pakageSharedDir = path.join(appDir, `package-shared`); + const mainDbTypeDefFile = path.join(pakageSharedDir, `types/dsql.ts`); + const mainShemaJSONFilePath = path.join(oldSchemasDir, `main.json`); + const defaultTableFieldsJSONFilePath = path.join(pakageSharedDir, `data/defaultFields.json`); + const usersSchemaDir = path.join(privateDataDir, `users`); + const targetUserPrivateDir = finalUserId + ? path.join(usersSchemaDir, `user-${finalUserId}`) : undefined; - const userTempSQLFilePath = targetUserSchemaDir - ? path_1.default.join(targetUserSchemaDir, `tmp.sql`) + const userTempSQLFilePath = targetUserPrivateDir + ? path.join(targetUserPrivateDir, `tmp.sql`) : undefined; - const userDirPath = finalUserId - ? path_1.default.join(usersSchemaDir, `user-${finalUserId}`) + const userMainShemaJSONFilePath = targetUserPrivateDir + ? 
path.join(targetUserPrivateDir, `main.json`) : undefined; - const userSchemaMainJSONFilePath = userDirPath - ? path_1.default.join(userDirPath, `main.json`) + const userConfigJSONFilePath = targetUserPrivateDir + ? path.join(targetUserPrivateDir, `config.json`) : undefined; - const userPrivateMediaDir = userDirPath - ? path_1.default.join(userDirPath, `media`) + const userSchemaMainJSONFilePath = targetUserPrivateDir + ? path.join(targetUserPrivateDir, `main.json`) : undefined; - const userPrivateExportsDir = userDirPath - ? path_1.default.join(userDirPath, `export`) + const userPrivateMediaDir = targetUserPrivateDir + ? path.join(targetUserPrivateDir, `media`) + : undefined; + const userPrivateExportsDir = targetUserPrivateDir + ? path.join(targetUserPrivateDir, `export`) : undefined; const userPrivateSQLExportsDir = userPrivateExportsDir - ? path_1.default.join(userPrivateExportsDir, `sql`) + ? path.join(userPrivateExportsDir, `sql`) : undefined; const userPrivateTempSQLExportsDir = userPrivateSQLExportsDir - ? path_1.default.join(userPrivateSQLExportsDir, tempDirName) + ? path.join(userPrivateSQLExportsDir, tempDirName) : undefined; const userPrivateTempJSONSchemaFilePath = userPrivateTempSQLExportsDir - ? path_1.default.join(userPrivateTempSQLExportsDir, `schema.json`) + ? path.join(userPrivateTempSQLExportsDir, `schema.json`) : undefined; const userPrivateDbExportZipFileName = `db-export.zip`; const userPrivateDbExportZipFilePath = userPrivateSQLExportsDir - ? path_1.default.join(userPrivateSQLExportsDir, userPrivateDbExportZipFileName) + ? path.join(userPrivateSQLExportsDir, userPrivateDbExportZipFileName) : undefined; const userPublicMediaDir = finalUserId - ? path_1.default.join(publicImagesDir, `user-images/user-${finalUserId}`) + ? path.join(publicImagesDir, `user-images/user-${finalUserId}`) : undefined; const userPrivateDbImportZipFileName = `db-export.zip`; const userPrivateDbImportZipFilePath = userPrivateSQLExportsDir - ? path_1.default.join(userPrivateSQLExportsDir, userPrivateDbImportZipFileName) + ? 
path.join(userPrivateSQLExportsDir, userPrivateDbImportZipFileName) : undefined; - const dbNginxLoadBalancerConfigFile = path_1.default.join(appDir, "docker/services/mariadb/load-balancer/config/template/nginx.conf"); - let dockerComposeFile = path_1.default.join(appDir, "docker-compose.yml"); - let dockerComposeFileAlt = path_1.default.join(appDir, "docker-compose.yaml"); - const testDockerComposeFile = path_1.default.join(appDir, "test.docker-compose.yml"); - const testDockerComposeFileAlt = path_1.default.join(appDir, "test.docker-compose.yaml"); - const extraDockerComposeFile = path_1.default.join(appDir, "extra.docker-compose.yml"); - const extraDockerComposeFileAlt = path_1.default.join(appDir, "extra.docker-compose.yaml"); - const siteSetupFile = path_1.default.join(appDir, "site-setup.json"); - const envFile = path_1.default.join(appDir, ".env"); - const testEnvFile = path_1.default.join(appDir, "test.env"); + const dbNginxLoadBalancerConfigFile = path.join(appDir, "docker/services/mariadb/load-balancer/config/template/nginx.conf"); + let dockerComposeFile = path.join(appDir, "docker-compose.yml"); + let dockerComposeFileAlt = path.join(appDir, "docker-compose.yaml"); + const testDockerComposeFile = path.join(appDir, "test.docker-compose.yml"); + const testDockerComposeFileAlt = path.join(appDir, "test.docker-compose.yaml"); + const dbDockerComposeFile = path.join(appDir, "db.docker-compose.yml"); + const dbDockerComposeFileAlt = path.join(appDir, "db.docker-compose.yaml"); + const extraDockerComposeFile = path.join(appDir, "extra.docker-compose.yml"); + const extraDockerComposeFileAlt = path.join(appDir, "extra.docker-compose.yaml"); + const siteSetupFile = path.join(appDir, "site-setup.json"); + const envFile = path.join(appDir, ".env"); + const testEnvFile = path.join(appDir, "test.env"); + /** + * # Backup Dir names + * @description + */ + const mainBackupDir = path.join(DATA_DIR, "backups"); + const userBackupDir = targetUserPrivateDir + ? 
path.join(targetUserPrivateDir, `backups`) + : undefined; + const sqlBackupDirName = `sql`; + const schemasBackupDirName = `schema`; return { appDir, - schemasDir, - userDirPath, + privateDataDir, + oldSchemasDir, + userConfigJSONFilePath, mainShemaJSONFilePath, mainDbTypeDefFile, tempDirName, defaultTableFieldsJSONFilePath, usersSchemaDir, - targetUserSchemaDir, + targetUserPrivateDir, userSchemaMainJSONFilePath, userPrivateMediaDir, userPrivateExportsDir, @@ -103,5 +142,32 @@ function grabDirNames(param) { testEnvFile, userPublicMediaDir, userTempSQLFilePath, + STATIC_ROOT, + appConfigJSONFile, + appConfigDir, + mariadbMainConfigDir, + mariadbMainConfigFile, + maxscaleConfigDir, + mariadbReplicaConfigDir, + DATA_DIR, + publicDir, + publicSSLDir, + appSSLDir, + maxscaleConfigFile, + mariadbReplicaConfigFile, + mainSSLDir, + mainDbDataDir, + replica1DbDataDir, + galeraConfigFile, + galeraReplicaConfigFile, + dbDockerComposeFile, + dbDockerComposeFileAlt, + mainDbGrastateDatFile, + appSchemaJSONFile, + mainBackupDir, + userBackupDir, + sqlBackupDirName, + schemasBackupDirName, + userMainShemaJSONFilePath, }; } diff --git a/dist/package-shared/utils/backend/names/grab-ip-addresses.d.ts b/dist/package-shared/utils/backend/names/grab-ip-addresses.d.ts new file mode 100644 index 0000000..bd1ffa0 --- /dev/null +++ b/dist/package-shared/utils/backend/names/grab-ip-addresses.d.ts @@ -0,0 +1,6 @@ +export default function grabIPAddresses(): { + webAppIP: string; + appCronIP: string; + maxScaleIP: string; + globalIPPrefix: string; +}; diff --git a/dist/package-shared/utils/backend/names/grab-ip-addresses.js b/dist/package-shared/utils/backend/names/grab-ip-addresses.js new file mode 100644 index 0000000..a818fef --- /dev/null +++ b/dist/package-shared/utils/backend/names/grab-ip-addresses.js @@ -0,0 +1,9 @@ +import grabDockerResourceIPNumbers from "../../grab-docker-resource-ip-numbers"; +export default function grabIPAddresses() { + const globalIPPrefix = process.env.DSQL_NETWORK_IP_PREFIX || "172.72.0"; + const { cron, db, maxscale, postDbSetup, web } = grabDockerResourceIPNumbers(); + const webAppIP = `${globalIPPrefix}.${web}`; + const appCronIP = `${globalIPPrefix}.${cron}`; + const maxScaleIP = `${globalIPPrefix}.${maxscale}`; + return { webAppIP, appCronIP, maxScaleIP, globalIPPrefix }; +} diff --git a/dist/package-shared/utils/backend/names/replace-datasquirel-db-name.js b/dist/package-shared/utils/backend/names/replace-datasquirel-db-name.js index 9a1d93d..01346c3 100644 --- a/dist/package-shared/utils/backend/names/replace-datasquirel-db-name.js +++ b/dist/package-shared/utils/backend/names/replace-datasquirel-db-name.js @@ -1,7 +1,4 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = replaceDatasquirelDbName; -function replaceDatasquirelDbName({ str, userId, }) { +export default function replaceDatasquirelDbName({ str, userId, }) { const dbNamePrefix = process.env.DSQL_USER_DB_PREFIX; const userNameRegex = new RegExp(`${dbNamePrefix}\\d+_`, "g"); const newPrefix = `${dbNamePrefix}${userId}_`; diff --git a/dist/package-shared/utils/backend/parseCookies.js b/dist/package-shared/utils/backend/parseCookies.js index 81d0e03..f9ac5c6 100644 --- a/dist/package-shared/utils/backend/parseCookies.js +++ b/dist/package-shared/utils/backend/parseCookies.js @@ -1,6 +1,3 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = parseCookies; /** * Parse request cookies * 
=================================================== @@ -8,7 +5,7 @@ exports.default = parseCookies; * @description This function takes in a request object and * returns the cookies as a JS object */ -function parseCookies({ request, cookieString, }) { +export default function parseCookies({ request, cookieString, }) { var _a; try { /** @type {string | undefined} */ diff --git a/dist/package-shared/utils/camelJoinedtoCamelSpace.js b/dist/package-shared/utils/camelJoinedtoCamelSpace.js index 239bfc5..d3d3f7b 100644 --- a/dist/package-shared/utils/camelJoinedtoCamelSpace.js +++ b/dist/package-shared/utils/camelJoinedtoCamelSpace.js @@ -1,13 +1,10 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = camelJoinedtoCamelSpace; /** * Convert Camel Joined Text to Camel Spaced Text * ============================================================================== * @description this function takes a camel cased text without spaces, and returns * a camel-case-spaced text */ -function camelJoinedtoCamelSpace(text) { +export default function camelJoinedtoCamelSpace(text) { if (!(text === null || text === void 0 ? void 0 : text.match(/./))) { return ""; } diff --git a/dist/package-shared/utils/check-if-is-master.js b/dist/package-shared/utils/check-if-is-master.js index 21e9e20..5f13010 100644 --- a/dist/package-shared/utils/check-if-is-master.js +++ b/dist/package-shared/utils/check-if-is-master.js @@ -1,7 +1,4 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = checkIfIsMaster; -function checkIfIsMaster({ dbContext, dbFullName }) { +export default function checkIfIsMaster({ dbContext, dbFullName }) { return (dbContext === null || dbContext === void 0 ? void 0 : dbContext.match(/dsql.user/i)) ? 
false : global.DSQL_USE_LOCAL diff --git a/dist/package-shared/utils/console-colors.js b/dist/package-shared/utils/console-colors.js index f44e56d..e11e857 100644 --- a/dist/package-shared/utils/console-colors.js +++ b/dist/package-shared/utils/console-colors.js @@ -1,6 +1,3 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.ccol = void 0; const consoleColors = { Reset: "\x1b[0m", Bright: "\x1b[1m", @@ -28,5 +25,5 @@ const consoleColors = { BgWhite: "\x1b[47m", BgGray: "\x1b[100m", }; -exports.default = consoleColors; -exports.ccol = consoleColors; +export default consoleColors; +export const ccol = consoleColors; diff --git a/dist/package-shared/utils/cookies-actions.d.ts b/dist/package-shared/utils/cookies-actions.d.ts new file mode 100644 index 0000000..8e4069d --- /dev/null +++ b/dist/package-shared/utils/cookies-actions.d.ts @@ -0,0 +1,7 @@ +import * as http from "http"; +import { CookieOptions } from "../types"; +import { CookieNames } from "../dict/cookie-names"; +export declare function setCookie(res: http.ServerResponse, name: (typeof CookieNames)[keyof typeof CookieNames], value: string, options?: CookieOptions): void; +export declare function getCookie(req: http.IncomingMessage, name: (typeof CookieNames)[keyof typeof CookieNames]): string | null; +export declare function updateCookie(res: http.ServerResponse, name: (typeof CookieNames)[keyof typeof CookieNames], value: string, options?: CookieOptions): void; +export declare function deleteCookie(res: http.ServerResponse, name: (typeof CookieNames)[keyof typeof CookieNames], options?: CookieOptions): void; diff --git a/dist/package-shared/utils/cookies-actions.js b/dist/package-shared/utils/cookies-actions.js new file mode 100644 index 0000000..ca1bc11 --- /dev/null +++ b/dist/package-shared/utils/cookies-actions.js @@ -0,0 +1,43 @@ +export function setCookie(res, name, value, options = {}) { + const cookieParts = [ + `${encodeURIComponent(name)}=${encodeURIComponent(value)}`, + ]; + if (options.expires) { + cookieParts.push(`Expires=${options.expires.toUTCString()}`); + } + if (options.maxAge !== undefined) { + cookieParts.push(`Max-Age=${options.maxAge}`); + } + if (options.path) { + cookieParts.push(`Path=${options.path}`); + } + if (options.domain) { + cookieParts.push(`Domain=${options.domain}`); + } + if (options.secure) { + cookieParts.push("Secure"); + } + if (options.httpOnly) { + cookieParts.push("HttpOnly"); + } + res.setHeader("Set-Cookie", cookieParts.join("; ")); +} +export function getCookie(req, name) { + const cookieHeader = req.headers.cookie; + if (!cookieHeader) + return null; + const cookies = cookieHeader + .split(";") + .reduce((acc, cookie) => { + const [key, val] = cookie.trim().split("=").map(decodeURIComponent); + acc[key] = val; + return acc; + }, {}); + return cookies[name] || null; +} +export function updateCookie(res, name, value, options = {}) { + setCookie(res, name, value, options); +} +export function deleteCookie(res, name, options = {}) { + setCookie(res, name, "", Object.assign(Object.assign({}, options), { expires: new Date(0), maxAge: 0 })); +} diff --git a/dist/package-shared/utils/create-user-sql-user.d.ts b/dist/package-shared/utils/create-user-sql-user.d.ts new file mode 100644 index 0000000..9325dee --- /dev/null +++ b/dist/package-shared/utils/create-user-sql-user.d.ts @@ -0,0 +1,7 @@ +import { UserType } from "../types"; +export default function createUserSQLUser(user: UserType): Promise<{ + fullName: string; + host: string; + username: 
string; + password: string; +}>; diff --git a/dist/package-shared/utils/create-user-sql-user.js b/dist/package-shared/utils/create-user-sql-user.js new file mode 100644 index 0000000..ea4369d --- /dev/null +++ b/dist/package-shared/utils/create-user-sql-user.js @@ -0,0 +1,41 @@ +import { generate } from "generate-password"; +import dbHandler from "../functions/backend/dbHandler"; +import dsqlCrud from "./data-fetching/crud"; +import encrypt from "../functions/dsql/encrypt"; +import grabUserMainSqlUserName from "./grab-user-main-sql-user-name"; +import grabDbNames from "./grab-db-names"; +import { createNewSQLUser } from "../functions/web-app/mariadb-user/handle-mariadb-user-creation"; +export default async function createUserSQLUser(user) { + const { fullName, host, username: mariaDBUsername, webHost, } = grabUserMainSqlUserName({ user }); + const { userDbPrefix } = grabDbNames({ user }); + await dbHandler({ + query: `DROP USER IF EXISTS '${mariaDBUsername}'@'${webHost}'`, + noErrorLogs: true, + }); + const newPassword = generate({ length: 32 }); + await createNewSQLUser({ + host: webHost, + password: newPassword, + username: mariaDBUsername, + }); + const updateWebHostGrants = (await dbHandler({ + query: `GRANT ALL PRIVILEGES ON \`${userDbPrefix.replace(/\_/g, "\\_")}%\`.* TO '${mariaDBUsername}'@'${webHost}'`, + })); + const updateUser = await dsqlCrud({ + action: "update", + table: "users", + targetField: "id", + targetValue: user.id, + data: { + mariadb_host: webHost, + mariadb_pass: encrypt({ data: newPassword }) || undefined, + mariadb_user: mariaDBUsername, + }, + }); + return { + fullName, + host, + username: mariaDBUsername, + password: newPassword, + }; +} diff --git a/dist/package-shared/utils/data-fetching/crud-get.d.ts b/dist/package-shared/utils/data-fetching/crud-get.d.ts index 1de729b..5224f5d 100644 --- a/dist/package-shared/utils/data-fetching/crud-get.d.ts +++ b/dist/package-shared/utils/data-fetching/crud-get.d.ts @@ -1,3 +1,6 @@ -import { DsqlCrudParam } from "../../types"; -import { DsqlCrudReturn } from "./crud"; -export default function dsqlCrudGet({ table, query, count, countOnly, }: DsqlCrudParam): Promise; +import { APIResponseObject, DsqlCrudParam } from "../../types"; +export default function ({ table, query, count, countOnly, dbFullName, }: Omit, "action" | "data" | "sanitize">): Promise; diff --git a/dist/package-shared/utils/data-fetching/crud-get.js b/dist/package-shared/utils/data-fetching/crud-get.js index 59ce356..8bbb62f 100644 --- a/dist/package-shared/utils/data-fetching/crud-get.js +++ b/dist/package-shared/utils/data-fetching/crud-get.js @@ -1,70 +1,60 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); +import sqlGenerator from "../../functions/dsql/sql/sql-generator"; +import connDbHandler from "../db/conn-db-handler"; +export default async function ({ table, query, count, countOnly, dbFullName, }) { + var _a, _b, _c, _d; + let queryObject; + queryObject = sqlGenerator({ + tableName: table, + genObject: query, + dbFullName, }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = dsqlCrudGet; -const sql_generator_1 = __importDefault(require("../../functions/dsql/sql/sql-generator")); -const conn_db_handler_1 = __importDefault(require("../db/conn-db-handler")); -function dsqlCrudGet(_a) { - return __awaiter(this, arguments, void 0, function* ({ table, query, count, countOnly, }) { - var _b, _c, _d, _e; - let queryObject; - queryObject = (0, sql_generator_1.default)({ + const DB_CONN = global.DSQL_READ_ONLY_DB_CONN || global.DSQL_DB_CONN; + let connQueries = [ + { + query: queryObject === null || queryObject === void 0 ? void 0 : queryObject.string, + values: (queryObject === null || queryObject === void 0 ? void 0 : queryObject.values) || [], + }, + ]; + const countQueryObject = count || countOnly + ? sqlGenerator({ tableName: table, genObject: query, + count: true, + dbFullName, + }) + : undefined; + if (count && countQueryObject) { + connQueries.push({ + query: countQueryObject.string, + values: countQueryObject.values, }); - const DB_CONN = global.DSQL_READ_ONLY_DB_CONN || global.DSQL_DB_CONN; - let connQueries = [ + } + else if (countOnly && countQueryObject) { + connQueries = [ { - query: queryObject === null || queryObject === void 0 ? void 0 : queryObject.string, - values: (queryObject === null || queryObject === void 0 ? void 0 : queryObject.values) || [], - }, - ]; - const countQueryObject = count || countOnly - ? (0, sql_generator_1.default)({ - tableName: table, - genObject: query, - count: true, - }) - : undefined; - if (count && countQueryObject) { - connQueries.push({ query: countQueryObject.string, values: countQueryObject.values, - }); - } - else if (countOnly && countQueryObject) { - connQueries = [ - { - query: countQueryObject.string, - values: countQueryObject.values, - }, - ]; - } - const res = yield (0, conn_db_handler_1.default)(DB_CONN, connQueries); - const isSuccess = Array.isArray(res) && Array.isArray(res[0]); - return { - success: isSuccess, - payload: isSuccess ? (countOnly ? null : res[0]) : null, - error: isSuccess ? undefined : res === null || res === void 0 ? void 0 : res.error, - queryObject, - count: isSuccess - ? ((_c = (_b = res[1]) === null || _b === void 0 ? void 0 : _b[0]) === null || _c === void 0 ? void 0 : _c["COUNT(*)"]) - ? res[1][0]["COUNT(*)"] - : ((_e = (_d = res[0]) === null || _d === void 0 ? void 0 : _d[0]) === null || _e === void 0 ? void 0 : _e["COUNT(*)"]) - ? res[0][0]["COUNT(*)"] - : undefined - : undefined, - }; - }); + }, + ]; + } + const res = await connDbHandler(DB_CONN, connQueries); + const isSuccess = Array.isArray(res) && Array.isArray(res[0]); + return { + success: isSuccess, + payload: isSuccess ? (countOnly ? null : res[0]) : null, + batchPayload: isSuccess ? (countOnly ? null : res) : null, + error: isSuccess ? undefined : res === null || res === void 0 ? 
void 0 : res.error, + errors: res === null || res === void 0 ? void 0 : res.errors, + queryObject: { + sql: queryObject === null || queryObject === void 0 ? void 0 : queryObject.string, + params: queryObject === null || queryObject === void 0 ? void 0 : queryObject.values, + }, + count: isSuccess + ? ((_b = (_a = res[1]) === null || _a === void 0 ? void 0 : _a[0]) === null || _b === void 0 ? void 0 : _b["COUNT(*)"]) + ? res[1][0]["COUNT(*)"] + : ((_d = (_c = res[0]) === null || _c === void 0 ? void 0 : _c[0]) === null || _d === void 0 ? void 0 : _d["COUNT(*)"]) + ? res[0][0]["COUNT(*)"] + : undefined + : undefined, + }; } diff --git a/dist/package-shared/utils/data-fetching/crud.d.ts b/dist/package-shared/utils/data-fetching/crud.d.ts index c09b5dd..dcd5d18 100644 --- a/dist/package-shared/utils/data-fetching/crud.d.ts +++ b/dist/package-shared/utils/data-fetching/crud.d.ts @@ -1,12 +1,6 @@ -import sqlGenerator from "../../functions/dsql/sql/sql-generator"; -import { DsqlCrudParam, PostReturn } from "../../types"; -export type DsqlCrudReturn = (PostReturn & { - queryObject?: ReturnType>; - count?: number; - batchPayload?: any[][] | null; -}) | null; +import { APIResponseObject, DsqlCrudParam } from "../../types"; export default function dsqlCrud(params: DsqlCrudParam): Promise; +}, K extends string = string>(params: DsqlCrudParam): Promise; diff --git a/dist/package-shared/utils/data-fetching/crud.js b/dist/package-shared/utils/data-fetching/crud.js index abb4550..68b6fef 100644 --- a/dist/package-shared/utils/data-fetching/crud.js +++ b/dist/package-shared/utils/data-fetching/crud.js @@ -1,63 +1,61 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = dsqlCrud; -const post_1 = __importDefault(require("../../actions/post")); -// import dsqlCrudBatchGet from "./crud-batch-get"; -const crud_get_1 = __importDefault(require("./crud-get")); -function dsqlCrud(params) { - return __awaiter(this, void 0, void 0, function* () { - const { action, data, table, targetValue, sanitize, targetField, targetId, } = params; - const finalData = sanitize ? sanitize(data) : data; - switch (action) { - case "get": - return yield (0, crud_get_1.default)(params); - // case "batch-get": - // return await dsqlCrudBatchGet(params); - case "insert": - return yield (0, post_1.default)({ - query: { - action: "insert", - table, - data: finalData, - }, - forceLocal: true, - }); - case "update": - data === null || data === void 0 ? 
true : delete data.id; - return yield (0, post_1.default)({ - query: { - action: "update", - table, - identifierColumnName: targetField || "id", - identifierValue: String(targetValue || targetId), - data: finalData, - }, - forceLocal: true, - }); - case "delete": - return yield (0, post_1.default)({ - query: { - action: "delete", - table, - identifierColumnName: targetField || "id", - identifierValue: String(targetValue || targetId), - }, - forceLocal: true, - }); - default: - return null; - } - }); +import sqlDeleteGenerator from "../../functions/dsql/sql/sql-delete-generator"; +import dsqlCrudGet from "./crud-get"; +import connDbHandler from "../db/conn-db-handler"; +import addDbEntry from "../../functions/backend/db/addDbEntry"; +import updateDbEntry from "../../functions/backend/db/updateDbEntry"; +export default async function dsqlCrud(params) { + const { action, data, table, targetValue, sanitize, targetField, targetId, dbFullName, deleteData, batchData, deleteKeyValues, } = params; + const finalData = (sanitize ? sanitize({ data }) : data); + const finalBatchData = (sanitize ? sanitize({ batchData }) : batchData); + const DB_CONN = global.DSQL_READ_ONLY_DB_CONN || global.DSQL_DB_CONN; + switch (action) { + case "get": + return await dsqlCrudGet(params); + // case "batch-get": + // return await dsqlCrudBatchGet(params); + case "insert": + const INSERT_RESULT = await addDbEntry({ + data: finalData, + batchData: finalBatchData, + tableName: table, + dbFullName, + }); + return INSERT_RESULT; + case "update": + data === null || data === void 0 ? true : delete data.id; + const UPDATE_RESULT = await updateDbEntry({ + data: finalData, + tableName: table, + dbFullName, + identifierColumnName: (targetField || "id"), + identifierValue: String(targetValue || targetId), + }); + return UPDATE_RESULT; + case "delete": + const deleteQuery = sqlDeleteGenerator({ + data: targetId + ? { id: targetId } + : targetField && targetValue + ? { [targetField]: targetValue } + : deleteData, + tableName: table, + dbFullName, + deleteKeyValues, + }); + const res = (await connDbHandler(DB_CONN, deleteQuery === null || deleteQuery === void 0 ? void 0 : deleteQuery.query, deleteQuery === null || deleteQuery === void 0 ? void 0 : deleteQuery.values)); + return { + success: Boolean(res.affectedRows), + payload: res, + queryObject: { + sql: (deleteQuery === null || deleteQuery === void 0 ? void 0 : deleteQuery.query) || "", + params: (deleteQuery === null || deleteQuery === void 0 ? void 0 : deleteQuery.values) || [], + }, + }; + default: + return { + success: false, + payload: undefined, + msg: "Invalid action", + }; + } } diff --git a/dist/package-shared/utils/data-fetching/method-crud.js b/dist/package-shared/utils/data-fetching/method-crud.js index 235e4f8..2a5c3b6 100644 --- a/dist/package-shared/utils/data-fetching/method-crud.js +++ b/dist/package-shared/utils/data-fetching/method-crud.js @@ -1,164 +1,150 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = dsqlMethodCrud; -const lodash_1 = __importDefault(require("lodash")); -const deserialize_query_1 = __importDefault(require("../deserialize-query")); -const ejson_1 = __importDefault(require("../ejson")); -const numberfy_1 = __importDefault(require("../numberfy")); -const crud_1 = __importDefault(require("./crud")); -function dsqlMethodCrud(_a) { - return __awaiter(this, arguments, void 0, function* ({ method, tableName, addUser, user, extraData, transformData, existingData, body, query, targetId, sanitize, transformQuery, debug, }) { - var _b, _c, _d; - let result = { - success: false, - }; - try { - let finalBody = body; - let finalQuery = (0, deserialize_query_1.default)(query); - let LIMIT = 10; - let PAGE = 1; - let OFFSET = (PAGE - 1) * LIMIT; - if (method == "GET") { - const newFinalQuery = lodash_1.default.cloneDeep(finalQuery || {}); - Object.keys(newFinalQuery).forEach((key) => { - const value = newFinalQuery[key]; - if (typeof value == "string" && value.match(/^\{|^\[/)) { - newFinalQuery[key] = ejson_1.default.stringify(value); - } - if (value == "true") { - newFinalQuery[key] = true; - } - if (value == "false") { - newFinalQuery[key] = false; - } - }); - if (newFinalQuery.limit) - LIMIT = (0, numberfy_1.default)(newFinalQuery.limit); - if (newFinalQuery.page) - PAGE = (0, numberfy_1.default)(newFinalQuery.page); - OFFSET = (PAGE - 1) * LIMIT; - finalQuery = newFinalQuery; - } - let finalData = finalBody - ? Object.assign(Object.assign({}, finalBody), extraData) - : {}; - if ((user === null || user === void 0 ? 
void 0 : user.id) && addUser) { - finalData = Object.assign(Object.assign({}, finalData), { [addUser.field]: String(user.id) }); - } - if (transformData) { - if (debug) { - console.log("DEBUG:::transforming Data ..."); +import _ from "lodash"; +import deserializeQuery from "../deserialize-query"; +import EJSON from "../ejson"; +import numberfy from "../numberfy"; +import dsqlCrud from "./crud"; +export default async function dsqlMethodCrud({ method, tableName, addUser, user, extraData, transformData, existingData, body, query, targetId, sanitize, transformQuery, debug, }) { + var _a, _b, _c, _d, _e; + let result = { + success: false, + }; + try { + let finalBody = body; + let finalQuery = deserializeQuery(query); + let LIMIT = 10; + let PAGE = 1; + let OFFSET = (PAGE - 1) * LIMIT; + if (method == "GET") { + const newFinalQuery = _.cloneDeep(finalQuery || {}); + Object.keys(newFinalQuery).forEach((key) => { + const value = newFinalQuery[key]; + if (typeof value == "string" && value.match(/^\{|^\[/)) { + newFinalQuery[key] = EJSON.stringify(value); } - finalData = (yield transformData({ - data: finalData, - existingData: existingData, - user, - reqMethod: method, - })); - } - if (transformQuery) { - if (debug) { - console.log("DEBUG:::transforming Query ..."); + if (value == "true") { + newFinalQuery[key] = true; } - finalQuery = yield transformQuery({ - query: finalQuery || {}, - user, - reqMethod: method, - }); - } + if (value == "false") { + newFinalQuery[key] = false; + } + }); + if (newFinalQuery.limit) + LIMIT = numberfy(newFinalQuery.limit); + if (newFinalQuery.page) + PAGE = numberfy(newFinalQuery.page); + OFFSET = (PAGE - 1) * LIMIT; + finalQuery = newFinalQuery; + } + let finalData = finalBody + ? Object.assign(Object.assign({}, finalBody), extraData) + : {}; + if ((user === null || user === void 0 ? void 0 : user.id) && addUser) { + finalData = Object.assign(Object.assign({}, finalData), { [addUser.field]: String(user.id) }); + } + if (transformData) { if (debug) { - console.log("DEBUG:::finalQuery", finalQuery); - console.log("DEBUG:::finalData", finalData); + console.log("DEBUG:::transforming Data ..."); } - switch (method) { - case "GET": - const GET_RESULT = yield (0, crud_1.default)({ - action: "get", - table: tableName, - query: Object.assign(Object.assign({}, finalQuery), { query: Object.assign(Object.assign({}, finalQuery === null || finalQuery === void 0 ? void 0 : finalQuery.query), ((user === null || user === void 0 ? void 0 : user.id) && addUser - ? { - [addUser.field]: { - value: String(user.id), - }, - } - : undefined)), limit: LIMIT, offset: OFFSET }), - sanitize, - }); - result = { - success: Boolean(GET_RESULT === null || GET_RESULT === void 0 ? void 0 : GET_RESULT.success), - payload: GET_RESULT === null || GET_RESULT === void 0 ? void 0 : GET_RESULT.payload, - msg: GET_RESULT === null || GET_RESULT === void 0 ? void 0 : GET_RESULT.msg, - error: GET_RESULT === null || GET_RESULT === void 0 ? void 0 : GET_RESULT.error, - queryObject: GET_RESULT === null || GET_RESULT === void 0 ? void 0 : GET_RESULT.queryObject, - }; - break; - case "POST": - const POST_RESULT = yield (0, crud_1.default)({ - action: "insert", - table: tableName, - data: finalData && ((_b = Object.keys(finalData)) === null || _b === void 0 ? void 0 : _b[0]) - ? finalData - : undefined, - sanitize, - }); - result = { - success: Boolean(POST_RESULT === null || POST_RESULT === void 0 ? void 0 : POST_RESULT.success), - payload: POST_RESULT === null || POST_RESULT === void 0 ? 
void 0 : POST_RESULT.payload, - msg: POST_RESULT === null || POST_RESULT === void 0 ? void 0 : POST_RESULT.msg, - error: POST_RESULT === null || POST_RESULT === void 0 ? void 0 : POST_RESULT.error, - }; - break; - case "PUT": - const PUT_RESULT = yield (0, crud_1.default)({ - action: "update", - table: tableName, - data: finalData && ((_c = Object.keys(finalData)) === null || _c === void 0 ? void 0 : _c[0]) - ? finalData - : undefined, - targetId, - sanitize, - }); - result = { - success: Boolean(PUT_RESULT === null || PUT_RESULT === void 0 ? void 0 : PUT_RESULT.success), - payload: PUT_RESULT === null || PUT_RESULT === void 0 ? void 0 : PUT_RESULT.payload, - msg: PUT_RESULT === null || PUT_RESULT === void 0 ? void 0 : PUT_RESULT.msg, - error: PUT_RESULT === null || PUT_RESULT === void 0 ? void 0 : PUT_RESULT.error, - }; - break; - case "DELETE": - const DELETE_RESULT = yield (0, crud_1.default)({ - action: "delete", - table: tableName, - targetId, - sanitize, - }); - result = { - success: Boolean(DELETE_RESULT === null || DELETE_RESULT === void 0 ? void 0 : DELETE_RESULT.success), - payload: DELETE_RESULT === null || DELETE_RESULT === void 0 ? void 0 : DELETE_RESULT.payload, - msg: DELETE_RESULT === null || DELETE_RESULT === void 0 ? void 0 : DELETE_RESULT.msg, - error: DELETE_RESULT === null || DELETE_RESULT === void 0 ? void 0 : DELETE_RESULT.error, - }; - break; - default: - break; + finalData = (await transformData({ + data: finalData, + existingData: existingData, + user, + reqMethod: method, + })); + } + if (transformQuery) { + if (debug) { + console.log("DEBUG:::transforming Query ..."); } - return result; + finalQuery = await transformQuery({ + query: finalQuery || {}, + user, + reqMethod: method, + }); } - catch (error) { - (_d = global.ERROR_CALLBACK) === null || _d === void 0 ? void 0 : _d.call(global, `Method Crud Error`, error); - return result; + if (debug) { + console.log("DEBUG:::finalQuery", finalQuery); + console.log("DEBUG:::finalData", finalData); } - }); + switch (method) { + case "GET": + const GET_RESULT = await dsqlCrud({ + action: "get", + table: tableName, + query: Object.assign(Object.assign({}, finalQuery), { query: Object.assign(Object.assign({}, finalQuery === null || finalQuery === void 0 ? void 0 : finalQuery.query), ((user === null || user === void 0 ? void 0 : user.id) && addUser + ? { + [addUser.field]: { + value: String(user.id), + }, + } + : undefined)), limit: LIMIT, offset: OFFSET }), + sanitize, + }); + result = { + success: Boolean(GET_RESULT === null || GET_RESULT === void 0 ? void 0 : GET_RESULT.success), + payload: GET_RESULT === null || GET_RESULT === void 0 ? void 0 : GET_RESULT.payload, + msg: GET_RESULT === null || GET_RESULT === void 0 ? void 0 : GET_RESULT.msg, + error: GET_RESULT === null || GET_RESULT === void 0 ? void 0 : GET_RESULT.error, + queryObject: { + string: ((_a = GET_RESULT === null || GET_RESULT === void 0 ? void 0 : GET_RESULT.queryObject) === null || _a === void 0 ? void 0 : _a.sql) || "", + values: ((_b = GET_RESULT === null || GET_RESULT === void 0 ? void 0 : GET_RESULT.queryObject) === null || _b === void 0 ? void 0 : _b.params) || [], + }, + }; + break; + case "POST": + const POST_RESULT = await dsqlCrud({ + action: "insert", + table: tableName, + data: finalData && ((_c = Object.keys(finalData)) === null || _c === void 0 ? void 0 : _c[0]) + ? finalData + : undefined, + sanitize, + }); + result = { + success: Boolean(POST_RESULT === null || POST_RESULT === void 0 ? 
void 0 : POST_RESULT.success), + payload: POST_RESULT === null || POST_RESULT === void 0 ? void 0 : POST_RESULT.payload, + msg: POST_RESULT === null || POST_RESULT === void 0 ? void 0 : POST_RESULT.msg, + error: POST_RESULT === null || POST_RESULT === void 0 ? void 0 : POST_RESULT.error, + }; + break; + case "PUT": + const PUT_RESULT = await dsqlCrud({ + action: "update", + table: tableName, + data: finalData && ((_d = Object.keys(finalData)) === null || _d === void 0 ? void 0 : _d[0]) + ? finalData + : undefined, + targetId, + sanitize, + }); + result = { + success: Boolean(PUT_RESULT === null || PUT_RESULT === void 0 ? void 0 : PUT_RESULT.success), + payload: PUT_RESULT === null || PUT_RESULT === void 0 ? void 0 : PUT_RESULT.payload, + msg: PUT_RESULT === null || PUT_RESULT === void 0 ? void 0 : PUT_RESULT.msg, + error: PUT_RESULT === null || PUT_RESULT === void 0 ? void 0 : PUT_RESULT.error, + }; + break; + case "DELETE": + const DELETE_RESULT = await dsqlCrud({ + action: "delete", + table: tableName, + targetId, + sanitize, + }); + result = { + success: Boolean(DELETE_RESULT === null || DELETE_RESULT === void 0 ? void 0 : DELETE_RESULT.success), + payload: DELETE_RESULT === null || DELETE_RESULT === void 0 ? void 0 : DELETE_RESULT.payload, + msg: DELETE_RESULT === null || DELETE_RESULT === void 0 ? void 0 : DELETE_RESULT.msg, + error: DELETE_RESULT === null || DELETE_RESULT === void 0 ? void 0 : DELETE_RESULT.error, + }; + break; + default: + break; + } + return result; + } + catch (error) { + (_e = global.ERROR_CALLBACK) === null || _e === void 0 ? void 0 : _e.call(global, `Method Crud Error`, error); + return result; + } } diff --git a/dist/package-shared/utils/db/conn-db-handler.d.ts b/dist/package-shared/utils/db/conn-db-handler.d.ts index a7ee0b3..a302e51 100644 --- a/dist/package-shared/utils/db/conn-db-handler.d.ts +++ b/dist/package-shared/utils/db/conn-db-handler.d.ts @@ -1,10 +1,12 @@ import { ServerlessMysql } from "serverless-mysql"; +import { DSQLErrorObject } from "../../types"; export type ConnDBHandlerQueryObject = { query: string; values?: (string | number | undefined)[]; }; type Return = ReturnType | null | { - error: string; + error?: string; + errors?: DSQLErrorObject[]; }; /** * # Run Query From MySQL Connection diff --git a/dist/package-shared/utils/db/conn-db-handler.js b/dist/package-shared/utils/db/conn-db-handler.js index 1bcda12..ec8640a 100644 --- a/dist/package-shared/utils/db/conn-db-handler.js +++ b/dist/package-shared/utils/db/conn-db-handler.js @@ -1,25 +1,10 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = connDbHandler; -const debug_log_1 = __importDefault(require("../logging/debug-log")); +import debugLog from "../logging/debug-log"; /** * # Run Query From MySQL Connection * @description Run a query from a pre-existing MySQL/Mariadb Connection * setup with `serverless-mysql` npm module */ -function connDbHandler( +export default async function connDbHandler( /** * ServerlessMySQL Connection Object */ @@ -32,74 +17,83 @@ query, * Array of Values to Sanitize and Inject */ values, debug) { - return __awaiter(this, void 0, void 0, function* () { - var _a, _b; - try { - if (!conn) - throw new Error("No Connection Found!"); - if (!query) - throw new Error("Query String Required!"); - if (typeof query == "string") { - const res = yield conn.query(trimQuery(query), values); - if (debug) { - (0, debug_log_1.default)({ - log: res, - addTime: true, - label: "res", - }); - } - return JSON.parse(JSON.stringify(res)); - } - else if (typeof query == "object") { - const resArray = []; - for (let i = 0; i < query.length; i++) { - try { - const queryObj = query[i]; - const queryObjRes = yield conn.query(trimQuery(queryObj.query), queryObj.values); - if (debug) { - (0, debug_log_1.default)({ - log: queryObjRes, - addTime: true, - label: "queryObjRes", - }); - } - resArray.push(JSON.parse(JSON.stringify(queryObjRes))); - } - catch (error) { - (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? void 0 : _a.call(global, `Connection DB Handler Query Error`, error); - resArray.push(null); - } - } - if (debug) { - (0, debug_log_1.default)({ - log: resArray, - addTime: true, - label: "resArray", - }); - } - return resArray; - } - else { - return null; - } - } - catch (error) { - (_b = global.ERROR_CALLBACK) === null || _b === void 0 ? void 0 : _b.call(global, `Connection DB Handler Error`, error); + var _a, _b; + try { + if (!conn) + throw new Error("No Connection Found!"); + if (!query) + throw new Error("Query String Required!"); + let queryErrorArray = []; + if (typeof query == "string") { + const res = await conn.query(trimQuery(query), values); if (debug) { - (0, debug_log_1.default)({ - log: `Connection DB Handler Error: ${error.message}`, + debugLog({ + log: res, addTime: true, - label: "Error", + label: "res", }); } - return { - error: `Connection DB Handler Error: ${error.message}`, - }; + return JSON.parse(JSON.stringify(res)); } - finally { - conn === null || conn === void 0 ? void 0 : conn.end(); + else if (typeof query == "object") { + const resArray = []; + for (let i = 0; i < query.length; i++) { + let currentQueryError = {}; + try { + const queryObj = query[i]; + currentQueryError.sql = queryObj.query; + currentQueryError.sqlValues = queryObj.values; + const queryObjRes = await conn.query(trimQuery(queryObj.query), queryObj.values); + if (debug) { + debugLog({ + log: queryObjRes, + addTime: true, + label: "queryObjRes", + }); + } + resArray.push(JSON.parse(JSON.stringify(queryObjRes))); + } + catch (error) { + (_a = global.ERROR_CALLBACK) === null || _a === void 0 ? 
void 0 : _a.call(global, `Connection DB Handler Query Error`, error); + resArray.push(null); + currentQueryError["error"] = error.message; + queryErrorArray.push(currentQueryError); + } + } + if (debug) { + debugLog({ + log: resArray, + addTime: true, + label: "resArray", + }); + } + if (queryErrorArray[0]) { + return { + errors: queryErrorArray, + }; + } + return resArray; } - }); + else { + return null; + } + } + catch (error) { + (_b = global.ERROR_CALLBACK) === null || _b === void 0 ? void 0 : _b.call(global, `Connection DB Handler Error`, error); + if (debug) { + debugLog({ + log: `Connection DB Handler Error: ${error.message}`, + addTime: true, + label: "Error", + }); + } + return { + error: `Connection DB Handler Error: ${error.message}`, + }; + } + finally { + conn === null || conn === void 0 ? void 0 : conn.end(); + } } function trimQuery(query) { return query.replace(/\n/gm, "").replace(/ {2,}/g, "").trim(); diff --git a/dist/package-shared/utils/db/schema/data-type-constructor.d.ts b/dist/package-shared/utils/db/schema/data-type-constructor.d.ts new file mode 100644 index 0000000..9c6b8d4 --- /dev/null +++ b/dist/package-shared/utils/db/schema/data-type-constructor.d.ts @@ -0,0 +1 @@ +export default function dataTypeConstructor(dataType: string, limit?: number, decimal?: number): string; diff --git a/dist/package-shared/utils/db/schema/data-type-constructor.js b/dist/package-shared/utils/db/schema/data-type-constructor.js new file mode 100644 index 0000000..9990ef7 --- /dev/null +++ b/dist/package-shared/utils/db/schema/data-type-constructor.js @@ -0,0 +1,20 @@ +import dataTypeParser, { DataTypesWithNumbers } from "./data-type-parser"; +export default function dataTypeConstructor(dataType, limit, decimal) { + let finalType = dataTypeParser(dataType).type; + if (!DataTypesWithNumbers.includes(finalType)) { + return finalType; + } + if (finalType == "VARCHAR") { + return (finalType += `(${limit || 250})`); + } + if (finalType == "DECIMAL" || + finalType == "FLOAT" || + finalType == "DOUBLE") { + return (finalType += `(${limit || 10},${decimal || 2})`); + } + if (limit && !decimal) + finalType += `(${limit})`; + if (limit && decimal) + finalType += `(${limit},${decimal})`; + return finalType; +} diff --git a/dist/package-shared/utils/db/schema/data-type-parser.d.ts b/dist/package-shared/utils/db/schema/data-type-parser.d.ts new file mode 100644 index 0000000..422bd5a --- /dev/null +++ b/dist/package-shared/utils/db/schema/data-type-parser.d.ts @@ -0,0 +1,10 @@ +import DataTypes from "../../../data/data-types"; +export declare const DataTypesWithNumbers: (typeof DataTypes)[number]["name"][]; +export declare const DataTypesWithTwoNumbers: (typeof DataTypes)[number]["name"][]; +type Return = { + type: (typeof DataTypes)[number]["name"]; + limit?: number; + decimal?: number; +}; +export default function dataTypeParser(dataType?: string): Return; +export {}; diff --git a/dist/package-shared/utils/db/schema/data-type-parser.js b/dist/package-shared/utils/db/schema/data-type-parser.js new file mode 100644 index 0000000..3cbc8c4 --- /dev/null +++ b/dist/package-shared/utils/db/schema/data-type-parser.js @@ -0,0 +1,40 @@ +import numberfy from "../../numberfy"; +export const DataTypesWithNumbers = [ + "DECIMAL", + "DOUBLE", + "FLOAT", + "VARCHAR", +]; +export const DataTypesWithTwoNumbers = [ + "DECIMAL", + "DOUBLE", + "FLOAT", +]; +export default function dataTypeParser(dataType) { + if (!dataType) { + return { + type: "VARCHAR", + limit: 250, + }; + } + const dataTypeArray = 
dataType.split("("); + const type = dataTypeArray[0]; + const number = dataTypeArray[1]; + if (!DataTypesWithNumbers.includes(type)) { + return { + type, + }; + } + if (number === null || number === void 0 ? void 0 : number.match(/,/)) { + const numberArr = number.split(","); + return { + type, + limit: numberfy(numberArr[0]), + decimal: numberArr[1] ? numberfy(numberArr[1]) : undefined, + }; + } + return { + type, + limit: number ? numberfy(number) : undefined, + }; +} diff --git a/dist/package-shared/utils/db/schema/grab-target-db-schema-index.d.ts b/dist/package-shared/utils/db/schema/grab-target-db-schema-index.d.ts new file mode 100644 index 0000000..b83e8c4 --- /dev/null +++ b/dist/package-shared/utils/db/schema/grab-target-db-schema-index.d.ts @@ -0,0 +1,11 @@ +import { DSQL_ChildrenDatabaseObject, DSQL_ChildrenTablesType, DSQL_DatabaseSchemaType } from "../../../types"; +type Params = { + dbs?: DSQL_DatabaseSchemaType[]; + dbSchema?: DSQL_DatabaseSchemaType; + childDbSchema?: DSQL_ChildrenDatabaseObject; + childTableSchema?: DSQL_ChildrenTablesType; + dbSlug?: string; + dbFullName?: string; +}; +export default function grabTargetDatabaseSchemaIndex({ dbs, dbFullName, dbSlug, dbSchema, childDbSchema, childTableSchema, }: Params): number | undefined; +export {}; diff --git a/dist/package-shared/utils/db/schema/grab-target-db-schema-index.js b/dist/package-shared/utils/db/schema/grab-target-db-schema-index.js new file mode 100644 index 0000000..3a9a0dd --- /dev/null +++ b/dist/package-shared/utils/db/schema/grab-target-db-schema-index.js @@ -0,0 +1,10 @@ +export default function grabTargetDatabaseSchemaIndex({ dbs, dbFullName, dbSlug, dbSchema, childDbSchema, childTableSchema, }) { + if (!dbs) + return undefined; + const targetDbIndex = dbs.findIndex((db) => (dbSlug && dbSlug == db.dbSlug) || + (dbFullName && dbFullName == db.dbFullName) || + (dbSchema && dbSchema.dbSlug && dbSchema.dbSlug == db.dbSlug)); + if (targetDbIndex < 0) + return undefined; + return targetDbIndex; +} diff --git a/dist/package-shared/utils/db/schema/grab-target-table-schema-index.d.ts b/dist/package-shared/utils/db/schema/grab-target-table-schema-index.d.ts new file mode 100644 index 0000000..eeb1ae2 --- /dev/null +++ b/dist/package-shared/utils/db/schema/grab-target-table-schema-index.d.ts @@ -0,0 +1,9 @@ +import { DSQL_ChildrenTablesType, DSQL_TableSchemaType } from "../../../types"; +type Params = { + tables?: DSQL_TableSchemaType[]; + tableSchema?: DSQL_TableSchemaType; + childTableSchema?: DSQL_ChildrenTablesType; + tableName?: string; +}; +export default function grabTargetTableSchemaIndex({ tables, tableName, tableSchema, childTableSchema, }: Params): number | undefined; +export {}; diff --git a/dist/package-shared/utils/db/schema/grab-target-table-schema-index.js b/dist/package-shared/utils/db/schema/grab-target-table-schema-index.js new file mode 100644 index 0000000..8ba2e0f --- /dev/null +++ b/dist/package-shared/utils/db/schema/grab-target-table-schema-index.js @@ -0,0 +1,11 @@ +export default function grabTargetTableSchemaIndex({ tables, tableName, tableSchema, childTableSchema, }) { + if (!tables) + return undefined; + const targetTableIndex = tables.findIndex((tbl) => (tableName && tableName == tbl.tableName) || + (tableSchema && + tableSchema.tableName && + tableSchema.tableName == tbl.tableName)); + if (targetTableIndex < 0) + return undefined; + return targetTableIndex; +} diff --git a/dist/package-shared/utils/db/schema/grab-target-table-schema.d.ts 
b/dist/package-shared/utils/db/schema/grab-target-table-schema.d.ts new file mode 100644 index 0000000..b0112a0 --- /dev/null +++ b/dist/package-shared/utils/db/schema/grab-target-table-schema.d.ts @@ -0,0 +1,7 @@ +import { DSQL_TableSchemaType } from "../../../types"; +type Params = { + tables: DSQL_TableSchemaType[]; + tableName?: string; +}; +export default function grabTargetTableSchema({ tables, tableName, }: Params): DSQL_TableSchemaType | undefined; +export {}; diff --git a/dist/package-shared/utils/db/schema/grab-target-table-schema.js b/dist/package-shared/utils/db/schema/grab-target-table-schema.js new file mode 100644 index 0000000..8eb81fd --- /dev/null +++ b/dist/package-shared/utils/db/schema/grab-target-table-schema.js @@ -0,0 +1,4 @@ +export default function grabTargetTableSchema({ tables, tableName, }) { + const targetTable = tables.find((tbl) => tableName && tableName == tbl.tableName); + return targetTable; +} diff --git a/dist/package-shared/utils/db/schema/grab-text-field-type.d.ts b/dist/package-shared/utils/db/schema/grab-text-field-type.d.ts new file mode 100644 index 0000000..2f72812 --- /dev/null +++ b/dist/package-shared/utils/db/schema/grab-text-field-type.d.ts @@ -0,0 +1,2 @@ +import { DSQL_FieldSchemaType, TextFieldTypesArray } from "../../../types"; +export default function grabTextFieldType(field: DSQL_FieldSchemaType, nullReturn?: boolean): (typeof TextFieldTypesArray)[number]["value"] | undefined; diff --git a/dist/package-shared/utils/db/schema/grab-text-field-type.js b/dist/package-shared/utils/db/schema/grab-text-field-type.js new file mode 100644 index 0000000..d111a8f --- /dev/null +++ b/dist/package-shared/utils/db/schema/grab-text-field-type.js @@ -0,0 +1,19 @@ +export default function grabTextFieldType(field, nullReturn) { + if (field.richText) + return "richText"; + if (field.json) + return "json"; + if (field.yaml) + return "yaml"; + if (field.html) + return "html"; + if (field.css) + return "css"; + if (field.javascript) + return "javascript"; + if (field.shell) + return "shell"; + if (nullReturn) + return undefined; + return "plain"; +} diff --git a/dist/package-shared/utils/db/schema/resolve-schema-children-handle-children-databases.d.ts b/dist/package-shared/utils/db/schema/resolve-schema-children-handle-children-databases.d.ts new file mode 100644 index 0000000..e1515f5 --- /dev/null +++ b/dist/package-shared/utils/db/schema/resolve-schema-children-handle-children-databases.d.ts @@ -0,0 +1,7 @@ +import { DSQL_DatabaseSchemaType } from "../../../types"; +type Params = { + currentDbSchema: DSQL_DatabaseSchemaType; + userId: string | number; +}; +export default function ({ currentDbSchema, userId }: Params): DSQL_DatabaseSchemaType; +export {}; diff --git a/dist/package-shared/utils/db/schema/resolve-schema-children-handle-children-databases.js b/dist/package-shared/utils/db/schema/resolve-schema-children-handle-children-databases.js new file mode 100644 index 0000000..406185e --- /dev/null +++ b/dist/package-shared/utils/db/schema/resolve-schema-children-handle-children-databases.js @@ -0,0 +1,88 @@ +import { grabPrimaryRequiredDbSchema, writeUpdatedDbSchema, } from "../../../shell/createDbFromSchema/grab-required-database-schemas"; +import _ from "lodash"; +import uniqueByKey from "../../unique-by-key"; +export default function ({ currentDbSchema, userId }) { + var _a, _b, _c; + const newCurrentDbSchema = _.cloneDeep(currentDbSchema); + if (newCurrentDbSchema.childrenDatabases) { + for (let ch = 0; ch < 
newCurrentDbSchema.childrenDatabases.length; ch++) { + const dbChildDb = newCurrentDbSchema.childrenDatabases[ch]; + if (!dbChildDb.dbId) { + newCurrentDbSchema.childrenDatabases.splice(ch, 1, {}); + continue; + } + const targetChildDatabase = grabPrimaryRequiredDbSchema({ + dbId: dbChildDb.dbId, + userId, + }); + /** + * Delete child database from array if said database + * doesn't exist + */ + if ((targetChildDatabase === null || targetChildDatabase === void 0 ? void 0 : targetChildDatabase.id) && targetChildDatabase.childDatabase) { + targetChildDatabase.tables = [...newCurrentDbSchema.tables]; + writeUpdatedDbSchema({ + dbSchema: targetChildDatabase, + userId, + }); + } + else { + (_a = newCurrentDbSchema.childrenDatabases) === null || _a === void 0 ? void 0 : _a.splice(ch, 1, {}); + } + } + newCurrentDbSchema.childrenDatabases = + uniqueByKey(newCurrentDbSchema.childrenDatabases.filter((db) => Boolean(db.dbId)), "dbId"); + } + /** + * Handle scenario where this database is a child of another + */ + if (currentDbSchema.childDatabase && currentDbSchema.childDatabaseDbId) { + const targetParentDatabase = grabPrimaryRequiredDbSchema({ + dbId: currentDbSchema.childDatabaseDbId, + userId, + }); + if (!targetParentDatabase) { + return newCurrentDbSchema; + } + /** + * Delete child Database key/values from current database if + * the parent database doesn't exist + */ + if (!(targetParentDatabase === null || targetParentDatabase === void 0 ? void 0 : targetParentDatabase.id)) { + delete newCurrentDbSchema.childDatabase; + delete newCurrentDbSchema.childDatabaseDbId; + return newCurrentDbSchema; + } + /** + * New Child Database Object to be appended + */ + const newChildDatabaseObject = { + dbId: currentDbSchema.id, + }; + /** + * Add a new Children array in the target Database if this is the + * first child to be added to said database. Else append to array + * if it exists + */ + if ((targetParentDatabase === null || targetParentDatabase === void 0 ? void 0 : targetParentDatabase.id) && + !((_b = targetParentDatabase.childrenDatabases) === null || _b === void 0 ? void 0 : _b[0])) { + targetParentDatabase.childrenDatabases = [newChildDatabaseObject]; + } + else if ((targetParentDatabase === null || targetParentDatabase === void 0 ? void 0 : targetParentDatabase.id) && + ((_c = targetParentDatabase.childrenDatabases) === null || _c === void 0 ? void 0 : _c[0])) { + const existingChildDb = targetParentDatabase.childrenDatabases.find((db) => db.dbId == currentDbSchema.id); + if (!(existingChildDb === null || existingChildDb === void 0 ? void 0 : existingChildDb.dbId)) { + targetParentDatabase.childrenDatabases.push(newChildDatabaseObject); + } + targetParentDatabase.childrenDatabases = uniqueByKey(targetParentDatabase.childrenDatabases, "dbId"); + } + /** + * Update tables for child database, which is the current database + */ + if (targetParentDatabase === null || targetParentDatabase === void 0 ? 
void 0 : targetParentDatabase.id) { + newCurrentDbSchema.tables = targetParentDatabase.tables; + writeUpdatedDbSchema({ dbSchema: targetParentDatabase, userId }); + } + } + return newCurrentDbSchema; +} diff --git a/dist/package-shared/utils/db/schema/resolve-schema-children-handle-children-tables.d.ts b/dist/package-shared/utils/db/schema/resolve-schema-children-handle-children-tables.d.ts new file mode 100644 index 0000000..f6dded9 --- /dev/null +++ b/dist/package-shared/utils/db/schema/resolve-schema-children-handle-children-tables.d.ts @@ -0,0 +1,9 @@ +import { DSQL_DatabaseSchemaType, DSQL_TableSchemaType } from "../../../types"; +type Params = { + currentDbSchema: DSQL_DatabaseSchemaType; + currentTableSchema: DSQL_TableSchemaType; + currentTableSchemaIndex: number; + userId: string | number; +}; +export default function ({ currentDbSchema, currentTableSchema, currentTableSchemaIndex, userId, }: Params): DSQL_DatabaseSchemaType; +export {}; diff --git a/dist/package-shared/utils/db/schema/resolve-schema-children-handle-children-tables.js b/dist/package-shared/utils/db/schema/resolve-schema-children-handle-children-tables.js new file mode 100644 index 0000000..269dfad --- /dev/null +++ b/dist/package-shared/utils/db/schema/resolve-schema-children-handle-children-tables.js @@ -0,0 +1,133 @@ +import { grabPrimaryRequiredDbSchema, writeUpdatedDbSchema, } from "../../../shell/createDbFromSchema/grab-required-database-schemas"; +import _ from "lodash"; +import uniqueByKey from "../../unique-by-key"; +export default function ({ currentDbSchema, currentTableSchema, currentTableSchemaIndex, userId, }) { + var _a, _b, _c, _d, _e, _f, _g; + if (!currentDbSchema.dbFullName) { + throw new Error(`Resolve Children tables ERROR => currentDbSchema.dbFullName not found!`); + } + const newCurrentDbSchema = _.cloneDeep(currentDbSchema); + if (newCurrentDbSchema.tables[currentTableSchemaIndex].childrenTables) { + for (let ch = 0; ch < + newCurrentDbSchema.tables[currentTableSchemaIndex].childrenTables + .length; ch++) { + const childTable = newCurrentDbSchema.tables[currentTableSchemaIndex] + .childrenTables[ch]; + if (!childTable.dbId || !childTable.tableId) { + (_a = newCurrentDbSchema.tables[currentTableSchemaIndex].childrenTables) === null || _a === void 0 ? void 0 : _a.splice(ch, 1, {}); + continue; + } + const targetChildTableParentDatabase = grabPrimaryRequiredDbSchema({ + dbId: childTable.dbId, + userId, + }); + /** + * Delete child table from array if the parent database + * of said child table has been deleted or doesn't exist + */ + if (!(targetChildTableParentDatabase === null || targetChildTableParentDatabase === void 0 ? void 0 : targetChildTableParentDatabase.dbFullName)) { + (_b = newCurrentDbSchema.tables[currentTableSchemaIndex].childrenTables) === null || _b === void 0 ? void 0 : _b.splice(ch, 1, {}); + } + else { + /** + * Delete child table from array if the parent database + * exists but the target table has been deleted or doesn't + * exist + */ + const targetChildTableParentDatabaseTableIndex = targetChildTableParentDatabase.tables.findIndex((tbl) => tbl.id == childTable.tableId); + const targetChildTableParentDatabaseTable = targetChildTableParentDatabase.tables[targetChildTableParentDatabaseTableIndex]; + if (targetChildTableParentDatabaseTable === null || targetChildTableParentDatabaseTable === void 0 ? 
void 0 : targetChildTableParentDatabaseTable.childTable) { + targetChildTableParentDatabase.tables[targetChildTableParentDatabaseTableIndex].fields = [...currentTableSchema.fields]; + targetChildTableParentDatabase.tables[targetChildTableParentDatabaseTableIndex].indexes = [...(currentTableSchema.indexes || [])]; + writeUpdatedDbSchema({ + dbSchema: targetChildTableParentDatabase, + userId, + }); + } + else { + (_c = newCurrentDbSchema.tables[currentTableSchemaIndex].childrenTables) === null || _c === void 0 ? void 0 : _c.splice(ch, 1, {}); + } + } + } + if ((_d = newCurrentDbSchema.tables[currentTableSchemaIndex] + .childrenTables) === null || _d === void 0 ? void 0 : _d[0]) { + newCurrentDbSchema.tables[currentTableSchemaIndex].childrenTables = + uniqueByKey(newCurrentDbSchema.tables[currentTableSchemaIndex].childrenTables.filter((tbl) => Boolean(tbl.dbId) && Boolean(tbl.tableId)), "dbId"); + } + else { + delete newCurrentDbSchema.tables[currentTableSchemaIndex] + .childrenTables; + } + } + /** + * Handle scenario where this table is a child of another + */ + if (currentTableSchema.childTable && + currentTableSchema.childTableDbId && + currentTableSchema.childTableDbId) { + const targetParentDatabase = grabPrimaryRequiredDbSchema({ + dbId: currentTableSchema.childTableDbId, + userId, + }); + const targetParentDatabaseTableIndex = targetParentDatabase === null || targetParentDatabase === void 0 ? void 0 : targetParentDatabase.tables.findIndex((tbl) => tbl.id == currentTableSchema.childTableId); + const targetParentDatabaseTable = typeof targetParentDatabaseTableIndex == "number" + ? targetParentDatabaseTableIndex < 0 + ? undefined + : targetParentDatabase === null || targetParentDatabase === void 0 ? void 0 : targetParentDatabase.tables[targetParentDatabaseTableIndex] + : undefined; + /** + * Delete child Table key/values from current database if + * the parent database doesn't exist + */ + if (!(targetParentDatabase === null || targetParentDatabase === void 0 ? void 0 : targetParentDatabase.dbFullName) || + !(targetParentDatabaseTable === null || targetParentDatabaseTable === void 0 ? void 0 : targetParentDatabaseTable.tableName)) { + delete newCurrentDbSchema.tables[currentTableSchemaIndex] + .childTable; + delete newCurrentDbSchema.tables[currentTableSchemaIndex] + .childTableDbId; + delete newCurrentDbSchema.tables[currentTableSchemaIndex] + .childTableId; + delete newCurrentDbSchema.tables[currentTableSchemaIndex] + .childTableDbId; + return newCurrentDbSchema; + } + /** + * New Child Database Table Object to be appended + */ + const newChildDatabaseTableObject = { + tableId: currentTableSchema.id, + dbId: newCurrentDbSchema.id, + }; + /** + * Add a new Children array in the target table schema if this is the + * first child to be added to said table schema. Else append to array + * if it exists + */ + if (typeof targetParentDatabaseTableIndex == "number" && + !((_e = targetParentDatabaseTable.childrenTables) === null || _e === void 0 ? void 0 : _e[0])) { + targetParentDatabase.tables[targetParentDatabaseTableIndex].childrenTables = [newChildDatabaseTableObject]; + } + else if (typeof targetParentDatabaseTableIndex == "number" && + ((_f = targetParentDatabaseTable.childrenTables) === null || _f === void 0 ? void 0 : _f[0])) { + const existingChildDbTable = targetParentDatabaseTable.childrenTables.find((tbl) => tbl.dbId == newCurrentDbSchema.id && + tbl.tableId == currentTableSchema.id); + if (!(existingChildDbTable === null || existingChildDbTable === void 0 ?
void 0 : existingChildDbTable.tableId)) { + (_g = targetParentDatabase.tables[targetParentDatabaseTableIndex].childrenTables) === null || _g === void 0 ? void 0 : _g.push(newChildDatabaseTableObject); + } + targetParentDatabase.tables[targetParentDatabaseTableIndex].childrenTables = uniqueByKey(targetParentDatabase.tables[targetParentDatabaseTableIndex] + .childrenTables || [], ["dbId", "tableId"]); + } + /** + * Update fields and indexes for child table, which is the + * current table + */ + if (targetParentDatabaseTable === null || targetParentDatabaseTable === void 0 ? void 0 : targetParentDatabaseTable.tableName) { + newCurrentDbSchema.tables[currentTableSchemaIndex].fields = + targetParentDatabaseTable.fields; + newCurrentDbSchema.tables[currentTableSchemaIndex].indexes = + targetParentDatabaseTable.indexes; + writeUpdatedDbSchema({ dbSchema: targetParentDatabase, userId }); + } + } + return newCurrentDbSchema; +} diff --git a/dist/package-shared/utils/db/schema/resolve-schema-children.d.ts b/dist/package-shared/utils/db/schema/resolve-schema-children.d.ts new file mode 100644 index 0000000..1a6f1f1 --- /dev/null +++ b/dist/package-shared/utils/db/schema/resolve-schema-children.d.ts @@ -0,0 +1,7 @@ +import { DSQL_DatabaseSchemaType } from "../../../types"; +type Params = { + dbSchema: DSQL_DatabaseSchemaType; + userId: string | number; +}; +export default function resolveSchemaChildren({ dbSchema, userId }: Params): DSQL_DatabaseSchemaType; +export {}; diff --git a/dist/package-shared/utils/db/schema/resolve-schema-children.js b/dist/package-shared/utils/db/schema/resolve-schema-children.js new file mode 100644 index 0000000..135b978 --- /dev/null +++ b/dist/package-shared/utils/db/schema/resolve-schema-children.js @@ -0,0 +1,20 @@ +import _ from "lodash"; +import resolveSchemaChildrenHandleChildrenDatabases from "./resolve-schema-children-handle-children-databases"; +import resolveSchemaChildrenHandleChildrenTables from "./resolve-schema-children-handle-children-tables"; +export default function resolveSchemaChildren({ dbSchema, userId }) { + let newDbSchema = _.cloneDeep(dbSchema); + newDbSchema = resolveSchemaChildrenHandleChildrenDatabases({ + currentDbSchema: newDbSchema, + userId, + }); + for (let t = 0; t < newDbSchema.tables.length; t++) { + const tableSchema = newDbSchema.tables[t]; + newDbSchema = resolveSchemaChildrenHandleChildrenTables({ + currentDbSchema: newDbSchema, + currentTableSchema: tableSchema, + currentTableSchemaIndex: t, + userId, + }); + } + return newDbSchema; +} diff --git a/dist/package-shared/utils/db/schema/resolve-schema-foreign-keys.d.ts b/dist/package-shared/utils/db/schema/resolve-schema-foreign-keys.d.ts new file mode 100644 index 0000000..ab5bdc9 --- /dev/null +++ b/dist/package-shared/utils/db/schema/resolve-schema-foreign-keys.d.ts @@ -0,0 +1,7 @@ +import { DSQL_DatabaseSchemaType } from "../../../types"; +type Params = { + dbSchema: DSQL_DatabaseSchemaType; + userId: string | number; +}; +export default function resolveSchemaForeignKeys({ dbSchema, userId }: Params): DSQL_DatabaseSchemaType; +export {}; diff --git a/dist/package-shared/utils/db/schema/resolve-schema-foreign-keys.js b/dist/package-shared/utils/db/schema/resolve-schema-foreign-keys.js new file mode 100644 index 0000000..860c115 --- /dev/null +++ b/dist/package-shared/utils/db/schema/resolve-schema-foreign-keys.js @@ -0,0 +1,27 @@ +import _ from "lodash"; +export default function resolveSchemaForeignKeys({ dbSchema, userId }) { + var _a; + let newDbSchema = _.cloneDeep(dbSchema); + 
for (let t = 0; t < newDbSchema.tables.length; t++) { + const tableSchema = newDbSchema.tables[t]; + for (let f = 0; f < tableSchema.fields.length; f++) { + const fieldSchema = tableSchema.fields[f]; + if ((_a = fieldSchema.foreignKey) === null || _a === void 0 ? void 0 : _a.destinationTableColumnName) { + const fkDestinationTableIndex = newDbSchema.tables.findIndex((tbl) => { + var _a; + return tbl.tableName == + ((_a = fieldSchema.foreignKey) === null || _a === void 0 ? void 0 : _a.destinationTableName); + }); + /** + * Delete current Foreign Key if related table doesn't exist + * or has been deleted + */ + if (fkDestinationTableIndex < 0) { + delete newDbSchema.tables[t].fields[f].foreignKey; + continue; + } + } + } + } + return newDbSchema; +} diff --git a/dist/package-shared/utils/db/schema/resolve-users-schema-ids.d.ts b/dist/package-shared/utils/db/schema/resolve-users-schema-ids.d.ts new file mode 100644 index 0000000..19014db --- /dev/null +++ b/dist/package-shared/utils/db/schema/resolve-users-schema-ids.d.ts @@ -0,0 +1,10 @@ +import { DSQL_DatabaseSchemaType } from "../../../types"; +type Params = { + userId: string | number; + dbId?: string | number; +}; +export default function resolveUsersSchemaIDs({ userId, dbId }: Params): false | undefined; +export declare function resolveUserDatabaseSchemaIDs({ dbSchema, }: { + dbSchema: DSQL_DatabaseSchemaType; +}): DSQL_DatabaseSchemaType; +export {}; diff --git a/dist/package-shared/utils/db/schema/resolve-users-schema-ids.js b/dist/package-shared/utils/db/schema/resolve-users-schema-ids.js new file mode 100644 index 0000000..5487fea --- /dev/null +++ b/dist/package-shared/utils/db/schema/resolve-users-schema-ids.js @@ -0,0 +1,54 @@ +import fs from "fs"; +import grabDirNames from "../../backend/names/grab-dir-names"; +import _n from "../../numberfy"; +import path from "path"; +import _ from "lodash"; +import EJSON from "../../ejson"; +import { writeUpdatedDbSchema } from "../../../shell/createDbFromSchema/grab-required-database-schemas"; +export default function resolveUsersSchemaIDs({ userId, dbId }) { + const { targetUserPrivateDir, tempDirName } = grabDirNames({ userId }); + if (!targetUserPrivateDir) + return false; + const schemaDirFilesFolders = fs.readdirSync(targetUserPrivateDir); + for (let i = 0; i < schemaDirFilesFolders.length; i++) { + const fileOrFolderName = schemaDirFilesFolders[i]; + if (!fileOrFolderName.match(/^\d+.json/)) + continue; + const fileDbId = _n(fileOrFolderName.split(".").shift()); + if (!fileDbId) + continue; + if (dbId && _n(dbId) !== fileDbId) { + continue; + } + const schemaFullPath = path.join(targetUserPrivateDir, fileOrFolderName); + if (!fs.existsSync(schemaFullPath)) + continue; + const dbSchema = EJSON.parse(fs.readFileSync(schemaFullPath, "utf-8")); + if (!dbSchema) + continue; + let newDbSchema = resolveUserDatabaseSchemaIDs({ dbSchema }); + writeUpdatedDbSchema({ dbSchema: newDbSchema, userId }); + } +} +export function resolveUserDatabaseSchemaIDs({ dbSchema, }) { + let newDbSchema = _.cloneDeep(dbSchema); + if (!newDbSchema.id) + newDbSchema.id = dbSchema.id; + newDbSchema.tables.forEach((tbl, index) => { + var _a; + if (!tbl.id) { + newDbSchema.tables[index].id = index + 1; + } + tbl.fields.forEach((fld, flIndx) => { + if (!fld.id) { + newDbSchema.tables[index].fields[flIndx].id = flIndx + 1; + } + }); + (_a = tbl.indexes) === null || _a === void 0 ? 
void 0 : _a.forEach((indx, indIndx) => { + if (!indx.id && newDbSchema.tables[index].indexes) { + newDbSchema.tables[index].indexes[indIndx].id = indIndx + 1; + } + }); + }); + return newDbSchema; +} diff --git a/dist/package-shared/utils/db/schema/set-text-field-type.d.ts b/dist/package-shared/utils/db/schema/set-text-field-type.d.ts new file mode 100644 index 0000000..5a9b6ac --- /dev/null +++ b/dist/package-shared/utils/db/schema/set-text-field-type.d.ts @@ -0,0 +1,2 @@ +import { DSQL_FieldSchemaType, TextFieldTypesArray } from "../../../types"; +export default function setTextFieldType(field: DSQL_FieldSchemaType, type?: (typeof TextFieldTypesArray)[number]["value"]): DSQL_FieldSchemaType; diff --git a/dist/package-shared/utils/db/schema/set-text-field-type.js b/dist/package-shared/utils/db/schema/set-text-field-type.js new file mode 100644 index 0000000..8512e74 --- /dev/null +++ b/dist/package-shared/utils/db/schema/set-text-field-type.js @@ -0,0 +1,30 @@ +import _ from "lodash"; +export default function setTextFieldType(field, type) { + const newField = _.cloneDeep(field); + delete newField.css; + delete newField.richText; + delete newField.json; + delete newField.shell; + delete newField.html; + delete newField.javascript; + delete newField.yaml; + delete newField.code; + delete newField.defaultValueLiteral; + if (type == "css") + return Object.assign(Object.assign({}, newField), { css: true }); + if (type == "richText") + return Object.assign(Object.assign({}, newField), { richText: true }); + if (type == "json") + return Object.assign(Object.assign({}, newField), { json: true }); + if (type == "shell") + return Object.assign(Object.assign({}, newField), { shell: true }); + if (type == "html") + return Object.assign(Object.assign({}, newField), { html: true }); + if (type == "yaml") + return Object.assign(Object.assign({}, newField), { yaml: true }); + if (type == "javascript") + return Object.assign(Object.assign({}, newField), { javascript: true }); + if (type == "code") + return Object.assign(Object.assign({}, newField), { code: true }); + return Object.assign({}, newField); +} diff --git a/dist/package-shared/utils/delete-by-key.d.ts b/dist/package-shared/utils/delete-by-key.d.ts new file mode 100644 index 0000000..af4b4dd --- /dev/null +++ b/dist/package-shared/utils/delete-by-key.d.ts @@ -0,0 +1,6 @@ +/** + * # Delete all matches in an Array + */ +export default function deleteByKey(arr: T[], key: keyof T | (keyof T)[]): T[]; diff --git a/dist/package-shared/utils/delete-by-key.js b/dist/package-shared/utils/delete-by-key.js new file mode 100644 index 0000000..3690228 --- /dev/null +++ b/dist/package-shared/utils/delete-by-key.js @@ -0,0 +1,29 @@ +import _ from "lodash"; +/** + * # Delete all matches in an Array + */ +export default function deleteByKey(arr, key) { + let newArray = _.cloneDeep(arr); + for (let i = 0; i < newArray.length; i++) { + const item = newArray[i]; + if (Array.isArray(key)) { + const targetMatches = []; + for (let k = 0; k < key.length; k++) { + const ky = key[k]; + const targetValue = item[ky]; + const targetOriginValue = item[ky]; + targetMatches.push(targetValue == targetOriginValue); + } + if (!targetMatches.find((mtch) => !mtch)) { + newArray.splice(i, 1); + } + } + else { + let existingValue = newArray.find((v) => v[key] == item[key]); + if (existingValue) { + newArray.splice(i, 1); + } + } + } + return newArray; +} diff --git a/dist/package-shared/utils/deserialize-query.js b/dist/package-shared/utils/deserialize-query.js index be4775d..4cc68f5 
100644 --- a/dist/package-shared/utils/deserialize-query.js +++ b/dist/package-shared/utils/deserialize-query.js @@ -1,22 +1,16 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = deserializeQuery; -const ejson_1 = __importDefault(require("./ejson")); +import EJSON from "./ejson"; /** * # Convert Serialized Query back to object */ -function deserializeQuery(query) { - let queryObject = typeof query == "object" ? query : Object(ejson_1.default.parse(query)); +export default function deserializeQuery(query) { + let queryObject = typeof query == "object" ? query : Object(EJSON.parse(query)); const keys = Object.keys(queryObject); for (let i = 0; i < keys.length; i++) { const key = keys[i]; const value = queryObject[key]; if (typeof value == "string") { if (value.match(/^\{|^\[/)) { - queryObject[key] = ejson_1.default.parse(value); + queryObject[key] = EJSON.parse(value); } } } diff --git a/dist/package-shared/utils/ejson.js b/dist/package-shared/utils/ejson.js index 8d6a00c..799eb22 100644 --- a/dist/package-shared/utils/ejson.js +++ b/dist/package-shared/utils/ejson.js @@ -1,5 +1,3 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); /** * # EJSON parse string */ @@ -32,4 +30,4 @@ const EJSON = { parse, stringify, }; -exports.default = EJSON; +export default EJSON; diff --git a/dist/package-shared/utils/empty-dir.js b/dist/package-shared/utils/empty-dir.js index 4fc500f..f7d7675 100644 --- a/dist/package-shared/utils/empty-dir.js +++ b/dist/package-shared/utils/empty-dir.js @@ -1,23 +1,17 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = emptyDirectory; -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -function emptyDirectory(dir) { +import fs from "fs"; +import path from "path"; +export default function emptyDirectory(dir) { try { - const dirContent = fs_1.default.readdirSync(dir); + const dirContent = fs.readdirSync(dir); for (let i = 0; i < dirContent.length; i++) { const fileFolder = dirContent[i]; - const fullFileFolderPath = path_1.default.join(dir, fileFolder); - const stat = fs_1.default.statSync(fullFileFolderPath); + const fullFileFolderPath = path.join(dir, fileFolder); + const stat = fs.statSync(fullFileFolderPath); if (stat.isDirectory()) { emptyDirectory(fullFileFolderPath); continue; } - fs_1.default.unlinkSync(fullFileFolderPath); + fs.unlinkSync(fullFileFolderPath); } } catch (error) { diff --git a/dist/package-shared/utils/endConnection.js b/dist/package-shared/utils/endConnection.js index 84aadd0..5703275 100644 --- a/dist/package-shared/utils/endConnection.js +++ b/dist/package-shared/utils/endConnection.js @@ -1,5 +1,3 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); /** * # End MYSQL Connection */ @@ -10,4 +8,4 @@ function endConnection(connection) { }); } } -exports.default = endConnection; +export default endConnection; diff --git a/dist/package-shared/utils/envsub.d.ts b/dist/package-shared/utils/envsub.d.ts new file mode 100644 index 0000000..ddd6ce9 --- /dev/null +++ b/dist/package-shared/utils/envsub.d.ts @@ -0,0 +1 @@ +export default function envsub(str: string): string; diff --git a/dist/package-shared/utils/envsub.js b/dist/package-shared/utils/envsub.js new file mode 100644 index 0000000..e26b9e0 --- /dev/null +++ b/dist/package-shared/utils/envsub.js @@ -0,0 +1,6 @@ +export default function envsub(str) { + return str.replace(/\$([A-Z_]+)|\${([A-Z_]+)}/g, (match, var1, var2) => { + const varName = var1 || var2; + return process.env[varName] || match; + }); +} diff --git a/dist/package-shared/utils/generateColumnDescription.js b/dist/package-shared/utils/generateColumnDescription.js index ff9abc9..3ef7c39 100644 --- a/dist/package-shared/utils/generateColumnDescription.js +++ b/dist/package-shared/utils/generateColumnDescription.js @@ -1,7 +1,4 @@ -"use strict"; // @ts-check -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = generateColumnDescription; /** ****************************************************************************** */ /** ****************************************************************************** */ /** ****************************************************************************** */ @@ -11,7 +8,7 @@ exports.default = generateColumnDescription; /** * # Generate SQL text for Field */ -function generateColumnDescription({ columnData, primaryKeySet, }) { +export default function generateColumnDescription({ columnData, primaryKeySet, }) { /** * Format tableInfoArray * diff --git a/dist/package-shared/utils/grab-api-base-path.d.ts b/dist/package-shared/utils/grab-api-base-path.d.ts new file mode 100644 index 0000000..901fa5f --- /dev/null +++ b/dist/package-shared/utils/grab-api-base-path.d.ts @@ -0,0 +1,7 @@ +declare const APIParadigms: readonly ["crud", "media", "schema"]; +type Params = { + version?: string; + paradigm?: (typeof APIParadigms)[number]; +}; +export default function grabAPIBasePath({ version, paradigm }: Params): string; +export {}; diff 
--git a/dist/package-shared/utils/grab-api-base-path.js b/dist/package-shared/utils/grab-api-base-path.js new file mode 100644 index 0000000..843dde7 --- /dev/null +++ b/dist/package-shared/utils/grab-api-base-path.js @@ -0,0 +1,8 @@ +const APIParadigms = ["crud", "media", "schema"]; +export default function grabAPIBasePath({ version, paradigm }) { + let basePath = `/api/v${version || "1"}`; + if (paradigm) { + basePath += `/${paradigm}`; + } + return basePath; +} diff --git a/dist/package-shared/utils/grab-app-main-db-schema.d.ts b/dist/package-shared/utils/grab-app-main-db-schema.d.ts new file mode 100644 index 0000000..17b3901 --- /dev/null +++ b/dist/package-shared/utils/grab-app-main-db-schema.d.ts @@ -0,0 +1,2 @@ +import { DSQL_DatabaseSchemaType } from "../types"; +export default function grabAppMainDbSchema(): DSQL_DatabaseSchemaType | undefined; diff --git a/dist/package-shared/utils/grab-app-main-db-schema.js b/dist/package-shared/utils/grab-app-main-db-schema.js new file mode 100644 index 0000000..939f1ff --- /dev/null +++ b/dist/package-shared/utils/grab-app-main-db-schema.js @@ -0,0 +1,11 @@ +import fs from "fs"; +import grabDirNames from "./backend/names/grab-dir-names"; +import EJSON from "./ejson"; +export default function grabAppMainDbSchema() { + const { appSchemaJSONFile } = grabDirNames(); + if (!fs.existsSync(appSchemaJSONFile)) { + return undefined; + } + const parsedAppSchema = EJSON.parse(fs.readFileSync(appSchemaJSONFile, "utf-8")); + return parsedAppSchema; +} diff --git a/dist/package-shared/utils/grab-app-version.d.ts b/dist/package-shared/utils/grab-app-version.d.ts new file mode 100644 index 0000000..895d9ad --- /dev/null +++ b/dist/package-shared/utils/grab-app-version.d.ts @@ -0,0 +1,2 @@ +import { AppVersions } from "../types"; +export default function grabAppVersion(): (typeof AppVersions)[number]; diff --git a/dist/package-shared/utils/grab-app-version.js b/dist/package-shared/utils/grab-app-version.js new file mode 100644 index 0000000..e11e58f --- /dev/null +++ b/dist/package-shared/utils/grab-app-version.js @@ -0,0 +1,11 @@ +import { AppVersions } from "../types"; +export default function grabAppVersion() { + const appVersionEnv = process.env.NEXT_PUBLIC_VERSION; + const finalAppVersion = (appVersionEnv || + "community"); + const targetAppVersion = AppVersions.find((version) => version.value === finalAppVersion); + if (!targetAppVersion) { + throw new Error(`Invalid App Version: ${finalAppVersion}`); + } + return targetAppVersion; +} diff --git a/dist/package-shared/utils/grab-cookie-expirt-date.js b/dist/package-shared/utils/grab-cookie-expirt-date.js index 2364014..68df9b1 100644 --- a/dist/package-shared/utils/grab-cookie-expirt-date.js +++ b/dist/package-shared/utils/grab-cookie-expirt-date.js @@ -1,15 +1,9 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = grabCookieExpiryDate; -const numberfy_1 = __importDefault(require("./numberfy")); -function grabCookieExpiryDate() { +import numberfy from "./numberfy"; +export default function grabCookieExpiryDate() { const ONE_DAY_IN_SECONDS = 60 * 60 * 24; const ONE_WEEK_IN_SECONDS = ONE_DAY_IN_SECONDS * 7; const COOKIE_EXPIRY_TIME_IN_SECONDS = process.env.DSQL_SESSION_EXPIRY_TIME - ? (0, numberfy_1.default)(process.env.DSQL_SESSION_EXPIRY_TIME) + ? 
numberfy(process.env.DSQL_SESSION_EXPIRY_TIME) : ONE_WEEK_IN_SECONDS; const COOKIE_EXPIRY_IN_MILLISECONDS = COOKIE_EXPIRY_TIME_IN_SECONDS * 1000; const COOKIE_EXPIRY_DATE = new Date(Date.now() + COOKIE_EXPIRY_IN_MILLISECONDS).toUTCString(); diff --git a/dist/package-shared/utils/grab-db-full-name.d.ts b/dist/package-shared/utils/grab-db-full-name.d.ts index 9432fc0..37d6fea 100644 --- a/dist/package-shared/utils/grab-db-full-name.d.ts +++ b/dist/package-shared/utils/grab-db-full-name.d.ts @@ -1,9 +1,17 @@ +import { UserType } from "../types"; type Param = { + /** + * Database full name or slug + */ dbName?: string; userId?: string | number; + user?: UserType | null; }; /** - * # Grab Database Full Name + * # Grab full database name + * @description Grab full database name from slug or full name + * @param param0 + * @returns */ -export default function grabDbFullName({ dbName, userId }: Param): string; +export default function grabDbFullName({ dbName, userId, user, }: Param): string | undefined; export {}; diff --git a/dist/package-shared/utils/grab-db-full-name.js b/dist/package-shared/utils/grab-db-full-name.js index c4f708f..105fcb2 100644 --- a/dist/package-shared/utils/grab-db-full-name.js +++ b/dist/package-shared/utils/grab-db-full-name.js @@ -1,16 +1,20 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = grabDbFullName; +import slugify from "./slugify"; /** - * # Grab Database Full Name + * # Grab full database name + * @description Grab full database name from slug or full name + * @param param0 + * @returns */ -function grabDbFullName({ dbName, userId }) { - if (!dbName) - throw new Error(`Database name not provided to db name parser funciton`); - const sanitizedName = dbName.replace(/[^a-z0-9\_]/g, ""); - const cleanedDbName = sanitizedName.replace(/datasquirel_user_\d+_/, ""); - if (!userId) - return cleanedDbName; - const dbNamePrefix = `datasquirel_user_${userId}_`; - return dbNamePrefix + cleanedDbName; +export default function grabDbFullName({ dbName, userId, user, }) { + const finalUserId = (user === null || user === void 0 ? 
void 0 : user.id) || userId; + if (!finalUserId) { + return dbName; + } + if (!dbName) { + return; + } + const dbNamePrefix = process.env.DSQL_USER_DB_PREFIX; + const parsedDbName = slugify(dbName, "_"); + const dbSlug = parsedDbName.replace(new RegExp(`${dbNamePrefix}_?\\d+_`), ""); + return slugify(`${dbNamePrefix}_${finalUserId}_${dbSlug}`, "_"); } diff --git a/dist/package-shared/utils/grab-db-names.d.ts b/dist/package-shared/utils/grab-db-names.d.ts new file mode 100644 index 0000000..cac5b32 --- /dev/null +++ b/dist/package-shared/utils/grab-db-names.d.ts @@ -0,0 +1,21 @@ +import { UserType } from "../types"; +type Param = { + /** + * Database full name or slug + */ + dbName?: string; + userId?: string | number; + user?: UserType | null; +}; +/** + * # Grab full database name + * @description Grab full database name from slug or full name + * @param param0 + * @returns + */ +export default function grabDbNames({ dbName, userId, user }: Param): { + userDbPrefix: string; + dbFullName: string | undefined; + dbNamePrefix: string | undefined; +}; +export {}; diff --git a/dist/package-shared/utils/grab-db-names.js b/dist/package-shared/utils/grab-db-names.js new file mode 100644 index 0000000..b56c911 --- /dev/null +++ b/dist/package-shared/utils/grab-db-names.js @@ -0,0 +1,14 @@ +import grabDbFullName from "./grab-db-full-name"; +/** + * # Grab full database name + * @description Grab full database name from slug or full name + * @param param0 + * @returns + */ +export default function grabDbNames({ dbName, userId, user }) { + const dbNamePrefix = process.env.DSQL_USER_DB_PREFIX; + const finalUserId = (user === null || user === void 0 ? void 0 : user.id) || userId; + const userDbPrefix = `${dbNamePrefix}${finalUserId}_`; + const dbFullName = grabDbFullName({ dbName, user, userId }); + return { userDbPrefix, dbFullName, dbNamePrefix }; +} diff --git a/dist/package-shared/utils/grab-docker-resource-ip-numbers.d.ts b/dist/package-shared/utils/grab-docker-resource-ip-numbers.d.ts new file mode 100644 index 0000000..9989110 --- /dev/null +++ b/dist/package-shared/utils/grab-docker-resource-ip-numbers.d.ts @@ -0,0 +1,13 @@ +export default function grabDockerResourceIPNumbers(): { + readonly db: 32; + readonly maxscale: 24; + readonly postDbSetup: 43; + readonly reverse_proxy: 34; + readonly web: 35; + readonly websocket: 36; + readonly cron: 27; + readonly db_cron: 20; + readonly replica_1: 37; + readonly replica_2: 38; + readonly web_app_post_db_setup: 71; +}; diff --git a/dist/package-shared/utils/grab-docker-resource-ip-numbers.js b/dist/package-shared/utils/grab-docker-resource-ip-numbers.js new file mode 100644 index 0000000..9a43e6d --- /dev/null +++ b/dist/package-shared/utils/grab-docker-resource-ip-numbers.js @@ -0,0 +1,15 @@ +export default function grabDockerResourceIPNumbers() { + return { + db: 32, + maxscale: 24, + postDbSetup: 43, + reverse_proxy: 34, + web: 35, + websocket: 36, + cron: 27, + db_cron: 20, + replica_1: 37, + replica_2: 38, + web_app_post_db_setup: 71, + }; +} diff --git a/dist/package-shared/utils/grab-dsql-connection.js b/dist/package-shared/utils/grab-dsql-connection.js index bcec92a..58c22db 100644 --- a/dist/package-shared/utils/grab-dsql-connection.js +++ b/dist/package-shared/utils/grab-dsql-connection.js @@ -1,17 +1,11 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = grabDSQLConnection; -const serverless_mysql_1 = __importDefault(require("serverless-mysql")); +import mysql from "serverless-mysql"; /** * # Grab General CONNECTION for DSQL */ -function grabDSQLConnection(param) { +export default function grabDSQLConnection(param) { if (global.DSQL_USE_LOCAL || (param === null || param === void 0 ? void 0 : param.local)) { return (global.DSQL_DB_CONN || - (0, serverless_mysql_1.default)({ + mysql({ config: { host: process.env.DSQL_DB_HOST, user: process.env.DSQL_DB_USERNAME, @@ -28,7 +22,7 @@ function grabDSQLConnection(param) { } if (param === null || param === void 0 ? void 0 : param.ro) { return (global.DSQL_READ_ONLY_DB_CONN || - (0, serverless_mysql_1.default)({ + mysql({ config: { host: process.env.DSQL_DB_HOST, user: process.env.DSQL_DB_READ_ONLY_USERNAME, @@ -42,7 +36,7 @@ function grabDSQLConnection(param) { } if (param === null || param === void 0 ? void 0 : param.fa) { return (global.DSQL_FULL_ACCESS_DB_CONN || - (0, serverless_mysql_1.default)({ + mysql({ config: { host: process.env.DSQL_DB_HOST, user: process.env.DSQL_DB_FULL_ACCESS_USERNAME, @@ -55,7 +49,7 @@ function grabDSQLConnection(param) { })); } return (global.DSQL_DB_CONN || - (0, serverless_mysql_1.default)({ + mysql({ config: { host: process.env.DSQL_DB_HOST, user: process.env.DSQL_DB_USERNAME, diff --git a/dist/package-shared/utils/grab-host-names.js b/dist/package-shared/utils/grab-host-names.js index f7e52be..d4944ce 100644 --- a/dist/package-shared/utils/grab-host-names.js +++ b/dist/package-shared/utils/grab-host-names.js @@ -1,16 +1,10 @@ -"use strict"; // @ts-check -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = grabHostNames; -const https_1 = __importDefault(require("https")); -const http_1 = __importDefault(require("http")); +import https from "https"; +import http from "http"; /** * # Grab Names For Query */ -function grabHostNames(param) { +export default function grabHostNames(param) { var _a, _b; const finalEnv = (param === null || param === void 0 ? void 0 : param.env) ? Object.assign(Object.assign({}, process.env), param.env) : process.env; @@ -30,7 +24,7 @@ function grabHostNames(param) { return { host: remoteHost || localHost || "datasquirel.com", port: remoteHostPort || localHostPort || 443, - scheme: (scheme === null || scheme === void 0 ? void 0 : scheme.match(/^http$/i)) ? http_1.default : https_1.default, + scheme: (scheme === null || scheme === void 0 ? void 0 : scheme.match(/^http$/i)) ? http : https, user_id: (param === null || param === void 0 ? 
void 0 : param.userId) || String(finalEnv["DSQL_API_USER_ID"] || 0), }; } diff --git a/dist/package-shared/utils/grab-instance-global-network-name.d.ts b/dist/package-shared/utils/grab-instance-global-network-name.d.ts new file mode 100644 index 0000000..d938db3 --- /dev/null +++ b/dist/package-shared/utils/grab-instance-global-network-name.d.ts @@ -0,0 +1 @@ +export default function grabInstanceGlobalNetWorkName(): string; diff --git a/dist/package-shared/utils/grab-instance-global-network-name.js b/dist/package-shared/utils/grab-instance-global-network-name.js new file mode 100644 index 0000000..45b9f1f --- /dev/null +++ b/dist/package-shared/utils/grab-instance-global-network-name.js @@ -0,0 +1,4 @@ +export default function grabInstanceGlobalNetWorkName() { + const deploymentName = process.env.DSQL_DEPLOYMENT_NAME || "dsql"; + return `${deploymentName}_dsql_global_network`; +} diff --git a/dist/package-shared/utils/grab-keys.js b/dist/package-shared/utils/grab-keys.js index 65e605a..0586fc4 100644 --- a/dist/package-shared/utils/grab-keys.js +++ b/dist/package-shared/utils/grab-keys.js @@ -1,15 +1,9 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = grabKeys; -const numberfy_1 = __importDefault(require("./numberfy")); +import numberfy from "./numberfy"; /** * # Grab Encryption Keys * @description Grab Required Encryption Keys */ -function grabKeys(param) { +export default function grabKeys(param) { return { key: (param === null || param === void 0 ? void 0 : param.encryptionKey) || process.env.DSQL_ENCRYPTION_PASSWORD, keyLen: process.env.DSQL_ENCRYPTION_KEY_LENGTH @@ -22,7 +16,7 @@ function grabKeys(param) { "aes-192-cbc", bufferAllocSize: (param === null || param === void 0 ? void 0 : param.bufferAllocSize) || (process.env.DSQL_ENCRYPTION_BUFFER_ALLOCATION_SIZE - ? (0, numberfy_1.default)(process.env.DSQL_ENCRYPTION_BUFFER_ALLOCATION_SIZE) + ? numberfy(process.env.DSQL_ENCRYPTION_BUFFER_ALLOCATION_SIZE) : undefined) || 16, }; diff --git a/dist/package-shared/utils/grab-query-and-values.js b/dist/package-shared/utils/grab-query-and-values.js index 9ce7c68..5228fcb 100644 --- a/dist/package-shared/utils/grab-query-and-values.js +++ b/dist/package-shared/utils/grab-query-and-values.js @@ -1,14 +1,8 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = apiGetGrabQueryAndValues; -const sql_generator_1 = __importDefault(require("../functions/dsql/sql/sql-generator")); -function apiGetGrabQueryAndValues({ query, values }) { +import sqlGenerator from "../functions/dsql/sql/sql-generator"; +export default function apiGetGrabQueryAndValues({ query, values }) { const queryGenObject = typeof query == "string" ? 
undefined - : (0, sql_generator_1.default)({ + : sqlGenerator({ tableName: query.table, genObject: query.query, dbFullName: query.dbFullName || "__db", diff --git a/dist/package-shared/utils/grab-sql-key-name.d.ts b/dist/package-shared/utils/grab-sql-key-name.d.ts new file mode 100644 index 0000000..8f793f2 --- /dev/null +++ b/dist/package-shared/utils/grab-sql-key-name.d.ts @@ -0,0 +1,11 @@ +type Param = { + type: "foreign_key" | "index" | "user"; + userId?: string | number; + addDate?: boolean; +}; +/** + * # Grab Key Names + * @description Grab key names for foreign keys and indexes + */ +export default function grabSQLKeyName({ type, userId, addDate }: Param): string; +export {}; diff --git a/dist/package-shared/utils/grab-sql-key-name.js b/dist/package-shared/utils/grab-sql-key-name.js new file mode 100644 index 0000000..d513637 --- /dev/null +++ b/dist/package-shared/utils/grab-sql-key-name.js @@ -0,0 +1,23 @@ +/** + * # Grab Key Names + * @description Grab key names for foreign keys and indexes + */ +export default function grabSQLKeyName({ type, userId, addDate }) { + let prefixParadigm = (() => { + if (type == "foreign_key") + return "fk"; + if (type == "index") + return "indx"; + if (type == "user") + return "user"; + return null; + })(); + let key = `dsql`; + if (prefixParadigm) + key += `_${prefixParadigm}`; + if (userId) + key += `_${userId}`; + if (addDate) + key += `_${Date.now()}`; + return key; +} diff --git a/dist/package-shared/utils/grab-sql-user-name-for-user.d.ts b/dist/package-shared/utils/grab-sql-user-name-for-user.d.ts new file mode 100644 index 0000000..cca4234 --- /dev/null +++ b/dist/package-shared/utils/grab-sql-user-name-for-user.d.ts @@ -0,0 +1 @@ +export default function grabSQLUserNameForUser(userId?: string | number): string; diff --git a/dist/package-shared/utils/grab-sql-user-name-for-user.js b/dist/package-shared/utils/grab-sql-user-name-for-user.js new file mode 100644 index 0000000..28b4b35 --- /dev/null +++ b/dist/package-shared/utils/grab-sql-user-name-for-user.js @@ -0,0 +1,3 @@ +export default function grabSQLUserNameForUser(userId) { + return `dsql_user_${userId || 0}`; +} diff --git a/dist/package-shared/utils/grab-sql-user-name.d.ts b/dist/package-shared/utils/grab-sql-user-name.d.ts new file mode 100644 index 0000000..5c36cb6 --- /dev/null +++ b/dist/package-shared/utils/grab-sql-user-name.d.ts @@ -0,0 +1,12 @@ +import { UserType } from "../types"; +type Params = { + user?: UserType | null; + name?: string; +}; +type Return = { + sqlUsername?: string; + name?: string; + nameWithoutPrefix?: string; +}; +export default function grabSQLUserName({ user, name: passedName, }: Params): Return; +export {}; diff --git a/dist/package-shared/utils/grab-sql-user-name.js b/dist/package-shared/utils/grab-sql-user-name.js new file mode 100644 index 0000000..479e696 --- /dev/null +++ b/dist/package-shared/utils/grab-sql-user-name.js @@ -0,0 +1,22 @@ +import grabSQLUserNameForUser from "./grab-sql-user-name-for-user"; +export default function grabSQLUserName({ user, name: passedName, }) { + if (!user) { + console.log("No User Found"); + return {}; + } + const sqlUsername = grabSQLUserNameForUser(user.id); + const parsedPassedName = passedName + ? passedName.replace(sqlUsername, "").replace(/^_+|_+$/, "") + : undefined; + const name = parsedPassedName + ? 
`${sqlUsername}_${parsedPassedName}` + : undefined; + if (user.isSuperUser) { + return { + sqlUsername: undefined, + name: passedName, + nameWithoutPrefix: passedName, + }; + } + return { sqlUsername, name, nameWithoutPrefix: parsedPassedName }; +} diff --git a/dist/package-shared/utils/grab-user-main-sql-user-name.d.ts b/dist/package-shared/utils/grab-user-main-sql-user-name.d.ts new file mode 100644 index 0000000..e6165de --- /dev/null +++ b/dist/package-shared/utils/grab-user-main-sql-user-name.d.ts @@ -0,0 +1,14 @@ +import { UserType } from "../types"; +type Params = { + user?: UserType | null; + HOST?: string; + username?: string; +}; +export default function grabUserMainSqlUserName({ HOST, user, username, }: Params): { + username: string; + host: string; + webHost: string; + fullName: string; + sqlUsername: string; +}; +export {}; diff --git a/dist/package-shared/utils/grab-user-main-sql-user-name.js b/dist/package-shared/utils/grab-user-main-sql-user-name.js new file mode 100644 index 0000000..0562198 --- /dev/null +++ b/dist/package-shared/utils/grab-user-main-sql-user-name.js @@ -0,0 +1,16 @@ +import grabSQLUserNameForUser from "./grab-sql-user-name-for-user"; +import grabIPAddresses from "../utils/backend/names/grab-ip-addresses"; +export default function grabUserMainSqlUserName({ HOST, user, username, }) { + const sqlUsername = grabSQLUserNameForUser(user === null || user === void 0 ? void 0 : user.id); + const { webAppIP, maxScaleIP } = grabIPAddresses(); + const finalUsername = username || sqlUsername; + const finalHost = HOST || maxScaleIP || "127.0.0.1"; + const fullName = `${finalUsername}@${webAppIP}`; + return { + username: finalUsername, + host: finalHost, + webHost: webAppIP, + fullName, + sqlUsername, + }; +} diff --git a/dist/package-shared/utils/logging/debug-log.js b/dist/package-shared/utils/logging/debug-log.js index cf80770..9dad991 100644 --- a/dist/package-shared/utils/logging/debug-log.js +++ b/dist/package-shared/utils/logging/debug-log.js @@ -1,20 +1,17 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = debugLog; -const console_colors_1 = require("../console-colors"); +import { ccol } from "../console-colors"; const LogTypes = ["error", "warning"]; -function debugLog({ log, label, title, type, addTime }) { +export default function debugLog({ log, label, title, type, addTime }) { const logType = (() => { switch (type) { case "error": - return console_colors_1.ccol.FgRed; + return ccol.FgRed; case "warning": - return console_colors_1.ccol.FgYellow; + return ccol.FgYellow; default: - return console_colors_1.ccol.FgGreen; + return ccol.FgGreen; } })(); - let logTxt = `${logType}DEBUG${console_colors_1.ccol.Reset}:::`; + let logTxt = `${logType}DEBUG${ccol.Reset}:::`; const date = new Date(); const time = date.toLocaleTimeString("en-US", { hour: "numeric", @@ -24,10 +21,10 @@ function debugLog({ log, label, title, type, addTime }) { }); const logTime = `${date.toLocaleDateString()}][${time}`; if (addTime) - logTxt = `${console_colors_1.ccol.BgWhite}[${logTime}]${console_colors_1.ccol.Reset} ` + logTxt; + logTxt = `${ccol.BgWhite}[${logTime}]${ccol.Reset} ` + logTxt; if (title) - logTxt += `${console_colors_1.ccol.FgBlue}${title}${console_colors_1.ccol.Reset}::`; + logTxt += `${ccol.FgBlue}${title}${ccol.Reset}::`; if (label) - logTxt += `${console_colors_1.ccol.FgWhite}${console_colors_1.ccol.Bright}${label}${console_colors_1.ccol.Reset} =>`; + logTxt += `${ccol.FgWhite}${ccol.Bright}${label}${ccol.Reset} =>`; 
console.log(logTxt, log); } diff --git a/dist/package-shared/utils/normalize-text.d.ts b/dist/package-shared/utils/normalize-text.d.ts new file mode 100644 index 0000000..788a451 --- /dev/null +++ b/dist/package-shared/utils/normalize-text.d.ts @@ -0,0 +1 @@ +export default function normalizeText(txt: string): string; diff --git a/dist/package-shared/utils/normalize-text.js b/dist/package-shared/utils/normalize-text.js new file mode 100644 index 0000000..c25bf7e --- /dev/null +++ b/dist/package-shared/utils/normalize-text.js @@ -0,0 +1,6 @@ +export default function normalizeText(txt) { + return txt + .replace(/\n|\r|\n\r/g, " ") + .replace(/ {2,}/g, " ") + .trim(); +} diff --git a/dist/package-shared/utils/numberfy.js b/dist/package-shared/utils/numberfy.js index 6141e80..7fbfbb9 100644 --- a/dist/package-shared/utils/numberfy.js +++ b/dist/package-shared/utils/numberfy.js @@ -1,6 +1,3 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = numberfy; /** * # Get Number from any input * @example @@ -10,7 +7,7 @@ exports.default = numberfy; * numberfy("123.456", 0) // 123 * numberfy("123.456", 3) // 123.456 */ -function numberfy(num, decimals) { +export default function numberfy(num, decimals) { var _a; try { const numberString = String(num) diff --git a/dist/package-shared/utils/parse-env.d.ts b/dist/package-shared/utils/parse-env.d.ts index f14d6ab..cfd14e1 100644 --- a/dist/package-shared/utils/parse-env.d.ts +++ b/dist/package-shared/utils/parse-env.d.ts @@ -1,3 +1,80 @@ -export default function parseEnv(envFile: string): { - [k: string]: string; +export default function parseEnv( +/** The file path to the env. Eg. /app/.env */ envFile: string): { + DSQL_HOST: string | undefined; + NEXT_PUBLIC_DSQL_HOST: string | undefined; + DSQL_STATIC_HOST: string | undefined; + DSQL_SOCKET_DOMAIN: string | undefined; + DSQL_HOST_ENV: string | undefined; + DSQL_PORT: string | undefined; + DSQL_PRODUCTION_PORT: string | undefined; + DSQL_STATIC_SERVER_PORT: string | undefined; + DSQL_SITE_URL: string | undefined; + DSQL_REMOTE_SQL_HOST: string | undefined; + NEXT_PUBLIC_DSQL_REMOTE_SQL_HOST: string | undefined; + DSQL_DB_TARGET_IP_ADDRESS: string | undefined; + NEXT_PUBLIC_VERSION: string | undefined; + DSQL_USER_DB_PREFIX: string | undefined; + DSQL_USER_DELEGATED_DB_COOKIE_PREFIX: string | undefined; + DSQL_NETWORK_IP_PREFIX: string | undefined; + DSQL_NETWORK_GATEWAY: string | undefined; + DSQL_NETWORK_SUBNET: string | undefined; + DSQL_MARIADB_MASTER_HOST: string | undefined; + DSQL_DB_HOST: string | undefined; + DSQL_WEB_APP_HOST: string | undefined; + DSQL_DB_USERNAME: string | undefined; + DSQL_DB_PASSWORD: string | undefined; + DSQL_MARIADB_ROOT_PASSWORD: string | undefined; + DSQL_REPLICATION_USER_PASSWORD: string | undefined; + DSQL_DB_NAME: string | undefined; + DSQL_MARIADB_REPLICATION_PASSWORD: string | undefined; + DSQL_MAXSCALE_PASSWORD: string | undefined; + DSQL_DB_READ_ONLY_USERNAME: string | undefined; + DSQL_DB_READ_ONLY_PASSWORD: string | undefined; + DSQL_DB_FULL_ACCESS_USERNAME: string | undefined; + DSQL_DB_FULL_ACCESS_PASSWORD: string | undefined; + DSQL_DB_EXPOSED_PORT: string | undefined; + DSQL_ENCRYPTION_PASSWORD: string | undefined; + DSQL_ENCRYPTION_SALT: string | undefined; + DSQL_SU_USER_ID: string | undefined; + DSQL_SU_USER_UUID: string | undefined; + DSQL_SU_EMAIL: string | undefined; + DSQL_GOOGLE_CLIENT_ID: string | undefined; + NEXT_PUBLIC_DSQL_GOOGLE_CLIENT_ID: string | undefined; + DSQL_FACEBOOK_APP_ID: string | 
undefined; + DSQL_FACEBOOK_SECRET: string | undefined; + DSQL_MAIL_HOST: string | undefined; + DSQL_MAIL_EMAIL: string | undefined; + DSQL_MAIL_PASSWORD: string | undefined; + DSQL_TINY_MCE_API_KEY: string | undefined; + DSQL_GITHUB_ID: string | undefined; + DSQL_GITHUB_SECRET: string | undefined; + DSQL_GITHUB_WEBHOOK_SECRET: string | undefined; + DSQL_GITHUB_WEBHOOK_URL: string | undefined; + DSQL_DEPLOY_SERVER_PORT: string | undefined; + DSQL_DOCKERFILE: string | undefined; + DSQL_VOLUME_APP: string | undefined; + DSQL_VOLUME_STATIC: string | undefined; + DSQL_VOLUME_STATIC_CONFIGURATION_FILE: string | undefined; + DSQL_VOLUME_DB: string | undefined; + DSQL_VOLUME_DB_CONFIG: string | undefined; + DSQL_VOLUME_DB_SETUP: string | undefined; + DSQL_VOLUME_DB_SSL: string | undefined; + DSQL_USER_LOGIN_KEYS_PATH: string | undefined; + DSQL_API_KEYS_PATH: string | undefined; + DSQL_APP_DIR: string | undefined; + DSQL_DATA_DIR: string | undefined; + DSQL_CONTACT_EMAIL: string | undefined; + DSQL_SSL_DIR: string | undefined; + DSQL_DEPLOYMENT_NAME: string | undefined; + DSQL_COOKIES_PREFIX: string | undefined; + DSQL_COOKIES_KEY_NAME: string | undefined; + DSQL_WEB_APP_FAIL_COUNTS: string | undefined; + NODE_ARCH: string | undefined; + DSQL_WEBSOCKET_PORT: string | undefined; + DSQL_WEBSOCKET_URL: string | undefined; + NEXT_PUBLIC_DSQL_WEBSOCKET_URL: string | undefined; + S3_ACCESS_KEY_ID: string | undefined; + S3_SECRET_ACCESS: string | undefined; + DSQL_ADDITIONAL_MARIADB_SERVERS: string | undefined; + DSQL_ARCJET_KEY: string | undefined; } | undefined; diff --git a/dist/package-shared/utils/parse-env.js b/dist/package-shared/utils/parse-env.js index db3a122..59587c8 100644 --- a/dist/package-shared/utils/parse-env.js +++ b/dist/package-shared/utils/parse-env.js @@ -1,14 +1,9 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = parseEnv; -const fs_1 = __importDefault(require("fs")); -function parseEnv(envFile) { - if (!fs_1.default.existsSync(envFile)) +import fs from "fs"; +export default function parseEnv( +/** The file path to the env. Eg. 
/app/.env */ envFile) { + if (!fs.existsSync(envFile)) return undefined; - const envTextContent = fs_1.default.readFileSync(envFile, "utf-8"); + const envTextContent = fs.readFileSync(envFile, "utf-8"); const envLines = envTextContent .split("\n") .map((ln) => ln.trim()) diff --git a/dist/package-shared/utils/purge-default-fields.d.ts b/dist/package-shared/utils/purge-default-fields.d.ts new file mode 100644 index 0000000..8285fed --- /dev/null +++ b/dist/package-shared/utils/purge-default-fields.d.ts @@ -0,0 +1,6 @@ +import { DefaultEntryType } from "../types"; +export default function purgeDefaultFields(entry: T | T[]): T | T[]; diff --git a/dist/package-shared/utils/purge-default-fields.js b/dist/package-shared/utils/purge-default-fields.js new file mode 100644 index 0000000..2b47c4d --- /dev/null +++ b/dist/package-shared/utils/purge-default-fields.js @@ -0,0 +1,27 @@ +import _ from "lodash"; +import defaultFieldsRegexp from "../functions/dsql/default-fields-regexp"; +export default function purgeDefaultFields(entry) { + const newEntry = _.cloneDeep(entry); + if (Array.isArray(newEntry)) { + const entryKeys = Object.keys(newEntry[0]); + for (let i = 0; i < newEntry.length; i++) { + for (let j = 0; j < entryKeys.length; j++) { + const entryKey = entryKeys[j]; + if (defaultFieldsRegexp.test(entryKey)) { + delete newEntry[i][entryKey]; + } + } + } + return newEntry; + } + else { + const entryKeys = Object.keys(newEntry); + for (let i = 0; i < entryKeys.length; i++) { + const entryKey = entryKeys[i]; + if (defaultFieldsRegexp.test(entryKey)) { + delete newEntry[entryKey]; + } + } + return newEntry; + } +} diff --git a/dist/package-shared/utils/serialize-cookies.js b/dist/package-shared/utils/serialize-cookies.js index 8b76b11..022ecd3 100644 --- a/dist/package-shared/utils/serialize-cookies.js +++ b/dist/package-shared/utils/serialize-cookies.js @@ -1,12 +1,9 @@ -"use strict"; // @ts-check -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = serializeCookies; /** * # Serialize Cookies * @description Convert cookie object to string array */ -function serializeCookies({ cookies, }) { +export default function serializeCookies({ cookies, }) { let cookiesStringsArray = []; for (let i = 0; i < cookies.length; i++) { const cookieObject = cookies[i]; diff --git a/dist/package-shared/utils/serialize-query.js b/dist/package-shared/utils/serialize-query.js index 7865a66..2dd175a 100644 --- a/dist/package-shared/utils/serialize-query.js +++ b/dist/package-shared/utils/serialize-query.js @@ -1,14 +1,8 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = serializeQuery; -const ejson_1 = __importDefault(require("./ejson")); +import EJSON from "./ejson"; /** * # Serialize Query */ -function serializeQuery(query) { +export default function serializeQuery(query) { let str = "?"; if (typeof query !== "object") { console.log("Invalid Query type"); @@ -29,7 +23,7 @@ function serializeQuery(query) { return; const value = query[key]; if (typeof value === "object") { - const jsonStr = ejson_1.default.stringify(value); + const jsonStr = EJSON.stringify(value); queryArr.push(`${key}=${encodeURIComponent(String(jsonStr))}`); } else if (typeof value === "string" || typeof value === "number") { diff --git a/dist/package-shared/utils/setup-global-network.d.ts b/dist/package-shared/utils/setup-global-network.d.ts new file mode 100644 index 0000000..c66eb0f --- /dev/null +++ b/dist/package-shared/utils/setup-global-network.d.ts @@ -0,0 +1 @@ +export default function setupGlobalNetwork(): void; diff --git a/dist/package-shared/utils/setup-global-network.js b/dist/package-shared/utils/setup-global-network.js new file mode 100644 index 0000000..0031be5 --- /dev/null +++ b/dist/package-shared/utils/setup-global-network.js @@ -0,0 +1,17 @@ +import { execSync } from "child_process"; +import grabInstanceGlobalNetWorkName from "./grab-instance-global-network-name"; +import grabIPAddresses from "./backend/names/grab-ip-addresses"; +export default function setupGlobalNetwork() { + const globalNetworkName = grabInstanceGlobalNetWorkName(); + const { globalIPPrefix } = grabIPAddresses(); + try { + execSync(`docker network rm ${globalNetworkName}`, {}); + } + catch (error) { } + let newNtwkCmd = `docker network create`; + newNtwkCmd += ` --driver bridge`; + newNtwkCmd += ` --subnet ${globalIPPrefix}.0/24`; + newNtwkCmd += ` --gateway ${globalIPPrefix}.1`; + newNtwkCmd += ` ${globalNetworkName}`; + execSync(newNtwkCmd); +} diff --git a/dist/package-shared/utils/slug-to-normal-text.d.ts b/dist/package-shared/utils/slug-to-normal-text.d.ts new file mode 100644 index 0000000..b120137 --- /dev/null +++ b/dist/package-shared/utils/slug-to-normal-text.d.ts @@ -0,0 +1 @@ +export default function slugToNormalText(str?: string): string; diff --git a/dist/package-shared/utils/slug-to-normal-text.js b/dist/package-shared/utils/slug-to-normal-text.js new file mode 100644 index 0000000..01afa5a --- /dev/null +++ b/dist/package-shared/utils/slug-to-normal-text.js @@ -0,0 +1,13 @@ +export default function slugToNormalText(str) { + if (!str) + return ""; + return str + .toLowerCase() + .replace(/ /g, "-") + .replace(/[^a-z0-9\-]/g, "-") + .replace(/-{2,}/g, "-") + .replace(/[-]/g, " ") + .split(" ") + .map((word) => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase()) + .join(" "); +} diff --git a/dist/package-shared/utils/slugToCamelTitle.js b/dist/package-shared/utils/slugToCamelTitle.js index 7c9023b..9e6772e 100644 --- a/dist/package-shared/utils/slugToCamelTitle.js +++ b/dist/package-shared/utils/slugToCamelTitle.js @@ -1,10 +1,7 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = slugToCamelTitle; /** * # Slug to Camel case Title */ -function slugToCamelTitle(text) { +export default function slugToCamelTitle(text) { if (text) { let addArray = text.split("-").filter((item) => item !== ""); let camelArray = addArray.map((item) => { diff --git a/dist/package-shared/utils/slugify.d.ts 
b/dist/package-shared/utils/slugify.d.ts index 9758272..2a076a1 100644 --- a/dist/package-shared/utils/slugify.d.ts +++ b/dist/package-shared/utils/slugify.d.ts @@ -6,4 +6,4 @@ * slugify("Yes!") // "yes" * slugify("Hello!!! World!") // "hello-world" */ -export default function slugify(str?: string): string; +export default function slugify(str?: string, divider?: "-" | "_" | null, allowTrailingDash?: boolean | null): string; diff --git a/dist/package-shared/utils/slugify.js b/dist/package-shared/utils/slugify.js index 82ce511..92a11ba 100644 --- a/dist/package-shared/utils/slugify.js +++ b/dist/package-shared/utils/slugify.js @@ -1,7 +1,3 @@ -"use strict"; -// @ts-check -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = slugify; /** * # Return the slug of a string * @@ -10,19 +6,23 @@ exports.default = slugify; * slugify("Yes!") // "yes" * slugify("Hello!!! World!") // "hello-world" */ -function slugify(str) { +export default function slugify(str, divider, allowTrailingDash) { + const finalSlugDivider = divider || "-"; try { if (!str) return ""; - return String(str) + let finalStr = String(str) .trim() .toLowerCase() .replace(/ {2,}/g, " ") - .replace(/ /g, "-") - .replace(/[^a-z0-9]/g, "-") - .replace(/-{2,}/g, "-") - .replace(/^-/, "") - .replace(/-$/, ""); + .replace(/ /g, finalSlugDivider) + .replace(/[^a-z0-9]/g, finalSlugDivider) + .replace(/-{2,}|_{2,}/g, finalSlugDivider) + .replace(/^-/, ""); + if (allowTrailingDash) { + return finalStr; + } + return finalStr.replace(/-$/, ""); } catch (error) { console.log(`Slugify ERROR: ${error.message}`); diff --git a/dist/package-shared/utils/sql-equality-parser.d.ts b/dist/package-shared/utils/sql-equality-parser.d.ts new file mode 100644 index 0000000..cd5a253 --- /dev/null +++ b/dist/package-shared/utils/sql-equality-parser.d.ts @@ -0,0 +1,2 @@ +import { ServerQueryEqualities } from "../types"; +export default function sqlEqualityParser(eq: (typeof ServerQueryEqualities)[number]): string; diff --git a/dist/package-shared/utils/sql-equality-parser.js b/dist/package-shared/utils/sql-equality-parser.js new file mode 100644 index 0000000..7623dc2 --- /dev/null +++ b/dist/package-shared/utils/sql-equality-parser.js @@ -0,0 +1,38 @@ +export default function sqlEqualityParser(eq) { + switch (eq) { + case "EQUAL": + return "="; + case "LIKE": + return "LIKE"; + case "NOT LIKE": + return "NOT LIKE"; + case "NOT EQUAL": + return "<>"; + case "IN": + return "IN"; + case "NOT IN": + return "NOT IN"; + case "BETWEEN": + return "BETWEEN"; + case "NOT BETWEEN": + return "NOT BETWEEN"; + case "IS NULL": + return "IS NULL"; + case "IS NOT NULL": + return "IS NOT NULL"; + case "EXISTS": + return "EXISTS"; + case "NOT EXISTS": + return "NOT EXISTS"; + case "GREATER THAN": + return ">"; + case "GREATER THAN OR EQUAL": + return ">="; + case "LESS THAN": + return "<"; + case "LESS THAN OR EQUAL": + return "<="; + default: + return "="; + } +} diff --git a/dist/package-shared/utils/trim-sql.js b/dist/package-shared/utils/trim-sql.js index 21a1916..45d8305 100644 --- a/dist/package-shared/utils/trim-sql.js +++ b/dist/package-shared/utils/trim-sql.js @@ -1,11 +1,8 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = trimSql; /** * # Trim SQL * @description Remove Returns and miltiple spaces from SQL Query */ -function trimSql(sql) { +export default function trimSql(sql) { return sql .replace(/\n|\r|\n\r|\r\n/gm, " ") .replace(/ {2,}/g, " ") diff --git 
a/dist/package-shared/utils/unique-by-key.d.ts b/dist/package-shared/utils/unique-by-key.d.ts new file mode 100644 index 0000000..0276e48 --- /dev/null +++ b/dist/package-shared/utils/unique-by-key.d.ts @@ -0,0 +1,3 @@ +export default function uniqueByKey(arr: T[], key: keyof T | (keyof T)[]): T[]; diff --git a/dist/package-shared/utils/unique-by-key.js b/dist/package-shared/utils/unique-by-key.js new file mode 100644 index 0000000..c2c3010 --- /dev/null +++ b/dist/package-shared/utils/unique-by-key.js @@ -0,0 +1,28 @@ +import slugify from "./slugify"; +export default function uniqueByKey(arr, key) { + let newArray = []; + let uniqueValues = []; + for (let i = 0; i < arr.length; i++) { + const item = arr[i]; + let targetValue; + if (Array.isArray(key)) { + const targetVals = []; + for (let k = 0; k < key.length; k++) { + const ky = key[k]; + const targetValuek = slugify(String(item[ky])); + targetVals.push(targetValuek); + } + targetValue = slugify(targetVals.join(",")); + } + else { + targetValue = slugify(String(item[key])); + } + if (!targetValue) + continue; + if (uniqueValues.includes(targetValue)) + continue; + newArray.push(item); + uniqueValues.push(targetValue); + } + return newArray; +} diff --git a/dist/package-shared/utils/update-grastate-file-to-latest.d.ts b/dist/package-shared/utils/update-grastate-file-to-latest.d.ts new file mode 100644 index 0000000..476233e --- /dev/null +++ b/dist/package-shared/utils/update-grastate-file-to-latest.d.ts @@ -0,0 +1 @@ +export default function updateGrastateToLatest(): void; diff --git a/dist/package-shared/utils/update-grastate-file-to-latest.js b/dist/package-shared/utils/update-grastate-file-to-latest.js new file mode 100644 index 0000000..e08bcdb --- /dev/null +++ b/dist/package-shared/utils/update-grastate-file-to-latest.js @@ -0,0 +1,8 @@ +import fs from "fs"; +import grabDirNames from "./backend/names/grab-dir-names"; +export default function updateGrastateToLatest() { + const { mainDbGrastateDatFile } = grabDirNames(); + const existingGrastateDatFile = fs.readFileSync(mainDbGrastateDatFile, "utf-8"); + const newGrastateDatFile = existingGrastateDatFile.replace(/safe_to_bootstrap: .*/, `safe_to_bootstrap: 1`); + fs.writeFileSync(mainDbGrastateDatFile, newGrastateDatFile, "utf-8"); +} diff --git a/engine/schema-to-typedef.ts b/engine/schema-to-typedef.ts index 7c0b088..10ffc4b 100644 --- a/engine/schema-to-typedef.ts +++ b/engine/schema-to-typedef.ts @@ -49,7 +49,7 @@ if (args.values.envfile && typeof args.values.envfile == "string") { const finalEnvPath = path.resolve(process.cwd(), args.values.envfile); if (fs.existsSync(finalEnvPath)) { const parsedEnv = parseEnv(finalEnvPath); - appendedEnv = parsedEnv || {}; + appendedEnv = (parsedEnv || {}) as any; if (args.values.debug) { debugLog({ diff --git a/index.ts b/index.ts index a0fa618..c415f08 100644 --- a/index.ts +++ b/index.ts @@ -48,6 +48,8 @@ import dsqlMethodCrud from "./package-shared/utils/data-fetching/method-crud"; import debugLog from "./package-shared/utils/logging/debug-log"; import { ErrorCallback } from "./package-shared/types"; import parseEnv from "./package-shared/utils/parse-env"; +import crud from "./package-shared/api/crud"; +import media from "./package-shared/api/media"; /** * User Functions Object @@ -72,12 +74,11 @@ const user = { }; /** - * Media Functions Object + * API Functions Object */ -const media = { - uploadImage: uploadImage, - uploadFile: uploadFile, - deleteFile: deleteFile, +const api = { + crud, + media, }; /** @@ -94,6 +95,10 @@ const sql = { 
* Main Export */ const datasquirel = { + /** + * API Actions + */ + api, /** * Get Action */ @@ -102,7 +107,6 @@ const datasquirel = { * Post Action */ post, - media, user, getSchema, client: datasquirelClient, diff --git a/package-shared/actions/post.ts b/package-shared/actions/post.ts index e3de7fa..82b069d 100644 --- a/package-shared/actions/post.ts +++ b/package-shared/actions/post.ts @@ -76,7 +76,7 @@ export default async function post({ } return await apiPost({ - dbFullName: DSQL_DB_NAME, + dbFullName: database || DSQL_DB_NAME, query, dbSchema, queryValues, diff --git a/package-shared/actions/users/login-user.ts b/package-shared/actions/users/login-user.ts index afb4c66..5315a10 100644 --- a/package-shared/actions/users/login-user.ts +++ b/package-shared/actions/users/login-user.ts @@ -1,4 +1,3 @@ -import http from "http"; import fs from "fs"; import path from "path"; import encrypt from "../../functions/dsql/encrypt"; @@ -9,40 +8,11 @@ import { writeAuthFile } from "../../functions/backend/auth/write-auth-files"; import { APILoginFunctionReturn, DSQL_DatabaseSchemaType, + LoginUserParam, PackageUserLoginRequestBody, } from "../../types"; import debugLog from "../../utils/logging/debug-log"; import grabCookieExpiryDate from "../../utils/grab-cookie-expirt-date"; -import emailRegexCheck from "../../functions/email/verification/email-regex-test"; -import emailMxLookup from "../../functions/email/verification/email-mx-lookup"; -import validateEmail from "../../functions/email/fns/validate-email"; - -type Param = { - key?: string; - database: string; - payload: { - email?: string; - username?: string; - password?: string; - }; - additionalFields?: string[]; - request?: http.IncomingMessage & { [s: string]: any }; - response?: http.ServerResponse & { [s: string]: any }; - encryptionKey?: string; - encryptionSalt?: string; - email_login?: boolean; - email_login_code?: string; - temp_code_field?: string; - token?: boolean; - user_id?: string | number; - skipPassword?: boolean; - debug?: boolean; - skipWriteAuthFile?: boolean; - apiUserID?: string | number; - dbUserId?: string | number; - cleanupTokens?: boolean; - secureCookie?: boolean; -}; /** * # Login A user @@ -68,7 +38,7 @@ export default async function loginUser({ cleanupTokens, secureCookie, request, -}: Param): Promise { +}: LoginUserParam): Promise { const grabedHostNames = grabHostNames({ userId: user_id || apiUserID }); const { host, port, scheme } = grabedHostNames; const COOKIE_EXPIRY_DATE = grabCookieExpiryDate(); @@ -156,7 +126,7 @@ export default async function loginUser({ } catch (error) {} httpResponse = await apiLoginUser({ - database: process.env.DSQL_DB_NAME || "", + database: database || process.env.DSQL_DB_NAME || "", email: payload.email, username: payload.username, password: payload.password, diff --git a/package-shared/actions/users/send-email-code.ts b/package-shared/actions/users/send-email-code.ts index 95f598c..d27213c 100644 --- a/package-shared/actions/users/send-email-code.ts +++ b/package-shared/actions/users/send-email-code.ts @@ -23,20 +23,24 @@ type Param = { /** * # Send Email Code to a User */ -export default async function sendEmailCode({ - key, - email, - database, - temp_code_field_name, - mail_domain, - mail_password, - mail_username, - mail_port, - sender, - user_id, - response, - extraCookies, -}: Param): Promise { +export default async function sendEmailCode( + params: Param +): Promise { + const { + key, + email, + database, + temp_code_field_name, + mail_domain, + mail_password, + 
mail_username, + mail_port, + sender, + user_id, + response, + extraCookies, + } = params; + const grabedHostNames = grabHostNames(); const { host, port, scheme } = grabedHostNames; diff --git a/package-shared/actions/users/user-auth.ts b/package-shared/actions/users/user-auth.ts index 5208722..24b8155 100644 --- a/package-shared/actions/users/user-auth.ts +++ b/package-shared/actions/users/user-auth.ts @@ -3,7 +3,7 @@ import decrypt from "../../functions/dsql/decrypt"; import getAuthCookieNames from "../../functions/backend/cookies/get-auth-cookie-names"; import { checkAuthFile } from "../../functions/backend/auth/write-auth-files"; import parseCookies from "../../utils/backend/parseCookies"; -import { AuthenticatedUser } from "../../types"; +import { AuthenticatedUser, DATASQUIREL_LoggedInUser } from "../../types"; import getCsrfHeaderName from "../../actions/get-csrf-header-name"; import grabHostNames from "../../utils/grab-host-names"; import debugLog from "../../utils/logging/debug-log"; @@ -128,8 +128,7 @@ export default function userAuth({ }; } - let userObject: import("../../types").DATASQUIREL_LoggedInUser = - JSON.parse(userPayloadJSON); + let userObject: DATASQUIREL_LoggedInUser = JSON.parse(userPayloadJSON); if (debug) { debugLog({ diff --git a/package-shared/api/crud/delete.ts b/package-shared/api/crud/delete.ts new file mode 100644 index 0000000..1b16624 --- /dev/null +++ b/package-shared/api/crud/delete.ts @@ -0,0 +1,29 @@ +import path from "path"; +import queryDSQLAPI from "../../functions/api/query-dsql-api"; +import { DsqlCrudQueryObject, SQLDeleteData } from "../../types"; +import grabAPIBasePath from "../../utils/grab-api-base-path"; + +type Params = { + dbName: string; + tableName: string; + deleteSpec?: T & { deleteKeyValues?: SQLDeleteData[] }; + targetID?: string | number; +}; + +export default async function apiCrudDELETE< + T extends { [key: string]: any } = { [key: string]: any } +>({ dbName, tableName, deleteSpec, targetID }: Params) { + const basePath = grabAPIBasePath({ paradigm: "crud" }); + + const finalID = typeof targetID === "number" ? String(targetID) : targetID; + + const finalPath = path.join(basePath, dbName, tableName, finalID || ""); + + const GET_RES = await queryDSQLAPI>({ + method: "DELETE", + path: finalPath, + body: deleteSpec, + }); + + return GET_RES; +} diff --git a/package-shared/api/crud/get.ts b/package-shared/api/crud/get.ts new file mode 100644 index 0000000..96767f3 --- /dev/null +++ b/package-shared/api/crud/get.ts @@ -0,0 +1,34 @@ +import path from "path"; +import queryDSQLAPI from "../../functions/api/query-dsql-api"; +import { APIResponseObject, DsqlCrudQueryObject } from "../../types"; +import grabAPIBasePath from "../../utils/grab-api-base-path"; + +type Params = { + dbName: string; + tableName: string; + query?: DsqlCrudQueryObject; + targetId?: string | number; +}; + +export default async function apiCrudGET< + T extends { [key: string]: any } = { [key: string]: any } +>({ + dbName, + tableName, + query, + targetId, +}: Params): Promise { + const basePath = grabAPIBasePath({ paradigm: "crud" }); + + const finalID = typeof targetId === "number" ? 
String(targetId) : targetId; + + const finalPath = path.join(basePath, dbName, tableName, finalID || ""); + + const GET_RES = await queryDSQLAPI>({ + method: "GET", + path: finalPath, + query, + }); + + return GET_RES; +} diff --git a/package-shared/api/crud/index.ts b/package-shared/api/crud/index.ts new file mode 100644 index 0000000..8de34aa --- /dev/null +++ b/package-shared/api/crud/index.ts @@ -0,0 +1,14 @@ +import apiCrudGET from "./get"; +import apiCrudPOST from "./post"; +import apiCrudPUT from "./put"; +import apiCrudDELETE from "./delete"; + +const crud = { + get: apiCrudGET, + insert: apiCrudPOST, + update: apiCrudPUT, + delete: apiCrudDELETE, + options: async () => {}, +}; + +export default crud; diff --git a/package-shared/api/crud/post.ts b/package-shared/api/crud/post.ts new file mode 100644 index 0000000..0e2ece9 --- /dev/null +++ b/package-shared/api/crud/post.ts @@ -0,0 +1,42 @@ +import path from "path"; +import queryDSQLAPI from "../../functions/api/query-dsql-api"; +import { APIResponseObject, DsqlCrudQueryObject } from "../../types"; +import grabAPIBasePath from "../../utils/grab-api-base-path"; + +export type APICrudPostParams< + T extends { [key: string]: any } = { [key: string]: any } +> = { + dbName: string; + tableName: string; + body: T; + update?: boolean; +}; + +export default async function apiCrudPOST< + T extends { [key: string]: any } = { [key: string]: any } +>({ + dbName, + tableName, + body, + update, +}: APICrudPostParams): Promise { + const basePath = grabAPIBasePath({ paradigm: "crud" }); + + const passedID = body.id as string | number | undefined; + + const finalID = update + ? typeof passedID === "number" + ? String(passedID) + : passedID + : undefined; + + const finalPath = path.join(basePath, dbName, tableName, finalID || ""); + + const GET_RES = await queryDSQLAPI>({ + method: update ? "PUT" : "POST", + path: finalPath, + body, + }); + + return GET_RES; +} diff --git a/package-shared/api/crud/put.ts b/package-shared/api/crud/put.ts new file mode 100644 index 0000000..a1a99e7 --- /dev/null +++ b/package-shared/api/crud/put.ts @@ -0,0 +1,25 @@ +import apiCrudPOST, { APICrudPostParams } from "./post"; + +type Params = Omit< + APICrudPostParams, + "update" +> & { + targetID: string | number; +}; + +export default async function apiCrudPUT< + T extends { [key: string]: any } = { [key: string]: any } +>({ dbName, tableName, body, targetID }: Params) { + const updatedBody = { ...body } as any; + + if (targetID) { + updatedBody["id"] = targetID; + } + + return await apiCrudPOST({ + dbName, + tableName, + body: updatedBody, + update: true, + }); +} diff --git a/package-shared/api/media/delete.ts b/package-shared/api/media/delete.ts new file mode 100644 index 0000000..a8c85ec --- /dev/null +++ b/package-shared/api/media/delete.ts @@ -0,0 +1,32 @@ +import queryDSQLAPI from "../../functions/api/query-dsql-api"; +import { APIResponseObject } from "../../types"; +import { DSQL_DATASQUIREL_USER_MEDIA } from "../../types/dsql"; +import path from "path"; +import grabAPIBasePath from "../../utils/grab-api-base-path"; + +export default async function apiMediaDELETE(params: { + mediaID?: string | number; +}): Promise< + APIResponseObject< + DSQL_DATASQUIREL_USER_MEDIA | DSQL_DATASQUIREL_USER_MEDIA[] + > +> { + const basePath = grabAPIBasePath({ paradigm: "media" }); + + const mediaID = params.mediaID + ? typeof params.mediaID === "number" + ? 
String(params.mediaID) + : params.mediaID + : undefined; + + const finalPath = path.join(basePath, mediaID || ""); + + const DELETE_MEDIA_RES = await queryDSQLAPI({ + method: "DELETE", + path: finalPath, + }); + + return DELETE_MEDIA_RES as APIResponseObject< + DSQL_DATASQUIREL_USER_MEDIA | DSQL_DATASQUIREL_USER_MEDIA[] + >; +} diff --git a/package-shared/api/media/get.ts b/package-shared/api/media/get.ts new file mode 100644 index 0000000..55f5305 --- /dev/null +++ b/package-shared/api/media/get.ts @@ -0,0 +1,33 @@ +import queryDSQLAPI from "../../functions/api/query-dsql-api"; +import { APIGetMediaParams, APIResponseObject } from "../../types"; +import path from "path"; +import { DSQL_DATASQUIREL_USER_MEDIA } from "../../types/dsql"; +import grabAPIBasePath from "../../utils/grab-api-base-path"; + +export default async function apiMediaGET( + params: APIGetMediaParams +): Promise< + APIResponseObject< + DSQL_DATASQUIREL_USER_MEDIA | DSQL_DATASQUIREL_USER_MEDIA[] + > +> { + const basePath = grabAPIBasePath({ paradigm: "media" }); + + const mediaID = params.mediaID + ? typeof params.mediaID === "number" + ? String(params.mediaID) + : params.mediaID + : undefined; + + const finalPath = path.join(basePath, mediaID || ""); + + const GET_MEDIA_RES = await queryDSQLAPI({ + method: "GET", + path: finalPath, + query: params, + }); + + return GET_MEDIA_RES as APIResponseObject< + DSQL_DATASQUIREL_USER_MEDIA | DSQL_DATASQUIREL_USER_MEDIA[] + >; +} diff --git a/package-shared/api/media/index.ts b/package-shared/api/media/index.ts new file mode 100644 index 0000000..3c9fc54 --- /dev/null +++ b/package-shared/api/media/index.ts @@ -0,0 +1,11 @@ +import apiMediaGET from "./get"; +import apiMediaPOST from "./post"; +import apiMediaDELETE from "./delete"; + +const media = { + get: apiMediaGET, + add: apiMediaPOST, + delete: apiMediaDELETE, +}; + +export default media; diff --git a/package-shared/api/media/post.ts b/package-shared/api/media/post.ts new file mode 100644 index 0000000..90cc570 --- /dev/null +++ b/package-shared/api/media/post.ts @@ -0,0 +1,24 @@ +import queryDSQLAPI from "../../functions/api/query-dsql-api"; +import { AddMediaAPIBody, APIResponseObject } from "../../types"; +import { DSQL_DATASQUIREL_USER_MEDIA } from "../../types/dsql"; +import grabAPIBasePath from "../../utils/grab-api-base-path"; + +export default async function apiMediaPOST( + params: AddMediaAPIBody +): Promise< + APIResponseObject< + DSQL_DATASQUIREL_USER_MEDIA | DSQL_DATASQUIREL_USER_MEDIA[] + > +> { + const basePath = grabAPIBasePath({ paradigm: "media" }); + + const POST_MEDIA_RES = await queryDSQLAPI({ + method: "POST", + path: basePath, + body: params, + }); + + return POST_MEDIA_RES as APIResponseObject< + DSQL_DATASQUIREL_USER_MEDIA | DSQL_DATASQUIREL_USER_MEDIA[] + >; +} diff --git a/package-shared/api/user/index.ts b/package-shared/api/user/index.ts new file mode 100644 index 0000000..a445ca0 --- /dev/null +++ b/package-shared/api/user/index.ts @@ -0,0 +1,3 @@ +const user = {}; + +export default user; diff --git a/package-shared/data/data-types.ts b/package-shared/data/data-types.ts new file mode 100644 index 0000000..bad6714 --- /dev/null +++ b/package-shared/data/data-types.ts @@ -0,0 +1,105 @@ +const DataTypes = [ + { + title: "VARCHAR", + name: "VARCHAR", + value: "0-255", + argument: true, + description: + "Varchar is simply letters and numbers within the range 0 - 255", + maxValue: 255, + }, + { + title: "TINYINT", + name: "TINYINT", + value: "0-100", + description: "TINYINT means Integers: 0 to 100", + 
maxValue: 127, + }, + { + title: "SMALLINT", + name: "SMALLINT", + value: "0-255", + description: "SMALLINT means Integers: 0 to 240933", + maxValue: 32767, + }, + { + title: "MEDIUMINT", + name: "MEDIUMINT", + value: "0-255", + description: "MEDIUMINT means Integers: 0 to 1245568545560", + maxValue: 8388607, + }, + { + title: "INT", + name: "INT", + value: "0-255", + description: "INT means Integers: 0 to 12560", + maxValue: 2147483647, + }, + { + title: "BIGINT", + name: "BIGINT", + value: "0-255", + description: "BIGINT means Integers: 0 to 1245569056767568545560", + maxValue: 2e63, + }, + { + title: "TINYTEXT", + name: "TINYTEXT", + value: "0-255", + description: "Text with 255 max characters", + maxValue: 127, + }, + { + title: "TEXT", + name: "TEXT", + value: "0-100", + description: "MEDIUMTEXT is just text with max length 16,777,215", + }, + { + title: "MEDIUMTEXT", + name: "MEDIUMTEXT", + value: "0-255", + description: "MEDIUMTEXT is just text with max length 16,777,215", + }, + { + title: "LONGTEXT", + name: "LONGTEXT", + value: "0-255", + description: "LONGTEXT is just text with max length 4,294,967,295", + }, + { + title: "DECIMAL", + name: "DECIMAL", + description: "Numbers with decimals", + integer: "1-100", + decimals: "1-4", + }, + { + title: "FLOAT", + name: "FLOAT", + description: "Numbers with decimals", + integer: "1-100", + decimals: "1-4", + }, + { + title: "DOUBLE", + name: "DOUBLE", + description: "Numbers with decimals", + integer: "1-100", + decimals: "1-4", + }, + { + title: "UUID", + name: "UUID", + valueLiteral: "UUID()", + description: "A Unique ID", + }, + { + title: "TIMESTAMP", + name: "TIMESTAMP", + description: "Time Stamp", + }, +] as const; + +export default DataTypes; diff --git a/package-shared/data/dataTypes.json b/package-shared/data/dataTypes.json index 2207d5a..c410934 100644 --- a/package-shared/data/dataTypes.json +++ b/package-shared/data/dataTypes.json @@ -88,6 +88,14 @@ "integer": "1-100", "decimals": "1-4" }, + { + "title": "OPTIONS", + "name": "VARCHAR", + "value": "250", + "argument": true, + "description": "This is a custom field which is a varchar under the hood", + "maxValue": 255 + }, { "title": "UUID", "name": "UUID", diff --git a/package-shared/dict/app-names.ts b/package-shared/dict/app-names.ts new file mode 100644 index 0000000..5bd5909 --- /dev/null +++ b/package-shared/dict/app-names.ts @@ -0,0 +1,8 @@ +export const AppNames = { + MaxScaleUserName: "dsql_maxscale_user", + ReplicaUserName: "dsql_replication_user", + DsqlDbPrefix: "datasquirel_user_", + PrivateMediaProceedureName: "dsql_UpdateUserMedia", + PrivateMediaInsertTriggerName: "dsql_trg_user_private_folders_insert", + PrivateMediaDeleteTriggerName: "dsql_trg_user_private_folders_delete", +} as const; diff --git a/package-shared/dict/cookie-names.ts b/package-shared/dict/cookie-names.ts new file mode 100644 index 0000000..dc8c501 --- /dev/null +++ b/package-shared/dict/cookie-names.ts @@ -0,0 +1,5 @@ +export const CookieNames = { + OneTimeLoginEmail: "dsql-one-time-login-email", + DelegatedUserId: "dsql-delegated-user-id", + DelegatedDatabase: "dsql-delegated-database", +} as const; diff --git a/package-shared/dict/local-storage-dict.ts b/package-shared/dict/local-storage-dict.ts new file mode 100644 index 0000000..ea2cdff --- /dev/null +++ b/package-shared/dict/local-storage-dict.ts @@ -0,0 +1,9 @@ +import getCsrfHeaderName from "../actions/get-csrf-header-name"; + +export const LocalStorageDict = { + OneTimeEmail: "dsql-one-time-login-email", + User: "user", + CSRF: 
getCsrfHeaderName(), + CurrentQueue: "current_queue", + DiskUsage: "disk_usage", +}; diff --git a/package-shared/dict/resource-limits.ts b/package-shared/dict/resource-limits.ts new file mode 100644 index 0000000..80f940c --- /dev/null +++ b/package-shared/dict/resource-limits.ts @@ -0,0 +1,7 @@ +const ResourceLimits = { + user_databases: 20, + table_entries: 20, + general: 20, +} as const; + +export default ResourceLimits; diff --git a/package-shared/functions/api/query-dsql-api.ts b/package-shared/functions/api/query-dsql-api.ts new file mode 100644 index 0000000..6ceb271 --- /dev/null +++ b/package-shared/functions/api/query-dsql-api.ts @@ -0,0 +1,116 @@ +import path from "path"; + +import { OutgoingHttpHeaders } from "http"; +import { + APIResponseObject, + DataCrudRequestMethods, + DataCrudRequestMethodsLowerCase, +} from "../../types"; +import grabHostNames from "../../utils/grab-host-names"; +import serializeQuery from "../../utils/serialize-query"; + +type Param = { + key?: string; + body?: T; + query?: T; + useDefault?: boolean; + path: string; + method?: + | (typeof DataCrudRequestMethods)[number] + | (typeof DataCrudRequestMethodsLowerCase)[number]; +}; + +/** + * # Query DSQL API + */ +export default async function queryDSQLAPI< + T = { [k: string]: any }, + P = { [k: string]: any } +>({ + key, + body, + query, + useDefault, + path: passedPath, + method, +}: Param): Promise> { + const grabedHostNames = grabHostNames({ useDefault }); + const { host, port, scheme } = grabedHostNames; + + try { + /** + * Make https request + * + * @description make a request to datasquirel.com + */ + const httpResponse = await new Promise((resolve, reject) => { + const reqPayload = body ? JSON.stringify(body) : undefined; + + let headers: OutgoingHttpHeaders = { + "Content-Type": "application/json", + Authorization: + key || + (!method || method == "GET" || method == "get" + ? process.env.DSQL_READ_ONLY_API_KEY + : undefined) || + process.env.DSQL_FULL_ACCESS_API_KEY || + process.env.DSQL_API_KEY, + }; + + if (reqPayload) { + headers["Content-Length"] = Buffer.from(reqPayload).length; + } + + let finalPath = path.join("/", passedPath); + + if (query) { + const queryString = serializeQuery(query); + finalPath += `${queryString}`; + } + + const httpsRequest = scheme.request( + { + method: method || "GET", + headers, + port, + hostname: host, + path: finalPath, + }, + + /** + * Callback Function + * + * @description https request callback + */ + (response) => { + var str = ""; + + response.on("data", function (chunk) { + str += chunk; + }); + + response.on("end", function () { + resolve(JSON.parse(str)); + }); + + response.on("error", (err) => { + reject(err); + }); + } + ); + + if (reqPayload) { + httpsRequest.write(reqPayload); + } + httpsRequest.end(); + }); + + return httpResponse as APIResponseObject

; + } catch (error: any) { + return { + success: false, + payload: undefined, + msg: error.message, + }; + } +} diff --git a/package-shared/functions/api/query/get.ts b/package-shared/functions/api/query/get.ts index c10f465..54648f6 100644 --- a/package-shared/functions/api/query/get.ts +++ b/package-shared/functions/api/query/get.ts @@ -73,11 +73,8 @@ export default async function apiGet< if (targetTable) { const clonedTargetTable = _.cloneDeep(targetTable); delete clonedTargetTable.childTable; - delete clonedTargetTable.childTableDbFullName; - delete clonedTargetTable.childTableName; delete clonedTargetTable.childrenTables; delete clonedTargetTable.updateData; - delete clonedTargetTable.tableNameOld; delete clonedTargetTable.indexes; tableSchema = clonedTargetTable; } diff --git a/package-shared/functions/api/query/post.ts b/package-shared/functions/api/query/post.ts index 3b28702..31936a8 100644 --- a/package-shared/functions/api/query/post.ts +++ b/package-shared/functions/api/query/post.ts @@ -48,9 +48,9 @@ export default async function apiPost({ */ try { let { result, error } = await runQuery({ - dbFullName: dbFullName, - query: query, - dbSchema: dbSchema, + dbFullName, + query, + dbSchema, queryValuesArray: queryValues, tableName, dbContext, @@ -89,11 +89,8 @@ export default async function apiPost({ const clonedTargetTable = _.cloneDeep(targetTable); delete clonedTargetTable.childTable; - delete clonedTargetTable.childTableDbFullName; - delete clonedTargetTable.childTableName; delete clonedTargetTable.childrenTables; delete clonedTargetTable.updateData; - delete clonedTargetTable.tableNameOld; delete clonedTargetTable.indexes; tableSchema = clonedTargetTable; diff --git a/package-shared/functions/api/social-login/handleSocialDb.ts b/package-shared/functions/api/social-login/handleSocialDb.ts index 44fc0fd..ac3c92f 100644 --- a/package-shared/functions/api/social-login/handleSocialDb.ts +++ b/package-shared/functions/api/social-login/handleSocialDb.ts @@ -10,6 +10,7 @@ import { APILoginFunctionReturn, HandleSocialDbFunctionParams, } from "../../../types"; +import grabDirNames from "../../../utils/backend/names/grab-dir-names"; /** * # Handle Social DB @@ -151,15 +152,15 @@ export default async function handleSocialDb({ }, }); - if (newUser?.insertId) { + if (newUser?.payload?.insertId) { if (!database) { /** * Add a Mariadb User for this User */ - await addMariadbUser({ userId: newUser.insertId }); + await addMariadbUser({ userId: newUser.payload.insertId }); } - const newUserQueriedQuery = `SELECT * FROM ${dbAppend}users WHERE id='${newUser.insertId}'`; + const newUserQueriedQuery = `SELECT * FROM ${dbAppend}users WHERE id='${newUser.payload.insertId}'`; const newUserQueried = await varDatabaseDbHandler({ database: finalDbName, @@ -182,7 +183,7 @@ export default async function handleSocialDb({ */ let generatedToken = encrypt({ data: JSON.stringify({ - id: newUser.insertId, + id: newUser.payload.insertId, email: supEmail, dateCode: Date.now(), }), @@ -202,7 +203,7 @@ export default async function handleSocialDb({ }).then(() => {}); } - const STATIC_ROOT = process.env.DSQL_STATIC_SERVER_DIR; + const { STATIC_ROOT } = grabDirNames(); if (!STATIC_ROOT) { console.log("Static File ENV not Found!"); @@ -219,11 +220,11 @@ export default async function handleSocialDb({ * @description Create new user folder and file */ if (!database || database?.match(/^datasquirel$/)) { - let newUserSchemaFolderPath = `${process.env.DSQL_USER_DB_SCHEMA_PATH}/user-${newUser.insertId}`; + let 
newUserSchemaFolderPath = `${process.env.DSQL_USER_DB_SCHEMA_PATH}/user-${newUser.payload.insertId}`; let newUserMediaFolderPath = path.join( STATIC_ROOT, - `images/user-images/user-${newUser.insertId}` + `images/user-images/user-${newUser.payload.insertId}` ); fs.mkdirSync(newUserSchemaFolderPath); diff --git a/package-shared/functions/api/users/api-create-user.ts b/package-shared/functions/api/users/api-create-user.ts index 769b087..2e64d7e 100644 --- a/package-shared/functions/api/users/api-create-user.ts +++ b/package-shared/functions/api/users/api-create-user.ts @@ -1,5 +1,4 @@ -// @ts-check - +import { findDbNameInSchemaDir } from "../../../shell/createDbFromSchema/grab-required-database-schemas"; import { APICreateUserFunctionParams } from "../../../types"; import addUsersTableToDb from "../../backend/addUsersTableToDb"; import addDbEntry from "../../backend/db/addDbEntry"; @@ -39,6 +38,19 @@ export default async function apiCreateUser({ }; } + const targetDbSchema = findDbNameInSchemaDir({ + dbName: dbFullName, + userId, + }); + + if (!targetDbSchema?.id) { + return { + success: false, + msg: "targetDbSchema not found", + payload: null, + }; + } + const hashedPassword = hashPassword({ encryptionKey: finalEncryptionKey, password: String(payload.password), @@ -57,8 +69,8 @@ export default async function apiCreateUser({ const newTable = await addUsersTableToDb({ userId: Number(API_USER_ID), database: dbFullName, - payload: payload, + dbId: targetDbSchema.id, }); fields = await varDatabaseDbHandler({ @@ -87,6 +99,7 @@ export default async function apiCreateUser({ newPayload: { [key]: payload[key], }, + dbId: targetDbSchema.id, }); } } @@ -143,8 +156,8 @@ export default async function apiCreateUser({ }, }); - if (addUser?.insertId) { - const newlyAddedUserQuery = `SELECT id,uuid,first_name,last_name,email,username,image,image_thumbnail,verification_status FROM ${dbFullName}.users WHERE id='${addUser.insertId}'`; + if (addUser?.payload?.insertId) { + const newlyAddedUserQuery = `SELECT id,uuid,first_name,last_name,email,username,image,image_thumbnail,verification_status FROM ${dbFullName}.users WHERE id='${addUser.payload.insertId}'`; const newlyAddedUser = await varDatabaseDbHandler({ queryString: newlyAddedUserQuery, diff --git a/package-shared/functions/api/users/api-login.ts b/package-shared/functions/api/users/api-login.ts index b9e943b..a794c7e 100644 --- a/package-shared/functions/api/users/api-login.ts +++ b/package-shared/functions/api/users/api-login.ts @@ -26,6 +26,14 @@ export default async function apiLoginUser({ debug, }: APILoginFunctionParams): Promise { const dbFullName = grabDbFullName({ dbName: database, userId: dbUserId }); + + if (!dbFullName) { + console.log(`Database Full Name couldn't be grabbed`); + return { + success: false, + msg: `Database Full Name couldn't be grabbed`, + }; + } const dbAppend = global.DSQL_USE_LOCAL ? 
"" : `${dbFullName}.`; /** @@ -152,6 +160,8 @@ export default async function apiLoginUser({ let userPayload: DATASQUIREL_LoggedInUser = { id: foundUser[0].id, + uid: foundUser[0].uid, + uuid: foundUser[0].uuid, first_name: foundUser[0].first_name, last_name: foundUser[0].last_name, username: foundUser[0].username, diff --git a/package-shared/functions/api/users/reset-password/api-send-reset-password-link.ts b/package-shared/functions/api/users/reset-password/api-send-reset-password-link.ts index 165c8ad..010659a 100644 --- a/package-shared/functions/api/users/reset-password/api-send-reset-password-link.ts +++ b/package-shared/functions/api/users/reset-password/api-send-reset-password-link.ts @@ -30,11 +30,13 @@ export default async function apiSendResetPasswordLink({ }: Param): Promise { const dbFullName = grabDbFullName({ dbName: database, userId: dbUserId }); - /** - * Check input validity - * - * @description Check input validity - */ + if (!dbFullName) { + return { + success: false, + msg: `Couldn't get database full name`, + }; + } + if (email?.match(/ /)) { return { success: false, diff --git a/package-shared/functions/backend/addMariadbUser.ts b/package-shared/functions/backend/addMariadbUser.ts index 2e80137..e162216 100644 --- a/package-shared/functions/backend/addMariadbUser.ts +++ b/package-shared/functions/backend/addMariadbUser.ts @@ -4,6 +4,7 @@ import NO_DB_HANDLER from "../../utils/backend/global-db/NO_DB_HANDLER"; import addDbEntry from "./db/addDbEntry"; import encrypt from "../dsql/encrypt"; import LOCAL_DB_HANDLER from "../../utils/backend/global-db/LOCAL_DB_HANDLER"; +import grabSQLKeyName from "../../utils/grab-sql-key-name"; type Param = { userId: number | string; @@ -16,7 +17,7 @@ export default async function addMariadbUser({ userId }: Param): Promise { try { const defaultMariadbUserHost = process.env.DSQL_DB_HOST || "127.0.0.1"; - const username = `dsql_user_${userId}`; + const username = grabSQLKeyName({ type: "user", userId }); const password = generator.generate({ length: 16, numbers: true, diff --git a/package-shared/functions/backend/addUsersTableToDb.ts b/package-shared/functions/backend/addUsersTableToDb.ts index 4d1226c..a6ebbab 100644 --- a/package-shared/functions/backend/addUsersTableToDb.ts +++ b/package-shared/functions/backend/addUsersTableToDb.ts @@ -1,16 +1,19 @@ import serverError from "./serverError"; import DB_HANDLER from "../../utils/backend/global-db/DB_HANDLER"; -import { default as grabUserSchemaData } from "./grabUserSchemaData"; -import { default as setUserSchemaData } from "./setUserSchemaData"; import addDbEntry from "./db/addDbEntry"; import createDbFromSchema from "../../shell/createDbFromSchema"; import LOCAL_DB_HANDLER from "../../utils/backend/global-db/LOCAL_DB_HANDLER"; import grabNewUsersTableSchema from "./grabNewUsersTableSchema"; +import { + grabPrimaryRequiredDbSchema, + writeUpdatedDbSchema, +} from "../../shell/createDbFromSchema/grab-required-database-schemas"; type Param = { userId: number; database: string; payload?: { [s: string]: any }; + dbId: string | number; }; /** @@ -20,6 +23,7 @@ export default async function addUsersTableToDb({ userId, database, payload, + dbId, }: Param): Promise { try { const dbFullName = database; @@ -27,12 +31,10 @@ export default async function addUsersTableToDb({ const userPreset = grabNewUsersTableSchema({ payload }); if (!userPreset) throw new Error("Couldn't Get User Preset!"); - const userSchemaData = grabUserSchemaData({ userId }); - if (!userSchemaData) throw new Error("User schema 
data not found!"); - - let targetDatabase = userSchemaData.find( - (db: any) => db.dbFullName === database - ); + let targetDatabase = grabPrimaryRequiredDbSchema({ + dbId, + userId, + }); if (!targetDatabase) { throw new Error("Couldn't Find Target Database!"); @@ -48,7 +50,7 @@ export default async function addUsersTableToDb({ targetDatabase.tables.push(userPreset); } - setUserSchemaData({ schemaData: userSchemaData, userId }); + writeUpdatedDbSchema({ dbSchema: targetDatabase, userId }); const targetDb: any[] | null = global.DSQL_USE_LOCAL ? await LOCAL_DB_HANDLER( diff --git a/package-shared/functions/backend/api-cred.ts b/package-shared/functions/backend/api-cred.ts index f729ce4..452a0fa 100644 --- a/package-shared/functions/backend/api-cred.ts +++ b/package-shared/functions/backend/api-cred.ts @@ -5,56 +5,56 @@ import { CheckApiCredentialsFn } from "../../types"; /** * # Grap API Credentials */ -const grabApiCred: CheckApiCredentialsFn = ({ - key, - database, - table, - user_id, - media, -}) => { - if (!key) return null; - if (!user_id) return null; +// const grabApiCred: CheckApiCredentialsFn = ({ +// key, +// database, +// table, +// user_id, +// media, +// }) => { +// if (!key) return null; +// if (!user_id) return null; - try { - const allowedKeysPath = process.env.DSQL_API_KEYS_PATH; +// try { +// const allowedKeysPath = process.env.DSQL_API_KEYS_PATH; - if (!allowedKeysPath) - throw new Error( - "process.env.DSQL_API_KEYS_PATH variable not found" - ); +// if (!allowedKeysPath) +// throw new Error( +// "process.env.DSQL_API_KEYS_PATH variable not found" +// ); - const ApiJSON = decrypt({ encryptedString: key }); +// const ApiJSON = decrypt({ encryptedString: key }); - const ApiObject: import("../../types").ApiKeyObject = JSON.parse( - ApiJSON || "" - ); +// const ApiObject: import("../../types").ApiKeyObject = JSON.parse( +// ApiJSON || "" +// ); - const isApiKeyValid = fs.existsSync( - `${allowedKeysPath}/${ApiObject.sign}` - ); +// const isApiKeyValid = fs.existsSync( +// `${allowedKeysPath}/${ApiObject.sign}` +// ); - if (String(ApiObject.user_id) !== String(user_id)) return null; +// if (String(ApiObject.user_id) !== String(user_id)) return null; - if (!isApiKeyValid) return null; - if (!ApiObject.target_database) return ApiObject; - if (media) return ApiObject; +// if (!isApiKeyValid) return null; +// if (!ApiObject.target_database) return ApiObject; +// if (media) return ApiObject; - if (!database && ApiObject.target_database) return null; - const isDatabaseAllowed = ApiObject.target_database - ?.split(",") - .includes(String(database)); +// if (!database && ApiObject.target_database) return null; +// const isDatabaseAllowed = ApiObject.target_database +// ?.split(",") +// .includes(String(database)); - if (isDatabaseAllowed && !ApiObject.target_table) return ApiObject; - if (isDatabaseAllowed && !table && ApiObject.target_table) return null; - const isTableAllowed = ApiObject.target_table - ?.split(",") - .includes(String(table)); - if (isTableAllowed) return ApiObject; - return null; - } catch (error: any) { - console.log(`api-cred ERROR: ${error.message}`); - return { error: `api-cred ERROR: ${error.message}` }; - } -}; +// if (isDatabaseAllowed && !ApiObject.target_table) return ApiObject; +// if (isDatabaseAllowed && !table && ApiObject.target_table) return null; +// const isTableAllowed = ApiObject.target_table +// ?.split(",") +// .includes(String(table)); +// if (isTableAllowed) return ApiObject; +// return null; +// } catch (error: any) { +// 
console.log(`api-cred ERROR: ${error.message}`); +// return { error: `api-cred ERROR: ${error.message}` }; +// } +// }; -export default grabApiCred; +// export default grabApiCred; diff --git a/package-shared/functions/backend/cookies/get-auth-cookie-names.ts b/package-shared/functions/backend/cookies/get-auth-cookie-names.ts index f4fc13a..6ca8e77 100644 --- a/package-shared/functions/backend/cookies/get-auth-cookie-names.ts +++ b/package-shared/functions/backend/cookies/get-auth-cookie-names.ts @@ -1,4 +1,5 @@ import getCsrfHeaderName from "../../../actions/get-csrf-header-name"; +import { AppNames } from "../../../dict/app-names"; type Param = { database?: string; @@ -22,8 +23,14 @@ export default function getAuthCookieNames(params?: Param): Return { process.env.DSQL_COOKIES_ONE_TIME_CODE_NAME || "one-time-code"; const targetDatabase = - params?.database?.replace(/^datasquirel_user_\d+_/, "") || - process.env.DSQL_DB_NAME?.replace(/^datasquirel_user_\d+_/, ""); + params?.database?.replace( + new RegExp(`^${AppNames["DsqlDbPrefix"]}\\d+_`), + "" + ) || + process.env.DSQL_DB_NAME?.replace( + new RegExp(`^${AppNames["DsqlDbPrefix"]}\\d+_`), + "" + ); let keyCookieName = cookiesPrefix; if (params?.userId) keyCookieName += `user_${params.userId}_`; diff --git a/package-shared/functions/backend/createDbSchemaFromDb.ts b/package-shared/functions/backend/createDbSchemaFromDb.ts index 8982489..d5ad0ab 100644 --- a/package-shared/functions/backend/createDbSchemaFromDb.ts +++ b/package-shared/functions/backend/createDbSchemaFromDb.ts @@ -1,24 +1,35 @@ import varDatabaseDbHandler from "../../functions/backend/varDatabaseDbHandler"; -import { default as grabUserSchemaData } from "../../functions/backend/grabUserSchemaData"; -import { default as setUserSchemaData } from "../../functions/backend/setUserSchemaData"; import addDbEntry from "../../functions/backend/db/addDbEntry"; import slugToCamelTitle from "../../shell/utils/slugToCamelTitle"; -import { DSQL_DATASQUIREL_USER_DATABASES } from "../../types/dsql"; +import { + DSQL_DATASQUIREL_USER_DATABASE_TABLES, + DSQL_DATASQUIREL_USER_DATABASES, +} from "../../types/dsql"; import { DSQL_FieldSchemaType, DSQL_IndexSchemaType, DSQL_MYSQL_SHOW_COLUMNS_Type, DSQL_TableSchemaType, } from "../../types"; +import grabDSQLSchemaIndexComment from "../../shell/utils/grab-dsql-schema-index-comment"; +import { + grabPrimaryRequiredDbSchema, + writeUpdatedDbSchema, +} from "../../shell/createDbFromSchema/grab-required-database-schemas"; +import _n from "../../utils/numberfy"; +import dataTypeParser from "../../utils/db/schema/data-type-parser"; +import dataTypeConstructor from "../../utils/db/schema/data-type-constructor"; type Params = { userId: number | string; database: DSQL_DATASQUIREL_USER_DATABASES; + dbId?: string | number; }; export default async function createDbSchemaFromDb({ userId, database, + dbId, }: Params) { try { if (!userId) { @@ -26,12 +37,12 @@ export default async function createDbSchemaFromDb({ return; } - const userSchemaData = grabUserSchemaData({ userId }); - if (!userSchemaData) throw new Error("User schema data not found!"); + const targetDb = grabPrimaryRequiredDbSchema({ + userId, + dbId: database.db_schema_id || dbId, + }); - const targetDb: { tables: object[] } = userSchemaData.filter( - (dbObject) => dbObject.dbFullName === database.db_full_name - )[0]; + if (!targetDb) throw new Error(`Target Db not found!`); const existingTables = await varDatabaseDbHandler({ database: database.db_full_name, @@ -44,21 +55,21 @@ export default async 
function createDbSchemaFromDb({ const table = existingTables[i]; const tableName = Object.values(table)[0] as string; - const tableInsert = await addDbEntry({ - dbFullName: "datasquirel", - tableName: "user_database_tables", - data: { - user_id: userId, - db_id: database.id, - db_slug: database.db_slug, - table_name: slugToCamelTitle(tableName), - table_slug: tableName, - }, - }); + const tableInsert = + await addDbEntry({ + dbFullName: "datasquirel", + tableName: "user_database_tables", + data: { + user_id: _n(userId), + db_id: database.id, + db_slug: database.db_slug, + table_name: slugToCamelTitle(tableName) || undefined, + table_slug: tableName, + }, + }); const tableObject: DSQL_TableSchemaType = { tableName: tableName, - tableFullName: slugToCamelTitle(tableName) || "", fields: [], indexes: [], }; @@ -75,9 +86,15 @@ export default async function createDbSchemaFromDb({ const { Field, Type, Null, Key, Default, Extra } = tableColumn; + const parsedDataType = dataTypeParser(Type.toUpperCase()); + const fieldObject: DSQL_FieldSchemaType = { fieldName: Field, - dataType: Type.toUpperCase(), + dataType: dataTypeConstructor( + parsedDataType.type, + parsedDataType.limit, + parsedDataType.decimal + ), }; if (Null?.match(/^no$/i)) fieldObject.notNullValue = true; @@ -112,11 +129,16 @@ export default async function createDbSchemaFromDb({ Index_comment, } = indexObject; - if (!Index_comment?.match(/^schema_index$/)) continue; + if ( + !Index_comment?.match( + new RegExp(grabDSQLSchemaIndexComment()) + ) + ) + continue; const indexNewObject: DSQL_IndexSchemaType = { indexType: Index_type?.match(/fulltext/i) - ? "fullText" + ? "full_text" : "regular", indexName: Key_name, indexTableFields: [], @@ -152,8 +174,7 @@ export default async function createDbSchemaFromDb({ targetDb.tables.push(tableObject); } - setUserSchemaData({ schemaData: userSchemaData, userId }); - + writeUpdatedDbSchema({ dbSchema: targetDb, userId }); return true; } catch (error) { console.log(error); diff --git a/package-shared/functions/backend/db/addDbEntry.ts b/package-shared/functions/backend/db/addDbEntry.ts index 84824b1..6d26d9e 100644 --- a/package-shared/functions/backend/db/addDbEntry.ts +++ b/package-shared/functions/backend/db/addDbEntry.ts @@ -7,17 +7,30 @@ import connDbHandler from "../../../utils/db/conn-db-handler"; import checkIfIsMaster from "../../../utils/check-if-is-master"; import { DbContextsArray } from "./runQuery"; import debugLog from "../../../utils/logging/debug-log"; -import { PostInsertReturn } from "../../../types"; +import { + APIResponseObject, + DSQL_TableSchemaType, + PostInsertReturn, +} from "../../../types"; +import purgeDefaultFields from "../../../utils/purge-default-fields"; -type Param = { +export type AddDbEntryParam< + T extends { [k: string]: any } = any, + K extends string = string +> = { dbContext?: (typeof DbContextsArray)[number]; paradigm?: "Read Only" | "Full Access"; dbFullName?: string; - tableName: string; - data: T; - tableSchema?: import("../../../types").DSQL_TableSchemaType; - duplicateColumnName?: string; - duplicateColumnValue?: string; + tableName: K; + data?: T; + batchData?: T[]; + tableSchema?: DSQL_TableSchemaType; + duplicateColumnName?: keyof T; + duplicateColumnValue?: string | number; + /** + * Update Entry if a duplicate is found. 
+ * Requires `duplicateColumnName` and `duplicateColumnValue` parameters + */ update?: boolean; encryptionKey?: string; encryptionSalt?: string; @@ -28,12 +41,16 @@ type Param = { /** * Add a db Entry Function */ -export default async function addDbEntry({ +export default async function addDbEntry< + T extends { [k: string]: any } = any, + K extends string = string +>({ dbContext, paradigm, dbFullName, tableName, data, + batchData, tableSchema, duplicateColumnName, duplicateColumnValue, @@ -42,7 +59,7 @@ export default async function addDbEntry({ encryptionSalt, forceLocal, debug, -}: Param): Promise { +}: AddDbEntryParam): Promise> { const isMaster = forceLocal ? true : checkIfIsMaster({ dbContext, dbFullName }); @@ -62,14 +79,21 @@ export default async function addDbEntry({ ? global.DSQL_DB_CONN : global.DSQL_READ_ONLY_DB_CONN || global.DSQL_DB_CONN; - if (data?.["date_created_timestamp"]) delete data["date_created_timestamp"]; - if (data?.["date_updated_timestamp"]) delete data["date_updated_timestamp"]; - if (data?.["date_updated"]) delete data["date_updated"]; - if (data?.["date_updated_code"]) delete data["date_updated_code"]; - if (data?.["date_created"]) delete data["date_created"]; - if (data?.["date_created_code"]) delete data["date_created_code"]; + let newData = _.cloneDeep(data); + if (newData) { + newData = purgeDefaultFields(newData); + } - if (duplicateColumnName && typeof duplicateColumnName === "string") { + let newBatchData = _.cloneDeep(batchData) as any[]; + if (newBatchData) { + newBatchData = purgeDefaultFields(newBatchData); + } + + if ( + duplicateColumnName && + typeof duplicateColumnName === "string" && + newData + ) { const checkDuplicateQuery = `SELECT * FROM ${ isMaster ? "" : `\`${dbFullName}\`.` }\`${tableName}\` WHERE \`${duplicateColumnName}\`=?`; @@ -81,13 +105,17 @@ export default async function addDbEntry({ ); if (duplicateValue?.[0] && !update) { - return null; - } else if (duplicateValue && duplicateValue[0] && update) { + return { + success: false, + payload: undefined, + msg: "Duplicate entry found", + }; + } else if (duplicateValue?.[0] && update) { return await updateDbEntry({ dbContext, dbFullName, tableName, - data, + data: newData, tableSchema, encryptionKey, encryptionSalt, @@ -97,140 +125,188 @@ export default async function addDbEntry({ } } - const dataKeys = Object.keys(data); + function generateQuery(data: T) { + const dataKeys = Object.keys(data); - let insertKeysArray = []; - let insertValuesArray = []; + let insertKeysArray = []; + let insertValuesArray = []; - for (let i = 0; i < dataKeys.length; i++) { - try { - const dataKey = dataKeys[i]; - let value = data?.[dataKey]; + for (let i = 0; i < dataKeys.length; i++) { + try { + const dataKey = dataKeys[i]; + let value = data[dataKey]; - const targetFieldSchemaArray = tableSchema - ? tableSchema?.fields?.filter( - (field) => field.fieldName == dataKey - ) - : null; - const targetFieldSchema = - targetFieldSchemaArray && targetFieldSchemaArray[0] - ? targetFieldSchemaArray[0] + const targetFieldSchemaArray = tableSchema + ? tableSchema?.fields?.filter( + (field) => field.fieldName == dataKey + ) : null; + const targetFieldSchema = + targetFieldSchemaArray && targetFieldSchemaArray[0] + ? 
targetFieldSchemaArray[0] + : null; - if (value == null || value == undefined) continue; + if (value == null || value == undefined) continue; - if ( - targetFieldSchema?.dataType?.match(/int$/i) && - typeof value == "string" && - !value?.match(/./) - ) - continue; + if ( + targetFieldSchema?.dataType?.match(/int$/i) && + typeof value == "string" && + !value?.match(/./) + ) + continue; - if (targetFieldSchema?.encrypted) { - value = encrypt({ - data: value, - encryptionKey, - encryptionSalt, - }); - console.log("DSQL: Encrypted value =>", value); - } - - const htmlRegex = /<[^>]+>/g; - - if (targetFieldSchema?.richText || String(value).match(htmlRegex)) { - value = sanitizeHtml(value, sanitizeHtmlOptions); - } - - if (targetFieldSchema?.pattern) { - const pattern = new RegExp( - targetFieldSchema.pattern, - targetFieldSchema.patternFlags || "" - ); - if (!pattern.test(value)) { - console.log("DSQL: Pattern not matched =>", value); - value = ""; + if (targetFieldSchema?.encrypted) { + value = encrypt({ + data: value, + encryptionKey, + encryptionSalt, + }); + console.log("DSQL: Encrypted value =>", value); } - } - insertKeysArray.push("`" + dataKey + "`"); + const htmlRegex = /<[^>]+>/g; - if (typeof value === "object") { - value = JSON.stringify(value); - } + if ( + targetFieldSchema?.richText || + String(value).match(htmlRegex) + ) { + value = sanitizeHtml(value, sanitizeHtmlOptions); + } - if (typeof value == "number") { - insertValuesArray.push(String(value)); - } else { - insertValuesArray.push(value); + if (targetFieldSchema?.pattern) { + const pattern = new RegExp( + targetFieldSchema.pattern, + targetFieldSchema.patternFlags || "" + ); + if (!pattern.test(value)) { + console.log("DSQL: Pattern not matched =>", value); + value = ""; + } + } + + insertKeysArray.push("`" + dataKey + "`"); + + if (typeof value === "object") { + value = JSON.stringify(value); + } + + if (typeof value == "number") { + insertValuesArray.push(String(value)); + } else { + insertValuesArray.push(value); + } + } catch (error: any) { + console.log( + "DSQL: Error in parsing data keys =>", + error.message + ); + global.ERROR_CALLBACK?.( + `Error parsing Data Keys`, + error as Error + ); + continue; } - } catch (error: any) { - console.log("DSQL: Error in parsing data keys =>", error.message); - global.ERROR_CALLBACK?.(`Error parsing Data Keys`, error as Error); - continue; } - } - if (!data?.["date_created"]) { insertKeysArray.push("`date_created`"); insertValuesArray.push(Date()); - } - if (!data?.["date_created_code"]) { insertKeysArray.push("`date_created_code`"); insertValuesArray.push(Date.now()); - } - if (!data?.["date_updated"]) { insertKeysArray.push("`date_updated`"); insertValuesArray.push(Date()); - } - if (!data?.["date_updated_code"]) { insertKeysArray.push("`date_updated_code`"); insertValuesArray.push(Date.now()); + + const queryValuesArray = insertValuesArray; + + return { queryValuesArray, insertValuesArray, insertKeysArray }; } - const query = `INSERT INTO ${ - isMaster ? "" : `\`${dbFullName}\`.` - }\`${tableName}\` (${insertKeysArray.join(",")}) VALUES (${insertValuesArray - .map(() => "?") - .join(",")})`; - const queryValuesArray = insertValuesArray; + if (newData) { + const { insertKeysArray, insertValuesArray, queryValuesArray } = + generateQuery(newData); - if (debug) { - debugLog({ - log: DB_CONN?.getConfig(), - addTime: true, - label: "DB_CONN Config", - }); + const query = `INSERT INTO ${ + isMaster && !dbFullName ? 
"" : `\`${dbFullName}\`.` + }\`${tableName}\` (${insertKeysArray.join( + "," + )}) VALUES (${insertValuesArray.map(() => "?").join(",")})`; - debugLog({ - log: query, - addTime: true, - label: "query", - }); + const newInsert = await connDbHandler( + DB_CONN, + query, + queryValuesArray, + debug + ); - debugLog({ - log: queryValuesArray, - addTime: true, - label: "queryValuesArray", - }); + return { + success: Boolean(newInsert?.insertId), + payload: newInsert, + queryObject: { + sql: query, + params: queryValuesArray, + }, + }; + } else if (newBatchData) { + let batchInsertKeysArray: string[] | undefined; + let batchInsertValuesArray: any[][] = []; + let batchQueryValuesArray: any[][] = []; + + for (let i = 0; i < newBatchData.length; i++) { + const singleBatchData = newBatchData[i]; + const { insertKeysArray, insertValuesArray, queryValuesArray } = + generateQuery(singleBatchData); + + if (!batchInsertKeysArray) { + batchInsertKeysArray = insertKeysArray; + } + + batchInsertValuesArray.push(insertValuesArray); + batchQueryValuesArray.push(queryValuesArray); + } + + const query = `INSERT INTO ${ + isMaster && !dbFullName ? "" : `\`${dbFullName}\`.` + }\`${tableName}\` (${batchInsertKeysArray?.join( + "," + )}) VALUES ${batchInsertValuesArray + .map((vl) => `(${vl.map(() => "?").join(",")})`) + .join(",")}`; + + console.log("query", query); + console.log("batchQueryValuesArray", batchQueryValuesArray); + + const newInsert = await connDbHandler( + DB_CONN, + query, + batchQueryValuesArray.flat(), + debug + ); + + if (debug) { + debugLog({ + log: newInsert, + addTime: true, + label: "newInsert", + }); + } + + return { + success: Boolean(newInsert?.insertId), + payload: newInsert, + queryObject: { + sql: query, + params: batchQueryValuesArray.flat(), + }, + }; + } else { + return { + success: false, + payload: undefined, + msg: "No data provided", + }; } - - const newInsert = await connDbHandler( - DB_CONN, - query, - queryValuesArray, - debug - ); - - if (debug) { - debugLog({ - log: newInsert, - addTime: true, - label: "newInsert", - }); - } - - return newInsert; } diff --git a/package-shared/functions/backend/db/deleteDbEntry.ts b/package-shared/functions/backend/db/deleteDbEntry.ts index 29c7880..9ae0b7e 100644 --- a/package-shared/functions/backend/db/deleteDbEntry.ts +++ b/package-shared/functions/backend/db/deleteDbEntry.ts @@ -1,13 +1,14 @@ +import { DSQL_TableSchemaType, PostInsertReturn } from "../../../types"; import checkIfIsMaster from "../../../utils/check-if-is-master"; import connDbHandler from "../../../utils/db/conn-db-handler"; import { DbContextsArray } from "./runQuery"; -type Param = { +type Param = { dbContext?: (typeof DbContextsArray)[number]; - dbFullName: string; - tableName: string; - tableSchema?: import("../../../types").DSQL_TableSchemaType; - identifierColumnName: string; + dbFullName?: string; + tableName: K; + tableSchema?: DSQL_TableSchemaType; + identifierColumnName: keyof T; identifierValue: string | number; forceLocal?: boolean; }; @@ -16,14 +17,17 @@ type Param = { * # Delete DB Entry Function * @description */ -export default async function deleteDbEntry({ +export default async function deleteDbEntry< + T extends { [k: string]: any } = any, + K extends string = string +>({ dbContext, dbFullName, tableName, identifierColumnName, identifierValue, forceLocal, -}: Param): Promise { +}: Param): Promise { try { const isMaster = forceLocal ? true @@ -32,9 +36,6 @@ export default async function deleteDbEntry({ const DB_CONN = isMaster ? 
global.DSQL_DB_CONN : global.DSQL_FULL_ACCESS_DB_CONN || global.DSQL_DB_CONN; - const DB_RO_CONN = isMaster - ? global.DSQL_DB_CONN - : global.DSQL_READ_ONLY_DB_CONN || global.DSQL_DB_CONN; /** * Execution @@ -42,8 +43,8 @@ export default async function deleteDbEntry({ * @description */ const query = `DELETE FROM ${ - isMaster ? "" : `\`${dbFullName}\`.` - }\`${tableName}\` WHERE \`${identifierColumnName}\`=?`; + isMaster && !dbFullName ? "" : `\`${dbFullName}\`.` + }\`${tableName}\` WHERE \`${identifierColumnName.toString()}\`=?`; const deletedEntry = await connDbHandler(DB_CONN, query, [ identifierValue, diff --git a/package-shared/functions/backend/db/runQuery.ts b/package-shared/functions/backend/db/runQuery.ts index 9dee9ee..45b34d6 100644 --- a/package-shared/functions/backend/db/runQuery.ts +++ b/package-shared/functions/backend/db/runQuery.ts @@ -126,7 +126,7 @@ export default async function runQuery({ case "insert": result = await addDbEntry({ dbContext, - dbFullName: dbFullName, + dbFullName, tableName: table, data: data, update, @@ -145,7 +145,7 @@ export default async function runQuery({ case "update": result = await updateDbEntry({ dbContext, - dbFullName: dbFullName, + dbFullName, tableName: table, data: data, identifierColumnName, @@ -158,7 +158,7 @@ export default async function runQuery({ case "delete": result = await deleteDbEntry({ dbContext, - dbFullName: dbFullName, + dbFullName, tableName: table, identifierColumnName, identifierValue, diff --git a/package-shared/functions/backend/db/updateDbEntry.ts b/package-shared/functions/backend/db/updateDbEntry.ts index 2e88fea..fd1dada 100644 --- a/package-shared/functions/backend/db/updateDbEntry.ts +++ b/package-shared/functions/backend/db/updateDbEntry.ts @@ -4,7 +4,13 @@ import encrypt from "../../dsql/encrypt"; import checkIfIsMaster from "../../../utils/check-if-is-master"; import connDbHandler from "../../../utils/db/conn-db-handler"; import { DbContextsArray } from "./runQuery"; -import { PostInsertReturn } from "../../../types"; +import { + APIResponseObject, + DSQL_TableSchemaType, + PostInsertReturn, +} from "../../../types"; +import _ from "lodash"; +import purgeDefaultFields from "../../../utils/purge-default-fields"; type Param = { dbContext?: (typeof DbContextsArray)[number]; @@ -12,8 +18,8 @@ type Param = { tableName: string; encryptionKey?: string; encryptionSalt?: string; - data: any; - tableSchema?: import("../../../types").DSQL_TableSchemaType; + data?: T; + tableSchema?: DSQL_TableSchemaType; identifierColumnName: keyof T; identifierValue: string | number; forceLocal?: boolean; @@ -36,11 +42,17 @@ export default async function updateDbEntry< encryptionKey, encryptionSalt, forceLocal, -}: Param): Promise { +}: Param): Promise> { /** * Check if data is valid */ - if (!data || !Object.keys(data).length) return null; + if (!data || !Object.keys(data).length) { + return { + success: false, + payload: undefined, + msg: "No data provided", + }; + } const isMaster = forceLocal ? 
true @@ -54,12 +66,15 @@ export default async function updateDbEntry< //////////////////////////////////////// //////////////////////////////////////// + let newData = _.cloneDeep(data); + newData = purgeDefaultFields(newData); + /** * Declare variables * * @description Declare "results" variable */ - const dataKeys = Object.keys(data); + const dataKeys = Object.keys(newData); let updateKeyValueArray = []; let updateValues = []; @@ -67,8 +82,7 @@ export default async function updateDbEntry< for (let i = 0; i < dataKeys.length; i++) { try { const dataKey = dataKeys[i]; - // @ts-ignore - let value = data[dataKey]; + let value = newData[dataKey]; const targetFieldSchemaArray = tableSchema ? tableSchema?.fields?.filter( @@ -159,7 +173,7 @@ export default async function updateDbEntry< //////////////////////////////////////// const query = `UPDATE ${ - isMaster ? "" : `\`${dbFullName}\`.` + isMaster && !dbFullName ? "" : `\`${dbFullName}\`.` }\`${tableName}\` SET ${updateKeyValueArray.join(",")} WHERE \`${ identifierColumnName as string }\`=?`; @@ -171,5 +185,12 @@ export default async function updateDbEntry< /** * Return statement */ - return updatedEntry; + return { + success: Boolean(updatedEntry?.affectedRows), + payload: updatedEntry, + queryObject: { + sql: query, + params: updateValues, + }, + }; } diff --git a/package-shared/functions/backend/dbHandler.ts b/package-shared/functions/backend/dbHandler.ts index 1595e8a..21c117a 100644 --- a/package-shared/functions/backend/dbHandler.ts +++ b/package-shared/functions/backend/dbHandler.ts @@ -1,69 +1,63 @@ import fs from "fs"; -import serverError from "./serverError"; -import grabDSQLConnection from "../../utils/grab-dsql-connection"; import path from "path"; +import grabDSQLConnection from "../../utils/grab-dsql-connection"; + +type Param = { + query: string; + values?: string[] | object; + noErrorLogs?: boolean; +}; /** * # Main DB Handler Function + * @requires DSQL_DB_CONN - Gobal Variable for Datasquirel Database */ -export default async function dbHandler(...args: any[]) { - process.env.NODE_ENV?.match(/dev/) && - fs.appendFileSync( - "./.tmp/sqlQuery.sql", - args[0] + "\n" + Date() + "\n\n\n", - "utf8" - ); - +export default async function dbHandler({ + query, + values, + noErrorLogs, +}: Param): Promise { const CONNECTION = grabDSQLConnection(); let results; - /** - * Fetch from db - * - * @description Fetch data from db if no cache - */ try { - results = await new Promise((resolve, reject) => { - CONNECTION.query( - ...args, - (error: any, result: any, fields: any) => { - if (error) { - resolve({ error: error.message }); - } else { - resolve(result); - } - } - ); - }); + if (query && values) { + results = await CONNECTION.query(query, values); + } else { + results = await CONNECTION.query(query); + } } catch (error: any) { - const tmpFolder = path.resolve(process.cwd(), "./.tmp"); - if (!fs.existsSync(tmpFolder)) - fs.mkdirSync(tmpFolder, { recursive: true }); + if (!noErrorLogs) { + global.ERROR_CALLBACK?.(`DB Handler Error...`, error as Error); + } - fs.appendFileSync( - path.resolve(tmpFolder, "./dbErrorLogs.txt"), - JSON.stringify(error, null, 4) + "\n" + Date() + "\n\n\n", - "utf8" - ); + if (process.env.FIRST_RUN) { + return null; + } + + if (!noErrorLogs) { + console.log("ERROR in dbHandler =>", error.message); + console.log(error); + console.log(CONNECTION.config()); + + const tmpFolder = path.resolve(process.cwd(), "./.tmp"); + + if (!fs.existsSync(tmpFolder)) + fs.mkdirSync(tmpFolder, { recursive: true }); + + 
fs.appendFileSync( + path.resolve(tmpFolder, "./dbErrorLogs.txt"), + JSON.stringify(error, null, 4) + "\n" + Date() + "\n\n\n", + "utf8" + ); + } results = null; - - global.ERROR_CALLBACK?.(`DB Handler Error`, error as Error); - - serverError({ - component: "dbHandler", - message: error.message, - }); } finally { await CONNECTION?.end(); } - /** - * Return results - * - * @description Return results add to cache if "req" param is passed - */ if (results) { return JSON.parse(JSON.stringify(results)); } else { diff --git a/package-shared/functions/backend/defaultFieldsRegexp.ts b/package-shared/functions/backend/defaultFieldsRegexp.ts deleted file mode 100644 index 6171a8f..0000000 --- a/package-shared/functions/backend/defaultFieldsRegexp.ts +++ /dev/null @@ -1,9 +0,0 @@ -/** - * Regular expression to match default fields - * - * @description Regular expression to match default fields - */ -const defaultFieldsRegexp = - /^id$|^uuid$|^date_created$|^date_created_code$|^date_created_timestamp$|^date_updated$|^date_updated_code$|^date_updated_timestamp$/; - -export default defaultFieldsRegexp; diff --git a/package-shared/functions/backend/grab-mariadb-main-user-for-user.ts b/package-shared/functions/backend/grab-mariadb-main-user-for-user.ts new file mode 100644 index 0000000..01d6515 --- /dev/null +++ b/package-shared/functions/backend/grab-mariadb-main-user-for-user.ts @@ -0,0 +1,70 @@ +import { UserType } from "../../types"; +import dbHandler from "./dbHandler"; +import dsqlCrud from "../../utils/data-fetching/crud"; +import { DSQL_DATASQUIREL_USERS, DsqlTables } from "../../types/dsql"; +import decrypt from "../dsql/decrypt"; +import createUserSQLUser from "../../utils/create-user-sql-user"; +import grabUserMainSqlUserName from "../../utils/grab-user-main-sql-user-name"; + +type Params = { + user: UserType; +}; + +type Return = { + fullName?: string; + host?: string; + username?: string; + password?: string; +}; + +export default async function grabMariadbMainUserForUser({ + user, +}: Params): Promise { + const { + fullName, + host, + username: mariaDBUsername, + webHost, + } = grabUserMainSqlUserName({ user }); + + const existingWebAppUser = (await dbHandler({ + query: `SELECT * FROM mysql.user WHERE user=? 
AND host=?`, + values: [mariaDBUsername, webHost], + })) as any[]; + + if (!existingWebAppUser?.[0]) { + return await createUserSQLUser(user); + } else { + const existingUserRecord = await dsqlCrud< + DSQL_DATASQUIREL_USERS, + (typeof DsqlTables)[number] + >({ + action: "get", + table: "users", + query: { + query: { + id: { + value: String(user.id), + }, + }, + }, + }); + + const targetUser = ( + existingUserRecord?.payload as DSQL_DATASQUIREL_USERS[] | undefined + )?.[0]; + + if (!targetUser?.id) { + return {}; + } + + return { + fullName, + host, + username: mariaDBUsername, + password: decrypt({ + encryptedString: targetUser.mariadb_pass || "", + }), + }; + } +} diff --git a/package-shared/functions/backend/grabUserSchemaData.ts b/package-shared/functions/backend/grabUserSchemaData.ts deleted file mode 100644 index b594b46..0000000 --- a/package-shared/functions/backend/grabUserSchemaData.ts +++ /dev/null @@ -1,38 +0,0 @@ -import { DSQL_DatabaseSchemaType, UserType } from "../../types"; -import serverError from "./serverError"; -import fs from "fs"; -import grabDirNames from "../../utils/backend/names/grab-dir-names"; -import EJSON from "../../utils/ejson"; - -type Params = { - userId?: string | number; -}; - -/** - * # Grab User Schema Data - */ -export default function grabUserSchemaData({ - userId, -}: Params): DSQL_DatabaseSchemaType[] | null { - try { - const { userSchemaMainJSONFilePath } = grabDirNames({ userId }); - const schemaJSON = fs.readFileSync( - userSchemaMainJSONFilePath || "", - "utf-8" - ); - const schemaObj = EJSON.parse(schemaJSON) as DSQL_DatabaseSchemaType[]; - return schemaObj; - } catch (error: any) { - serverError({ - component: "grabUserSchemaData", - message: error.message, - }); - - global.ERROR_CALLBACK?.( - `Error Grabbing User Schema Data`, - error as Error - ); - - return null; - } -} diff --git a/package-shared/functions/backend/parseDbResults.ts b/package-shared/functions/backend/parseDbResults.ts index 63f759b..5800f01 100644 --- a/package-shared/functions/backend/parseDbResults.ts +++ b/package-shared/functions/backend/parseDbResults.ts @@ -1,7 +1,7 @@ // @ts-check import decrypt from "../dsql/decrypt"; -import defaultFieldsRegexp from "./defaultFieldsRegexp"; +import defaultFieldsRegexp from "../dsql/default-fields-regexp"; type Param = { unparsedResults: any[]; diff --git a/package-shared/functions/backend/setUserSchemaData.ts b/package-shared/functions/backend/setUserSchemaData.ts deleted file mode 100644 index f627410..0000000 --- a/package-shared/functions/backend/setUserSchemaData.ts +++ /dev/null @@ -1,43 +0,0 @@ -import serverError from "./serverError"; -import fs from "fs"; -import path from "path"; -import { DSQL_DatabaseSchemaType } from "../../types"; -import grabDirNames from "../../utils/backend/names/grab-dir-names"; - -type Param = { - userId: string | number; - schemaData: DSQL_DatabaseSchemaType[]; -}; - -/** - * # Set User Schema Data - */ -export default function setUserSchemaData({ - userId, - schemaData, -}: Param): boolean { - try { - const { userSchemaMainJSONFilePath } = grabDirNames({ userId }); - - if (!userSchemaMainJSONFilePath) { - throw new Error(`No User Schema JSON found!`); - } - - fs.writeFileSync( - userSchemaMainJSONFilePath, - JSON.stringify(schemaData), - "utf8" - ); - - return true; - } catch (error: any) { - serverError({ - component: "/functions/backend/setUserSchemaData", - message: error.message, - }); - - global.ERROR_CALLBACK?.(`Error Setting User Schema`, error as Error); - - return false; - } -} diff 
--git a/package-shared/functions/backend/su-db-handler.ts b/package-shared/functions/backend/su-db-handler.ts new file mode 100644 index 0000000..a3a4584 --- /dev/null +++ b/package-shared/functions/backend/su-db-handler.ts @@ -0,0 +1,31 @@ +import mysql from "serverless-mysql"; +import { UserType } from "../../types"; +import connDbHandler from "../../utils/db/conn-db-handler"; + +type Params = { + query?: string; + values?: any[]; + database?: string; + user: UserType; +}; + +export default async function suDbHandler({ + query, + database, + user, + values, +}: Params) { + const connection = mysql({ + config: { + host: process.env.DSQL_DB_HOST, + user: process.env.DSQL_DB_USERNAME, + password: process.env.DSQL_DB_PASSWORD, + database: database, + charset: "utf8mb4", + }, + }); + + const results = await connDbHandler(connection, query); + + return results; +} diff --git a/package-shared/functions/backend/updateUsersTableSchema.ts b/package-shared/functions/backend/updateUsersTableSchema.ts index 66bde09..ec05d8d 100644 --- a/package-shared/functions/backend/updateUsersTableSchema.ts +++ b/package-shared/functions/backend/updateUsersTableSchema.ts @@ -1,14 +1,17 @@ import serverError from "./serverError"; -import grabUserSchemaData from "./grabUserSchemaData"; -import setUserSchemaData from "./setUserSchemaData"; import createDbFromSchema from "../../shell/createDbFromSchema"; import grabSchemaFieldsFromData from "./grabSchemaFieldsFromData"; +import { + grabPrimaryRequiredDbSchema, + writeUpdatedDbSchema, +} from "../../shell/createDbFromSchema/grab-required-database-schemas"; type Param = { userId: number | string; database: string; newFields?: string[]; newPayload?: { [s: string]: any }; + dbId: string | number; }; /** @@ -19,27 +22,25 @@ export default async function updateUsersTableSchema({ database, newFields, newPayload, + dbId, }: Param): Promise { try { const dbFullName = database; - const userSchemaData = grabUserSchemaData({ userId }); - if (!userSchemaData) throw new Error("User schema data not found!"); + let targetDatabase = grabPrimaryRequiredDbSchema({ + dbId, + userId, + }); - let targetDatabaseIndex = userSchemaData.findIndex( - (db) => db.dbFullName === database - ); - - if (targetDatabaseIndex < 0) { + if (!targetDatabase) { throw new Error("Couldn't Find Target Database!"); } - let existingTableIndex = userSchemaData[ - targetDatabaseIndex - ]?.tables.findIndex((table) => table.tableName === "users"); + let existingTableIndex = targetDatabase?.tables.findIndex( + (table) => table.tableName === "users" + ); - const usersTable = - userSchemaData[targetDatabaseIndex].tables[existingTableIndex]; + const usersTable = targetDatabase.tables[existingTableIndex]; if (!usersTable?.fields?.[0]) throw new Error("Users Table Not Found!"); @@ -56,7 +57,7 @@ export default async function updateUsersTableSchema({ usersTable.fields.splice(finalSpliceStartIndex, 0, ...additionalFields); - setUserSchemaData({ schemaData: userSchemaData, userId }); + writeUpdatedDbSchema({ dbSchema: targetDatabase, userId }); const dbShellUpdate = await createDbFromSchema({ userId, diff --git a/package-shared/functions/backend/user-db-handler.ts b/package-shared/functions/backend/user-db-handler.ts new file mode 100644 index 0000000..6381fb2 --- /dev/null +++ b/package-shared/functions/backend/user-db-handler.ts @@ -0,0 +1,39 @@ +import mysql from "serverless-mysql"; +import { DSQL_TableSchemaType, UserType } from "../../types"; +import grabMariadbMainUserForUser from "./grab-mariadb-main-user-for-user"; 
+import connDbHandler from "../../utils/db/conn-db-handler"; + +type Params = { + query?: string; + values?: any[]; + database?: string; + tableSchema?: DSQL_TableSchemaType; + debug?: boolean; + user: UserType; +}; + +export default async function userDbHandler({ + query, + user, + database, + debug, + tableSchema, + values, +}: Params) { + const { fullName, host, username, password } = + await grabMariadbMainUserForUser({ user }); + + const connection = mysql({ + config: { + host, + user: username, + password: password, + database: database, + charset: "utf8mb4", + }, + }); + + const results = await connDbHandler(connection, query); + + return results; +} diff --git a/package-shared/functions/backend/varDatabaseDbHandler.ts b/package-shared/functions/backend/varDatabaseDbHandler.ts index df6e516..758b5c3 100644 --- a/package-shared/functions/backend/varDatabaseDbHandler.ts +++ b/package-shared/functions/backend/varDatabaseDbHandler.ts @@ -2,12 +2,13 @@ import parseDbResults from "./parseDbResults"; import serverError from "./serverError"; import grabDSQLConnection from "../../utils/grab-dsql-connection"; import connDbHandler from "../../utils/db/conn-db-handler"; +import { DSQL_TableSchemaType } from "../../types"; type Param = { queryString: string; queryValuesArray?: any[]; database?: string; - tableSchema?: import("../../types").DSQL_TableSchemaType; + tableSchema?: DSQL_TableSchemaType; debug?: boolean; }; diff --git a/package-shared/functions/dsql/db-schema-to-type.ts b/package-shared/functions/dsql/db-schema-to-type.ts index eac197e..42b639e 100644 --- a/package-shared/functions/dsql/db-schema-to-type.ts +++ b/package-shared/functions/dsql/db-schema-to-type.ts @@ -8,7 +8,7 @@ import { import _ from "lodash"; import EJSON from "../../utils/ejson"; import generateTypeDefinition from "./generate-type-definitions"; -import path from "path"; +import { AppNames } from "../../dict/app-names"; type Params = { dbSchema?: DSQL_DatabaseSchemaType; @@ -16,16 +16,12 @@ type Params = { export default function dbSchemaToType(params?: Params): string[] | undefined { let datasquirelSchema; - const defaultTableFieldsJSONFilePath = path.resolve( - __dirname, - "../../data/defaultFields.json" - ); + const { mainShemaJSONFilePath, defaultTableFieldsJSONFilePath } = + grabDirNames(); if (params?.dbSchema) { datasquirelSchema = params.dbSchema; } else { - const { mainShemaJSONFilePath } = grabDirNames(); - const mainSchema = EJSON.parse( fs.readFileSync(mainShemaJSONFilePath, "utf-8") ) as DSQL_DatabaseSchemaType[]; @@ -49,7 +45,7 @@ export default function dbSchemaToType(params?: Params): string[] | undefined { let newDefaultFields = _.cloneDeep(defaultFields); return { ...tblSchm, - fields: params?.dbSchema + fields: tblSchm.fields.find((fld) => fld.fieldName == "id") ? 
tblSchm.fields : [ newDefaultFields.shift(), @@ -62,7 +58,10 @@ export default function dbSchemaToType(params?: Params): string[] | undefined { const defDbName = ( datasquirelSchema.dbName || - datasquirelSchema.dbFullName?.replace(/datasquirel_user_\d+_/, "") + datasquirelSchema.dbFullName?.replace( + new RegExp(`${AppNames["DsqlDbPrefix"]}\\d+_`), + "" + ) ) ?.toUpperCase() .replace(/ /g, "_"); diff --git a/package-shared/functions/dsql/decrypt.ts b/package-shared/functions/dsql/decrypt.ts index a47888c..99f7c20 100644 --- a/package-shared/functions/dsql/decrypt.ts +++ b/package-shared/functions/dsql/decrypt.ts @@ -52,6 +52,7 @@ export default function decrypt({ return decrypted; } catch (error: any) { console.log("Error in decrypting =>", error.message); + console.log("encryptedString =>", encryptedString); global.ERROR_CALLBACK?.(`Error Decrypting data`, error as Error); return encryptedString; } diff --git a/package-shared/functions/dsql/default-fields-regexp.ts b/package-shared/functions/dsql/default-fields-regexp.ts index aa5c987..77c6882 100644 --- a/package-shared/functions/dsql/default-fields-regexp.ts +++ b/package-shared/functions/dsql/default-fields-regexp.ts @@ -1,16 +1,9 @@ -// @ts-check - /** - * Check for user in local storage + * Regular expression to match default fields * - * @description Preventdefault, declare variables + * @description Regular expression to match default fields */ - const defaultFieldsRegexp = - /^id$|^uuid$|^date_created$|^date_created_code$|^date_created_timestamp$|^date_updated$|^date_updated_code$|^date_updated_timestamp$/; - -//////////////////////////////////////// -//////////////////////////////////////// -//////////////////////////////////////// + /^id$|^uuid$|^uid$|^date_created$|^date_created_code$|^date_created_timestamp$|^date_updated$|^date_updated_code$|^date_updated_timestamp$/; export default defaultFieldsRegexp; diff --git a/package-shared/functions/dsql/generate-type-definitions.ts b/package-shared/functions/dsql/generate-type-definitions.ts index 7d426e9..29dbff3 100644 --- a/package-shared/functions/dsql/generate-type-definitions.ts +++ b/package-shared/functions/dsql/generate-type-definitions.ts @@ -1,4 +1,4 @@ -import { DSQL_TableSchemaType } from "../../types"; +import { DSQL_FieldSchemaType, DSQL_TableSchemaType } from "../../types"; import defaultFieldsRegexp from "./default-fields-regexp"; type Param = { @@ -8,6 +8,7 @@ type Param = { typeDefName?: string; allValuesOptional?: boolean; addExport?: boolean; + dbName?: string; }; export default function generateTypeDefinition({ @@ -17,21 +18,34 @@ export default function generateTypeDefinition({ typeDefName, allValuesOptional, addExport, + dbName, }: Param): string | null { let typeDefinition: string | null = ``; try { - const tdName = - typeDefName || - `DSQL_${query.single}_${query.single_table}`.toUpperCase(); + const tdName = typeDefName + ? typeDefName + : dbName + ? `DSQL_${dbName}_${table.tableName}`.toUpperCase() + : `DSQL_${query.single}_${query.single_table}`.toUpperCase(); const fields = table.fields; - function typeMap(type: string) { - if (type?.match(/int/i)) { + function typeMap(schemaType: DSQL_FieldSchemaType) { + if (schemaType.options && schemaType.options.length > 0) { + return schemaType.options + .map((opt) => + schemaType.dataType?.match(/int/i) || + typeof opt == "number" + ? 
`${opt}` + : `"${opt}"` + ) + .join(" | "); + } + if (schemaType.dataType?.match(/int/i)) { return "number"; } - if (type?.match(/text|varchar|timestamp/i)) { + if (schemaType.dataType?.match(/text|varchar|timestamp/i)) { return "string"; } @@ -48,21 +62,19 @@ export default function generateTypeDefinition({ fields.forEach((field) => { const nullValue = allValuesOptional - ? "?" - : field.nullValue ? "?" : field.fieldName?.match(defaultFieldsRegexp) ? "?" - : ""; + : field.notNullValue + ? "" + : "?"; typesArrayTypeScript.push( - ` ${field.fieldName}${nullValue}: ${typeMap( - field.dataType || "" - )};` + ` ${field.fieldName}${nullValue}: ${typeMap(field)};` ); typesArrayJavascript.push( - ` * @property {${typeMap(field.dataType || "")}${nullValue}} ${ + ` * @property {${typeMap(field)}${nullValue}} ${ field.fieldName }` ); diff --git a/package-shared/functions/dsql/sql/sql-delete-generator.ts b/package-shared/functions/dsql/sql/sql-delete-generator.ts index eb12f1d..cba4575 100644 --- a/package-shared/functions/dsql/sql/sql-delete-generator.ts +++ b/package-shared/functions/dsql/sql/sql-delete-generator.ts @@ -1,3 +1,6 @@ +import { SQLDeleteGeneratorParams } from "../../../types"; +import sqlEqualityParser from "../../../utils/sql-equality-parser"; + interface SQLDeleteGenReturn { query: string; values: string[]; @@ -8,13 +11,10 @@ interface SQLDeleteGenReturn { */ export default function sqlDeleteGenerator({ tableName, - data, + deleteKeyValues, dbFullName, -}: { - data: any; - tableName: string; - dbFullName?: string; -}): SQLDeleteGenReturn | undefined { + data, +}: SQLDeleteGeneratorParams): SQLDeleteGenReturn | undefined { const finalDbName = dbFullName ? `${dbFullName}.` : ""; try { @@ -23,17 +23,44 @@ export default function sqlDeleteGenerator({ let deleteBatch: string[] = []; let queryArr: string[] = []; - Object.keys(data).forEach((ky) => { - deleteBatch.push(`${ky}=?`); - queryArr.push(data[ky]); - }); + if (data) { + Object.keys(data).forEach((ky) => { + let value = data[ky] as string | number | null | undefined; + const parsedValue = + typeof value == "number" ? String(value) : value; + + if (!parsedValue) return; + + if (parsedValue.match(/%/)) { + deleteBatch.push(`${ky} LIKE ?`); + queryArr.push(parsedValue); + } else { + deleteBatch.push(`${ky}=?`); + queryArr.push(parsedValue); + } + }); + } else if (deleteKeyValues) { + deleteKeyValues.forEach((ky) => { + let value = ky.value as string | number | null | undefined; + const parsedValue = + typeof value == "number" ? 
String(value) : value; + + if (!parsedValue) return; + + const operator = sqlEqualityParser(ky.operator || "EQUAL"); + + deleteBatch.push(`${ky.key} ${operator} ?`); + queryArr.push(parsedValue); + }); + } + queryStr += ` WHERE ${deleteBatch.join(" AND ")}`; return { query: queryStr, values: queryArr, }; - } catch (/** @type {any} */ error: any) { + } catch (error: any) { console.log(`SQL delete gen ERROR: ${error.message}`); return undefined; } diff --git a/package-shared/functions/dsql/sql/sql-gen-operator-gen.ts b/package-shared/functions/dsql/sql/sql-gen-operator-gen.ts new file mode 100644 index 0000000..075a96c --- /dev/null +++ b/package-shared/functions/dsql/sql/sql-gen-operator-gen.ts @@ -0,0 +1,50 @@ +import sqlEqualityParser from "../../../utils/sql-equality-parser"; +import { ServerQueryEqualities } from "../../../types"; + +type Params = { + fieldName: string; + value?: string; + equality: (typeof ServerQueryEqualities)[number]; +}; + +/** + * # SQL Gen Operator Gen + * @description Generates an SQL operator for node module `mysql` or `serverless-mysql` + */ +export default function sqlGenOperatorGen({ + fieldName, + value, + equality, +}: Params): string { + if (value) { + if (equality == "LIKE") { + return `LOWER(${fieldName}) LIKE LOWER('%${value}%')`; + } else if (equality == "LIKE_RAW") { + return `LOWER(${fieldName}) LIKE LOWER('${value}')`; + } else if (equality == "NOT LIKE") { + return `LOWER(${fieldName}) NOT LIKE LOWER('%${value}%')`; + } else if (equality == "NOT LIKE_RAW") { + return `LOWER(${fieldName}) NOT LIKE LOWER('${value}')`; + } else if (equality == "REGEXP") { + return `LOWER(${fieldName}) REGEXP LOWER('${value}')`; + } else if (equality == "FULLTEXT") { + return `MATCH(${fieldName}) AGAINST('${value}' IN BOOLEAN MODE)`; + } else if (equality == "NOT EQUAL") { + return `${fieldName} != ${value}`; + } else if (equality) { + return `${fieldName} ${sqlEqualityParser(equality)} ${value}`; + } else { + return `${fieldName} = ${value}`; + } + } else { + if (equality == "IS NULL") { + return `${fieldName} IS NULL`; + } else if (equality == "IS NOT NULL") { + return `${fieldName} IS NOT NULL`; + } else if (equality) { + return `${fieldName} ${sqlEqualityParser(equality)} ?`; + } else { + return `${fieldName} = ?`; + } + } +} diff --git a/package-shared/functions/dsql/sql/sql-generator.ts b/package-shared/functions/dsql/sql/sql-generator.ts index 56cb4dd..f9f8c26 100644 --- a/package-shared/functions/dsql/sql/sql-generator.ts +++ b/package-shared/functions/dsql/sql/sql-generator.ts @@ -1,3 +1,4 @@ +import sqlEqualityParser from "../../../utils/sql-equality-parser"; import { ServerQueryParam, ServerQueryParamsJoin, @@ -64,16 +65,30 @@ export default function sqlGenerator< typeof queryObj.value == "number" ) { const valueParsed = String(queryObj.value); + const operator = sqlEqualityParser(queryObj.equality || "EQUAL"); if (queryObj.equality == "LIKE") { str = `LOWER(${finalFieldName}) LIKE LOWER('%${valueParsed}%')`; + } else if (queryObj.equality == "LIKE_RAW") { + str = `LOWER(${finalFieldName}) LIKE LOWER(?)`; + sqlSearhValues.push(valueParsed); + } else if (queryObj.equality == "NOT LIKE") { + str = `LOWER(${finalFieldName}) NOT LIKE LOWER('%${valueParsed}%')`; + } else if (queryObj.equality == "NOT LIKE_RAW") { + str = `LOWER(${finalFieldName}) NOT LIKE LOWER(?)`; + sqlSearhValues.push(valueParsed); } else if (queryObj.equality == "REGEXP") { - str = `${finalFieldName} REGEXP '${valueParsed}'`; + str = `LOWER(${finalFieldName}) REGEXP LOWER(?)`; + 
sqlSearhValues.push(valueParsed); } else if (queryObj.equality == "FULLTEXT") { - str = `MATCH(${finalFieldName}) AGAINST('${valueParsed}' IN BOOLEAN MODE)`; + str = `MATCH(${finalFieldName}) AGAINST(? IN BOOLEAN MODE)`; + sqlSearhValues.push(valueParsed); } else if (queryObj.equality == "NOT EQUAL") { str = `${finalFieldName} != ?`; sqlSearhValues.push(valueParsed); + } else if (queryObj.equality) { + str = `${finalFieldName} ${operator} ?`; + sqlSearhValues.push(valueParsed); } else { sqlSearhValues.push(valueParsed); } @@ -173,7 +188,7 @@ export default function sqlGenerator< } else if (genObject?.selectFields?.[0]) { if (genObject.join) { str += ` ${genObject.selectFields - ?.map((fld) => `${finalDbName}${tableName}.${fld}`) + ?.map((fld) => `${finalDbName}${tableName}.${String(fld)}`) .join(",")}`; } else { str += ` ${genObject.selectFields?.join(",")}`; @@ -272,12 +287,19 @@ export default function sqlGenerator< queryString += ` WHERE ${sqlSearhString.join(` ${stringOperator} `)}`; } - if (genObject?.order && !count) + if (genObject?.group?.[0]) { + queryString += ` GROUP BY ${genObject.group + .map((g) => `\`${g.toString()}\``) + .join(",")}`; + } + + if (genObject?.order && !count) { queryString += ` ORDER BY ${ genObject.join ? `${finalDbName}${tableName}.${String(genObject.order.field)}` : String(genObject.order.field) } ${genObject.order.strategy}`; + } if (genObject?.limit && !count) queryString += ` LIMIT ${genObject.limit}`; if (genObject?.offset && !count) diff --git a/package-shared/functions/dsql/sync-databases/index.ts b/package-shared/functions/dsql/sync-databases/index.ts new file mode 100644 index 0000000..fddb6d4 --- /dev/null +++ b/package-shared/functions/dsql/sync-databases/index.ts @@ -0,0 +1,117 @@ +import mysql, { Connection } from "mysql"; +import { exec } from "child_process"; +import { promisify } from "util"; + +// Configuration interface +interface DatabaseConfig { + host: string; + user: string; + password: string; + database?: string; // Optional for global connection +} + +// Master status interface +interface MasterStatus { + File: string; + Position: number; + Binlog_Do_DB?: string; + Binlog_Ignore_DB?: string; +} + +function getConnection(config: DatabaseConfig): Connection { + return mysql.createConnection(config); +} + +function getMasterStatus(config: DatabaseConfig): Promise { + return new Promise((resolve, reject) => { + const connection = getConnection(config); + connection.query("SHOW MASTER STATUS", (error, results) => { + connection.end(); + if (error) reject(error); + else resolve(results[0] as MasterStatus); + }); + }); +} + +async function syncDatabases() { + const config: DatabaseConfig = { + host: "localhost", + user: "root", + password: "your_password", + }; + + let lastPosition: number | null = null; // Track last synced position + + while (true) { + try { + // Get current master status + const { File, Position } = await getMasterStatus(config); + + // Determine start position (use lastPosition or 4 if first run) + const startPosition = lastPosition !== null ? 
lastPosition + 1 : 4; + if (startPosition >= Position) { + await new Promise((resolve) => setTimeout(resolve, 5000)); // Wait 5 seconds if no new changes + continue; + } + + // Execute mysqlbinlog to get changes + const execPromise = promisify(exec); + const { stdout } = await execPromise( + `mysqlbinlog --database=db_master ${File} --start-position=${startPosition} --stop-position=${Position}` + ); + + if (stdout) { + const connection = getConnection({ + ...config, + database: "db_slave", + }); + return new Promise((resolve, reject) => { + connection.query(stdout, (error) => { + connection.end(); + if (error) reject(error); + else { + lastPosition = Position; + console.log( + `Synced up to position ${Position} at ${new Date().toISOString()}` + ); + resolve(null); + } + }); + }); + } + } catch (error) { + console.error("Sync error:", error); + } + + await new Promise((resolve) => setTimeout(resolve, 5000)); // Check every 5 seconds + } +} + +// Initialize db_slave with db_master data +async function initializeSlave() { + const config: DatabaseConfig = { + host: "localhost", + user: "root", + password: "your_password", + }; + + try { + await promisify(exec)( + `mysqldump -u ${config.user} -p${config.password} db_master > db_master_backup.sql` + ); + await promisify(exec)( + `mysql -u ${config.user} -p${config.password} db_slave < db_master_backup.sql` + ); + console.log("Slave initialized with master data"); + } catch (error) { + console.error("Initialization error:", error); + } +} + +// Run the sync process +async function main() { + await initializeSlave(); + await syncDatabases(); +} + +main().catch(console.error); diff --git a/package-shared/functions/dsql/triggers-and-stored-proceedures/create-duplicate-tables-triggers.ts b/package-shared/functions/dsql/triggers-and-stored-proceedures/create-duplicate-tables-triggers.ts new file mode 100644 index 0000000..0e28843 --- /dev/null +++ b/package-shared/functions/dsql/triggers-and-stored-proceedures/create-duplicate-tables-triggers.ts @@ -0,0 +1,3 @@ +type Params = {}; + +function createDuplicateTablesTriggers({}: Params) {} diff --git a/package-shared/functions/dsql/triggers-and-stored-proceedures/database-replication.md b/package-shared/functions/dsql/triggers-and-stored-proceedures/database-replication.md new file mode 100644 index 0000000..6e4f45e --- /dev/null +++ b/package-shared/functions/dsql/triggers-and-stored-proceedures/database-replication.md @@ -0,0 +1,106 @@ +```sql +DELIMITER // + +CREATE PROCEDURE dsql_replicate_databases(IN source_db VARCHAR(64), IN target_db VARCHAR(64)) +BEGIN + -- Declare variables + DECLARE done INT DEFAULT FALSE; + DECLARE table_name VARCHAR(64); + DECLARE column_list TEXT; + DECLARE trigger_sql TEXT; + + -- Cursor to iterate over tables in source_db + DECLARE cur CURSOR FOR + SELECT TABLE_NAME + FROM INFORMATION_SCHEMA.TABLES + WHERE TABLE_SCHEMA = source_db; + + -- Handler for end of cursor + DECLARE CONTINUE HANDLER FOR NOT FOUND SET done = TRUE; + + -- Start transaction to ensure consistency + START TRANSACTION; + + -- Open cursor + OPEN cur; + + read_loop: LOOP + FETCH cur INTO table_name; + IF done THEN + LEAVE read_loop; + END IF; + + -- Dynamically get column names for the table + SELECT GROUP_CONCAT(CONCAT('NEW.', COLUMN_NAME)) + INTO column_list + FROM INFORMATION_SCHEMA.COLUMNS + WHERE TABLE_SCHEMA = source_db + AND TABLE_NAME = table_name; + + -- Drop existing triggers if they exist + SET @drop_trigger_insert = CONCAT('DROP TRIGGER IF EXISTS after_insert_', table_name); + SET 
@drop_trigger_update = CONCAT('DROP TRIGGER IF EXISTS after_update_', table_name); + SET @drop_trigger_delete = CONCAT('DROP TRIGGER IF EXISTS after_delete_', table_name); + PREPARE stmt_drop_insert FROM @drop_trigger_insert; + EXECUTE stmt_drop_insert; + DEALLOCATE PREPARE stmt_drop_insert; + PREPARE stmt_drop_update FROM @drop_trigger_update; + EXECUTE stmt_drop_update; + DEALLOCATE PREPARE stmt_drop_update; + PREPARE stmt_drop_delete FROM @drop_trigger_delete; + EXECUTE stmt_drop_delete; + DEALLOCATE PREPARE stmt_drop_delete; + + -- Create INSERT trigger + SET @trigger_sql = CONCAT( + 'CREATE TRIGGER after_insert_', table_name, + ' AFTER INSERT ON ', source_db, '.', table_name, ' FOR EACH ROW ', + 'BEGIN ', + 'INSERT INTO ', target_db, '.', table_name, ' (', + (SELECT GROUP_CONCAT(COLUMN_NAME) FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = source_db AND TABLE_NAME = table_name), ') ', + 'VALUES (', column_list, '); ', + 'END;' + ); + PREPARE stmt FROM @trigger_sql; + EXECUTE stmt; + DEALLOCATE PREPARE stmt; + + -- Create UPDATE trigger + SET @trigger_sql = CONCAT( + 'CREATE TRIGGER after_update_', table_name, + ' AFTER UPDATE ON ', source_db, '.', table_name, ' FOR EACH ROW ', + 'BEGIN ', + 'UPDATE ', target_db, '.', table_name, ' SET ', + (SELECT GROUP_CONCAT(CONCAT(COLUMN_NAME, '=NEW.', COLUMN_NAME)) + FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = source_db AND TABLE_NAME = table_name), + ' WHERE ', + (SELECT CONCAT('id=NEW.id') + FROM INFORMATION_SCHEMA.COLUMNS + WHERE TABLE_SCHEMA = source_db AND TABLE_NAME = table_name AND COLUMN_NAME = 'id' LIMIT 1), '; ', + 'END;' + ); + PREPARE stmt FROM @trigger_sql; + EXECUTE stmt; + DEALLOCATE PREPARE stmt; + + -- Create DELETE trigger + SET @trigger_sql = CONCAT( + 'CREATE TRIGGER after_delete_', table_name, + ' AFTER DELETE ON ', source_db, '.', table_name, ' FOR EACH ROW ', + 'BEGIN ', + 'DELETE FROM ', target_db, '.', table_name, ' WHERE id=OLD.id; ', + 'END;' + ); + PREPARE stmt FROM @trigger_sql; + EXECUTE stmt; + DEALLOCATE PREPARE stmt; + + END LOOP; + + CLOSE cur; + COMMIT; + +END // + +DELIMITER ; +``` diff --git a/package-shared/functions/dsql/triggers-and-stored-proceedures/database-replication.sql b/package-shared/functions/dsql/triggers-and-stored-proceedures/database-replication.sql new file mode 100644 index 0000000..728b5d9 --- /dev/null +++ b/package-shared/functions/dsql/triggers-and-stored-proceedures/database-replication.sql @@ -0,0 +1,212 @@ +DELIMITER / / CREATE PROCEDURE replicate_databases( + IN source_db VARCHAR(64), + IN target_db VARCHAR(64) +) BEGIN -- Declare variables +DECLARE done INT DEFAULT FALSE; + +DECLARE table_name VARCHAR(64); + +DECLARE column_list TEXT; + +DECLARE trigger_sql TEXT; + +-- Cursor to iterate over tables in source_db +DECLARE cur CURSOR FOR +SELECT + TABLE_NAME +FROM + INFORMATION_SCHEMA.TABLES +WHERE + TABLE_SCHEMA = source_db; + +-- Handler for end of cursor +DECLARE CONTINUE HANDLER FOR NOT FOUND +SET + done = TRUE; + +-- Start transaction to ensure consistency +START TRANSACTION; + +-- Open cursor +OPEN cur; + +read_loop: LOOP FETCH cur INTO table_name; + +IF done THEN LEAVE read_loop; + +END IF; + +-- Dynamically get column names for the table +SELECT + GROUP_CONCAT(CONCAT('NEW.', COLUMN_NAME)) INTO column_list +FROM + INFORMATION_SCHEMA.COLUMNS +WHERE + TABLE_SCHEMA = source_db + AND TABLE_NAME = table_name; + +-- Drop existing triggers if they exist +SET + @drop_trigger_insert = CONCAT( + 'DROP TRIGGER IF EXISTS after_insert_', + table_name + ); + +SET + 
@drop_trigger_update = CONCAT( + 'DROP TRIGGER IF EXISTS after_update_', + table_name + ); + +SET + @drop_trigger_delete = CONCAT( + 'DROP TRIGGER IF EXISTS after_delete_', + table_name + ); + +PREPARE stmt_drop_insert +FROM + @drop_trigger_insert; + +EXECUTE stmt_drop_insert; + +DEALLOCATE PREPARE stmt_drop_insert; + +PREPARE stmt_drop_update +FROM + @drop_trigger_update; + +EXECUTE stmt_drop_update; + +DEALLOCATE PREPARE stmt_drop_update; + +PREPARE stmt_drop_delete +FROM + @drop_trigger_delete; + +EXECUTE stmt_drop_delete; + +DEALLOCATE PREPARE stmt_drop_delete; + +-- Create INSERT trigger +SET + @trigger_sql = CONCAT( + 'CREATE TRIGGER after_insert_', + table_name, + ' AFTER INSERT ON ', + source_db, + '.', + table_name, + ' FOR EACH ROW ', + 'BEGIN ', + 'INSERT INTO ', + target_db, + '.', + table_name, + ' (', + ( + SELECT + GROUP_CONCAT(COLUMN_NAME) + FROM + INFORMATION_SCHEMA.COLUMNS + WHERE + TABLE_SCHEMA = source_db + AND TABLE_NAME = table_name + ), + ') ', + 'VALUES (', + column_list, + '); ', + 'END;' + ); + +PREPARE stmt +FROM + @trigger_sql; + +EXECUTE stmt; + +DEALLOCATE PREPARE stmt; + +-- Create UPDATE trigger +SET + @trigger_sql = CONCAT( + 'CREATE TRIGGER after_update_', + table_name, + ' AFTER UPDATE ON ', + source_db, + '.', + table_name, + ' FOR EACH ROW ', + 'BEGIN ', + 'UPDATE ', + target_db, + '.', + table_name, + ' SET ', + ( + SELECT + GROUP_CONCAT(CONCAT(COLUMN_NAME, '=NEW.', COLUMN_NAME)) + FROM + INFORMATION_SCHEMA.COLUMNS + WHERE + TABLE_SCHEMA = source_db + AND TABLE_NAME = table_name + ), + ' WHERE ', + ( + SELECT + CONCAT('id=NEW.id') + FROM + INFORMATION_SCHEMA.COLUMNS + WHERE + TABLE_SCHEMA = source_db + AND TABLE_NAME = table_name + AND COLUMN_NAME = 'id' + LIMIT + 1 + ), '; ', 'END;' + ); + +PREPARE stmt +FROM + @trigger_sql; + +EXECUTE stmt; + +DEALLOCATE PREPARE stmt; + +-- Create DELETE trigger +SET + @trigger_sql = CONCAT( + 'CREATE TRIGGER after_delete_', + table_name, + ' AFTER DELETE ON ', + source_db, + '.', + table_name, + ' FOR EACH ROW ', + 'BEGIN ', + 'DELETE FROM ', + target_db, + '.', + table_name, + ' WHERE id=OLD.id; ', + 'END;' + ); + +PREPARE stmt +FROM + @trigger_sql; + +EXECUTE stmt; + +DEALLOCATE PREPARE stmt; + +END LOOP; + +CLOSE cur; + +COMMIT; + +END / / DELIMITER; \ No newline at end of file diff --git a/package-shared/functions/dsql/triggers-and-stored-proceedures/grab-trigger-name.ts b/package-shared/functions/dsql/triggers-and-stored-proceedures/grab-trigger-name.ts new file mode 100644 index 0000000..cbcbcb2 --- /dev/null +++ b/package-shared/functions/dsql/triggers-and-stored-proceedures/grab-trigger-name.ts @@ -0,0 +1,23 @@ +export const TriggerParadigms = ["sync_tables", "sync_dbs"] as const; + +type Params = { + userId?: string | number; + paradigm: (typeof TriggerParadigms)[number]; + dbId?: string | number; + tableName?: string; +}; + +export default function grabTriggerName({ + userId, + paradigm, + dbId, + tableName, +}: Params) { + let triggerName = `dsql_trig_${paradigm}`; + + if (userId) triggerName += `_${userId}`; + if (dbId) triggerName += `_${dbId}`; + if (tableName) triggerName += `_${tableName}`; + + return triggerName; +} diff --git a/package-shared/functions/dsql/triggers-and-stored-proceedures/stored-proceedure-sql-gen.ts b/package-shared/functions/dsql/triggers-and-stored-proceedures/stored-proceedure-sql-gen.ts new file mode 100644 index 0000000..0577f91 --- /dev/null +++ b/package-shared/functions/dsql/triggers-and-stored-proceedures/stored-proceedure-sql-gen.ts @@ -0,0 +1,44 @@ +import { 
DSQL_DatabaseSchemaType, DSQL_TableSchemaType } from "../../../types"; + +const TriggerTypes = [ + { + name: "after_insert", + value: "INSERT", + }, + { + name: "after_update", + value: "UPDATE", + }, + { + name: "after_delete", + value: "DELETE", + }, +] as const; + +export type TriggerSQLGenParams = { + type: (typeof TriggerTypes)[number]; + srcDbSchema: DSQL_DatabaseSchemaType; + srcTableSchema: DSQL_TableSchemaType; + content: string; + proceedureName: string; +}; + +export default function triggerSQLGen({ + type, + srcDbSchema, + srcTableSchema, + content, + proceedureName, +}: TriggerSQLGenParams) { + let sql = `DELIMITER //\n`; + + sql += `CREATE PROCEDURE ${proceedureName}`; + sql += `\nBEGIN`; + + sql += ` ${content}`; + + sql += `\nEND //`; + sql += `\nDELIMITER\n`; + + return sql; +} diff --git a/package-shared/functions/dsql/triggers-and-stored-proceedures/table-replication-trigger-sql-gen.ts b/package-shared/functions/dsql/triggers-and-stored-proceedures/table-replication-trigger-sql-gen.ts new file mode 100644 index 0000000..7c3837e --- /dev/null +++ b/package-shared/functions/dsql/triggers-and-stored-proceedures/table-replication-trigger-sql-gen.ts @@ -0,0 +1,48 @@ +import { DSQL_DatabaseSchemaType, DSQL_TableSchemaType } from "../../../types"; +import triggerSQLGen, { TriggerSQLGenParams } from "./trigger-sql-gen"; + +type Params = TriggerSQLGenParams & { + dstDbSchema: DSQL_DatabaseSchemaType; + dstTableSchema: DSQL_TableSchemaType; +}; + +export default function tableReplicationTriggerSQLGen({ + type, + dstDbSchema, + dstTableSchema, + srcDbSchema, + srcTableSchema, + userId, + paradigm, +}: Params) { + let sql = `CREATE TRIGGER`; + + const srcColumns = srcTableSchema.fields + .map((fld) => fld.fieldName) + .filter((fld) => typeof fld == "string"); + const dstColumns = dstTableSchema.fields + .map((fld) => fld.fieldName) + .filter((fld) => typeof fld == "string"); + + if (type.name == "after_insert") { + sql += ` INSERT INTO ${dstDbSchema.dbFullName}.${dstTableSchema.tableName}`; + sql += ` (${dstColumns.join(",")})`; + sql += ` VALUES (${dstColumns.map((c) => `NEW.${c}`).join(",")})`; + } else if (type.name == "after_update") { + sql += ` UPDATE ${dstDbSchema.dbFullName}.${dstTableSchema.tableName}`; + sql += ` SET ${dstColumns.map((c) => `${c}=NEW.${c}`).join(",")}`; + sql += ` WHERE id = NEW.id`; + } else if (type.name == "after_delete") { + sql += ` DELETE FROM ${dstDbSchema.dbFullName}.${dstTableSchema.tableName}`; + sql += ` WHERE id = OLD.id`; + } + + return triggerSQLGen({ + content: sql, + srcDbSchema, + srcTableSchema, + type, + paradigm, + userId, + }); +} diff --git a/package-shared/functions/dsql/triggers-and-stored-proceedures/table-replication.md b/package-shared/functions/dsql/triggers-and-stored-proceedures/table-replication.md new file mode 100644 index 0000000..00195d0 --- /dev/null +++ b/package-shared/functions/dsql/triggers-and-stored-proceedures/table-replication.md @@ -0,0 +1,92 @@ +```sql +DELIMITER // + +CREATE PROCEDURE dsql_replicate_two_tables( + IN source_db VARCHAR(64), + IN target_db VARCHAR(64), + IN source_table VARCHAR(64), + IN target_table VARCHAR(64) +) +BEGIN + -- Declare variables + DECLARE column_list TEXT; + DECLARE set_clause TEXT; + DECLARE trigger_sql TEXT; + + -- Start transaction to ensure consistency + START TRANSACTION; + + -- Dynamically get column names for the source table + SELECT GROUP_CONCAT(CONCAT('NEW.', COLUMN_NAME)) + INTO column_list + FROM INFORMATION_SCHEMA.COLUMNS + WHERE TABLE_SCHEMA = source_db + AND 
TABLE_NAME = source_table; + + SELECT GROUP_CONCAT(CONCAT(COLUMN_NAME, '=NEW.', COLUMN_NAME)) + INTO set_clause + FROM INFORMATION_SCHEMA.COLUMNS + WHERE TABLE_SCHEMA = source_db + AND TABLE_NAME = source_table; + + -- Drop existing triggers if they exist + SET @drop_trigger_insert = CONCAT('DROP TRIGGER IF EXISTS after_insert_', source_table); + SET @drop_trigger_update = CONCAT('DROP TRIGGER IF EXISTS after_update_', source_table); + SET @drop_trigger_delete = CONCAT('DROP TRIGGER IF EXISTS after_delete_', source_table); + PREPARE stmt_drop_insert FROM @drop_trigger_insert; + EXECUTE stmt_drop_insert; + DEALLOCATE PREPARE stmt_drop_insert; + PREPARE stmt_drop_update FROM @drop_trigger_update; + EXECUTE stmt_drop_update; + DEALLOCATE PREPARE stmt_drop_update; + PREPARE stmt_drop_delete FROM @drop_trigger_delete; + EXECUTE stmt_drop_delete; + DEALLOCATE PREPARE stmt_drop_delete; + + -- Create INSERT trigger + SET @trigger_sql = CONCAT( + 'CREATE TRIGGER after_insert_', source_table, + ' AFTER INSERT ON ', source_db, '.', source_table, ' FOR EACH ROW ', + 'BEGIN ', + 'INSERT INTO ', target_db, '.', target_table, ' (', + (SELECT GROUP_CONCAT(COLUMN_NAME) FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = source_db AND TABLE_NAME = source_table), ') ', + 'VALUES (', column_list, '); ', + 'END;' + ); + PREPARE stmt FROM @trigger_sql; + EXECUTE stmt; + DEALLOCATE PREPARE stmt; + + -- Create UPDATE trigger + -- Assume 'id' as the primary key; adjust if different + SET @trigger_sql = CONCAT( + 'CREATE TRIGGER after_update_', source_table, + ' AFTER UPDATE ON ', source_db, '.', source_table, ' FOR EACH ROW ', + 'BEGIN ', + 'UPDATE ', target_db, '.', target_table, ' SET ', + set_clause, + ' WHERE id = NEW.id; ', + 'END;' + ); + PREPARE stmt FROM @trigger_sql; + EXECUTE stmt; + DEALLOCATE PREPARE stmt; + + -- Create DELETE trigger + SET @trigger_sql = CONCAT( + 'CREATE TRIGGER after_delete_', source_table, + ' AFTER DELETE ON ', source_db, '.', source_table, ' FOR EACH ROW ', + 'BEGIN ', + 'DELETE FROM ', target_db, '.', target_table, ' WHERE id = OLD.id; ', + 'END;' + ); + PREPARE stmt FROM @trigger_sql; + EXECUTE stmt; + DEALLOCATE PREPARE stmt; + + COMMIT; + +END // + +DELIMITER ; +``` diff --git a/package-shared/functions/dsql/triggers-and-stored-proceedures/trigger-sql-gen.ts b/package-shared/functions/dsql/triggers-and-stored-proceedures/trigger-sql-gen.ts new file mode 100644 index 0000000..418897a --- /dev/null +++ b/package-shared/functions/dsql/triggers-and-stored-proceedures/trigger-sql-gen.ts @@ -0,0 +1,53 @@ +import { DSQL_DatabaseSchemaType, DSQL_TableSchemaType } from "../../../types"; +import grabTriggerName, { TriggerParadigms } from "./grab-trigger-name"; + +const TriggerTypes = [ + { + name: "after_insert", + value: "INSERT", + }, + { + name: "after_update", + value: "UPDATE", + }, + { + name: "after_delete", + value: "DELETE", + }, +] as const; + +export type TriggerSQLGenParams = { + type: (typeof TriggerTypes)[number]; + srcDbSchema: DSQL_DatabaseSchemaType; + srcTableSchema: DSQL_TableSchemaType; + content: string; + userId?: string | number; + paradigm: (typeof TriggerParadigms)[number]; +}; + +export default function triggerSQLGen({ + type, + srcDbSchema, + srcTableSchema, + content, + userId, + paradigm, +}: TriggerSQLGenParams) { + let sql = `CREATE TRIGGER`; + + let triggerName = grabTriggerName({ + paradigm, + dbId: srcDbSchema.id, + tableName: srcTableSchema.tableName, + userId, + }); + + sql += ` ${triggerName}`; + sql += ` AFTER ${type.value} ON 
${srcTableSchema.tableName}`; + sql += ` FOR EACH ROW BEGIN`; + + sql += ` ${content}`; + sql += ` END`; + + return sql; +} diff --git a/package-shared/functions/web-app/db/grab-user-resource/index.ts b/package-shared/functions/web-app/db/grab-user-resource/index.ts new file mode 100644 index 0000000..47fae8e --- /dev/null +++ b/package-shared/functions/web-app/db/grab-user-resource/index.ts @@ -0,0 +1,44 @@ +import { DsqlCrudQueryObject } from "../../../../types"; +import { DsqlTables } from "../../../../types/dsql"; +import dsqlCrud from "../../../../utils/data-fetching/crud"; +import query from "./query"; +import _ from "lodash"; +import _n from "../../../../utils/numberfy"; + +export type GrabUserResourceParams = { + query?: DsqlCrudQueryObject; + userId?: string | number; + tableName: (typeof DsqlTables)[number]; + count?: boolean; + countOnly?: boolean; + noLimit?: boolean; + isSuperUser?: boolean; + targetID?: string | number; +}; + +export default async function dbGrabUserResource< + T extends { [k: string]: any } = any +>(params: GrabUserResourceParams) { + let queryObject = query(params); + + let result = await dsqlCrud({ + action: "get", + table: params.tableName, + query: queryObject, + count: params.count, + countOnly: params.countOnly, + }); + + const payload = result?.payload as T[] | undefined; + + return { + batch: payload || null, + single: payload?.[0] || null, + debug: { + queryObject: result?.queryObject, + error: result?.error, + msg: result?.msg, + }, + count: _n(result?.count), + }; +} diff --git a/package-shared/functions/web-app/db/grab-user-resource/query.ts b/package-shared/functions/web-app/db/grab-user-resource/query.ts new file mode 100644 index 0000000..5a9a358 --- /dev/null +++ b/package-shared/functions/web-app/db/grab-user-resource/query.ts @@ -0,0 +1,39 @@ +import { GrabUserResourceParams } from "."; +import _ from "lodash"; +import { DsqlCrudQueryObject } from "../../../../types"; +import ResourceLimits from "../../../../dict/resource-limits"; +import _n from "../../../../utils/numberfy"; + +export default function (params?: GrabUserResourceParams) { + let queryObject: DsqlCrudQueryObject<{ [k: string]: any }> = { + limit: params?.noLimit ? undefined : ResourceLimits["general"], + order: { + field: "id", + strategy: "DESC", + }, + }; + + if (params?.targetID) { + const targetIDQuery: DsqlCrudQueryObject<{ [k: string]: any }> = { + query: { + id: { + value: _n(params.targetID).toString(), + }, + }, + }; + queryObject = _.merge(queryObject, targetIDQuery); + } + + let queryFixedObject: DsqlCrudQueryObject<{ [k: string]: any }> = + params?.isSuperUser + ? 
{} + : { + query: { + user_id: { + value: String(params?.userId || 0), + }, + }, + }; + + return _.merge(queryObject, params?.query, queryFixedObject); +} diff --git a/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-creation.ts b/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-creation.ts new file mode 100644 index 0000000..5889b78 --- /dev/null +++ b/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-creation.ts @@ -0,0 +1,87 @@ +import { DSQL_DATASQUIREL_MARIADB_USERS } from "../../../types/dsql"; +import { UserType } from "../../../types"; +import dbHandler from "../../backend/dbHandler"; +import normalizeText from "../../../utils/normalize-text"; +import decrypt from "../../dsql/decrypt"; + +type Params = { + user: UserType; + existingRecord?: DSQL_DATASQUIREL_MARIADB_USERS | null; + updatedRecord: DSQL_DATASQUIREL_MARIADB_USERS; +}; + +type Return = { + msg?: string; + success?: boolean; +}; + +export default async function handleMariadbUserCreation({ + user, + existingRecord, + updatedRecord, +}: Params): Promise { + const parsedPassword = decrypt({ + encryptedString: updatedRecord?.password || "", + }); + + if (existingRecord?.id && updatedRecord?.id) { + if ( + existingRecord.username !== updatedRecord.username || + existingRecord.host !== updatedRecord.host + ) { + const renameSQLUser = await dbHandler({ + query: normalizeText(` + RENAME USER '${existingRecord.username}'@'${existingRecord.host}' \ + TO '${updatedRecord.username}'@'${updatedRecord.host}' + `), + }); + + if (!renameSQLUser) { + await createNewSQLUser({ + host: updatedRecord.host, + password: parsedPassword, + username: updatedRecord.username, + }); + } + } + + const updateSQLUser = await dbHandler({ + query: normalizeText(` + ALTER USER '${updatedRecord.username}'@'${updatedRecord.host}' \ + IDENTIFIED BY '${parsedPassword}' + `), + }); + + if (!updateSQLUser) { + await createNewSQLUser({ + host: updatedRecord.host, + password: parsedPassword, + username: updatedRecord.username, + }); + } + } else if (!existingRecord?.id && updatedRecord?.id) { + await createNewSQLUser({ + host: updatedRecord.host, + password: parsedPassword, + username: updatedRecord.username, + }); + } + + return { success: true }; +} + +type CreateNewUserParams = { + username?: string; + host?: string; + password?: string; +}; + +export async function createNewSQLUser({ + host, + password, + username, +}: CreateNewUserParams) { + return await dbHandler({ + query: `CREATE USER IF NOT EXISTS '${username}'@'${host}' IDENTIFIED BY '${password}'`, + }); +} diff --git a/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-cleanup-records.ts b/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-cleanup-records.ts new file mode 100644 index 0000000..2d477c5 --- /dev/null +++ b/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-cleanup-records.ts @@ -0,0 +1,66 @@ +import { + DSQL_DATASQUIREL_MARIADB_USER_DATABASES, + DSQL_DATASQUIREL_MARIADB_USER_PRIVILEGES, + DSQL_DATASQUIREL_MARIADB_USER_TABLES, + DSQL_DATASQUIREL_MARIADB_USERS, + DsqlTables, +} from "../../../types/dsql"; +import { UserType } from "../../../types"; +import dsqlCrud from "../../../utils/data-fetching/crud"; +import _n from "../../../utils/numberfy"; + +type Params = { + user: UserType; + updatedRecord: DSQL_DATASQUIREL_MARIADB_USERS; +}; + +type Return = { + msg?: string; + success?: boolean; +}; + +export default async function 
handleMariadbUserGrantsForDatabasesCleanUpRecords({ + user, + updatedRecord, +}: Params): Promise { + /** + * # Clean up Records + */ + await dsqlCrud< + DSQL_DATASQUIREL_MARIADB_USER_DATABASES, + (typeof DsqlTables)[number] + >({ + action: "delete", + table: "mariadb_user_databases", + deleteData: { + user_id: user.id, + mariadb_user_id: updatedRecord.id, + }, + }); + + await dsqlCrud< + DSQL_DATASQUIREL_MARIADB_USER_PRIVILEGES, + (typeof DsqlTables)[number] + >({ + action: "delete", + table: "mariadb_user_privileges", + deleteData: { + user_id: user.id, + mariadb_user_id: updatedRecord.id, + }, + }); + + await dsqlCrud< + DSQL_DATASQUIREL_MARIADB_USER_TABLES, + (typeof DsqlTables)[number] + >({ + action: "delete", + table: "mariadb_user_tables", + deleteData: { + user_id: user.id, + mariadb_user_id: updatedRecord.id, + }, + }); + + return { success: true }; +} diff --git a/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-recreate-grants.ts b/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-recreate-grants.ts new file mode 100644 index 0000000..67f5493 --- /dev/null +++ b/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-recreate-grants.ts @@ -0,0 +1,105 @@ +import { DSQL_DATASQUIREL_MARIADB_USERS } from "../../../types/dsql"; +import { + DatabaseScopedAccessObject, + UserSQLPermissions, + UserType, +} from "../../../types"; +import grabDbFullName from "../../../utils/grab-db-full-name"; +import dbHandler from "../../backend/dbHandler"; +import normalizeText from "../../../utils/normalize-text"; + +type Params = { + currentAccessedDatabase: DatabaseScopedAccessObject; + user: UserType; + updatedRecord: DSQL_DATASQUIREL_MARIADB_USERS; +}; + +type Return = { + msg?: string; + success?: boolean; +}; + +export default async function handleMariadbUserGrantsForDatabasesRecreateGrants({ + currentAccessedDatabase, + user, + updatedRecord, +}: Params): Promise { + const { accessedDatabase, dbSlug, allGrants, allTables, grants, tables } = + currentAccessedDatabase; + + const dbFullName = grabDbFullName({ + user, + dbName: dbSlug, + }); + + if (allGrants && allTables) { + const grantAllPrivileges = await dbHandler({ + query: normalizeText(` + GRANT ALL PRIVILEGES ON \`${dbFullName}\`.* TO \ + '${updatedRecord.username}'@'${updatedRecord.host}' + `), + }); + + return { success: true }; + } + + if (allGrants && tables?.[0]) { + for (let t = 0; t < tables.length; t++) { + const table = tables[t]; + + // queries.push( + // normalizeText(` + // GRANT ALL PRIVILEGES ON \`${dbFullName}\`.\`${table.tableSlug}\` \ + // TO '${updatedRecord.username}'@'${updatedRecord.host}' + // `) + // ); + + const grantAllPrivilegesToTables = await dbHandler({ + query: normalizeText(` + GRANT ALL PRIVILEGES ON \`${dbFullName}\`.\`${table.tableSlug}\` \ + TO '${updatedRecord.username}'@'${updatedRecord.host}' + `), + }); + } + + return { success: true }; + } + + if (grants?.[0]) { + const isGrantsInalid = grants.find( + (g) => !UserSQLPermissions.includes(g) + ); + + if (isGrantsInalid) { + return { msg: `grants is/are invalid!` }; + } + + if (tables?.[0]) { + for (let t = 0; t < tables.length; t++) { + const table = tables[t]; + + const grantSpecificPrivilegesToTables = await dbHandler({ + query: normalizeText(` + GRANT ${grants.join(",")} ON \ + \`${dbFullName}\`.\`${table.tableSlug}\` TO \ + '${updatedRecord.username}'@'${updatedRecord.host}' + `), + }); + } + + return { success: true }; + } else { + const 
grantSpecificPrivilegesToAllTables = await dbHandler({
+                query: normalizeText(`
+                    GRANT ${grants.join(",")} ON \
+                    \`${dbFullName}\`.* TO \
+                    '${updatedRecord.username}'@'${updatedRecord.host}'
+                `),
+            });
+
+            return { success: true };
+        }
+    }
+
+    return { success: true };
+}
diff --git a/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-recreate-records.ts b/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-recreate-records.ts
new file mode 100644
index 0000000..83e68b6
--- /dev/null
+++ b/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases-recreate-records.ts
@@ -0,0 +1,107 @@
+import {
+    DSQL_DATASQUIREL_MARIADB_USER_DATABASES,
+    DSQL_DATASQUIREL_MARIADB_USER_PRIVILEGES,
+    DSQL_DATASQUIREL_MARIADB_USER_TABLES,
+    DSQL_DATASQUIREL_MARIADB_USERS,
+    DsqlTables,
+} from "../../../types/dsql";
+import {
+    DatabaseScopedAccessObject,
+    UserSQLPermissions,
+    UserType,
+} from "../../../types";
+import dsqlCrud from "../../../utils/data-fetching/crud";
+import _n from "../../../utils/numberfy";
+
+type Params = {
+    currentAccessedDatabase: DatabaseScopedAccessObject;
+    user: UserType;
+    updatedRecord: DSQL_DATASQUIREL_MARIADB_USERS;
+};
+
+type Return = {
+    msg?: string;
+    success?: boolean;
+};
+
+export default async function handleMariadbUserGrantsForDatabasesRecreateRecordsForDatabase({
+    currentAccessedDatabase,
+    user,
+    updatedRecord,
+}: Params): Promise<Return> {
+    const { accessedDatabase, dbSlug, allGrants, allTables, grants, tables } =
+        currentAccessedDatabase;
+
+    const insertSQLDbRecord = await dsqlCrud<
+        DSQL_DATASQUIREL_MARIADB_USER_DATABASES,
+        (typeof DsqlTables)[number]
+    >({
+        action: "insert",
+        table: "mariadb_user_databases",
+        data: {
+            all_privileges: allGrants ? 1 : 0,
+            all_tables: allTables ? 1 : 0,
+            db_id: _n(accessedDatabase.dbId),
+            db_slug: accessedDatabase.dbSlug,
+            db_schema_id: _n(accessedDatabase.dbSchemaId),
+            user_id: user.id,
+            mariadb_user_id: updatedRecord.id,
+        },
+    });
+
+    if (tables?.[0]) {
+        for (let t = 0; t < tables.length; t++) {
+            const table = tables[t];
+
+            const insertTable = await dsqlCrud<
+                DSQL_DATASQUIREL_MARIADB_USER_TABLES,
+                (typeof DsqlTables)[number]
+            >({
+                action: "insert",
+                table: "mariadb_user_tables",
+                data: {
+                    all_privileges: allGrants ? 1 : 0,
+                    all_fields: 1,
+                    user_id: user.id,
+                    mariadb_user_id: updatedRecord.id,
+                    table_slug: table.tableSlug,
+                    db_id: _n(table.dbId),
+                    db_slug: table.dbSlug,
+                    db_schema_id: _n(table.dbSchemaId),
+                },
+            });
+        }
+    }
+
+    if (grants?.[0]) {
+        const isGrantsInvalid = grants.find(
+            (g) => !UserSQLPermissions.includes(g)
+        );
+
+        if (isGrantsInvalid) {
+            return { msg: `One or more grants are invalid!` };
+        }
+
+        for (let t = 0; t < grants.length; t++) {
+            const grant = grants[t];
+
+            await dsqlCrud<
+                DSQL_DATASQUIREL_MARIADB_USER_PRIVILEGES,
+                (typeof DsqlTables)[number]
+            >({
+                action: "insert",
+                table: "mariadb_user_privileges",
+                data: {
+                    user_id: user.id,
+                    mariadb_user_id: updatedRecord.id,
+                    privilege: grant,
+                    db_id: _n(accessedDatabase.dbId),
+                    db_slug: accessedDatabase.dbSlug,
+                    db_schema_id: _n(accessedDatabase.dbSchemaId),
+                },
+            });
+        }
+    }
+
+    return { success: true };
+}
diff --git a/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases.ts b/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases.ts
new file mode 100644
index 0000000..fe1d8af
--- /dev/null
+++ b/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants-for-databases.ts
@@ -0,0 +1,40 @@
+import { DSQL_DATASQUIREL_MARIADB_USERS } from "../../../types/dsql";
+import { DatabaseScopedAccessObject, UserType } from "../../../types";
+import handleMariadbUserGrantsForDatabasesRecreateRecordsForDatabase from "./handle-mariadb-user-grants-for-databases-recreate-records";
+import handleMariadbUserGrantsForDatabasesRecreateGrants from "./handle-mariadb-user-grants-for-databases-recreate-grants";
+
+type Params = {
+    accessedDatabases: DatabaseScopedAccessObject[];
+    user: UserType;
+    updatedRecord: DSQL_DATASQUIREL_MARIADB_USERS;
+};
+
+type Return = {
+    msg?: string;
+    success?: boolean;
+};
+
+export default async function handleMariadbUserGrantsForDatabases({
+    accessedDatabases,
+    user,
+    updatedRecord,
+}: Params): Promise<Return> {
+    /**
+     * # Recreate Records
+     */
+    for (let i = 0; i < accessedDatabases.length; i++) {
+        await handleMariadbUserGrantsForDatabasesRecreateRecordsForDatabase({
+            currentAccessedDatabase: accessedDatabases[i],
+            updatedRecord,
+            user,
+        });
+
+        await handleMariadbUserGrantsForDatabasesRecreateGrants({
+            currentAccessedDatabase: accessedDatabases[i],
+            updatedRecord,
+            user,
+        });
+    }
+
+    return { success: true };
+}
diff --git a/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants.ts b/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants.ts
new file mode 100644
index 0000000..ba3529e
--- /dev/null
+++ b/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-grants.ts
@@ -0,0 +1,106 @@
+import {
+    AddUpdateMariadbUserAPIReqBody,
+    UserSQLPermissions,
+    UserType,
+} from "../../../types";
+import {
+    DSQL_DATASQUIREL_MARIADB_USER_PRIVILEGES,
+    DSQL_DATASQUIREL_MARIADB_USERS,
+    DsqlTables,
+} from "../../../types/dsql";
+import dsqlCrud from "../../../utils/data-fetching/crud";
+import grabDbNames from "../../../utils/grab-db-names";
+import normalizeText from "../../../utils/normalize-text";
+import dbHandler from "../../backend/dbHandler";
+import handleMariadbUserGrantsForDatabases from "./handle-mariadb-user-grants-for-databases";
+import revokeAllExistingGrants from "./revoke-all-existing-grants";
+
+type Params = AddUpdateMariadbUserAPIReqBody & {
+    user: UserType;
+    updatedRecord: DSQL_DATASQUIREL_MARIADB_USERS;
+};
+
+type Return = {
+    msg?: string;
+    success?: boolean;
+};
+
+export default async function handleMariadbUserGrants({
+    accessedDatabases,
+    grants,
+    isAllDbsAccess,
+    isAllGrants,
+    user,
+    updatedRecord,
+}: Params): Promise<Return> {
+    const { userDbPrefix } = grabDbNames({ user });
+
+    /**
+     * # Revoke All Existing Grants
+     */
+    await revokeAllExistingGrants({ updatedRecord, user });
+
+    /**
+     * # Recreate Grants
+     */
+    if (isAllGrants && isAllDbsAccess) {
+        const grantAllPrivileges = await dbHandler({
+            query: normalizeText(`
+                GRANT ALL PRIVILEGES ON \
+                \`${userDbPrefix.replace(/\_/g, "\\_")}%\`.* TO \
+                '${updatedRecord.username}'@'${updatedRecord.host}'
+            `),
+        });
+        return { success: true };
+    }
+
+    if (isAllDbsAccess && grants) {
+        const isGrantsInvalid = grants.find(
+            (g) => !UserSQLPermissions.includes(g)
+        );
+
+        if (isGrantsInvalid) {
+            return { msg: `One or more grants are invalid!` };
+        }
+
+        const grantQuery = normalizeText(`
+            GRANT ${grants.join(",")} ON \`${userDbPrefix}%\`.* TO \
+            '${updatedRecord.username}'@'${updatedRecord.host}'
+        `);
+
+        const grantSpecificPrivilegesToAllDbs = await dbHandler({
+            query: grantQuery,
+        });
+
+        for (let t = 0; t < grants.length; t++) {
+            const grant = grants[t];
+
+            const addGrant = await dsqlCrud<
+                DSQL_DATASQUIREL_MARIADB_USER_PRIVILEGES,
+                (typeof DsqlTables)[number]
+            >({
+                action: "insert",
+                table: "mariadb_user_privileges",
+                data: {
+                    user_id: user.id,
+                    mariadb_user_id: updatedRecord.id,
+                    privilege: grant,
+                },
+            });
+        }
+
+        return { success: true };
+    }
+
+    if (accessedDatabases?.[0]) {
+        const res = await handleMariadbUserGrantsForDatabases({
+            accessedDatabases,
+            updatedRecord,
+            user,
+        });
+
+        return res;
+    }
+
+    return {};
+}
diff --git a/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-record.ts b/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-record.ts
new file mode 100644
index 0000000..c0daca5
--- /dev/null
+++ b/package-shared/functions/web-app/mariadb-user/handle-mariadb-user-record.ts
@@ -0,0 +1,89 @@
+import { AddUpdateMariadbUserAPIReqBody, UserType } from "../../../types";
+import {
+    DSQL_DATASQUIREL_MARIADB_USERS,
+    DsqlTables,
+} from "../../../types/dsql";
+import grabSQLUserName from "../../../utils/grab-sql-user-name";
+import addDbEntry from "../../backend/db/addDbEntry";
+import encrypt from "../../dsql/encrypt";
+import dbGrabUserResource from "../db/grab-user-resource";
+
+type Params = AddUpdateMariadbUserAPIReqBody & {
+    user: UserType;
+};
+
+type Return = {
+    existingRecord?: DSQL_DATASQUIREL_MARIADB_USERS | null;
+    updatedRecord?: DSQL_DATASQUIREL_MARIADB_USERS | null;
+    msg?: string;
+};
+
+export default async function handleMariadbUserRecord({
+    mariadbUser,
+    accessedDatabases,
+    grants,
+    isAllDbsAccess,
+    isAllGrants,
+    user,
+}: Params): Promise<Return> {
+    const { name: finalMariadbUserName } = grabSQLUserName({
+        name: mariadbUser.username,
+        user,
+    });
+
+    const finalPassword = mariadbUser.password?.replace(/ /g, "");
+    if (!finalPassword) return { msg: `Couldn't get password` };
+
+    const encryptedFinalPassword = encrypt({ data: finalPassword });
+    const finalHost = mariadbUser.host?.replace(/ /g, "");
+
+    const newMariadbUser: DSQL_DATASQUIREL_MARIADB_USERS = {
+        password: encryptedFinalPassword || undefined,
+        username: finalMariadbUserName,
+        all_databases: isAllDbsAccess ? 1 : 0,
+        all_grants: isAllGrants ?
1 : 0, + host: finalHost, + user_id: user.id, + }; + + let { single: existingRecord } = + await dbGrabUserResource({ + tableName: "mariadb_users", + userId: user.id, + query: { + query: { + id: { + value: String(mariadbUser.id || 0), + }, + }, + }, + }); + + const record = await addDbEntry< + DSQL_DATASQUIREL_MARIADB_USERS, + (typeof DsqlTables)[number] + >({ + tableName: "mariadb_users", + data: newMariadbUser, + update: true, + duplicateColumnName: "id", + duplicateColumnValue: (existingRecord?.id || 0).toString(), + }); + + let { single: updatedRecord } = + await dbGrabUserResource({ + tableName: "mariadb_users", + userId: user.id, + query: { + query: { + id: { + value: String( + existingRecord?.id || record?.payload?.insertId || 0 + ), + }, + }, + }, + }); + + return { existingRecord, updatedRecord }; +} diff --git a/package-shared/functions/web-app/mariadb-user/revoke-all-existing-grants.ts b/package-shared/functions/web-app/mariadb-user/revoke-all-existing-grants.ts new file mode 100644 index 0000000..3c17af8 --- /dev/null +++ b/package-shared/functions/web-app/mariadb-user/revoke-all-existing-grants.ts @@ -0,0 +1,70 @@ +import { UserType } from "../../../types"; +import { DSQL_DATASQUIREL_MARIADB_USERS } from "../../../types/dsql"; +import grabDbNames from "../../../utils/grab-db-names"; +import normalizeText from "../../../utils/normalize-text"; +import dbHandler from "../../backend/dbHandler"; +import decrypt from "../../dsql/decrypt"; +import { createNewSQLUser } from "./handle-mariadb-user-creation"; + +type Params = { + user: UserType; + updatedRecord: DSQL_DATASQUIREL_MARIADB_USERS; +}; + +type Return = { + msg?: string; + success?: boolean; +}; + +export default async function revokeAllExistingGrants({ + user, + updatedRecord, +}: Params): Promise { + const { userDbPrefix } = grabDbNames({ user }); + const parsedPassword = decrypt({ + encryptedString: updatedRecord?.password || "", + }); + + const revokeAllPrivileges = await dbHandler({ + query: normalizeText(` + REVOKE ALL PRIVILEGES ON *.* FROM '${updatedRecord.username}'@'${updatedRecord.host}' + `), + }); + + if (!revokeAllPrivileges) { + await createNewSQLUser({ + host: updatedRecord.host, + password: parsedPassword, + username: updatedRecord.username, + }); + } + + const revokeGrantOption = await dbHandler({ + query: normalizeText(` + REVOKE GRANT OPTION ON *.* FROM '${updatedRecord.username}'@'${updatedRecord.host}' + `), + }); + + const userGrants = (await dbHandler({ + query: `SHOW GRANTS FOR '${updatedRecord.username}'@'${updatedRecord.host}'`, + })) as any[]; + + for (let i = 0; i < userGrants.length; i++) { + const grantObject = userGrants[i]; + const grant = grantObject?.[Object.keys(grantObject)[0]]; + + if (!grant?.match(/GRANT USAGE .* IDENTIFIED BY PASSWORD/)) { + const revokeGrantText = grant + .replace(/GRANT/, "REVOKE") + .replace(/ TO /, " FROM "); + + const revokePrivilege = await dbHandler({ query: revokeGrantText }); + } + } + + const flushPrivileges = await dbHandler({ + query: `FLUSH PRIVILEGES`, + }); + + return { success: true }; +} diff --git a/package-shared/shell/createDbFromSchema/check-db-record.ts b/package-shared/shell/createDbFromSchema/check-db-record.ts index f781d03..371bd78 100644 --- a/package-shared/shell/createDbFromSchema/check-db-record.ts +++ b/package-shared/shell/createDbFromSchema/check-db-record.ts @@ -3,10 +3,12 @@ import { DSQL_DatabaseSchemaType, PostInsertReturn } from "../../types"; import { DSQL_DATASQUIREL_USER_DATABASES } from "../../types/dsql"; import numberfy from 
"../../utils/numberfy"; import addDbEntry from "../../functions/backend/db/addDbEntry"; +import updateDbEntry from "../../functions/backend/db/updateDbEntry"; type Param = { userId?: number | string | null; dbSchema: DSQL_DatabaseSchemaType; + isMain?: boolean; }; /** @@ -16,7 +18,10 @@ type Param = { export default async function checkDbRecordCreateDbSchema({ userId, dbSchema, + isMain, }: Param): Promise { + if (isMain) return undefined; + try { const { dbFullName, @@ -25,7 +30,8 @@ export default async function checkDbRecordCreateDbSchema({ dbDescription, dbImage, childDatabase, - childDatabaseDbFullName, + childDatabaseDbId, + id, } = dbSchema; let recordedDbEntryArray = userId @@ -38,31 +44,41 @@ export default async function checkDbRecordCreateDbSchema({ let recordedDbEntry: DSQL_DATASQUIREL_USER_DATABASES | undefined = recordedDbEntryArray?.[0]; + const newDbEntryObj: DSQL_DATASQUIREL_USER_DATABASES = { + user_id: numberfy(userId), + db_name: dbName, + db_slug: dbSlug, + db_full_name: dbFullName, + db_description: dbDescription, + db_image: dbImage, + active_clone: childDatabase ? 1 : undefined, + db_schema_id: numberfy(id), + active_clone_parent_db_id: numberfy(childDatabaseDbId), + }; + if (!recordedDbEntry?.id && userId) { - const newDbEntryObj: DSQL_DATASQUIREL_USER_DATABASES = { - user_id: numberfy(userId), - db_name: dbName, - db_slug: dbSlug, - db_full_name: dbFullName, - db_description: dbDescription, - db_image: dbImage, - active_clone: childDatabase ? 1 : undefined, - active_clone_parent_db: childDatabaseDbFullName, - }; + const newDbEntry = + await addDbEntry({ + data: newDbEntryObj, + tableName: "user_databases", + forceLocal: true, + }); - const newDbEntry = (await addDbEntry({ - data: newDbEntryObj, - tableName: "user_databases", - forceLocal: true, - })) as PostInsertReturn; - - if (newDbEntry.insertId) { + if (newDbEntry.payload?.insertId) { recordedDbEntryArray = await varDatabaseDbHandler({ queryString: `SELECT * FROM datasquirel.user_databases WHERE db_full_name = ?`, queryValuesArray: [dbFullName || "NULL"], }); recordedDbEntry = recordedDbEntryArray?.[0]; } + } else if (recordedDbEntry?.id) { + await updateDbEntry({ + data: newDbEntryObj, + tableName: "user_databases", + forceLocal: true, + identifierColumnName: "id", + identifierValue: String(recordedDbEntry.id), + }); } return recordedDbEntry; diff --git a/package-shared/shell/createDbFromSchema/check-table-record.ts b/package-shared/shell/createDbFromSchema/check-table-record.ts index 233f431..c295407 100644 --- a/package-shared/shell/createDbFromSchema/check-table-record.ts +++ b/package-shared/shell/createDbFromSchema/check-table-record.ts @@ -1,5 +1,4 @@ import varDatabaseDbHandler from "../utils/varDatabaseDbHandler"; -import dbHandler from "../utils/dbHandler"; import { DSQL_DatabaseSchemaType, DSQL_TableSchemaType, @@ -71,34 +70,34 @@ export default async function checkTableRecordCreateDbSchema({ user_id: numberfy(userId), db_id: dbRecord?.id, db_slug: dbRecord?.db_slug, - table_name: tableSchema.tableFullName, table_slug: tableSchema.tableName, }; - if (tableSchema?.childTable && tableSchema.childTableName) { + if (tableSchema?.childTable && tableSchema.childTableId) { const parentDb = dbSchema.find( - (db) => db.dbFullName == tableSchema.childTableDbFullName + (db) => db.id == tableSchema.childTableDbId ); const parentDbTable = parentDb?.tables.find( - (tbl) => tbl.tableName == tableSchema.childTableName + (tbl) => tbl.id == tableSchema.childTableId ); if (parentDb && parentDbTable) { 
newTableInsertObject["child_table"] = 1; - newTableInsertObject["child_table_parent_database"] = - parentDb.dbFullName; - newTableInsertObject["child_table_parent_table"] = - parentDbTable.tableName; + newTableInsertObject[ + "child_table_parent_database_schema_id" + ] = numberfy(parentDb.id); + newTableInsertObject["child_table_parent_table_schema_id"] = + numberfy(parentDbTable.id); } } - const newTableRecordEntry = (await addDbEntry({ + const newTableRecordEntry = await addDbEntry({ data: newTableInsertObject, tableName: "user_database_tables", dbContext: "Master", forceLocal: true, - })) as PostInsertReturn; + }); - if (newTableRecordEntry.insertId) { + if (newTableRecordEntry.payload?.insertId) { recordedTableEntryArray = await varDatabaseDbHandler({ queryString: queryObj?.string || "", queryValuesArray: queryObj?.values, diff --git a/package-shared/shell/createDbFromSchema/grab-required-database-schemas.ts b/package-shared/shell/createDbFromSchema/grab-required-database-schemas.ts new file mode 100644 index 0000000..24a5cdb --- /dev/null +++ b/package-shared/shell/createDbFromSchema/grab-required-database-schemas.ts @@ -0,0 +1,317 @@ +import fs from "fs"; +import path from "path"; +import grabDirNames from "../../utils/backend/names/grab-dir-names"; +import EJSON from "../../utils/ejson"; +import { DSQL_DatabaseSchemaType } from "../../types"; +import numberfy from "../../utils/numberfy"; +import _ from "lodash"; +import uniqueByKey from "../../utils/unique-by-key"; + +type Params = { + userId?: string | number | null; + dbId?: string | number; + dbSlug?: string; +}; + +export default function grabRequiredDatabaseSchemas( + params: Params +): DSQL_DatabaseSchemaType[] | undefined { + const primaryDbSchema = grabPrimaryRequiredDbSchema(params); + if (!primaryDbSchema) return undefined; + + let relatedDatabases: DSQL_DatabaseSchemaType[] = []; + + const childrenDatabases = primaryDbSchema.childrenDatabases || []; + const childrenTables = + primaryDbSchema.tables + .map((tbl) => { + return tbl.childrenTables || []; + }) + .flat() || []; + + for (let i = 0; i < childrenDatabases.length; i++) { + const childDb = childrenDatabases[i]; + const childDbSchema = grabPrimaryRequiredDbSchema({ + userId: params.userId, + dbId: childDb.dbId, + }); + if (!childDbSchema?.dbSlug) continue; + relatedDatabases.push(childDbSchema); + } + + for (let i = 0; i < childrenTables.length; i++) { + const childTbl = childrenTables[i]; + const childTableDbSchema = grabPrimaryRequiredDbSchema({ + userId: params.userId, + dbId: childTbl.dbId, + }); + if (!childTableDbSchema?.dbSlug) continue; + relatedDatabases.push(childTableDbSchema); + } + + return uniqueByKey([primaryDbSchema, ...relatedDatabases], "dbFullName"); +} + +export function grabPrimaryRequiredDbSchema({ userId, dbId, dbSlug }: Params) { + let finalDbId = dbId; + + if (!finalDbId && userId && dbSlug) { + const searchedDb = findDbNameInSchemaDir({ dbName: dbSlug, userId }); + + if (searchedDb?.id) { + finalDbId = searchedDb.id; + } + } + + if (!finalDbId) { + return undefined; + } + + const { targetUserPrivateDir, oldSchemasDir } = grabDirNames({ + userId, + }); + + const finalSchemaDir = targetUserPrivateDir || oldSchemasDir; + + if (!finalSchemaDir) { + console.log(`finalSchemaDir not found!`); + return undefined; + } + + if (finalDbId) { + const dbIdSchema = path.resolve(finalSchemaDir, `${finalDbId}.json`); + if (fs.existsSync(dbIdSchema)) { + const dbIdSchemaObject = EJSON.parse( + fs.readFileSync(dbIdSchema, "utf-8") + ) as 
DSQL_DatabaseSchemaType | undefined; + return dbIdSchemaObject; + } + } + + const dbSchemasFiles = fs.readdirSync(finalSchemaDir); + + let targetDbSchema: DSQL_DatabaseSchemaType | undefined; + + try { + for (let i = 0; i < dbSchemasFiles.length; i++) { + const fileOrPath = dbSchemasFiles[i]; + if (!fileOrPath.endsWith(`.json`)) continue; + if (!fileOrPath.match(/^\d+.json/)) continue; + + const targetFileJSONPath = path.join(finalSchemaDir, fileOrPath); + const targetSchema = EJSON.parse( + fs.readFileSync(targetFileJSONPath, "utf-8") + ) as DSQL_DatabaseSchemaType | undefined; + + if (targetSchema && finalDbId && targetSchema?.id == finalDbId) { + targetDbSchema = targetSchema; + } + } + } catch (error) {} + + if (targetDbSchema) { + return targetDbSchema; + } + // else if ( dbFullName) { + // let existingSchemaInMainJSON = findTargetDbSchemaFromMainSchema( + // dbFullName + // ); + + // const nextID = grabLatestDbSchemaID(finalSchemaDir); + + // if (existingSchemaInMainJSON) { + // existingSchemaInMainJSON.id = nextID; + // fs.writeFileSync( + // path.join(finalSchemaDir, `${nextID}.json`), + // EJSON.stringify(existingSchemaInMainJSON) || "[]" + // ); + // return existingSchemaInMainJSON; + // } + // } + + console.log(`userSchemaDir not found!`); + console.log(`userId`, userId); + return undefined; +} + +export function findDbNameInSchemaDir({ + userId, + dbName, +}: { + userId?: string | number; + dbName?: string; +}) { + if (!userId) { + console.log(`userId not provided!`); + return undefined; + } + + if (!dbName) { + console.log(`dbName not provided!`); + return undefined; + } + + const { targetUserPrivateDir } = grabDirNames({ + userId, + }); + + if (!targetUserPrivateDir) { + console.log(`targetUserPrivateDir not found!`); + return undefined; + } + + const dbSchemasFiles = fs.readdirSync(targetUserPrivateDir); + + let targetDbSchema: DSQL_DatabaseSchemaType | undefined; + + try { + for (let i = 0; i < dbSchemasFiles.length; i++) { + const fileOrPath = dbSchemasFiles[i]; + if (!fileOrPath.endsWith(`.json`)) continue; + if (!fileOrPath.match(/^\d+.json/)) continue; + + const targetFileJSONPath = path.join( + targetUserPrivateDir, + fileOrPath + ); + const targetSchema = EJSON.parse( + fs.readFileSync(targetFileJSONPath, "utf-8") + ) as DSQL_DatabaseSchemaType | undefined; + + if (!targetSchema) continue; + + if ( + targetSchema.dbFullName == dbName || + targetSchema.dbSlug == dbName + ) { + targetDbSchema = targetSchema; + return targetSchema; + } + } + } catch (error) {} + + return targetDbSchema; +} + +type UpdateDbSchemaParam = { + dbSchema: DSQL_DatabaseSchemaType; + userId?: string | number | null; +}; + +export function writeUpdatedDbSchema({ + dbSchema, + userId, +}: UpdateDbSchemaParam): { success?: boolean; dbSchemaId?: string | number } { + const { targetUserPrivateDir } = grabDirNames({ + userId, + }); + + if (!targetUserPrivateDir) { + console.log(`user ${userId} has no targetUserPrivateDir`); + return {}; + } + + if (dbSchema.id) { + const dbIdSchemaPath = path.join( + targetUserPrivateDir, + `${dbSchema.id}.json` + ); + + fs.writeFileSync(dbIdSchemaPath, EJSON.stringify(dbSchema) || "[]"); + + return { success: true }; + } else { + const nextID = grabLatestDbSchemaID(targetUserPrivateDir); + + dbSchema.id = nextID; + + fs.writeFileSync( + path.join(targetUserPrivateDir, `${nextID}.json`), + EJSON.stringify(dbSchema) || "[]" + ); + + return { success: true, dbSchemaId: nextID }; + } +} + +export function deleteDbSchema({ dbSchema, userId }: UpdateDbSchemaParam) { + 
const { targetUserPrivateDir, userSchemaMainJSONFilePath } = grabDirNames({ + userId, + }); + + if (!targetUserPrivateDir) return; + + const targetDbSchema = grabPrimaryRequiredDbSchema({ + dbId: dbSchema.id, + userId, + }); + + const schemaFile = path.join( + targetUserPrivateDir, + `${targetDbSchema?.id}.json` + ); + + try { + fs.unlinkSync(schemaFile); + } catch (error) {} + + if ( + userSchemaMainJSONFilePath && + fs.existsSync(userSchemaMainJSONFilePath) + ) { + try { + let allDbSchemas = EJSON.parse( + fs.readFileSync(userSchemaMainJSONFilePath, "utf-8") + ) as DSQL_DatabaseSchemaType[] | undefined; + + if (allDbSchemas?.[0]) { + for (let i = 0; i < allDbSchemas.length; i++) { + const dbSch = allDbSchemas[i]; + if ( + dbSch.dbFullName == dbSchema.dbFullName || + dbSch.id == dbSchema.id + ) { + allDbSchemas.splice(i, 1); + } + } + + fs.writeFileSync( + userSchemaMainJSONFilePath, + EJSON.stringify(allDbSchemas) || "[]" + ); + } + } catch (error) {} + } +} + +export function findTargetDbSchemaFromMainSchema( + schemas: DSQL_DatabaseSchemaType[], + dbFullName?: string, + dbId?: string | number +): DSQL_DatabaseSchemaType | undefined { + const targetDbSchema = schemas.find( + (sch) => sch.dbFullName == dbFullName || (dbId && sch.id == dbId) + ); + return targetDbSchema; +} + +export function grabLatestDbSchemaID(userSchemaDir: string) { + const dbSchemasFiles = fs.readdirSync(userSchemaDir); + const dbNumbers = dbSchemasFiles + .filter((dbSch) => { + if (!dbSch.endsWith(`.json`)) return false; + if (dbSch.match(/^\d+\.json/)) return true; + return false; + }) + .map((dbSch) => numberfy(dbSch.replace(/[^0-9]/g, ""))); + + if (dbNumbers[0]) + return ( + (dbNumbers + .sort((a, b) => { + return a - b; + }) + .pop() || 0) + 1 + ); + return 1; +} diff --git a/package-shared/shell/createDbFromSchema/handle-indexes.ts b/package-shared/shell/createDbFromSchema/handle-indexes.ts index 7f9f0a5..c32e42e 100644 --- a/package-shared/shell/createDbFromSchema/handle-indexes.ts +++ b/package-shared/shell/createDbFromSchema/handle-indexes.ts @@ -1,5 +1,9 @@ import varDatabaseDbHandler from "../utils/varDatabaseDbHandler"; -import { DSQL_IndexSchemaType } from "../../types"; +import { + DSQL_IndexSchemaType, + DSQL_MYSQL_SHOW_INDEXES_Type, +} from "../../types"; +import grabDSQLSchemaIndexComment from "../utils/grab-dsql-schema-index-comment"; type Param = { tableName: string; @@ -18,6 +22,11 @@ export default async function handleIndexescreateDbFromSchema({ tableName, indexes, }: Param) { + const allExistingIndexes: DSQL_MYSQL_SHOW_INDEXES_Type[] = + await varDatabaseDbHandler({ + queryString: `SHOW INDEXES FROM \`${dbFullName}\`.\`${tableName}\``, + }); + for (let g = 0; g < indexes.length; g++) { const { indexType, indexName, indexTableFields, alias } = indexes[g]; @@ -27,38 +36,32 @@ export default async function handleIndexescreateDbFromSchema({ * @description Check for existing Index in MYSQL db */ try { - /** - * @type {import("../../types").DSQL_MYSQL_SHOW_INDEXES_Type[]} - * @description All indexes from MYSQL db - */ // @ts-ignore - const allExistingIndexes: import("../../types").DSQL_MYSQL_SHOW_INDEXES_Type[] = - await varDatabaseDbHandler({ - queryString: `SHOW INDEXES FROM \`${dbFullName}\`.\`${tableName}\``, - }); - const existingKeyInDb = allExistingIndexes.filter( (indexObject) => indexObject.Key_name === alias ); + if (!existingKeyInDb[0]) throw new Error("This Index Does not Exist"); } catch (error) { - global.ERROR_CALLBACK?.( - `Error Handling Indexes on Creating Schema`, - error as 
Error - ); - /** * @description Create new index if determined that it * doesn't exist in MYSQL db */ - await varDatabaseDbHandler({ - queryString: `CREATE${ - indexType?.match(/fullText/i) ? " FULLTEXT" : "" - } INDEX \`${alias}\` ON \`${dbFullName}\`.\`${tableName}\`(${indexTableFields - ?.map((nm) => nm.value) - .map((nm) => `\`${nm}\``) - .join(",")}) COMMENT 'schema_index'`, - }); + const queryString = `CREATE${ + indexType == "full_text" ? " FULLTEXT" : "" + } INDEX \`${alias}\` ON \`${dbFullName}\`.\`${tableName}\`(${indexTableFields + ?.map((nm) => nm.value) + .map((nm) => `\`${nm}\``) + .join( + "," + )}) COMMENT '${grabDSQLSchemaIndexComment()} ${indexName}'`; + + const addIndex = await varDatabaseDbHandler({ queryString }); } } + + const allExistingIndexesAfterUpdate: DSQL_MYSQL_SHOW_INDEXES_Type[] = + await varDatabaseDbHandler({ + queryString: `SHOW INDEXES FROM \`${dbFullName}\`.\`${tableName}\``, + }); } diff --git a/package-shared/shell/createDbFromSchema/index.ts b/package-shared/shell/createDbFromSchema/index.ts index c7c6adc..917a5cb 100644 --- a/package-shared/shell/createDbFromSchema/index.ts +++ b/package-shared/shell/createDbFromSchema/index.ts @@ -1,21 +1,22 @@ -import fs from "fs"; - import noDatabaseDbHandler from "../utils/noDatabaseDbHandler"; import varDatabaseDbHandler from "../utils/varDatabaseDbHandler"; import createTable from "../utils/createTable"; import updateTable from "../utils/updateTable"; -import dbHandler from "../utils/dbHandler"; -import EJSON from "../../utils/ejson"; import { DSQL_DatabaseSchemaType } from "../../types"; import grabDirNames from "../../utils/backend/names/grab-dir-names"; import checkDbRecordCreateDbSchema from "./check-db-record"; -import checkTableRecordCreateDbSchema from "./check-table-record"; import handleIndexescreateDbFromSchema from "./handle-indexes"; +import grabRequiredDatabaseSchemas, { + grabPrimaryRequiredDbSchema, +} from "./grab-required-database-schemas"; +import dbHandler from "../../functions/backend/dbHandler"; type Param = { userId?: number | string | null; targetDatabase?: string; - dbSchemaData?: import("../../types").DSQL_DatabaseSchemaType[]; + dbSchemaData?: DSQL_DatabaseSchemaType[]; + targetTable?: string; + dbId?: string | number; }; /** @@ -26,84 +27,79 @@ export default async function createDbFromSchema({ userId, targetDatabase, dbSchemaData, + targetTable, + dbId, }: Param): Promise { - const { userSchemaMainJSONFilePath, mainShemaJSONFilePath } = grabDirNames({ - userId, - }); - - const schemaPath = userSchemaMainJSONFilePath || mainShemaJSONFilePath; - - const dbSchema: DSQL_DatabaseSchemaType[] | undefined = - dbSchemaData || - (EJSON.parse(fs.readFileSync(schemaPath, "utf8")) as - | DSQL_DatabaseSchemaType[] - | undefined); - - if (!dbSchema) { - console.log("Schema Not Found!"); - return false; - } - - for (let i = 0; i < dbSchema.length; i++) { - const database: DSQL_DatabaseSchemaType = dbSchema[i]; - - const { dbFullName, tables, dbSlug, childrenDatabases } = database; - - if (!dbFullName) continue; - - if (targetDatabase && dbFullName != targetDatabase) { - continue; - } - - const dbCheck: any = await noDatabaseDbHandler( - `SELECT SCHEMA_NAME AS dbFullName FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = '${dbFullName}'` - ); - - if (!dbCheck?.[0]?.dbFullName) { - const newDatabase = await noDatabaseDbHandler( - `CREATE DATABASE IF NOT EXISTS \`${dbFullName}\` CHARACTER SET utf8mb4 COLLATE utf8mb4_bin` - ); - } - - const allTables: any = await noDatabaseDbHandler( - `SELECT 
TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA='${dbFullName}'` - ); - - let recordedDbEntry = await checkDbRecordCreateDbSchema({ - dbSchema: database, + try { + const { userSchemaMainJSONFilePath } = grabDirNames({ userId, }); - for (let tb = 0; tb < allTables.length; tb++) { - const { TABLE_NAME } = allTables[tb]; + let dbSchema = dbSchemaData + ? dbSchemaData + : dbId + ? grabRequiredDatabaseSchemas({ + dbId, + userId, + }) + : undefined; - const targetTableSchema = tables.find( - (_table) => _table.tableName === TABLE_NAME + if (!dbSchema) { + console.log("Schema Not Found!"); + return false; + } + + const isMain = !userSchemaMainJSONFilePath; + + for (let i = 0; i < dbSchema.length; i++) { + const database: DSQL_DatabaseSchemaType = dbSchema[i]; + + const { dbFullName, tables, dbSlug, childrenDatabases } = database; + + if (!dbFullName) continue; + + if (targetDatabase && dbFullName != targetDatabase) { + continue; + } + + console.log(`Handling database => ${dbFullName}`); + + const dbCheck: any = await noDatabaseDbHandler( + `SELECT SCHEMA_NAME AS dbFullName FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = '${dbFullName}'` ); - /** - * @description Check if TABLE_NAME is part of the tables contained - * in the user schema JSON. If it's not, the table is either deleted - * or the table name has been recently changed - */ - if (!targetTableSchema) { - const oldTable = tables.find( - (_table) => - _table.tableNameOld && - _table.tableNameOld === TABLE_NAME + if (!dbCheck?.[0]?.dbFullName) { + const newDatabase = await noDatabaseDbHandler( + `CREATE DATABASE IF NOT EXISTS \`${dbFullName}\` CHARACTER SET utf8mb4 COLLATE utf8mb4_bin` + ); + } + + const allTables: any = await noDatabaseDbHandler( + `SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA='${dbFullName}'` + ); + + let recordedDbEntry = await checkDbRecordCreateDbSchema({ + dbSchema: database, + userId, + isMain, + }); + + for (let tb = 0; tb < allTables.length; tb++) { + const { TABLE_NAME } = allTables[tb]; + + const targetTableSchema = tables.find( + (_table) => _table.tableName === TABLE_NAME ); /** - * @description Check if this table has been recently renamed. Rename - * table id true. Drop table if false + * @description Check if TABLE_NAME is part of the tables contained + * in the user schema JSON. If it's not, the table is either deleted + * or the table name has been recently changed */ - if (oldTable) { - console.log("Renaming Table"); - await varDatabaseDbHandler({ - queryString: `RENAME TABLE \`${dbFullName}\`.\`${oldTable.tableNameOld}\` TO \`${oldTable.tableName}\``, - }); - } else { - console.log(`Dropping Table from ${dbFullName}`); + if (!targetTableSchema) { + console.log( + `Dropping Table ${TABLE_NAME} from ${dbFullName}` + ); await varDatabaseDbHandler({ queryString: `DROP TABLE \`${dbFullName}\`.\`${TABLE_NAME}\``, }); @@ -112,130 +108,175 @@ export default async function createDbFromSchema({ query: `DELETE FROM datasquirel.user_database_tables WHERE user_id = ? AND db_slug = ? AND table_slug = ?`, values: [userId, dbSlug, TABLE_NAME], }); + + // const oldTable = tables.find( + // (_table) => + // _table.tableNameOld && + // _table.tableNameOld === TABLE_NAME + // ); + + // /** + // * @description Check if this table has been recently renamed. Rename + // * table id true. 
Drop table if false + // */ + // if (oldTable) { + // console.log("Renaming Table"); + // await varDatabaseDbHandler({ + // queryString: `RENAME TABLE \`${dbFullName}\`.\`${oldTable.tableNameOld}\` TO \`${oldTable.tableName}\``, + // }); + // } else { + // console.log( + // `Dropping Table ${TABLE_NAME} from ${dbFullName}` + // ); + // await varDatabaseDbHandler({ + // queryString: `DROP TABLE \`${dbFullName}\`.\`${TABLE_NAME}\``, + // }); + + // const deleteTableEntry = await dbHandler({ + // query: `DELETE FROM datasquirel.user_database_tables WHERE user_id = ? AND db_slug = ? AND table_slug = ?`, + // values: [userId, dbSlug, TABLE_NAME], + // }); + // } } } - } - - /** - * @description Iterate through each table and perform table actions - */ - for (let t = 0; t < tables.length; t++) { - const table = tables[t]; - - const { tableName, fields, indexes } = table; /** - * @description Check if table exists - * @type {any} + * @description Iterate through each table and perform table actions */ - const tableCheck: any = await varDatabaseDbHandler({ - queryString: ` - SELECT EXISTS ( - SELECT - TABLE_NAME - FROM - information_schema.TABLES - WHERE - TABLE_SCHEMA = ? AND - TABLE_NAME = ? - ) AS tableExists`, - queryValuesArray: [dbFullName, table.tableName], - }); + for (let t = 0; t < tables.length; t++) { + const table = tables[t]; - //////////////////////////////////////// + const { tableName, fields, indexes } = table; + + if (targetTable && tableName !== targetTable) continue; + + console.log(`Handling table => ${tableName}`); - if (tableCheck && tableCheck[0]?.tableExists > 0) { /** - * @description Update table if table exists + * @description Check if table exists + * @type {any} */ - const updateExistingTable = await updateTable({ - dbFullName: dbFullName, - tableName: tableName, - tableNameFull: table.tableFullName, - tableInfoArray: fields, - userId, - dbSchema, - tableIndexes: indexes, - tableIndex: t, - childDb: database.childDatabase || undefined, - recordedDbEntry, - tableSchema: table, + const tableCheck: any = await varDatabaseDbHandler({ + queryString: ` + SELECT EXISTS ( + SELECT + TABLE_NAME + FROM + information_schema.TABLES + WHERE + TABLE_SCHEMA = ? AND + TABLE_NAME = ? 
+ ) AS tableExists`, + queryValuesArray: [dbFullName, table.tableName], }); - if (table.childrenTables && table.childrenTables[0]) { - for (let ch = 0; ch < table.childrenTables.length; ch++) { - const childTable = table.childrenTables[ch]; + if (tableCheck && tableCheck[0]?.tableExists > 0) { + /** + * @description Update table if table exists + */ + const updateExistingTable = await updateTable({ + dbFullName: dbFullName, + tableName: tableName, + tableFields: fields, + userId, + dbSchema: database, + tableIndexes: indexes, + recordedDbEntry, + tableSchema: table, + isMain, + }); - const updateExistingChildTable = await updateTable({ - dbFullName: childTable.dbNameFull, - tableName: childTable.tableName, - tableNameFull: childTable.tableNameFull, - tableInfoArray: fields, - userId, - dbSchema, - tableIndexes: indexes, - clone: true, - childDb: database.childDatabase || undefined, - recordedDbEntry, - tableSchema: table, + if (table.childrenTables && table.childrenTables[0]) { + for ( + let ch = 0; + ch < table.childrenTables.length; + ch++ + ) { + const childTable = table.childrenTables[ch]; + + const childTableParentDbSchema = + grabPrimaryRequiredDbSchema({ + dbId: childTable.dbId, + userId, + }); + + if (!childTableParentDbSchema?.dbFullName) continue; + + const childTableSchema = + childTableParentDbSchema.tables.find( + (tbl) => tbl.id == childTable.tableId + ); + + if (!childTableSchema) continue; + + const updateExistingChildTable = await updateTable({ + dbFullName: childTableParentDbSchema.dbFullName, + tableName: childTableSchema.tableName, + tableFields: childTableSchema.fields, + userId, + dbSchema: childTableParentDbSchema, + tableIndexes: childTableSchema.indexes, + clone: true, + recordedDbEntry, + tableSchema: table, + isMain, + }); + } + } + } else { + /** + * @description Create new Table if table doesnt exist + */ + const createNewTable = await createTable({ + tableName: tableName, + tableInfoArray: fields, + dbFullName: dbFullName, + tableSchema: table, + recordedDbEntry, + isMain, + }); + + /** + * Handle DATASQUIREL Table Indexes + * =================================================== + * @description Iterate through each datasquirel schema + * table index(if available), and perform operations + */ + if (indexes?.[0]) { + handleIndexescreateDbFromSchema({ + dbFullName, + indexes, + tableName, }); } } - } else { - /** - * @description Create new Table if table doesnt exist - */ - const createNewTable = await createTable({ - tableName: tableName, - tableInfoArray: fields, - dbFullName: dbFullName, - tableSchema: table, - recordedDbEntry, - }); + } - /** - * Handle DATASQUIREL Table Indexes - * =================================================== - * @description Iterate through each datasquirel schema - * table index(if available), and perform operations - */ - if (indexes?.[0]) { - handleIndexescreateDbFromSchema({ - dbFullName, - indexes, - tableName, - }); + /** + * @description Check all children databases + */ + if (childrenDatabases?.[0]) { + for (let ch = 0; ch < childrenDatabases.length; ch++) { + const childDb = childrenDatabases[ch]; + const { dbId } = childDb; + + const targetDatabase = dbSchema.find( + (dbSch) => dbSch.childDatabaseDbId == dbId + ); + + if (targetDatabase?.id) { + await createDbFromSchema({ + userId, + dbId: targetDatabase?.id, + }); + } } } - - const tableRecord = await checkTableRecordCreateDbSchema({ - dbFullName, - dbSchema, - tableSchema: table, - dbRecord: recordedDbEntry, - userId, - }); } - /** - * @description Check all children 
databases - */ - if (childrenDatabases?.[0]) { - for (let ch = 0; ch < childrenDatabases.length; ch++) { - const childDb = childrenDatabases[ch]; - const { dbId } = childDb; - - const targetDatabase = dbSchema.find( - (dbSch) => dbSch.id == dbId - ); - - await createDbFromSchema({ - userId, - targetDatabase: targetDatabase?.dbFullName, - }); - } - } + return true; + } catch (error: any) { + console.log(`createDbFromSchema ERROR => ${error.message}`); + return false; } - - return true; } diff --git a/package-shared/shell/grantFullPriviledges.ts b/package-shared/shell/grantFullPriviledges.ts index ff01ba8..cc7b9a4 100644 --- a/package-shared/shell/grantFullPriviledges.ts +++ b/package-shared/shell/grantFullPriviledges.ts @@ -1,4 +1,5 @@ require("dotenv").config({ path: "./../.env" }); +import { AppNames } from "../dict/app-names"; import serverError from "../functions/backend/serverError"; import noDatabaseDbHandler from "./utils/noDatabaseDbHandler"; @@ -16,26 +17,17 @@ async function grantFullPrivileges({ userId }: { userId: string | null }) { const allDatabases = await noDatabaseDbHandler(`SHOW DATABASES`); - const datasquirelUserDatabases = allDatabases.filter( - (/** @type {any} */ database: any) => - database.Database.match(/datasquirel_user_/) + const datasquirelUserDatabases = allDatabases.filter((database: any) => + database.Database.match(new RegExp(`^${AppNames["DsqlDbPrefix"]}`)) ); for (let i = 0; i < datasquirelUserDatabases.length; i++) { const datasquirelUserDatabase = datasquirelUserDatabases[i]; const { Database } = datasquirelUserDatabase; - - // const grantDbPriviledges = await noDatabaseDbHandler( - // `GRANT ALL PRIVILEGES ON ${Database}.* TO '${process.env.DSQL_DB_FULL_ACCESS_USERNAME}'@'%' WITH GRANT OPTION` - // ); - - // const grantRead = await noDatabaseDbHandler( - // `GRANT SELECT ON ${Database}.* TO '${process.env.DSQL_DB_READ_ONLY_USERNAME}'@'%'` - // ); } const flushPriviledged = await noDatabaseDbHandler(`FLUSH PRIVILEGES`); - } catch (/** @type {any} */ error: any) { + } catch (error: any) { serverError({ component: "shell/grantDbPriviledges/main-catch-error", message: error.message, diff --git a/package-shared/shell/mariadb-users/refreshUsersAndGrants.ts b/package-shared/shell/mariadb-users/refreshUsersAndGrants.ts deleted file mode 100644 index 6f99105..0000000 --- a/package-shared/shell/mariadb-users/refreshUsersAndGrants.ts +++ /dev/null @@ -1,137 +0,0 @@ -import generator from "generate-password"; -import noDatabaseDbHandler from "../utils/noDatabaseDbHandler"; -import dbHandler from "../utils/dbHandler"; -import handleGrants from "./handleGrants"; -import encrypt from "../../functions/dsql/encrypt"; -import decrypt from "../../functions/dsql/decrypt"; -import { DSQL_DATASQUIREL_MARIADB_USERS } from "../../types/dsql"; -import { MariaDBUser } from "../../types"; - -type Param = { - userId?: number | string; - mariadbUserHost?: string; - mariadbUsername?: string; - sqlUserID?: string | number; -}; - -/** - * # Refresh Mariadb User Grants - */ -export default async function refreshUsersAndGrants({ - userId, - mariadbUserHost, - mariadbUsername, - sqlUserID, -}: Param) { - const mariadbUsers = (await dbHandler({ - query: `SELECT * FROM mariadb_users`, - })) as any[] | null; - - if (!mariadbUsers?.[0]) { - return; - } - - const isRootUser = userId - ? 
userId == Number(process.env.DSQL_SU_USER_ID) - : false; - - const isWildcardHost = mariadbUserHost == "%"; - - if (isWildcardHost && !isRootUser) { - return; - } - - for (let i = 0; i < mariadbUsers.length; i++) { - const mariadbUser = mariadbUsers[i] as - | DSQL_DATASQUIREL_MARIADB_USERS - | undefined; - - if (!mariadbUser) continue; - if (userId && mariadbUser.user_id != userId) continue; - if (sqlUserID && mariadbUser.id != sqlUserID) continue; - - try { - const { username, password, host, user_id } = mariadbUser; - - const existingUser = await noDatabaseDbHandler( - `SELECT * FROM mysql.user WHERE User = '${username}' AND Host = '${host}'` - ); - - const isUserExisting = Boolean(existingUser?.[0]?.User); - - const isPrimary = String(mariadbUser.primary)?.match(/1/) - ? true - : false; - - const dsqlPassword = mariadbUser?.password - ? decrypt({ encryptedString: mariadbUser.password }) - : isUserExisting && password - ? decrypt({ encryptedString: password }) - : generator.generate({ - length: 16, - numbers: true, - symbols: true, - uppercase: true, - exclude: "*#.'`\"", - }); - - const encryptedPassword = mariadbUser?.password - ? mariadbUser.password - : isUserExisting - ? password - : encrypt({ data: dsqlPassword }); - - if (!isUserExisting) { - if (isWildcardHost) { - const _existingUsers = (await noDatabaseDbHandler( - `SELECT * FROM mysql.user WHERE user='${mariadbUsername}'` - )) as MariaDBUser[]; - - for (let i = 0; i < _existingUsers.length; i++) { - const exUsr = _existingUsers[i]; - await noDatabaseDbHandler( - `DROP USER '${exUsr.User}'@'${exUsr.Host}'` - ); - } - } - - const createNewUser = await noDatabaseDbHandler( - `CREATE USER IF NOT EXISTS '${mariadbUsername}'@'${mariadbUserHost}' IDENTIFIED BY '${dsqlPassword}'` - ); - } - - if (isPrimary) { - const updateUser = await dbHandler({ - query: `UPDATE users SET mariadb_user = ?, mariadb_host = ?, mariadb_pass = ? WHERE id = ?`, - values: [ - mariadbUsername, - mariadbUserHost, - encryptedPassword, - user_id, - ], - }); - } - - const isGrantHandled = await handleGrants({ - username: mariadbUser.username, - host: mariadbUser.host, - grants: - mariadbUser.grants && typeof mariadbUser.grants == "string" - ? 
JSON.parse(mariadbUser.grants) - : [], - userId: String(user_id), - }); - - if (!isGrantHandled) { - console.log( - `Error in handling grants for user ${mariadbUser.username}@${mariadbUser.host}` - ); - } - } catch (error: any) { - global.ERROR_CALLBACK?.( - `Error Refreshing MariaDB Users and Grants`, - error as Error - ); - } - } -} diff --git a/package-shared/shell/mariadb-users/resetSQLPasswords.ts b/package-shared/shell/mariadb-users/resetSQLPasswords.ts index 86e4896..4297b89 100644 --- a/package-shared/shell/mariadb-users/resetSQLPasswords.ts +++ b/package-shared/shell/mariadb-users/resetSQLPasswords.ts @@ -1,8 +1,9 @@ require("dotenv").config({ path: "../../.env" }); import generator from "generate-password"; import noDatabaseDbHandler from "../utils/noDatabaseDbHandler"; -import dbHandler from "../utils/dbHandler"; +import dbHandler from "../../functions/backend/dbHandler"; import encrypt from "../../functions/dsql/encrypt"; +import grabSQLKeyName from "../../utils/grab-sql-key-name"; /** * # Reset SQL Passwords @@ -23,7 +24,9 @@ async function resetSQLCredentialsPasswords() { try { const maridbUsers = (await dbHandler({ - query: `SELECT * FROM mysql.user WHERE User = 'dsql_user_${user.id}'`, + query: `SELECT * FROM mysql.user WHERE User = '${grabSQLKeyName( + { type: "user", userId: user.id } + )}'`, })) as any[]; for (let j = 0; j < maridbUsers.length; j++) { diff --git a/package-shared/shell/mariadb-users/users/create-user.ts b/package-shared/shell/mariadb-users/users/create-user.ts index 414bd72..e4a28da 100644 --- a/package-shared/shell/mariadb-users/users/create-user.ts +++ b/package-shared/shell/mariadb-users/users/create-user.ts @@ -8,6 +8,8 @@ import addDbEntry from "../../../functions/backend/db/addDbEntry"; import addMariadbUser from "../../../functions/backend/addMariadbUser"; import updateDbEntry from "../../../functions/backend/db/updateDbEntry"; import hashPassword from "../../../functions/dsql/hashPassword"; +import { DSQL_DATASQUIREL_USERS } from "../../../types/dsql"; +import grabDirNames from "../../../utils/backend/names/grab-dir-names"; const tmpDir = process.argv[process.argv.length - 1]; @@ -76,14 +78,14 @@ async function createUser() { data: { ...userObj, password: hashedPassword }, }); - if (!newUser?.insertId) return false; + if (!newUser?.payload?.insertId) return false; /** * Add a Mariadb User for this User */ - await addMariadbUser({ userId: newUser.insertId }); + await addMariadbUser({ userId: newUser.payload.insertId }); - const STATIC_ROOT = process.env.DSQL_STATIC_SERVER_DIR; + const { STATIC_ROOT } = grabDirNames(); if (!STATIC_ROOT) { console.log("Static File ENV not Found!"); @@ -95,10 +97,10 @@ async function createUser() { * * @description Create new user folder and file */ - let newUserSchemaFolderPath = `${process.env.DSQL_USER_DB_SCHEMA_PATH}/user-${newUser.insertId}`; + let newUserSchemaFolderPath = `${process.env.DSQL_USER_DB_SCHEMA_PATH}/user-${newUser.payload.insertId}`; let newUserMediaFolderPath = path.join( STATIC_ROOT, - `images/user-images/user-${newUser.insertId}` + `images/user-images/user-${newUser.payload.insertId}` ); fs.mkdirSync(newUserSchemaFolderPath, { recursive: true }); @@ -112,7 +114,7 @@ async function createUser() { const imageBasePath = path.join( STATIC_ROOT, - `images/user-images/user-${newUser.insertId}` + `images/user-images/user-${newUser.payload.insertId}` ); if (!fs.existsSync(imageBasePath)) { @@ -121,12 +123,12 @@ async function createUser() { let imagePath = path.join( STATIC_ROOT, - 
`images/user-images/user-${newUser.insertId}/user-${newUser.insertId}-profile.jpg` + `images/user-images/user-${newUser.payload.insertId}/user-${newUser.payload.insertId}-profile.jpg` ); let imageThumbnailPath = path.join( STATIC_ROOT, - `images/user-images/user-${newUser.insertId}/user-${newUser.insertId}-profile-thumbnail.jpg` + `images/user-images/user-${newUser.payload.insertId}/user-${newUser.payload.insertId}-profile-thumbnail.jpg` ); let prodImageUrl = imagePath.replace( @@ -149,11 +151,11 @@ async function createUser() { execSync(`chmod 644 ${imagePath} ${imageThumbnailPath}`); - const updateImages = await updateDbEntry({ + const updateImages = await updateDbEntry({ dbFullName: "datasquirel", tableName: "users", identifierColumnName: "id", - identifierValue: newUser.insertId, + identifierValue: newUser.payload.insertId, data: { image: prodImageUrl, image_thumbnail: prodImageThumbnailUrl, diff --git a/package-shared/shell/resetSQLCredentials.ts b/package-shared/shell/resetSQLCredentials.ts deleted file mode 100644 index c4fc665..0000000 --- a/package-shared/shell/resetSQLCredentials.ts +++ /dev/null @@ -1,80 +0,0 @@ -require("dotenv").config({ path: "./../.env" }); -import generator from "generate-password"; -import noDatabaseDbHandler from "./utils/noDatabaseDbHandler"; -import dbHandler from "./utils/dbHandler"; -import encrypt from "../functions/dsql/encrypt"; - -/** - * Create database from Schema Function - * ============================================================================== - * @param {object} params - Single object params - * @param {number|string|null} params.userId - User ID or null - */ -async function resetSQLCredentials() { - const users = (await dbHandler({ - query: `SELECT * FROM users`, - })) as any[]; - - if (!users) { - process.exit(); - } - - for (let i = 0; i < users.length; i++) { - const user = users[i]; - - if (!user) continue; - - const defaultMariadbUserHost = process.env.DSQL_DB_HOST || "127.0.0.1"; - - try { - const username = `dsql_user_${user.id}`; - const password = generator.generate({ - length: 16, - numbers: true, - symbols: true, - uppercase: true, - exclude: "*#.'`\"", - }); - const encryptedPassword = encrypt({ data: password }); - - await noDatabaseDbHandler(`DROP USER IF EXISTS '${username}'@'%'`); - await noDatabaseDbHandler( - `DROP USER IF EXISTS '${username}'@'${defaultMariadbUserHost}'` - ); - - await noDatabaseDbHandler( - `CREATE USER IF NOT EXISTS '${username}'@'${defaultMariadbUserHost}' IDENTIFIED BY '${password}'` - ); - - await noDatabaseDbHandler( - `GRANT ALL PRIVILEGES ON \`datasquirel_user_${user.id}_%\`.* TO '${username}'@'${defaultMariadbUserHost}'` - ); - - await noDatabaseDbHandler(`FLUSH PRIVILEGES`); - - const updateUser = await dbHandler({ - query: `UPDATE users SET mariadb_user = ?, mariadb_host = ?, mariadb_pass = ? 
WHERE id = ?`, - values: [ - username, - defaultMariadbUserHost, - encryptedPassword, - user.id, - ], - }); - - console.log( - `User ${user.id}: ${user.first_name} ${user.last_name} SQL credentials successfully added.` - ); - } catch (error: any) { - global.ERROR_CALLBACK?.( - `Error Resetting SQL credentials`, - error as Error - ); - console.log(`Error in adding SQL user =>`, error.message); - } - } - - process.exit(); -} - -resetSQLCredentials(); diff --git a/package-shared/shell/resetSQLCredentialsPasswords.ts b/package-shared/shell/resetSQLCredentialsPasswords.ts index 87deada..94f2d7c 100644 --- a/package-shared/shell/resetSQLCredentialsPasswords.ts +++ b/package-shared/shell/resetSQLCredentialsPasswords.ts @@ -1,8 +1,9 @@ require("dotenv").config({ path: "./../.env" }); import generator from "generate-password"; import noDatabaseDbHandler from "./utils/noDatabaseDbHandler"; -import dbHandler from "./utils/dbHandler"; +import dbHandler from "../functions/backend/dbHandler"; import encrypt from "../functions/dsql/encrypt"; +import grabSQLKeyName from "../utils/grab-sql-key-name"; /** * # Create database from Schema Function @@ -24,7 +25,7 @@ async function resetSQLCredentialsPasswords() { const defaultMariadbUserHost = process.env.DSQL_DB_HOST || "127.0.0.1"; try { - const username = `dsql_user_${user.id}`; + const username = grabSQLKeyName({ type: "user", userId: user.id }); const password = generator.generate({ length: 16, numbers: true, diff --git a/package-shared/shell/setSQLCredentials.ts b/package-shared/shell/setSQLCredentials.ts index 667707c..c0f80b5 100644 --- a/package-shared/shell/setSQLCredentials.ts +++ b/package-shared/shell/setSQLCredentials.ts @@ -1,8 +1,9 @@ require("dotenv").config({ path: "./../.env" }); import generator from "generate-password"; import noDatabaseDbHandler from "./utils/noDatabaseDbHandler"; -import dbHandler from "./utils/dbHandler"; +import dbHandler from "../functions/backend/dbHandler"; import encrypt from "../functions/dsql/encrypt"; +import grabSQLKeyName from "../utils/grab-sql-key-name"; /** ****************************************************************************** */ /** ****************************************************************************** */ @@ -32,7 +33,7 @@ async function setSQLCredentials() { } try { - const username = `dsql_user_${user.id}`; + const username = grabSQLKeyName({ type: "user", userId: user.id }); const password = generator.generate({ length: 16, numbers: true, diff --git a/package-shared/shell/testSQLEscape.ts b/package-shared/shell/testSQLEscape.ts index d2e9bc0..4772757 100644 --- a/package-shared/shell/testSQLEscape.ts +++ b/package-shared/shell/testSQLEscape.ts @@ -1,21 +1,9 @@ -// @ts-check - -//////////////////////////////////////// -//////////////////////////////////////// -//////////////////////////////////////// - require("dotenv").config({ path: "./../.env" }); import generator from "generate-password"; import noDatabaseDbHandler from "./utils/noDatabaseDbHandler"; -import dbHandler from "./utils/dbHandler"; +import dbHandler from "../functions/backend/dbHandler"; import encrypt from "../functions/dsql/encrypt"; - -/** ****************************************************************************** */ -/** ****************************************************************************** */ -/** ****************************************************************************** */ -/** ****************************************************************************** */ -/** 
****************************************************************************** */ -/** ****************************************************************************** */ +import grabSQLKeyName from "../utils/grab-sql-key-name"; /** * # Test SQL Escape @@ -37,7 +25,7 @@ export default async function testSQLEscape() { const defaultMariadbUserHost = process.env.DSQL_DB_HOST || "127.0.0.1"; try { - const username = `dsql_user_${user.id}`; + const username = grabSQLKeyName({ type: "user", userId: user.id }); const password = generator.generate({ length: 16, numbers: true, diff --git a/package-shared/shell/updateSSLUsers.ts b/package-shared/shell/updateSSLUsers.ts index 8c842d6..d5e4043 100644 --- a/package-shared/shell/updateSSLUsers.ts +++ b/package-shared/shell/updateSSLUsers.ts @@ -1,6 +1,7 @@ require("dotenv").config({ path: "./../.env" }); import mysql from "serverless-mysql"; import grabDSQLConnection from "../utils/grab-dsql-connection"; +import grabSQLKeyName from "../utils/grab-sql-key-name"; /** * # Main DB Handler Function @@ -28,7 +29,7 @@ import grabDSQLConnection from "../utils/grab-dsql-connection"; if ( user.User !== process.env.DSQL_DB_READ_ONLY_USERNAME || user.User !== process.env.DSQL_DB_FULL_ACCESS_USERNAME || - !user.User?.match(/dsql_user_.*/i) + !user.User?.match(new RegExp(grabSQLKeyName({ type: "user" }))) ) { continue; } diff --git a/package-shared/shell/utils/create-table-handle-table-record.ts b/package-shared/shell/utils/create-table-handle-table-record.ts new file mode 100644 index 0000000..339c75a --- /dev/null +++ b/package-shared/shell/utils/create-table-handle-table-record.ts @@ -0,0 +1,112 @@ +import varDatabaseDbHandler from "./varDatabaseDbHandler"; +import { DSQL_TableSchemaType } from "../../types"; +import { + DSQL_DATASQUIREL_USER_DATABASE_TABLES, + DSQL_DATASQUIREL_USER_DATABASES, +} from "../../types/dsql"; +import numberfy from "../../utils/numberfy"; +import updateDbEntry from "../../functions/backend/db/updateDbEntry"; +import addDbEntry from "../../functions/backend/db/addDbEntry"; +import slugToNormalText from "../../utils/slug-to-normal-text"; +import debugLog from "../../utils/logging/debug-log"; +import _ from "lodash"; + +type Param = { + tableSchema?: DSQL_TableSchemaType; + recordedDbEntry?: DSQL_DATASQUIREL_USER_DATABASES; + update?: boolean; + isMain?: boolean; +}; + +/** + * # Handle Table Record Update and Insert + */ +export default async function ({ + tableSchema, + recordedDbEntry, + update, + isMain, +}: Param): Promise { + if (isMain) return undefined; + + let tableId: number | undefined; + + const targetDatabase = "datasquirel"; + const targetTableName = "user_database_tables"; + + if (!tableSchema?.tableName) { + return undefined; + } + + const newTableSchema = _.cloneDeep(tableSchema); + + try { + if (!recordedDbEntry) { + throw new Error("Recorded Db entry not found!"); + } + + // const existingTableName = newTableSchema.tableNameOld + // ? newTableSchema.tableNameOld + // : newTableSchema.tableName; + + const newTableEntry: DSQL_DATASQUIREL_USER_DATABASE_TABLES = { + user_id: recordedDbEntry.user_id, + db_id: recordedDbEntry.id, + db_slug: recordedDbEntry.db_slug, + table_name: slugToNormalText(newTableSchema.tableName), + table_slug: newTableSchema.tableName, + child_table: newTableSchema.childTable ? 1 : 0, + child_table_parent_database_schema_id: newTableSchema.childTableDbId + ? numberfy(newTableSchema.childTableDbId) + : 0, + child_table_parent_table_schema_id: newTableSchema.childTableId + ? 
numberfy(newTableSchema.childTableId) + : 0, + table_schema_id: newTableSchema.id + ? numberfy(newTableSchema.id) + : 0, + active_data: newTableSchema.updateData ? 1 : 0, + }; + + const existingTable = await varDatabaseDbHandler({ + queryString: `SELECT * FROM ${targetDatabase}.${targetTableName} WHERE db_id = ? AND table_slug = ?`, + queryValuesArray: [ + String(recordedDbEntry.id), + String(newTableSchema.tableName), + ], + }); + + const table: DSQL_DATASQUIREL_USER_DATABASE_TABLES = existingTable?.[0]; + + if (table?.id) { + tableId = table.id; + if (update) { + await updateDbEntry({ + data: newTableEntry, + identifierColumnName: "id", + identifierValue: table.id, + tableName: targetTableName, + dbFullName: targetDatabase, + }); + } + } else { + const newTableEntryRes = + await addDbEntry({ + data: newTableEntry, + tableName: targetTableName, + dbFullName: targetDatabase, + }); + + if (newTableEntryRes?.payload?.insertId) { + tableId = newTableEntryRes.payload.insertId; + } + } + + if (newTableSchema.tableNameOld) { + } + + return tableId; + } catch (error) { + return undefined; + } +} diff --git a/package-shared/shell/utils/createTable.ts b/package-shared/shell/utils/createTable.ts index 14959db..e93f3b7 100644 --- a/package-shared/shell/utils/createTable.ts +++ b/package-shared/shell/utils/createTable.ts @@ -1,15 +1,18 @@ import varDatabaseDbHandler from "./varDatabaseDbHandler"; import generateColumnDescription from "./generateColumnDescription"; import supplementTable from "./supplementTable"; -import dbHandler from "./dbHandler"; -import { DSQL_DatabaseSchemaType, DSQL_TableSchemaType } from "../../types"; +import { DSQL_FieldSchemaType, DSQL_TableSchemaType } from "../../types"; +import { DSQL_DATASQUIREL_USER_DATABASES } from "../../types/dsql"; +import handleTableForeignKey from "./handle-table-foreign-key"; +import createTableHandleTableRecord from "./create-table-handle-table-record"; type Param = { dbFullName: string; tableName: string; - tableInfoArray: any[]; + tableInfoArray: DSQL_FieldSchemaType[]; tableSchema?: DSQL_TableSchemaType; - recordedDbEntry?: any; + recordedDbEntry?: DSQL_DATASQUIREL_USER_DATABASES; + isMain?: boolean; }; /** @@ -21,110 +24,28 @@ export default async function createTable({ tableInfoArray, tableSchema, recordedDbEntry, + isMain, }: Param) { - /** - * Format tableInfoArray - * - * @description Format tableInfoArray - */ const finalTable = supplementTable({ tableInfoArray: tableInfoArray }); - /** - * Grab Schema - * - * @description Grab Schema - */ + let tableId = await createTableHandleTableRecord({ + recordedDbEntry, + tableSchema, + isMain, + }); + + if (!tableId && !isMain) throw new Error(`Couldn't grab table ID`); + const createTableQueryArray = []; createTableQueryArray.push( `CREATE TABLE IF NOT EXISTS \`${dbFullName}\`.\`${tableName}\` (` ); - //////////////////////////////////////// - //////////////////////////////////////// - //////////////////////////////////////// - - try { - if (!recordedDbEntry) { - throw new Error("Recorded Db entry not found!"); - } - - const existingTable = await varDatabaseDbHandler({ - queryString: `SELECT * FROM datasquirel.user_database_tables WHERE db_id = ? 
AND table_slug = ?`, - queryValuesArray: [recordedDbEntry.id, tableSchema?.tableName], - }); - - /** @type {import("../../types").MYSQL_user_database_tables_table_def} */ - const table: import("../../types").MYSQL_user_database_tables_table_def = - existingTable?.[0]; - - if (!table?.id) { - const newTableEntry = await dbHandler({ - query: `INSERT INTO datasquirel.user_database_tables SET ?`, - values: { - user_id: recordedDbEntry.user_id, - db_id: recordedDbEntry.id, - db_slug: recordedDbEntry.db_slug, - table_name: tableSchema?.tableFullName, - table_slug: tableSchema?.tableName, - child_table: tableSchema?.childTable ? "1" : null, - child_table_parent_database: - tableSchema?.childTableDbFullName || null, - child_table_parent_table: - tableSchema?.childTableName || null, - date_created: Date(), - date_created_code: Date.now(), - date_updated: Date(), - date_updated_code: Date.now(), - }, - }); - } - } catch (error) {} - - //////////////////////////////////////// - //////////////////////////////////////// - //////////////////////////////////////// - let primaryKeySet = false; - /** @type {import("../../types").DSQL_FieldSchemaType[]} */ - let foreignKeys: import("../../types").DSQL_FieldSchemaType[] = []; - - //////////////////////////////////////// - for (let i = 0; i < finalTable.length; i++) { const column = finalTable[i]; - const { - fieldName, - dataType, - nullValue, - primaryKey, - autoIncrement, - defaultValue, - defaultValueLiteral, - foreignKey, - updatedField, - onUpdate, - onUpdateLiteral, - onDelete, - onDeleteLiteral, - defaultField, - encrypted, - json, - newTempField, - notNullValue, - originName, - plainText, - pattern, - patternFlags, - richText, - } = column; - - if (foreignKey) { - foreignKeys.push({ - ...column, - }); - } let { fieldEntryText, newPrimaryKeySet } = generateColumnDescription({ columnData: column, @@ -133,56 +54,39 @@ export default async function createTable({ primaryKeySet = newPrimaryKeySet; - //////////////////////////////////////// - const comma = (() => { - if (foreignKeys[0]) return ","; if (i === finalTable.length - 1) return ""; return ","; })(); createTableQueryArray.push(" " + fieldEntryText + comma); - - //////////////////////////////////////// } - if (foreignKeys[0]) { - foreignKeys.forEach((foreighKey, index, array) => { - const fieldName = foreighKey.fieldName; - const destinationTableName = - foreighKey.foreignKey?.destinationTableName; - const destinationTableColumnName = - foreighKey.foreignKey?.destinationTableColumnName; - const cascadeDelete = foreighKey.foreignKey?.cascadeDelete; - const cascadeUpdate = foreighKey.foreignKey?.cascadeUpdate; - const foreignKeyName = foreighKey.foreignKey?.foreignKeyName; - - const comma = (() => { - if (index === foreignKeys.length - 1) return ""; - return ","; - })(); - - createTableQueryArray.push( - ` CONSTRAINT \`${foreignKeyName}\` FOREIGN KEY (\`${fieldName}\`) REFERENCES \`${destinationTableName}\`(${destinationTableColumnName})${ - cascadeDelete ? " ON DELETE CASCADE" : "" - }${cascadeUpdate ? 
" ON UPDATE CASCADE" : ""}${comma}` - ); - }); - } - - //////////////////////////////////////// - createTableQueryArray.push( `) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin;` ); const createTableQuery = createTableQueryArray.join("\n"); - //////////////////////////////////////// - const newTable = await varDatabaseDbHandler({ queryString: createTableQuery, }); - return newTable; + for (let i = 0; i < finalTable.length; i++) { + const column = finalTable[i]; + const { foreignKey, fieldName } = column; + + if (!fieldName) continue; + + if (foreignKey) { + await handleTableForeignKey({ + dbFullName, + foreignKey, + tableName, + fieldName, + }); + } + } + + return tableId; } diff --git a/package-shared/shell/utils/dbHandler.ts b/package-shared/shell/utils/dbHandler.ts deleted file mode 100644 index 0721d40..0000000 --- a/package-shared/shell/utils/dbHandler.ts +++ /dev/null @@ -1,58 +0,0 @@ -import fs from "fs"; -import path from "path"; -import grabDSQLConnection from "../../utils/grab-dsql-connection"; - -type Param = { - query: string; - values?: string[] | object; -}; - -/** - * # Main DB Handler Function - * @requires DSQL_DB_CONN - Gobal Variable for Datasquirel Database - */ -export default async function dbHandler({ - query, - values, -}: Param): Promise { - const CONNECTION = grabDSQLConnection(); - - let results; - - try { - if (query && values) { - results = await CONNECTION.query(query, values); - } else { - results = await CONNECTION.query(query); - } - } catch (error: any) { - global.ERROR_CALLBACK?.(`DB Handler Error...`, error as Error); - - if (process.env.FIRST_RUN) { - return null; - } - - console.log("ERROR in dbHandler =>", error.message); - console.log(error); - console.log(CONNECTION.config()); - - const tmpFolder = path.resolve(process.cwd(), "./.tmp"); - if (!fs.existsSync(tmpFolder)) - fs.mkdirSync(tmpFolder, { recursive: true }); - - fs.appendFileSync( - path.resolve(tmpFolder, "./dbErrorLogs.txt"), - JSON.stringify(error, null, 4) + "\n" + Date() + "\n\n\n", - "utf8" - ); - results = null; - } finally { - await CONNECTION?.end(); - } - - if (results) { - return JSON.parse(JSON.stringify(results)); - } else { - return null; - } -} diff --git a/package-shared/shell/utils/drop-all-foreign-keys.ts b/package-shared/shell/utils/drop-all-foreign-keys.ts new file mode 100644 index 0000000..7357111 --- /dev/null +++ b/package-shared/shell/utils/drop-all-foreign-keys.ts @@ -0,0 +1,53 @@ +import grabSQLKeyName from "../../utils/grab-sql-key-name"; +import varDatabaseDbHandler from "./varDatabaseDbHandler"; + +type Param = { + dbFullName: string; + tableName: string; +}; + +/** + * # Drop All Foreign Keys + */ +export default async function dropAllForeignKeys({ + dbFullName, + tableName, +}: Param) { + try { + // const rows = await varDatabaseDbHandler({ + // queryString: `SELECT CONSTRAINT_NAME FROM information_schema.REFERENTIAL_CONSTRAINTS WHERE TABLE_NAME = '${tableName}' AND CONSTRAINT_SCHEMA = '${dbFullName}'`, + // }); + + // console.log("rows", rows); + // console.log("dbFullName", dbFullName); + // console.log("tableName", tableName); + + // for (const row of rows) { + // await varDatabaseDbHandler({ + // queryString: `ALTER TABLE \`${dbFullName}\`.\`${tableName}\` DROP FOREIGN KEY \`${row.CONSTRAINT_NAME}\` + // `, + // }); + // } + + const foreignKeys = await varDatabaseDbHandler({ + queryString: `SHOW INDEXES FROM \`${dbFullName}\`.\`${tableName}\` WHERE Key_name LIKE '${grabSQLKeyName( + { type: "foreign_key" } + )}%'`, + }); + + for 
(const fk of foreignKeys) { + if ( + fk.Key_name.match( + new RegExp(grabSQLKeyName({ type: "foreign_key" })) + ) + ) { + await varDatabaseDbHandler({ + queryString: `ALTER TABLE \`${dbFullName}\`.\`${tableName}\` DROP INDEX \`${fk.Key_name}\` + `, + }); + } + } + } catch (error: any) { + console.log(`dropAllForeignKeys ERROR => ${error.message}`); + } +} diff --git a/package-shared/shell/utils/generateColumnDescription.ts b/package-shared/shell/utils/generateColumnDescription.ts index 4bbfb42..af7e804 100644 --- a/package-shared/shell/utils/generateColumnDescription.ts +++ b/package-shared/shell/utils/generateColumnDescription.ts @@ -1,5 +1,9 @@ +import { DSQL_FieldSchemaType } from "../../types"; +import dataTypeConstructor from "../../utils/db/schema/data-type-constructor"; +import dataTypeParser from "../../utils/db/schema/data-type-parser"; + type Param = { - columnData: import("../../types").DSQL_FieldSchemaType; + columnData: DSQL_FieldSchemaType; primaryKeySet?: boolean; }; @@ -15,11 +19,6 @@ export default function generateColumnDescription({ columnData, primaryKeySet, }: Param): Return { - /** - * Format tableInfoArray - * - * @description Format tableInfoArray - */ const { fieldName, dataType, @@ -30,13 +29,19 @@ export default function generateColumnDescription({ defaultValueLiteral, onUpdateLiteral, notNullValue, + unique, } = columnData; let fieldEntryText = ""; - fieldEntryText += `\`${fieldName}\` ${dataType}`; + const finalDataTypeObject = dataTypeParser(dataType); + const finalDataType = dataTypeConstructor( + finalDataTypeObject.type, + finalDataTypeObject.limit, + finalDataTypeObject.decimal + ); - //////////////////////////////////////// + fieldEntryText += `\`${fieldName}\` ${finalDataType}`; if (nullValue) { fieldEntryText += " DEFAULT NULL"; @@ -46,35 +51,32 @@ export default function generateColumnDescription({ if (String(defaultValue).match(/uuid\(\)/i)) { fieldEntryText += ` DEFAULT UUID()`; } else { - fieldEntryText += ` DEFAULT '${defaultValue}'`; + fieldEntryText += ` DEFAULT '${String(defaultValue) + .replace(/^\'|\'$/g, "") + .replace(/\'/g, "\\'")}'`; } } else if (notNullValue) { fieldEntryText += ` NOT NULL`; } - //////////////////////////////////////// - if (onUpdateLiteral) { fieldEntryText += ` ON UPDATE ${onUpdateLiteral}`; } - //////////////////////////////////////// - if (primaryKey && !primaryKeySet) { fieldEntryText += " PRIMARY KEY"; primaryKeySet = true; } - //////////////////////////////////////// - if (autoIncrement) { fieldEntryText += " AUTO_INCREMENT"; primaryKeySet = true; } - //////////////////////////////////////// - //////////////////////////////////////// - //////////////////////////////////////// + if (unique) { + fieldEntryText += " UNIQUE"; + primaryKeySet = true; + } return { fieldEntryText, diff --git a/package-shared/shell/utils/grab-dsql-schema-index-comment.ts b/package-shared/shell/utils/grab-dsql-schema-index-comment.ts new file mode 100644 index 0000000..3d3b766 --- /dev/null +++ b/package-shared/shell/utils/grab-dsql-schema-index-comment.ts @@ -0,0 +1,3 @@ +export default function grabDSQLSchemaIndexComment() { + return `dsql_schema_index`; +} diff --git a/package-shared/shell/utils/handle-table-foreign-key.ts b/package-shared/shell/utils/handle-table-foreign-key.ts new file mode 100644 index 0000000..2c9a67c --- /dev/null +++ b/package-shared/shell/utils/handle-table-foreign-key.ts @@ -0,0 +1,51 @@ +import varDatabaseDbHandler from "./varDatabaseDbHandler"; +import { DSQL_ForeignKeyType } from "../../types"; + +type Param = 
{
+    dbFullName: string;
+    tableName: string;
+    foreignKey: DSQL_ForeignKeyType;
+    fieldName: string;
+    errorLogs?: any[];
+};
+
+/**
+ * # Handle Table Foreign Key
+ */
+export default async function handleTableForeignKey({
+    dbFullName,
+    tableName,
+    foreignKey,
+    errorLogs,
+    fieldName,
+}: Param) {
+    const {
+        destinationTableName,
+        destinationTableColumnName,
+        cascadeDelete,
+        cascadeUpdate,
+        foreignKeyName,
+    } = foreignKey;
+
+    let finalQueryString = `ALTER TABLE \`${dbFullName}\`.\`${tableName}\``;
+
+    finalQueryString += ` ADD CONSTRAINT \`${foreignKeyName}\` FOREIGN KEY (\`${fieldName}\`)`;
+    finalQueryString += ` REFERENCES \`${destinationTableName}\`(\`${destinationTableColumnName}\`)`;
+
+    if (cascadeDelete) finalQueryString += ` ON DELETE CASCADE`;
+    if (cascadeUpdate) finalQueryString += ` ON UPDATE CASCADE`;
+
+    // let foreinKeyText = `ADD CONSTRAINT \`${foreignKeyName}\` FOREIGN KEY (\`${destinationTableColumnType}\`) REFERENCES \`${destinationTableName}\`(\`${destinationTableColumnName}\`)${
+    //     cascadeDelete ? " ON DELETE CASCADE" : ""
+    // }${cascadeUpdate ? " ON UPDATE CASCADE" : ""}`;
+
+    // let finalQueryString = `ALTER TABLE \`${dbFullName}\`.\`${tableName}\` ${foreinKeyText}`;
+
+    const addForeignKey = await varDatabaseDbHandler({
+        queryString: finalQueryString,
+    });
+
+    if (!addForeignKey?.serverStatus) {
+        errorLogs?.push(addForeignKey);
+    }
+}
diff --git a/package-shared/shell/utils/noDatabaseDbHandler.ts b/package-shared/shell/utils/noDatabaseDbHandler.ts
index e47d71b..851471a 100644
--- a/package-shared/shell/utils/noDatabaseDbHandler.ts
+++ b/package-shared/shell/utils/noDatabaseDbHandler.ts
@@ -1,4 +1,4 @@
-import dbHandler from "./dbHandler";
+import dbHandler from "../../functions/backend/dbHandler";
 
 export default async function noDatabaseDbHandler(
     queryString: string
diff --git a/package-shared/shell/utils/updateTable.ts b/package-shared/shell/utils/updateTable.ts
index eb0aaf5..0c18010 100644
--- a/package-shared/shell/utils/updateTable.ts
+++ b/package-shared/shell/utils/updateTable.ts
@@ -1,25 +1,36 @@
-import fs from "fs";
 import varDatabaseDbHandler from "./varDatabaseDbHandler";
 
-const defaultFieldsRegexp =
-    /^id$|^uuid$|^date_created$|^date_created_code$|^date_created_timestamp$|^date_updated$|^date_updated_code$|^date_updated_timestamp$/;
-
 import generateColumnDescription from "./generateColumnDescription";
-import dbHandler from "./dbHandler";
+import {
+    DSQL_DatabaseSchemaType,
+    DSQL_FieldSchemaType,
+    DSQL_IndexSchemaType,
+    DSQL_MYSQL_FOREIGN_KEYS_Type,
+    DSQL_MYSQL_SHOW_COLUMNS_Type,
+    DSQL_MYSQL_SHOW_INDEXES_Type,
+    DSQL_TableSchemaType,
+} from "../../types";
+import handleTableForeignKey from "./handle-table-foreign-key";
+import dropAllForeignKeys from "./drop-all-foreign-keys";
+import createTableHandleTableRecord from "./create-table-handle-table-record";
+import { DSQL_DATASQUIREL_USER_DATABASES } from "../../types/dsql";
+import defaultFieldsRegexp from "../../functions/dsql/default-fields-regexp";
+import handleIndexescreateDbFromSchema from "../createDbFromSchema/handle-indexes";
+import _ from "lodash";
+import { writeUpdatedDbSchema } from "../createDbFromSchema/grab-required-database-schemas";
+import normalizeText from "../../utils/normalize-text";
 
 type Param = {
     dbFullName: string;
     tableName: string;
-    tableSchema: import("../../types").DSQL_TableSchemaType;
-    tableNameFull?: string;
-    tableInfoArray: import("../../types").DSQL_FieldSchemaType[];
+    tableSchema: DSQL_TableSchemaType;
+    tableFields: 
DSQL_FieldSchemaType[]; userId?: number | string | null; - dbSchema: import("../../types").DSQL_DatabaseSchemaType[]; - tableIndexes?: import("../../types").DSQL_IndexSchemaType[]; + dbSchema: DSQL_DatabaseSchemaType; + tableIndexes?: DSQL_IndexSchemaType[]; clone?: boolean; - tableIndex?: number; - childDb?: boolean; - recordedDbEntry?: any; + recordedDbEntry?: DSQL_DATASQUIREL_USER_DATABASES; + isMain?: boolean; }; /** @@ -28,31 +39,59 @@ type Param = { export default async function updateTable({ dbFullName, tableName, - tableInfoArray, + tableFields, userId, dbSchema, tableIndexes, tableSchema, clone, - childDb, - tableIndex, - tableNameFull, recordedDbEntry, -}: Param) { + isMain, +}: Param): Promise { /** * Initialize * ========================================== * @description Initial setup */ - /** @type {any[]} */ let errorLogs: any[] = []; /** * @description Initialize table info array. This value will be * changing depending on if a field is renamed or not. */ - let upToDateTableFieldsArray = tableInfoArray; + let upToDateTableFieldsArray = _.cloneDeep(tableFields); + + /** + * @type {string[]} + * @description Table update query string array + */ + const updateTableQueryArray: string[] = []; + + /** + * @description Push the query initial value + */ + updateTableQueryArray.push( + `ALTER TABLE \`${dbFullName}\`.\`${tableName}\`` + ); + + /** + * @description Grab Table Record + */ + if (!recordedDbEntry && !isMain) { + throw new Error("Recorded Db entry not found!"); + } + + let tableID = await createTableHandleTableRecord({ + recordedDbEntry, + tableSchema, + update: true, + isMain, + }); + + if (!tableID && !isMain) { + throw new Error("Recorded Table entry not found!"); + } /** * Handle Table updates @@ -60,220 +99,36 @@ export default async function updateTable({ * @description Try to undate table, catch error if anything goes wrong */ try { - /** - * @type {string[]} - * @description Table update query string array - */ - const updateTableQueryArray: string[] = []; - - /** - * @type {string[]} - * @description Constriants query string array - */ - const constraintsQueryArray: string[] = []; - - /** - * @description Push the query initial value - */ - updateTableQueryArray.push( - `ALTER TABLE \`${dbFullName}\`.\`${tableName}\`` - ); - - if (childDb) { - try { - if (!recordedDbEntry) { - throw new Error("Recorded Db entry not found!"); - } - - const existingTable = await varDatabaseDbHandler({ - queryString: `SELECT * FROM datasquirel.user_database_tables WHERE db_id = ? AND table_slug = ?`, - queryValuesArray: [recordedDbEntry.id, tableName], - }); - - /** @type {import("../../types").MYSQL_user_database_tables_table_def} */ - const table: import("../../types").MYSQL_user_database_tables_table_def = - existingTable?.[0]; - - if (!table?.id) { - const newTableEntry = await dbHandler({ - query: `INSERT INTO datasquirel.user_database_tables SET ?`, - values: { - user_id: recordedDbEntry.user_id, - db_id: recordedDbEntry.id, - db_slug: recordedDbEntry.db_slug, - table_name: tableNameFull, - table_slug: tableName, - child_table: tableSchema?.childTable ? 
"1" : null, - child_table_parent_database: - tableSchema?.childTableDbFullName || null, - child_table_parent_table: - tableSchema.childTableName || null, - date_created: Date(), - date_created_code: Date.now(), - date_updated: Date(), - date_updated_code: Date.now(), - }, - }); - } - } catch (error) {} - } - - /** - * @type {import("../../types").DSQL_MYSQL_SHOW_INDEXES_Type[]} - * @description All indexes from MYSQL db - */ // @ts-ignore - const allExistingIndexes: import("../../types").DSQL_MYSQL_SHOW_INDEXES_Type[] = - await varDatabaseDbHandler({ - queryString: `SHOW INDEXES FROM \`${dbFullName}\`.\`${tableName}\``, - }); - - /** - * @type {import("../../types").DSQL_MYSQL_SHOW_COLUMNS_Type[]} - * @description All columns from MYSQL db - */ // @ts-ignore - const allExistingColumns: import("../../types").DSQL_MYSQL_SHOW_COLUMNS_Type[] = - await varDatabaseDbHandler({ - queryString: `SHOW COLUMNS FROM \`${dbFullName}\`.\`${tableName}\``, - }); - - //////////////////////////////////////// - - /** - * @type {string[]} - * @description Updated column names Array - */ - const updatedColumnsArray: string[] = []; - - /** - * @description Iterate through every existing column - */ - for (let e = 0; e < allExistingColumns.length; e++) { - const { Field } = allExistingColumns[e]; - - if (Field.match(defaultFieldsRegexp)) continue; - - /** - * @description This finds out whether the fieldName corresponds with the MSQL Field name - * if the fildName doesn't match any MYSQL Field name, the field is deleted. - */ - let existingEntry = upToDateTableFieldsArray.filter( - (column) => - column.fieldName === Field || column.originName === Field - ); - - if (existingEntry && existingEntry[0]) { - /** - * @description Check if Field name has been updated - */ - if ( - existingEntry[0].updatedField && - existingEntry[0].fieldName - ) { - updatedColumnsArray.push(existingEntry[0].fieldName); - - const renameColumn = await varDatabaseDbHandler({ - queryString: `ALTER TABLE \`${dbFullName}\`.\`${tableName}\` RENAME COLUMN \`${existingEntry[0].originName}\` TO \`${existingEntry[0].fieldName}\``, - }); - - console.log( - `Column Renamed from "${existingEntry[0].originName}" to "${existingEntry[0].fieldName}"` - ); - - /** - * Update Db Schema - * =================================================== - * @description Update Db Schema after renaming column - */ - try { - const userSchemaData = dbSchema; - - const targetDbIndex = userSchemaData.findIndex( - (db) => db.dbFullName === dbFullName - ); - const targetTableIndex = userSchemaData[ - targetDbIndex - ].tables.findIndex( - (table) => table.tableName === tableName - ); - const targetFieldIndex = userSchemaData[ - targetDbIndex - ].tables[targetTableIndex].fields.findIndex( - (field) => - field.fieldName === existingEntry[0].fieldName - ); - - delete userSchemaData[targetDbIndex].tables[ - targetTableIndex - ].fields[targetFieldIndex]["originName"]; - delete userSchemaData[targetDbIndex].tables[ - targetTableIndex - ].fields[targetFieldIndex]["updatedField"]; - - /** - * @description Set New Table Fields Array - */ - upToDateTableFieldsArray = - userSchemaData[targetDbIndex].tables[ - targetTableIndex - ].fields; - - fs.writeFileSync( - `${String( - process.env.DSQL_USER_DB_SCHEMA_PATH - )}/user-${userId}/main.json`, - JSON.stringify(userSchemaData), - "utf8" - ); - } catch (/** @type {any} */ error: any) { - console.log("Update table error =>", error.message); - } - - //////////////////////////////////////// - } - - //////////////////////////////////////// - - 
continue; - - //////////////////////////////////////// - } else { - await varDatabaseDbHandler({ - queryString: `ALTER TABLE \`${dbFullName}\`.\`${tableName}\` DROP COLUMN \`${Field}\``, - }); - } - } - /** * Handle MYSQL Table Indexes * =================================================== * @description Iterate through each table index(if available) * and perform operations */ - for (let f = 0; f < allExistingIndexes.length; f++) { - const { Key_name, Index_comment } = allExistingIndexes[f]; + const allExistingIndexes: DSQL_MYSQL_SHOW_INDEXES_Type[] = + await varDatabaseDbHandler({ + queryString: `SHOW INDEXES FROM \`${dbFullName}\`.\`${tableName}\` WHERE Index_comment LIKE '%schema_index%'`, + }); - /** - * @description Check if this index was specifically created - * by datasquirel - */ - if (Index_comment?.match(/schema_index/)) { - try { - const existingKeyInSchema = tableIndexes?.filter( - (indexObject) => indexObject.alias === Key_name + for (let f = 0; f < allExistingIndexes.length; f++) { + const { Key_name } = allExistingIndexes[f]; + + try { + const existingKeyInSchema = tableIndexes?.find( + (indexObject) => indexObject.alias === Key_name + ); + if (!existingKeyInSchema) + throw new Error( + `This Index(${Key_name}) Has been Deleted!` ); - if (!existingKeyInSchema?.[0]) - throw new Error( - `This Index(${Key_name}) Has been Deleted!` - ); - } catch (error) { - /** - * @description Drop Index: This happens when the MYSQL index is not - * present in the datasquirel DB schema - */ - await varDatabaseDbHandler({ - queryString: `ALTER TABLE \`${dbFullName}\`.\`${tableName}\` DROP INDEX \`${Key_name}\``, - }); - } + } catch (error) { + /** + * @description Drop Index: This happens when the MYSQL index is not + * present in the datasquirel DB schema + */ + await varDatabaseDbHandler({ + queryString: `ALTER TABLE \`${dbFullName}\`.\`${tableName}\` DROP INDEX \`${Key_name}\``, + }); } } @@ -284,36 +139,11 @@ export default async function updateTable({ * table index(if available), and perform operations */ if (tableIndexes && tableIndexes[0]) { - for (let g = 0; g < tableIndexes.length; g++) { - const { indexType, indexName, indexTableFields, alias } = - tableIndexes[g]; - - if (!alias?.match(/./)) continue; - - /** - * @description Check for existing Index in MYSQL db - */ - try { - const existingKeyInDb = allExistingIndexes.filter( - (indexObject) => indexObject.Key_name === alias - ); - if (!existingKeyInDb[0]) - throw new Error("This Index Does not Exist"); - } catch (error) { - /** - * @description Create new index if determined that it - * doesn't exist in MYSQL db - */ - await varDatabaseDbHandler({ - queryString: `CREATE${ - indexType?.match(/fullText/i) ? 
" FULLTEXT" : "" - } INDEX \`${alias}\` ON \`${dbFullName}\`.\`${tableName}\`(${indexTableFields - ?.map((nm) => nm.value) - .map((nm) => `\`${nm}\``) - .join(",")}) COMMENT 'schema_index'`, - }); - } - } + handleIndexescreateDbFromSchema({ + dbFullName, + indexes: tableIndexes, + tableName, + }); } /** @@ -322,16 +152,10 @@ export default async function updateTable({ * @description Iterate through each datasquirel schema * table index(if available), and perform operations */ - - /** - * @description All MSQL Foreign Keys - * @type {import("../../types").DSQL_MYSQL_FOREIGN_KEYS_Type[] | null} - */ - const allForeignKeys: - | import("../../types").DSQL_MYSQL_FOREIGN_KEYS_Type[] - | null = await varDatabaseDbHandler({ - queryString: `SELECT CONSTRAINT_NAME FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS WHERE CONSTRAINT_SCHEMA = '${dbFullName}' AND TABLE_NAME='${tableName}' AND CONSTRAINT_TYPE='FOREIGN KEY'`, - }); + const allForeignKeys: DSQL_MYSQL_FOREIGN_KEYS_Type[] | null = + await varDatabaseDbHandler({ + queryString: `SELECT CONSTRAINT_NAME FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS WHERE CONSTRAINT_SCHEMA = '${dbFullName}' AND TABLE_NAME='${tableName}' AND CONSTRAINT_TYPE='FOREIGN KEY'`, + }); if (allForeignKeys) { for (let c = 0; c < allForeignKeys.length; c++) { @@ -352,6 +176,136 @@ export default async function updateTable({ } } + /** + * Handle MYSQL Unique Fields + * =================================================== + * @description Find all existing unique field constraints + * and remove them + */ + const allUniqueConstraints: DSQL_MYSQL_FOREIGN_KEYS_Type[] | null = + await varDatabaseDbHandler({ + queryString: + normalizeText(`SELECT CONSTRAINT_NAME FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS \ + WHERE CONSTRAINT_SCHEMA = '${dbFullName}' AND TABLE_NAME='${tableName}' AND \ + CONSTRAINT_TYPE='UNIQUE'`), + }); + + if (allUniqueConstraints) { + for (let c = 0; c < allUniqueConstraints.length; c++) { + const { CONSTRAINT_NAME } = allUniqueConstraints[c]; + const dropUniqueConstraint = await varDatabaseDbHandler({ + queryString: `ALTER TABLE \`${dbFullName}\`.\`${tableName}\` DROP INDEX \`${CONSTRAINT_NAME}\``, + }); + } + } + + /** + * Handle MYSQL Columns (Fields) + * =================================================== + * @description Now handle all fields/columns + */ + let allExistingColumns: DSQL_MYSQL_SHOW_COLUMNS_Type[] = + await varDatabaseDbHandler({ + queryString: `SHOW COLUMNS FROM \`${dbFullName}\`.\`${tableName}\``, + }); + + /** + * @type {string[]} + * @description Updated column names Array + */ + const updatedColumnsArray: string[] = []; + + /** + * @description Iterate through every existing column + */ + for (let e = 0; e < allExistingColumns.length; e++) { + const { Field } = allExistingColumns[e]; + + if (Field.match(defaultFieldsRegexp)) continue; + + /** + * @description This finds out whether the fieldName corresponds with the MSQL Field name + * if the fildName doesn't match any MYSQL Field name, the field is deleted. 
+ */ + let existingEntry = upToDateTableFieldsArray.find( + (column) => + column.fieldName === Field || column.originName === Field + ); + + if (existingEntry) { + /** + * @description Check if Field name has been updated + */ + if (existingEntry.updatedField && existingEntry.fieldName) { + updatedColumnsArray.push(existingEntry.fieldName); + + const renameColumn = await varDatabaseDbHandler({ + queryString: `ALTER TABLE \`${dbFullName}\`.\`${tableName}\` RENAME COLUMN \`${existingEntry.originName}\` TO \`${existingEntry.fieldName}\``, + }); + + console.log( + `Column Renamed from "${existingEntry.originName}" to "${existingEntry.fieldName}"` + ); + + /** + * Update Db Schema + * =================================================== + * @description Update Db Schema after renaming column + */ + try { + const updatedSchemaData = _.cloneDeep(dbSchema); + + const targetTableIndex = + updatedSchemaData.tables.findIndex( + (table) => table.tableName === tableName + ); + const targetFieldIndex = updatedSchemaData.tables[ + targetTableIndex + ].fields.findIndex( + (field) => + field.fieldName === existingEntry.fieldName + ); + + delete updatedSchemaData.tables[targetTableIndex] + .fields[targetFieldIndex]["originName"]; + delete updatedSchemaData.tables[targetTableIndex] + .fields[targetFieldIndex]["updatedField"]; + + /** + * @description Set New Table Fields Array + */ + upToDateTableFieldsArray = + updatedSchemaData.tables[targetTableIndex].fields; + + if (userId) { + writeUpdatedDbSchema({ + dbSchema: updatedSchemaData, + userId, + }); + } + + allExistingColumns = await varDatabaseDbHandler({ + queryString: `SHOW COLUMNS FROM \`${dbFullName}\`.\`${tableName}\``, + }); + } catch (error: any) { + console.log("Update table error =>", error.message); + } + + //////////////////////////////////////// + } + + //////////////////////////////////////// + + continue; + + //////////////////////////////////////// + } else { + await varDatabaseDbHandler({ + queryString: `ALTER TABLE \`${dbFullName}\`.\`${tableName}\` DROP COLUMN \`${Field}\``, + }); + } + } + /** * Handle DATASQUIREL schema fields for current table * =================================================== @@ -360,57 +314,25 @@ export default async function updateTable({ */ for (let i = 0; i < upToDateTableFieldsArray.length; i++) { const column = upToDateTableFieldsArray[i]; - const prevColumn = upToDateTableFieldsArray[i - 1]; - const nextColumn = upToDateTableFieldsArray[i + 1]; + // const prevColumn = upToDateTableFieldsArray[i - 1]; + // const nextColumn = upToDateTableFieldsArray[i + 1]; - const { - fieldName, - dataType, - nullValue, - primaryKey, - autoIncrement, - defaultValue, - defaultValueLiteral, - foreignKey, - updatedField, - } = column; + const { fieldName, dataType, foreignKey } = column; - //////////////////////////////////////// - - /** - * @description Skip default fields - */ - if (fieldName?.match(/^id$|^date_/)) continue; - /** - * @description Skip columns that have been updated recently - */ - // if (updatedColumnsArray.includes(fieldName)) continue; - - //////////////////////////////////////// + if (!fieldName) continue; + if (defaultFieldsRegexp.test(fieldName)) continue; let updateText = ""; - //////////////////////////////////////// + const existingColumnIndex = allExistingColumns?.findIndex( + (_column, _index) => _column.Field === fieldName + ); - /** @type {any} */ - let existingColumnIndex: any; + const existingColumn = + existingColumnIndex >= 0 + ? 
allExistingColumns[existingColumnIndex] + : undefined; - /** - * @description Existing MYSQL field object - */ - let existingColumn = - allExistingColumns && allExistingColumns[0] - ? allExistingColumns.filter((_column, _index) => { - if (_column.Field === fieldName) { - existingColumnIndex = _index; - return true; - } - }) - : null; - - /** - * @description Construct SQL text snippet for this field - */ let { fieldEntryText } = generateColumnDescription({ columnData: column, }); @@ -419,46 +341,18 @@ export default async function updateTable({ * @description Modify Column(Field) if it already exists * in MYSQL database */ - if (existingColumn && existingColumn[0]?.Field) { - const { Field, Type, Null, Key, Default, Extra } = - existingColumn[0]; + if (existingColumn?.Field) { + const { Field, Type } = existingColumn; - let isColumnReordered = i < existingColumnIndex; + updateText += `MODIFY COLUMN ${fieldEntryText}`; - if ( - Field === fieldName && - !isColumnReordered && - dataType?.toUpperCase() === Type.toUpperCase() - ) { - updateText += `MODIFY COLUMN ${fieldEntryText}`; - // continue; - } else { - if (userId) { - updateText += `MODIFY COLUMN ${fieldEntryText}${ - isColumnReordered - ? prevColumn?.fieldName - ? " AFTER `" + prevColumn.fieldName + "`" - : nextColumn?.fieldName - ? " BEFORE `" + nextColumn.fieldName + "`" - : "" - : "" - }`; - } else { - updateText += `MODIFY COLUMN ${fieldEntryText}`; - } - } - } else if (prevColumn && prevColumn.fieldName) { - /** - * @description Add new Column AFTER previous column, if - * previous column exists - */ - updateText += `ADD COLUMN ${fieldEntryText} AFTER \`${prevColumn.fieldName}\``; - } else if (nextColumn && nextColumn.fieldName) { - /** - * @description Add new Column BEFORE next column, if - * next column exists - */ - updateText += `ADD COLUMN ${fieldEntryText} BEFORE \`${nextColumn.fieldName}\``; + // if ( + // Field === fieldName && + // dataType?.toUpperCase() === Type.toUpperCase() + // ) { + // } else { + // updateText += `MODIFY COLUMN ${fieldEntryText}`; + // } } else { /** * @description Append new column to the end of existing columns @@ -466,44 +360,13 @@ export default async function updateTable({ updateText += `ADD COLUMN ${fieldEntryText}`; } - //////////////////////////////////////// - /** * @description Pust SQL code snippet to updateTableQueryArray Array * Add a comma(,) to separate from the next snippet */ - updateTableQueryArray.push(updateText + ","); - - /** - * @description Handle foreing keys if available, and if there is no - * "clone" boolean = true - */ - if (!clone && foreignKey) { - const { - destinationTableName, - destinationTableColumnName, - cascadeDelete, - cascadeUpdate, - foreignKeyName, - } = foreignKey; - - const foreinKeyText = `ADD CONSTRAINT \`${foreignKeyName}\` FOREIGN KEY (\`${fieldName}\`) REFERENCES \`${destinationTableName}\`(\`${destinationTableColumnName}\`)${ - cascadeDelete ? " ON DELETE CASCADE" : "" - }${cascadeUpdate ? " ON UPDATE CASCADE" : ""}`; - // const foreinKeyText = `ADD CONSTRAINT \`${foreignKeyName}\` FOREIGN KEY (${fieldName}) REFERENCES ${destinationTableName}(${destinationTableColumnName})${cascadeDelete ? " ON DELETE CASCADE" : ""}${cascadeUpdate ? 
" ON UPDATE CASCADE" : ""}` + ","; - - const finalQueryString = `ALTER TABLE \`${dbFullName}\`.\`${tableName}\` ${foreinKeyText}`; - - const addForeignKey = await varDatabaseDbHandler({ - queryString: finalQueryString, - }); - - if (!addForeignKey?.serverStatus) { - errorLogs.push(addForeignKey); - } + if (updateText.match(/./)) { + updateTableQueryArray.push(updateText + ","); } - - //////////////////////////////////////// } /** @@ -511,6 +374,7 @@ export default async function updateTable({ * updateTableQueryArray Arry, and trimming the final comma(,) */ const updateTableQuery = updateTableQueryArray + .filter((q) => Boolean(q.match(/./))) .join(" ") .replace(/,$/, ""); @@ -526,17 +390,34 @@ export default async function updateTable({ queryString: updateTableQuery, }); - return updateTable; + /** + * # Handle Foreign Keys + */ + await dropAllForeignKeys({ dbFullName, tableName }); + + for (let i = 0; i < upToDateTableFieldsArray.length; i++) { + const { fieldName, foreignKey } = upToDateTableFieldsArray[i]; + if (!clone && foreignKey && fieldName) { + await handleTableForeignKey({ + dbFullName, + errorLogs, + foreignKey, + fieldName, + tableName, + }); + } + } } else { /** * @description If only 1 SQL snippet is left in updateTableQueryArray, this * means that no updates have been made to the table */ - return "No Changes Made to Table"; } - } catch (/** @type {any} */ error: any) { + + return tableID; + } catch (error: any) { console.log('Error in "updateTable" shell function =>', error.message); - return "Error in Updating Table"; + return tableID; } } diff --git a/package-shared/shell/utils/varDatabaseDbHandler.ts b/package-shared/shell/utils/varDatabaseDbHandler.ts index 054d086..d3d2457 100644 --- a/package-shared/shell/utils/varDatabaseDbHandler.ts +++ b/package-shared/shell/utils/varDatabaseDbHandler.ts @@ -1,5 +1,4 @@ -import dbHandler from "./dbHandler"; -import { DSQL_TableSchemaType } from "../../types"; +import dbHandler from "../../functions/backend/dbHandler"; type Param = { queryString: string; diff --git a/package-shared/sqls/grab-foreign-key.sql b/package-shared/sqls/grab-foreign-key.sql new file mode 100644 index 0000000..e1dc5f4 --- /dev/null +++ b/package-shared/sqls/grab-foreign-key.sql @@ -0,0 +1,16 @@ +SELECT + kcu.TABLE_NAME, + kcu.COLUMN_NAME, + kcu.CONSTRAINT_NAME, + kcu.REFERENCED_TABLE_NAME, + kcu.REFERENCED_COLUMN_NAME, + rc.UPDATE_RULE, + rc.DELETE_RULE +FROM + INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu + JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS rc ON kcu.CONSTRAINT_NAME = rc.CONSTRAINT_NAME + AND kcu.TABLE_SCHEMA = rc.CONSTRAINT_SCHEMA +WHERE + kcu.TABLE_SCHEMA = 'datasquirel' + AND kcu.TABLE_NAME = '{{TABLE_NAME}}' + AND kcu.REFERENCED_TABLE_NAME IS NOT NULL; \ No newline at end of file diff --git a/package-shared/types/dsql.ts b/package-shared/types/dsql.ts index d162498..2e7bb02 100644 --- a/package-shared/types/dsql.ts +++ b/package-shared/types/dsql.ts @@ -1,21 +1,24 @@ export const DsqlTables = [ "users", "mariadb_users", + "mariadb_user_databases", + "mariadb_user_tables", + "mariadb_user_privileges", "api_keys", + "api_keys_scoped_resources", "invitations", - "user_users", - "delegated_user_tables", + "delegated_resources", "user_databases", "user_database_tables", "user_media", + "user_private_folders", "delegated_users", "unsubscribes", "notifications", - "docs_pages", - "docs_page_extra_links", "deleted_api_keys", "servers", "process_queue", + "backups", ] as const export type DSQL_DATASQUIREL_USERS = { @@ -39,6 +42,7 @@ export type 
DSQL_DATASQUIREL_USERS = { mariadb_pass?: string; disk_usage_in_mb?: number; verification_status?: number; + temp_login_code?: string; date_created?: string; date_created_code?: number; date_created_timestamp?: string; @@ -55,7 +59,63 @@ export type DSQL_DATASQUIREL_MARIADB_USERS = { host?: string; password?: string; primary?: number; - grants?: string; + all_databases?: 0 | 1; + all_grants?: 0 | 1; + date_created?: string; + date_created_code?: number; + date_created_timestamp?: string; + date_updated?: string; + date_updated_code?: number; + date_updated_timestamp?: string; +} + +export type DSQL_DATASQUIREL_MARIADB_USER_DATABASES = { + id?: number; + uuid?: string; + user_id?: number; + mariadb_user_id?: number; + db_id?: number; + db_schema_id?: number; + db_slug?: string; + all_tables?: 0 | 1; + all_privileges?: 0 | 1; + date_created?: string; + date_created_code?: number; + date_created_timestamp?: string; + date_updated?: string; + date_updated_code?: number; + date_updated_timestamp?: string; +} + +export type DSQL_DATASQUIREL_MARIADB_USER_TABLES = { + id?: number; + uuid?: string; + user_id?: number; + mariadb_user_id?: number; + db_id?: number; + db_schema_id?: number; + db_slug?: string; + table_schema_id?: number; + table_slug?: string; + all_fields?: 0 | 1; + all_privileges?: 0 | 1; + date_created?: string; + date_created_code?: number; + date_created_timestamp?: string; + date_updated?: string; + date_updated_code?: number; + date_updated_timestamp?: string; +} + +export type DSQL_DATASQUIREL_MARIADB_USER_PRIVILEGES = { + id?: number; + uuid?: string; + user_id?: number; + mariadb_user_id?: number; + db_id?: number; + db_schema_id?: number; + db_slug?: string; + privilege?: "ALTER" | "ALTER ROUTINE" | "CREATE" | "CREATE ROUTINE" | "CREATE TEMPORARY TABLES" | "CREATE VIEW" | "DELETE" | "DROP" | "EVENT" | "EXECUTE" | "FILE" | "INDEX" | "INSERT" | "LOCK TABLES" | "PROCESS" | "REFERENCES" | "RELOAD" | "SELECT" | "SHOW VIEW" | "SUPER" | "TRIGGER" | "UPDATE" | "USAGE"; date_created?: string; date_created_code?: number; date_created_timestamp?: string; @@ -71,7 +131,9 @@ export type DSQL_DATASQUIREL_API_KEYS = { name?: string; slug?: string; key?: string; - scope?: string; + scope?: "readOnly" | "fullAccess"; + all_dbs?: 0 | 1; + media_only?: 0 | 1; csrf?: string; date_created?: string; date_created_code?: number; @@ -81,50 +143,35 @@ export type DSQL_DATASQUIREL_API_KEYS = { date_updated_timestamp?: string; } +export type DSQL_DATASQUIREL_API_KEYS_SCOPED_RESOURCES = { + id?: number; + uuid?: string; + user_id?: number; + api_key_id?: number; + db_id?: number; + db_schema_id?: number; + db_slug?: string; + table_schema_id?: number; + table_slug?: string; + all_tables?: 0 | 1; + date_created?: string; + date_created_code?: number; + date_created_timestamp?: string; + date_updated?: string; + date_updated_code?: number; + date_updated_timestamp?: string; +} + export type DSQL_DATASQUIREL_INVITATIONS = { - id?: number; - uuid?: string; - inviting_user_id?: number; - invited_user_email?: string; - invitation_status?: string; - database_access?: string; - priviledge?: string; - db_tables_data?: string; - date_created?: string; - date_created_code?: number; - date_created_timestamp?: string; - date_updated?: string; - date_updated_code?: number; - date_updated_timestamp?: string; -} - -export type DSQL_DATASQUIREL_USER_USERS = { id?: number; uuid?: string; user_id?: number; invited_user_id?: number; - database?: string; + invited_user_email?: string; + invitation_status?: "pending" | 
"accepted" | "rejected" | "cancelled"; database_access?: string; - first_name?: string; - last_name?: string; - email?: string; - username?: string; - password?: string; - phone?: string; - user_type?: string; - user_priviledge?: string; - image?: string; - image_thumbnail?: string; - city?: string; - state?: string; - country?: string; - zip_code?: string; - address?: string; - social_login?: number; - social_platform?: string; - social_id?: string; - verification_status?: number; - more_user_data?: string; + db_tables_data?: string; + email_sent?: 0 | 1; date_created?: string; date_created_code?: number; date_created_timestamp?: string; @@ -133,14 +180,19 @@ export type DSQL_DATASQUIREL_USER_USERS = { date_updated_timestamp?: string; } -export type DSQL_DATASQUIREL_DELEGATED_USER_TABLES = { +export type DSQL_DATASQUIREL_DELEGATED_RESOURCES = { id?: number; uuid?: string; + delegated_users_id?: number; + user_id?: number; delegated_user_id?: number; - root_user_id?: number; - database?: string; - table?: string; - priviledge?: string; + db_id?: number; + db_schema_id?: number; + db_slug?: string; + table_schema_id?: number; + table_slug?: string; + permission?: "read" | "write" | "edit" | "delete"; + all_tables?: 0 | 1; date_created?: string; date_created_code?: number; date_created_timestamp?: string; @@ -153,6 +205,7 @@ export type DSQL_DATASQUIREL_USER_DATABASES = { id?: number; uuid?: string; user_id?: number; + db_schema_id?: number; db_name?: string; db_slug?: string; db_full_name?: string; @@ -163,9 +216,11 @@ export type DSQL_DATASQUIREL_USER_DATABASES = { remote_db_full_name?: string; remote_connection_host?: string; remote_connection_key?: string; - active_clone?: number; + active_clone?: 0 | 1; active_clone_parent_db?: string; + active_clone_parent_db_id?: number; active_data?: number; + last_checked_date_code?: number; date_created?: string; date_created_code?: number; date_created_timestamp?: string; @@ -179,14 +234,16 @@ export type DSQL_DATASQUIREL_USER_DATABASE_TABLES = { uuid?: string; user_id?: number; db_id?: number; + table_schema_id?: number; db_slug?: string; table_name?: string; table_slug?: string; table_description?: string; child_table?: number; - child_table_parent_database?: string; - child_table_parent_table?: string; - active_data?: number; + child_table_parent_database_schema_id?: number; + child_table_parent_table_schema_id?: number; + active_data?: 0 | 1; + last_checked_date_code?: number; date_created?: string; date_created_code?: number; date_created_timestamp?: string; @@ -203,13 +260,30 @@ export type DSQL_DATASQUIREL_USER_MEDIA = { folder?: string; media_url?: string; media_thumbnail_url?: string; - media_path?: string; - media_thumbnail_path?: string; - media_type?: string; + media_base64?: string; + media_thumbnail_base64?: string; + media_type?: "file" | "image" | "video"; + media_stats?: string; + mime_type?: string; width?: number; height?: number; size?: number; - private?: number; + private?: 0 | 1; + private_folder?: 0 | 1; + date_created?: string; + date_created_code?: number; + date_created_timestamp?: string; + date_updated?: string; + date_updated_code?: number; + date_updated_timestamp?: string; +} + +export type DSQL_DATASQUIREL_USER_PRIVATE_FOLDERS = { + id?: number; + uuid?: string; + user_id?: number; + folder_path?: string; + child_folder?: 0 | 1; date_created?: string; date_created_code?: number; date_created_timestamp?: string; @@ -223,8 +297,6 @@ export type DSQL_DATASQUIREL_DELEGATED_USERS = { uuid?: string; user_id?: number; 
delegated_user_id?: number; - permissions?: string; - permission_level_code?: number; date_created?: string; date_created_code?: number; date_created_timestamp?: string; @@ -261,40 +333,6 @@ export type DSQL_DATASQUIREL_NOTIFICATIONS = { date_updated_timestamp?: string; } -export type DSQL_DATASQUIREL_DOCS_PAGES = { - id?: number; - uuid?: string; - title?: string; - slug?: string; - description?: string; - content?: string; - text_content?: string; - level?: number; - page_order?: number; - parent_id?: number; - date_created?: string; - date_created_code?: number; - date_created_timestamp?: string; - date_updated?: string; - date_updated_code?: number; - date_updated_timestamp?: string; -} - -export type DSQL_DATASQUIREL_DOCS_PAGE_EXTRA_LINKS = { - id?: number; - uuid?: string; - docs_page_id?: number; - title?: string; - description?: string; - url?: string; - date_created?: string; - date_created_code?: number; - date_created_timestamp?: string; - date_updated?: string; - date_updated_code?: number; - date_updated_timestamp?: string; -} - export type DSQL_DATASQUIREL_DELETED_API_KEYS = { id?: number; uuid?: string; @@ -341,4 +379,16 @@ export type DSQL_DATASQUIREL_PROCESS_QUEUE = { date_updated?: string; date_updated_code?: number; date_updated_timestamp?: string; +} + +export type DSQL_DATASQUIREL_BACKUPS = { + id?: number; + uuid?: string; + user_id?: number; + date_created?: string; + date_created_code?: number; + date_created_timestamp?: string; + date_updated?: string; + date_updated_code?: number; + date_updated_timestamp?: string; } \ No newline at end of file diff --git a/package-shared/types/index.ts b/package-shared/types/index.ts index f9a8e79..fd38b13 100644 --- a/package-shared/types/index.ts +++ b/package-shared/types/index.ts @@ -1,17 +1,72 @@ import type { RequestOptions } from "https"; + import { + DSQL_DATASQUIREL_API_KEYS, + DSQL_DATASQUIREL_API_KEYS_SCOPED_RESOURCES, + DSQL_DATASQUIREL_BACKUPS, + DSQL_DATASQUIREL_DELEGATED_RESOURCES, + DSQL_DATASQUIREL_DELEGATED_USERS, + DSQL_DATASQUIREL_INVITATIONS, + DSQL_DATASQUIREL_MARIADB_USERS, DSQL_DATASQUIREL_PROCESS_QUEUE, DSQL_DATASQUIREL_USER_DATABASE_TABLES, DSQL_DATASQUIREL_USER_DATABASES, DSQL_DATASQUIREL_USER_MEDIA, + DSQL_DATASQUIREL_USER_PRIVATE_FOLDERS, + DSQL_DATASQUIREL_USERS, } from "./dsql"; import { Editor } from "tinymce"; import sharp from "sharp"; +import DataTypes from "../data/data-types"; +import { IncomingMessage, ServerResponse } from "http"; +import { CookieNames } from "../dict/cookie-names"; export type DSQL_DatabaseFullName = string; +export type DSQL_DATASQUIREL_USER_BACKUPS_JOIN = DSQL_DATASQUIREL_BACKUPS & { + [k in (typeof UserSelectFields)[number]["alias"]]?: string; +}; + +export type DSQL_DATASQUIREL_DELEGATED_DATABASES_JOIN = + DSQL_DATASQUIREL_DELEGATED_RESOURCES & + DSQL_DATASQUIREL_USER_DATABASES & { + [k in (typeof UserSelectFields)[number]["alias"]]?: string; + } & { + [k in (typeof DelegatedUserSelectFields)[number]["alias"]]?: string; + }; + +export type DSQL_DATASQUIREL_DELEGATED_USERS_JOIN = + DSQL_DATASQUIREL_DELEGATED_USERS & { + [k in (typeof UserSelectFields)[number]["alias"]]?: string; + } & { + [k in (typeof DelegatedUserSelectFields)[number]["alias"]]?: string; + }; + +export const UsersOmitedFields = [ + "password", + "social_id", + "verification_status", + "date_created", + "date_created_code", + "date_created_timestamp", + "date_updated", + "date_updated_code", + "date_updated_timestamp", +] as const; + +export type DSQL_DATASQUIREL_USERS_FILTERED = Omit< + 
DSQL_DATASQUIREL_USERS, + (typeof UsersOmitedFields)[number] +>; + +export type DSQL_DATASQUIREL_INVITATIONS_JOIN = DSQL_DATASQUIREL_INVITATIONS & { + [k in (typeof UserSelectFields)[number]["alias"]]?: string; +} & { + [k in (typeof InvitedUserSelectFields)[number]["alias"]]?: string; +}; + export interface DSQL_DatabaseSchemaType { - id?: number | string; + id?: string | number; dbName?: string; dbSlug?: string; dbFullName?: string; @@ -20,34 +75,31 @@ export interface DSQL_DatabaseSchemaType { tables: DSQL_TableSchemaType[]; childrenDatabases?: DSQL_ChildrenDatabaseObject[]; childDatabase?: boolean; - childDatabaseDbFullName?: string; + childDatabaseDbId?: string | number; updateData?: boolean; } export interface DSQL_ChildrenDatabaseObject { dbId?: string | number; - dbFullName?: string; } export interface DSQL_TableSchemaType { - id?: number | string; + id?: string | number; tableName: string; - tableFullName: string; tableDescription?: string; fields: DSQL_FieldSchemaType[]; indexes?: DSQL_IndexSchemaType[]; childrenTables?: DSQL_ChildrenTablesType[]; childTable?: boolean; updateData?: boolean; - childTableName?: string; - childTableDbFullName?: string; + childTableId?: string | number; tableNameOld?: string; + childTableDbId?: string | number; } export interface DSQL_ChildrenTablesType { - dbNameFull: string; - tableName: string; - tableNameFull?: string; + tableId?: string | number; + dbId?: string | number; } export const TextFieldTypesArray = [ @@ -59,9 +111,11 @@ export const TextFieldTypesArray = [ { title: "CSS", value: "css" }, { title: "Javascript", value: "javascript" }, { title: "Shell", value: "shell" }, + { title: "Code", value: "code" }, ] as const; export type DSQL_FieldSchemaType = { + id?: number | string; fieldName?: string; originName?: string; updatedField?: boolean; @@ -87,6 +141,9 @@ export type DSQL_FieldSchemaType = { cssFiles?: string[]; integerLength?: string | number; decimals?: string | number; + moving?: boolean; + code?: boolean; + options?: string[]; } & { [key in (typeof TextFieldTypesArray)[number]["value"]]?: boolean; }; @@ -101,8 +158,9 @@ export interface DSQL_ForeignKeyType { } export interface DSQL_IndexSchemaType { + id?: string | number; indexName?: string; - indexType?: string; + indexType?: (typeof IndexTypes)[number]; indexTableFields?: DSQL_IndexTableFieldType[]; alias?: string; newTempIndex?: boolean; @@ -367,7 +425,10 @@ export interface PostInsertReturn { changedRows: number; } -export type UserType = DATASQUIREL_LoggedInUser & {}; +export type UserType = DATASQUIREL_LoggedInUser & { + isSuperUser?: boolean; + staticHost?: string; +}; export interface ApiKeyDef { name: string; @@ -483,10 +544,10 @@ export interface MediaFolderPageContextType { export interface TablesContextType { user: UserType; database: DSQL_MYSQL_user_databases_Type; - tables: MYSQL_user_database_tables_table_def[]; - targetTable: MYSQL_user_database_tables_table_def | null; + tables: DSQL_DATASQUIREL_USER_DATABASE_TABLES[]; + targetTable: DSQL_DATASQUIREL_USER_DATABASE_TABLES | null; setTargetTable: React.Dispatch< - React.SetStateAction + React.SetStateAction >; query: any; confirmedDelegetedUser: any; @@ -517,10 +578,10 @@ export interface EditTableContextType { export interface SingleDatabaseContextType { user: UserType; database: DSQL_MYSQL_user_databases_Type; - tables: MYSQL_user_database_tables_table_def[]; - targetTable: MYSQL_user_database_tables_table_def | null; + tables: DSQL_DATASQUIREL_USER_DATABASE_TABLES[]; + targetTable: 
DSQL_DATASQUIREL_USER_DATABASE_TABLES | null; setTargetTable: React.Dispatch< - React.SetStateAction + React.SetStateAction >; query: any; confirmedDelegetedUser: any; @@ -565,20 +626,6 @@ export interface DocsAsidePageObject { level?: number; } -export interface AllUserUsersContextType { - user?: UserType; - users?: MYSQL_delegated_users_table_def[]; - targetUser?: MYSQL_user_users_table_def | undefined; - setTargetUser?: React.Dispatch< - React.SetStateAction - >; - databases?: DSQL_MYSQL_user_databases_Type[]; - pendingInvitations?: MYSQL_invitations_table_def[]; - pendingInvitationsReceived?: any[]; - adminUsers?: any[]; - invitedAccounts?: any[]; -} - export interface AddSocialLoginContextType { user?: UserType; database?: DSQL_MYSQL_user_databases_Type; @@ -586,16 +633,6 @@ export interface AddSocialLoginContextType { socialLogins?: SocialLoginObjectType[]; } -export interface DelegatedDbContextType { - user: UserType; - users: MYSQL_user_users_table_def[]; - targetUser: MYSQL_user_users_table_def | undefined; - setTargetUser: React.Dispatch< - React.SetStateAction - >; - database: DSQL_MYSQL_user_databases_Type; -} - export interface AddUserUserContextType { user: UserType; database: DSQL_MYSQL_user_databases_Type; @@ -604,48 +641,19 @@ export interface AddUserUserContextType { confirmedDelegetedUser: any; } -export interface UserUsersContextType { - user: UserType; - users: MYSQL_user_users_table_def[]; - targetUser?: MYSQL_user_users_table_def; - setTargetUser: React.Dispatch< - React.SetStateAction - >; - database: DSQL_MYSQL_user_databases_Type; - table: DSQL_TableSchemaType; - dbSchemaData: DSQL_DatabaseSchemaType[]; - query: any; - confirmedDelegetedUser: any; -} - -export interface DatabaseSingleUserContextType { - user: UserType; - database: DSQL_MYSQL_user_databases_Type; - singleUser: MYSQL_user_users_table_def; - table: DSQL_TableSchemaType; - dbSchemaData: DSQL_DatabaseSchemaType[]; - query: any; - confirmedDelegetedUser: any; -} - -export interface SingleUserUserContextType { - user: UserType; - singleUser: MYSQL_user_users_table_def; -} - export interface AddUserContextType { user: UserType; - users: MYSQL_delegated_users_table_def[]; + users: DSQL_DATASQUIREL_DELEGATED_USERS[]; databases: DSQL_MYSQL_user_databases_Type[]; query: any; } export interface MediaContextType { user: UserType; - media: MYSQL_user_media_table_def[]; - targetMedia: MYSQL_user_media_table_def | null; + media: DSQL_DATASQUIREL_USER_MEDIA[]; + targetMedia: DSQL_DATASQUIREL_USER_MEDIA | null; setTargetMedia: React.Dispatch< - React.SetStateAction + React.SetStateAction >; folders: string[]; staticHost: string; @@ -653,10 +661,10 @@ export interface MediaContextType { export interface MediaSubFolderContextType { user: UserType; - media: MYSQL_user_media_table_def[]; - targetMedia: MYSQL_user_media_table_def | null; + media: DSQL_DATASQUIREL_USER_MEDIA[]; + targetMedia: DSQL_DATASQUIREL_USER_MEDIA | null; setTargetMedia: React.Dispatch< - React.SetStateAction + React.SetStateAction >; folders: string[]; query: any; @@ -689,7 +697,7 @@ export interface SingleTableContextType { user: UserType; database: DSQL_MYSQL_user_databases_Type; table: DSQL_TableSchemaType; - tableRecord: MYSQL_user_database_tables_table_def; + tableRecord: DSQL_DATASQUIREL_USER_DATABASE_TABLES; tableFields: DSQL_FieldSchemaType[]; setTableFields: React.Dispatch< React.SetStateAction @@ -830,7 +838,7 @@ export interface DbConnectContextType { export interface ImageObjectType { imageName?: string; - mimeType?: keyof 
sharp.FormatEnum | sharp.AvailableFormatInfo; + mimeType?: keyof sharp.FormatEnum; imageSize?: number; thumbnailSize?: number; private?: boolean; @@ -895,165 +903,18 @@ export interface UserFileObject2 { content?: UserFileObject2[]; } -export interface MYSQL_user_users_table_def { - id?: number; - user_id?: number; - invited_user_id?: number; - database?: string; - database_access?: string; - first_name?: string; - last_name?: string; - email?: string; - username?: string; - password?: string; - phone?: string; - user_type?: string; - user_priviledge?: string; - image?: string; - image_thumbnail?: string; - city?: string; - state?: string; - country?: string; - zip_code?: string; - address?: string; - social_login?: number; - social_platform?: string; - social_id?: string; - verification_status?: number; - more_user_data?: string; - date_created?: string; - date_created_code?: number; - date_created_timestamp?: string; - date_updated?: string; - date_updated_code?: number; - date_updated_timestamp?: string; - inviteeFirstName?: string; - inviteeLastName?: string; - inviteeEmail?: string; - inviteeImage?: string; -} - -export interface MYSQL_user_database_tables_table_def { - id?: number; - user_id?: number; - db_id?: number; - db_slug?: string; - table_name?: string; - table_slug?: string; - table_description?: string; - child_table?: number; - active_data?: 0 | 1; - child_table_parent_database?: string; - child_table_parent_table?: string; - date_created?: string; - date_created_code?: number; - date_created_timestamp?: string; - date_updated?: string; - date_updated_code?: number; - date_updated_timestamp?: string; -} - -export interface MYSQL_user_media_table_def { - id?: number; - user_id?: number; - media_name?: string; - folder?: string; - media_url?: string; - media_thumbnail_url?: string; - media_path?: string; - media_thumbnail_path?: string; - media_type?: string; - width?: number; - height?: number; - size?: number; - private?: number; - date_created?: string; - date_created_code?: number; - date_created_timestamp?: string; - date_updated?: string; - date_updated_code?: number; - date_updated_timestamp?: string; -} - -export interface MYSQL_delegated_users_table_def { - id?: number; - user_id?: number; - delegated_user_id?: number; - permissions?: string; - permission_level_code?: number; - date_created?: string; - date_created_code?: number; - date_created_timestamp?: string; - date_updated?: string; - date_updated_code?: number; - date_updated_timestamp?: string; -} - -export interface MYSQL_invitations_table_def { - id?: number; - inviting_user_id?: number; - invited_user_email?: string; - invitation_status?: string; - database_access?: string; - priviledge?: string; - db_tables_data?: string; - date_created?: string; - date_created_code?: number; - date_created_timestamp?: string; - date_updated?: string; - date_updated_code?: number; - date_updated_timestamp?: string; -} - -export interface MYSQL_docs_pages_table_def { - id?: number; - title?: string; - slug?: string; - description?: string; - content?: string; - text_content?: string; - level?: number; - page_order?: number; - parent_id?: number; - date_created?: string; - date_created_code?: number; - date_created_timestamp?: string; - date_updated?: string; - date_updated_code?: number; - date_updated_timestamp?: string; -} - -export interface MYSQL_delegated_user_tables_table_def { - id?: number; - delegated_user_id?: number; - root_user_id?: number; - database?: string; - table?: string; - priviledge?: string; - 
date_created?: string; - date_created_code?: number; - date_created_timestamp?: string; - date_updated?: string; - date_updated_code?: number; - date_updated_timestamp?: string; -} - export type ApiKeyObject = { + id: number; user_id?: string | number; full_access?: boolean; sign?: string; date_code?: number; - target_database?: string; - target_table?: string; error?: string; }; export type AddApiKeyRequestBody = { - api_key_name: string; - api_key_slug: string; - api_key_scope?: "fullAccess" | "readOnly"; - target_database?: string; - target_table?: string; + api_key: DSQL_DATASQUIREL_API_KEYS; + scoped_resources?: DSQL_DATASQUIREL_API_KEYS_SCOPED_RESOURCES[]; }; export type CheckApiCredentialsFn = ( @@ -1075,16 +936,8 @@ export type FetchApiFn = ( export type FetchApiOptions = RequestInit & { method: - | "POST" - | "GET" - | "DELETE" - | "PUT" - | "PATCH" - | "post" - | "get" - | "delete" - | "put" - | "patch"; + | (typeof DataCrudRequestMethods)[number] + | (typeof DataCrudRequestMethodsLowerCase)[number]; body?: object | string; headers?: FetchHeader; query?: { [key: string]: any }; @@ -1105,16 +958,31 @@ export const ServerQueryOperators = ["AND", "OR"] as const; export const ServerQueryEqualities = [ "EQUAL", "LIKE", + "LIKE_RAW", + "NOT LIKE", + "NOT LIKE_RAW", "NOT EQUAL", "REGEXP", "FULLTEXT", + "IN", + "NOT IN", + "BETWEEN", + "NOT BETWEEN", + "IS NULL", + "IS NOT NULL", + "EXISTS", + "NOT EXISTS", + "GREATER THAN", + "GREATER THAN OR EQUAL", + "LESS THAN", + "LESS THAN OR EQUAL", ] as const; export type ServerQueryParam< T extends { [k: string]: any } = { [k: string]: any } > = { - selectFields?: string[]; - omitFields?: string[]; + selectFields?: (keyof T)[]; + omitFields?: (keyof T)[]; query?: ServerQueryQueryObject; limit?: number; page?: number; @@ -1129,6 +997,7 @@ export type ServerQueryParam< fieldName: keyof T; }; join?: ServerQueryParamsJoin[]; + group?: (keyof T)[]; [key: string]: any; }; @@ -1151,7 +1020,7 @@ export type ServerQueryQueryObject = export type FetchDataParams = { path: string; - method?: "GET" | "POST" | "PATCH" | "PUT" | "DELETE"; + method?: (typeof DataCrudRequestMethods)[number]; body?: object | string; query?: AuthFetchQuery; tableName?: string; @@ -1495,7 +1364,23 @@ export type ApiGetQueryObject< dbFullName?: string; }; -export const DataCrudRequestMethods = ["GET", "POST", "PUT", "DELETE"] as const; +export const DataCrudRequestMethods = [ + "GET", + "POST", + "PUT", + "PATCH", + "DELETE", + "OPTIONS", +] as const; + +export const DataCrudRequestMethodsLowerCase = [ + "get", + "post", + "put", + "patch", + "delete", + "options", +] as const; export type DsqlMethodCrudParam< T extends { [key: string]: any } = { [key: string]: any } @@ -1513,7 +1398,7 @@ export type DsqlMethodCrudParam< transformQuery?: DsqlCrudTransformQueryFunction; existingData?: T; targetId?: string | number; - sanitize?: (data?: T) => T; + sanitize?: ({ data, batchData }: { data?: T; batchData?: T[] }) => T | T[]; debug?: boolean; }; @@ -1542,102 +1427,186 @@ export type DsqlCrudQueryObject< query?: ServerQueryQueryObject; }; -export type DsqlCrudParam< +export type SQLDeleteGeneratorParams< T extends { [key: string]: any } = { [key: string]: any } +> = { + tableName: string; + deleteKeyValues?: SQLDeleteData[]; + dbFullName?: string; + data?: any; +}; + +export type SQLDeleteData< + T extends { [key: string]: any } = { [key: string]: any } +> = { + key: keyof T; + value: string | number | null | undefined; + operator?: (typeof ServerQueryEqualities)[number]; +}; + +export 
type DsqlCrudParam< + T extends { [key: string]: any } = { [key: string]: any }, + K extends string = string > = { action: (typeof DsqlCrudActions)[number]; - table: string; + table: K; data?: T; + batchData?: T[]; + deleteData?: T; + deleteKeyValues?: SQLDeleteData[]; targetId?: string | number; targetValue?: string | number; - targetField?: string; + targetField?: keyof T; query?: DsqlCrudQueryObject; - sanitize?: (data?: T) => T; + sanitize?: ({ data, batchData }: { data?: T; batchData?: T[] }) => T | T[]; debug?: boolean; count?: boolean; countOnly?: boolean; + dbFullName?: string; + dbName?: string; }; export type ErrorCallback = (title: string, error: Error, data?: any) => void; export interface MariaDBUser { - Host: string; - User: string; - Password: string; - Select_priv: string; - Insert_priv: string; - Update_priv: string; - Delete_priv: string; - Create_priv: string; - Drop_priv: string; - Reload_priv: string; - Shutdown_priv: string; - Process_priv: string; - File_priv: string; - Grant_priv: string; - References_priv: string; - Index_priv: string; - Alter_priv: string; - Show_db_priv: string; - Super_priv: string; - Create_tmp_table_priv: string; - Lock_tables_priv: string; - Execute_priv: string; - Repl_slave_priv: string; - Repl_client_priv: string; - Create_view_priv: string; - Show_view_priv: string; - Create_routine_priv: string; - Alter_routine_priv: string; - Create_user_priv: string; - Event_priv: string; - Trigger_priv: string; - Create_tablespace_priv: string; - Delete_history_priv: string; - ssl_type: string; - ssl_cipher: string; - x509_issuer: string; - x509_subject: string; - max_questions: number; - max_updates: number; - max_connections: number; - max_user_connections: number; - plugin: string; - authentication_string: string; - password_expired: string; - is_role: string; - default_role: string; - max_statement_time: number; + Host?: string; + User?: string; + Password?: string; + Select_priv?: string; + Insert_priv?: string; + Update_priv?: string; + Delete_priv?: string; + Create_priv?: string; + Drop_priv?: string; + Reload_priv?: string; + Shutdown_priv?: string; + Process_priv?: string; + File_priv?: string; + Grant_priv?: string; + References_priv?: string; + Index_priv?: string; + Alter_priv?: string; + Show_db_priv?: string; + Super_priv?: string; + Create_tmp_table_priv?: string; + Lock_tables_priv?: string; + Execute_priv?: string; + Repl_slave_priv?: string; + Repl_client_priv?: string; + Create_view_priv?: string; + Show_view_priv?: string; + Create_routine_priv?: string; + Alter_routine_priv?: string; + Create_user_priv?: string; + Event_priv?: string; + Trigger_priv?: string; + Create_tablespace_priv?: string; + Delete_history_priv?: string; + ssl_type?: string; + ssl_cipher?: string; + x509_issuer?: string; + x509_subject?: string; + max_questions?: number; + max_updates?: number; + max_connections?: number; + max_user_connections?: number; + plugin?: string; + authentication_string?: string; + password_expired?: string; + is_role?: string; + default_role?: string; + max_statement_time?: number; } +export const QueryFields = [ + "duplicate", + "user_id", + "delegated_user_id", + "db_id", + "table_id", + "db_slug", +] as const; + export type PagePropsType = { user?: UserType | null; pageUrl?: string | null; - query?: any; + envObject?: { [k in (typeof EnvKeys)[number]]?: string } | null; + query?: { [k in (typeof QueryFields)[number]]?: string }; databases?: DSQL_DATASQUIREL_USER_DATABASES[] | null; + delegatedDatabases?: 
DSQL_DATASQUIREL_DELEGATED_DATABASES_JOIN[] | null; + delegatedDatabase?: DSQL_DATASQUIREL_DELEGATED_DATABASES_JOIN | null; database?: DSQL_DATASQUIREL_USER_DATABASES | null; + invitationsSent?: DSQL_DATASQUIREL_INVITATIONS_JOIN[] | null; + invitationsReceived?: DSQL_DATASQUIREL_INVITATIONS_JOIN[] | null; + invitationSent?: DSQL_DATASQUIREL_INVITATIONS_JOIN | null; + invitationReceived?: DSQL_DATASQUIREL_INVITATIONS_JOIN | null; databaseTables?: DSQL_DATASQUIREL_USER_DATABASE_TABLES[] | null; + delegatedUsers?: DSQL_DATASQUIREL_DELEGATED_USERS_JOIN[] | null; + delegatedUser?: DSQL_DATASQUIREL_DELEGATED_USERS_JOIN | null; + usersWhoDelegatedMe?: DSQL_DATASQUIREL_DELEGATED_USERS_JOIN[] | null; + userWhoDelegatedMe?: DSQL_DATASQUIREL_DELEGATED_USERS_JOIN | null; databaseTable?: DSQL_DATASQUIREL_USER_DATABASE_TABLES | null; + singleUser?: DSQL_DATASQUIREL_USERS_FILTERED | null; dbCount?: number | null; tableCount?: number | null; mediaCount?: number | null; apiKeysCount?: number | null; databaseSchema?: DSQL_DatabaseSchemaType | null; + clonedDatabaseSchema?: DSQL_DatabaseSchemaType | null; tableSchema?: DSQL_TableSchemaType | null; userMedia?: DSQL_DATASQUIREL_USER_MEDIA[] | null; mediaCurrentFolder?: string | null; appData?: DsqlAppData | null; staticHost?: string | null; folders?: string[] | null; + activeClonedTable?: boolean | null; + tableEntries?: DefaultEntryType[] | null; + tableEntriesCount?: number | null; + tableEntry?: DefaultEntryType | null; + apiKeys?: DSQL_DATASQUIREL_API_KEYS[] | null; + mariadbUsers?: DSQL_DATASQUIREL_MARIADB_USERS[] | null; + mariadbUser?: DSQL_DATASQUIREL_MARIADB_USERS | null; + privateFolders?: DSQL_DATASQUIREL_USER_PRIVATE_FOLDERS[] | null; + privateFolder?: DSQL_DATASQUIREL_USER_PRIVATE_FOLDERS | null; + appConfig?: SiteConfig | null; + userConfig?: SiteConfig | null; + suUsers?: DSQL_DATASQUIREL_USERS[] | null; + suUsersCount?: number | null; + suUser?: DSQL_DATASQUIREL_USERS | null; + dsqlMainSchema?: DSQL_DatabaseSchemaType | null; + suSQLUsers?: MariaDBUser[] | null; + suSQLUsersCount?: number | null; + suSQLUser?: MariaDBUser | null; + backups?: DSQL_DATASQUIREL_BACKUPS[] | null; + backup?: DSQL_DATASQUIREL_BACKUPS | null; + appBackups?: DSQL_DATASQUIREL_BACKUPS[] | null; + appBackup?: DSQL_DATASQUIREL_BACKUPS | null; + userBackups?: DSQL_DATASQUIREL_USER_BACKUPS_JOIN[] | null; + userBackup?: DSQL_DATASQUIREL_USER_BACKUPS_JOIN | null; + suDatabases?: SQLShowDatabaseObject[] | null; + suDatabase?: SQLShowDatabaseObject | null; + isSuperUserPage?: boolean; + appVersion?: (typeof AppVersions)[number]; }; export type APIResponseObject = { success: boolean; payload?: T; + payloadBase64?: string; + payloadThumbnailBase64?: string; + payloadURL?: string; + payloadThumbnailURL?: string; error?: any; msg?: string; - queryRes?: any; + queryObject?: { + sql?: string; + params?: string[]; + }; status?: number; + count?: number; + errors?: DSQLErrorObject[]; + debug?: any; + batchPayload?: any[][] | null; }; export const UserTypes = ["su", "admin"] as const; @@ -1719,8 +1688,14 @@ export const DockerComposeServices = [ "websocket", "static", "db", - "db-load-balancer", + "maxscale", "post-db-setup", + "web-app-post-db-setup", + "post-replica-db-setup", + "db-replica-1", + "db-replica-2", + "db-cron", + "web-app-post-db-setup", ] as const; export type DockerComposeServices = { @@ -1728,11 +1703,12 @@ export type DockerComposeServices = { }; export type DockerComposeNetworks = { - datasquirel: { - driver: "bridge"; - ipam: { + [k: string]: { + driver?: "bridge"; 
+ ipam?: { config: DockerComposeNetworkConfigObject[]; }; + external?: boolean; }; }; @@ -1771,7 +1747,7 @@ export type DockerComposeServicesBuildObject = { }; export type DockerComposeServiceNetworkObject = { - datasquirel: { + [k: string]: { ipv4_address: string; }; }; @@ -1787,7 +1763,9 @@ export type SiteSetup = { }; }; -export type AppRefObject = {}; +export type AppRefObject = { + currentMariadbUser?: DSQL_DATASQUIREL_MARIADB_USERS; +}; export type DsqlAppData = { DSQL_REMOTE_SQL_HOST?: string; @@ -1802,10 +1780,14 @@ export type DsqlAppData = { DSQL_GITHUB_ID?: string; }; -export const MediaTypes = ["image", "file"] as const; +export const MediaTypes = ["image", "file", "video"] as const; export type MediaUploadDataType = ImageObjectType & - FileObjectType & { private?: boolean }; + FileObjectType & { + private?: boolean; + privateFolder?: boolean; + overwrite?: boolean; + }; export const ImageMimeTypes: (keyof sharp.FormatEnum)[] = [ "webp", @@ -1815,3 +1797,481 @@ export const ImageMimeTypes: (keyof sharp.FormatEnum)[] = [ "jpeg", "jpg", ] as const; + +export const FileMimeTypes = [ + "pdf", + "csv", + "json", + "sql", + "xlsx", + "txt", + "zip", + "xz", + "yaml", + "yml", +] as const; + +export const VideoMimeTypes = ["mp4", "wav"] as const; + +export const CurrentlyEditedFieldActions = [ + "edit-field", + "edit-index", + "delete-field", + "delete-index", + "new-field", + "new-index", + "move-up", + "move-down", + "complete", +] as const; + +export type CurrentlyEditedTableSchemaType = { + action: (typeof CurrentlyEditedFieldActions)[number]; + field?: DSQL_FieldSchemaType; + fieldIndex?: number; + index?: DSQL_IndexSchemaType; + indexIndex?: number; + spliceIndex?: number; + elRef?: React.RefObject; +}; + +export type DataTypesType = { + title: string; + name: (typeof DataTypes)[number]["name"]; + value?: string; + argument?: true; + description?: string; + maxValue?: number; +}; + +export type DefaultSQLValuesLiteralObject = { + title: string; + value: string; + description?: string; + dataType: (typeof DataTypes)[number]["name"]; +}; + +export const DefaultSQLValuesLiteral: DefaultSQLValuesLiteralObject[] = [ + { + title: "CURRENT_TIMESTAMP", + value: "CURRENT_TIMESTAMP", + description: "", + dataType: "TIMESTAMP", + }, + { + title: "UUID", + value: "UUID()", + description: "", + dataType: "UUID", + }, +] as const; + +export type ClonedTableInfo = { + dbId?: string | number; + tableId?: string | number; + keepUpdated?: boolean; + keepDataUpdated?: boolean; +}; + +export type DefaultEntryType = { + id?: number; + uuid?: string; + date_created?: string; + date_created_code?: number; + date_created_timestamp?: string; + date_updated?: string; + date_updated_code?: number; + date_updated_timestamp?: string; +} & { + [k: string]: string | number | null; +}; + +export const IndexTypes = ["regular", "full_text"] as const; + +export type LoginUserParam = { + key?: string; + database: string; + payload: { + email?: string; + username?: string; + password?: string; + }; + additionalFields?: string[]; + request?: IncomingMessage & { [s: string]: any }; + response?: ServerResponse & { [s: string]: any }; + encryptionKey?: string; + encryptionSalt?: string; + email_login?: boolean; + email_login_code?: string; + temp_code_field?: string; + token?: boolean; + user_id?: string | number; + skipPassword?: boolean; + debug?: boolean; + skipWriteAuthFile?: boolean; + apiUserID?: string | number; + dbUserId?: string | number; + cleanupTokens?: boolean; + secureCookie?: boolean; +}; + +export 
const UserSelectFields = [ + { + field: "first_name", + alias: "user_first_name", + }, + { + field: "last_name", + alias: "user_last_name", + }, + { + field: "email", + alias: "user_email", + }, + { + field: "image_thumbnail", + alias: "user_image_thumbnail", + }, +] as const; + +export const DelegatedUserSelectFields = [ + { + field: "first_name", + alias: "delegated_user_first_name", + }, + { + field: "last_name", + alias: "delegated_user_last_name", + }, + { + field: "email", + alias: "delegated_user_email", + }, + { + field: "image_thumbnail", + alias: "delegated_user_image_thumbnail", + }, +] as const; + +export const InvitedUserSelectFields = [ + { + field: "first_name", + alias: "invited_user_first_name", + }, + { + field: "last_name", + alias: "invited_user_last_name", + }, + { + field: "email", + alias: "invited_user_email", + }, + { + field: "image_thumbnail", + alias: "invited_user_image_thumbnail", + }, +] as const; + +export type DefaultLocalResourcesHookParams< + T extends { [k: string]: any } = { [k: string]: any } +> = { + refresh?: number; + setLoading?: React.Dispatch>; + loadingEndTimeout?: number; + user?: UserType | null; + ready?: boolean; + query?: DsqlCrudQueryObject; +}; + +export type DelegatedUserPermissionObject = { + dbId?: string | number; + tableId?: string | number; + dbSlug?: string; + tableSlug?: string; + permission?: DSQL_DATASQUIREL_DELEGATED_RESOURCES["permission"]; +}; + +export type QueryObject = { [k in (typeof QueryFields)[number]]?: string }; + +export type CookiesObject = { + [k in (typeof CookieNames)[keyof typeof CookieNames]]?: string; +}; + +export interface CookieOptions { + expires?: Date; + maxAge?: number; + path?: string; + domain?: string; + secure?: boolean; + httpOnly?: boolean; +} + +export type DSQLErrorObject = { + sql?: string; + sqlValues?: any[]; + error?: string; +}; + +export const SQLPermissions = [ + "ALL PRIVILEGES", + "ALTER", + "ALTER ROUTINE", + "CREATE", + "CREATE ROUTINE", + "CREATE TEMPORARY TABLES", + "CREATE VIEW", + "DELETE", + "DROP", + "EVENT", + "EXECUTE", + "FILE", + "INDEX", + "INSERT", + "LOCK TABLES", + "PROCESS", + "REFERENCES", + "RELOAD", + "REPLICATION CLIENT", + "REPLICATION SLAVE", + "SELECT", + "SHOW VIEW", + "SUPER", + "TRIGGER", + "UPDATE", + "USAGE", +] as const; + +export const UserSQLPermissions = [ + "SELECT", + "ALTER", + "ALTER ROUTINE", + "CREATE", + "CREATE ROUTINE", + "CREATE TEMPORARY TABLES", + "CREATE VIEW", + "DELETE", + "DROP", + "EVENT", + "EXECUTE", + "FILE", + "INDEX", + "INSERT", + "LOCK TABLES", + "PROCESS", + "REFERENCES", + "RELOAD", + "SHOW VIEW", + "SUPER", + "TRIGGER", + "UPDATE", + "USAGE", +] as const; + +export type DatabaseScopedAccessObjectAccessedDatabase = { + dbId?: string | number; + dbSlug?: string; + dbSchemaId?: string | number; +}; + +export type DatabaseScopedAccessObjectTable = { + dbId?: string | number; + dbSlug: string; + dbSchemaId?: string | number; + tableSlug?: string; + tableSchemaId?: string | number; +}; + +export type DatabaseScopedAccessObject = { + accessedDatabase: DatabaseScopedAccessObjectAccessedDatabase; + dbSlug: string; + grants?: UserGrantType[]; + allGrants?: boolean; + tables?: DatabaseScopedAccessObjectTable[]; + allTables?: boolean; +}; + +export type UserGrantType = (typeof UserSQLPermissions)[number]; + +export type SiteConfig = { + main: SiteConfigMain; + mariadb_servers?: SiteConfigMariadbServers; + maxscale?: SiteConfigMaxscale; +}; + +export type SiteConfigMain = { + max_image_width?: SiteConfigMainValue; + thumbnail_size?: 
SiteConfigMainValue; + sharp_image_quality?: SiteConfigMainValue; + max_backups?: SiteConfigMainValue; + max_disk_usage?: SiteConfigMainValue; +}; + +export type SiteConfigMainValue = { + value: number | null; + description?: string | null; +}; + +export type SiteConfigMariadbServers = { + primary: SiteConfigMariadbServer; + replicas: SiteConfigMariadbServer[]; +}; + +export type SiteConfigMariadbServer = { + server_id: number; + ip: string; + proxy_ip?: string; + master_ip?: string; + master_port?: number; + host?: string; + port: number; + /** + * Whether this replica belongs in the + * same docker compose stack as main + */ + is_stack_replica?: boolean; + users: { + root: SiteConfigMariadbServerUser; + replication: SiteConfigMariadbServerUser; + }; +}; + +export type SiteConfigMariadbServerUser = { + user: string; + pass: string; + host?: string; +}; + +export type SiteConfigMaxscale = { + read_write_port: number; + read_only_port: number; + admin_port: number; +}; + +export const APIParadigms = ["crud", "media", "sql"] as const; + +export const AppVersions = [ + { + title: "Community", + value: "community", + }, + { + title: "Pro", + value: "pro", + }, + { + title: "Enterprise", + value: "enterprise", + }, + { + title: "Full", + value: "full", + }, +] as const; + +export const EnvKeys = [ + "DSQL_HOST", + "NEXT_PUBLIC_DSQL_HOST", + "DSQL_STATIC_HOST", + "DSQL_SOCKET_DOMAIN", + "DSQL_HOST_ENV", + "DSQL_PORT", + "DSQL_PRODUCTION_PORT", + "DSQL_STATIC_SERVER_PORT", + "DSQL_SITE_URL", + "DSQL_REMOTE_SQL_HOST", + "NEXT_PUBLIC_DSQL_REMOTE_SQL_HOST", + "DSQL_DB_TARGET_IP_ADDRESS", + "NEXT_PUBLIC_VERSION", + "DSQL_USER_DB_PREFIX", + "DSQL_USER_DELEGATED_DB_COOKIE_PREFIX", + "DSQL_NETWORK_IP_PREFIX", + "DSQL_NETWORK_GATEWAY", + "DSQL_NETWORK_SUBNET", + "DSQL_MARIADB_MASTER_HOST", + "DSQL_DB_HOST", + "DSQL_WEB_APP_HOST", + "DSQL_DB_USERNAME", + "DSQL_DB_PASSWORD", + "DSQL_MARIADB_ROOT_PASSWORD", + "DSQL_REPLICATION_USER_PASSWORD", + "DSQL_DB_NAME", + "DSQL_MARIADB_REPLICATION_PASSWORD", + "DSQL_MAXSCALE_PASSWORD", + "DSQL_DB_READ_ONLY_USERNAME", + "DSQL_DB_READ_ONLY_PASSWORD", + "DSQL_DB_FULL_ACCESS_USERNAME", + "DSQL_DB_FULL_ACCESS_PASSWORD", + "DSQL_DB_EXPOSED_PORT", + "DSQL_ENCRYPTION_PASSWORD", + "DSQL_ENCRYPTION_SALT", + "DSQL_SU_USER_ID", + "DSQL_SU_USER_UUID", + "DSQL_SU_EMAIL", + "DSQL_GOOGLE_CLIENT_ID", + "NEXT_PUBLIC_DSQL_GOOGLE_CLIENT_ID", + "DSQL_FACEBOOK_APP_ID", + "DSQL_FACEBOOK_SECRET", + "DSQL_MAIL_HOST", + "DSQL_MAIL_EMAIL", + "DSQL_MAIL_PASSWORD", + "DSQL_TINY_MCE_API_KEY", + "DSQL_GITHUB_ID", + "DSQL_GITHUB_SECRET", + "DSQL_GITHUB_WEBHOOK_SECRET", + "DSQL_GITHUB_WEBHOOK_URL", + "DSQL_DEPLOY_SERVER_PORT", + "DSQL_DOCKERFILE", + "DSQL_VOLUME_APP", + "DSQL_VOLUME_STATIC", + "DSQL_VOLUME_STATIC_CONFIGURATION_FILE", + "DSQL_VOLUME_DB", + "DSQL_VOLUME_DB_CONFIG", + "DSQL_VOLUME_DB_SETUP", + "DSQL_VOLUME_DB_SSL", + "DSQL_USER_LOGIN_KEYS_PATH", + "DSQL_API_KEYS_PATH", + "DSQL_APP_DIR", + "DSQL_DATA_DIR", + "DSQL_CONTACT_EMAIL", + "DSQL_SSL_DIR", + "DSQL_DEPLOYMENT_NAME", + "DSQL_COOKIES_PREFIX", + "DSQL_COOKIES_KEY_NAME", + "DSQL_WEB_APP_FAIL_COUNTS", + "NODE_ARCH", + "DSQL_WEBSOCKET_PORT", + "DSQL_WEBSOCKET_URL", + "NEXT_PUBLIC_DSQL_WEBSOCKET_URL", + "S3_ACCESS_KEY_ID", + "S3_SECRET_ACCESS", + "DSQL_ADDITIONAL_MARIADB_SERVERS", + "DSQL_ARCJET_KEY", +] as const; + +export type SQLShowDatabaseObject = { + Database?: string; +}; + +export type AddUpdateMariadbUserAPIReqBody = { + mariadbUser: DSQL_DATASQUIREL_MARIADB_USERS; + grants?: UserGrantType[]; + accessedDatabases?: 
DatabaseScopedAccessObject[]; + isAllGrants?: boolean; + isAllDbsAccess?: boolean; +}; + +export type APIGetMediaParams = { + mediaID?: string | number; + mediaName?: string; + folder?: string; + query?: DsqlCrudQueryObject; + skipBase64?: "true" | "false"; + stream?: "stream"; + thumbnail?: "true" | "false"; +}; + +export type AddMediaAPIBody = { + media: MediaUploadDataType[]; + folder?: string | null; + type: (typeof MediaTypes)[number]; +}; diff --git a/package-shared/utils/backend/config/grab-config.ts b/package-shared/utils/backend/config/grab-config.ts new file mode 100644 index 0000000..ad4e448 --- /dev/null +++ b/package-shared/utils/backend/config/grab-config.ts @@ -0,0 +1,47 @@ +import fs from "fs"; +import { SiteConfig } from "../../../types"; +import grabDirNames from "../names/grab-dir-names"; +import EJSON from "../../ejson"; +import envsub from "../../envsub"; + +type Params = { + userId?: string | number; +}; + +type Return = { + appConfig: SiteConfig; + userConfig: SiteConfig | null; +}; + +export default function grabConfig(params?: Params): Return { + const { appConfigJSONFile, userConfigJSONFilePath } = grabDirNames({ + userId: params?.userId, + }); + + const appConfigJSON = envsub(fs.readFileSync(appConfigJSONFile, "utf-8")); + const appConfig = EJSON.parse(appConfigJSON) as SiteConfig; + + if (!userConfigJSONFilePath) { + return { appConfig, userConfig: null }; + } + + if (!fs.existsSync(userConfigJSONFilePath)) { + fs.writeFileSync( + userConfigJSONFilePath, + JSON.stringify({ + main: {}, + }), + "utf-8" + ); + } + + const userConfigJSON = envsub( + fs.readFileSync(userConfigJSONFilePath, "utf-8") + ); + + const userConfig = (EJSON.parse(userConfigJSON) || { + main: {}, + }) as SiteConfig; + + return { appConfig, userConfig }; +} diff --git a/package-shared/utils/backend/config/grab-main-config.ts b/package-shared/utils/backend/config/grab-main-config.ts new file mode 100644 index 0000000..9ce18be --- /dev/null +++ b/package-shared/utils/backend/config/grab-main-config.ts @@ -0,0 +1,18 @@ +import { SiteConfigMain } from "../../../types"; +import grabConfig from "./grab-config"; + +type Params = { + userId?: string | number; +}; + +type Return = { + appMainConfig: SiteConfigMain; + userMainConfig?: SiteConfigMain; +}; + +export default function grabMainConfig(params?: Params): Return { + const { appConfig } = grabConfig(); + const { userConfig } = grabConfig({ userId: params?.userId }); + + return { appMainConfig: appConfig.main, userMainConfig: userConfig?.main }; +} diff --git a/package-shared/utils/backend/config/update-user-config.ts b/package-shared/utils/backend/config/update-user-config.ts new file mode 100644 index 0000000..4a39655 --- /dev/null +++ b/package-shared/utils/backend/config/update-user-config.ts @@ -0,0 +1,50 @@ +import fs from "fs"; +import grabDirNames from "../names/grab-dir-names"; +import grabConfig from "./grab-config"; +import _ from "lodash"; +import { SiteConfig } from "../../../types"; + +type Params = { + userId?: string | number; + newConfig?: SiteConfig; +}; + +type Return = { + success?: boolean; + msg?: string; +}; + +export default function updateUserConfig({ + newConfig, + userId, +}: Params): Return { + if (!userId || !newConfig) { + return { + success: false, + msg: `UserID or newConfig not provided`, + }; + } + + const { userConfigJSONFilePath } = grabDirNames({ + userId, + }); + + if (!userConfigJSONFilePath || !fs.existsSync(userConfigJSONFilePath)) { + return { + success: false, + msg: `userConfigJSONFilePath not found!`, + 
}; + } + + const { userConfig: existingUserConfig } = grabConfig({ userId }); + + const updateConfig = _.merge(existingUserConfig, newConfig); + + fs.writeFileSync( + userConfigJSONFilePath, + JSON.stringify(updateConfig), + "utf-8" + ); + + return { success: true }; +} diff --git a/package-shared/utils/backend/names/grab-db-full-name.ts b/package-shared/utils/backend/names/grab-db-full-name.ts deleted file mode 100644 index 84d5dd8..0000000 --- a/package-shared/utils/backend/names/grab-db-full-name.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { DATASQUIREL_LoggedInUser, UserType } from "../../../types"; - -type Param = { - user?: DATASQUIREL_LoggedInUser | UserType; - userId?: string | number | null; - dbSlug?: string; -}; - -export default function grabUserDbFullName({ dbSlug, user, userId }: Param) { - const finalUserId = user?.id || userId; - - if (!finalUserId || !dbSlug) - throw new Error( - `Couldn't grab full DB name. Missing parameters finalUserId || dbSlug` - ); - - if (dbSlug.match(/[^a-zA-Z0-9-_]/)) { - throw new Error(`Invalid Database slug`); - } - - return `datasquirel_user_${finalUserId}_${dbSlug}`; -} diff --git a/package-shared/utils/backend/names/grab-dir-names.ts b/package-shared/utils/backend/names/grab-dir-names.ts index 58bfbdb..32c3566 100644 --- a/package-shared/utils/backend/names/grab-dir-names.ts +++ b/package-shared/utils/backend/names/grab-dir-names.ts @@ -5,24 +5,69 @@ type Param = { user?: DATASQUIREL_LoggedInUser | UserType; userId?: string | number | null; appDir?: string; + dataDir?: string; }; export default function grabDirNames(param?: Param) { const appDir = param?.appDir || process.env.DSQL_APP_DIR; - const STATIC_ROOT = process.env.DSQL_STATIC_SERVER_DIR || "/static"; + const DATA_DIR = param?.dataDir || process.env.DSQL_DATA_DIR || "/data"; const finalUserId = param?.user?.id || param?.userId; - const publicImagesDir = path.join(STATIC_ROOT, `images`); - if (!appDir) throw new Error("Please provide the `DSQL_APP_DIR` env variable."); - const schemasDir = - process.env.DSQL_DB_SCHEMA_DIR || - path.join(appDir, "jsonData", "dbSchemas"); + if (!DATA_DIR) + throw new Error("Please provide the `DATA_DIR` env variable."); + + const STATIC_ROOT = path.join(DATA_DIR, "static"); + const publicImagesDir = path.join(STATIC_ROOT, `images`); + + const publicDir = path.join(appDir, "public"); + const publicSSLDir = path.join(publicDir, "documents", "ssl"); + const appSSLDir = path.join(appDir, "ssl"); + const mainSSLDir = path.join(DATA_DIR, "ssl"); + + const privateDataDir = path.join(DATA_DIR, "private"); + + /** + * # DB Dir names + * @description Database related Directories + */ + const mainDbDataDir = path.join(DATA_DIR, "db"); + const mainDbGrastateDatFile = path.join(mainDbDataDir, "grastate.dat"); + const replica1DbDataDir = path.join(DATA_DIR, "replica-1"); + + const mariadbMainConfigDir = path.join(DATA_DIR, "db-config", "main"); + const mariadbReplicaConfigDir = path.join(DATA_DIR, "db-config", "replica"); + const maxscaleConfigDir = path.join(DATA_DIR, "db-config", "maxscale"); + + const mariadbMainConfigFile = path.join( + mariadbMainConfigDir, + "default.cnf" + ); + const mariadbReplicaConfigFile = path.join( + mariadbReplicaConfigDir, + "default.cnf" + ); + const galeraConfigFile = path.join(mariadbMainConfigDir, "galera.cnf"); + const galeraReplicaConfigFile = path.join( + mariadbReplicaConfigDir, + "galera.cnf" + ); + const maxscaleConfigFile = path.join(maxscaleConfigDir, "maxscale.cnf"); + + /** + * # Schema Dir names + * @description + */ + 
const oldSchemasDir = path.join(appDir, "jsonData", "dbSchemas"); + const appSchemaJSONFile = path.join(oldSchemasDir, "1.json"); const tempDirName = ".tmp"; - if (!schemasDir) + const appConfigDir = path.join(appDir, "jsonData", "config"); + const appConfigJSONFile = path.join(appConfigDir, "app-config.json"); + + if (!privateDataDir) throw new Error( "Please provide the `DSQL_DB_SCHEMA_DIR` env variable." ); @@ -30,31 +75,35 @@ export default function grabDirNames(param?: Param) { const pakageSharedDir = path.join(appDir, `package-shared`); const mainDbTypeDefFile = path.join(pakageSharedDir, `types/dsql.ts`); - const mainShemaJSONFilePath = path.join(schemasDir, `main.json`); + const mainShemaJSONFilePath = path.join(oldSchemasDir, `main.json`); const defaultTableFieldsJSONFilePath = path.join( pakageSharedDir, `data/defaultFields.json` ); - const usersSchemaDir = path.join(schemasDir, `users`); - const targetUserSchemaDir = finalUserId + const usersSchemaDir = path.join(privateDataDir, `users`); + const targetUserPrivateDir = finalUserId ? path.join(usersSchemaDir, `user-${finalUserId}`) : undefined; - const userTempSQLFilePath = targetUserSchemaDir - ? path.join(targetUserSchemaDir, `tmp.sql`) + const userTempSQLFilePath = targetUserPrivateDir + ? path.join(targetUserPrivateDir, `tmp.sql`) + : undefined; + const userMainShemaJSONFilePath = targetUserPrivateDir + ? path.join(targetUserPrivateDir, `main.json`) : undefined; - const userDirPath = finalUserId - ? path.join(usersSchemaDir, `user-${finalUserId}`) + const userConfigJSONFilePath = targetUserPrivateDir + ? path.join(targetUserPrivateDir, `config.json`) : undefined; - const userSchemaMainJSONFilePath = userDirPath - ? path.join(userDirPath, `main.json`) + + const userSchemaMainJSONFilePath = targetUserPrivateDir + ? path.join(targetUserPrivateDir, `main.json`) : undefined; - const userPrivateMediaDir = userDirPath - ? path.join(userDirPath, `media`) + const userPrivateMediaDir = targetUserPrivateDir + ? path.join(targetUserPrivateDir, `media`) : undefined; - const userPrivateExportsDir = userDirPath - ? path.join(userDirPath, `export`) + const userPrivateExportsDir = targetUserPrivateDir + ? path.join(targetUserPrivateDir, `export`) : undefined; const userPrivateSQLExportsDir = userPrivateExportsDir ? path.join(userPrivateExportsDir, `sql`) @@ -91,6 +140,8 @@ export default function grabDirNames(param?: Param) { appDir, "test.docker-compose.yaml" ); + const dbDockerComposeFile = path.join(appDir, "db.docker-compose.yml"); + const dbDockerComposeFileAlt = path.join(appDir, "db.docker-compose.yaml"); const extraDockerComposeFile = path.join( appDir, "extra.docker-compose.yml" @@ -105,16 +156,29 @@ export default function grabDirNames(param?: Param) { const envFile = path.join(appDir, ".env"); const testEnvFile = path.join(appDir, "test.env"); + /** + * # Backup Dir names + * @description + */ + const mainBackupDir = path.join(DATA_DIR, "backups"); + const userBackupDir = targetUserPrivateDir + ? 
path.join(targetUserPrivateDir, `backups`) + : undefined; + + const sqlBackupDirName = `sql`; + const schemasBackupDirName = `schema`; + return { appDir, - schemasDir, - userDirPath, + privateDataDir, + oldSchemasDir, + userConfigJSONFilePath, mainShemaJSONFilePath, mainDbTypeDefFile, tempDirName, defaultTableFieldsJSONFilePath, usersSchemaDir, - targetUserSchemaDir, + targetUserPrivateDir, userSchemaMainJSONFilePath, userPrivateMediaDir, userPrivateExportsDir, @@ -137,5 +201,32 @@ export default function grabDirNames(param?: Param) { testEnvFile, userPublicMediaDir, userTempSQLFilePath, + STATIC_ROOT, + appConfigJSONFile, + appConfigDir, + mariadbMainConfigDir, + mariadbMainConfigFile, + maxscaleConfigDir, + mariadbReplicaConfigDir, + DATA_DIR, + publicDir, + publicSSLDir, + appSSLDir, + maxscaleConfigFile, + mariadbReplicaConfigFile, + mainSSLDir, + mainDbDataDir, + replica1DbDataDir, + galeraConfigFile, + galeraReplicaConfigFile, + dbDockerComposeFile, + dbDockerComposeFileAlt, + mainDbGrastateDatFile, + appSchemaJSONFile, + mainBackupDir, + userBackupDir, + sqlBackupDirName, + schemasBackupDirName, + userMainShemaJSONFilePath, }; } diff --git a/package-shared/utils/backend/names/grab-ip-addresses.ts b/package-shared/utils/backend/names/grab-ip-addresses.ts new file mode 100644 index 0000000..a19ba60 --- /dev/null +++ b/package-shared/utils/backend/names/grab-ip-addresses.ts @@ -0,0 +1,13 @@ +import grabDockerResourceIPNumbers from "../../grab-docker-resource-ip-numbers"; + +export default function grabIPAddresses() { + const globalIPPrefix = process.env.DSQL_NETWORK_IP_PREFIX || "172.72.0"; + const { cron, db, maxscale, postDbSetup, web } = + grabDockerResourceIPNumbers(); + + const webAppIP = `${globalIPPrefix}.${web}`; + const appCronIP = `${globalIPPrefix}.${cron}`; + const maxScaleIP = `${globalIPPrefix}.${maxscale}`; + + return { webAppIP, appCronIP, maxScaleIP, globalIPPrefix }; +} diff --git a/package-shared/utils/cookies-actions.ts b/package-shared/utils/cookies-actions.ts new file mode 100644 index 0000000..954b1ab --- /dev/null +++ b/package-shared/utils/cookies-actions.ts @@ -0,0 +1,74 @@ +import * as http from "http"; +import { CookieOptions } from "../types"; +import { CookieNames } from "../dict/cookie-names"; + +export function setCookie( + res: http.ServerResponse, + name: (typeof CookieNames)[keyof typeof CookieNames], + value: string, + options: CookieOptions = {} +): void { + const cookieParts: string[] = [ + `${encodeURIComponent(name)}=${encodeURIComponent(value)}`, + ]; + + if (options.expires) { + cookieParts.push(`Expires=${options.expires.toUTCString()}`); + } + if (options.maxAge !== undefined) { + cookieParts.push(`Max-Age=${options.maxAge}`); + } + if (options.path) { + cookieParts.push(`Path=${options.path}`); + } + if (options.domain) { + cookieParts.push(`Domain=${options.domain}`); + } + if (options.secure) { + cookieParts.push("Secure"); + } + if (options.httpOnly) { + cookieParts.push("HttpOnly"); + } + + res.setHeader("Set-Cookie", cookieParts.join("; ")); +} + +export function getCookie( + req: http.IncomingMessage, + name: (typeof CookieNames)[keyof typeof CookieNames] +): string | null { + const cookieHeader = req.headers.cookie; + if (!cookieHeader) return null; + + const cookies = cookieHeader + .split(";") + .reduce((acc: { [key: string]: string }, cookie: string) => { + const [key, val] = cookie.trim().split("=").map(decodeURIComponent); + acc[key] = val; + return acc; + }, {}); + + return cookies[name] || null; +} + +export function 
updateCookie( + res: http.ServerResponse, + name: (typeof CookieNames)[keyof typeof CookieNames], + value: string, + options: CookieOptions = {} +): void { + setCookie(res, name, value, options); +} + +export function deleteCookie( + res: http.ServerResponse, + name: (typeof CookieNames)[keyof typeof CookieNames], + options: CookieOptions = {} +): void { + setCookie(res, name, "", { + ...options, + expires: new Date(0), + maxAge: 0, + }); +} diff --git a/package-shared/utils/create-user-sql-user.ts b/package-shared/utils/create-user-sql-user.ts new file mode 100644 index 0000000..8d0f949 --- /dev/null +++ b/package-shared/utils/create-user-sql-user.ts @@ -0,0 +1,72 @@ +import { generate } from "generate-password"; +import dbHandler from "../functions/backend/dbHandler"; +import dsqlCrud from "./data-fetching/crud"; +import { DSQL_DATASQUIREL_USERS, DsqlTables } from "../types/dsql"; +import encrypt from "../functions/dsql/encrypt"; +import { UserType } from "../types"; +import grabUserMainSqlUserName from "./grab-user-main-sql-user-name"; +import grabDbNames from "./grab-db-names"; +import { createNewSQLUser } from "../functions/web-app/mariadb-user/handle-mariadb-user-creation"; + +type Params = { + user: UserType; +}; + +type Return = { + fullName?: string; + host?: string; + username?: string; + password?: string; +}; + +export default async function createUserSQLUser(user: UserType) { + const { + fullName, + host, + username: mariaDBUsername, + webHost, + } = grabUserMainSqlUserName({ user }); + const { userDbPrefix } = grabDbNames({ user }); + + await dbHandler({ + query: `DROP USER IF EXISTS '${mariaDBUsername}'@'${webHost}'`, + noErrorLogs: true, + }); + + const newPassword = generate({ length: 32 }); + + await createNewSQLUser({ + host: webHost, + password: newPassword, + username: mariaDBUsername, + }); + + const updateWebHostGrants = (await dbHandler({ + query: `GRANT ALL PRIVILEGES ON \`${userDbPrefix.replace( + /\_/g, + "\\_" + )}%\`.* TO '${mariaDBUsername}'@'${webHost}'`, + })) as any[]; + + const updateUser = await dsqlCrud< + DSQL_DATASQUIREL_USERS, + (typeof DsqlTables)[number] + >({ + action: "update", + table: "users", + targetField: "id", + targetValue: user.id, + data: { + mariadb_host: webHost, + mariadb_pass: encrypt({ data: newPassword }) || undefined, + mariadb_user: mariaDBUsername, + }, + }); + + return { + fullName, + host, + username: mariaDBUsername, + password: newPassword, + }; +} diff --git a/package-shared/utils/data-fetching/crud-get.ts b/package-shared/utils/data-fetching/crud-get.ts index 5488eb0..868bbcf 100644 --- a/package-shared/utils/data-fetching/crud-get.ts +++ b/package-shared/utils/data-fetching/crud-get.ts @@ -1,19 +1,25 @@ import sqlGenerator from "../../functions/dsql/sql/sql-generator"; -import { DsqlCrudParam } from "../../types"; +import { APIResponseObject, DsqlCrudParam } from "../../types"; import connDbHandler, { ConnDBHandlerQueryObject } from "../db/conn-db-handler"; -import { DsqlCrudReturn } from "./crud"; -export default async function dsqlCrudGet({ +export default async function < + T extends { [key: string]: any } = { [key: string]: any } +>({ table, query, count, countOnly, -}: DsqlCrudParam): Promise { + dbFullName, +}: Omit< + DsqlCrudParam, + "action" | "data" | "sanitize" +>): Promise { let queryObject: ReturnType> | undefined; queryObject = sqlGenerator({ tableName: table, genObject: query, + dbFullName, }); const DB_CONN = global.DSQL_READ_ONLY_DB_CONN || global.DSQL_DB_CONN; @@ -31,6 +37,7 @@ export default async 
function dsqlCrudGet({ tableName: table, genObject: query, count: true, + dbFullName, }) : undefined; @@ -55,8 +62,13 @@ export default async function dsqlCrudGet({ return { success: isSuccess, payload: isSuccess ? (countOnly ? null : res[0]) : null, + batchPayload: isSuccess ? (countOnly ? null : res) : null, error: isSuccess ? undefined : res?.error, - queryObject, + errors: res?.errors, + queryObject: { + sql: queryObject?.string, + params: queryObject?.values, + }, count: isSuccess ? res[1]?.[0]?.["COUNT(*)"] ? res[1][0]["COUNT(*)"] diff --git a/package-shared/utils/data-fetching/crud.ts b/package-shared/utils/data-fetching/crud.ts index 6e7fff2..ccb3b58 100644 --- a/package-shared/utils/data-fetching/crud.ts +++ b/package-shared/utils/data-fetching/crud.ts @@ -1,20 +1,21 @@ -import post from "../../actions/post"; +import sqlDeleteGenerator from "../../functions/dsql/sql/sql-delete-generator"; import sqlGenerator from "../../functions/dsql/sql/sql-generator"; -import { DsqlCrudParam, PostReturn } from "../../types"; -// import dsqlCrudBatchGet from "./crud-batch-get"; +import { + APIResponseObject, + DsqlCrudParam, + DSQLErrorObject, + PostInsertReturn, + PostReturn, +} from "../../types"; import dsqlCrudGet from "./crud-get"; - -export type DsqlCrudReturn = - | (PostReturn & { - queryObject?: ReturnType>; - count?: number; - batchPayload?: any[][] | null; - }) - | null; +import connDbHandler from "../db/conn-db-handler"; +import addDbEntry from "../../functions/backend/db/addDbEntry"; +import updateDbEntry from "../../functions/backend/db/updateDbEntry"; export default async function dsqlCrud< - T extends { [key: string]: any } = { [key: string]: any } ->(params: DsqlCrudParam): Promise { + T extends { [key: string]: any } = { [key: string]: any }, + K extends string = string +>(params: DsqlCrudParam): Promise { const { action, data, @@ -23,8 +24,17 @@ export default async function dsqlCrud< sanitize, targetField, targetId, + dbFullName, + deleteData, + batchData, + deleteKeyValues, } = params; - const finalData = sanitize ? sanitize(data) : data; + const finalData = (sanitize ? sanitize({ data }) : data) as T; + const finalBatchData = ( + sanitize ? sanitize({ batchData }) : batchData + ) as T[]; + + const DB_CONN = global.DSQL_READ_ONLY_DB_CONN || global.DSQL_DB_CONN; switch (action) { case "get": @@ -34,41 +44,59 @@ export default async function dsqlCrud< // return await dsqlCrudBatchGet(params); case "insert": - return await post({ - query: { - action: "insert", - table, - data: finalData, - }, - forceLocal: true, + const INSERT_RESULT = await addDbEntry({ + data: finalData, + batchData: finalBatchData, + tableName: table, + dbFullName, }); + return INSERT_RESULT; case "update": delete data?.id; - return await post({ - query: { - action: "update", - table, - identifierColumnName: targetField || "id", - identifierValue: String(targetValue || targetId), - data: finalData, - }, - forceLocal: true, + const UPDATE_RESULT = await updateDbEntry({ + data: finalData, + tableName: table, + dbFullName, + identifierColumnName: (targetField || "id") as string, + identifierValue: String(targetValue || targetId), }); + return UPDATE_RESULT; + case "delete": - return await post({ - query: { - action: "delete", - table, - identifierColumnName: targetField || "id", - identifierValue: String(targetValue || targetId), - }, - forceLocal: true, + const deleteQuery = sqlDeleteGenerator({ + data: targetId + ? { id: targetId } + : targetField && targetValue + ? 
{ [targetField]: targetValue } + : deleteData, + tableName: table, + dbFullName, + deleteKeyValues, }); + const res = (await connDbHandler( + DB_CONN, + deleteQuery?.query, + deleteQuery?.values + )) as PostInsertReturn; + + return { + success: Boolean(res.affectedRows), + payload: res, + queryObject: { + sql: deleteQuery?.query || "", + params: deleteQuery?.values || [], + }, + }; + default: - return null; + return { + success: false, + payload: undefined, + msg: "Invalid action", + }; } } diff --git a/package-shared/utils/data-fetching/method-crud.ts b/package-shared/utils/data-fetching/method-crud.ts index 236e50f..83c31a3 100644 --- a/package-shared/utils/data-fetching/method-crud.ts +++ b/package-shared/utils/data-fetching/method-crud.ts @@ -150,7 +150,10 @@ export default async function dsqlMethodCrud< payload: GET_RESULT?.payload, msg: GET_RESULT?.msg, error: GET_RESULT?.error, - queryObject: GET_RESULT?.queryObject, + queryObject: { + string: GET_RESULT?.queryObject?.sql || "", + values: GET_RESULT?.queryObject?.params || [], + }, }; break; diff --git a/package-shared/utils/db/conn-db-handler.ts b/package-shared/utils/db/conn-db-handler.ts index 5bd8c7e..c9daace 100644 --- a/package-shared/utils/db/conn-db-handler.ts +++ b/package-shared/utils/db/conn-db-handler.ts @@ -1,12 +1,16 @@ import { ServerlessMysql } from "serverless-mysql"; import debugLog from "../logging/debug-log"; +import { DSQLErrorObject } from "../../types"; export type ConnDBHandlerQueryObject = { query: string; values?: (string | number | undefined)[]; }; -type Return = ReturnType | null | { error: string }; +type Return = + | ReturnType + | null + | { error?: string; errors?: DSQLErrorObject[] }; /** * # Run Query From MySQL Connection @@ -32,6 +36,8 @@ export default async function connDbHandler( if (!conn) throw new Error("No Connection Found!"); if (!query) throw new Error("Query String Required!"); + let queryErrorArray: DSQLErrorObject[] = []; + if (typeof query == "string") { const res = await conn.query(trimQuery(query), values); @@ -48,8 +54,14 @@ export default async function connDbHandler( const resArray = []; for (let i = 0; i < query.length; i++) { + let currentQueryError: DSQLErrorObject = {}; + try { const queryObj = query[i]; + + currentQueryError.sql = queryObj.query; + currentQueryError.sqlValues = queryObj.values; + const queryObjRes = await conn.query( trimQuery(queryObj.query), queryObj.values @@ -70,6 +82,8 @@ export default async function connDbHandler( error as Error ); resArray.push(null); + currentQueryError["error"] = error.message; + queryErrorArray.push(currentQueryError); } } @@ -81,6 +95,12 @@ export default async function connDbHandler( }); } + if (queryErrorArray[0]) { + return { + errors: queryErrorArray, + }; + } + return resArray as any; } else { return null; diff --git a/package-shared/utils/db/schema/data-type-constructor.ts b/package-shared/utils/db/schema/data-type-constructor.ts new file mode 100644 index 0000000..075d0b3 --- /dev/null +++ b/package-shared/utils/db/schema/data-type-constructor.ts @@ -0,0 +1,29 @@ +import dataTypeParser, { DataTypesWithNumbers } from "./data-type-parser"; + +export default function dataTypeConstructor( + dataType: string, + limit?: number, + decimal?: number +) { + let finalType = dataTypeParser(dataType).type; + + if (!DataTypesWithNumbers.includes(finalType)) { + return finalType; + } + + if (finalType == "VARCHAR") { + return (finalType += `(${limit || 250})`); + } + + if ( + finalType == "DECIMAL" || + finalType == "FLOAT" || + 
finalType == "DOUBLE" + ) { + return (finalType += `(${limit || 10},${decimal || 2})`); + } + + if (limit && !decimal) finalType += `(${limit})`; + if (limit && decimal) finalType += `(${limit},${decimal})`; + return finalType; +} diff --git a/package-shared/utils/db/schema/data-type-parser.ts b/package-shared/utils/db/schema/data-type-parser.ts new file mode 100644 index 0000000..d16e3df --- /dev/null +++ b/package-shared/utils/db/schema/data-type-parser.ts @@ -0,0 +1,54 @@ +import DataTypes from "../../../data/data-types"; +import numberfy from "../../numberfy"; + +export const DataTypesWithNumbers: (typeof DataTypes)[number]["name"][] = [ + "DECIMAL", + "DOUBLE", + "FLOAT", + "VARCHAR", +]; + +export const DataTypesWithTwoNumbers: (typeof DataTypes)[number]["name"][] = [ + "DECIMAL", + "DOUBLE", + "FLOAT", +]; + +type Return = { + type: (typeof DataTypes)[number]["name"]; + limit?: number; + decimal?: number; +}; + +export default function dataTypeParser(dataType?: string): Return { + if (!dataType) { + return { + type: "VARCHAR", + limit: 250, + }; + } + + const dataTypeArray = dataType.split("("); + const type = dataTypeArray[0] as (typeof DataTypes)[number]["name"]; + const number = dataTypeArray[1] as string | undefined; + + if (!DataTypesWithNumbers.includes(type)) { + return { + type, + }; + } + + if (number?.match(/,/)) { + const numberArr = number.split(","); + return { + type, + limit: numberfy(numberArr[0]), + decimal: numberArr[1] ? numberfy(numberArr[1]) : undefined, + }; + } + + return { + type, + limit: number ? numberfy(number) : undefined, + }; +} diff --git a/package-shared/utils/db/schema/grab-target-db-schema-index.ts b/package-shared/utils/db/schema/grab-target-db-schema-index.ts new file mode 100644 index 0000000..2306a92 --- /dev/null +++ b/package-shared/utils/db/schema/grab-target-db-schema-index.ts @@ -0,0 +1,36 @@ +import { + DSQL_ChildrenDatabaseObject, + DSQL_ChildrenTablesType, + DSQL_DatabaseSchemaType, +} from "../../../types"; + +type Params = { + dbs?: DSQL_DatabaseSchemaType[]; + dbSchema?: DSQL_DatabaseSchemaType; + childDbSchema?: DSQL_ChildrenDatabaseObject; + childTableSchema?: DSQL_ChildrenTablesType; + dbSlug?: string; + dbFullName?: string; +}; + +export default function grabTargetDatabaseSchemaIndex({ + dbs, + dbFullName, + dbSlug, + dbSchema, + childDbSchema, + childTableSchema, +}: Params): number | undefined { + if (!dbs) return undefined; + + const targetDbIndex = dbs.findIndex( + (db) => + (dbSlug && dbSlug == db.dbSlug) || + (dbFullName && dbFullName == db.dbFullName) || + (dbSchema && dbSchema.dbSlug && dbSchema.dbSlug == db.dbSlug) + ); + + if (targetDbIndex < 0) return undefined; + + return targetDbIndex; +} diff --git a/package-shared/utils/db/schema/grab-target-table-schema-index.ts b/package-shared/utils/db/schema/grab-target-table-schema-index.ts new file mode 100644 index 0000000..a296a58 --- /dev/null +++ b/package-shared/utils/db/schema/grab-target-table-schema-index.ts @@ -0,0 +1,29 @@ +import { DSQL_ChildrenTablesType, DSQL_TableSchemaType } from "../../../types"; + +type Params = { + tables?: DSQL_TableSchemaType[]; + tableSchema?: DSQL_TableSchemaType; + childTableSchema?: DSQL_ChildrenTablesType; + tableName?: string; +}; + +export default function grabTargetTableSchemaIndex({ + tables, + tableName, + tableSchema, + childTableSchema, +}: Params): number | undefined { + if (!tables) return undefined; + + const targetTableIndex = tables.findIndex( + (tbl) => + (tableName && tableName == tbl.tableName) || + (tableSchema && + 
tableSchema.tableName && + tableSchema.tableName == tbl.tableName) + ); + + if (targetTableIndex < 0) return undefined; + + return targetTableIndex; +} diff --git a/package-shared/utils/db/schema/grab-target-table-schema.ts b/package-shared/utils/db/schema/grab-target-table-schema.ts new file mode 100644 index 0000000..ab7bd01 --- /dev/null +++ b/package-shared/utils/db/schema/grab-target-table-schema.ts @@ -0,0 +1,16 @@ +import { DSQL_TableSchemaType } from "../../../types"; + +type Params = { + tables: DSQL_TableSchemaType[]; + tableName?: string; +}; + +export default function grabTargetTableSchema({ + tables, + tableName, +}: Params): DSQL_TableSchemaType | undefined { + const targetTable = tables.find( + (tbl) => tableName && tableName == tbl.tableName + ); + return targetTable; +} diff --git a/package-shared/utils/db/schema/grab-text-field-type.ts b/package-shared/utils/db/schema/grab-text-field-type.ts new file mode 100644 index 0000000..b66f122 --- /dev/null +++ b/package-shared/utils/db/schema/grab-text-field-type.ts @@ -0,0 +1,16 @@ +import { DSQL_FieldSchemaType, TextFieldTypesArray } from "../../../types"; + +export default function grabTextFieldType( + field: DSQL_FieldSchemaType, + nullReturn?: boolean +): (typeof TextFieldTypesArray)[number]["value"] | undefined { + if (field.richText) return "richText"; + if (field.json) return "json"; + if (field.yaml) return "yaml"; + if (field.html) return "html"; + if (field.css) return "css"; + if (field.javascript) return "javascript"; + if (field.shell) return "shell"; + if (nullReturn) return undefined; + return "plain"; +} diff --git a/package-shared/utils/db/schema/resolve-schema-children-handle-children-databases.ts b/package-shared/utils/db/schema/resolve-schema-children-handle-children-databases.ts new file mode 100644 index 0000000..1b820ee --- /dev/null +++ b/package-shared/utils/db/schema/resolve-schema-children-handle-children-databases.ts @@ -0,0 +1,132 @@ +import { + grabPrimaryRequiredDbSchema, + writeUpdatedDbSchema, +} from "../../../shell/createDbFromSchema/grab-required-database-schemas"; +import { + DSQL_ChildrenDatabaseObject, + DSQL_DatabaseSchemaType, +} from "../../../types"; +import _ from "lodash"; +import uniqueByKey from "../../unique-by-key"; + +type Params = { + currentDbSchema: DSQL_DatabaseSchemaType; + userId: string | number; +}; + +export default function ({ currentDbSchema, userId }: Params) { + const newCurrentDbSchema = _.cloneDeep(currentDbSchema); + + if (newCurrentDbSchema.childrenDatabases) { + for ( + let ch = 0; + ch < newCurrentDbSchema.childrenDatabases.length; + ch++ + ) { + const dbChildDb = newCurrentDbSchema.childrenDatabases[ch]; + + if (!dbChildDb.dbId) { + newCurrentDbSchema.childrenDatabases.splice(ch, 1, {}); + continue; + } + + const targetChildDatabase = grabPrimaryRequiredDbSchema({ + dbId: dbChildDb.dbId, + userId, + }); + + /** + * Delete child database from array if said database + * doesn't exist + */ + if (targetChildDatabase?.id && targetChildDatabase.childDatabase) { + targetChildDatabase.tables = [...newCurrentDbSchema.tables]; + writeUpdatedDbSchema({ + dbSchema: targetChildDatabase, + userId, + }); + } else { + newCurrentDbSchema.childrenDatabases?.splice(ch, 1, {}); + } + } + + newCurrentDbSchema.childrenDatabases = + uniqueByKey( + newCurrentDbSchema.childrenDatabases.filter((db) => + Boolean(db.dbId) + ), + "dbId" + ); + } + + /** + * Handle scenario where this database is a child of another + */ + if (currentDbSchema.childDatabase && 
currentDbSchema.childDatabaseDbId) { + const targetParentDatabase = grabPrimaryRequiredDbSchema({ + dbId: currentDbSchema.childDatabaseDbId, + userId, + }); + + if (!targetParentDatabase) { + return newCurrentDbSchema; + } + + /** + * Delete child Database key/values from current database if + * the parent database doesn't esit + */ + if (!targetParentDatabase?.id) { + delete newCurrentDbSchema.childDatabase; + delete newCurrentDbSchema.childDatabaseDbId; + return newCurrentDbSchema; + } + + /** + * New Child Database Object to be appended + */ + const newChildDatabaseObject: DSQL_ChildrenDatabaseObject = { + dbId: currentDbSchema.id, + }; + + /** + * Add a new Children array in the target Database if this is the + * first child to be added to said database. Else append to array + * if it exists + */ + if ( + targetParentDatabase?.id && + !targetParentDatabase.childrenDatabases?.[0] + ) { + targetParentDatabase.childrenDatabases = [newChildDatabaseObject]; + } else if ( + targetParentDatabase?.id && + targetParentDatabase.childrenDatabases?.[0] + ) { + const existingChildDb = targetParentDatabase.childrenDatabases.find( + (db) => db.dbId == currentDbSchema.id + ); + + if (!existingChildDb?.dbId) { + targetParentDatabase.childrenDatabases.push( + newChildDatabaseObject + ); + } + + targetParentDatabase.childrenDatabases = uniqueByKey( + targetParentDatabase.childrenDatabases, + "dbId" + ); + } + + /** + * Update tables for child database, which is the current database + */ + if (targetParentDatabase?.id) { + newCurrentDbSchema.tables = targetParentDatabase.tables; + writeUpdatedDbSchema({ dbSchema: targetParentDatabase, userId }); + } + } + + return newCurrentDbSchema; +} diff --git a/package-shared/utils/db/schema/resolve-schema-children-handle-children-tables.ts b/package-shared/utils/db/schema/resolve-schema-children-handle-children-tables.ts new file mode 100644 index 0000000..4a6c4e0 --- /dev/null +++ b/package-shared/utils/db/schema/resolve-schema-children-handle-children-tables.ts @@ -0,0 +1,229 @@ +import { + grabPrimaryRequiredDbSchema, + writeUpdatedDbSchema, +} from "../../../shell/createDbFromSchema/grab-required-database-schemas"; +import { + DSQL_ChildrenTablesType, + DSQL_DatabaseSchemaType, + DSQL_TableSchemaType, +} from "../../../types"; +import _ from "lodash"; +import uniqueByKey from "../../unique-by-key"; + +type Params = { + currentDbSchema: DSQL_DatabaseSchemaType; + currentTableSchema: DSQL_TableSchemaType; + currentTableSchemaIndex: number; + userId: string | number; +}; + +export default function ({ + currentDbSchema, + currentTableSchema, + currentTableSchemaIndex, + userId, +}: Params): DSQL_DatabaseSchemaType { + if (!currentDbSchema.dbFullName) { + throw new Error( + `Resolve Children tables ERROR => currentDbSchema.dbFullName not found!` + ); + } + + const newCurrentDbSchema = _.cloneDeep(currentDbSchema); + + if (newCurrentDbSchema.tables[currentTableSchemaIndex].childrenTables) { + for ( + let ch = 0; + ch < + newCurrentDbSchema.tables[currentTableSchemaIndex].childrenTables + .length; + ch++ + ) { + const childTable = + newCurrentDbSchema.tables[currentTableSchemaIndex] + .childrenTables[ch]; + + if (!childTable.dbId || !childTable.tableId) { + newCurrentDbSchema.tables[ + currentTableSchemaIndex + ].childrenTables?.splice(ch, 1, {}); + + continue; + } + + const targetChildTableParentDatabase = grabPrimaryRequiredDbSchema({ + dbId: childTable.dbId, + userId, + }); + + /** + * Delete child table from array if the parent database + * of said child table has 
been deleted or doesn't exist + */ + if (!targetChildTableParentDatabase?.dbFullName) { + newCurrentDbSchema.tables[ + currentTableSchemaIndex + ].childrenTables?.splice(ch, 1, {}); + } else { + /** + * Delete child table from array if the parent database + * exists but the target tabled has been deleted or doesn't + * exist + */ + const targetChildTableParentDatabaseTableIndex = + targetChildTableParentDatabase.tables.findIndex( + (tbl) => tbl.id == childTable.tableId + ); + + const targetChildTableParentDatabaseTable = + targetChildTableParentDatabase.tables[ + targetChildTableParentDatabaseTableIndex + ]; + + if (targetChildTableParentDatabaseTable?.childTable) { + targetChildTableParentDatabase.tables[ + targetChildTableParentDatabaseTableIndex + ].fields = [...currentTableSchema.fields]; + targetChildTableParentDatabase.tables[ + targetChildTableParentDatabaseTableIndex + ].indexes = [...(currentTableSchema.indexes || [])]; + + writeUpdatedDbSchema({ + dbSchema: targetChildTableParentDatabase, + userId, + }); + } else { + newCurrentDbSchema.tables[ + currentTableSchemaIndex + ].childrenTables?.splice(ch, 1, {}); + } + } + } + + if ( + newCurrentDbSchema.tables[currentTableSchemaIndex] + .childrenTables?.[0] + ) { + newCurrentDbSchema.tables[currentTableSchemaIndex].childrenTables = + uniqueByKey( + newCurrentDbSchema.tables[ + currentTableSchemaIndex + ].childrenTables.filter( + (tbl) => Boolean(tbl.dbId) && Boolean(tbl.tableId) + ), + "dbId" + ); + } else { + delete newCurrentDbSchema.tables[currentTableSchemaIndex] + .childrenTables; + } + } + + /** + * Handle scenario where this table is a child of another + */ + if ( + currentTableSchema.childTable && + currentTableSchema.childTableDbId && + currentTableSchema.childTableDbId + ) { + const targetParentDatabase = grabPrimaryRequiredDbSchema({ + dbId: currentTableSchema.childTableDbId, + userId, + }); + + const targetParentDatabaseTableIndex = + targetParentDatabase?.tables.findIndex( + (tbl) => tbl.id == currentTableSchema.childTableId + ); + + const targetParentDatabaseTable = + typeof targetParentDatabaseTableIndex == "number" + ? targetParentDatabaseTableIndex < 0 + ? undefined + : targetParentDatabase?.tables[ + targetParentDatabaseTableIndex + ] + : undefined; + + /** + * Delete child Table key/values from current database if + * the parent database doesn't esit + */ + if ( + !targetParentDatabase?.dbFullName || + !targetParentDatabaseTable?.tableName + ) { + delete newCurrentDbSchema.tables[currentTableSchemaIndex] + .childTable; + delete newCurrentDbSchema.tables[currentTableSchemaIndex] + .childTableDbId; + delete newCurrentDbSchema.tables[currentTableSchemaIndex] + .childTableId; + delete newCurrentDbSchema.tables[currentTableSchemaIndex] + .childTableDbId; + + return newCurrentDbSchema; + } + + /** + * New Child Database Table Object to be appended + */ + const newChildDatabaseTableObject: DSQL_ChildrenTablesType = { + tableId: currentTableSchema.id, + dbId: newCurrentDbSchema.id, + }; + + /** + * Add a new Children array in the target table schema if this is the + * first child to be added to said table schema. 
Else append to array + * if it exists + */ + if ( + typeof targetParentDatabaseTableIndex == "number" && + !targetParentDatabaseTable.childrenTables?.[0] + ) { + targetParentDatabase.tables[ + targetParentDatabaseTableIndex + ].childrenTables = [newChildDatabaseTableObject]; + } else if ( + typeof targetParentDatabaseTableIndex == "number" && + targetParentDatabaseTable.childrenTables?.[0] + ) { + const existingChildDbTable = + targetParentDatabaseTable.childrenTables.find( + (tbl) => + tbl.dbId == newCurrentDbSchema.id && + tbl.tableId == currentTableSchema.id + ); + if (!existingChildDbTable?.tableId) { + targetParentDatabase.tables[ + targetParentDatabaseTableIndex + ].childrenTables?.push(newChildDatabaseTableObject); + } + + targetParentDatabase.tables[ + targetParentDatabaseTableIndex + ].childrenTables = uniqueByKey( + targetParentDatabase.tables[targetParentDatabaseTableIndex] + .childrenTables || [], + ["dbId", "tableId"] + ); + } + + /** + * Update fields and indexes for child table, which is the + * current table + */ + if (targetParentDatabaseTable?.tableName) { + newCurrentDbSchema.tables[currentTableSchemaIndex].fields = + targetParentDatabaseTable.fields; + newCurrentDbSchema.tables[currentTableSchemaIndex].indexes = + targetParentDatabaseTable.indexes; + + writeUpdatedDbSchema({ dbSchema: targetParentDatabase, userId }); + } + } + + return newCurrentDbSchema; +} diff --git a/package-shared/utils/db/schema/resolve-schema-children.ts b/package-shared/utils/db/schema/resolve-schema-children.ts new file mode 100644 index 0000000..c14b013 --- /dev/null +++ b/package-shared/utils/db/schema/resolve-schema-children.ts @@ -0,0 +1,32 @@ +import fs from "fs"; +import { DSQL_DatabaseSchemaType } from "../../../types"; +import _ from "lodash"; +import resolveSchemaChildrenHandleChildrenDatabases from "./resolve-schema-children-handle-children-databases"; +import resolveSchemaChildrenHandleChildrenTables from "./resolve-schema-children-handle-children-tables"; + +type Params = { + dbSchema: DSQL_DatabaseSchemaType; + userId: string | number; +}; + +export default function resolveSchemaChildren({ dbSchema, userId }: Params) { + let newDbSchema = _.cloneDeep(dbSchema); + + newDbSchema = resolveSchemaChildrenHandleChildrenDatabases({ + currentDbSchema: newDbSchema, + userId, + }); + + for (let t = 0; t < newDbSchema.tables.length; t++) { + const tableSchema = newDbSchema.tables[t]; + + newDbSchema = resolveSchemaChildrenHandleChildrenTables({ + currentDbSchema: newDbSchema, + currentTableSchema: tableSchema, + currentTableSchemaIndex: t, + userId, + }); + } + + return newDbSchema; +} diff --git a/package-shared/utils/db/schema/resolve-schema-foreign-keys.ts b/package-shared/utils/db/schema/resolve-schema-foreign-keys.ts new file mode 100644 index 0000000..7067849 --- /dev/null +++ b/package-shared/utils/db/schema/resolve-schema-foreign-keys.ts @@ -0,0 +1,38 @@ +import { DSQL_DatabaseSchemaType } from "../../../types"; +import _ from "lodash"; + +type Params = { + dbSchema: DSQL_DatabaseSchemaType; + userId: string | number; +}; + +export default function resolveSchemaForeignKeys({ dbSchema, userId }: Params) { + let newDbSchema = _.cloneDeep(dbSchema); + + for (let t = 0; t < newDbSchema.tables.length; t++) { + const tableSchema = newDbSchema.tables[t]; + + for (let f = 0; f < tableSchema.fields.length; f++) { + const fieldSchema = tableSchema.fields[f]; + + if (fieldSchema.foreignKey?.destinationTableColumnName) { + const fkDestinationTableIndex = newDbSchema.tables.findIndex( + (tbl) => + 
tbl.tableName == + fieldSchema.foreignKey?.destinationTableName + ); + + /** + * Delete current Foreign Key if related table doesn't exist + * or has been deleted + */ + if (fkDestinationTableIndex < 0) { + delete newDbSchema.tables[t].fields[f].foreignKey; + continue; + } + } + } + } + + return newDbSchema; +} diff --git a/package-shared/utils/db/schema/resolve-users-schema-ids.ts b/package-shared/utils/db/schema/resolve-users-schema-ids.ts new file mode 100644 index 0000000..eb9611c --- /dev/null +++ b/package-shared/utils/db/schema/resolve-users-schema-ids.ts @@ -0,0 +1,78 @@ +import fs from "fs"; +import grabDirNames from "../../backend/names/grab-dir-names"; +import _n from "../../numberfy"; +import path from "path"; +import { DSQL_DatabaseSchemaType } from "../../../types"; +import _ from "lodash"; +import EJSON from "../../ejson"; +import { writeUpdatedDbSchema } from "../../../shell/createDbFromSchema/grab-required-database-schemas"; + +type Params = { + userId: string | number; + dbId?: string | number; +}; + +export default function resolveUsersSchemaIDs({ userId, dbId }: Params) { + const { targetUserPrivateDir, tempDirName } = grabDirNames({ userId }); + if (!targetUserPrivateDir) return false; + + const schemaDirFilesFolders = fs.readdirSync(targetUserPrivateDir); + + for (let i = 0; i < schemaDirFilesFolders.length; i++) { + const fileOrFolderName = schemaDirFilesFolders[i]; + if (!fileOrFolderName.match(/^\d+.json/)) continue; + const fileDbId = _n(fileOrFolderName.split(".").shift()); + if (!fileDbId) continue; + + if (dbId && _n(dbId) !== fileDbId) { + continue; + } + + const schemaFullPath = path.join( + targetUserPrivateDir, + fileOrFolderName + ); + + if (!fs.existsSync(schemaFullPath)) continue; + + const dbSchema = EJSON.parse( + fs.readFileSync(schemaFullPath, "utf-8") + ) as DSQL_DatabaseSchemaType | undefined; + + if (!dbSchema) continue; + + let newDbSchema = resolveUserDatabaseSchemaIDs({ dbSchema }); + + writeUpdatedDbSchema({ dbSchema: newDbSchema, userId }); + } +} + +export function resolveUserDatabaseSchemaIDs({ + dbSchema, +}: { + dbSchema: DSQL_DatabaseSchemaType; +}) { + let newDbSchema = _.cloneDeep(dbSchema); + + if (!newDbSchema.id) newDbSchema.id = dbSchema.id; + + newDbSchema.tables.forEach((tbl, index) => { + if (!tbl.id) { + newDbSchema.tables[index].id = index + 1; + } + + tbl.fields.forEach((fld, flIndx) => { + if (!fld.id) { + newDbSchema.tables[index].fields[flIndx].id = flIndx + 1; + } + }); + + tbl.indexes?.forEach((indx, indIndx) => { + if (!indx.id && newDbSchema.tables[index].indexes) { + newDbSchema.tables[index].indexes[indIndx].id = indIndx + 1; + } + }); + }); + + return newDbSchema; +} diff --git a/package-shared/utils/db/schema/set-text-field-type.ts b/package-shared/utils/db/schema/set-text-field-type.ts new file mode 100644 index 0000000..59410d3 --- /dev/null +++ b/package-shared/utils/db/schema/set-text-field-type.ts @@ -0,0 +1,31 @@ +import { DSQL_FieldSchemaType, TextFieldTypesArray } from "../../../types"; +import _ from "lodash"; + +export default function setTextFieldType( + field: DSQL_FieldSchemaType, + type?: (typeof TextFieldTypesArray)[number]["value"] +): DSQL_FieldSchemaType { + const newField = _.cloneDeep(field); + + delete newField.css; + delete newField.richText; + delete newField.json; + delete newField.shell; + delete newField.html; + delete newField.javascript; + delete newField.yaml; + delete newField.code; + + delete newField.defaultValueLiteral; + + if (type == "css") return { ...newField, css: true }; + if 
(type == "richText") return { ...newField, richText: true }; + if (type == "json") return { ...newField, json: true }; + if (type == "shell") return { ...newField, shell: true }; + if (type == "html") return { ...newField, html: true }; + if (type == "yaml") return { ...newField, yaml: true }; + if (type == "javascript") return { ...newField, javascript: true }; + if (type == "code") return { ...newField, code: true }; + + return { ...newField }; +} diff --git a/package-shared/utils/delete-by-key.ts b/package-shared/utils/delete-by-key.ts new file mode 100644 index 0000000..7f1b633 --- /dev/null +++ b/package-shared/utils/delete-by-key.ts @@ -0,0 +1,37 @@ +import _ from "lodash"; + +/** + * # Delete all matches in an Array + */ +export default function deleteByKey<T>( + arr: T[], + key: keyof T | (keyof T)[] +) { + let newArray = _.cloneDeep(arr); + + for (let i = 0; i < newArray.length; i++) { + const item = newArray[i]; + + if (Array.isArray(key)) { + const targetMatches: boolean[] = []; + + for (let k = 0; k < key.length; k++) { + const ky = key[k]; + const targetValue = item[ky]; + const targetOriginValue = item[ky]; + targetMatches.push(targetValue == targetOriginValue); + } + + if (!targetMatches.find((mtch) => !mtch)) { + newArray.splice(i, 1); + } + } else { + let existingValue = newArray.find((v) => v[key] == item[key]); + if (existingValue) { + newArray.splice(i, 1); + } + } + } + + return newArray; +} diff --git a/package-shared/utils/envsub.ts b/package-shared/utils/envsub.ts new file mode 100644 index 0000000..d086818 --- /dev/null +++ b/package-shared/utils/envsub.ts @@ -0,0 +1,6 @@ +export default function envsub(str: string) { + return str.replace(/\$([A-Z_]+)|\${([A-Z_]+)}/g, (match, var1, var2) => { + const varName = var1 || var2; + return process.env[varName] || match; + }); +} diff --git a/package-shared/utils/grab-api-base-path.ts b/package-shared/utils/grab-api-base-path.ts new file mode 100644 index 0000000..e2211e8 --- /dev/null +++ b/package-shared/utils/grab-api-base-path.ts @@ -0,0 +1,16 @@ +const APIParadigms = ["crud", "media", "schema"] as const; + +type Params = { + version?: string; + paradigm?: (typeof APIParadigms)[number]; +}; + +export default function grabAPIBasePath({ version, paradigm }: Params): string { + let basePath = `/api/v${version || "1"}`; + + if (paradigm) { + basePath += `/${paradigm}`; + } + + return basePath; +} diff --git a/package-shared/utils/grab-app-main-db-schema.ts b/package-shared/utils/grab-app-main-db-schema.ts new file mode 100644 index 0000000..dc805ae --- /dev/null +++ b/package-shared/utils/grab-app-main-db-schema.ts @@ -0,0 +1,18 @@ +import fs from "fs"; +import grabDirNames from "./backend/names/grab-dir-names"; +import EJSON from "./ejson"; +import { DSQL_DatabaseSchemaType } from "../types"; + +export default function grabAppMainDbSchema() { + const { appSchemaJSONFile } = grabDirNames(); + + if (!fs.existsSync(appSchemaJSONFile)) { + return undefined; + } + + const parsedAppSchema = EJSON.parse( + fs.readFileSync(appSchemaJSONFile, "utf-8") + ) as DSQL_DatabaseSchemaType | undefined; + + return parsedAppSchema; +} diff --git a/package-shared/utils/grab-app-version.ts b/package-shared/utils/grab-app-version.ts new file mode 100644 index 0000000..09567f6 --- /dev/null +++ b/package-shared/utils/grab-app-version.ts @@ -0,0 +1,17 @@ +import { AppVersions } from "../types"; + +export default function grabAppVersion(): (typeof AppVersions)[number] { + const appVersionEnv = process.env.NEXT_PUBLIC_VERSION; + const finalAppVersion
= (appVersionEnv || + "community") as (typeof AppVersions)[number]["value"]; + + const targetAppVersion = AppVersions.find( + (version) => version.value === finalAppVersion + ); + + if (!targetAppVersion) { + throw new Error(`Invalid App Version: ${finalAppVersion}`); + } + + return targetAppVersion; +} diff --git a/package-shared/utils/grab-db-full-name.ts b/package-shared/utils/grab-db-full-name.ts index 7a0f292..95c7568 100644 --- a/package-shared/utils/grab-db-full-name.ts +++ b/package-shared/utils/grab-db-full-name.ts @@ -1,23 +1,43 @@ +import { UserType } from "../types"; +import slugify from "./slugify"; + type Param = { + /** + * Database full name or slug + */ dbName?: string; userId?: string | number; + user?: UserType | null; }; /** - * # Grab Database Full Name + * # Grab full database name + * @description Grab full database name from slug or full name + * @param param0 + * @returns */ -export default function grabDbFullName({ dbName, userId }: Param): string { - if (!dbName) - throw new Error( - `Database name not provided to db name parser funciton` - ); +export default function grabDbFullName({ + dbName, + userId, + user, +}: Param): string | undefined { + const finalUserId = user?.id || userId; - const sanitizedName = dbName.replace(/[^a-z0-9\_]/g, ""); - const cleanedDbName = sanitizedName.replace(/datasquirel_user_\d+_/, ""); + if (!finalUserId) { + return dbName; + } + if (!dbName) { + return; + } - if (!userId) return cleanedDbName; + const dbNamePrefix = process.env.DSQL_USER_DB_PREFIX; - const dbNamePrefix = `datasquirel_user_${userId}_`; + const parsedDbName = slugify(dbName, "_"); - return dbNamePrefix + cleanedDbName; + const dbSlug = parsedDbName.replace( + new RegExp(`${dbNamePrefix}_?\\d+_`), + "" + ); + + return slugify(`${dbNamePrefix}_${finalUserId}_${dbSlug}`, "_"); } diff --git a/package-shared/utils/grab-db-names.ts b/package-shared/utils/grab-db-names.ts new file mode 100644 index 0000000..bdd34d8 --- /dev/null +++ b/package-shared/utils/grab-db-names.ts @@ -0,0 +1,27 @@ +import { UserType } from "../types"; +import grabDbFullName from "./grab-db-full-name"; + +type Param = { + /** + * Database full name or slug + */ + dbName?: string; + userId?: string | number; + user?: UserType | null; +}; + +/** + * # Grab full database name + * @description Grab full database name from slug or full name + * @param param0 + * @returns + */ +export default function grabDbNames({ dbName, userId, user }: Param) { + const dbNamePrefix = process.env.DSQL_USER_DB_PREFIX; + const finalUserId = user?.id || userId; + const userDbPrefix = `${dbNamePrefix}${finalUserId}_`; + + const dbFullName = grabDbFullName({ dbName, user, userId }); + + return { userDbPrefix, dbFullName, dbNamePrefix }; +} diff --git a/package-shared/utils/grab-docker-resource-ip-numbers.ts b/package-shared/utils/grab-docker-resource-ip-numbers.ts new file mode 100644 index 0000000..0840da7 --- /dev/null +++ b/package-shared/utils/grab-docker-resource-ip-numbers.ts @@ -0,0 +1,15 @@ +export default function grabDockerResourceIPNumbers() { + return { + db: 32, + maxscale: 24, + postDbSetup: 43, + reverse_proxy: 34, + web: 35, + websocket: 36, + cron: 27, + db_cron: 20, + replica_1: 37, + replica_2: 38, + web_app_post_db_setup: 71, + } as const; +} diff --git a/package-shared/utils/grab-instance-global-network-name.ts b/package-shared/utils/grab-instance-global-network-name.ts new file mode 100644 index 0000000..6f2f875 --- /dev/null +++ b/package-shared/utils/grab-instance-global-network-name.ts @@ -0,0 +1,7 
@@ +import _ from "lodash"; +import grabIPAddresses from "./backend/names/grab-ip-addresses"; + +export default function grabInstanceGlobalNetWorkName() { + const deploymentName = process.env.DSQL_DEPLOYMENT_NAME || "dsql"; + return `${deploymentName}_dsql_global_network`; +} diff --git a/package-shared/utils/grab-sql-key-name.ts b/package-shared/utils/grab-sql-key-name.ts new file mode 100644 index 0000000..e9cd6c4 --- /dev/null +++ b/package-shared/utils/grab-sql-key-name.ts @@ -0,0 +1,24 @@ +type Param = { + type: "foreign_key" | "index" | "user"; + userId?: string | number; + addDate?: boolean; +}; + +/** + * # Grab Key Names + * @description Grab key names for foreign keys and indexes + */ +export default function grabSQLKeyName({ type, userId, addDate }: Param) { + let prefixParadigm = (() => { + if (type == "foreign_key") return "fk"; + if (type == "index") return "indx"; + if (type == "user") return "user"; + return null; + })(); + + let key = `dsql`; + if (prefixParadigm) key += `_${prefixParadigm}`; + if (userId) key += `_${userId}`; + if (addDate) key += `_${Date.now()}`; + return key; +} diff --git a/package-shared/utils/grab-sql-user-name-for-user.ts b/package-shared/utils/grab-sql-user-name-for-user.ts new file mode 100644 index 0000000..9c0bc15 --- /dev/null +++ b/package-shared/utils/grab-sql-user-name-for-user.ts @@ -0,0 +1,5 @@ +export default function grabSQLUserNameForUser( + userId?: string | number +): string { + return `dsql_user_${userId || 0}`; +} diff --git a/package-shared/utils/grab-sql-user-name.ts b/package-shared/utils/grab-sql-user-name.ts new file mode 100644 index 0000000..6d0fe89 --- /dev/null +++ b/package-shared/utils/grab-sql-user-name.ts @@ -0,0 +1,42 @@ +import { UserType } from "../types"; +import grabSQLUserNameForUser from "./grab-sql-user-name-for-user"; + +type Params = { + user?: UserType | null; + name?: string; +}; + +type Return = { + sqlUsername?: string; + name?: string; + nameWithoutPrefix?: string; +}; + +export default function grabSQLUserName({ + user, + name: passedName, +}: Params): Return { + if (!user) { + console.log("No User Found"); + return {}; + } + + const sqlUsername = grabSQLUserNameForUser(user.id); + const parsedPassedName = passedName + ? passedName.replace(sqlUsername, "").replace(/^_+|_+$/, "") + : undefined; + + const name = parsedPassedName + ? 
`${sqlUsername}_${parsedPassedName}` + : undefined; + + if (user.isSuperUser) { + return { + sqlUsername: undefined, + name: passedName, + nameWithoutPrefix: passedName, + }; + } + + return { sqlUsername, name, nameWithoutPrefix: parsedPassedName }; +} diff --git a/package-shared/utils/grab-user-main-sql-user-name.ts b/package-shared/utils/grab-user-main-sql-user-name.ts new file mode 100644 index 0000000..cf5d14e --- /dev/null +++ b/package-shared/utils/grab-user-main-sql-user-name.ts @@ -0,0 +1,30 @@ +import { UserType } from "../types"; +import grabSQLUserNameForUser from "./grab-sql-user-name-for-user"; +import grabIPAddresses from "../utils/backend/names/grab-ip-addresses"; + +type Params = { + user?: UserType | null; + HOST?: string; + username?: string; +}; + +export default function grabUserMainSqlUserName({ + HOST, + user, + username, +}: Params) { + const sqlUsername = grabSQLUserNameForUser(user?.id); + const { webAppIP, maxScaleIP } = grabIPAddresses(); + + const finalUsername = username || sqlUsername; + const finalHost = HOST || maxScaleIP || "127.0.0.1"; + const fullName = `${finalUsername}@${webAppIP}`; + + return { + username: finalUsername, + host: finalHost, + webHost: webAppIP, + fullName, + sqlUsername, + }; +} diff --git a/package-shared/utils/normalize-text.ts b/package-shared/utils/normalize-text.ts new file mode 100644 index 0000000..2b89816 --- /dev/null +++ b/package-shared/utils/normalize-text.ts @@ -0,0 +1,6 @@ +export default function normalizeText(txt: string) { + return txt + .replace(/\n|\r|\n\r/g, " ") + .replace(/ {2,}/g, " ") + .trim(); +} diff --git a/package-shared/utils/parse-env.ts b/package-shared/utils/parse-env.ts index 1c67e32..2acac03 100644 --- a/package-shared/utils/parse-env.ts +++ b/package-shared/utils/parse-env.ts @@ -1,7 +1,11 @@ import fs from "fs"; +import { EnvKeys } from "../types"; -export default function parseEnv(envFile: string) { +export default function parseEnv( + /** The file path to the env. Eg. 
/app/.env */ envFile: string +) { if (!fs.existsSync(envFile)) return undefined; + const envTextContent = fs.readFileSync(envFile, "utf-8"); const envLines = envTextContent .split("\n") @@ -32,5 +36,5 @@ export default function parseEnv(envFile: string) { } } - return newEnvObj; + return newEnvObj as { [k in (typeof EnvKeys)[number]]: string | undefined }; } diff --git a/package-shared/utils/purge-default-fields.ts b/package-shared/utils/purge-default-fields.ts new file mode 100644 index 0000000..6a947e2 --- /dev/null +++ b/package-shared/utils/purge-default-fields.ts @@ -0,0 +1,35 @@ +import _ from "lodash"; +import { DefaultEntryType } from "../types"; +import defaultFieldsRegexp from "../functions/dsql/default-fields-regexp"; + +export default function purgeDefaultFields< + T extends { [k: string]: any } = DefaultEntryType & { [k: string]: any } +>(entry: T | T[]): T | T[] { + const newEntry = _.cloneDeep(entry); + + if (Array.isArray(newEntry)) { + const entryKeys = Object.keys(newEntry[0]); + + for (let i = 0; i < newEntry.length; i++) { + for (let j = 0; j < entryKeys.length; j++) { + const entryKey = entryKeys[j]; + if (defaultFieldsRegexp.test(entryKey)) { + delete newEntry[i][entryKey]; + } + } + } + + return newEntry; + } else { + const entryKeys = Object.keys(newEntry); + + for (let i = 0; i < entryKeys.length; i++) { + const entryKey = entryKeys[i]; + if (defaultFieldsRegexp.test(entryKey)) { + delete newEntry[entryKey]; + } + } + + return newEntry; + } +} diff --git a/package-shared/utils/setup-global-network.ts b/package-shared/utils/setup-global-network.ts new file mode 100644 index 0000000..469ed19 --- /dev/null +++ b/package-shared/utils/setup-global-network.ts @@ -0,0 +1,20 @@ +import { execSync } from "child_process"; +import grabInstanceGlobalNetWorkName from "./grab-instance-global-network-name"; +import grabIPAddresses from "./backend/names/grab-ip-addresses"; + +export default function setupGlobalNetwork() { + const globalNetworkName = grabInstanceGlobalNetWorkName(); + const { globalIPPrefix } = grabIPAddresses(); + + try { + execSync(`docker network rm ${globalNetworkName}`, {}); + } catch (error) {} + + let newNtwkCmd = `docker network create`; + newNtwkCmd += ` --driver bridge`; + newNtwkCmd += ` --subnet ${globalIPPrefix}.0/24`; + newNtwkCmd += ` --gateway ${globalIPPrefix}.1`; + newNtwkCmd += ` ${globalNetworkName}`; + + execSync(newNtwkCmd); +} diff --git a/package-shared/utils/slug-to-normal-text.ts b/package-shared/utils/slug-to-normal-text.ts new file mode 100644 index 0000000..50877d2 --- /dev/null +++ b/package-shared/utils/slug-to-normal-text.ts @@ -0,0 +1,15 @@ +export default function slugToNormalText(str?: string) { + if (!str) return ""; + + return str + .toLowerCase() + .replace(/ /g, "-") + .replace(/[^a-z0-9\-]/g, "-") + .replace(/-{2,}/g, "-") + .replace(/[-]/g, " ") + .split(" ") + .map( + (word) => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase() + ) + .join(" "); +} diff --git a/package-shared/utils/slugify.ts b/package-shared/utils/slugify.ts index 6b956a0..79c3a21 100644 --- a/package-shared/utils/slugify.ts +++ b/package-shared/utils/slugify.ts @@ -1,5 +1,3 @@ -// @ts-check - /** * # Return the slug of a string * @@ -8,19 +6,30 @@ * slugify("Yes!") // "yes" * slugify("Hello!!! 
World!") // "hello-world" */ -export default function slugify(str?: string): string { +export default function slugify( + str?: string, + divider?: "-" | "_" | null, + allowTrailingDash?: boolean | null +): string { + const finalSlugDivider = divider || "-"; + try { if (!str) return ""; - return String(str) + let finalStr = String(str) .trim() .toLowerCase() .replace(/ {2,}/g, " ") - .replace(/ /g, "-") - .replace(/[^a-z0-9]/g, "-") - .replace(/-{2,}/g, "-") - .replace(/^-/, "") - .replace(/-$/, ""); + .replace(/ /g, finalSlugDivider) + .replace(/[^a-z0-9]/g, finalSlugDivider) + .replace(/-{2,}|_{2,}/g, finalSlugDivider) + .replace(/^-/, ""); + + if (allowTrailingDash) { + return finalStr; + } + + return finalStr.replace(/-$/, ""); } catch (error: any) { console.log(`Slugify ERROR: ${error.message}`); return ""; diff --git a/package-shared/utils/sql-equality-parser.ts b/package-shared/utils/sql-equality-parser.ts new file mode 100644 index 0000000..b5c4358 --- /dev/null +++ b/package-shared/utils/sql-equality-parser.ts @@ -0,0 +1,42 @@ +import { ServerQueryEqualities } from "../types"; + +export default function sqlEqualityParser( + eq: (typeof ServerQueryEqualities)[number] +): string { + switch (eq) { + case "EQUAL": + return "="; + case "LIKE": + return "LIKE"; + case "NOT LIKE": + return "NOT LIKE"; + case "NOT EQUAL": + return "<>"; + case "IN": + return "IN"; + case "NOT IN": + return "NOT IN"; + case "BETWEEN": + return "BETWEEN"; + case "NOT BETWEEN": + return "NOT BETWEEN"; + case "IS NULL": + return "IS NULL"; + case "IS NOT NULL": + return "IS NOT NULL"; + case "EXISTS": + return "EXISTS"; + case "NOT EXISTS": + return "NOT EXISTS"; + case "GREATER THAN": + return ">"; + case "GREATER THAN OR EQUAL": + return ">="; + case "LESS THAN": + return "<"; + case "LESS THAN OR EQUAL": + return "<="; + default: + return "="; + } +} diff --git a/package-shared/utils/unique-by-key.ts b/package-shared/utils/unique-by-key.ts new file mode 100644 index 0000000..b000aad --- /dev/null +++ b/package-shared/utils/unique-by-key.ts @@ -0,0 +1,36 @@ +import slugify from "./slugify"; + +export default function uniqueByKey<T>( + arr: T[], + key: keyof T | (keyof T)[] +) { + let newArray = [] as T[]; + let uniqueValues = [] as string[]; + + for (let i = 0; i < arr.length; i++) { + const item = arr[i]; + + let targetValue: string | undefined; + + if (Array.isArray(key)) { + const targetVals: string[] = []; + + for (let k = 0; k < key.length; k++) { + const ky = key[k]; + const targetValuek = slugify(String(item[ky])); + targetVals.push(targetValuek); + } + targetValue = slugify(targetVals.join(",")); + } else { + targetValue = slugify(String(item[key])); + } + + if (!targetValue) continue; + + if (uniqueValues.includes(targetValue)) continue; + newArray.push(item); + uniqueValues.push(targetValue); + } + + return newArray; +} diff --git a/package-shared/utils/update-grastate-file-to-latest.ts b/package-shared/utils/update-grastate-file-to-latest.ts new file mode 100644 index 0000000..4a30131 --- /dev/null +++ b/package-shared/utils/update-grastate-file-to-latest.ts @@ -0,0 +1,18 @@ +import fs from "fs"; +import grabDirNames from "./backend/names/grab-dir-names"; + +export default function updateGrastateToLatest() { + const { mainDbGrastateDatFile } = grabDirNames(); + + const existingGrastateDatFile = fs.readFileSync( + mainDbGrastateDatFile, + "utf-8" + ); + + const newGrastateDatFile = existingGrastateDatFile.replace( + /safe_to_bootstrap: .*/, + `safe_to_bootstrap: 1` + ); + 
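// Write the updated grastate.dat contents back to disk with safe_to_bootstrap set to 1 +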
fs.writeFileSync(mainDbGrastateDatFile, newGrastateDatFile, "utf-8"); +} diff --git a/package.json b/package.json index a811b70..d9e66d9 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@moduletrace/datasquirel", - "version": "4.7.4", + "version": "4.7.5", "description": "Cloud-based SQL data management tool", "main": "dist/index.js", "bin": { diff --git a/tsconfig.json b/tsconfig.json index 839ec7b..c83214c 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,7 +1,7 @@ { "compilerOptions": { - "target": "ES2015", - "module": "commonjs", + "target": "es2017", + "module": "esnext", "maxNodeModuleJsDepth": 10, "esModuleInterop": true, "forceConsistentCasingInFileNames": true, @@ -21,6 +21,6 @@ } ] }, - "include": ["engine", "package-shared", "client"], + "include": ["engine", "package-shared", "client", "test/v1"], "exclude": ["node_modules", "dump", "dist"] }
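For reference, a minimal usage sketch of the reworked grabDbFullName and the new slugify divider argument from this patch (illustrative only; the DSQL_USER_DB_PREFIX value, the user id, and the relative import paths below are assumptions, not part of the commit):

// usage-sketch.ts — assumed to sit at the repo root next to package-shared/
import grabDbFullName from "./package-shared/utils/grab-db-full-name";
import slugify from "./package-shared/utils/slugify";

// Assumed prefix; in practice this comes from the deployment environment
process.env.DSQL_USER_DB_PREFIX = "datasquirel_user";

// The optional divider swaps "-" for "_" when building the slug
console.log(slugify("My Blog DB", "_")); // "my_blog_db"

// Full name = env prefix + user id + slug, joined with underscores
const fullName = grabDbFullName({ dbName: "My Blog DB", userId: 42 });
console.log(fullName); // expected: "datasquirel_user_42_my_blog_db"

// Feeding an already-prefixed name back should be idempotent, because the
// `${prefix}_?\d+_` portion is stripped before the name is re-prefixed
console.log(grabDbFullName({ dbName: fullName, userId: 42 })); // same value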