diff --git a/.eslintrc.json b/.eslintrc.json index c237253b..e90a5bcc 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -7,12 +7,19 @@ }, "globals": { "JSX": "readonly", "Chart": "readonly", "NodeJS": "readonly" }, "extends": [ + "eslint:recommended", + "plugin:react/recommended", + "plugin:react-hooks/recommended", + + "plugin:@typescript-eslint/recommended", + "plugin:@typescript-eslint/recommended-requiring-type-checking", + "airbnb", - "plugin:import/errors", - "plugin:import/warnings", + + "plugin:import/recommended", "plugin:import/typescript", - // "plugin:@typescript-eslint/recommended", + "prettier" ], "parser": "@typescript-eslint/parser", @@ -21,38 +28,63 @@ "jsx": true }, "ecmaVersion": 12, - "sourceType": "module" + "sourceType": "module", + "project": "./tsconfig.json" }, - "plugins": ["react", "@typescript-eslint"], + "plugins": ["react", "react-hooks", "@typescript-eslint", "import"], "rules": { - "react/jsx-filename-extension": [2, { "extensions": [".jsx", ".tsx"] }], + "react/jsx-filename-extension": [1, { "extensions": [".jsx", ".tsx"] }], + "no-use-before-define": "off", "@typescript-eslint/no-use-before-define": ["error"], + "import/extensions": [ "error", "ignorePackages", { "js": "never", "jsx": "never", "ts": "never", "tsx": "never" } ], - // "import/no-extraneous-dependencies": ["error", { "devDependencies": true }], **trying to resolve the electron issue + "jsx-a11y/label-has-associated-control": "off", + // prevent wrong warning with typescript overloads "no-unused-vars": "off", "@typescript-eslint/no-unused-vars": ["error"], - "no-dupe-class-members": "off", + + "no-dupe-class-members": "off", "@typescript-eslint/no-dupe-class-members": ["error"], + "lines-between-class-members": "off", "@typescript-eslint/lines-between-class-members": [ "error", + "always", { "exceptAfterSingleLine": true } ], - "react/jsx-curly-newline": "off", + + // "react/jsx-curly-newline": "off", + // turned off because typescript, functional components and 
default props // don't seem to be good friends. Decided to manually handle defaults for // optional props instead "react/require-default-props": "off", "react/jsx-props-no-spreading": "off", + "camelcase": "off" + + // "import/no-extraneous-dependencies": ["error", { "devDependencies": true }], **trying to resolve the electron issue + }, + // "settings": "import/core-modules: [ electron ]", **trying to resolve the electron issue + "settings": { + "react": { + "version": "detect" + }, + "import/parsers": { + "@typescript-eslint/parser": [".ts", ".tsx"] + }, + "import/resolver": { + "typescript": { + "alwaysTryTypes": true + } + } }, - // "settings": "import/core-modules: [ electron ]", **trying to resolve the electron issue "root": true } diff --git a/.prettierrc.json b/.prettierrc.json index 85b1406c..dc2715bf 100644 --- a/.prettierrc.json +++ b/.prettierrc.json @@ -1,5 +1,6 @@ { "singleQuote": true, "tabWidth": 2, - "useTabs": false + "useTabs": false, + "trailingComma": "all" } diff --git a/.vscode/settings.json b/.vscode/settings.json index 6d7f3b0e..5155c9b1 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,15 +1,13 @@ { - "liveSassCompile.settings.formats": [ - { - "format": "expanded", - "savePath": "/frontend/assets/stylesheets/css", - "extensionName": ".css" - } - ], - "liveSassCompile.settings.generateMap": false, - "yaml.schemas": { - "https://json.schemastore.org/electron-builder": [ - "electron-builder.yml" - ] + "liveSassCompile.settings.formats": [ + { + "format": "expanded", + "savePath": "/frontend/assets/stylesheets/css", + "extensionName": ".css" } -} \ No newline at end of file + ], + "liveSassCompile.settings.generateMap": false, + "yaml.schemas": { + "https://json.schemastore.org/electron-builder": ["electron-builder.yml"] + } +} diff --git a/DEV_README.md b/DEV_README.md new file mode 100644 index 00000000..60fcb146 --- /dev/null +++ b/DEV_README.md @@ -0,0 +1,54 @@ +
+ + + +
+ +`Developer's Read Me` + +`** v12.0.0 **` + +

In this version our team focused on refactoring the broken code base from all previous versions.

+ +

WHAT YOU NEED TO DO FIRST:

+ +Run npm run dev twice if you do not manually run tsc to compile the files first. The ts files have to compile before electron-dev and webpack-dev can start. + +

WHAT WE UPDATED:

+ +

1. Trimmed dependency issues from 54 down to 1. The remaining one cannot be resolved because it comes from the 30 viewers

+

2. Broke backend into MVVM/MVC model

+

3. Made sure types are enforced in typescript

+

4. Fixed import and export local files

+

5. Fixed Authentication

+ +

WHAT NEEDS TO BE DONE:

+ +

1. Isolating Database
One of the biggest tasks that we tried but did not finish is isolating the concerns of each database type (DBType). The current application has multiple

+if (database === DBType.postgres) {}
+else if (database === DBType.mysql) {}
+else (database === DBType.sqlite) {}
+
+
+

situations and it is not good. Instead, we will use switch statements to preemptively route operations into separate functions to completely silo cases for Postgres, MySQL, and SQLite. This is a task for BOTH THE FRONTEND AND BACKEND and the FRONTEND IS MUCH HARDER. The work for the backend is actually done and it is illustrated in the picture below

+ + + +

The road map is to finish connecting the siloed pieces for Postgres, then move on to MySQL

***Important***
There is not an entry for this system yet, but this file frontend/components/iews/ERTables/ERDisplayWindow.tsx will replace frontend/components/iews/ERTables/ERTabling.tsx when it is ready

+ +

2. ERD Logic Update
Currently, the previous team wrote the frontend to send back a big bundle of all the operations done in the frontend ERD Table. This ERD table object is divided by add, drop, and alter. All the add operations will execute first, then drop, then alter. This is BAD.

We need to redesign the frontend to send back "sequential" operations instead of bundling operations by add, drop, alter because it takes care of multiple edge cases and users in the front can do as many operations as they want to ensure SAVE works. I illustrated the problem below. The current backend is written out already. We just need to make sure the frontend is sending back the appropriate logic

+ + + +

**_Important_**
This is written at backend/src/ipcHandlers/dbCRUDHandlerERD.ts and will replace backend/src/ipcHandlers/dbCRUDHandler.ts when this is ready
+ +

3. Async event emitters between front/backend

+

Currently, the way the feedback modal works is by handling events that are emitted from both the frontend and the backend. Ideally, this should be refactored to be state dependent rather than event dependent, as it controls the display of the modal. This can be tied into the centralized async event emitter added to frontend/components/app.tsx, in conjunction with migration to reducers from state variables. The goal will be to house modal messages in the store tied to the main app reducer. From there, the async handler can send new messages to the state via main app dispatch, and any other front end feedback can do the same.

+The main roadblock in the way of finalizing the transfer of event handlers out of the frontend is the way the dblist (list of databases in the sidebar) gets updated. Many event handlers in the backend send a dblist update event out to update the front end. Ideally, this should be handled by returning the new dblist changes out of the handler and using that resolved value to update state whenever an action would cause a dblist change. Right now, app.tsx has a useEffect running that listens for those dblist updates every frame. This is inefficient as a frontend concern.

+The spinner currently works in a similar way to feedback. Once all async is completely migrated (including dblist update changes), this spinner can simply be tied to the loading property in the main app state.

+There are still some filesystem read/write calls in the front end. This should be refactored to an async call that requests the backend handle the file system read/write for proper separation of concerns. +

+

4.

+

5.

+ +

WHAT IS BROKEN:

diff --git a/README.md b/README.md index 093e450b..003e0cb6 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,7 @@ [![PRs Welcome](https://img.shields.io/badge/PRs-welcome-brightgreen.svg)](https://github.com/oslabs-beta/SeeQR) -![Release: 11.0.0](https://img.shields.io/badge/Release-11.0.0-red) +![Release: 12.0.0](https://img.shields.io/badge/Release-12.0.0-red) ![License: MIT](https://img.shields.io/badge/License-MIT-orange.svg) ![Contributions Welcome](https://img.shields.io/badge/Contributions-welcome-blue.svg) [![Twitter](https://img.shields.io/twitter/url?style=social&url=https%3A%2F%2Ftwitter.com%2Ftheseeqr)](https://twitter.com/theseeqr) @@ -15,7 +15,7 @@ -### For the latest in-depth docs for v11.0.0, please visit our [docs site](http://www.seeqrapp.com/docs). +### For the latest in-depth docs for v12.0.0, please visit our [docs site](http://www.seeqrapp.com/docs). ## Table of Contents @@ -28,7 +28,6 @@ - [Core Team](#core-team) - [License](#license) - ## Getting Started To get started on contributing and editing databases to this project: @@ -56,7 +55,6 @@ To get started on contributing and editing databases to this project: - [D3.js](https://github.com/d3/d3) - [Three-Spritetext](https://github.com/vasturiano/three-spritetext) - ## Interface & Features - Overview @@ -79,22 +77,25 @@ To get started on contributing and editing databases to this project: - In the 'DATABASES' view, an interactive Entity Relationship Diagram (`ER DIAGRAM`) is displayed for the selected database. - Users can select `TABLE` to see selected database in tabular form. + - Users can select a table from a list of all the tables in the schema of the currently selected database. + - Information about the selected table is then displayed. - The name and size of the selected database are also displayed at the top of the page. - Users can also generate large amounts of foreign-key compliant dummy data for the selected table in the current database. 
Currently supported data types are: + - INT - BIGINT - VARCHAR - BOOLEAN - DATE -
-
- -
-
- +
+
+ +
+
+ - NEW 3D Database Visualization - From the 'DATABASES' view, select the '3D View' tab on the far right of the sidebar to launch the brand new 3D database visualizer. - Individual table nodes are distinguished by their different colors @@ -103,25 +104,25 @@ To get started on contributing and editing databases to this project: - Navigate through the 3D space using left click to rotate, scroll wheel to zoom, and right click to pan. - Click and drag nodes to visually organize them - Click on a table or column node to quickly view the data stored within -
-
- -
-
-
- -
-
-
- -
+
+
+ +
+
+
+ +
+
+
+ +

- Create/Edit Database - - Users can create a new database from scratch by clicking the `Create New Database` button at the bottom of the sidebar. - - Users can modify the newly created database as well as any existing databases using the `ER Diagram` to create/change/delete tables and columns. - - The `Export` button will write a .sql file on the user's desktop of the selected database. +- Users can create a new database from scratch by clicking the `Create New Database` button at the bottom of the sidebar. +- Users can modify the newly created database as well as any existing databases using the `ER Diagram` to create/change/delete tables and columns. +- The `Export` button will write a .sql file on the user's desktop of the selected database.
@@ -137,7 +138,7 @@ To get started on contributing and editing databases to this project: - Please note that only labelled queries will be saved in the current session for future references. - To execute the query, simply select the 'RUN QUERY' button at the bottom of the panel or press 'Ctrl-Enter' on the keyboard. - The 11th version now includes the functionality to run multiple queries, allowing users to obtain more reliable testing results. -
+
@@ -201,7 +202,7 @@ We've released SeeQR because it's a useful tool to help optimize SQL databases. ## Core Team -[Ching-Yuan Lai (Eric)](https://github.com/paranoidFrappe) | [Jamie Zhang](https://github.com/haemie) | [Julian Macalalag](https://github.com/juzi3) | [Nathan Chong](https://github.com/nathanhchong) | [Junaid Ahmed](https://github.com/junaid-ahmed7) | [Chase Sizemore](https://github.com/ChaseSizemore) | [Oscar Romero](https://github.com/creaturenex) | [Anthony Deng](https://github.com/anthonyadeng) | [Aya Moosa](https://github.com/Hiya-its-Aya) | [Trevor Ferguson](https://github.com/TrevorJFerguson) | [Pauline Nguyen](https://github.com/paulinekpn) | [Utkarsh Uppal](https://github.com/utyvert) | [Fred Jeong](https://github.com/fred-jeong) | [Gabriel Kime](https://github.com/wizardbusiness) | [Chris Fryer](github.com/frynoceros) | [Ian Grepo](https://github.com/RadiantGH) | [Michelle Chang](https://github.com/mkchang168) | [Jake Bradbeer](https://github.com/JBradbeer) | [Bryan Santos](https://github.com/santosb93) | [William Trey Lewis](https://github.com/treyfrog128) | [Brandon Lee](https://github.com/BrandonW-Lee) | [Casey Escovedo](https://github.com/caseyescovedo) | [Casey Walker](https://github.com/cwalker3011) | [Catherine Chiu](https://github.com/catherinechiu) | [Chris Akinrinade](https://github.com/chrisakinrinade) | [Cindy Chau](https://github.com/cindychau) | [Claudio Santos](https://github.com/Claudiohbsantos) | [Eric Han](https://github.com/ericJH92) | [Faraz Akhtar](https://github.com/faraza22) | [Frank Norton](https://github.com/FrankNorton32) | [Harrison Nam](https://github.com/harrynam07) | [James Kolotouros](https://github.com/dkolotouros) | [Jennifer Courtner](https://github.com/jcourtner) | [John Wagner](https://github.com/jwagner988) | [Justin Dury-Agri](https://github.com/justinD-A) | [Justin Hicks](https://github.com/JuiceBawks) | [Katie Klochan](https://github.com/kklochan) | [May Wirapa Boonyasurat](https://github.com/mimiwrp) | 
[Mercer Stronck](https://github.com/mercerstronck) | [Muhammad Trad](https://github.com/muhammadtrad) | [Richard Guo](https://github.com/richardguoo) | [Richard Lam](https://github.com/rlam108) | [Sam Frakes](https://github.com/frakes413) | [Serena Kuo](https://github.com/serenackuo) | [Timothy Sin](https://github.com/timothysin) | [Vincent Trang](https://github.com/vincentt114) +[Annabelle Ni](https://github.com/annni11) | [Derek Koh](https://github.com/derekoko) | [Peter Zepf](https://github.com/peterzepf) | [Tony Gao](https://github.com/tgao17) | [Ching-Yuan Lai (Eric)](https://github.com/paranoidFrappe) | [Jamie Zhang](https://github.com/haemie) | [Julian Macalalag](https://github.com/juzi3) | [Nathan Chong](https://github.com/nathanhchong) | [Junaid Ahmed](https://github.com/junaid-ahmed7) | [Chase Sizemore](https://github.com/ChaseSizemore) | [Oscar Romero](https://github.com/creaturenex) | [Anthony Deng](https://github.com/anthonyadeng) | [Aya Moosa](https://github.com/Hiya-its-Aya) | [Trevor Ferguson](https://github.com/TrevorJFerguson) | [Pauline Nguyen](https://github.com/paulinekpn) | [Utkarsh Uppal](https://github.com/utyvert) | [Fred Jeong](https://github.com/fred-jeong) | [Gabriel Kime](https://github.com/wizardbusiness) | [Chris Fryer](github.com/frynoceros) | [Ian Grepo](https://github.com/RadiantGH) | [Michelle Chang](https://github.com/mkchang168) | [Jake Bradbeer](https://github.com/JBradbeer) | [Bryan Santos](https://github.com/santosb93) | [William Trey Lewis](https://github.com/treyfrog128) | [Brandon Lee](https://github.com/BrandonW-Lee) | [Casey Escovedo](https://github.com/caseyescovedo) | [Casey Walker](https://github.com/cwalker3011) | [Catherine Chiu](https://github.com/catherinechiu) | [Chris Akinrinade](https://github.com/chrisakinrinade) | [Cindy Chau](https://github.com/cindychau) | [Claudio Santos](https://github.com/Claudiohbsantos) | [Eric Han](https://github.com/ericJH92) | [Faraz Akhtar](https://github.com/faraza22) | [Frank 
Norton](https://github.com/FrankNorton32) | [Harrison Nam](https://github.com/harrynam07) | [James Kolotouros](https://github.com/dkolotouros) | [Jennifer Courtner](https://github.com/jcourtner) | [John Wagner](https://github.com/jwagner988) | [Justin Dury-Agri](https://github.com/justinD-A) | [Justin Hicks](https://github.com/JuiceBawks) | [Katie Klochan](https://github.com/kklochan) | [May Wirapa Boonyasurat](https://github.com/mimiwrp) | [Mercer Stronck](https://github.com/mercerstronck) | [Muhammad Trad](https://github.com/muhammadtrad) | [Richard Guo](https://github.com/richardguoo) | [Richard Lam](https://github.com/rlam108) | [Sam Frakes](https://github.com/frakes413) | [Serena Kuo](https://github.com/serenackuo) | [Timothy Sin](https://github.com/timothysin) | [Vincent Trang](https://github.com/vincentt114) ## License diff --git a/__tests__/backend/src/models/dbCRUDHAndler.spec.ts b/__tests__/backend/src/models/dbCRUDHAndler.spec.ts new file mode 100644 index 00000000..dfe4484c --- /dev/null +++ b/__tests__/backend/src/models/dbCRUDHAndler.spec.ts @@ -0,0 +1,68 @@ +describe('dbCRUDHandler tests', () => { + // setBaseConnections + describe('erTableSchemaUpdate tests', () => { + test('it should only receive backendObj as parameter', () => {}); + test('it should send async started back to frontend', () => {}); + test( + 'it should send backendObj to helper function to receive a queryString and a dbType back as query', + ); + test( + 'it should use query.queryString and query.dbType to run queryModel.query', + ); + }); +}); + +describe('ertable-functions tests', () => { + describe('erdObjToQuery tests', () => { + test('it should only receive backendObj as parameter', () => {}); + test('it should create an empty array', () => {}); + test('it should identity the erdDbType from dbState', () => {}); + test('it should use erdDbType to pick an appropriate query function for backendObj to act on and receive a query bacj', () => {}); + test('it should return a query 
string'); + }); +}); + +/* +const backendObj = { + database: 'tester2', + updates: { + addTables: [ + { + is_insertable_into: 'yes', + table_name: 'NewTable8', + table_schema: 'puclic', + table_catalog: 'tester2', + columns: [] + } + ], + + dropTables: [{ + table_name: 'newtable5', + table_schema: 'puclic' + } + ], + + alterTables: [ + { + is_insertable_into: null, + table_catalog: 'tester2', + table_name: 'newtable7', + new_table_name: null, + table_schema: 'puclic', + addColumns: [Array], + dropColumns: [], + alterColumns: [] + }, + { + is_insertable_into: null, + table_catalog: 'tester2', + table_name: 'newtable7', + new_table_name: null, + table_schema: 'puclic', + addColumns: [Array], + dropColumns: [], + alterColumns: [] + }] + } +} +*/ diff --git a/__tests__/backend/src/utils/erdAlters/pSqlCUD.spec.ts b/__tests__/backend/src/utils/erdAlters/pSqlCUD.spec.ts new file mode 100644 index 00000000..31e87f21 --- /dev/null +++ b/__tests__/backend/src/utils/erdAlters/pSqlCUD.spec.ts @@ -0,0 +1,189 @@ +import { + generatePostgresColumnQuery, + queryPostgres, +} from '../../../../../backend/src/utils/erdCUD/pSqlCUD'; +import { + PsqlColumnOperations, + ErdUpdatesType, + PSqlDataType, +} from '../../../../../shared/types/erTypes'; + +describe('pSqlCUD', () => { + describe('generatePostgresAlterQuery', () => { + const tableName = 'test'; + + describe('addColumn', () => { + it('should return correct string without Type defined', () => { + const addWithoutType: PsqlColumnOperations = { + columnAction: 'addColumn', + columnName: 'hello', + }; + expect(generatePostgresColumnQuery(tableName, addWithoutType)).toEqual( + 'ADD COLUMN hello', + ); + }); + + it('should return correct string with Type defined', () => { + const addWithType: PsqlColumnOperations = { + columnAction: 'addColumn', + columnName: 'hello2', + type: 'CHAR', + }; + expect(generatePostgresColumnQuery(tableName, addWithType)).toEqual( + 'ADD COLUMN hello2 TYPE CHAR', + ); + }); + }); + + describe('dropColumn', 
() => { + it('should return drop column string', () => { + const drop: PsqlColumnOperations = { + columnAction: 'dropColumn', + columnName: 'drop', + }; + expect(generatePostgresColumnQuery(tableName, drop)).toEqual( + 'DROP COLUMN drop', + ); + }); + }); + + describe('alterColumnType', () => { + it('should return string with desired alter type', () => { + const alterColumnType: PsqlColumnOperations = { + columnAction: 'alterColumnType', + columnName: 'alterColumnType', + type: 'INTEGER', + }; + expect(generatePostgresColumnQuery(tableName, alterColumnType)).toEqual( + 'ALTER COLUMN alterColumnType TYPE INTEGER', + ); + }); + }); + + describe('renameColumn', () => { + it('should return string with renamed column', () => { + const renameColumn: PsqlColumnOperations = { + columnAction: 'renameColumn', + columnName: 'before', + newColumnName: 'after', + }; + expect(generatePostgresColumnQuery(tableName, renameColumn)).toEqual( + 'RENAME COLUMN before TO after', + ); + }); + }); + + describe('togglePrimary', () => { + it('should return string for primary set to TRUE', () => { + const primaryTrue: PsqlColumnOperations = { + columnAction: 'togglePrimary', + columnName: 'true', + isPrimary: true, + }; + expect(generatePostgresColumnQuery(tableName, primaryTrue)).toEqual( + 'ADD PRIMARY KEY (true)', + ); + }); + it('should return string for primary set to FALSE', () => { + const primaryTrue: PsqlColumnOperations = { + columnAction: 'togglePrimary', + columnName: 'false', + isPrimary: false, + }; + expect(generatePostgresColumnQuery(tableName, primaryTrue)).toEqual( + 'DROP CONSTRAINT users_pkey', + ); + }); + }); + + describe('toggleForeign', () => { + it('should return string for making foreign TRUE', () => { + const foreignTrue: PsqlColumnOperations = { + columnAction: 'toggleForeign', + columnName: 'true', + hasForeign: true, + foreignTable: 'foreignTable', + foreignColumn: 'foreignColumn', + foreignConstraint: 'foreignConstraint', + }; + 
expect(generatePostgresColumnQuery(tableName, foreignTrue)).toEqual( + 'ADD CONSTRAINT foreignConstraint FOREIGN KEY (true) REFERENCES foreignTable (foreignColumn)', + ); + }); + it('should return string for making foreign FALSE', () => { + const foreignTrue: PsqlColumnOperations = { + columnAction: 'toggleForeign', + columnName: 'false', + hasForeign: false, + foreignConstraint: 'foreignConstraint', + }; + expect(generatePostgresColumnQuery(tableName, foreignTrue)).toEqual( + 'DROP CONSTRAINT foreignConstraint', + ); + }); + }); + + describe('toggleUnique', () => { + it('should return string for making unique TRUE', () => { + const uniqueTrue: PsqlColumnOperations = { + columnAction: 'toggleUnique', + columnName: 'true', + isUnique: true, + }; + expect(generatePostgresColumnQuery(tableName, uniqueTrue)).toEqual( + 'ADD UNIQUE (true)', + ); + }); + it('should return string for making unique FALSE', () => { + const uniqueTrue: PsqlColumnOperations = { + columnAction: 'toggleUnique', + columnName: 'false', + isUnique: false, + }; + expect(generatePostgresColumnQuery(tableName, uniqueTrue)).toEqual( + 'DROP CONSTRAINT test_false_key', + ); + }); + }); + }); + + describe('queryPostgres', () => { + describe('all case test', () => { + it('should return array of strings with add, drop, alter, and column', () => { + const updatesArray: ErdUpdatesType = [ + { + action: 'add', + tableName: 'table1', + tableSchema: 'public', + }, + { + action: 'drop', + tableName: 'table2', + tableSchema: 'public', + }, + { + action: 'alter', + tableName: 'table3', + tableSchema: 'public', + newTableName: 'alteredTable3', + }, + { + action: 'column', + tableName: 'table4', + tableSchema: 'public', + columnOperations: { + columnAction: 'dropColumn', + columnName: 'table4column', + }, + }, + ]; + expect(queryPostgres(updatesArray)).toStrictEqual([ + 'CREATE TABLE public.table1;', + 'DROP TABLE public.table2;', + 'ALTER TABLE public.table3 RENAME TO alteredTable3;', + 'ALTER TABLE public.table4 
DROP COLUMN table4column;', + ]); + }); + }); + }); +}); diff --git a/__tests__/backend/src/utils/erdTableFunctions.spec.ts b/__tests__/backend/src/utils/erdTableFunctions.spec.ts new file mode 100644 index 00000000..3a1948da --- /dev/null +++ b/__tests__/backend/src/utils/erdTableFunctions.spec.ts @@ -0,0 +1,36 @@ +import erdUpdatesToQuery from '../../../../backend/src/utils/erdTableFunctions'; +import { ErdUpdatesType } from '../../../../shared/types/erTypes'; +import { DBType } from '../../../../shared/types/dbTypes'; + +describe('erdTableFunctions', () => { + // jest.mock( + // '../../../../backend/src/models/stateModel', + // () => + // ({ + // default: { + // currentERD: DBType.Postgres, + // }, + // } as typeof import('../../../../backend/src/models/stateModel')), + // ); + + it('should return a big string from case POSTGRES and RDSPOSTGRES', () => { + const updatesArray: ErdUpdatesType = [ + { + action: 'add', + tableName: 'table1', + tableSchema: 'public', + }, + { + action: 'drop', + tableName: 'table2', + tableSchema: 'public', + }, + ]; + + const currentERD: DBType = DBType.Postgres; + + expect(erdUpdatesToQuery(updatesArray, currentERD)).toEqual( + 'CREATE TABLE public.table1; DROP TABLE public.table2;', + ); + }); +}); diff --git a/__tests__/backend/ertable-functions.spec.ts b/__tests__/backendTests/readme.txt similarity index 100% rename from __tests__/backend/ertable-functions.spec.ts rename to __tests__/backendTests/readme.txt diff --git a/__tests__/frontend/appAsync.spec.ts b/__tests__/frontend/appAsync.spec.ts new file mode 100644 index 00000000..cea3879d --- /dev/null +++ b/__tests__/frontend/appAsync.spec.ts @@ -0,0 +1,4 @@ +/** + * Test cases for functions used in central useEffect responsible for issuing + * async requests + */ diff --git a/__tests__/frontend/lib/appViews.spec.ts b/__tests__/frontend/lib/appViews.spec.ts new file mode 100644 index 00000000..771bd348 --- /dev/null +++ b/__tests__/frontend/lib/appViews.spec.ts @@ -0,0 +1,84 @@ 
+import React from 'react'; +import { render, fireEvent } from '@testing-library/react'; +import App from '../../../frontend/components/App'; +import { + appViewStateReducer, + AppViewState, +} from '../../../frontend/state_management/Reducers/AppViewReducer'; + +describe('App view state reducer', () => { + let initialState: AppViewState; + + beforeEach(() => { + initialState = { + selectedView: 'dbView', + sideBarIsHidden: false, + showConfigDialog: false, + showCreateDialog: false, + PG_isConnected: false, + MYSQL_isConnected: false, + }; + }); + + describe('Selected view should properly update the current state', () => { + it('should update the selectedView to erView', () => { + const newState = appViewStateReducer(initialState, { + type: 'SELECTED_VIEW', + payload: 'compareView', + }); + expect(newState.selectedView).toEqual('compareView'); + }); + + it('should update the selectedView to testView', () => { + const newState = appViewStateReducer(initialState, { + type: 'SELECTED_VIEW', + payload: 'queryView', + }); + expect(newState.selectedView).toEqual('queryView'); + }); + + it('should update the selectedView to view', () => { + const newState = appViewStateReducer(initialState, { + type: 'SELECTED_VIEW', + payload: 'newSchemaView', + }); + expect(newState.selectedView).toEqual('newSchemaView'); + }); + }); + + it('should toggle sidebar config', () => { + const newState = appViewStateReducer(initialState, { + type: 'TOGGLE_SIDEBAR', + }); + expect(newState.sideBarIsHidden).toEqual(true); + }); + + it('should toggle showConfigDialog', () => { + const newState = appViewStateReducer(initialState, { + type: 'TOGGLE_CONFIG_DIALOG', + }); + expect(newState.showConfigDialog).toEqual(true); + }); + it('should toggle showConfigDialog', () => { + const newState = appViewStateReducer(initialState, { + type: 'TOGGLE_CREATE_DIALOG', + }); + expect(newState.showCreateDialog).toEqual(true); + }); + + it('should update the PG connected with the proper passed in boolean', () 
=> { + const newState = appViewStateReducer(initialState, { + type: 'IS_PG_CONNECTED', + payload: true, + }); + expect(newState.PG_isConnected).toEqual(true); + }); + + it('should update the MYSQL connected with the proper passed in boolean', () => { + const newState = appViewStateReducer(initialState, { + type: 'IS_MYSQL_CONNECTED', + payload: true, + }); + expect(newState.MYSQL_isConnected).toEqual(true); + }); +}); diff --git a/__tests__/frontend/lib/erdReducers.spec.ts b/__tests__/frontend/lib/erdReducers.spec.ts new file mode 100644 index 00000000..743f00f3 --- /dev/null +++ b/__tests__/frontend/lib/erdReducers.spec.ts @@ -0,0 +1,25 @@ +// import { +// erdReducer, +// ERDState, +// } from '../../../frontend/state_management/Reducers/ERDReducers'; + +// import { ERDActions } from '../../../frontend/state_management/Actions/ERDActions'; + +// const initialState: ERDState[] = []; +// let actionObject: ERDActions; +// describe('erdReducer', () => { +// beforeEach(() => { +// actionObject = { +// type: 'ADD_TABLE', +// payload: { tableName: 'newTableName' }, +// }; +// }); + +// it('should return the initial state without modifying it at all', () => { +// expect(erdReducer(initialState, actionObject)).not.toBe(initialState); +// }); + +// it('should handle ADD_TABLE', () => { +// console.log(erdReducer(initialState, actionObject)); +// }); +// }); diff --git a/__tests__/frontend/lib/queries.spec.ts b/__tests__/frontend/lib/queries.spec.ts index 2469d3ad..f56b7eeb 100644 --- a/__tests__/frontend/lib/queries.spec.ts +++ b/__tests__/frontend/lib/queries.spec.ts @@ -1,7 +1,8 @@ -window.require = ((str: string) => str) as any import * as queries from '../../../frontend/lib/queries'; import type { QueryData } from '../../../frontend/types'; +window.require = ((str: string) => str) as any + const first: Partial = { label: 'firstQuery', db: 'firstDb', @@ -175,7 +176,7 @@ describe('setCompare', () => { }); it('should set execution time to 0 if given false for isCompared', 
() => { - let qs:any = { [`${queries.key(first as QueryData)}`]: first }; + const qs:any = { [`${queries.key(first as QueryData)}`]: first }; expect(Object.keys(collection).length).toBe(0); const newCollection = queries.setCompare({}, qs, first as QueryData, true); expect(Object.keys(newCollection).length).toBe(1); diff --git a/assets/readmeImages/ERD_before_after.png b/assets/readmeImages/ERD_before_after.png new file mode 100644 index 00000000..b4e904f9 Binary files /dev/null and b/assets/readmeImages/ERD_before_after.png differ diff --git a/assets/readmeImages/erdArchitecture.png b/assets/readmeImages/erdArchitecture.png new file mode 100644 index 00000000..b5fa5b0d Binary files /dev/null and b/assets/readmeImages/erdArchitecture.png differ diff --git a/backend/BE_types.ts b/backend/BE_types.ts index a9b96959..3832a34f 100644 --- a/backend/BE_types.ts +++ b/backend/BE_types.ts @@ -1,6 +1,8 @@ /** * This file contains common types that need to be used across the backend */ +import { PoolOptions } from 'mysql2'; +import { PoolConfig } from 'pg'; import { UpdatesObjType } from '../frontend/types'; export interface ColumnObj { @@ -26,12 +28,12 @@ export interface TableDetails { } export interface DBList { databaseConnected: { - PG: boolean, - MySQL: boolean, - RDSPG: boolean, - RDSMySQL: boolean, - SQLite: boolean, - directPGURI: boolean, + PG: boolean; + MySQL: boolean; + RDSPG: boolean; + RDSMySQL: boolean; + SQLite: boolean; + directPGURI: boolean; }; databaseList: dbDetails[]; tableList: TableDetails[]; @@ -49,7 +51,7 @@ export enum DBType { MySQL = 'mysql', RDSPostgres = 'rds-pg', RDSMySQL = 'rds-mysql', - CloudDB = 'cloud-database', //added for cloud dbs + CloudDB = 'cloud-database', // added for cloud dbs SQLite = 'sqlite', directPGURI = 'directPGURI', } @@ -64,12 +66,22 @@ export enum LogType { } export interface DocConfigFile { - mysql: { user: string; password: string; port: number }; - pg: { user: string; password: string; port: number }; - rds_mysql: { 
user: string; password: string; port: number; host: string }; - rds_pg: { user: string; password: string; port: number; host: string }; - sqlite: { path: '' }; - directPGURI: { uri: '' }; + mysql_options: { user: string; password: string; port: number } & PoolOptions; + pg_options: { user: string; password: string; port: number } & PoolConfig; + rds_mysql_options: { + user: string; + password: string; + port: number; + host: string; + } & PoolOptions; + rds_pg_options: { + user: string; + password: string; + port: number; + host: string; + } & PoolConfig; + sqlite_options: { filename: string }; + directPGURI_options: { connectionString: string } & PoolConfig; } type dbsInputted = { @@ -93,27 +105,19 @@ type configExists = { type combined = { dbsInputted: dbsInputted; configExists: configExists; -} +}; + +export interface MysqlQueryResolve {} -export interface DBFunctions { +export interface DBFunctions extends DocConfigFile { pg_uri: string; - curPG_DB: string; - curMSQL_DB: string; - curRDS_MSQL_DB: any; - curRDS_PG_DB: { - user: string; - password: string; - host: string; - }; - curSQLite_DB: { path: string }; - curdirectPGURI_DB: string; dbsInputted: dbsInputted; setBaseConnections: () => Promise; - query: (text: string, params: (string | number)[], dbType: DBType) => void; + query: (text: string, params: (string | number)[], dbType: DBType) => any; connectToDB: (db: string, dbType?: DBType) => Promise; disconnectToDrop: (dbType: DBType) => Promise; - getLists: (dbName: string, dbType?: DBType) => Promise; + getLists: (dbName?: string, dbType?: DBType) => Promise; getTableInfo: (tableName: string, dbType: DBType) => Promise; getDBNames: (dbType: DBType) => Promise; getColumnObjects: (tableName: string, dbType: DBType) => Promise; @@ -126,4 +130,4 @@ export interface QueryPayload { sqlString: string; selectedDb: string; runQueryNumber: number; -} \ No newline at end of file +} diff --git a/backend/channels.ts b/backend/channels.ts deleted file mode 100644 index 
db3eb178..00000000 --- a/backend/channels.ts +++ /dev/null @@ -1,846 +0,0 @@ -/* eslint-disable no-console */ -/* eslint-disable prefer-destructuring */ -import { ipcMain } from 'electron'; // IPCMain: Communicate asynchronously from the main process to renderer processes -import path from 'path'; -import fs from 'fs'; -import os from 'os'; -import helperFunctions from './helperFunctions'; -import generateDummyData from './DummyD/dummyDataMain'; -import { ColumnObj, DBList, DummyRecords, DBType, LogType, QueryPayload } from './BE_types'; -import backendObjToQuery from './ertable-functions'; -import logger from './Logging/masterlog'; - -// import { Integer } from 'type-fest'; - -const db = require('./models'); -const docConfig = require('./_documentsConfig'); - -const { - createDBFunc, - dropDBFunc, - explainQuery, - runSQLFunc, - runTARFunc, - runFullCopyFunc, - runHollowCopyFunc, - promExecute, -} = helperFunctions; - -// *************************************************** IPC Event Listeners *************************************************** // -interface Feedback { - type: string; - message: string; -} - -/** - * Handles set-config requests from frontend - * triggered whenever save is pressed on the config/login page - * establishes connections to database, logs failed connections, sends contents of config file - */ -ipcMain.handle('set-config', async (event, configObj) => { - docConfig.saveConfig(configObj); // saves login info from frontend into config file - - - db.setBaseConnections() // tries to log in using config data - .then(({ dbsInputted, configExists }) => { - - // error handling for trying and failing to log in to databases - let errorStr = ''; - const dbs = Object.keys(dbsInputted); - dbs.forEach(e => { - if (!dbsInputted[e] && configExists[e]) errorStr += ` ${e}`; - }) - if (errorStr.length) { - const err = `Unsuccessful login(s) for ${errorStr.toUpperCase()} database(s)`; - const feedback: Feedback = { - type: 'error', - message: err, - }; - 
event.sender.send('feedback', feedback); - } - logger('Successfully reset base connections', LogType.SUCCESS); - db.getLists().then((data: DBList) => { - event.sender.send('db-lists', data); // used to populate sidebar - }); - }) - .catch((err) => { - logger( - `Error trying to set base connections on 'reset-connection': ${err.message}`, - LogType.ERROR - ); - const feedback: Feedback = { - type: 'error', - message: err, - }; - event.sender.send('feedback', feedback); - logger( - "Sent 'feedback' from 'reset-connection' (Note: This is an ERROR!)", - LogType.ERROR - ); - }) - .finally(() => { - event.sender.send('get-config', docConfig.getFullConfig()); - }); -}); - -/** - * Handles get-config request from frontend - * sends configuration from config file - */ -ipcMain.handle('get-config', async (event) => { // asdf is configObj used? - event.sender.send('get-config', docConfig.getFullConfig()); -}); - -/** - * Handles return-db-list request from the frontend - * establishes connection to databases, then gets listObj from getLists, then sends to frontend - */ -ipcMain.on('return-db-list', (event) => { - logger( - "Received 'return-db-list' (Note: No Async being sent here)", - LogType.RECEIVE - ); - - db.setBaseConnections() - .then(() => { - db.getLists() - .then((data: DBList) => { - event.sender.send('db-lists', data); - logger("Sent 'db-lists' from 'return-db-list'", LogType.SEND); - }) - .catch((err) => { - logger( - `Error trying to get lists on 'return-db-list': ${err.message}`, - LogType.ERROR - ); - const feedback: Feedback = { - type: 'error', - message: err, - }; - event.sender.send('feedback', feedback); - logger( - "Sent 'feedback' from 'return-db-list' (Note: This is an ERROR!)", - LogType.SEND - ); - }); - }) - .catch((err) => { - logger( - `Error trying to set base connections on 'return-db-list': ${err.message}`, - LogType.ERROR - ); - const feedback: Feedback = { - type: 'error', - message: err, - }; - event.sender.send('feedback', feedback); - 
logger( - "Sent 'feedback' from 'return-db-list' (Note: This is an ERROR!)", - LogType.SEND - ); - }); -}); - - -/** - * Handles select-db request from frontend - * connect to selected db, then get object containing a list of all databases and a list of tables for the selected database, and sends to frontend - */ -ipcMain.handle( - 'select-db', - async (event, dbName: string, dbType: DBType): Promise => { - logger("Received 'select-db'", LogType.RECEIVE); - - event.sender.send('async-started'); - try { - await db.connectToDB(dbName, dbType); - - // send updated db info - const dbsAndTables: DBList = await db.getLists(dbName, dbType); - event.sender.send('db-lists', dbsAndTables); - logger("Sent 'db-lists' from 'select-db'", LogType.SEND); - } finally { - event.sender.send('async-complete'); - } - } -); - - -/** - * Handler for drop-db requests from frontend - * Drops the passed in DB and returns updated DB List - */ -ipcMain.handle( - 'drop-db', - async ( - event, - dbName: string, - currDB: boolean, - dbType: DBType - ): Promise => { - logger("Received 'drop-db'", LogType.RECEIVE); - - event.sender.send('async-started'); - - try { - // if deleting currently connected db, disconnect from db - // end pool connection - await db.disconnectToDrop(dbType); - // reconnect to database server, but not the db that will be dropped - await db.connectToDB('', dbType); - - // IN CASE OF EMERGENCY USE THIS CODE TO DROP DATABASES - // WILL THROW UNCAUGHT ERRORS LAST RESORT ONLY!!! 
- // await db.connectToDB('', dbType); - // if(dbType === DBType.Postgres){ - // await db.query(`UPDATE pg_database SET datallowconn = 'false' WHERE datname = '${dbName}'`, null, dbType); - // await db.query(` - // SELECT pid, pg_terminate_backend(pid) - // FROM pg_stat_activity - // WHERE datname = '${dbName}' AND pid <> pg_backend_pid(); - // `, null, dbType); - // // await db.closeTheDB(dbName, dbType); - // } - - const dropDBScript = dropDBFunc(dbName, dbType); - if (dbType !== DBType.SQLite) await db.query(dropDBScript, null, dbType); - - // send updated db info - const dbsAndTables: DBList = await db.getLists(dbName, dbType); - event.sender.send('db-lists', dbsAndTables); - logger("Sent 'db-lists' from 'drop-db'", LogType.SEND); - } finally { - event.sender.send('async-complete'); - } - } -); - -interface DuplicatePayload { - newName: string; - sourceDb: string; - withData: boolean; -} - -/** - * Handle duplicate-db events sent from frontend. Cleans up after itself in - * the event of failure - */ -ipcMain.handle( - 'duplicate-db', - async ( - event, - { newName, sourceDb, withData }: DuplicatePayload, - dbType: DBType - ) => { - logger( - `Received 'duplicate-db'" of dbType: ${dbType} and: `, - LogType.RECEIVE - ); - - event.sender.send('async-started'); - - const tempFilePath = path.resolve( - `${docConfig.getConfigFolder()}/`, - `temp_${newName}.sql` - ); - - try { - // dump database to temp file - const dumpCmd = withData - ? 
runFullCopyFunc(sourceDb, tempFilePath, dbType) - : runHollowCopyFunc(sourceDb, tempFilePath, dbType); - try { - await promExecute(dumpCmd); - } catch (e) { - throw new Error( - `Failed to dump ${sourceDb} to temp file at ${tempFilePath}` - ); - } - - // create new empty database - try { - await db.query(createDBFunc(newName, dbType), null, dbType); - } catch (e) { - throw new Error(`Failed to create Database`); - } - - // run temp sql file on new database - try { - await promExecute(runSQLFunc(newName, tempFilePath, dbType)); - } catch (e: any) { - // cleanup: drop created db - logger(`Dropping duplicate db because: ${e.message}`, LogType.WARNING); - const dropDBScript = dropDBFunc(newName, dbType); - await db.query(dropDBScript, null, dbType); - - throw new Error('Failed to populate newly created database'); - } - - // update frontend with new db list - const dbsAndTableInfo: DBList = await db.getLists('', dbType); - event.sender.send('db-lists', dbsAndTableInfo); - logger("Sent 'db-lists' from 'duplicate-db'", LogType.SEND); - } finally { - // clean up temp file - try { - fs.unlinkSync(tempFilePath); - } catch (e) { - event.sender.send('feedback', { - type: 'error', - message: `Failed to cleanup temp files. ${tempFilePath} could not be removed.`, - }); - } - - event.sender.send('async-complete'); - } - } -); - -interface ImportPayload { - newDbName: string; - filePath: string; -} - -/** - * Handle import-db events sent from frontend. 
Cleans up after itself - * in the event of failure - */ -ipcMain.handle( - 'import-db', - async (event, { newDbName, filePath }: ImportPayload, dbType: DBType) => { - logger(`Received 'import-db'" of dbType: ${dbType} and: `, LogType.RECEIVE); - event.sender.send('async-started'); - - try { - // create new empty db - await db.query(createDBFunc(newDbName, dbType), null, dbType); - - const ext = path.extname(filePath).toLowerCase(); - if (ext !== '.sql' && ext !== '.tar') - throw new Error('Invalid file extension'); - - const restoreCmd = - ext === '.sql' - ? runSQLFunc(newDbName, filePath, dbType) - : runTARFunc(newDbName, filePath, dbType); - - try { - // populate new db with data from file - await promExecute(restoreCmd); - } catch (e: any) { - // cleanup: drop created db - logger(`Dropping imported db because: ${e.message}`, LogType.WARNING); - const dropDBScript = dropDBFunc(newDbName, dbType); - await db.query(dropDBScript, null, dbType); - - throw new Error('Failed to populate database'); - } - - // update frontend with new db list - const dbsAndTableInfo: DBList = await db.getLists('', dbType); - event.sender.send('db-lists', dbsAndTableInfo); - logger("Sent 'db-lists' from 'import-db'", LogType.SEND); - } finally { - event.sender.send('async-complete'); - } - } -); - - -/* -look at this to check the explain might not support query error -*/ -/** - * Handle run-query events passed from the front-end, and send back an updated DB List - * If error occurs, will rollback to pre-query state - */ -ipcMain.handle( - 'run-query', - async ( - event, - { targetDb, sqlString, selectedDb, runQueryNumber }: QueryPayload, - dbType: DBType - ) => { - logger( - "Received 'run-query'", - LogType.RECEIVE, - `selectedDb: ${selectedDb} and dbType: ${dbType} and runQueryNumber: ${runQueryNumber}` - ); - event.sender.send('async-started'); - const arr: any[] = []; // array of sample - const numberOfSample: number = runQueryNumber; - let totalSampleTime: number = 0; - let 
minimumSampleTime: number = 0; - let maximumSampleTime: number = 0; - let averageSampleTime: number = 0; - - function parseExplainExplanation(explain) { - const regex = /actual time=(\d+\.\d+)\.\.(\d+\.\d+) rows=\d+ loops=(\d+)/g; - const matches: any[] = Array.from(explain.matchAll(regex)); - let result: number = 0; - - for (let i = 0; i < matches.length; i += 1) { - result += (parseFloat(matches[i][2]) - parseFloat(matches[i][1])) * parseFloat(matches[i][3]); - } - return result; - } - - try { - let error: string | undefined; - // connect to db to run query - - if (selectedDb !== targetDb) await db.connectToDB(targetDb, dbType); - - // Run Explain - let explainResults; - try { - // console.log('start of try'); - for (let i = 0; i < numberOfSample; i++) { - // console.log('start of for loopo'); - if (dbType === DBType.Postgres) { - const results = await db.query( - explainQuery(sqlString, dbType), - null, - dbType - ); - - // console.log('query results', results); - // console.log('explain query results', results[1].rows); - // console.log('query plan including sample time data', results[1].rows[0]["QUERY PLAN"][0]); - - explainResults = results[1].rows; - const eachSampleTime: any = results[1].rows[0]["QUERY PLAN"][0]['Planning Time'] + results[1].rows[0]["QUERY PLAN"][0]['Execution Time']; - arr.push(eachSampleTime); - totalSampleTime += eachSampleTime; - - } else if (dbType === DBType.MySQL) { - const results = await db.query( - explainQuery(sqlString, dbType), - null, - dbType - ); - const eachSampleTime: any = parseExplainExplanation(results[0][0].EXPLAIN); - arr.push(eachSampleTime); - totalSampleTime += eachSampleTime; - - // hard coded explainResults just to get it working for now - explainResults = { - Plan: { - 'Node Type': 'Seq Scan', - 'Parallel Aware': false, - 'Async Capable': false, - 'Relation Name': 'newtable1', - Schema: 'public', - Alias: 'newtable1', - 'Startup Cost': 0, - 'Total Cost': 7, - 'Plan Rows': 200, - 'Plan Width': 132, - 'Actual 
Startup Time': 0.015, - 'Actual Total Time': 0.113, - 'Actual Rows': 200, - 'Actual Loops': 1, - Output: ['newcolumn1'], - 'Shared Hit Blocks': 5, - 'Shared Read Blocks': 0, - 'Shared Dirtied Blocks': 0, - 'Shared Written Blocks': 0, - 'Local Hit Blocks': 0, - 'Local Read Blocks': 0, - 'Local Dirtied Blocks': 0, - 'Local Written Blocks': 0, - 'Temp Read Blocks': 0, - 'Temp Written Blocks': 0 - }, - Planning: { - 'Shared Hit Blocks': 64, - 'Shared Read Blocks': 0, - 'Shared Dirtied Blocks': 0, - 'Shared Written Blocks': 0, - 'Local Hit Blocks': 0, - 'Local Read Blocks': 0, - 'Local Dirtied Blocks': 0, - 'Local Written Blocks': 0, - 'Temp Read Blocks': 0, - 'Temp Written Blocks': 0 - }, - 'Planning Time': 9999, - Triggers: [], - 'Execution Time': 9999 - }; - - } else if (dbType === DBType.SQLite) { - const sampleTime = await db.sampler(sqlString); - arr.push(sampleTime); - totalSampleTime += sampleTime - - // hard coded explainResults just to get it working for now - explainResults = { - Plan: { - 'Node Type': 'Seq Scan', - 'Parallel Aware': false, - 'Async Capable': false, - 'Relation Name': 'newtable1', - Schema: 'public', - Alias: 'newtable1', - 'Startup Cost': 0, - 'Total Cost': 7, - 'Plan Rows': 200, - 'Plan Width': 132, - 'Actual Startup Time': 0.015, - 'Actual Total Time': 0.113, - 'Actual Rows': 200, - 'Actual Loops': 1, - Output: ['newcolumn1'], - 'Shared Hit Blocks': 5, - 'Shared Read Blocks': 0, - 'Shared Dirtied Blocks': 0, - 'Shared Written Blocks': 0, - 'Local Hit Blocks': 0, - 'Local Read Blocks': 0, - 'Local Dirtied Blocks': 0, - 'Local Written Blocks': 0, - 'Temp Read Blocks': 0, - 'Temp Written Blocks': 0 - }, - Planning: { - 'Shared Hit Blocks': 64, - 'Shared Read Blocks': 0, - 'Shared Dirtied Blocks': 0, - 'Shared Written Blocks': 0, - 'Local Hit Blocks': 0, - 'Local Read Blocks': 0, - 'Local Dirtied Blocks': 0, - 'Local Written Blocks': 0, - 'Temp Read Blocks': 0, - 'Temp Written Blocks': 0 - }, - 'Planning Time': 9999, - Triggers: [], - 
'Execution Time': 9999 - }; - } - } - // get 5 decimal points for sample time - minimumSampleTime = Math.round(Math.min(...arr) * 10 ** 5) / 10 ** 5; - maximumSampleTime = Math.round(Math.max(...arr) * 10 ** 5) / 10 ** 5; - averageSampleTime = Math.round((totalSampleTime / numberOfSample) * 10 ** 5) / 10 ** 5; - totalSampleTime = Math.round(totalSampleTime * 10 ** 5) / 10 ** 5; - } catch (e) { - error = `Failed to get Execution Plan. EXPLAIN might not support this query.`; - } - - // Run Query - let returnedRows; - try { - const results = await db.query(sqlString, null, dbType); - if (dbType === DBType.MySQL) { - // console.log('mySQL results', results); - returnedRows = results[0]; - // console.log('returnedRows in channels for MySQL', returnedRows); - } - if (dbType === DBType.Postgres) { - // console.log('results in channels for Postgres', results); - returnedRows = results.rows; - // console.log('returnedRows in channels for Postgres', returnedRows); - } - if (dbType === DBType.SQLite) { - returnedRows = results; - // console.log('returnedRows in channels for SQLite', returnedRows) - } - } catch (e: any) { - error = e.toString(); - } - - return { - db: targetDb, - sqlString, - returnedRows, - explainResults, - error, - numberOfSample, - totalSampleTime, - minimumSampleTime, - maximumSampleTime, - averageSampleTime, - }; - } finally { - // connect back to initialDb - - if (selectedDb !== targetDb) await db.connectToDB(selectedDb, dbType); - - // send updated db info in case query affected table or database information - // must be run after we connect back to the originally selected so tables information is accurate - const dbsAndTables: DBList = await db.getLists('', dbType); - event.sender.send('db-lists', dbsAndTables); - logger( - "Sent 'db-lists' from 'run-query'", - LogType.SEND, - `selectedDb: ${selectedDb} -- targetDb: ${targetDb} -- dbType: ${dbType}` - ); - event.sender.send('async-complete'); - } - } -); - -interface ExportPayload { - sourceDb: 
string; -} - -ipcMain.handle( - 'export-db', - async (event, { sourceDb }: ExportPayload, dbType: DBType) => { - logger("Received 'export-db'", LogType.RECEIVE); - event.sender.send('async-started'); - - // store temporary file in user desktop - const FilePath = path.resolve(os.homedir(), 'desktop', `${sourceDb}.sql`); - - let feedback: Feedback = { - type: '', - message: '', - }; - - try { - // dump database to new file - const dumpCmd = runFullCopyFunc(sourceDb, FilePath, dbType); - - try { - await promExecute(dumpCmd); - feedback = { - type: 'success', - message: `${sourceDb} Schema successfully exported to ${FilePath}`, - }; - event.sender.send('feedback', feedback); - logger("Sent 'feedback' from 'export-db'", LogType.SEND); - } catch (e) { - throw new Error(`Failed to dump ${sourceDb} to a file at ${FilePath}`); - } - } finally { - event.sender.send('async-complete'); - } - } -); - -interface dummyDataRequestPayload { - dbName: string; - tableName: string; - rows: number; -} - -ipcMain.handle( - // generate dummy data - 'generate-dummy-data', - async (event, data: dummyDataRequestPayload, dbType: DBType) => { - logger("Received 'generate-dummy-data'", LogType.RECEIVE); - // send notice to front end that DD generation has been started - event.sender.send('async-started'); - // console.log('genereatedata ipcMain dbType: ', dbType) - let feedback: Feedback = { - type: '', - message: '', - }; - try { - // console.log('data in generate-dummy-data', data); // gets here fine - - // Retrieves the Primary Keys and Foreign Keys for all the tables - const tableInfo: ColumnObj[] = await db.getTableInfo( - data.tableName, - dbType - ); // passed in dbType to second argument - // console.log('tableInfo in generate-dummy-data', tableInfo); // working - - // generate dummy data - const dummyArray: DummyRecords = await generateDummyData( - tableInfo, - data.rows - ); - // console.log('dummyArray output: ', dummyArray) - // generate insert query string to insert dummy records 
- const columnsStringified = '(' - .concat(dummyArray[0].join(', ')) - .concat(')'); - let insertQuery = `INSERT INTO ${data.tableName} ${columnsStringified} VALUES `; - for (let i = 1; i < dummyArray.length - 1; i += 1) { - const recordStringified = '(' - .concat(dummyArray[i].join(', ')) - .concat('), '); - insertQuery = insertQuery.concat(recordStringified); - } - const lastRecordStringified = '(' - .concat(dummyArray[dummyArray.length - 1].join(', ')) - .concat(');'); - insertQuery = insertQuery.concat(lastRecordStringified); - // insert dummy records into DB - await db.query('Begin;', null, dbType); - await db.query(insertQuery, null, dbType); - await db.query('Commit;', null, dbType); - feedback = { - type: 'success', - message: 'Dummy data successfully generated.', - }; - } catch (err: any) { - // rollback transaction if there's an error in insertion and send back feedback to FE - await db.query('Rollback;', null, dbType); - feedback = { - type: 'error', - message: err, - }; - } finally { - // console.log('dbType inside generate-dummy-data', dbType) - // send updated db info in case query affected table or database information - const dbsAndTables: DBList = await db.getLists('', dbType); // dummy data clear error is from here - // console.log('dbsAndTables in generate-dummy-data', dbsAndTables) - event.sender.send('db-lists', dbsAndTables); // dummy data clear error is from here - - // send feedback back to FE - event.sender.send('feedback', feedback); - - // send notice to FE that DD generation has been completed - event.sender.send('async-complete'); - - logger( - "Sent 'db-lists and feedback' from 'generate-dummy-data'", - LogType.SEND - ); - } - } -); - -// handle initialization of a new schema from frontend (newSchemaView) -interface InitializePayload { - newDbName: string; -} - -ipcMain.handle( - 'initialize-db', - async (event, payload: InitializePayload, dbType: DBType) => { - logger( - `Received 'initialize-db' of dbType: ${dbType} and: `, - 
LogType.RECEIVE, - payload - ); - event.sender.send('async-started'); - const { newDbName } = payload; - - try { - // create new empty db - await db.query(createDBFunc(newDbName, dbType), null, dbType); - // connect to initialized db - await db.connectToDB(newDbName, dbType); - - // update DBList in the sidebar to show this new db - const dbsAndTableInfo: DBList = await db.getLists(newDbName, dbType); - event.sender.send('db-lists', dbsAndTableInfo); - /// - logger("Sent 'db-lists' from 'initialize-db'", LogType.SEND); - } catch (e) { - const err = `Unsuccessful DB Creation for ${newDbName} in ${dbType} database`; - const feedback: Feedback = { - type: 'error', - message: err, - }; - event.sender.send('feedback', feedback); - // in the case of an error, delete the created db - // const dropDBScript = dropDBFunc(newDbName, dbType); - // await db.query(dropDBScript, null, dbType); - // throw new Error('Failed to initialize new database'); - } finally { - event.sender.send('async-complete'); - } - } -); - -// handle updating schemas from the frontend (newSchemaView) -interface UpdatePayload { - // targetDb: string; - sqlString: string; - selectedDb: string; -} - -// Run query passed from the front-end, and send back an updated DB List -// DB will rollback if query is unsuccessful -ipcMain.handle( - 'update-db', - async (event, { sqlString, selectedDb }: UpdatePayload, dbType: DBType) => { - logger("Received 'update-db'", LogType.RECEIVE); - event.sender.send('async-started'); - - try { - // connect to db to run query - await db.connectToDB(selectedDb, dbType); - - // Run Query - try { - await db.query(sqlString, null, dbType); - } catch (e) { - if (e) throw new Error('Failed to update schema'); - } - } finally { - // send updated db info in case query affected table or database information - // must be run after we connect back to the originally selected so tables information is accurate - const dbsAndTables: DBList = await db.getLists('', dbType); - 
event.sender.send('db-lists', dbsAndTables); - logger("Sent 'db-lists' from 'update-db'", LogType.SEND); - - event.sender.send('async-complete'); - } - } -); - -// Generate and run query from react-flow ER diagram -ipcMain.handle( - 'ertable-schemaupdate', - async (event, backendObj, dbName: string, dbType: DBType) => { - logger( - `Received 'ertable-schemaupdate' with dbType: ${dbType}, dbName: ${dbName}, and backendObj: `, - LogType.RECEIVE, - backendObj - ); - // send notice to front end that schema update has started - event.sender.send('async-started'); - - let feedback: Feedback = { - type: '', - message: '', - }; - try { - // Generates query from backendObj - const query = backendObjToQuery(backendObj, dbType); - - // run sql command - await db.query('Begin;', null, dbType); - await db.query(query, null, dbType); - await db.query('Commit;', null, dbType); - feedback = { - type: 'success', - message: 'Database updated successfully.', - }; - return 'success'; - } catch (err: any) { - // rollback transaction if there's an error in update and send back feedback to FE - await db.query('Rollback;', null, dbType); - - feedback = { - type: 'error', - message: err, - }; - } finally { - // send updated db info - - const updatedDb: DBList = await db.getLists(dbName, dbType); - event.sender.send('db-lists', updatedDb); - - // send feedback back to FE - event.sender.send('feedback', feedback); - - // send notice to FE that schema update has been completed - event.sender.send('async-complete'); - - logger( - "Sent 'db-lists and feedback' from 'ertable-schemaupdate'", - LogType.SEND - ); - } - } -); diff --git a/backend/main.ts b/backend/main.ts index a6e2bc9e..1bca986d 100644 --- a/backend/main.ts +++ b/backend/main.ts @@ -1,25 +1,27 @@ // eslint-disable-next-line import/no-extraneous-dependencies -import { BrowserWindow, Menu, app, session } from 'electron'; // added session here +import * as path from 'path'; +import * as url from 'url'; +import { app, BrowserWindow, 
Menu } from 'electron'; // added session here +import fixPath from 'fix-path'; + +import MainMenu from './mainMenu'; const dev: boolean = process.env.NODE_ENV === 'development'; -const os = require('os'); -const path = require('path'); -const url = require('url'); -const fixPath = require('fix-path'); -const MainMenu = require('./mainMenu'); // requiring channels file to initialize event listeners -require('./channels'); +// require('./_DEPRECATED_channels'); +require('./src/ipcHandlers/index'); fixPath(); -// Keep a global reference of the window objects, if you don't, the window will be closed automatically when the JavaScript object is garbage collected. +// Keep a global reference of the window objects, if you don't, +// the window will be closed automatically when the JavaScript object is garbage collected. let mainWindow: BrowserWindow | null; // for react dev tools to work with electron // download react devtools and save them on desktop in folder named ReactDevTools // devtools: https://github.com/facebook/react/issues/25843 -// https://github.com/mondaychen/react/raw/017f120369d80a21c0e122106bd7ca1faa48b8ee/packages/react-devtools-extensions/ReactDevTools.zip +// https://github.com/mondaychen/react/raw/017f120369d80a21c0e122106bd7ca1faa48b8ee/packages/react-devtools-extensions/ReactDevTools.zip // ******************** Comment out when done ******************** // // const reactDevToolsPath = path.join(os.homedir(), '/Desktop/ReactDevTools'); // app.whenReady().then(async () => { @@ -35,8 +37,7 @@ process.on('uncaughtException', (error) => { console.error('Uncaught Exception:', error); }); - - +// this creates the new browserWindow. Had to delete remoteprocess from webPrefences since it was deprecated. It allowed driect access to remote objects and APIs in this main process, so instead we implement ipcRenderer.invoke. 
WebPreferences nodeintegration and contextisolation are set respectively to ensure api's can be used throughout the entire program without contextbridging function createWindow() { mainWindow = new BrowserWindow({ width: 1800, @@ -45,7 +46,7 @@ function createWindow() { minHeight: 720, title: 'SeeQR', show: false, - webPreferences: { nodeIntegration: true, enableRemoteModule: true }, + webPreferences: { nodeIntegration: true, contextIsolation: false }, icon: path.join(__dirname, '../../assets/logo/seeqr_dock.png'), }); @@ -87,7 +88,8 @@ app.on('ready', createWindow); // Quit when all windows are closed for Windows and Linux app.on('window-all-closed', () => { - // On macOS it is common for applications to stay active on their menu bar when the use closes the window + // On macOS it is common for applications to stay active on their menu bar + // when the use closes the window if (process.platform !== 'darwin') { app.quit(); } else { @@ -96,7 +98,8 @@ app.on('window-all-closed', () => { }); app.on('activate', () => { - // On macOS it's common to re-create a window in the app when the dock icon is clicked and there are no other windows open. + // On macOS it's common to re-create a window in the app when the dock + // icon is clicked and there are no other windows open. if (mainWindow === null) { createWindow(); } diff --git a/backend/mainMenu.ts b/backend/mainMenu.ts index e1f3c3f8..1a4cead8 100644 --- a/backend/mainMenu.ts +++ b/backend/mainMenu.ts @@ -6,40 +6,40 @@ * Each object represents a click action the user can take or something cosemetic like a separate line */ - // import shell so a new browser window can open for external links - const { shell } = require('electron'); +// const { shell } = require('electron'); +import { MenuItem, shell } from 'electron'; // darwin is the process platform for Macs const isMac = process.platform === 'darwin'; -const arr = [ +const arr: MenuItem[] = [ // App menu ...(isMac ? 
[ - { - label: 'Electron', - submenu: [ - { role: 'about' }, - { type: 'separator' }, - { role: 'services' }, - { type: 'separator' }, - { role: 'hide' }, - { role: 'hideothers' }, - { role: 'unhide' }, - { type: 'separator' }, - { role: 'quit' }, - ], - }, - ] + new MenuItem({ + label: 'Electron', + submenu: [ + { role: 'about' }, + { type: 'separator' }, + { role: 'services' }, + { type: 'separator' }, + { role: 'hide' }, + { role: 'hideOthers' }, + { role: 'unhide' }, + { type: 'separator' }, + { role: 'quit' }, + ], + }), + ] : []), // File menu - { + new MenuItem({ label: 'File', submenu: [isMac ? { role: 'close' } : { role: 'quit' }], - }, + }), // Edit menu - { + new MenuItem({ label: 'Edit', submenu: [ { role: 'undo' }, @@ -48,53 +48,48 @@ const arr = [ { role: 'cut' }, { role: 'copy' }, { role: 'paste' }, - ...(isMac - ? [ - { role: 'delete' }, - { role: 'selectAll' }, - { type: 'separator' }, - { - label: 'Speech', - submenu: [{ role: 'startspeaking' }, { role: 'stopspeaking' }], - }, - ] - : [{ role: 'delete' }, { type: 'separator' }, { role: 'selectAll' }]), + { role: 'delete' }, + { role: 'selectAll' }, + { type: 'separator' }, + isMac + ? { + label: 'Speech', + submenu: [{ role: 'startSpeaking' }, { role: 'stopSpeaking' }], + } + : { label: 'Test' }, ], - }, + }), // View menu - { + new MenuItem({ label: 'View', submenu: [ { role: 'reload' }, - { role: 'forcereload' }, - { role: 'toggledevtools' }, + { role: 'forceReload' }, + { role: 'toggleDevTools' }, { type: 'separator' }, - { role: 'resetzoom' }, - { role: 'zoomin' }, - { role: 'zoomout' }, + { role: 'resetZoom' }, + { role: 'zoomIn' }, + { role: 'zoomOut' }, { type: 'separator' }, { role: 'togglefullscreen' }, ], - }, + }), // Window Menu - { + new MenuItem({ label: 'Window', submenu: [ { role: 'minimize' }, { role: 'zoom' }, - ...(isMac - ? [ - { type: 'separator' }, - { role: 'front' }, - { type: 'separator' }, - { role: 'window' }, - ] - : [{ role: 'close' }]), + isMac ? 
{ type: 'separator' } : { label: 'Test' }, + isMac ? { role: 'front' } : { label: 'Test' }, + isMac ? { type: 'separator' } : { label: 'Test' }, + isMac ? { role: 'window' } : { label: 'Test' }, + isMac ? { label: 'Test' } : { role: 'close' }, ], - }, + }), // help menu - { - role: 'Help', + new MenuItem({ + role: 'help', submenu: [ { label: 'Electron Documentation', @@ -109,7 +104,7 @@ const arr = [ }, }, ], - }, + }), ]; -module.exports = arr; +export default arr; diff --git a/backend/models.ts b/backend/models.ts deleted file mode 100644 index 09573961..00000000 --- a/backend/models.ts +++ /dev/null @@ -1,824 +0,0 @@ -import { - ColumnObj, - dbDetails, - TableDetails, - DBList, - DBType, - LogType, - DBFunctions, -} from './BE_types'; -import logger from './Logging/masterlog'; -import pools from './poolVariables'; -import connectionFunctions from './databaseConnections'; - -const fs = require('fs'); -const { performance } = require('perf_hooks'); -const docConfig = require('./_documentsConfig'); - -// eslint-disable-next-line prefer-const - -/** - * This object contains info about the current database being accessed - * login info for rds - * highest level functions for accessing databases - */ -const DBFunctions: DBFunctions = { - pg_uri: '', - curPG_DB: '', - curMSQL_DB: '', - curRDS_MSQL_DB: { - user: '', - password: '', - host: '', - }, - curRDS_PG_DB: { - user: '', - password: '', - host: '', - }, - curSQLite_DB: { path: '' }, - curdirectPGURI_DB: '', - - /** - * Indicates whether the named database has been logged-in to, default to false - */ - dbsInputted: { - pg: false, - msql: false, - rds_pg: false, - rds_msql: false, - sqlite: false, - directPGURI: false, - }, - - /** - * Saves login info to variables. 
Tries to log in to databases using configs - * @returns object containing login status of all database servers - */ - async setBaseConnections() { - const PG_Cred = docConfig.getCredentials(DBType.Postgres); - const MSQL_Cred = docConfig.getCredentials(DBType.MySQL); - this.curRDS_PG_DB = docConfig.getCredentials(DBType.RDSPostgres); - this.curRDS_MSQL_DB = docConfig.getCredentials(DBType.RDSMySQL); - this.curSQLite_DB = docConfig.getCredentials(DBType.SQLite); - this.curdirectPGURI_DB = docConfig.getCredentials(DBType.directPGURI); - const configExists = { - pg: false, - msql: false, - rds_pg: false, - rds_msql: false, - sqlite: false, - directPGURI: false, - } - /* - all the if/else and try/catch in this function are for various forms of error handling. incorrect passwords/removed entries after successful logins - */ - - // RDS PG POOL: truthy values means user has inputted info into config -> try to log in - if ( - this.curRDS_PG_DB.user && - this.curRDS_PG_DB.password && - this.curRDS_PG_DB.host - ) { - try { - configExists.rds_pg = true; - await connectionFunctions.RDS_PG_DBConnect(this.curRDS_PG_DB); - this.dbsInputted.rds_pg = true; - logger('CONNECTED TO RDS PG DATABASE!', LogType.SUCCESS); - } catch (error) { - this.dbsInputted.rds_pg = false; - logger('FAILED TO CONNECT TO RDS PG DATABASE', LogType.ERROR); - } - } else { - configExists.rds_pg = false; - this.dbsInputted.rds_pg = false; - } - - // RDS MSQL POOL: truthy values means user has inputted info into config -> try to log in - if ( - this.curRDS_MSQL_DB.user && - this.curRDS_MSQL_DB.password && - this.curRDS_MSQL_DB.host - ) { - try { - configExists.rds_msql = true; - await connectionFunctions.RDS_MSQL_DBConnect(this.curRDS_MSQL_DB); - - // test query to make sure were connected. needed for the catch statement to hit incase we arent connected. 
- const testQuery = await pools.rds_msql_pool.query('SHOW DATABASES;'); - logger(`CONNECTED TO RDS MYSQL DATABASE!`, LogType.SUCCESS); - this.dbsInputted.rds_msql = true; - } catch (error) { - this.dbsInputted.rds_msql = false; - logger('FAILED TO CONNECT TO RDS MSQL DATABASE', LogType.ERROR); - } - } else { - configExists.rds_msql = false; - this.dbsInputted.rds_msql = false; - } - - // LOCAL PG POOL: truthy values means user has inputted info into config -> try to connect - if (PG_Cred.user && PG_Cred.password) { - this.pg_uri = `postgres://${PG_Cred.user}:${PG_Cred.password}@localhost:${PG_Cred.port}/`; - try { - configExists.pg = true; - await connectionFunctions.PG_DBConnect(this.pg_uri, this.curPG_DB); - logger('CONNECTED TO LOCAL PG DATABASE', LogType.SUCCESS); - this.dbsInputted.pg = true; - } catch (error) { - this.dbsInputted.pg = false; - logger('FAILED TO CONNECT TO LOCAL PG DATABASE', LogType.ERROR); - } - } else { - configExists.pg = false; - this.dbsInputted.pg = false; - } - - // LOCAL MSQL POOL: truthy values means user has inputted info into config -> try to log in - if (MSQL_Cred.user && MSQL_Cred.password) { - try { - configExists.msql = true; - await connectionFunctions.MSQL_DBConnect({ - host: `localhost`, - port: MSQL_Cred.port, - user: MSQL_Cred.user, - password: MSQL_Cred.password, - database: this.curMSQL_DB, - waitForConnections: true, - connectionLimit: 10, - queueLimit: 0, - multipleStatements: true, - }); - - // test query to make sure were connected. needed for the catch statement to hit incase we arent connected. 
- const testQuery = await pools.msql_pool.query('SHOW DATABASES;'); - this.dbsInputted.msql = true; - logger(`CONNECTED TO LOCAL MYSQL DATABASE!`, LogType.SUCCESS); - } catch (error) { - this.dbsInputted.msql = false; - logger('FAILED TO CONNECT TO LOCAL MSQL DATABASE', LogType.ERROR); - } - } else { - configExists.msql = false; - this.dbsInputted.msql = false; - } - - // RDS PG POOL: truthy values means user has inputted info into config -> try to log in - if (this.curSQLite_DB.path) { - try { - configExists.sqlite = true; - await connectionFunctions.SQLite_DBConnect(this.curSQLite_DB.path); - this.dbsInputted.sqlite = true; - logger('CONNECTED TO SQLITE DATABASE!', LogType.SUCCESS); - } catch (error) { - this.dbsInputted.sqlite = false; - logger('FAILED TO CONNECT TO SQLITE DATABASE', LogType.ERROR); - } - } else { - configExists.sqlite = false; - this.dbsInputted.sqlite = false; - } - - return { dbsInputted: this.dbsInputted, configExists } - }, - - - query(text, params, dbType) { - // RUN ANY QUERY - function that will run query on database that is passed in. 
- logger(`Attempting to run query: \n ${text} for: \n ${dbType}`); - - if (dbType === DBType.RDSPostgres) { - return pools.rds_pg_pool.query(text, params).catch((err) => { - logger(err.message, LogType.WARNING); - }); - } - - if (dbType === DBType.RDSMySQL) { - return pools.rds_msql_pool.query(text, params, dbType); - } - - if (dbType === DBType.Postgres) { - return pools.pg_pool.query(text, params).catch((err) => { - logger(err.message, LogType.WARNING); - }); - } - - if (dbType === DBType.MySQL) { - // pools.msql_pool.query(`USE ${this.curMSQL_DB}`); - return pools.msql_pool.query(text, params, dbType); - } - - if (dbType === DBType.SQLite) { - // return pools.sqlite_db.all(text, (err, res) => { - // if (err) logger(err.message, LogType.WARNING); - // console.log('res', res); - // return res; - // }); - return new Promise((resolve, reject) => { - pools.sqlite_db.all(text, (err, res) => { - if (err) { - logger(err.message, LogType.WARNING); - reject(err); - } else { - resolve(res); - } - }) - }) - } - - }, - - sampler(queryString) { - return new Promise((resolve, reject) => { - pools.sqlite_db.run('BEGIN', (err) => { - if (err) { - console.error(err.message); - reject(err); - } else { - const startTime = performance.now(); - pools.sqlite_db.all(queryString, (err, res) => { - if (err) { - console.error(err.message); - reject(err); - } else { - const endTime = performance.now(); - pools.sqlite_db.run('ROLLBACK', (err) => { - if (err) { - console.error(err.message); - reject(err); - } else { - const elapsedTime = endTime - startTime; - // console.log(`Elapsed time: ${elapsedTime} milliseconds`); - resolve(elapsedTime); - } - }); - } - }); - } - }); - }); - }, - - - // asdf check this.curRDS_MSQL_DB typing sometime - /** - * Only connect to one database at a time - * @param db Name of database to connect to - * @param dbType Type of database to connect to - * - */ - async connectToDB(db, dbType) { - // change current Db - if (dbType === DBType.Postgres) { - 
this.curPG_DB = db; - await connectionFunctions.PG_DBConnect(this.pg_uri, db); - } else if (dbType === DBType.MySQL) { - this.curMSQL_DB = db; - await connectionFunctions.MSQL_DBQuery(db); - } else if (dbType === DBType.RDSMySQL) { - this.curRDS_MSQL_DB = db; - await connectionFunctions.RDS_MSQL_DBQuery(db); - } else if (dbType === DBType.RDSPostgres) { - await connectionFunctions.RDS_PG_DBConnect(this.curRDS_PG_DB); - } else if (dbType === DBType.SQLite) { - await connectionFunctions.SQLite_DBConnect(this.curSQLite_DB.path); - } - }, - - /** - * Function to disconnect the passed in database type, in order to drop database - * @param dbType type of database to disconnect - */ - async disconnectToDrop(dbType) { - if (dbType === DBType.Postgres) { - // ending pool - await connectionFunctions.PG_DBDisconnect(); - } - if (dbType === DBType.SQLite) { - try { - // disconnect from and delete sqlite .db file - pools.sqlite_db.close(); - fs.unlinkSync(this.curSQLite_DB.path); - this.curSQLite_DB.path = ''; - } catch (e) { - logger('FAILED TO DELETE SQLITE DB FILE', LogType.ERROR); - - } - } - }, - - /** - * When called with no arguments, returns listObj with this.databaseList populated with data from all logged-in databases. - * When called with a dbName and dbType, additionally populates this.tableList with the tables under the named database - * @param dbName defaults to '' - * @param dbType optional argument - * @returns promise that resolves to a listObj, containing database connection statuses, list of all logged in databases, and optional list of all tables under the named database - */ - async getLists(dbName = '', dbType) { - /* - junaid - this list object is what will be returned at the end of the function. 
function will get lists for all four databases depending on which is logged in - */ - const listObj: DBList = { - databaseConnected: { - PG: false, - MySQL: false, - RDSPG: false, - RDSMySQL: false, - SQLite: false, - directPGURI: false, - }, - databaseList: [], // accumulates lists for each logged-in database - tableList: [], - }; - if (this.dbsInputted.pg) { - try { - const pgDBList = await this.getDBNames(DBType.Postgres); - // console.log('pgDBList', pgDBList) - listObj.databaseConnected.PG = true; - listObj.databaseList = [...listObj.databaseList, ...pgDBList]; - } catch (error) { - logger('COULDNT GET NAMES FROM LOCAL PG', LogType.ERROR); - } - } - - if (this.dbsInputted.msql) { - try { - const msqlDBList = await this.getDBNames(DBType.MySQL); - listObj.databaseConnected.MySQL = true; - listObj.databaseList = [...listObj.databaseList, ...msqlDBList]; - } catch (error) { - logger('COULDNT GET NAMES FROM LOCAL MSQL', LogType.ERROR); - } - } - - if (this.dbsInputted.rds_msql) { - try { - const RDSmsqlDBList = await this.getDBNames(DBType.RDSMySQL); - listObj.databaseConnected.RDSMySQL = true; - listObj.databaseList = [...listObj.databaseList, ...RDSmsqlDBList]; - } catch (error) { - logger('COULDNT GET NAMES FROM RDS MSQL', LogType.ERROR); - } - } - - if (this.dbsInputted.rds_pg) { - try { - const RDSpgDBList = await this.getDBNames(DBType.RDSPostgres); - listObj.databaseConnected.RDSPG = true; - listObj.databaseList = [...listObj.databaseList, ...RDSpgDBList]; - } catch (error) { - logger('COULDNT GET NAMES FROM RDS PG', LogType.ERROR); - } - } - - if (this.dbsInputted.sqlite) { - try { - const sqliteDBList = await this.getDBNames(DBType.SQLite); - // console.log('sqliteDBList', sqliteDBList) - listObj.databaseConnected.SQLite = true; - listObj.databaseList = [...listObj.databaseList, ...sqliteDBList]; - } catch (error) { - logger('COULDNT GET NAMES FROM SQLite DB', LogType.ERROR); - } - } - - if (dbType) { - try { - const listData = await 
this.getDBLists(dbType, dbName); - logger( - `RESOLVING DB DETAILS: Fetched DB names along with Table List for DBType: ${dbType} and DB: ${dbName}`, - LogType.SUCCESS - ); - listObj.tableList = listData; - } catch (error) { - logger( - `COULNT GET DATABASE LIST FOR ${dbType} ${dbName} DATABASE`, - LogType.ERROR - ); - } - } - // console.log(listObj); - return listObj; - }, - - /** - * - * get column objects for the given tableName - * @param tableName name of table to get the columns of - * @param dbType type of database of the table - * @returns - */ - getTableInfo(tableName, dbType) { - // Returns an array of columnObj given a tableName - return this.getColumnObjects(tableName, dbType); - }, - - /** - * Generate a dbList for the inputted database type - * @param dbType server to get database names off of - * @returns promise that resovles to a dbList (array of objects containing db_name, db_size, db_type) - */ - getDBNames(dbType) { - return new Promise((resolve, reject) => { - let query; - if (dbType === DBType.Postgres || dbType === DBType.RDSPostgres) { - let pool; // changes which pool is being queried based on dbType - - if (dbType === DBType.Postgres) pool = pools.pg_pool; - if (dbType === DBType.RDSPostgres) pool = pools.rds_pg_pool; - const dbList: dbDetails[] = []; - /* - junaid - only run queries if pool is made - */ - if (pool) { - query = `SELECT dbs.datname AS db_name, - pg_size_pretty(pg_database_size(dbs.datname)) AS db_size - FROM pg_database dbs - ORDER BY db_name`; - pool - .query(query) - .then((databases) => { - for (let i = 0; i < databases.rows.length; i += 1) { - const data = databases.rows[i]; - const { db_name } = data; - - if ( - db_name !== 'postgres' && - db_name !== 'template0' && - db_name !== 'template1' - ) { - data.db_type = dbType; - dbList.push(data); - } - } - - logger("PG 'getDBNames' resolved.", LogType.SUCCESS); - // resolve with array of db names - resolve(dbList); - }) - .catch((err) => { - reject(err); - }); - } else { - 
resolve(dbList); - } - } else if (dbType === DBType.MySQL || dbType === DBType.RDSMySQL) { - let pool; // changes which pool is being queried based on dbType - if (dbType === DBType.MySQL) pool = pools.msql_pool; - if (dbType === DBType.RDSMySQL) pool = pools.rds_msql_pool; - const dbList: dbDetails[] = []; - /* - only run queries if pool is made - */ - if (pool) { - query = ` - SELECT - S.SCHEMA_NAME db_name, - ROUND(SUM(data_length + index_length) / 1024, 1) db_size - FROM - INFORMATION_SCHEMA.SCHEMATA S - LEFT OUTER JOIN - INFORMATION_SCHEMA.TABLES T ON S.SCHEMA_NAME = T.TABLE_SCHEMA - WHERE - S.SCHEMA_NAME NOT IN ('information_schema' , 'mysql', 'performance_schema', 'sys') - GROUP BY S.SCHEMA_NAME - ORDER BY db_name ASC;`; - - pool - .query(query) - .then((databases) => { - for (let i = 0; i < databases[0].length; i += 1) { - const data = databases[0][i]; - data.db_type = dbType; - data.db_size = data.db_size ? `${data.db_size}KB` : '0KB'; - dbList.push(data); - } - - logger("MySQL 'getDBNames' resolved.", LogType.SUCCESS); - // resolve with array of db names - resolve(dbList); - }) - .catch((err) => { - reject(err); - }); - } else { - resolve(dbList); - } - } else if (dbType === DBType.SQLite) { - const dbList: dbDetails[] = []; - const { path } = this.curSQLite_DB; - const filename = path.slice(path.lastIndexOf('\\') + 1, path.lastIndexOf('.db')); - const stats = fs.statSync(path) - const fileSizeInKB = stats.size / 1024; - // Convert the file size to megabytes (optional) - const data = { db_name: filename, db_size: `${fileSizeInKB}KB`, db_type: DBType.SQLite } - dbList.push(data); - resolve(dbList); - } - }); - }, - - /** - * Generates a list of column objects for the inputted table name - * @param tableName name of table to get column properties of - * @param dbType type of database the table is in - * @returns promise that resolves to array of columnObjects (column_name, data_type, character_maximum_length, is_nullable, constraint_name, constraint_type, 
foreign_table, foreign_column) - */ - getColumnObjects(tableName, dbType) { - let queryString; - const value = [tableName]; - if (dbType === DBType.Postgres || dbType === DBType.RDSPostgres) { - // added to check for RDS - let pool; // changes which pool is being queried based on dbType - if (dbType === DBType.Postgres) pool = pools.pg_pool; - if (dbType === DBType.RDSPostgres) pool = pools.rds_pg_pool; - // query string to get constraints and table references as well - queryString = `SELECT DISTINCT cols.column_name, - cols.data_type, - cols.character_maximum_length, - cols.is_nullable, - kcu.constraint_name, - cons.constraint_type, - rel_kcu.table_name AS foreign_table, - rel_kcu.column_name AS foreign_column - FROM information_schema.columns cols - LEFT JOIN information_schema.key_column_usage kcu - ON cols.column_name = kcu.column_name - AND cols.table_name = kcu.table_name - LEFT JOIN information_schema.table_constraints cons - ON kcu.constraint_name = cons.constraint_name - LEFT JOIN information_schema.referential_constraints rco - ON rco.constraint_name = cons.constraint_name - LEFT JOIN information_schema.key_column_usage rel_kcu - ON rco.unique_constraint_name = rel_kcu.constraint_name - WHERE cols.table_name = $1`; - - // kcu = key column usage = describes which key columns have constraints - // tc = table constraints = shows if constraint is primary key or foreign key - // information_schema.table_constraints show the whole table constraints - - return new Promise((resolve, reject) => { - pool - .query(queryString, value) - .then((result) => { - const columnInfoArray: ColumnObj[] = []; - for (let i = 0; i < result.rows.length; i++) { - columnInfoArray.push(result.rows[i]); - } - resolve(columnInfoArray); - }) - .catch((err) => { - reject(err); - }); - }); - } - if (dbType === DBType.MySQL || dbType === DBType.RDSMySQL) { - // added to check for RDS - - let pool; // changes which pool is being queried based on dbType - if (dbType === DBType.MySQL) pool = 
pools.msql_pool; - if (dbType === DBType.RDSMySQL) pool = pools.rds_msql_pool; - queryString = `SELECT DISTINCT - cols.column_name AS column_name, - cols.data_type AS data_type, - cols.character_maximum_length AS character_maximum_length, - cols.is_nullable AS is_nullable, - kcu.constraint_name AS constraint_name, - cons.constraint_type AS constraint_type, - rel_kcu.table_name AS foreign_table, - rel_kcu.column_name AS foreign_column - FROM information_schema.columns cols - LEFT JOIN information_schema.key_column_usage kcu - ON cols.column_name = kcu.column_name - AND cols.table_name = kcu.table_name - LEFT JOIN information_schema.table_constraints cons - ON kcu.constraint_name = cons.constraint_name - LEFT JOIN information_schema.referential_constraints rco - ON rco.constraint_name = cons.constraint_name - LEFT JOIN information_schema.key_column_usage rel_kcu - ON rco.unique_constraint_name = rel_kcu.constraint_name - WHERE cols.table_name = ?;`; - - return new Promise((resolve, reject) => { - pool - .query(queryString, value) - .then((result) => { - const columnInfoArray: ColumnObj[] = []; - for (let i = 0; i < result[0].length; i += 1) { - columnInfoArray.push(result[0][i]); - } - resolve(columnInfoArray); - }) - .catch((err) => { - reject(err); - }); - }); - } - - if (dbType === DBType.SQLite) { - const sqliteDB = pools.sqlite_db; - queryString = `SELECT - m.name AS table_name, - p.name AS column_name, - p.type AS data_type, - p.[notnull] AS not_null, - p.pk AS pk, - fkl.[table] AS foreign_table, - fkl.[to] AS foreign_column - FROM sqlite_master m - LEFT JOIN pragma_table_info(m.name) p - LEFT JOIN pragma_foreign_key_list(m.name) fkl - ON p.name = fkl.[from] - WHERE m.type = 'table' AND p.type != '' AND m.name = ?`; - - return new Promise((resolve, reject) => { - sqliteDB - .all(queryString, value, (err, rows) => { - if (err) { - reject(err); - } - const columnInfoArray: ColumnObj[] = []; - for (let i = 0; i < rows.length; i++) { - const { column_name, 
data_type, not_null, pk, foreign_table, foreign_column } = rows[i]; - const newColumnObj: ColumnObj = { - column_name, - data_type, - character_maximum_length: data_type.includes(`(`) ? parseInt(data_type.slice(1 + data_type.indexOf(`(`), data_type.indexOf(`)`)), 10) : null, - is_nullable: not_null === 1 ? 'NO' : 'YES', - constraint_type: pk === 1 ? 'PRIMARY KEY' : foreign_table ? 'FOREIGN KEY' : null, - foreign_table, - foreign_column, - } - columnInfoArray.push(newColumnObj); - } - resolve(columnInfoArray); - }) - }) - } - - logger('Trying to use unknown DB Type: ', LogType.ERROR, dbType); - // eslint-disable-next-line no-throw-literal - throw 'Unknown db type'; - }, - - /** - * Uses dbType and dbName to find the tables under the specified database - * @param dbType type of target database - * @param dbName name of target database - * @returns tableList (array of table detail objects containing table_catalog, table_schema, table_name, is_insertable_into, columns?) - */ - getDBLists(dbType, dbName) { - return new Promise((resolve, reject) => { - let query; - const tableList: TableDetails[] = []; - const promiseArray: Promise[] = []; - - if (dbType === DBType.Postgres || dbType === DBType.RDSPostgres) { - let pool; - if (dbType === DBType.Postgres) pool = pools.pg_pool; - if (dbType === DBType.RDSPostgres) pool = pools.rds_pg_pool; - - // querying PG metadata - query = `SELECT - table_catalog, - table_schema, - table_name, - is_insertable_into - FROM information_schema.tables - WHERE table_schema = 'public' or table_schema = 'base' - ORDER BY table_name;`; - pool - .query(query) - .then((tables) => { - for (let i = 0; i < tables.rows.length; i++) { - tableList.push(tables.rows[i]); - promiseArray.push( - this.getColumnObjects(tables.rows[i].table_name, dbType) - ); - } - - Promise.all(promiseArray) - .then((columnInfo) => { - for (let i = 0; i < columnInfo.length; i++) { - tableList[i].columns = columnInfo[i]; - } - logger("PG 'getDBLists' resolved.", 
LogType.SUCCESS); - resolve(tableList); - }) - .catch((err) => { - reject(err); - }); - }) - .catch((err) => { - reject(err); - }); - } else if (dbType === DBType.MySQL || dbType === DBType.RDSMySQL) { - // Notice that TABLE_CATALOG is set to table_schema - // And that TABLE_SCHEMA is set to table_catalog - // This is because PG and MySQL have these flipped (For whatever reason) - - let pool; - if (dbType === DBType.MySQL) pool = pools.msql_pool; - if (dbType === DBType.RDSMySQL) pool = pools.rds_msql_pool; - - let query2 = `SELECT - table_catalog, - table_schema, - table_name, - is_insertable_into - FROM information_schema.tables - WHERE table_schema = 'public' or table_schema = 'base' - ORDER BY table_name;`; - - // query = ` - // SELECT - // TABLE_CATALOG as table_schema, - // TABLE_SCHEMA as table_catalog, - // TABLE_NAME as table_name - // FROM information_schema.tables - // WHERE TABLE_SCHEMA NOT IN('information_schema', 'performance_schema', 'mysql') - // AND TABLE_SCHEMA = '${dbName}' - // ORDER BY table_name;`; - - query = ` - SELECT - TABLE_CATALOG as table_schema, - TABLE_SCHEMA as table_catalog, - TABLE_NAME as table_name - FROM information_schema.tables - WHERE TABLE_SCHEMA NOT IN('information_schema', 'performance_schema', 'mysql', 'sys') - AND TABLE_SCHEMA = '${dbName}' - ORDER BY table_name;`; - - pool - // .query(query2) - .query(query) - .then((tables) => { - for (let i = 0; i < tables[0].length; i++) { - tableList.push(tables[0][i]); - - // Sys returns way too much stuff idk - if (tableList[i].table_schema !== 'sys') { - promiseArray.push( - this.getColumnObjects(tableList[i].table_name, dbType) - ); - } - } - Promise.all(promiseArray) - .then((columnInfo) => { - for (let i = 0; i < columnInfo.length; i++) { - tableList[i].columns = columnInfo[i]; - } - logger("MySQL 'getDBLists' resolved.", LogType.SUCCESS); - resolve(tableList); - }) - .catch((err) => { - reject(err); - }); - }) - .catch((err) => { - reject(err); - }); - } else if (dbType === 
DBType.SQLite) { - const sqliteDB = pools.sqlite_db; - - // querying SQLite metadata - query = `SELECT - m.name AS table_name - FROM sqlite_master m - WHERE m.type = 'table' AND m.name != 'sqlite_stat1' AND m.name != 'sqlite_sequence'`; - sqliteDB - .all(query, (err, rows) => { - if (err) console.error(err.message) - for (let i = 0; i < rows.length; i += 1) { - const newTableDetails: TableDetails = { - table_catalog: this.curSQLite_DB.path.slice(this.curSQLite_DB.path.lastIndexOf('\\') + 1), - table_schema: 'asdf', - table_name: rows[i].table_name, - is_insertable_into: 'asdf', - } - tableList.push(newTableDetails); - promiseArray.push( - this.getColumnObjects(rows[i].table_name, dbType) - ); - } - Promise.all(promiseArray) - .then((columnInfo) => { - for (let i = 0; i < columnInfo.length; i += 1) { - tableList[i].columns = columnInfo[i]; - } - logger("SQLite 'getDBLists' resolved.", LogType.SUCCESS); - resolve(tableList); - }) - .catch((error) => { - reject(error); - }); - }) - } - }); - }, -}; - -module.exports = DBFunctions; diff --git a/backend/poolVariables.ts b/backend/poolVariables.ts deleted file mode 100644 index 33639b8e..00000000 --- a/backend/poolVariables.ts +++ /dev/null @@ -1,20 +0,0 @@ -/* -junaid -this file is just to set the initial variables for all the pools, so that we can shorten the models file down by extracting functions to other pages. this page allows us to use these variables throughout the backend and have them retain thier value. just remember we need to access them and manipulate them using dot notation, so they get passed by reference to this same variable. if we save them to another variable they will get passed by value but the reference is different so it wont match up throughout different files. 
-*/ - -let pg_pool; -let msql_pool; -let rds_pg_pool; -let rds_msql_pool; -let sqlite_db; -let directPGURI_pool; - -export default { - pg_pool, - msql_pool, - rds_pg_pool, - rds_msql_pool, - sqlite_db, - directPGURI_pool, -}; diff --git a/backend/databaseConnections.ts b/backend/src/db/databaseConnections.ts similarity index 69% rename from backend/databaseConnections.ts rename to backend/src/db/databaseConnections.ts index 9b51f679..fbcfe457 100644 --- a/backend/databaseConnections.ts +++ b/backend/src/db/databaseConnections.ts @@ -1,21 +1,19 @@ +import mysql from 'mysql2/promise'; +import { Pool, PoolConfig } from 'pg'; +import sqlite3 from 'sqlite3'; +import { LogType } from '../../../shared/types/dbTypes'; +import logger from '../utils/logging/masterlog'; import pools from './poolVariables'; -import logger from './Logging/masterlog'; -import { LogType } from './BE_types'; - -const { Pool } = require('pg'); -const mysql = require('mysql2/promise'); -const sqlite3 = require('sqlite3').verbose(); - export default { /** * For a local Postgres database. - * Uses passed in arguments to create a URI to create a pool, save it, and begin a connection. + * Uses passed in arguments to create a URI to create a pool, save it, and begin a connection. * @param pg_uri URI created in models.ts using login info * @param db Name of target database that the login has access to. 
Initially empty string */ async PG_DBConnect(pg_uri: string, db: string) { - const newURI = `${pg_uri}${db}`; + const newURI = `${pg_uri}/${db}`; const newPool = new Pool({ connectionString: newURI }); pools.pg_pool = newPool; // await pools.pg_pool.connect(); this is unnecessary for making queries, and causes pg error when trying to drop db @@ -23,7 +21,7 @@ export default { }, async PG_DBDisconnect(): Promise { - await pools.pg_pool.end(); + if (pools.pg_pool) await pools.pg_pool.end(); }, /** @@ -43,17 +41,23 @@ export default { multipleStatements: true, } */ - async MSQL_DBConnect(MYSQL_CREDS: any) { + async MSQL_DBConnect(MYSQL_CREDS: mysql.PoolOptions) { if (pools.msql_pool) await pools.msql_pool.end(); - pools.msql_pool = await mysql.createPool({ ...MYSQL_CREDS }); + pools.msql_pool = mysql.createPool({ ...MYSQL_CREDS }); }, /** * Checks that the MySQL database connection/pool is valid by running short query. * @param db Name of target MySQL database */ - MSQL_DBQuery(db: string) { - pools.msql_pool + MSQL_DBQuery(db: string): Promise { + if (pools.msql_pool === undefined) { + logger(`No active MSQL pool for DB: ${db}`, LogType.ERROR); + return new Promise((res, rej) => { + rej(Error('No active MSQL Pool in attempted query')); + }); + } + return pools.msql_pool .query(`USE ${db};`) .then(() => { logger(`Connected to MSQL DB: ${db}`, LogType.SUCCESS); @@ -67,7 +71,7 @@ export default { * Create pool and connect to an RDS Postgres database using login info. * @param RDS_PG_INFO from config file */ - async RDS_PG_DBConnect(RDS_PG_INFO) { + async RDS_PG_DBConnect(RDS_PG_INFO: PoolConfig) { pools.rds_pg_pool = new Pool({ ...RDS_PG_INFO }); await pools.rds_pg_pool.connect(); }, @@ -77,7 +81,7 @@ export default { * Create/save new pool using login info. 
* @param RDS_MSQL_INFO from config file */ - async RDS_MSQL_DBConnect(RDS_MSQL_INFO) { + async RDS_MSQL_DBConnect(RDS_MSQL_INFO: mysql.PoolOptions) { if (pools.rds_msql_pool) await pools.rds_msql_pool.end(); pools.rds_msql_pool = mysql.createPool({ ...RDS_MSQL_INFO }); }, @@ -86,8 +90,14 @@ export default { * Checks that the MySQL database connection/pool is valid by running short query. * @param db name of target RDS MySQL database */ - RDS_MSQL_DBQuery(db: string) { - pools.rds_msql_pool + RDS_MSQL_DBQuery(db: string): Promise { + if (pools.rds_msql_pool === undefined) { + logger(`No active RDS MSQL pool for DB: ${db}`, LogType.ERROR); + return new Promise((res, rej) => { + rej(Error(`No active RDS MSQL pool for DB: ${db}`)); + }); + } + return pools.rds_msql_pool .query(`USE ${db};`) .then(() => { logger(`Connected to MSQL DB: ${db}`, LogType.SUCCESS); @@ -97,14 +107,12 @@ export default { }); }, - async SQLite_DBConnect(path: string): Promise { + SQLite_DBConnect(path: string): void { const newDB = new sqlite3.Database( path, sqlite3.OPEN_READWRITE | sqlite3.OPEN_CREATE, - (err) => { - if (err) return console.error(err.message); - } + (err) => (err ? console.error(err.message) : null), ); pools.sqlite_db = newDB; - } + }, }; diff --git a/backend/src/db/poolVariables.ts b/backend/src/db/poolVariables.ts new file mode 100644 index 00000000..ff6c0981 --- /dev/null +++ b/backend/src/db/poolVariables.ts @@ -0,0 +1,29 @@ +/* +junaid +this file is just to set the initial variables for all the pools, so that we +can shorten the models file down by extracting functions to other pages. +this page allows us to use these variables throughout the backend and have +them retain thier value. just remember we need to access them and manipulate +them using dot notation, so they get passed by reference to this same variable. +if we save them to another variable they will get passed by value but the +reference is different so it wont match up throughout different files. 
+*/ +import { Pool as PGPool } from 'pg'; +import { Pool as MSQLPool } from 'mysql2/promise'; +import { Database } from 'sqlite3'; + +let pg_pool: PGPool | undefined; +let msql_pool: MSQLPool | undefined; +let rds_pg_pool: PGPool | undefined; +let rds_msql_pool: MSQLPool | undefined; +let sqlite_db: Database | undefined; +let directPGURI_pool: PGPool | undefined; + +export default { + pg_pool, + msql_pool, + rds_pg_pool, + rds_msql_pool, + sqlite_db, + directPGURI_pool, +}; diff --git a/backend/src/ipcHandlers/handlers/authHandler.ts b/backend/src/ipcHandlers/handlers/authHandler.ts new file mode 100644 index 00000000..540dbb25 --- /dev/null +++ b/backend/src/ipcHandlers/handlers/authHandler.ts @@ -0,0 +1,85 @@ +// Types +import { DBList, LogType } from '../../../BE_types'; +import { Feedback } from '../../../../shared/types/utilTypes'; + +// Helpers +import logger from '../../utils/logging/masterlog'; + +// Models used +import docConfig from '../../models/configModel'; +import connectionModel from '../../models/connectionModel'; +import databaseModel from '../../models/databaseModel'; + +// TESTING GROUND: +// import db from '../../../models'; + +/** + * EVENT: 'set-config' + * + * DEFINITION: triggered when frontend 'saves' login information + * + * Process involes the following steps: + * 1. saves login info to config + * 2. run setBaseConnections from connectionModel.ts to make a new config + * 3. 
uses get-config to get this latest config and sends it back to frontend
+ */
+
+export function setConfig(event, configObj) {
+  // at some point change this name docConfig as well
+  docConfig.saveConfig(configObj); // saves login info from frontend into config file
+
+  connectionModel
+    .setBaseConnections() // tries to log in using config data
+    .then(({ dbsInputted, configExists }) => {
+      // error handling for trying and failing to log in to databases
+      let errorStr = '';
+      const dbs = Object.keys(dbsInputted);
+      dbs.forEach((e) => {
+        if (!dbsInputted[e] && configExists[e]) errorStr += ` ${e}`;
+      });
+      if (errorStr.length) {
+        const err = `Unsuccessful login(s) for ${errorStr.toUpperCase()} database(s)`;
+        const feedback: Feedback = {
+          type: 'error',
+          message: err,
+        };
+        event.sender.send('feedback', feedback);
+      }
+      logger('Successfully reset base connections', LogType.SUCCESS);
+      return databaseModel.getLists().then((data: DBList) => {
+        event.sender.send('db-lists', data); // used to populate sidebar
+      });
+    })
+    .catch((err) => {
+      logger(
+        `Error trying to set base connections on 'reset-connection': ${err.message}`,
+        LogType.ERROR,
+      );
+      const feedback: Feedback = {
+        type: 'error',
+        message: err,
+      };
+      event.sender.send('feedback', feedback);
+      logger(
+        "Sent 'feedback' from 'reset-connection' (Note: This is an ERROR!)",
+        LogType.ERROR,
+      );
+    })
+    .finally(() => {
+      event.sender.send('get-config', docConfig.getFullConfig());
+    });
+}
+
+/**
+ * EVENT: 'get-config'
+ *
+ * DEFINITION: gets the current config (used during log in, or for new logins with set-config)
+ *
+ * Process involves the following steps:
+ * 1. send back to event from configModel.ts docConfig.getFullConfig()
+ */
+
+export function getConfig() { // asdf is configObj used? 
+  return docConfig.getFullConfig();
+}
diff --git a/backend/src/ipcHandlers/handlers/dbCRUDHandler.ts b/backend/src/ipcHandlers/handlers/dbCRUDHandler.ts
new file mode 100644
index 00000000..318299dd
--- /dev/null
+++ b/backend/src/ipcHandlers/handlers/dbCRUDHandler.ts
@@ -0,0 +1,204 @@
+// Types
+import { app } from 'electron';
+import { BackendObjType, DBList, DBType, LogType } from '../../../BE_types';
+import { Feedback } from '../../../../shared/types/utilTypes';
+
+// Helpers
+import logger from '../../utils/logging/masterlog';
+import backendObjToQuery from '../../utils/ertable-functions';
+import helperFunctions from '../../utils/helperFunctions';
+
+// Models used
+import connectionModel from '../../models/connectionModel';
+import databaseModel from '../../models/databaseModel';
+import queryModel from '../../models/queryModel';
+// import db from '../../../models';
+
+const { createDBFunc } = helperFunctions;
+
+// Local Types
+
+interface InitializePayload {
+  // handle initialization of a new schema from frontend (newSchemaView)
+  newDbName: string;
+  dbType: DBType;
+}
+
+interface UpdatePayload {
+  // handle updating schemas from the frontend (newSchemaView)
+  // targetDb: string;
+  sqlString: string;
+  selectedDb: string;
+}
+
+/**
+ * EVENT: 'initialize-db'
+ *
+ * DEFINITION: creates a new database on the side
+ *
+ * Process involves the following steps:
+ * 1. create a new database with queryModel.query
+ * 2. connect to this new db with connectionModel.connectToDB
+ * 3. update sidebar with databaseModel.getLists
+ * 4. 
send a feedback back to frontend + */ + +export async function intializeDb(event, payload: InitializePayload) { + const { newDbName, dbType } = payload; + logger( + `Received 'initialize-db' of dbType: ${dbType} and: `, + LogType.RECEIVE, + payload, + ); + event.sender.send('async-started'); + + try { + // create new empty db + await queryModel.query(createDBFunc(newDbName, dbType), [], dbType); + // connect to initialized db + await connectionModel.connectToDB(newDbName, dbType); + + // update DBList in the sidebar to show this new db + const dbsAndTableInfo: DBList = await databaseModel.getLists( + newDbName, + dbType, + ); + event.sender.send('db-lists', dbsAndTableInfo); + /// + logger("Sent 'db-lists' from 'initialize-db'", LogType.SEND); + } catch (e) { + const err = `Unsuccessful DB Creation for ${newDbName} in ${dbType} database`; + const feedback: Feedback = { + type: 'error', + message: err, + }; + event.sender.send('feedback', feedback); + // in the case of an error, delete the created db + // const dropDBScript = dropDBFunc(newDbName, dbType); + // await db.query(dropDBScript, null, dbType); + // throw new Error('Failed to initialize new database'); + } finally { + event.sender.send('async-complete'); + } +} + +/** + * EVENT: 'update-db' (i would rename to db-schemaupdate) + * + * DEFINITION: SIDEBAR: updates selected database table's schema and send back updated list. db will rollback if query is unsuccessful + * + * Process involes the following steps: + * 1. connect to the database to query (!) + * 2. run queryModel.query on the sqlstring + * 3. get tables and return to frontend + * + * ISSUES: + * 1. shouldnt the current database be active? why do we need to connect it again? 
seems like this is an error-checking step + */ + +export async function updateDb( + event, + { sqlString, selectedDb }: UpdatePayload, + dbType: DBType, +) { + logger("Received 'update-db'", LogType.RECEIVE); + event.sender.send('async-started'); + + try { + // connect to db to run query + await connectionModel.connectToDB(selectedDb, dbType); + + // Run Query + try { + await queryModel.query(sqlString, [], dbType); + } catch (e) { + if (e) throw new Error('Failed to update schema'); + } + } finally { + // send updated db info in case query affected table or database information + // must be run after we connect back to the originally selected so tables information is accurate + const dbsAndTables: DBList = await databaseModel.getLists('', dbType); + event.sender.send('db-lists', dbsAndTables); + logger("Sent 'db-lists' from 'update-db'", LogType.SEND); + + event.sender.send('async-complete'); + } +} + +/** + * EVENT: 'ertable-schemaupdate' + * + * DEFINITION: ERD: Generate and run query from react-flow ER diagram + * + * Process involves the following steps: + * 1. query from object passed back with helper backendObjToQuery + * 2. run queryModel.query on the SQL string + * 3. rollback if fail + * 4. get tables and return to frontend + * + * SUGGESTION: honestly, I would make a copy of a file if we are working on it in the ERD view. this way we can just iterate over this copy and replace the original one instead of the current UI of saving twice + * + * QUESTION: what is the event sent back when we hit SAVE in the ER diagram? 
+ */ + +export async function erTableSchemaUpdate( + event, + backendObj: BackendObjType, + dbName: string, + dbType: DBType, +) { + logger( + `backendObj: ${dbType}, \n + dbName: ${dbName}, \n`, + LogType.RECEIVE, + backendObj, + ); + + // send notice to front end that schema update has started + event.sender.send('async-started'); + let feedback: Feedback = { + type: '', + message: '', + }; + try { + // Generates query from backendObj + const query = backendObjToQuery(backendObj, dbType); + // run sql command + await queryModel.query('Begin;', [], dbType); + await queryModel.query(query, [], dbType); + await queryModel.query('Commit;', [], dbType); + feedback = { + type: 'success', + message: 'Database updated successfully.', + }; + return 'success'; + } catch (err: any) { + // rollback transaction if there's an error in update and send back feedback to FE + await queryModel.query('Rollback;', [], dbType); + + feedback = { + type: 'error', + message: err, + }; + } finally { + // send updated db info + + const updatedDb: DBList = await databaseModel.getLists(dbName, dbType); + event.sender.send('db-lists', updatedDb); + + // send feedback back to FE + event.sender.send('feedback', feedback); + + // send notice to FE that schema update has been completed + event.sender.send('async-complete'); + + logger( + "Sent 'db-lists and feedback' from 'ertable-schemaupdate'", + LogType.SEND, + ); + } +} + +export function getPath(event, pathType) { + return app.getPath(pathType); +} diff --git a/backend/src/ipcHandlers/handlers/dbCRUDHandlerERD.ts b/backend/src/ipcHandlers/handlers/dbCRUDHandlerERD.ts new file mode 100644 index 00000000..d36a354b --- /dev/null +++ b/backend/src/ipcHandlers/handlers/dbCRUDHandlerERD.ts @@ -0,0 +1,86 @@ +// Types +import { app } from 'electron'; +import { DBList, LogType } from '../../../BE_types'; +import { Feedback } from '../../../../shared/types/utilTypes'; +import { ErdUpdatesType } from '../../../../shared/types/erTypes'; +import dbState 
from '../../models/stateModel'; +// Helpers +import logger from '../../utils/logging/masterlog'; +import erdUpdatesToQuery from '../../utils/erdTableFunctions'; +// Models used +import databaseModel from '../../models/databaseModel'; +import queryModel from '../../models/queryModel'; +// import db from '../../../models'; + +/** + * EVENT: 'ertable-schemaupdate' + * + * DEFINITION: ERD: Generate and run query from react-flow ER diagram + * + * Process involes the following steps: + * 1. query from object passed back with helper updatesArray + * 2. run queryModel.query on the sqlstring + * 3. rollback if fail + * 4. get tables and return to frontend + * + * + */ + +export async function erTableSchemaUpdate(event, updatesArray: ErdUpdatesType) { + // send notice to front end that schema update has started + event.sender.send('async-started'); + let feedback: Feedback = { + type: '', + message: '', + }; + + // get currentDBState + const { currentERD, currentDb } = dbState; + + try { + // Generates query srting from updatesArray + const queryString = erdUpdatesToQuery(updatesArray, currentERD); + + // Query Transaction + await queryModel.query('Begin;', [], currentERD); // transaction wrapper + await queryModel.query(queryString, [], currentERD); + await queryModel.query('Commit;', [], currentERD); // transaction wrapper + + feedback = { + type: 'success', + message: 'Database updated successfully.', + }; + return 'success'; + } catch (err: any) { + // rollback transaction if there's an error in update and send back feedback to FE + await queryModel.query('Rollback;', [], currentERD); + + feedback = { + type: 'error', + message: err, + }; + } finally { + // send updated db info + + const updatedDb: DBList = await databaseModel.getLists( + currentDb, + currentERD, + ); + event.sender.send('db-lists', updatedDb); + + // send feedback back to FE + event.sender.send('feedback', feedback); + + // send notice to FE that schema update has been completed + 
event.sender.send('async-complete'); + + logger( + "Sent 'db-lists and feedback' from 'ertable-schemaupdate'", + LogType.SEND, + ); + } +} + +export function getPath(event, pathType) { + return app.getPath(pathType); +} diff --git a/backend/src/ipcHandlers/handlers/dbOpsHandler.ts b/backend/src/ipcHandlers/handlers/dbOpsHandler.ts new file mode 100644 index 00000000..26e7d810 --- /dev/null +++ b/backend/src/ipcHandlers/handlers/dbOpsHandler.ts @@ -0,0 +1,358 @@ +import fs from 'fs'; +import path from 'path'; + +// Types +import { DBList, LogType } from '../../../BE_types'; +import { Feedback } from '../../../../shared/types/utilTypes'; +import { DBType } from '../../../../shared/types/dbTypes'; + +// Helpers +import logger from '../../utils/logging/masterlog'; +import docConfig from '../../models/configModel'; +import helperFunctions from '../../utils/helperFunctions'; + +// Models +import connectionModel from '../../models/connectionModel'; +import databaseModel from '../../models/databaseModel'; +import queryModel from '../../models/queryModel'; +import dbState from '../../models/stateModel'; + +const { + createDBFunc, + dropDBFunc, + runSQLFunc, + // runTARFunc, + runFullCopyFunc, + runHollowCopyFunc, + promExecute, +} = helperFunctions; + +// Local Types +interface DuplicatePayload { + newName: string; + sourceDb: string; + withData: boolean; +} + +interface ImportPayload { + newDbName: string; + filePath: string; + dbType: DBType; +} + +interface ExportPayload extends ImportPayload { + db: string; +} + +interface ExportPayload { + sourceDb: string; +} + +/** + * EVENT: 'return-db-list' + * + * DEFINITION: returns a db-list from frontend. for Sidebar + * + * Process involes the following steps: + * 1. connectionModel.setBaseConections + * 2. 
get listObj from databaseModel.getLists + */ + +export async function returnDbList(event) { + logger( + "Received 'return-db-list' (Note: No Async being sent here)", + LogType.RECEIVE, + ); + try { + await connectionModel.setBaseConnections(); + const data = await databaseModel.getLists(); + logger("Sent 'db-lists' from 'return-db-list'", LogType.SEND); + return data; + } catch (err: any) { + logger( + `Error trying to set base connections on 'return-db-list': ${err.message}`, + LogType.ERROR, + ); + const feedback: Feedback = { + type: 'error', + message: err, + }; + event.sender.send('feedback', feedback); + logger( + "Sent 'feedback' from 'return-db-list' (Note: This is an ERROR!)", + LogType.SEND, + ); + } +} + +/** + * EVENT: 'select-db' + * + * DEFINITION: connect to selected db on the sidebar, then get object containing a list of all databases abd tables for the selected database, and sends to frontend. + * + * Process involes the following steps: + * 1. connectionModel.connectToDB + * 2. databaseModel.getLists + * 3. returns getLists object back + */ + +export async function selectDb( + event, + dbName: string, + dbType: DBType, +): Promise { + logger("Received 'select-db'", LogType.RECEIVE); + + event.sender.send('async-started'); + try { + if (dbName === '') { + dbName = 'postgres'; + } + + await connectionModel.connectToDB(dbName, dbType); + + // assign currentERD to dbType (string) of selected ERD + dbState.currentERD = dbType; + // assign dbType to dbType (string) of selected ERD + dbState.currentDb = dbName; + + // send updated db info + const dbsAndTables: DBList = await databaseModel.getLists(dbName, dbType); + event.sender.send('db-lists', dbsAndTables); + logger("Sent 'db-lists' from 'select-db'", LogType.SEND); + } finally { + event.sender.send('async-complete'); + } +} + +/** + * EVENT: 'drop-db' + * + * DEFINITION: Handler for drop-db requests from frontend + * + * Process involes the following steps: + * 1. 
discoonect from all pools first with connectionModel.disconnectToDrop (is there no way to just disconnect from selected db?) + * 2. reconnect immediately with everything but desired db of deletion with connectionModel.connectToDB + * 3. use helper function dropDBFunc (a series of sql commands for each sql EXCEPT sqlite) + * 4. databaseModel.getLists + * 5. returns getLists object back + */ + +export async function dropDb( + event, + dbName: string, + currDB: boolean, + dbType: DBType, +): Promise { + logger("Received 'drop-db'", LogType.RECEIVE); + + event.sender.send('async-started'); + + try { + // if deleting currently connected db, disconnect from db + // end pool connection + await connectionModel.disconnectToDrop(dbType); + // reconnect to database server, but not the db that will be dropped + if (dbType === DBType.Postgres) { + await connectionModel.connectToDB('postgres', dbType); + } else { + await connectionModel.connectToDB('', dbType); + } + + // await connectionModel.disconnectToDrop(dbType); + // reconnect to database server, but not the db that will be dropped + + // await connectionModel.connectToDB('', dbType); + + // IN CASE OF EMERGENCY USE THIS CODE TO DROP DATABASES + // WILL THROW UNCAUGHT ERRORS LAST RESORT ONLY!!! 
+ // await db.connectToDB('', dbType); + // if(dbType === DBType.Postgres){ + // await queryModel.query(`UPDATE pg_database SET datallowconn = 'false' WHERE datname = '${dbName}'`, null, dbType); + // await queryModel.query(` + // SELECT pid, pg_terminate_backend(pid) + // FROM pg_stat_activity + // WHERE datname = '${dbName}' AND pid <> pg_backend_pid(); + // `, null, dbType); + // // await db.closeTheDB(dbName, dbType); + // } + + // const dropDBScript = dropDBFunc(dbName, dbType); + if (dbType !== DBType.SQLite) + await queryModel.query(dropDBFunc(dbName, dbType), [], dbType); + + // send updated db info + const dbsAndTables: DBList = await databaseModel.getLists(dbName, dbType); + event.sender.send('db-lists', dbsAndTables); + logger("Sent 'db-lists' from 'drop-db'", LogType.SEND); + } finally { + event.sender.send('async-complete'); + } +} + +/** + * EVENT: 'duplicate-db' + * + * DEFINITION: Handle duplicate-db events sent from frontend. Cleans up itself in event of failsure + * + * Process involes the following steps: + * 1. create a temporary file path + * 2. dump database (create a backup) to temporary file + * 3. create new empty database with helper + * 4. Run temporary file sql commands on the new empty database + * 5. databaseModel.getLists + * 6. returns getLists object back + * 7. will cleanup the temp file after these operations + */ + +export async function duplicateDb( + event, + { newName, sourceDb, withData }: DuplicatePayload, + dbType: DBType, +) { + logger( + `Received 'duplicate-db'" of dbType: ${dbType} and: `, + LogType.RECEIVE, + ); + + event.sender.send('async-started'); + + const tempFilePath = path.resolve( + `${docConfig.getConfigFolder()}/`, + `temp_${newName}.sql`, + ); + + try { + // dump database to temp file + const dumpCmd = withData + ? 
runFullCopyFunc(sourceDb, tempFilePath, dbType) + : runHollowCopyFunc(sourceDb, tempFilePath, dbType); + try { + await promExecute(dumpCmd); + } catch (e) { + throw new Error( + `Failed to dump ${sourceDb} to temp file at ${tempFilePath}`, + ); + } + + // create new empty database + try { + await queryModel.query(createDBFunc(newName, dbType), [], dbType); + } catch (e) { + throw new Error('Failed to create Database'); + } + + // run temp sql file on new database + try { + await promExecute(runSQLFunc(newName, tempFilePath, dbType)); + } catch (e: any) { + // cleanup: drop created db + logger(`Dropping duplicate db because: ${e.message}`, LogType.WARNING); + const dropDBScript = dropDBFunc(newName, dbType); + await queryModel.query(dropDBScript, [], dbType); + + throw new Error('Failed to populate newly created database'); + } + + // update frontend with new db list + const dbsAndTableInfo: DBList = await databaseModel.getLists('', dbType); + event.sender.send('db-lists', dbsAndTableInfo); + logger("Sent 'db-lists' from 'duplicate-db'", LogType.SEND); + } finally { + // clean up temp file + try { + fs.unlinkSync(tempFilePath); + } catch (e) { + event.sender.send('feedback', { + type: 'error', + message: `Failed to cleanup temp files. ${tempFilePath} could not be removed.`, + }); + } + + event.sender.send('async-complete'); + } +} + +/** + * EVENT: 'import-db' + * + * DEFINITION: Handle import-db events sent from frontend and cleans up after itself + * + * Process involes the following steps: + * 1. create new empty database with helper + * 2. populate db with new data + * 3. databaseModel.getLists + * 4. 
returns getLists object back + */ + +export async function importDb( + event, + { newDbName, filePath, dbType }: ImportPayload, +) { + logger(`Received 'import-db'" of dbType: ${dbType} and: `, LogType.RECEIVE); + event.sender.send('async-started'); + + try { + // create new empty database + try { + await queryModel.query(createDBFunc(newDbName, dbType), [], dbType); + } catch (e) { + throw new Error('Failed to create Database'); + } + + // run temp sql file on new database + try { + await promExecute(runSQLFunc(newDbName, filePath, dbType)); + } catch (e: any) { + // cleanup: drop created db + logger(`Dropping duplicate db because: ${e.message}`, LogType.WARNING); + const dropDBScript = dropDBFunc(newDbName, dbType); + await queryModel.query(dropDBScript, [], dbType); + + throw new Error('Failed to populate newly created database'); + } + + // update frontend with new db list + const dbsAndTableInfo: DBList = await databaseModel.getLists('', dbType); + event.sender.send('db-lists', dbsAndTableInfo); + logger("Sent 'db-lists' from 'duplicate-db'", LogType.SEND); + } finally { + event.sender.send('async-complete'); + } +} + +/** + * EVENT: 'export-db' + * + * DEFINITION: exports a selected database to desktop + * + * Process involes the following steps: + * 1. creates a temporary file on desktop + * 2. create a function to dump to new file with helper function (poorly designed) + * 3. use promExecute to execute function created above. this promExecute will run command shell to export the files. basically there is an internal timer in here for 5 seconds and if things do not copy after 5 seconds it terminates. + * 4. send back a feedback to frontend based on pormExecute. 
+ */ + +export async function exportDb(event, payload: ExportPayload, dbType: DBType) { + logger("Received 'export-db'", LogType.RECEIVE); + event.sender.send('async-started'); + + const { db, filePath } = payload; + + const feedback: Feedback = { + type: '', + message: '', + }; + + try { + // dump database to file + const dumpCmd = runFullCopyFunc(db, filePath, dbType); + try { + await promExecute(dumpCmd); + } catch (e) { + throw new Error(`Failed to dump ${db} to temp file at ${filePath}`); + } + } finally { + event.sender.send('async-complete'); + } +} diff --git a/backend/src/ipcHandlers/handlers/miscHandler.ts b/backend/src/ipcHandlers/handlers/miscHandler.ts new file mode 100644 index 00000000..500f696e --- /dev/null +++ b/backend/src/ipcHandlers/handlers/miscHandler.ts @@ -0,0 +1,158 @@ +import { BrowserWindow, dialog } from 'electron'; + +// Types +import { + ColumnObj, + DBList, + DBType, + DummyRecords, + LogType, +} from '../../../BE_types'; +import { Feedback } from '../../../../shared/types/utilTypes'; + +// Helpers +import generateDummyData from '../../utils/dummyData/dummyDataMain'; +import logger from '../../utils/logging/masterlog'; + +// Models used +import databaseModel from '../../models/databaseModel'; +import queryModel from '../../models/queryModel'; +// import db from '../../../models'; + +/** + * EVENT: 'generate-dummy-data' + * + * DEFINITION: makes dummy data with Faker + * + * Process involes the following steps: + * 1. get primary and foreign keys for desired database with databaseModel.getTableInfo and put them in an array of objs + * 2. generate dummy data based on array of objs with generateDummyData helper + * 3. insert this data into the database it was generated for (poorly designed) + * 3.1 run queryModel.query + * 4 update with databaseModel.getLists since list is now updated (this is for every query doe, which is repetitive since not all queries affect the original databse ) + * 8. 
returns getLists object back + * + * ISSUES: + * step 3 should not be written in our controllers. perhaps need better UI + */ + +interface dummyDataRequestPayload { + dbName: string; + tableName: string; + rows: number; + dbType: DBType; +} + +export async function dummyData(event, data: dummyDataRequestPayload) { + logger("Received 'generate-dummy-data'", LogType.RECEIVE); + // send notice to front end that DD generation has been started + event.sender.send('async-started'); + let feedback: Feedback = { + type: '', + message: '', + }; + try { + // Retrieves the Primary Keys and Foreign Keys for all the tables + const tableInfo: ColumnObj[] = await databaseModel.getTableInfo( + data.tableName, + data.dbType, + ); // passed in dbType to second argument + + // generate dummy data + const dummyArray: DummyRecords = await generateDummyData( + tableInfo, + data.rows, + ); + // generate insert query string to insert dummy records + const columnsStringified = '('.concat(dummyArray[0].join(', ')).concat(')'); + let insertQuery = `INSERT INTO ${data.tableName} ${columnsStringified} VALUES `; + for (let i = 1; i < dummyArray.length - 1; i += 1) { + const recordStringified = '(' + .concat(dummyArray[i].join(', ')) + .concat('), '); + insertQuery = insertQuery.concat(recordStringified); + } + const lastRecordStringified = '(' + .concat(dummyArray[dummyArray.length - 1].join(', ')) + .concat(');'); + insertQuery = insertQuery.concat(lastRecordStringified); + // insert dummy records into DB + await queryModel.query('Begin;', [], data.dbType); + await queryModel.query(insertQuery, [], data.dbType); + await queryModel.query('Commit;', [], data.dbType); + feedback = { + type: 'success', + message: 'Dummy data successfully generated.', + }; + } catch (err: any) { + // rollback transaction if there's an error in insertion and send back feedback to FE + await queryModel.query('Rollback;', [], data.dbType); + feedback = { + type: 'error', + message: err, + }; + } finally { + // send 
updated db info in case query affected table or database information + const dbsAndTables: DBList = await databaseModel.getLists('', data.dbType); // dummy data clear error is from here + event.sender.send('db-lists', dbsAndTables); // dummy data clear error is from here + + // send feedback back to FE + event.sender.send('feedback', feedback); + + // send notice to FE that DD generation has been completed + event.sender.send('async-complete'); + + logger( + "Sent 'db-lists and feedback' from 'generate-dummy-data'", + LogType.SEND, + ); + } +} + +/** + * EVENT: 'showOpenDialog' + * + * DEFINITION: I believe this is the window for choosing files to upload. Currently linked to ConfigView.tsx. + * + * Process involves the following steps: + * 1. select a BrowserWindow + * 2. open it with dialog.showOpenDialog + * + * PROBLEM: + * "fix the type of any of the focused window. I cheated it." + */ + +export async function showOpenDialog(event, options) { + const focusedWindow: any = BrowserWindow.fromWebContents(event.sender); + const result = await dialog.showOpenDialog(focusedWindow, options); + return result.filePaths[0]; +} + +/** + * EVENT: 'showSaveDialog' + * + * DEFINITION: I believe this is the window for saving files to desktop. (?) + * + * Process involves the following steps: + * 1. select a BrowserWindow + * 2. open it with dialog.showSaveDialog + * + */ + +export async function showSaveDialog(event, options) { + const focusedWindow: any = BrowserWindow.fromWebContents(event.sender); + const result = await dialog.showSaveDialog(focusedWindow, options); + return result.filePath; +} + +/** + * EVENT: 'feedback' + * + * DEFINITION: For sending error messages. Kind of a tester. + * + * Process involves the following steps: + * 1. 
sends feedback to frontend + */ + +export function feedback(event, options: { feedback: Feedback }) { + event.sender.send('feedback', options); +} diff --git a/backend/src/ipcHandlers/handlers/queryHandler.ts b/backend/src/ipcHandlers/handlers/queryHandler.ts new file mode 100644 index 00000000..7a7d33b2 --- /dev/null +++ b/backend/src/ipcHandlers/handlers/queryHandler.ts @@ -0,0 +1,270 @@ +import fs from 'fs'; + +// Types +import { DBList, DBType, LogType, QueryPayload } from '../../../BE_types'; + +// Helpers +import logger from '../../utils/logging/masterlog'; +import helperFunctions from '../../utils/helperFunctions'; + +// Models used +import connectionModel from '../../models/connectionModel'; +import queryModel from '../../models/queryModel'; +import databaseModel from '../../models/databaseModel'; +// import db from '../../../models'; + +const { explainQuery } = helperFunctions; + +/** + * EVENT: 'run-query' + * + * DEFINITION: QUERY MODE (CRUD): Handle run-query events passed from the front-end, and send back an updated DB List for the query view. also show statistics + * + * Process involes the following steps: + * 1. instantiates information like total sampletime, min, max, avg. + * 2. creates a parseExplainExplanation for transcribing query results back to english + * 3. connect to desired DB if query is not on current DB + * 4. attempts to extract EXPLAIN on each db. explain is an execution plan + * 5. run Query with queryModel.query() and return a big object of all the desired results + * 6. will connect back to inital database if query was on a different table + * 7. databaseModel.getLists since list is now updated (this is for every query doe, which is repetitive since not all queries affect the original databse ) + * 8. returns getLists object back + * + * ISSUES: + * 1. currently there are functionalities in this handler. lets break them away. + * 2. personally not a fan of global queries. why aren't queries local? 
and if you do want global queries, you should not be able to go back to your current db view (aka silo the query page to a different entity) + * CRUD is not distringuished + */ + +export async function runQuery( + event, + { targetDb, sqlString, selectedDb, runQueryNumber }: QueryPayload, + dbType: DBType, +) { + logger( + "Received 'run-query'", + LogType.RECEIVE, + `selectedDb: ${selectedDb} and dbType: ${dbType} and runQueryNumber: ${runQueryNumber}`, + ); + event.sender.send('async-started'); + const arr: any[] = []; // array of sample + const numberOfSample: number = runQueryNumber; + let totalSampleTime: number = 0; + let minimumSampleTime: number = 0; + let maximumSampleTime: number = 0; + let averageSampleTime: number = 0; + + function parseExplainExplanation(explain: string) { + const regex = /actual time=(\d+\.\d+)\.\.(\d+\.\d+) rows=\d+ loops=(\d+)/g; + const matches: string[][] = Array.from(explain.matchAll(regex)); + let result: number = 0; + for (let i = 0; i < matches.length; i += 1) { + result += + (parseFloat(matches[i][2]) - parseFloat(matches[i][1])) * + parseFloat(matches[i][3]); + } + return result; + } + + try { + let error: string | undefined; + // connect to db to run query + + if (selectedDb !== targetDb) + await connectionModel.connectToDB(targetDb, dbType); + + // Run Explain + let explainResults; + try { + for (let i = 0; i < numberOfSample; i++) { + if (dbType === DBType.Postgres) { + const results = await queryModel.query( + explainQuery(sqlString, dbType), + [], + dbType, + ); + + explainResults = results[1].rows; + const eachSampleTime: any = + results[1].rows[0]['QUERY PLAN'][0]['Planning Time'] + + results[1].rows[0]['QUERY PLAN'][0]['Execution Time']; + arr.push(eachSampleTime); + totalSampleTime += eachSampleTime; + } else if (dbType === DBType.MySQL) { + const results = await queryModel.query( + explainQuery(sqlString, dbType), + [], + dbType, + ); + const eachSampleTime: any = parseExplainExplanation( + results[0][0].EXPLAIN, + 
); + arr.push(eachSampleTime); + totalSampleTime += eachSampleTime; + + // hard coded explainResults just to get it working for now + explainResults = { + Plan: { + 'Node Type': 'Seq Scan', + 'Parallel Aware': false, + 'Async Capable': false, + 'Relation Name': 'newtable1', + Schema: 'public', + Alias: 'newtable1', + 'Startup Cost': 0, + 'Total Cost': 7, + 'Plan Rows': 200, + 'Plan Width': 132, + 'Actual Startup Time': 0.015, + 'Actual Total Time': 0.113, + 'Actual Rows': 200, + 'Actual Loops': 1, + Output: ['newcolumn1'], + 'Shared Hit Blocks': 5, + 'Shared Read Blocks': 0, + 'Shared Dirtied Blocks': 0, + 'Shared Written Blocks': 0, + 'Local Hit Blocks': 0, + 'Local Read Blocks': 0, + 'Local Dirtied Blocks': 0, + 'Local Written Blocks': 0, + 'Temp Read Blocks': 0, + 'Temp Written Blocks': 0, + }, + Planning: { + 'Shared Hit Blocks': 64, + 'Shared Read Blocks': 0, + 'Shared Dirtied Blocks': 0, + 'Shared Written Blocks': 0, + 'Local Hit Blocks': 0, + 'Local Read Blocks': 0, + 'Local Dirtied Blocks': 0, + 'Local Written Blocks': 0, + 'Temp Read Blocks': 0, + 'Temp Written Blocks': 0, + }, + 'Planning Time': 9999, + Triggers: [], + 'Execution Time': 9999, + }; + } else if (dbType === DBType.SQLite) { + const sampleTime = await queryModel.sampler(sqlString); + arr.push(sampleTime); + totalSampleTime += sampleTime; + + // hard coded explainResults just to get it working for now + explainResults = { + Plan: { + 'Node Type': 'Seq Scan', + 'Parallel Aware': false, + 'Async Capable': false, + 'Relation Name': 'newtable1', + Schema: 'public', + Alias: 'newtable1', + 'Startup Cost': 0, + 'Total Cost': 7, + 'Plan Rows': 200, + 'Plan Width': 132, + 'Actual Startup Time': 0.015, + 'Actual Total Time': 0.113, + 'Actual Rows': 200, + 'Actual Loops': 1, + Output: ['newcolumn1'], + 'Shared Hit Blocks': 5, + 'Shared Read Blocks': 0, + 'Shared Dirtied Blocks': 0, + 'Shared Written Blocks': 0, + 'Local Hit Blocks': 0, + 'Local Read Blocks': 0, + 'Local Dirtied Blocks': 0, + 'Local 
Written Blocks': 0, + 'Temp Read Blocks': 0, + 'Temp Written Blocks': 0, + }, + Planning: { + 'Shared Hit Blocks': 64, + 'Shared Read Blocks': 0, + 'Shared Dirtied Blocks': 0, + 'Shared Written Blocks': 0, + 'Local Hit Blocks': 0, + 'Local Read Blocks': 0, + 'Local Dirtied Blocks': 0, + 'Local Written Blocks': 0, + 'Temp Read Blocks': 0, + 'Temp Written Blocks': 0, + }, + 'Planning Time': 9999, + Triggers: [], + 'Execution Time': 9999, + }; + } + } + // get 5 decimal points for sample time + minimumSampleTime = Math.round(Math.min(...arr) * 10 ** 5) / 10 ** 5; + maximumSampleTime = Math.round(Math.max(...arr) * 10 ** 5) / 10 ** 5; + averageSampleTime = + Math.round((totalSampleTime / numberOfSample) * 10 ** 5) / 10 ** 5; + totalSampleTime = Math.round(totalSampleTime * 10 ** 5) / 10 ** 5; + } catch (e) { + error = + 'Failed to get Execution Plan. EXPLAIN might not support this query.'; + } + + // Run Query + let returnedRows; + try { + const results = await queryModel.query(sqlString, [], dbType); + if (dbType === DBType.MySQL) { + returnedRows = results[0]; + } + if (dbType === DBType.Postgres) { + returnedRows = results?.rows; + } + if (dbType === DBType.SQLite) { + returnedRows = results; + } + } catch (e: any) { + error = e.toString(); + } + + return { + db: targetDb, + sqlString, + returnedRows, + explainResults, + error, + numberOfSample, + totalSampleTime, + minimumSampleTime, + maximumSampleTime, + averageSampleTime, + }; + } finally { + // connect back to initialDb + + if (selectedDb !== targetDb) + await connectionModel.connectToDB(selectedDb, dbType); + + // send updated db info in case query affected table or database information + // must be run after we connect back to the originally selected so tables information is accurate + const dbsAndTables: DBList = await databaseModel.getLists('', dbType); + event.sender.send('db-lists', dbsAndTables); + logger( + "Sent 'db-lists' from 'run-query'", + LogType.SEND, + `selectedDb: ${selectedDb} -- targetDb: 
${targetDb} -- dbType: ${dbType}`, + ); + event.sender.send('async-complete'); + } +} + +// Reads the query JSON file and send it to the front end +export function readQuery(event, filepath) { + try { + const data = fs.readFileSync(filepath, 'utf8'); + + return data; + } catch (err) { + console.log('this is error in read-query', err); + } +} diff --git a/backend/src/ipcHandlers/index.ts b/backend/src/ipcHandlers/index.ts new file mode 100644 index 00000000..203ff0be --- /dev/null +++ b/backend/src/ipcHandlers/index.ts @@ -0,0 +1,54 @@ +import { ipcMain } from 'electron'; + +// // imports all other handlers to this index for main to require/import + +import { setConfig, getConfig } from './handlers/authHandler'; +import { + intializeDb, + updateDb, + erTableSchemaUpdate, + getPath, +} from './handlers/dbCRUDHandler'; +import { + returnDbList, + selectDb, + dropDb, + duplicateDb, + importDb, + exportDb, +} from './handlers/dbOpsHandler'; +import { runQuery, readQuery } from './handlers/queryHandler'; +import { + dummyData, + showOpenDialog, + showSaveDialog, + feedback, +} from './handlers/miscHandler'; + +// auth +ipcMain.handle('set-config', setConfig); +ipcMain.handle('get-config', getConfig); + +// db Operations +ipcMain.handle('return-db-list', returnDbList); +ipcMain.handle('select-db', selectDb); +ipcMain.handle('drop-db', dropDb); +ipcMain.handle('duplicate-db', duplicateDb); +ipcMain.handle('import-db', importDb); +ipcMain.handle('export-db', exportDb); +ipcMain.handle('get-path', getPath); + +// db CRUD functionalities +ipcMain.handle('initialize-db', intializeDb); +ipcMain.handle('update-db', updateDb); +ipcMain.handle('ertable-schemaupdate', erTableSchemaUpdate); + +// query +ipcMain.handle('run-query', runQuery); +ipcMain.handle('read-query', readQuery); + +// misc (other events bundled together) +ipcMain.handle('generate-dummy-data', dummyData); +ipcMain.handle('showOpenDialog', showOpenDialog); +ipcMain.handle('showSaveDialog', showSaveDialog); 
+ipcMain.handle('feedback', feedback); diff --git a/backend/src/ipcHandlers/readme.txt b/backend/src/ipcHandlers/readme.txt new file mode 100644 index 00000000..12b271b6 --- /dev/null +++ b/backend/src/ipcHandlers/readme.txt @@ -0,0 +1,10 @@ +Format for new events: + +/** + * EVENT: + * + * DEFINITION: + * + * Process involes the following steps: + */ + diff --git a/backend/_documentsConfig.ts b/backend/src/models/configModel.ts similarity index 52% rename from backend/_documentsConfig.ts rename to backend/src/models/configModel.ts index a9faae7a..3a4b37ff 100644 --- a/backend/_documentsConfig.ts +++ b/backend/src/models/configModel.ts @@ -1,22 +1,34 @@ -/* eslint-disable no-throw-literal */ -/* eslint-disable no-shadow */ -/* eslint-disable object-shorthand */ // import path from 'path'; import fs from 'fs'; import os from 'os'; -import { DBType, DocConfigFile, LogType } from './BE_types'; -import logger from './Logging/masterlog'; +import { DBType, LogType } from '../../../shared/types/dbTypes'; +import logger from '../utils/logging/masterlog'; +import { DocConfigFile } from '../../BE_types'; + +// HELPER FUNCTIONS const home = `${os.homedir()}/Documents/SeeQR`; -const configFile = `config.json`; +const configFile = 'config.json'; const configPath = `${home}/${configFile}`; -const defaultFile: DocConfigFile = { - mysql: { user: '', password: '', port: 3306 }, - pg: { user: '', password: '', port: 5432 }, - rds_mysql: { user: '', password: '', port: 3306, host: '' }, - rds_pg: { user: '', password: '', port: 5432, host: '' }, - sqlite: { path: '' }, - directPGURI: { uri: '' } + +// ideally, we want to keep this config in a seperate file as well +export const defaultFile: DocConfigFile = { + mysql_options: { user: '', password: '', port: 3306 }, + pg_options: { user: '', password: '', port: 5432 }, + rds_mysql_options: { + user: '', + password: '', + port: 3306, + host: '', + }, + rds_pg_options: { + user: '', + password: '', + port: 5432, + host: '', + }, + 
sqlite_options: { filename: '' }, + directPGURI_options: { connectionString: '' }, }; /** @@ -25,18 +37,19 @@ const defaultFile: DocConfigFile = { */ function writeConfigDefault(): DocConfigFile { logger('Could not find config file. Creating default', LogType.WARNING); - fs.writeFileSync(configPath, JSON.stringify(defaultFile)); + fs.writeFileSync(configPath, JSON.stringify(defaultFile, null, 2)); return defaultFile; -}; +} /** * Check if config.json object has the correct database properties (mysql, pg, etc.), tries to replace only the properties that are missing and return either the original or new object. Doesn't care about additional properties in the object besides those in const defaultFile. * @param currConfig current configuration * @returns configuration with only valid key value properties */ -const checkConfigFile = function (currConfig: DocConfigFile): DocConfigFile { +const checkConfigFile = (currConfig: DocConfigFile): DocConfigFile => { const invalidKeys: string[] = []; try { + // pushes extra data from configFile to invalidKeys array Object.keys(defaultFile).forEach((key) => { if (!Object.prototype.hasOwnProperty.call(currConfig, key)) { invalidKeys.push(key); @@ -51,8 +64,8 @@ const checkConfigFile = function (currConfig: DocConfigFile): DocConfigFile { } catch (err) { console.log(err); logger( - `Error caught checking config file. Resetting config to default.`, - LogType.WARNING + 'Error caught checking config file. 
Resetting config to default.', + LogType.WARNING, ); return writeConfigDefault(); } @@ -69,51 +82,51 @@ const checkConfigFile = function (currConfig: DocConfigFile): DocConfigFile { /** * Reads config file data and sends it into checkConfigFile, then returns the result - * If an error occurs during read, the config file will be set back to default + * If an error occurs during read, the config file will be set back to default * @returns config file contents (login info for each database type) */ function readConfigFile(): DocConfigFile { try { const config = JSON.parse( - fs.readFileSync(configPath, 'utf-8') + fs.readFileSync(configPath, 'utf-8'), ) as DocConfigFile; return checkConfigFile(config); - } catch (err: any) { + } catch (err) { console.log(err); logger( - `Error caught checking config file. Resetting config to default.`, - LogType.WARNING + 'Error caught checking config file. Resetting config to default.', + LogType.WARNING, ); return writeConfigDefault(); } -}; +} -interface DocConfig { - getConfigFolder: () => string; - getCredentials: (dbType: DBType) => { - user?: string; - password?: string; - host?: string; - port?: number | string; - uri?: string; - path?: string; - }; - getFullConfig: () => Object; - saveConfig: (config: Object) => void; +interface GetCredentialsReturnType { + [DBType.MySQL]: DocConfigFile['mysql_options']; + [DBType.RDSMySQL]: DocConfigFile['rds_mysql_options']; + [DBType.Postgres]: DocConfigFile['pg_options']; + [DBType.RDSPostgres]: DocConfigFile['rds_pg_options']; + [DBType.directPGURI]: DocConfigFile['directPGURI_options']; + [DBType.CloudDB]: undefined; + [DBType.SQLite]: DocConfigFile['sqlite_options']; } -const docConfig: DocConfig = { +// ------------------------------ +// MAIN FUNCTIONALITY +// + +const docConfig = { /** * Checks if config file directory exists. 
If not, creates one * @returns config file directory */ - getConfigFolder: function getConfigFolder() { + getConfigFolder(): string { if (fs.existsSync(home)) { logger(`Found documents directory: ${home}`, LogType.SUCCESS); } else { logger( `Could not find documents directory. Creating at: ${home}`, - LogType.WARNING + LogType.WARNING, ); fs.mkdirSync(home); } @@ -125,61 +138,50 @@ const docConfig: DocConfig = { * @param dbType: desired database type for login info * @returns login info for the desired database type */ - getCredentials: function getCredentials(dbType: DBType) { - this.getConfigFolder(); // ensure directory exists - let configFile: DocConfigFile; - try { - configFile = readConfigFile(); // all login info now in configFile - } catch (err: any) { - logger(err.message, LogType.WARNING); - return { user: '', password: '', port: 1 }; - } - - if (dbType === DBType.Postgres) { - return { ...configFile.pg }; - } - if (dbType === DBType.MySQL) { - return { ...configFile.mysql }; - } - if (dbType === DBType.RDSMySQL) { - return { ...configFile.rds_mysql }; - } - if (dbType === DBType.RDSPostgres) { - return { ...configFile.rds_pg }; - } - // asdf check sqlite and directpguri and format for return in case of error - if (dbType === DBType.SQLite) { - return { ...configFile.sqlite }; - } - // if (dbType === DBType.directPGURI) { - // return { ...configFile.directPGURI }; - // } + getCredentials(dbType: K): GetCredentialsReturnType[K] { + this.getConfigFolder(); // ensure directory exists + const config = readConfigFile(); // all login info now in configFile - logger('Could not get credentials of DBType: ', LogType.ERROR, dbType); - return { user: '', password: '', port: 1 }; + return { + [DBType.MySQL]: { ...config.mysql_options }, + [DBType.RDSMySQL]: { ...config.rds_mysql_options }, + [DBType.Postgres]: { ...config.pg_options }, + [DBType.RDSPostgres]: { ...config.rds_pg_options }, + [DBType.directPGURI]: { connectionString: '' }, + [DBType.CloudDB]: 
undefined, + [DBType.SQLite]: { ...config.sqlite_options }, + }[dbType]; }, /** * Reads config file contents and returns it all. * @returns all login info from config file */ - getFullConfig: function getFullConfig() { + getFullConfig() { this.getConfigFolder(); - let configFile: DocConfigFile; + let config: DocConfigFile; try { - configFile = readConfigFile(); - return configFile; - } catch (err: any) { - logger(err.message, LogType.WARNING); - return 'Failed to retrieve data.'; + config = readConfigFile(); + return config; + } catch (err) { + logger( + typeof err === 'object' && + err !== null && + 'message' in err && + typeof err.message === 'string' + ? err.message + : `Error occurred in getFullConfig : ${err}`, + LogType.WARNING, + ); + return undefined; } }, /** * Takes config data object sent from frontend, stringifies it and saves in config file - * @param config + * @param config */ - saveConfig: function saveConfig(config: Object) { + saveConfig(config: DocConfigFile) { try { fs.writeFileSync(configPath, JSON.stringify(config)); logger('Saved new config: ', LogType.NORMAL, config); @@ -189,4 +191,4 @@ const docConfig: DocConfig = { }, }; -module.exports = docConfig; +export default docConfig; diff --git a/backend/src/models/connectionModel.ts b/backend/src/models/connectionModel.ts new file mode 100644 index 00000000..bacbbb39 --- /dev/null +++ b/backend/src/models/connectionModel.ts @@ -0,0 +1,200 @@ +import fs from 'fs'; +import docConfig from './configModel'; +import { LogType } from '../../BE_types'; +import { DBType, connectionModelType } from '../../../shared/types/dbTypes'; +import connectionFunctions from '../db/databaseConnections'; +import logger from '../utils/logging/masterlog'; +import pools from '../db/poolVariables'; + +import dbState from './stateModel'; + +/* +README: "connectionModel" deals with business logic of connetion actions. This file dealswith logining and connections to different kinds of databases. 
+FUNCTIONS: setBaseConnections, connectToDB, disconnectToDrop +*/ + +// Functions +const connectionModel: connectionModelType = { + setBaseConnections: async () => { + dbState.mysql_options = docConfig.getCredentials(DBType.MySQL); + dbState.pg_options = docConfig.getCredentials(DBType.Postgres); + dbState.rds_pg_options = docConfig.getCredentials(DBType.RDSPostgres); + dbState.rds_mysql_options = docConfig.getCredentials(DBType.RDSMySQL); + dbState.sqlite_options = docConfig.getCredentials(DBType.SQLite); + dbState.directPGURI_options = docConfig.getCredentials(DBType.directPGURI); + + const configExists = { + pg: false, + msql: false, + rds_pg: false, + rds_msql: false, + sqlite: false, + directPGURI: false, + }; + /* + all the if/else and try/catch in this function are for various forms of error handling. + incorrect passwords/removed entries after successful logins + */ + + // RDS PG POOL: truthy values means user has inputted info into config -> try to log in + if ( + dbState.rds_pg_options.user && + dbState.rds_pg_options.password && + dbState.rds_pg_options.host + ) { + try { + configExists.rds_pg = true; + await connectionFunctions.RDS_PG_DBConnect(dbState.rds_pg_options); + dbState.dbsInputted.rds_pg = true; + logger('CONNECTED TO RDS PG DATABASE!', LogType.SUCCESS); + } catch (error) { + dbState.dbsInputted.rds_pg = false; + logger('FAILED TO CONNECT TO RDS PG DATABASE', LogType.ERROR); + } + } else { + configExists.rds_pg = false; + dbState.dbsInputted.rds_pg = false; + } + + // RDS MSQL POOL: truthy values means user has inputted info into config -> try to log in + if ( + dbState.rds_mysql_options.user && + dbState.rds_mysql_options.password && + dbState.rds_mysql_options.host + ) { + try { + configExists.rds_msql = true; + await connectionFunctions.RDS_MSQL_DBConnect(dbState.rds_mysql_options); + + // test query to make sure were connected. needed for the + // catch statement to hit incase we arent connected. 
+ if (pools.rds_msql_pool === undefined) + throw new Error('No RDS msql pool connected'); + await pools.rds_msql_pool.query('SHOW DATABASES;'); + logger('CONNECTED TO RDS MYSQL DATABASE!', LogType.SUCCESS); + dbState.dbsInputted.rds_msql = true; + } catch (error) { + dbState.dbsInputted.rds_msql = false; + logger('FAILED TO CONNECT TO RDS MSQL DATABASE', LogType.ERROR); + } + } else { + configExists.rds_msql = false; + dbState.dbsInputted.rds_msql = false; + } + + // LOCAL PG POOL: truthy values means user has inputted info into config -> try to connect + if (dbState.pg_options.user && dbState.pg_options.password) { + dbState.pg_options.connectionString = `postgres://${dbState.pg_options.user}:${dbState.pg_options.password}@localhost:${dbState.pg_options.port}`; + dbState.pg_options.database = 'postgres'; + try { + configExists.pg = true; + await connectionFunctions.PG_DBConnect( + dbState.pg_options.connectionString, + dbState.pg_options.database, + ); + logger('CONNECTED TO LOCAL PG DATABASE', LogType.SUCCESS); + dbState.dbsInputted.pg = true; + } catch (error) { + dbState.dbsInputted.pg = false; + logger('FAILED TO CONNECT TO LOCAL PG DATABASE', LogType.ERROR); + } + } else { + configExists.pg = false; + dbState.dbsInputted.pg = false; + } + + // LOCAL MSQL POOL: truthy values means user has inputted info into config -> try to log in + if (dbState.mysql_options.user && dbState.mysql_options.password) { + try { + configExists.msql = true; + dbState.mysql_options = { + ...dbState.mysql_options, + host: 'localhost', + waitForConnections: true, + connectionLimit: 10, + queueLimit: 0, + multipleStatements: true, + }; + await connectionFunctions.MSQL_DBConnect(dbState.mysql_options); + + // test query to make sure were connected. needed for the catch statement to hit incase we arent connected. 
+ if (pools.msql_pool === undefined) + throw new Error('No active msql pool'); + await pools.msql_pool.query('SHOW DATABASES;'); + dbState.dbsInputted.msql = true; + logger('CONNECTED TO LOCAL MYSQL DATABASE!', LogType.SUCCESS); + } catch (error) { + dbState.dbsInputted.msql = false; + logger('FAILED TO CONNECT TO LOCAL MSQL DATABASE', LogType.ERROR); + } + } else { + configExists.msql = false; + dbState.dbsInputted.msql = false; + } + + // RDS PG POOL: truthy values means user has inputted info into config -> try to log in + if (dbState.sqlite_options.filename) { + try { + configExists.sqlite = true; + connectionFunctions.SQLite_DBConnect(dbState.sqlite_options.filename); + dbState.dbsInputted.sqlite = true; + logger('CONNECTED TO SQLITE DATABASE!', LogType.SUCCESS); + } catch (error) { + dbState.dbsInputted.sqlite = false; + logger('FAILED TO CONNECT TO SQLITE DATABASE', LogType.ERROR); + } + } else { + configExists.sqlite = false; + dbState.dbsInputted.sqlite = false; + } + + return { dbsInputted: dbState.dbsInputted, configExists }; + }, + + // connectToDB : chooses what kind of database this is based on received dbType. 
+ connectToDB: async (db, dbType) => { + // change current Db + + if (dbType === DBType.Postgres) { + dbState.pg_options.database = db; + + if (dbState.pg_options.connectionString) { + await connectionFunctions.PG_DBConnect( + dbState.pg_options.connectionString, + db, + ); + } else { + // handle case where connection string is undefined} + } + } else if (dbType === DBType.MySQL) { + dbState.mysql_options.database = db; + await connectionFunctions.MSQL_DBQuery(db); + } else if (dbType === DBType.RDSMySQL) { + dbState.rds_mysql_options.database = db; + await connectionFunctions.RDS_MSQL_DBQuery(db); + } else if (dbType === DBType.RDSPostgres) { + await connectionFunctions.RDS_PG_DBConnect(dbState.rds_pg_options); + } else if (dbType === DBType.SQLite) { + connectionFunctions.SQLite_DBConnect(dbState.sqlite_options.filename); + } + }, + + disconnectToDrop: async (dbType) => { + if (dbType === DBType.Postgres) { + // ending pool + await connectionFunctions.PG_DBDisconnect(); + } + if (dbType === DBType.SQLite) { + try { + // disconnect from and delete sqlite .db file + pools.sqlite_db?.close(); + fs.unlinkSync(dbState.sqlite_options.filename); + dbState.sqlite_options.filename = ''; + } catch (e) { + logger('FAILED TO DELETE SQLITE DB FILE', LogType.ERROR); + } + } + }, +}; + +export default connectionModel; diff --git a/backend/src/models/databaseModel.ts b/backend/src/models/databaseModel.ts new file mode 100644 index 00000000..d224ce4b --- /dev/null +++ b/backend/src/models/databaseModel.ts @@ -0,0 +1,608 @@ +import fs from 'fs'; +import { Pool } from 'pg'; +import { Pool as MSQLPool } from 'mysql2/promise'; +import { RowDataPacket } from 'mysql2'; +import { + ColumnObj, + dbDetails, + DBList, + DBType, + LogType, + TableDetails, + databaseModelType, +} from '../../../shared/types/dbTypes'; +import logger from '../utils/logging/masterlog'; +import pools from '../db/poolVariables'; + +import dbState from './stateModel'; + +/* +README: "databaseModel" deals with 
business logic of connetion actions. This file dealswith logining and connections to different kinds of databases. +FUNCTIONS: getLists, getTableInfo, getDBNames, getColumnObjects, getDBLists +*/ + +// Functions +const databaseModel: databaseModelType = { + // getLists: this list object is what will be returned at the end of the function. function will get lists for all four databases depending on which is logged in + + getLists: async (dbName = '', dbType) => { + const listObj: DBList = { + databaseConnected: { + PG: false, + MySQL: false, + RDSPG: false, + RDSMySQL: false, + SQLite: false, + directPGURI: false, + }, + databaseList: [], // accumulates lists for each logged-in database + tableList: [], + }; + if (dbState.dbsInputted.pg) { + try { + const pgDBList = await databaseModel.getDBNames(DBType.Postgres); + listObj.databaseConnected.PG = true; + listObj.databaseList = [...listObj.databaseList, ...pgDBList]; + } catch (error) { + logger('COULDNT GET NAMES FROM LOCAL PG', LogType.ERROR); + } + } + + if (dbState.dbsInputted.msql) { + try { + const msqlDBList = await databaseModel.getDBNames(DBType.MySQL); + listObj.databaseConnected.MySQL = true; + listObj.databaseList = [...listObj.databaseList, ...msqlDBList]; + } catch (error) { + logger('COULDNT GET NAMES FROM LOCAL MSQL', LogType.ERROR); + } + } + + if (dbState.dbsInputted.rds_msql) { + try { + const RDSmsqlDBList = await databaseModel.getDBNames(DBType.RDSMySQL); + listObj.databaseConnected.RDSMySQL = true; + listObj.databaseList = [...listObj.databaseList, ...RDSmsqlDBList]; + } catch (error) { + logger('COULDNT GET NAMES FROM RDS MSQL', LogType.ERROR); + } + } + + if (dbState.dbsInputted.rds_pg) { + try { + const RDSpgDBList = await databaseModel.getDBNames(DBType.RDSPostgres); + listObj.databaseConnected.RDSPG = true; + listObj.databaseList = [...listObj.databaseList, ...RDSpgDBList]; + } catch (error) { + logger('COULDNT GET NAMES FROM RDS PG', LogType.ERROR); + } + } + + if 
(dbState.dbsInputted.sqlite) { + try { + const sqliteDBList = await databaseModel.getDBNames(DBType.SQLite); + listObj.databaseConnected.SQLite = true; + listObj.databaseList = [...listObj.databaseList, ...sqliteDBList]; + } catch (error) { + logger('COULDNT GET NAMES FROM SQLite DB', LogType.ERROR); + } + } + + if (dbType) { + try { + const listData = await databaseModel.getDBLists(dbType, dbName); + logger( + `RESOLVING DB DETAILS: Fetched DB names along with Table List for DBType: ${dbType} and DB: ${dbName}`, + LogType.SUCCESS, + ); + listObj.tableList = listData; + } catch (error) { + logger( + `COULNT GET DATABASE LIST FOR ${dbType} ${dbName} DATABASE`, + LogType.ERROR, + ); + } + } + return listObj; + }, + + // NEED TO LOOK INTO getTableInfo + + getTableInfo: (tableName, dbType) => + databaseModel.getColumnObjects(tableName, dbType), + + // NEED TO LOOK INTO getDBNames + + getDBNames: (dbType) => + new Promise((resolve, reject) => { + let query: string; + if (dbType === DBType.Postgres || dbType === DBType.RDSPostgres) { + let pool: Pool | undefined; // changes which pool is being queried based on dbType + + if (dbType === DBType.Postgres) pool = pools.pg_pool; + if (dbType === DBType.RDSPostgres) pool = pools.rds_pg_pool; + if (pool === undefined) { + reject(Error('No pool for Postgres DB')); + return; + } + const dbList: dbDetails[] = []; + /* + junaid + only run queries if pool is made + */ + if (pool) { + query = `SELECT dbs.datname AS db_name, + pg_size_pretty(pg_database_size(dbs.datname)) AS db_size + FROM pg_database dbs + ORDER BY db_name`; + pool + .query<{ db_name: string; db_size: string }>(query) + .then((databases) => { + for (let i = 0; i < databases.rows.length; i += 1) { + const data = databases.rows[i]; + const { db_name } = data; + + if ( + db_name !== 'postgres' && + db_name !== 'template0' && + db_name !== 'template1' + ) { + dbList.push({ ...data, db_type: dbType }); + } + } + + logger("PG 'getDBNames' resolved.", LogType.SUCCESS); + // 
resolve with array of db names + resolve(dbList); + }) + .catch((err) => { + reject(err); + }); + } else { + resolve(dbList); + } + } else if (dbType === DBType.MySQL || dbType === DBType.RDSMySQL) { + let pool: MSQLPool | undefined; // changes which pool is being queried based on dbType + if (dbType === DBType.MySQL) pool = pools.msql_pool; + if (dbType === DBType.RDSMySQL) pool = pools.rds_msql_pool; + if (pool === undefined) { + reject(Error('No pool for MySQL DB')); + return; + } + + const dbList: dbDetails[] = []; + /* + only run queries if pool is made + */ + if (pool) { + query = ` + SELECT + S.SCHEMA_NAME db_name, + ROUND(SUM(data_length + index_length) / 1024, 1) db_size + FROM + INFORMATION_SCHEMA.SCHEMATA S + LEFT OUTER JOIN + INFORMATION_SCHEMA.TABLES T ON S.SCHEMA_NAME = T.TABLE_SCHEMA + WHERE + S.SCHEMA_NAME NOT IN ('information_schema' , 'mysql', 'performance_schema', 'sys') + GROUP BY S.SCHEMA_NAME + ORDER BY db_name ASC;`; + + pool + .query(query) + .then((databases) => { + for (let i = 0; i < databases[0].length; i += 1) { + const data = databases[0][i]; + const filterData: dbDetails = { + db_type: dbType, + db_size: data.db_size ? `${data.db_size}KB` : '0KB', + db_name: + typeof data.db_name === 'string' ? data.db_name : 'ERROR', + }; + data.db_type = dbType; + data.db_size = data.db_size ? 
`${data.db_size}KB` : '0KB'; + data.db_name ??= 'ERROR'; + dbList.push(filterData); + } + + logger("MySQL 'getDBNames' resolved.", LogType.SUCCESS); + // resolve with array of db names + resolve(dbList); + }) + .catch((err) => { + reject(err); + }); + } else { + resolve(dbList); + } + } else if (dbType === DBType.SQLite) { + const dbList: dbDetails[] = []; + let { filename } = dbState.sqlite_options; + filename = filename.slice( + filename.lastIndexOf('\\') + 1, + filename.lastIndexOf('.db'), + ); + const stats = fs.statSync(filename); + const fileSizeInKB = stats.size / 1024; + // Convert the file size to megabytes (optional) + const data = { + db_name: filename, + db_size: `${fileSizeInKB}KB`, + db_type: DBType.SQLite, + }; + dbList.push(data); + resolve(dbList); + } + }), + + // THIS FUNCTION IS FKED + + getColumnObjects: (tableName, dbType) => { + let queryString: string; + const value = [tableName]; + if (dbType === DBType.Postgres || dbType === DBType.RDSPostgres) { + // added to check for RDS + let pool: Pool | undefined; // changes which pool is being queried based on dbType + if (dbType === DBType.Postgres) pool = pools.pg_pool; + if (dbType === DBType.RDSPostgres) pool = pools.rds_pg_pool; + if (pool === undefined) { + throw Error('No pool for psql DB'); + } + // query string to get constraints and table references as well + queryString = `SELECT DISTINCT cols.column_name, + cols.data_type, + cols.character_maximum_length, + cols.is_nullable, + kcu.constraint_name, + cons.constraint_type, + rel_kcu.table_name AS foreign_table, + rel_kcu.column_name AS foreign_column + FROM information_schema.columns cols + LEFT JOIN information_schema.key_column_usage kcu + ON cols.column_name = kcu.column_name + AND cols.table_name = kcu.table_name + LEFT JOIN information_schema.table_constraints cons + ON kcu.constraint_name = cons.constraint_name + LEFT JOIN information_schema.referential_constraints rco + ON rco.constraint_name = cons.constraint_name + LEFT JOIN 
information_schema.key_column_usage rel_kcu + ON rco.unique_constraint_name = rel_kcu.constraint_name + WHERE cols.table_name = $1`; + + // kcu = key column usage = describes which key columns have constraints + // tc = table constraints = shows if constraint is primary key or foreign key + // information_schema.table_constraints show the whole table constraints + + return new Promise((resolve, reject) => { + pool + ?.query(queryString, value) + .then((result) => { + const columnInfoArray: ColumnObj[] = []; + for (let i = 0; i < result.rows.length; i += 1) { + columnInfoArray.push(result.rows[i]); + } + resolve(columnInfoArray); + }) + .catch((err) => { + reject(err); + }); + }); + } + if (dbType === DBType.MySQL || dbType === DBType.RDSMySQL) { + // added to check for RDS + + let pool: MSQLPool | undefined; // changes which pool is being queried based on dbType + if (dbType === DBType.MySQL) pool = pools.msql_pool; + if (dbType === DBType.RDSMySQL) pool = pools.rds_msql_pool; + if (pool === undefined) { + throw Error('No pool for mysql db'); + } + queryString = `SELECT DISTINCT + cols.column_name AS column_name, + cols.data_type AS data_type, + cols.character_maximum_length AS character_maximum_length, + cols.is_nullable AS is_nullable, + kcu.constraint_name AS constraint_name, + cons.constraint_type AS constraint_type, + rel_kcu.table_name AS foreign_table, + rel_kcu.column_name AS foreign_column + FROM information_schema.columns cols + LEFT JOIN information_schema.key_column_usage kcu + ON cols.column_name = kcu.column_name + AND cols.table_name = kcu.table_name + LEFT JOIN information_schema.table_constraints cons + ON kcu.constraint_name = cons.constraint_name + LEFT JOIN information_schema.referential_constraints rco + ON rco.constraint_name = cons.constraint_name + LEFT JOIN information_schema.key_column_usage rel_kcu + ON rco.unique_constraint_name = rel_kcu.constraint_name + WHERE cols.table_name = ?;`; + + return new Promise((resolve, reject) => { + pool 
+ ?.query(queryString, value) + .then((result) => { + const columnInfoArray: ColumnObj[] = []; + for (let i = 0; i < result[0].length; i += 1) { + const data = result[0][i]; + const colObjFromData: ColumnObj = { + column_name: + typeof data.column_data === 'string' ? data.column_data : '', + data_type: + typeof data.data_type === 'string' ? data.data_type : '', + character_maximum_length: + typeof data.character_maximum_length === 'number' + ? data.character_maximum_length + : null, + is_nullable: + typeof data.is_nullable === 'string' ? data.is_nullable : '', + constraint_type: + typeof data.constraint_type === 'string' + ? data.constraint_type + : null, + foreign_table: + typeof data.foreign_table === 'string' + ? data.foreign_table + : null, + foreign_column: + typeof data.foreign_column === 'string' + ? data.foreign_column + : null, + }; + columnInfoArray.push(colObjFromData); + } + resolve(columnInfoArray); + }) + .catch((err) => { + reject(err); + }); + }); + } + + if (dbType === DBType.SQLite) { + const sqliteDB = pools.sqlite_db; + queryString = `SELECT + m.name AS table_name, + p.name AS column_name, + p.type AS data_type, + p.[notnull] AS not_null, + p.pk AS pk, + fkl.[table] AS foreign_table, + fkl.[to] AS foreign_column + FROM sqlite_master m + LEFT JOIN pragma_table_info(m.name) p + LEFT JOIN pragma_foreign_key_list(m.name) fkl + ON p.name = fkl.[from] + WHERE m.type = 'table' AND p.type != '' AND m.name = ?`; + + return new Promise((resolve, reject) => { + sqliteDB?.all<{ + column_name: string; + data_type: string; + not_null: 0 | 1; + pk: 0 | 1; + foreign_table: string; + foreign_column: string; + }>(queryString, value, (err, rows) => { + if (err) { + reject(err); + } + const columnInfoArray: ColumnObj[] = []; + for (let i = 0; i < rows.length; i++) { + const { + column_name, + data_type, + not_null, + pk, + foreign_table, + foreign_column, + } = rows[i]; + const newColumnObj: ColumnObj = { + column_name, + data_type, + character_maximum_length: 
data_type.includes('(') + ? parseInt( + data_type.slice( + 1 + data_type.indexOf('('), + data_type.indexOf(')'), + ), + 10, + ) + : null, + is_nullable: not_null === 1 ? 'NO' : 'YES', + constraint_type: + pk === 1 ? 'PRIMARY KEY' : foreign_table ? 'FOREIGN KEY' : null, + foreign_table, + foreign_column, + }; + columnInfoArray.push(newColumnObj); + } + resolve(columnInfoArray); + }); + }); + } + + logger('Trying to use unknown DB Type: ', LogType.ERROR, dbType); + // eslint-disable-next-line no-throw-literal + throw 'Unknown db type'; + }, + + getDBLists: (dbType, dbName) => + new Promise((resolve, reject) => { + let query: string; + const tableList: TableDetails[] = []; + const promiseArray: Promise[] = []; + + if (dbType === DBType.Postgres || dbType === DBType.RDSPostgres) { + let pool: Pool | undefined; + if (dbType === DBType.Postgres) pool = pools.pg_pool; + if (dbType === DBType.RDSPostgres) pool = pools.rds_pg_pool; + + // querying PG metadata + query = `SELECT + table_catalog, + table_schema, + table_name, + is_insertable_into + FROM information_schema.tables + WHERE table_schema = 'public' or table_schema = 'base' + ORDER BY table_name;`; + pool + ?.query(query) + .then((tables) => { + for (let i = 0; i < tables.rows.length; i++) { + tableList.push(tables.rows[i]); + promiseArray.push( + databaseModel.getColumnObjects( + tables.rows[i].table_name, + dbType, + ), + ); + } + + Promise.all(promiseArray) + .then((columnInfo) => { + for (let i = 0; i < columnInfo.length; i++) { + tableList[i].columns = columnInfo[i]; + } + logger("PG 'getDBLists' resolved.", LogType.SUCCESS); + resolve(tableList); + }) + .catch((err) => { + reject(err); + }); + }) + .catch((err) => { + reject(err); + }); + } else if (dbType === DBType.MySQL || dbType === DBType.RDSMySQL) { + // Notice that TABLE_CATALOG is set to table_schema + // And that TABLE_SCHEMA is set to table_catalog + // This is because PG and MySQL have these flipped (For whatever reason) + + let pool: MSQLPool | 
undefined; + if (dbType === DBType.MySQL) pool = pools.msql_pool; + else pool = pools.rds_msql_pool; + if (pool === undefined) { + throw Error('No pool for msql DB'); + } + + // const query2 = `SELECT + // table_catalog, + // table_schema, + // table_name, + // is_insertable_into + // FROM information_schema.tables + // WHERE table_schema = 'public' or table_schema = 'base' + // ORDER BY table_name;`; + + // query = ` + // SELECT + // TABLE_CATALOG as table_schema, + // TABLE_SCHEMA as table_catalog, + // TABLE_NAME as table_name + // FROM information_schema.tables + // WHERE TABLE_SCHEMA NOT IN('information_schema', 'performance_schema', 'mysql') + // AND TABLE_SCHEMA = '${dbName}' + // ORDER BY table_name;`; + + query = ` + SELECT + TABLE_CATALOG as table_schema, + TABLE_SCHEMA as table_catalog, + TABLE_NAME as table_name + FROM information_schema.tables + WHERE TABLE_SCHEMA NOT IN('information_schema', 'performance_schema', 'mysql', 'sys') + AND TABLE_SCHEMA = '${dbName}' + ORDER BY table_name;`; + + pool + // .query(query2) + .query(query) + .then((tables) => { + for (let i = 0; i < tables[0].length; i++) { + const data = tables[0][i]; + const tableDetailsFromData: TableDetails = { + table_catalog: + typeof data.table_catalog === 'string' + ? data.table_catalog + : '', + table_schema: + typeof data.table_schema === 'string' + ? data.table_schema + : '', + table_name: + typeof data.table_name === 'string' ? data.table_name : '', + is_insertable_into: + typeof data.is_insertable_into === 'string' + ? 
data.is_insertable_into + : '', + }; + tableList.push(tableDetailsFromData); + + // Sys returns way too much stuff idk + if (tableList[i].table_schema !== 'sys') { + promiseArray.push( + databaseModel.getColumnObjects( + tableList[i].table_name, + dbType, + ), + ); + } + } + Promise.all(promiseArray) + .then((columnInfo) => { + for (let i = 0; i < columnInfo.length; i++) { + tableList[i].columns = columnInfo[i]; + } + logger("MySQL 'getDBLists' resolved.", LogType.SUCCESS); + resolve(tableList); + }) + .catch((err) => { + reject(err); + }); + }) + .catch((err) => { + reject(err); + }); + } else if (dbType === DBType.SQLite) { + const sqliteDB = pools.sqlite_db; + + // querying SQLite metadata + query = `SELECT + m.name AS table_name + FROM sqlite_master m + WHERE m.type = 'table' AND m.name != 'sqlite_stat1' AND m.name != 'sqlite_sequence'`; + sqliteDB?.all<{ table_name: string }>(query, (err, rows) => { + if (err) console.error(err.message); + for (let i = 0; i < rows.length; i += 1) { + const newTableDetails: TableDetails = { + table_catalog: dbState.sqlite_options.filename.slice( + dbState.sqlite_options.filename.lastIndexOf('\\') + 1, + ), + table_schema: 'asdf', + table_name: rows[i].table_name, + is_insertable_into: 'asdf', + }; + tableList.push(newTableDetails); + promiseArray.push( + databaseModel.getColumnObjects(rows[i].table_name, dbType), + ); + } + Promise.all(promiseArray) + .then((columnInfo) => { + for (let i = 0; i < columnInfo.length; i += 1) { + tableList[i].columns = columnInfo[i]; + } + logger("SQLite 'getDBLists' resolved.", LogType.SUCCESS); + resolve(tableList); + }) + .catch((error) => { + reject(error); + }); + }); + } + }), +}; + +export default databaseModel; diff --git a/backend/src/models/queryModel.ts b/backend/src/models/queryModel.ts new file mode 100644 index 00000000..9c75c9a3 --- /dev/null +++ b/backend/src/models/queryModel.ts @@ -0,0 +1,90 @@ +import { performance } from 'perf_hooks'; + +import { DBType, LogType, queryModelType 
} from '../../../shared/types/dbTypes'; + +import logger from '../utils/logging/masterlog'; +import pools from '../db/poolVariables'; + +/* +README: "queryModel" deals with business logic of any incoming queries from the query sidebar*?. Implement furthur query functionalities here NOT ERDtable +FUNCTIONS: query, sampler +*/ + +// Functions +const queryModel: queryModelType = { + /** + * 'query': + * runs sql command depending on the database + */ + query: (text, params, dbType): Promise | undefined => { + logger(`Attempting to run query: \n ${text} for: \n ${dbType}`); + + if (dbType === DBType.RDSPostgres) { + return pools.rds_pg_pool?.query(text, params).catch((err) => { + logger(err.message, LogType.WARNING); + }); + } + + if (dbType === DBType.RDSMySQL) { + return pools.rds_msql_pool?.query(text, params); + } + + if (dbType === DBType.Postgres) { + return pools.pg_pool?.query(text, params).catch((err) => { + logger(err.message, LogType.WARNING); + }); + } + + if (dbType === DBType.MySQL) { + // pools.msql_pool.query(`USE ${this.curMSQL_DB}`); + return pools.msql_pool?.query(text, params); + } + + if (dbType === DBType.SQLite) { + return new Promise((resolve, reject) => { + pools.sqlite_db?.all(text, (err, res) => { + if (err) { + logger(err.message, LogType.WARNING); + reject(err); + } else { + resolve(res); + } + }); + }); + } + return new Promise((resolve, reject) => { + reject(Error('Invalid DB Type')); + }); + }, + + sampler: (queryString) => + new Promise((resolve, reject) => { + pools.sqlite_db?.run('BEGIN', (err) => { + if (err) { + console.error(err.message); + reject(err); + } else { + const startTime = performance.now(); + pools.sqlite_db?.all(queryString, (err1) => { + if (err1) { + console.error(err1.message); + reject(err1); + } else { + const endTime = performance.now(); + pools.sqlite_db?.run('ROLLBACK', (err2) => { + if (err2) { + console.error(err2.message); + reject(err2); + } else { + const elapsedTime = endTime - startTime; + 
resolve(elapsedTime); + } + }); + } + }); + } + }); + }), +}; + +export default queryModel; diff --git a/backend/src/models/readme.txt b/backend/src/models/readme.txt new file mode 100644 index 00000000..32b51da5 --- /dev/null +++ b/backend/src/models/readme.txt @@ -0,0 +1,16 @@ +Models write to databases and perform business logics. I want to point out "dbStateModel" is the special model here, becuse it encompasses the state of the backend, of which saves which users are currently logged in and which database is the "active one" for ERDtable. + +configModel: + +"connectionModel" deals with business logic of connetion actions. This file dealswith logining and connections to different kinds of databases. + +FUNCTIONS: setBaseConnections, connectToDB, disconnectToDrop + +"databaseModel" deals with business logic of connetion actions. This file dealswith logining and connections to different kinds of databases. + +FUNCTIONS: getLists, getTableInfo, getDBNames, getColumnObjects, getDBLists +dbStateModel: + +"queryModel" deals with business logic of any incoming queries from the query sidebar*?. 
Implement furthur query functionalities here NOT ERDtable + +FUNCTIONS: query, sampler \ No newline at end of file diff --git a/backend/src/models/stateModel.ts b/backend/src/models/stateModel.ts new file mode 100644 index 00000000..3de503de --- /dev/null +++ b/backend/src/models/stateModel.ts @@ -0,0 +1,36 @@ +import { dbsInputted, DBType } from '../../../shared/types/dbTypes'; +import { DocConfigFile } from '../../BE_types'; +import { defaultFile } from './configModel'; + +type DBState = DocConfigFile & { + dbsInputted: dbsInputted; + currentERD: DBType; + currentDb: string; +}; + +const dbState: DBState = { + // NEEDS UPDATE TO PETERS NEW SYNTAX + pg_options: defaultFile.pg_options, + mysql_options: defaultFile.mysql_options, + rds_mysql_options: defaultFile.rds_mysql_options, + rds_pg_options: defaultFile.rds_pg_options, + sqlite_options: defaultFile.sqlite_options, + directPGURI_options: defaultFile.directPGURI_options, + + dbsInputted: { + pg: false, + msql: false, + rds_pg: false, + rds_msql: false, + sqlite: false, + directPGURI: false, + }, + + // current ERD Db type + currentERD: DBType.Postgres, + + // current Database + currentDb: '', +}; + +export default dbState; diff --git a/backend/DummyD/dummyDataMain.ts b/backend/src/utils/dummyData/dummyDataMain.ts similarity index 53% rename from backend/DummyD/dummyDataMain.ts rename to backend/src/utils/dummyData/dummyDataMain.ts index 1de6a28c..a7085e70 100644 --- a/backend/DummyD/dummyDataMain.ts +++ b/backend/src/utils/dummyData/dummyDataMain.ts @@ -1,8 +1,7 @@ -import faker from 'faker'; -import { ColumnObj, DummyRecords, LogType } from '../BE_types'; -import logger from '../Logging/masterlog'; - -const db = require('../models'); +import { faker } from '@faker-js/faker'; +import { ColumnObj, DummyRecords, LogType } from '../../../BE_types'; +import logger from '../logging/masterlog'; +import queryModel from '../../models/queryModel'; /* THIS FILE CONTAINS THE ALGORITHMS THAT GENERATE DUMMY DATA */ /* */ @@ 
-13,50 +12,53 @@ const db = require('../models'); /* subsequently concatenated into the INSERT query generated */ /* in channels.ts to generate the dummy records for the table */ - // *************************************************** Helper Functions *************************************************** // // helper function to generate random numbers that will ultimately represent a random date + const getRandomInt = (min: number, max: number) => { const minInt = Math.ceil(min); const maxInt = Math.floor(max); // The maximum is exclusive and the minimum is inclusive - return Math.floor(Math.random() * (maxInt - minInt) + minInt); + return Math.floor(Math.random() * (maxInt - minInt) + minInt); }; - // helper function to generate random data based on a given column's data type const generateDataByType = (columnObj: ColumnObj): string | number => { let length; - // faker.js method to generate data by type - // console.log('columnObj_datatype: ', columnObj.data_type) - + // updated the new faker package so updated to follow proper documentation. switch (columnObj.data_type) { case 'smallint': - return faker.random.number({ min: -32768, max: 32767 }); + return faker.number.int({ min: -32768, max: 32767 }); case 'integer': - return faker.random.number({ min: -2147483648, max: 2147483647 }); + return faker.number.int({ min: -2147483648, max: 2147483647 }); case 'bigint': - return faker.random.number({ + return faker.number.int({ min: -9223372036854775808, max: 9223372036854775807, }); case 'character varying': // defaulting length to 3 because faker.lorem defaults to a length of 3 if no length is specified - length = columnObj.character_maximum_length && columnObj.character_maximum_length > 3 - ? Math.floor(Math.random() * columnObj.character_maximum_length) - : 3; - return '\''.concat(faker.random.alphaNumeric(length)).concat('\''); + length = + columnObj.character_maximum_length && + columnObj.character_maximum_length > 3 + ? 
Math.floor(Math.random() * columnObj.character_maximum_length) + : 3; + // TODO: need to properly deal with the deprecated faker.random.alphaNumeric(length) method + return "'".concat(faker.random.alphaNumeric(length)).concat("'"); case 'varchar': // defaulting length to 3 because faker.lorem defaults to a length of 3 if no length is specified - - length = columnObj.character_maximum_length && columnObj.character_maximum_length > 3 - ? Math.floor(Math.random() * columnObj.character_maximum_length) - : 3; - return '\''.concat(faker.random.alphaNumeric(length)).concat('\''); + + length = + columnObj.character_maximum_length && + columnObj.character_maximum_length > 3 + ? Math.floor(Math.random() * columnObj.character_maximum_length) + : 3; + // TODO: need to properly deal with the deprecated faker.random.alphaNumeric(length) method + return "'".concat(faker.random.alphaNumeric(length)).concat("'"); case 'int': - return faker.random.number({ min: -2147483648, max: 2147483647 }); + return faker.number.int({ min: -2147483648, max: 2147483647 }); case 'date': { // generating a random date between 1500 and 2020 const year = getRandomInt(1500, 2020).toString(); @@ -65,10 +67,10 @@ const generateDataByType = (columnObj: ColumnObj): string | number => { let day = getRandomInt(1, 29).toString(); if (day.length === 1) day = `0${day}`; const result = `${year}/${month}/${day}`; - return '\''.concat(result).concat('\''); + return "'".concat(result).concat("'"); } case 'boolean': { - return '\''.concat(faker.random.boolean()).concat('\''); + return "'".concat(faker.datatype.boolean().toString()).concat("'"); } default: throw new Error('unhandled data type'); @@ -77,12 +79,18 @@ const generateDataByType = (columnObj: ColumnObj): string | number => { // *************************************************** Main Function to Generate Dummy Data *************************************************** // -type GenerateDummyData = (tableInfo: ColumnObj[], numRows: number) => Promise; +type 
GenerateDummyData = ( + tableInfo: ColumnObj[], + numRows: number, +) => Promise; -const generateDummyData: GenerateDummyData = async (tableInfo: ColumnObj[], numRows: number) => { +const generateDummyData: GenerateDummyData = async ( + tableInfo: ColumnObj[], + numRows: number, +) => { // assuming primary key is serial, get all the column names except for the column with the primary key const columnNames: Array = []; - for(let i = 0; i < tableInfo.length; i++) { + for (let i = 0; i < tableInfo.length; i++) { columnNames.push(tableInfo[i].column_name); } @@ -94,31 +102,40 @@ const generateDummyData: GenerateDummyData = async (tableInfo: ColumnObj[], numR // at each row, check the columns of the table and generate dummy data accordingly for (let j = 0; j < tableInfo.length; j += 1) { // if column has no foreign key constraint, then generate dummy data based on data type - if (tableInfo[j].constraint_type !== 'FOREIGN KEY'){ + if (tableInfo[j].constraint_type !== 'FOREIGN KEY') { // && tableInfo[j].constraint_type !== 'PRIMARY KEY' - row.push(generateDataByType(tableInfo[j])) + row.push(generateDataByType(tableInfo[j])); } - - // if there is a foreign key constraint, grab random key from foreign table - else if (tableInfo[j].constraint_type === 'FOREIGN KEY') { - const foreignColumn = tableInfo[j].foreign_column; - const foreignTable = tableInfo[j].foreign_table; - const getForeignKeyQuery = ` + // if there is a foreign key constraint, grab random key from foreign table + else if (tableInfo[j].constraint_type === 'FOREIGN KEY') { + const foreignColumn = tableInfo[j].foreign_column; + const foreignTable = tableInfo[j].foreign_table; + const getForeignKeyQuery = ` SELECT ${foreignColumn} FROM ${foreignTable} TABLESAMPLE BERNOULLI(100) LIMIT 1 `; - const foreignKey = await db.query(getForeignKeyQuery); - const chosenPrimaryValue = foreignKey.rows[0][Object.keys(foreignKey.rows[0])[0]] - if (foreignKey.rows.length) { - if (typeof chosenPrimaryValue === 'string') 
row.push(`'${chosenPrimaryValue}'`); - else row.push(chosenPrimaryValue); - } - else{ - logger('There was an error while retrieving a valid foreign key while generating dummy data.', LogType.ERROR); - throw new Error('There was an error while retrieving a valid foreign key.'); - } + + // const foreignKey = await queryModel.query(getForeignKeyQuery); + // ISSUE CO-76: SINCE DB IS UPDATED, WE NEED TO ADJUST THIS LOGIC FOR GETTING FOREIGN KEY + const foreignKey = { rows: 'nothing here' }; + // CO-76: temp values to prevent file from breaking. get rid of this after you fix queryModel.query(getForeignKeyQuery) + const chosenPrimaryValue = + foreignKey.rows[0][Object.keys(foreignKey.rows[0])[0]]; + if (foreignKey.rows.length) { + if (typeof chosenPrimaryValue === 'string') + row.push(`'${chosenPrimaryValue}'`); + else row.push(chosenPrimaryValue); + } else { + logger( + 'There was an error while retrieving a valid foreign key while generating dummy data.', + LogType.ERROR, + ); + throw new Error( + 'There was an error while retrieving a valid foreign key.', + ); + } } } dummyRecords.push(row); diff --git a/backend/DummyD/primaryAndForeignKeyQueries.ts b/backend/src/utils/dummyData/primaryAndForeignKeyQueries.ts similarity index 98% rename from backend/DummyD/primaryAndForeignKeyQueries.ts rename to backend/src/utils/dummyData/primaryAndForeignKeyQueries.ts index ac025fc7..e9c84739 100644 --- a/backend/DummyD/primaryAndForeignKeyQueries.ts +++ b/backend/src/utils/dummyData/primaryAndForeignKeyQueries.ts @@ -1,3 +1,5 @@ +// where is this file used? + const foreignAndPrimaryKeys = { /** * The information schema itself is a schema named information_schema. 
This schema automatically exists in all diff --git a/backend/src/utils/erdCUD/SqLiteCUD.ts b/backend/src/utils/erdCUD/SqLiteCUD.ts new file mode 100644 index 00000000..7cf9ef2b --- /dev/null +++ b/backend/src/utils/erdCUD/SqLiteCUD.ts @@ -0,0 +1,128 @@ +import { + ErdUpdatesType, + OperationType, + PsqlColumnOperations, +} from '../../../../shared/types/erTypes'; + +/** + * POSTGRESQL COLUMN HELPER + * @param columnOperations + * @returns string + */ + +export function generateSqLiteColumnQuery( + tableName: string, + columnOperations: PsqlColumnOperations, +): string { + const { columnAction, columnName } = columnOperations; + + let effect: string = columnName; + let action: string; + + switch (columnAction) { + case 'addColumn': { + action = 'ADD'; + if (columnOperations.type !== undefined) { + // if there is a type + effect += ` TYPE ${columnOperations.type}`; + } + break; + } + + case 'dropColumn': + action = 'DROP'; + break; + + // alters + case 'alterColumnType': + action = 'ALTER'; + effect += ` TYPE ${columnOperations.type}`; + break; + + case 'renameColumn': + action = 'RENAME'; + effect += ` TO ${columnOperations.newColumnName}`; + break; + + // contraints + case 'togglePrimary': { + if (!columnOperations.isPrimary) { + // usually default to user_pkey. need further investigation + return `DROP CONSTRAINT users_pkey`; + } + return `ADD PRIMARY KEY (${effect})`; + } + + case 'toggleForeign': { + const { hasForeign, foreignConstraint } = columnOperations; + if (!hasForeign) { + return `DROP CONSTRAINT ${foreignConstraint}`; + } + return `ADD CONSTRAINT ${foreignConstraint} FOREIGN KEY (${columnName}) REFERENCES ${columnOperations.foreignTable} (${columnOperations.foreignColumn})`; + } + + // we are assuminmg postgress will generate a new unique key for us if it is not taken here. risky. 
+ case 'toggleUnique': { + const { isUnique } = columnOperations; + if (!isUnique) { + return `DROP CONSTRAINT ${tableName}_${columnName}_key`; + } + return `ADD UNIQUE (${columnName})`; + } + + default: + throw new Error(`Invalid tableAction: ${columnAction as string}`); + } + + return `${action} COLUMN ${effect}`; +} + +/** + * POSTGRESQL OPERATIONS + * @param updatesArray + * @returns Array of strings + */ + +export function querySqLite(updatesArray: ErdUpdatesType): string[] { + const psqlArray: string[] = []; + updatesArray.forEach((operation: OperationType) => { + const { action, tableName, tableSchema } = operation; + switch (action) { + // this is adding tables for psql + case 'add': + psqlArray.push(`CREATE TABLE ${tableSchema}.${tableName};`); + break; + + // this is dropping tables for psql + case 'drop': + psqlArray.push(`DROP TABLE ${tableSchema}.${tableName};`); + break; + + // this is altering table name for psql + case 'alter': { + const { newTableName } = operation; + psqlArray.push( + `ALTER TABLE ${tableSchema}.${tableName} RENAME TO ${newTableName};`, + ); + break; + } + + // this is altering columns name for psql + case 'column': { + const { columnOperations } = operation; + const alterQuery: string = generateSqLiteColumnQuery( + tableName, + columnOperations as PsqlColumnOperations, + ); + psqlArray.push( + `ALTER TABLE ${tableSchema}.${tableName} ${alterQuery};`, + ); + break; + } + + default: + throw new Error(`Invalid action: ${action as string}`); + } + }); + return psqlArray; +} diff --git a/backend/src/utils/erdCUD/mySqlCUD.ts b/backend/src/utils/erdCUD/mySqlCUD.ts new file mode 100644 index 00000000..6538fb10 --- /dev/null +++ b/backend/src/utils/erdCUD/mySqlCUD.ts @@ -0,0 +1,128 @@ +import { + ErdUpdatesType, + OperationType, + PsqlColumnOperations, +} from '../../../../shared/types/erTypes'; + +/** + * POSTGRESQL COLUMN HELPER + * @param columnOperations + * @returns string + */ + +export function generateMySqlColumnQuery( + 
tableName: string, + columnOperations: PsqlColumnOperations, +): string { + const { columnAction, columnName } = columnOperations; + + let effect: string = columnName; + let action: string; + + switch (columnAction) { + case 'addColumn': { + action = 'ADD'; + if (columnOperations.type !== undefined) { + // if there is a type + effect += ` TYPE ${columnOperations.type}`; + } + break; + } + + case 'dropColumn': + action = 'DROP'; + break; + + // alters + case 'alterColumnType': + action = 'ALTER'; + effect += ` TYPE ${columnOperations.type}`; + break; + + case 'renameColumn': + action = 'RENAME'; + effect += ` TO ${columnOperations.newColumnName}`; + break; + + // contraints + case 'togglePrimary': { + if (!columnOperations.isPrimary) { + // usually default to user_pkey. need further investigation + return `DROP CONSTRAINT users_pkey`; + } + return `ADD PRIMARY KEY (${effect})`; + } + + case 'toggleForeign': { + const { hasForeign, foreignConstraint } = columnOperations; + if (!hasForeign) { + return `DROP CONSTRAINT ${foreignConstraint}`; + } + return `ADD CONSTRAINT ${foreignConstraint} FOREIGN KEY (${columnName}) REFERENCES ${columnOperations.foreignTable} (${columnOperations.foreignColumn})`; + } + + // we are assuminmg postgress will generate a new unique key for us if it is not taken here. risky. 
+ case 'toggleUnique': { + const { isUnique } = columnOperations; + if (!isUnique) { + return `DROP CONSTRAINT ${tableName}_${columnName}_key`; + } + return `ADD UNIQUE (${columnName})`; + } + + default: + throw new Error(`Invalid tableAction: ${columnAction as string}`); + } + + return `${action} COLUMN ${effect}`; +} + +/** + * POSTGRESQL OPERATIONS + * @param updatesArray + * @returns Array of strings + */ + +export function queryMySql(updatesArray: ErdUpdatesType): string[] { + const psqlArray: string[] = []; + updatesArray.forEach((operation: OperationType) => { + const { action, tableName, tableSchema } = operation; + switch (action) { + // this is adding tables for psql + case 'add': + psqlArray.push(`CREATE TABLE ${tableSchema}.${tableName};`); + break; + + // this is dropping tables for psql + case 'drop': + psqlArray.push(`DROP TABLE ${tableSchema}.${tableName};`); + break; + + // this is altering table name for psql + case 'alter': { + const { newTableName } = operation; + psqlArray.push( + `ALTER TABLE ${tableSchema}.${tableName} RENAME TO ${newTableName};`, + ); + break; + } + + // this is altering columns name for psql + case 'column': { + const { columnOperations } = operation; + const alterQuery: string = generateMySqlColumnQuery( + tableName, + columnOperations as PsqlColumnOperations, + ); + psqlArray.push( + `ALTER TABLE ${tableSchema}.${tableName} ${alterQuery};`, + ); + break; + } + + default: + throw new Error(`Invalid action: ${action as string}`); + } + }); + return psqlArray; +} diff --git a/backend/src/utils/erdCUD/pSqlCUD.ts b/backend/src/utils/erdCUD/pSqlCUD.ts new file mode 100644 index 00000000..0b99f203 --- /dev/null +++ b/backend/src/utils/erdCUD/pSqlCUD.ts @@ -0,0 +1,128 @@ +import { + ErdUpdatesType, + OperationType, + PsqlColumnOperations, +} from '../../../../shared/types/erTypes'; + +/** + * POSTGRESQL COLUMN HELPER + * @param columnOperations + * @returns string + */ + +export function generatePostgresColumnQuery( + tableName: 
string, + columnOperations: PsqlColumnOperations, +): string { + const { columnAction, columnName } = columnOperations; + + let effect: string = columnName; + let action: string; + + switch (columnAction) { + case 'addColumn': { + action = 'ADD'; + if (columnOperations.type !== undefined) { + // if there is a type + effect += ` TYPE ${columnOperations.type}`; + } + break; + } + + case 'dropColumn': + action = 'DROP'; + break; + + // alters + case 'alterColumnType': + action = 'ALTER'; + effect += ` TYPE ${columnOperations.type}`; + break; + + case 'renameColumn': + action = 'RENAME'; + effect += ` TO ${columnOperations.newColumnName}`; + break; + + // contraints + case 'togglePrimary': { + if (!columnOperations.isPrimary) { + // usually default to user_pkey. need further investigation + return `DROP CONSTRAINT users_pkey`; + } + return `ADD PRIMARY KEY (${effect})`; + } + + case 'toggleForeign': { + const { hasForeign, foreignConstraint } = columnOperations; + if (!hasForeign) { + return `DROP CONSTRAINT ${foreignConstraint}`; + } + return `ADD CONSTRAINT ${foreignConstraint} FOREIGN KEY (${columnName}) REFERENCES ${columnOperations.foreignTable} (${columnOperations.foreignColumn})`; + } + + // we are assuminmg postgress will generate a new unique key for us if it is not taken here. risky. 
+ case 'toggleUnique': { + const { isUnique } = columnOperations; + if (!isUnique) { + return `DROP CONSTRAINT ${tableName}_${columnName}_key`; + } + return `ADD UNIQUE (${columnName})`; + } + + default: + throw new Error(`Invalid tableAction: ${columnAction as string}`); + } + + return `${action} COLUMN ${effect}`; +} + +/** + * POSTGRESQL OPERATIONS + * @param updatesArray + * @returns Array of strings + */ + +export function queryPostgres(updatesArray: ErdUpdatesType): string[] { + const psqlArray: string[] = []; + updatesArray.forEach((operation: OperationType) => { + const { action, tableName, tableSchema } = operation; + switch (action) { + // this is adding tables for psql + case 'add': + psqlArray.push(`CREATE TABLE ${tableSchema}.${tableName};`); + break; + + // this is dropping tables for psql + case 'drop': + psqlArray.push(`DROP TABLE ${tableSchema}.${tableName};`); + break; + + // this is altering table name for psql + case 'alter': { + const { newTableName } = operation; + psqlArray.push( + `ALTER TABLE ${tableSchema}.${tableName} RENAME TO ${newTableName};`, + ); + break; + } + + // this is altering columns name for psql + case 'column': { + const { columnOperations } = operation; + const alterQuery: string = generatePostgresColumnQuery( + tableName, + columnOperations as PsqlColumnOperations, + ); + psqlArray.push( + `ALTER TABLE ${tableSchema}.${tableName} ${alterQuery};`, + ); + break; + } + + default: + throw new Error(`Invalid action: ${action as string}`); + } + }); + return psqlArray; +} diff --git a/backend/src/utils/erdTableFunctions.ts b/backend/src/utils/erdTableFunctions.ts new file mode 100644 index 00000000..439408be --- /dev/null +++ b/backend/src/utils/erdTableFunctions.ts @@ -0,0 +1,44 @@ +import { ErdUpdatesType } from '../../../shared/types/erTypes'; +import { queryPostgres } from './erdCUD/pSqlCUD'; +import { queryMySql } from './erdCUD/mySqlCUD'; +import { querySqLite } from './erdCUD/SqLiteCUD'; +import { DBType } from 
'../../../shared/types/dbTypes'; + +function erdUpdatesToQuery( + updatesArray: ErdUpdatesType, + currentERD: DBType, +): string { + let returnArray: string[] = []; + + // check current dbType of active ERD table and pick a query method + // const currentERD = 'pg'; // replace this later with dbState below + + switch (currentERD) { + case DBType.Postgres: + case DBType.RDSPostgres: + returnArray = queryPostgres(updatesArray); + break; + + // TODO: queryMySql has not been written. + case DBType.MySQL: + case DBType.RDSMySQL: + returnArray = queryMySql(updatesArray); + break; + + // TODO: querySqLite has not been written. it is just a copy of queryPostgres + case DBType.SQLite: + returnArray = querySqLite(updatesArray); + break; + + default: + returnArray = []; + console.error(`Unknown DBType: ${currentERD}`); // errors for switch question is where does this go to? + break; + } + + // return array will need to join with space between each query + + return returnArray.join(' '); +} + +export default erdUpdatesToQuery; diff --git a/backend/ertable-functions.ts b/backend/src/utils/ertable-functions.ts similarity index 91% rename from backend/ertable-functions.ts rename to backend/src/utils/ertable-functions.ts index 2ddafcd5..1bc2d1b3 100644 --- a/backend/ertable-functions.ts +++ b/backend/src/utils/ertable-functions.ts @@ -4,12 +4,12 @@ import { AlterTablesObjType, AlterColumnsObjType, AddConstraintObjType, -} from '../frontend/types'; +} from '../../../frontend/types'; -import { BackendObjType, DBType } from './BE_types'; +import { BackendObjType, DBType } from '../../../shared/types/dbTypes'; /** - * + * * @param backendObj object containing info as to what changes are to be made to the database * @param dbType type of database * @returns Query string containing all changes to be made to the database @@ -20,19 +20,21 @@ function backendObjToQuery(backendObj: BackendObjType, dbType: DBType): string { // Add table to database /** - * Function adding commands for adding 
queries to the output depending on tables that need to be added + * Function adding commands for adding queries to the output depending on tables that need to be added * @param addTableArray holds array of properties of tables to be added to database * @param alterTablesArray holds table of properties of tables to be altered, and alterations to be made */ - function addTable(addTableArray: AddTablesObjType[], alterTablesArray: AlterTablesObjType[]): void { + function addTable( + addTableArray: AddTablesObjType[], + alterTablesArray: AlterTablesObjType[], + ): void { for (let i = 0; i < addTableArray.length; i += 1) { - const currTable: AddTablesObjType = addTableArray[i]; const currAlterTable: AlterTablesObjType = alterTablesArray[i]; if (dbType === DBType.Postgres || dbType === DBType.RDSPostgres) { outputArray.push( - `CREATE TABLE ${currTable.table_schema}.${currTable.table_name}(); ` + `CREATE TABLE ${currTable.table_schema}.${currTable.table_name}(); `, ); } if (dbType === DBType.MySQL || dbType === DBType.RDSMySQL) { @@ -42,29 +44,13 @@ function backendObjToQuery(backendObj: BackendObjType, dbType: DBType): string { (${currAlterTable.addColumns[0].column_name} ${currAlterTable.addColumns[0].data_type} (${currAlterTable.addColumns[0].character_maximum_length})) - ;` + ;`, ); } if (dbType === DBType.SQLite) { - console.log('TEST') - // for (let j = 0; j < alterTablesArray.length; j += 1) { - // console.log(alterTablesArray[j]) - // if (addTableArray[i].table_name === alterTablesArray[j].table_name) { - // if (alterTablesArray[j].alterColumns.length) { - // for (let k = 0; k < alterTablesArray[j].alterColumns.length; k += 1) { - // if (alterTablesArray[j].alterColumns[k].column_name === 'NewColumn1') { - // firstAddingMySQLColumnName = alterTablesArray[j].alterColumns[k].new_column_name; - // outputArray.push( - // `CREATE TABLE ${currTable.table_name}(${alterTablesArray[j].alterColumns[k].new_column_name} 
${alterTablesArray[j].alterColumns[k].data_type}(${alterTablesArray[j].alterColumns[k].character_maximum_length}));` - // ); - // }; - // } - // } - // } - // } outputArray.push( - `CREATE TABLE ${currTable.table_name}(id INTEGER PRIMARY KEY AUTOINCREMENT); ` + `CREATE TABLE ${currTable.table_name}(id INTEGER PRIMARY KEY AUTOINCREMENT); `, ); } } @@ -74,17 +60,16 @@ function backendObjToQuery(backendObj: BackendObjType, dbType: DBType): string { function dropTable(dropTableArray: DropTablesObjType[]): void { for (let i = 0; i < dropTableArray.length; i += 1) { const currTable: DropTablesObjType = dropTableArray[i]; - if (dbType === DBType.Postgres || dbType === DBType.RDSPostgres) + if (dbType === DBType.Postgres || dbType === DBType.RDSPostgres) { outputArray.push( - `DROP TABLE ${currTable.table_schema}.${currTable.table_name}; ` + `DROP TABLE ${currTable.table_schema}.${currTable.table_name}; `, ); + } if (dbType === DBType.MySQL || dbType === DBType.RDSMySQL) outputArray.push(`DROP TABLE ${currTable.table_name}; `); if (dbType === DBType.SQLite) { - outputArray.push( - `DROP TABLE ${currTable.table_name}; ` - ); + outputArray.push(`DROP TABLE ${currTable.table_name}; `); } } } @@ -103,14 +88,16 @@ function backendObjToQuery(backendObj: BackendObjType, dbType: DBType): string { if (currTable.addColumns[i].character_maximum_length != null) { lengthOfData = `(${currTable.addColumns[i].character_maximum_length})`; } - if (firstAddingMySQLColumnName === null || firstAddingMySQLColumnName !== `${currTable.addColumns[i].column_name}`) { + if ( + firstAddingMySQLColumnName === null || + firstAddingMySQLColumnName !== + `${currTable.addColumns[i].column_name}` + ) { addColumnString += `ALTER TABLE ${currTable.table_name} ADD COLUMN ${currTable.addColumns[i].column_name} ${currTable.addColumns[i].data_type} ${lengthOfData}; `; - } } if (dbType === DBType.SQLite) addColumnString += `ALTER TABLE ${currTable.table_name} ADD COLUMN ${currTable.addColumns[i].column_name} 
${currTable.addColumns[i].data_type}(${currTable.addColumns[i].character_maximum_length}); `; - } } return addColumnString; @@ -136,7 +123,7 @@ function backendObjToQuery(backendObj: BackendObjType, dbType: DBType): string { // Add a primary key constraint to column function addPrimaryKey( currConstraint: AddConstraintObjType, - currColumn: AlterColumnsObjType + currColumn: AlterColumnsObjType, ): void { let defaultRowValue: number | string; if (currColumn.current_data_type === 'character varying') @@ -151,7 +138,7 @@ function backendObjToQuery(backendObj: BackendObjType, dbType: DBType): string { // Add a foreign key constraint to column function addForeignKey( currConstraint: AddConstraintObjType, - currColumn: AlterColumnsObjType + currColumn: AlterColumnsObjType, ): void { if (dbType === DBType.Postgres || dbType === DBType.RDSPostgres) alterTableConstraintString += `ALTER TABLE ${currTable.table_schema}.${currTable.table_name} ADD CONSTRAINT ${currConstraint.constraint_name} FOREIGN KEY ("${currColumn.column_name}") REFERENCES ${currConstraint.foreign_table}(${currConstraint.foreign_column}); `; @@ -161,7 +148,7 @@ function backendObjToQuery(backendObj: BackendObjType, dbType: DBType): string { // Add a unique constraint to column function addUnique( currConstraint: AddConstraintObjType, - currColumn: AlterColumnsObjType + currColumn: AlterColumnsObjType, ): void { if (dbType === DBType.Postgres || dbType === DBType.RDSPostgres) alterTableConstraintString += `ALTER TABLE ${currTable.table_schema}.${currTable.table_name} ADD CONSTRAINT ${currConstraint.constraint_name} UNIQUE (${currColumn.column_name}); `; @@ -256,10 +243,10 @@ function backendObjToQuery(backendObj: BackendObjType, dbType: DBType): string { const currTable: AlterTablesObjType = alterTableArray[i]; outputArray.push( `${addColumn(currTable)}${dropColumn(currTable)}${alterType( - currTable + currTable, )}${alterTableConstraint(currTable)}${alterNotNullConstraint( - currTable - 
)}${alterMaxCharacterLength(currTable)}` + currTable, + )}${alterMaxCharacterLength(currTable)}`, ); } } @@ -308,8 +295,8 @@ function backendObjToQuery(backendObj: BackendObjType, dbType: DBType): string { currAlterColumn.rename_constraint[0] === 'p' ? 'pk' : 'f' - ? 'fk' - : 'unique', + ? 'fk' + : 'unique', column_name: currAlterColumn.new_column_name ? currAlterColumn.new_column_name : currAlterColumn.column_name, @@ -379,5 +366,4 @@ function backendObjToQuery(backendObj: BackendObjType, dbType: DBType): string { return outputArray.join(''); } - export default backendObjToQuery; diff --git a/backend/helperFunctions.ts b/backend/src/utils/helperFunctions.ts similarity index 67% rename from backend/helperFunctions.ts rename to backend/src/utils/helperFunctions.ts index 8bf6cdc3..740b101d 100644 --- a/backend/helperFunctions.ts +++ b/backend/src/utils/helperFunctions.ts @@ -1,8 +1,7 @@ /* eslint-disable object-shorthand */ -import { DBType } from './BE_types'; - -const { exec } = require('child_process'); // Child_Process: Importing Node.js' child_process API -const docConfig = require('./_documentsConfig'); +import { exec } from 'child_process'; // Child_Process: Importing Node.js' child_process API +import { DBType } from '../../../shared/types/dbTypes'; +import docConfig from '../models/configModel'; // ************************************** CLI COMMANDS & SQL Queries TO CREATE, DELETE, COPY DB SCHEMA, etc. 
************************************** // // Generate SQL queries & CLI commands to be executed in pg and child process respectively @@ -63,7 +62,7 @@ const helperFunctions: HelperFunctions = { const PG = `BEGIN; EXPLAIN (FORMAT JSON, ANALYZE, VERBOSE, BUFFERS) ${sqlString}; ROLLBACK`; // const MYSQL = `BEGIN; EXPLAIN ANALYZE ${sqlString}`; const MYSQL = `EXPLAIN ANALYZE ${sqlString}`; - const SQLite = `EXPLAIN QUERY PLAN ${sqlString}` + const SQLite = `EXPLAIN QUERY PLAN ${sqlString}`; if (dbType === DBType.Postgres || dbType === DBType.RDSPostgres) return PG; if (dbType === DBType.MySQL || dbType === DBType.RDSMySQL) return MYSQL; @@ -74,8 +73,8 @@ const helperFunctions: HelperFunctions = { // import SQL file into new DB created runSQLFunc: function runSQLFunc(dbName, file, dbType: DBType) { const SQL_data = docConfig.getFullConfig(); - const PG = `psql -U ${SQL_data.pg.user} -d "${dbName}" -f "${file}" -p ${SQL_data.pg.port}`; - const MYSQL = `export MYSQL_PWD='${SQL_data.mysql.password}'; mysql -u${SQL_data.mysql.user} --port=${SQL_data.mysql.port} ${dbName} < ${file}`; + const PG = `PGPASSWORD=${SQL_data?.pg_options.password} psql -U ${SQL_data?.pg_options.user} -d "${dbName}" -f "${file}" -p ${SQL_data?.pg_options.port}`; + const MYSQL = `export MYSQL_PWD='${SQL_data?.mysql_options.password}'; mysql -u${SQL_data?.mysql_options.user} --port=${SQL_data?.mysql_options.port} ${dbName} < ${file}`; if (dbType === DBType.Postgres || dbType === DBType.RDSPostgres) return PG; if (dbType === DBType.MySQL || dbType === DBType.RDSMySQL) return MYSQL; return 'invalid dbtype'; @@ -84,8 +83,8 @@ const helperFunctions: HelperFunctions = { // import TAR file into new DB created runTARFunc: function runTARFunc(dbName, file, dbType: DBType) { const SQL_data = docConfig.getFullConfig(); - const PG = `pg_restore -U ${SQL_data.pg.user} -p ${SQL_data.pg.port} -d "${dbName}" "${file}" `; - const MYSQL = `export MYSQL_PWD='${SQL_data.mysql.password}'; mysqldump -u 
${SQL_data.mysql.user} --port=${SQL_data.mysql.port} ${dbName} > ${file}`; + const PG = `PGPASSWORD=${SQL_data?.pg_options.password} pg_restore -U ${SQL_data?.pg_options.user} -p ${SQL_data?.pg_options.port} -d "${dbName}" "${file}" `; + const MYSQL = `export MYSQL_PWD='${SQL_data?.mysql_options.password}'; mysqldump -u ${SQL_data?.mysql_options.user} --port=${SQL_data?.mysql_options.port} ${dbName} > ${file}`; if (dbType === DBType.Postgres || dbType === DBType.RDSPostgres) return PG; if (dbType === DBType.MySQL || dbType === DBType.RDSMySQL) return MYSQL; return 'invalid dbtype'; @@ -95,11 +94,11 @@ const helperFunctions: HelperFunctions = { runFullCopyFunc: function runFullCopyFunc( dbCopyName, newFile, - dbType: DBType + dbType: DBType, ) { const SQL_data = docConfig.getFullConfig(); - const PG = `pg_dump -U ${SQL_data.pg.user} -p ${SQL_data.pg.port} -Fp -d ${dbCopyName} > "${newFile}"`; - const MYSQL = `export MYSQL_PWD='${SQL_data.mysql.password}'; mysqldump -h localhost -u ${SQL_data.mysql.user} ${dbCopyName} > ${newFile}`; + const PG = `PGPASSWORD=${SQL_data?.pg_options.password} pg_dump -s -U ${SQL_data?.pg_options.user} -p ${SQL_data?.pg_options.port} -Fp -d ${dbCopyName} > "${newFile}"`; + const MYSQL = `export MYSQL_PWD='${SQL_data?.mysql_options.password}'; mysqldump -h localhost -u ${SQL_data?.mysql_options.user} ${dbCopyName} > ${newFile}`; if (dbType === DBType.Postgres || dbType === DBType.RDSPostgres) return PG; if (dbType === DBType.MySQL || dbType === DBType.RDSMySQL) return MYSQL; return 'invalid dbtype'; @@ -109,11 +108,11 @@ const helperFunctions: HelperFunctions = { runHollowCopyFunc: function runHollowCopyFunc( dbCopyName, file, - dbType: DBType + dbType: DBType, ) { const SQL_data = docConfig.getFullConfig(); - const PG = `pg_dump -s -U ${SQL_data.pg.user} -p ${SQL_data.pg.port} -F p -d "${dbCopyName}" > "${file}"`; - const MYSQL = `export MYSQL_PWD='${SQL_data.mysql.password}'; mysqldump -h localhost -u ${SQL_data.mysql.user} 
--port=${SQL_data.mysql.port} ${dbCopyName} > ${file}`; + const PG = ` PGPASSWORD=${SQL_data?.pg_options.password} pg_dump -s -U ${SQL_data?.pg_options.user} -p ${SQL_data?.pg_options.port} -F p -d "${dbCopyName}" > "${file}"`; + const MYSQL = `export MYSQL_PWD='${SQL_data?.mysql_options.password}'; mysqldump -h localhost -u ${SQL_data?.mysql_options.user} --port=${SQL_data?.mysql_options.port} ${dbCopyName} > ${file}`; if (dbType === DBType.Postgres || dbType === DBType.RDSPostgres) return PG; if (dbType === DBType.MySQL || dbType === DBType.RDSMySQL) return MYSQL; return 'invalid dbtype'; @@ -122,16 +121,21 @@ const helperFunctions: HelperFunctions = { // promisified execute to execute commands in the child process promExecute: (cmd: string) => new Promise((resolve, reject) => { - exec(cmd, { - timeout: 5000, - env: { PGPASSWORD: docConfig.getFullConfig().pg.password }, - }, (error, stdout, stderr) => { - if (error) { - return reject(error); - } - if (stderr) return reject(new Error(stderr)); - return resolve({ stdout, stderr }); - }); + exec( + cmd, + { + timeout: 5000, + // env: { PGPASSWORD: docConfig.getFullConfig().pg.password }, + }, + (error, stdout, stderr) => { + if (error) { + console.log('ERROR in helperfunctions - promExecute', error); + return reject(error); + } + if (stderr) return reject(new Error(stderr)); + return resolve({ stdout, stderr }); + }, + ); }), }; diff --git a/backend/Logging/masterlog.ts b/backend/src/utils/logging/masterlog.ts similarity index 91% rename from backend/Logging/masterlog.ts rename to backend/src/utils/logging/masterlog.ts index c399029a..b0865fb1 100644 --- a/backend/Logging/masterlog.ts +++ b/backend/src/utils/logging/masterlog.ts @@ -1,6 +1,7 @@ /* eslint-disable func-names */ /* eslint-disable @typescript-eslint/no-unused-vars */ -import { LogType } from '../BE_types'; +// import { LogType } from '@mytypes/dbTypes'; +import { LogType } from '../../../../shared/types/dbTypes'; // file to print color coded logs in the 
console @@ -14,7 +15,7 @@ const logger = function ( message: string, logType: LogType = LogType.NORMAL, opt1?, - opt2? + opt2?, ) { // Code for the log color let colorCode = 0; @@ -62,7 +63,7 @@ const logger = function ( console.log( `\u001b[1;${colorCode}m ${`[${logType}] ${message + moreText}`}` + - `\u001b[1;0m` + '\u001b[1;0m', ); saveLogMessage(`[${logType}] ${message}`); }; diff --git a/frontend/components/App.tsx b/frontend/components/App.tsx index 511ecf85..77127a6e 100644 --- a/frontend/components/App.tsx +++ b/frontend/components/App.tsx @@ -1,49 +1,79 @@ -import React, { useState, useEffect } from 'react'; -import '../lib/style.css' -import styled from 'styled-components'; -import { ThemeProvider, Theme, StyledEngineProvider } from '@mui/material/'; +import { EventEmitter } from 'events'; +import { StyledEngineProvider, Theme, ThemeProvider } from '@mui/material/'; import CssBaseline from '@mui/material/CssBaseline'; -import { IpcRendererEvent, ipcRenderer } from 'electron'; -import { - MuiTheme, - bgColor, - sidebarWidth, - defaultMargin, - sidebarShowButtonSize, -} from '../style-variables'; +import React, { useEffect, useMemo, useReducer, useRef, useState } from 'react'; +import styled from 'styled-components'; + +import { ipcRenderer, IpcRendererEvent } from 'electron'; import GlobalStyle from '../GlobalStyle'; + +import { DBType } from '../../backend/BE_types'; +import { createQuery } from '../lib/queries'; +import '../lib/style.css'; import { AppState, CreateNewQuery, - QueryData, - isDbLists, DatabaseInfo, - TableInfo, DbLists, + isDbLists, + QueryData, + TableInfo, } from '../types'; -import { DBType } from '../../backend/BE_types'; -import { createQuery } from '../lib/queries'; + +import { + bgColor, + defaultMargin, + MuiTheme, + sidebarShowButtonSize, + sidebarWidth, +} from '../style-variables'; + import Sidebar from './sidebar/Sidebar'; -import QueryView from './views/QueryView/QueryView'; -import DbView from './views/DbView/DbView'; + 
import CompareView from './views/CompareView/CompareView'; -import QuickStartView from './views/QuickStartView'; +import DbView from './views/DbView/DbView'; import NewSchemaView from './views/NewSchemaView/NewSchemaView'; +import QueryView from './views/QueryView/QueryView'; +import QuickStartView from './views/QuickStartView'; +import ThreeDView from './views/ThreeDView/ThreeDView'; + import FeedbackModal from './modal/FeedbackModal'; import Spinner from './modal/Spinner'; -import { once } from '../lib/utils'; -import CreateDBDialog from './Dialog/CreateDBDialog'; + import ConfigView from './Dialog/ConfigView'; -import ThreeDView from './views/ThreeDView/ThreeDView'; +import CreateDBDialog from './Dialog/CreateDBDialog'; +import MenuContext from '../state_management/Contexts/MenuContext'; +import menuReducer, { + initialMenuState, + submitAsyncToBackend, +} from '../state_management/Reducers/MenuReducers'; +import invoke from '../lib/electronHelper'; + +import { + appViewStateReducer, + AppViewState, +} from '../state_management/Reducers/AppViewReducer'; +import { + AppViewContextState, + AppViewContextDispatch, +} from '../state_management/Contexts/AppViewContext'; + +// Query Context and Reducer Imports +import { + QueryContextState, + QueryContextDispatch, +} from '../state_management/Contexts/QueryContext'; +import { + queryReducer, + QueryState, +} from '../state_management/Reducers/QueryReducers'; declare module '@mui/material/styles/' { // eslint-disable-next-line @typescript-eslint/no-empty-interface - interface DefaultTheme extends Theme { } + interface DefaultTheme extends Theme {} } -const EventEmitter = require('events'); - const emitter = new EventEmitter(); emitter.setMaxListeners(20); @@ -58,26 +88,60 @@ const Main = styled.main<{ $fullwidth: boolean }>` grid-area: ${({ $fullwidth }) => ($fullwidth ? '1 / 1 / -1 / -1' : 'main')}; background: ${bgColor}; height: calc(100vh - (2 * ${defaultMargin})); - max-width: ${({ $fullwidth }) => ($fullwidth ? 
'' : `calc(90vw - ${sidebarWidth} )`)}; + max-width: ${({ $fullwidth }) => + $fullwidth ? '' : `calc(90vw - ${sidebarWidth} )`}; padding: ${defaultMargin} ${sidebarShowButtonSize}; margin: 0; `; -// emitting with no payload requests backend to send back a db-lists event with list of dbs -const requestDbListOnce = once(() => ipcRenderer.send('return-db-list')); +function App() { + /** + * Reducers + * useMemo prevents rerenders when state does not change. necessary because of useContext + */ + const [menuState, menuDispatch] = useReducer(menuReducer, initialMenuState); + const menuProvider = useMemo( + () => ({ state: menuState, dispatch: menuDispatch }), + [menuState], + ); + + // initializing the initial viewState object + // this is the app views that will be passed through a provider to any children components wrapped in it. Right now, only sidebar is wrapped in it. + const initialAppViewState: AppViewState = { + selectedView: 'dbView', + sideBarIsHidden: false, + showConfigDialog: false, + showCreateDialog: false, + PG_isConnected: false, + MYSQL_isConnected: false, + }; -const App = () => { - const [queries, setQueries] = useState({}); - const [comparedQueries, setComparedQueries] = useState( - {} + const initialQueryState: QueryState = { + queries: {}, + comparedQueries: {}, + workingQuery: undefined, + newFilePath: '', + }; + + // creating the reducer to reduce all state changes to a single state object + // This reducer manages all the state calls for the app views + const [appViewState, appViewDispatch] = useReducer( + appViewStateReducer, + initialAppViewState, + ); + // this reducer manages query states + const [queryState, queryDispatch] = useReducer( + queryReducer, + initialQueryState, ); - const [workingQuery, setWorkingQuery] = useState(); - const [selectedView, setSelectedView] = - useState('dbView'); + + // tablesReducer stuff here + + // --- + // In the future, we'd love to see all of these state varaiables to be condensed to their own reducer. 
const [selectedDb, setSelectedDb] = useState(''); - const [sidebarIsHidden, setSidebarHidden] = useState(false); - const [newFilePath, setFilePath] = useState(''); + const [ERView, setERView] = useState(true); const [DBInfo, setDBInfo] = useState(); @@ -86,26 +150,18 @@ const App = () => { const [dbTables, setTables] = useState([]); const [selectedTable, setSelectedTable] = useState(); - const [PG_isConnected, setPGStatus] = useState(false); - const [MYSQL_isConnected, setMYSQLStatus] = useState(false); - const [showCreateDialog, setCreateDialog] = useState(false); - const [showConfigDialog, setConfigDialog] = useState(false); - + // reverted to db-list event listener + // TODO: refactor event handlers in back end to return db list rather than emit event useEffect(() => { // Listen to backend for updates to list of available databases const dbListFromBackend = (evt: IpcRendererEvent, dbLists: DbLists) => { if (isDbLists(dbLists)) { setDBInfo(dbLists.databaseList); setTables(dbLists.tableList); - setPGStatus(dbLists.databaseConnected.PG); - setMYSQLStatus(dbLists.databaseConnected.MySQL); - setSelectedTable(selectedTable || dbTables[0]); } }; - ipcRenderer.on('db-lists', dbListFromBackend); // dummy data error here? 
- requestDbListOnce(); - + ipcRenderer.on('db-lists', dbListFromBackend); // return cleanup function return () => { ipcRenderer.removeListener('db-lists', dbListFromBackend); @@ -113,22 +169,59 @@ const App = () => { }); /** - * Hook to create new Query from data + * New central source of async calls */ - const createNewQuery: CreateNewQuery = (query: QueryData) => { - // Only save query to saved queries if it contains all minimum information - if (query.label && query.db && query.sqlString && query.group) { - const newQueries = createQuery(queries, query); - setQueries(newQueries); + const asyncCount = useRef(0); + useEffect(() => { + const { issued, resolved, asyncList } = menuState.loading; + // Check that we are here because a new async was issued + if (issued - resolved > asyncCount.current) { + /** + * FLOW: new async request + * - async call submitted by component + * - menuReducer adds request to tracked ongoing asyncs + * - this useEffect triggers; something in the state contains necessary info to launch invoke + * + * NOTE: moved this logic to MenuReducers to keep logic localized and utilize + * dependency injection for testing purposes + */ + submitAsyncToBackend(issued, asyncList, invoke, menuDispatch); } - // we must set working query to newly created query otherwise query view won't update - setWorkingQuery(query); - }; + // keep track of ongoing asyncs in this useRef, even when arriving here as an async resolves + asyncCount.current = issued - resolved; + }, [menuState.loading]); + + // populate initial dblist + useEffect(() => { + const dbListFromBackend = (dbLists: DbLists) => { + setDBInfo(dbLists.databaseList); + setTables(dbLists.tableList); + appViewDispatch({ + type: 'IS_PG_CONNECTED', + payload: dbLists.databaseConnected.PG, + }); + + appViewDispatch({ + type: 'IS_MYSQL_CONNECTED', + payload: dbLists.databaseConnected.MySQL, + }); + + // setSelectedTable(selectedTable || dbTables[0]); + }; + menuDispatch({ + type: 'ASYNC_TRIGGER', + loading: 
'LOADING', + options: { + event: 'return-db-list', + callback: dbListFromBackend, + }, + }); + }, []); // determine which view should be visible depending on selected view and // prerequisites for each view - let shownView: AppState['selectedView']; - switch (selectedView) { + let shownView; + switch (appViewState.selectedView) { case 'compareView': shownView = 'compareView'; break; @@ -140,7 +233,7 @@ const App = () => { shownView = 'dbView'; break; case 'queryView': - if (!queries.selected && !selectedDb) { + if (!queryState.queries?.selected && !selectedDb) { shownView = 'quickStartView'; break; } @@ -161,99 +254,92 @@ const App = () => { // Styled Components must be injected last in order to override Material UI style: https://material-ui.com/guides/interoperability/#controlling-priority-3 - - - - - -
- - - - - - - - - - setConfigDialog(false)} - /> - setCreateDialog(false)} - /> -
- -
+ + + + + + + + + + + + + +
+ + + + + + + + + appViewDispatch({ type: 'TOGGLE_CONFIG_DIALOG' }) + } + /> + + appViewDispatch({ type: 'TOGGLE_CREATE_DIALOG' }) + } + /> +
+ +
+
+
+
); -}; +} export default App; diff --git a/frontend/components/Dialog/ConfigView.tsx b/frontend/components/Dialog/ConfigView.tsx index c77da409..9ef97ea4 100644 --- a/frontend/components/Dialog/ConfigView.tsx +++ b/frontend/components/Dialog/ConfigView.tsx @@ -1,5 +1,5 @@ -import React, { useState, useEffect } from 'react'; -import { IpcRendererEvent, ipcRenderer, remote } from 'electron'; +import React, { useState, useEffect, useContext } from 'react'; +import { ipcRenderer } from 'electron'; import { Box, Tab, @@ -16,6 +16,8 @@ import { StyledTextField, } from '../../style-variables'; import '../../lib/style.css'; +import { DocConfigFile } from '../../../backend/BE_types'; +import MenuContext from '../../state_management/Contexts/MenuContext'; /* junaid @@ -43,13 +45,14 @@ function TabPanel(props: TabPanelProps) { {...other} > {value === index && ( - {children} @@ -65,7 +68,10 @@ function a11yProps(index: number) { }; } -const BasicTabs = ({ onClose }: BasicTabsProps) => { +function BasicTabs({ onClose }: BasicTabsProps) { + // context for async calls + const { dispatch: menuDispatch } = useContext(MenuContext); + // useState hooks for database connection information const [mysql, setmysql] = useState({}); const [pg, setpg] = useState({}); @@ -91,24 +97,24 @@ const BasicTabs = ({ onClose }: BasicTabsProps) => { sqlite: [], // added sqlite }); - // function to store user-selected file path in state - const designateFile = function (path, setPath) { - const { dialog } = remote; - const WIN = remote.getCurrentWindow(); - + const designateFile = (setPath) => { const options = { - title: "Select SQLite File", + title: 'Select SQLite File', defaultPath: '', - buttonLabel: "Select File", filters: [ - { name: 'db', extensions: ['db'] } - ] - } - - dialog.showOpenDialog(WIN, options) - .then((res: any) => { - setPath({ path: res.filePaths[0] }) - }); - } + buttonLabel: 'Select File', + filters: [{ name: 'db', extensions: ['db'] }], + }; + const setPathCallback = (val) => 
setPath({ path: val }); + menuDispatch({ + type: 'ASYNC_TRIGGER', + loading: 'LOADING', + options: { + event: 'showOpenDialog', + payload: options, + callback: setPathCallback, + }, + }); + }; // Function to make StyledTextFields and store them in inputFieldsToRender state function inputFieldMaker(dbTypeFromState, setDbTypeFromState, dbString) { @@ -116,10 +122,14 @@ const BasicTabs = ({ onClose }: BasicTabsProps) => { const arrayToRender: JSX.Element[] = []; if (dbString === 'sqlite') { arrayToRender.push( - designateFile(dbTypeFromState, setDbTypeFromState)}> + designateFile(setDbTypeFromState)} + > Set db file location - - ) + , + ); } else { // Get key value pairs from passed in database connection info from state Object.entries(dbTypeFromState).forEach((entry) => { @@ -152,7 +162,6 @@ const BasicTabs = ({ onClose }: BasicTabsProps) => { } // Push StyledTextField to temporary render array for current key in database connection object from state - arrayToRender.push( { }} // Spread special password props if they exist {...styledTextFieldProps} - /> + />, ); - }); } // Update state for our current database type passing in our temporary array of StyledTextField components @@ -187,21 +195,24 @@ const BasicTabs = ({ onClose }: BasicTabsProps) => { useEffect(() => { // Listen to backend for updates to list of available databases - const configFromBackend = (evt: IpcRendererEvent, config) => { + const configFromBackend = (config: DocConfigFile) => { // Set state based on parsed config.json object received from backend - setmysql({ ...config.mysql }); - setpg({ ...config.pg }); - setrds_mysql({ ...config.rds_mysql }); - setrds_pg({ ...config.rds_pg }); - setSqlite({ ...config.sqlite }); // added sqlite + setmysql({ ...config.mysql_options }); + setpg({ ...config.pg_options }); + setrds_mysql({ ...config.rds_mysql_options }); + setrds_pg({ ...config.rds_pg_options }); + setSqlite({ ...config.sqlite_options }); // added sqlite }; - ipcRenderer.on('get-config', 
configFromBackend); - ipcRenderer.invoke('get-config'); - // return cleanup function - return () => { - ipcRenderer.removeListener('get-config', configFromBackend); - }; - }, []); + + menuDispatch({ + type: 'ASYNC_TRIGGER', + loading: 'LOADING', + options: { + event: 'get-config', + callback: configFromBackend, + }, + }); + }, [menuDispatch]); // Invoke functions to generate input StyledTextFields components -- passing in state, setstate hook, and database name string. // have it subscribed to changes in db connection info or show password button. Separate hooks to not rerender all fields each time @@ -227,28 +238,52 @@ const BasicTabs = ({ onClose }: BasicTabsProps) => { const handleSubmit = () => { // Pass database connection values from state to backend - ipcRenderer - .invoke('set-config', { - mysql: { ...mysql }, - pg: { ...pg }, - rds_mysql: { ...rds_mysql }, - rds_pg: { ...rds_pg }, - sqlite: { ...sqlite }, // added sqlite - }) - .then(() => { - handleClose(); - }) - .catch((err) => { - sendFeedback({ - type: 'error', - message: err ?? 'Failed to save config.', - }); - }); + // OLD CODE + // ipcRenderer + // .invoke('set-config', { + // mysql_options: { ...mysql }, + // pg_options: { ...pg }, + // rds_mysql_options: { ...rds_mysql }, + // rds_pg_options: { ...rds_pg }, + // sqlite_options: { ...sqlite }, // added sqlite + // }) + // .then(() => { + // handleClose(); + // }) + // .catch((err) => { + // sendFeedback({ + // type: 'error', + // message: err ?? 
'Failed to save config.', + // }); + // }); + + menuDispatch({ + type: 'ASYNC_TRIGGER', + loading: 'LOADING', + options: { + event: 'set-config', + payload: { + mysql_options: { ...mysql }, + pg_options: { ...pg }, + rds_mysql_options: { ...rds_mysql }, + rds_pg_options: { ...rds_pg }, + sqlite_options: { ...sqlite }, + }, + callback: handleClose, + }, + }); }; + // Function to handle onChange -- when tab panels change const handleChange = (event: React.SyntheticEvent, newValue: number) => { // On panel change reset all passwords to hidden - setShowpass({ mysql: false, pg: false, rds_mysql: false, rds_pg: false, sqlite: false }); + setShowpass({ + mysql: false, + pg: false, + rds_mysql: false, + rds_pg: false, + sqlite: false, + }); // Change which tab panel is hidden/shown setValue(newValue); }; @@ -264,11 +299,17 @@ const BasicTabs = ({ onClose }: BasicTabsProps) => { value={value} onChange={handleChange} aria-label="wrapped label basic tabs" - className='db-login-tabs' + className="db-login-tabs" > - {dbNames.map((db, idx) => - - )} + {dbNames.map((db, idx) => ( + + ))} @@ -288,22 +329,30 @@ const BasicTabs = ({ onClose }: BasicTabsProps) => { - + Cancel - + Save ); -}; +} interface ConfigViewProps { show: boolean; onClose: () => void; } -const ConfigView = ({ show, onClose }: ConfigViewProps) => { +function ConfigView({ show, onClose }: ConfigViewProps) { const handleClose = () => { onClose(); }; @@ -322,6 +371,6 @@ const ConfigView = ({ show, onClose }: ConfigViewProps) => {
); -}; +} export default ConfigView; diff --git a/frontend/components/Dialog/CreateDBDialog.tsx b/frontend/components/Dialog/CreateDBDialog.tsx index d127c3e0..16a4a243 100644 --- a/frontend/components/Dialog/CreateDBDialog.tsx +++ b/frontend/components/Dialog/CreateDBDialog.tsx @@ -1,9 +1,5 @@ -import React, { useState } from 'react'; -import { - DialogTitle, - Dialog, - Tooltip -} from '@mui/material/'; +import React, { useContext, useState } from 'react'; +import { DialogTitle, Dialog, Tooltip } from '@mui/material/'; import { ipcRenderer } from 'electron'; import { DatabaseInfo } from '../../types'; import { DBType } from '../../../backend/BE_types'; @@ -18,15 +14,17 @@ import { StyledNativeDropdown, StyledNativeOption, } from '../../style-variables'; +import MenuContext from '../../state_management/Contexts/MenuContext'; interface CreateDBDialogProps { - show: boolean, + show: boolean; DBInfo: DatabaseInfo[] | undefined; onClose: () => void; -}; +} -const CreateDBDialog = ({ show, DBInfo, onClose }: CreateDBDialogProps) => { +function CreateDBDialog({ show, DBInfo, onClose }: CreateDBDialogProps) { if (!show) return <>; + const { dispatch: menuDispatch } = useContext(MenuContext); const [newDbName, setNewDbName] = useState(''); const [isError, setIsError] = useState(false); @@ -72,28 +70,34 @@ const CreateDBDialog = ({ show, DBInfo, onClose }: CreateDBDialogProps) => { setNewDbName(dbSafeName); }; - const handleSubmit = () => { + const handleSubmit = (handleClose) => { // it needs to be as any because otherwise typescript thinks it doesn't have a 'value' param idk why const dbt: DBType = (document.getElementById('dbTypeDropdown') as any) .value; - ipcRenderer - .invoke( - 'initialize-db', - { - newDbName, - }, - dbt - ) - .then(() => { - handleClose(); - }) - .catch((err) => { - sendFeedback({ - type: 'error', - message: err ?? 
'Failed to initialize db', - }); - }); + // ipcRenderer + // .invoke('initialize-db', { + // newDbName, + // dbType: dbt, + // }) + // .then(() => { + // handleClose(); + // }) + // .catch((err) => { + // sendFeedback({ + // type: 'error', + // message: err ?? 'Failed to initialize db', + // }); + // }); + menuDispatch({ + type: 'ASYNC_TRIGGER', + loading: 'LOADING', + options: { + event: 'initialize-db', + payload: { newDbName, dbType: dbt }, + callback: handleClose, + }, + }); }; return ( @@ -140,9 +144,7 @@ const CreateDBDialog = ({ show, DBInfo, onClose }: CreateDBDialogProps) => { Postgres - - MySQL - + MySQL RDS Postgres @@ -167,7 +169,9 @@ const CreateDBDialog = ({ show, DBInfo, onClose }: CreateDBDialogProps) => { { } : handleSubmit} + onClick={ + isEmpty || isError ? () => {} : () => handleSubmit(handleClose) + } > Confirm @@ -175,6 +179,6 @@ const CreateDBDialog = ({ show, DBInfo, onClose }: CreateDBDialogProps) => { ); -}; +} export default CreateDBDialog; diff --git a/frontend/components/modal/AddNewDbModalCorrect.tsx b/frontend/components/modal/AddNewDbModalCorrect.tsx index 15ac3541..6c9282cf 100644 --- a/frontend/components/modal/AddNewDbModalCorrect.tsx +++ b/frontend/components/modal/AddNewDbModalCorrect.tsx @@ -1,8 +1,7 @@ -import React, { useState } from 'react'; +import path from 'path'; +import React, { useContext, useState } from 'react'; import { Dialog, DialogTitle, Tooltip } from '@mui/material/'; import CloudUploadIcon from '@mui/icons-material/CloudUpload'; -import { ipcRenderer, remote } from 'electron'; -import { sendFeedback } from '../../lib/utils'; import { ButtonContainer, TextFieldContainer, @@ -14,13 +13,7 @@ import { StyledNativeOption, } from '../../style-variables'; import { DBType } from '../../../backend/BE_types'; - -const { dialog } = remote; - -interface ImportPayload { - newDbName: string; - filePath: string; -} +import MenuContext from '../../state_management/Contexts/MenuContext'; type AddNewDbModalProps = { open: 
boolean; @@ -29,12 +22,14 @@ type AddNewDbModalProps = { curDBType: DBType | undefined; }; -const AddNewDbModal = ({ +function AddNewDbModal({ open, onClose, dbNames, curDBType, -}: AddNewDbModalProps) => { +}: AddNewDbModalProps) { + const { dispatch: menuDispatch } = useContext(MenuContext); + const [newDbName, setNewDbName] = useState(''); const [isError, setIsError] = useState(false); const [isEmpty, setIsEmpty] = useState(true); @@ -79,45 +74,43 @@ const AddNewDbModal = ({ }; // Opens modal to select file and sends the selected file to backend - const handleFileClick = () => { - const dbt: DBType = (document.getElementById('dbTypeDropdown') as any).value; - // console.log('curDBType in addnewdbmodalcorrect', curDBType) - // console.log('newdbName in addnewdbmodalcorrect', newDbName) - // console.log('dbt in addnewdbmodalcorrect', dbt) - dialog - .showOpenDialog({ - properties: ['openFile'], - filters: [{ name: 'Custom File Type', extensions: ['sql', 'tar'] }], - message: 'Please upload .sql or .tar database file', - }) - .then((result) => { - if (result.canceled) return; - - if (!result.filePaths.length) { - sendFeedback({ - type: 'warning', - message: 'No file was selected', - }); - return; - } - - const payload: ImportPayload = { - newDbName, - filePath: result.filePaths[0], - }; - - - ipcRenderer.invoke('import-db', payload, dbt).catch(() => - sendFeedback({ - type: 'error', - message: 'Failed to import database', - }) - ); - }) - .catch((err: object) => { - // console.log(err); - }) - .finally(handleClose); + const handleDBimport = (dbName: string, closeModal: () => void) => { + // TODO: fix the any type. 
+ const dbt: DBType = (document.getElementById('dbTypeDropdown') as any) + .value; + const options = { + title: 'Import DB', + defaultPath: path.join(__dirname, '../assets/'), + buttonLabel: 'Import', + filters: [ + { + name: 'Custom File Type', + extensions: ['sql', 'tar'], + }, + ], + }; + // this runs after opendialog resolves, use as callback + const importdb = (filePath: string) => { + menuDispatch({ + type: 'ASYNC_TRIGGER', + loading: 'LOADING', + options: { + event: 'import-db', + payload: { newDbName: dbName, filePath, dbType: dbt }, // see importDb for type reqs + callback: closeModal, + }, + }); + }; + // initial async call + menuDispatch({ + type: 'ASYNC_TRIGGER', + loading: 'LOADING', + options: { + event: 'showOpenDialog', + payload: options, + callback: importdb, + }, + }); }; return ( @@ -150,14 +143,20 @@ const AddNewDbModal = ({ - + Database Type - Postgres + + Postgres + MySQL @@ -173,7 +172,11 @@ const AddNewDbModal = ({ variant="contained" color="primary" startIcon={} - onClick={isEmpty || isError ? () => {} : handleFileClick} + onClick={ + isEmpty || isError + ? 
() => {} + : () => handleDBimport(newDbName, handleClose) + } > Import @@ -181,6 +184,6 @@ const AddNewDbModal = ({ ); -}; +} export default AddNewDbModal; diff --git a/frontend/components/modal/DummyDataModal.tsx b/frontend/components/modal/DummyDataModal.tsx index 5bd06e6c..25ee0e41 100644 --- a/frontend/components/modal/DummyDataModal.tsx +++ b/frontend/components/modal/DummyDataModal.tsx @@ -1,4 +1,4 @@ -import React, { useState } from 'react'; +import React, { useContext, useState } from 'react'; import { Dialog } from '@mui/material/'; import { ipcRenderer } from 'electron'; import { @@ -10,11 +10,13 @@ import { } from '../../style-variables'; import { sendFeedback } from '../../lib/utils'; import { DBType } from '../../../backend/BE_types'; +import MenuContext from '../../state_management/Contexts/MenuContext'; interface DummyPayload { dbName: string; tableName: string; rows: number; + dbType: DBType; } type DummyDataModalProps = { @@ -25,20 +27,19 @@ type DummyDataModalProps = { curDBType: DBType | undefined; }; -const DummyDataModal = ({ +function DummyDataModal({ open, onClose, dbName, tableName, - curDBType -}: DummyDataModalProps) => { + curDBType, +}: DummyDataModalProps) { + const { dispatch: menuDispatch } = useContext(MenuContext); + const [rowNum, setRowNum] = useState(0); const [isError, setIsError] = useState(false); const [isEmpty, setIsEmpty] = useState(true); - // console.log('curDBType:', curDBType); - - const handleClose = () => { setIsError(false); setIsEmpty(true); @@ -82,34 +83,39 @@ const DummyDataModal = ({ }; // Event handler to send rows to backend - const handleClick = () => { + const handleClick = ( + close: () => void, + db: string, + table: string, + rows: number, + dbType: DBType, + ) => { // Check if dbName is given and not undefined - if (!dbName || !tableName) + if (!dbName || !tableName) { + // TODO feedback return sendFeedback({ type: 'error', message: 'Failed to generate dummy data', }); + } const payload: DummyPayload = { 
- dbName, - tableName, - rows: rowNum, + dbName: db, + tableName: table, + rows, + dbType, }; - - ipcRenderer - .invoke('generate-dummy-data', payload, curDBType) - .catch(() => - sendFeedback({ - type: 'error', - message: 'Failed to generate dummy data', - }) - ) - .catch((err: object) => { - // console.log(err); - }) - .finally(handleClose); + menuDispatch({ + type: 'ASYNC_TRIGGER', + loading: 'LOADING', + options: { + event: 'generate-dummy-data', + payload, + callback: close, + }, + }); }; - + return (
{} : handleClick} + onClick={ + isError || isEmpty + ? () => {} + : () => + // the ORs are here bc typescript doesn't know about the isError isEmpty checks + handleClick( + handleClose, + dbName || '', + tableName || '', + rowNum, + curDBType || DBType.Postgres, + ) + } > Generate @@ -156,6 +174,6 @@ const DummyDataModal = ({
); -}; +} export default DummyDataModal; diff --git a/frontend/components/modal/DuplicateDbModal.tsx b/frontend/components/modal/DuplicateDbModal.tsx index c0021f84..dde0a2ec 100644 --- a/frontend/components/modal/DuplicateDbModal.tsx +++ b/frontend/components/modal/DuplicateDbModal.tsx @@ -19,13 +19,11 @@ import { } from '../../style-variables'; import { DBType } from '../../../backend/BE_types'; - declare module '@mui/material/styles/' { // eslint-disable-next-line @typescript-eslint/no-empty-interface interface DefaultTheme extends Theme {} } - const { ipcRenderer } = window.require('electron'); interface DuplicatePayload { @@ -57,16 +55,16 @@ const handleDBName = (dbCopyName, dbNames) => { return dbName; }; -const DuplicateDbModal = ({ +function DuplicateDbModal({ open, onClose, dbCopyName, dbNames, - curDBType -}: copyDbModalProps) => { + curDBType, +}: copyDbModalProps) { const [checked, setChecked] = useState(true); const [newSchemaName, setNewSchemaName] = useState( - handleDBName(dbCopyName, dbNames) + handleDBName(dbCopyName, dbNames), ); const [isError, setIsError] = useState(false); const [isEmpty, setIsEmpty] = useState(false); @@ -102,8 +100,7 @@ const DuplicateDbModal = ({ dbSafeName = dbSafeName.replace(/[^\w-]/gi, ''); if (dbNames?.includes(dbSafeName)) { setIsError(true); - } - else { + } else { setIsError(false); } // dbSafeName = dbSafeName.replace(/[^A-Z0-9]/gi, ''); @@ -202,6 +199,6 @@ const DuplicateDbModal = ({ ); -}; +} export default DuplicateDbModal; diff --git a/frontend/components/modal/FeedbackModal.tsx b/frontend/components/modal/FeedbackModal.tsx index a4c899b5..fd72c3cc 100644 --- a/frontend/components/modal/FeedbackModal.tsx +++ b/frontend/components/modal/FeedbackModal.tsx @@ -1,7 +1,7 @@ -import React, { useState, useEffect } from 'react'; -import { IpcRendererEvent, ipcRenderer } from 'electron'; import { Snackbar } from '@mui/material'; import MuiAlert, { AlertProps } from '@mui/material/Alert'; +import { ipcRenderer, 
IpcRendererEvent } from 'electron'; +import React, { useEffect, useState } from 'react'; import { readingTime } from '../../lib/utils'; import type { Feedback, FeedbackSeverity } from '../../types'; @@ -10,14 +10,19 @@ function Alert(props: AlertProps) { return ; } -const FeedbackModal = () => { +function FeedbackModal() { const [isOpen, setOpen] = useState(false); const [message, setMessage] = useState(''); const [severity, setSeverity] = useState('info'); useEffect(() => { const receiveFeedback = (evt: IpcRendererEvent, feedback: Feedback) => { - const validTypes: FeedbackSeverity[] = ['success','error', 'info', 'warning']; + const validTypes: FeedbackSeverity[] = [ + 'success', + 'error', + 'info', + 'warning', + ]; // Ignore 'success' feedback. if (validTypes.includes(feedback.type)) { setSeverity(feedback.type); @@ -39,7 +44,7 @@ const FeedbackModal = () => { onClose={handleClose} autoHideDuration={readingTime(message)} anchorOrigin={{ vertical: 'bottom', horizontal: 'right' }} - // disable hiding on clickAway + // disable hiding on clickAway ClickAwayListenerProps={{ onClickAway: () => {} }} >
@@ -49,6 +54,6 @@ const FeedbackModal = () => {
); -}; +} export default FeedbackModal; diff --git a/frontend/components/modal/Spinner.tsx b/frontend/components/modal/Spinner.tsx index b5572f2a..cedd4edd 100644 --- a/frontend/components/modal/Spinner.tsx +++ b/frontend/components/modal/Spinner.tsx @@ -1,24 +1,25 @@ -import React, { useState, useEffect } from 'react'; +import React, { useEffect, useState } from 'react'; import { LinearProgress } from '@mui/material'; -import styled from 'styled-components'; import { ipcRenderer } from 'electron'; +import styled, { ExecutionContext } from 'styled-components'; -const StyledLinearProg = styled(LinearProgress)` +interface Props { + $show: boolean; +} + +const StyledLinearProg = styled(LinearProgress)` /* Material Ui Drawer component used by sidebar has z-index: 1200 */ z-index: 1300; height: 5px; - visibility: ${({ $show }: { $show: boolean }) => - $show ? 'visible' : 'hidden'}; + visibility: ${(props?) => (props.$show ? 'visible' : 'hidden')}; `; - let delayTimer: NodeJS.Timeout; const delay = 500; -const Spinner = () => { +function Spinner() { const [show, setShow] = useState(false); - useEffect(() => { const showProgress = () => { // show spinner after delay ms @@ -44,6 +45,6 @@ const Spinner = () => { }); return ; -}; +} export default Spinner; diff --git a/frontend/components/sidebar/BottomButtons.tsx b/frontend/components/sidebar/BottomButtons.tsx index 0f6aaba3..1ba34d61 100644 --- a/frontend/components/sidebar/BottomButtons.tsx +++ b/frontend/components/sidebar/BottomButtons.tsx @@ -2,6 +2,10 @@ import React from 'react'; import { ButtonGroup, Button } from '@mui/material/'; import styled from 'styled-components'; import { selectedColor, textColor, defaultMargin } from '../../style-variables'; +import { + useAppViewContext, + useAppViewDispatch, +} from '../../state_management/Contexts/AppViewContext'; const ViewBtnGroup = styled(ButtonGroup)` margin: ${defaultMargin} 5px; @@ -14,32 +18,31 @@ interface ViewButtonProps { $isSelected: boolean; } -const 
ViewButton = styled(Button)` - background: ${({ $isSelected }: ViewButtonProps) => +const ViewButton = styled(Button)` + background: ${({ $isSelected }: { $isSelected?: boolean }) => $isSelected ? textColor : selectedColor}; `; -type BottomButtonProps = { - showCreateDialog: boolean; - setCreateDialog: (show: boolean) => void; -}; - /** * Selector for view on sidebar. Updates App state with selected view */ -const BottomButtons = ({ - showCreateDialog, - setCreateDialog, -}: BottomButtonProps) => ( - - { - setCreateDialog(true); - }} - $isSelected={showCreateDialog} - > - Create New Database - - -); +function BottomButtons() { + const appViewStateContext = useAppViewContext(); + const appViewDispatchContext = useAppViewDispatch(); + return ( + + { + if (!appViewStateContext?.showCreateDialog) + appViewDispatchContext!({ + type: 'TOGGLE_CREATE_DIALOG', + }); + }} + $isSelected={appViewStateContext!.showCreateDialog} + > + Create New Database + + + ); +} export default BottomButtons; diff --git a/frontend/components/sidebar/DbEntry.tsx b/frontend/components/sidebar/DbEntry.tsx index 1362b03a..b57c5e15 100644 --- a/frontend/components/sidebar/DbEntry.tsx +++ b/frontend/components/sidebar/DbEntry.tsx @@ -12,10 +12,12 @@ import { } from '@mui/material'; import DeleteIcon from '@mui/icons-material/Delete'; import FileCopyIcon from '@mui/icons-material/FileCopy'; +import FileDownloadIcon from '@mui/icons-material/FileDownload'; import { SidebarListItem, StyledListItemText } from '../../style-variables'; import { sendFeedback } from '../../lib/utils'; import { DBType } from '../../../backend/BE_types'; +import { getAppDataPath } from '../../lib/queries'; const { ipcRenderer } = window.require('electron'); @@ -27,13 +29,7 @@ interface DbEntryProps { dbType: DBType; } -const DbEntry = ({ - db, - isSelected, - select, - duplicate, - dbType, -}: DbEntryProps) => { +function DbEntry({ db, isSelected, select, duplicate, dbType }: DbEntryProps) { const [isDeleteDialogOpen, 
setIsDeleteDialogOpen] = useState(false); const handleDelete = () => { @@ -44,10 +40,32 @@ const DbEntry = ({ setIsDeleteDialogOpen(false); }) .catch(() => - sendFeedback({ type: 'error', message: `Failed to delete ${db}` }) + sendFeedback({ type: 'error', message: `Failed to delete ${db}` }), ); }; + const handleExportDB = async () => { + const options = { + title: 'Choose File Path', + defaultPath: `${getAppDataPath('sql')}`, + buttonLabel: 'Save', + filters: [{ name: 'SQL', extensions: ['sql'] }], + }; + + try { + const filePath = await ipcRenderer.invoke('showSaveDialog', options); + + const payload = { + db, + filePath, + }; + + await ipcRenderer.invoke('export-db', payload, dbType); + } catch (error) { + console.log(error); + } + }; + return ( + + + + + + - setIsDeleteDialogOpen(true)} size="large"> + setIsDeleteDialogOpen(true)} + size="large" + > @@ -71,17 +99,19 @@ const DbEntry = ({ aria-labelledby="alert-dialog-title" aria-describedby="alert-dialog-description" > - Confirm deletion + + Confirm deletion + - Are you sure you want to delete the database - {' '} - {db} - ? + Are you sure you want to delete the database {db}? -