diff --git a/README.md b/README.md index bc661f51..7afacb92 100644 --- a/README.md +++ b/README.md @@ -40,7 +40,7 @@ SeeQR is still in BETA. Additional features, extensions, and improvements will c To get started on contributing to this project: -1. Download and Install [Docker Desktop](https://www.docker.com/get-started) +1. Download and install [Postgres App](https://postgresapp.com/) and start it before opening up SeeQR. 2. Fork or clone this repository 3. Npm install 1. Run `npm install` for application-specific dependencies. @@ -48,7 +48,6 @@ To get started on contributing to this project: 4. Install [Live Sass Compile](https://github.com/ritwickdey/vscode-live-sass-compiler) VSCode extension (settings are configured in the .vscode file in this repo), or set up your preferred Sass compiler 5. To run application during development 1. `npm run dev` to launch Electron application window and webpack-dev-server. - 2. `npm run resetContainer` to reset the container and clear pre-existing SeeQR databases. If error “can’t find postgres-1” is encountered, it is simply an indication that the container is already pruned. ## Built With @@ -56,19 +55,19 @@ To get started on contributing to this project: - [React](https://reactjs.org/) - [React-Hooks](https://reactjs.org/docs/hooks-intro.html) - [Typescript](https://www.typescriptlang.org/) -- [Docker](https://www.docker.com/get-started) -- [Docker-Compose](https://docs.docker.com/compose/) - [PostgreSQL](https://www.postgresql.org/) - [Chart.js](https://github.com/chartjs) - [Faker.js](https://github.com/Marak/faker.js) - [CodeMirror](https://codemirror.net/) ## Interface & Features +
-

The whole interface in a nutshell

+

The whole interface in a nutshell

- Schema + - Upon application launch, upload `.sql` or `.tar` file when prompted by splash page, or hit cancel. - The uploaded `.sql` or `.tar` file becomes the active database. - To input new schemas, toggle the “Input Schema” button. Upload a .sql or .tar file or directly input schema code. Remember to provide the schema with a unique label, as it will be assigned to the name property of the newly spun up database connected to the schema. @@ -76,49 +75,50 @@ To get started on contributing to this project: - Query input - The center panel is where the query input text field is located, utilizing CodeMirror for SQL styling. - - Users have option to execute a tracked or untracked query—simply check the box and provide a label to identify the query in later comparisons against other queries. + - Users have the option to execute a tracked or untracked query — simply check the box and provide a label to identify the query in later comparisons against other queries. - Toggle the submit button in the bottom left to send the query to the selected database.

-
- -
-

+
+ +
+

- Data - The data table displays data returned by the inputted query. -
-

-
+
+

+
- Input Schema and Tabs - - New schemas can be uploaded into the application by clicking the "+" button above the main panel in the form of a ```.sql``` or a ```.tar``` file. + + - New schemas can be uploaded into the application by clicking the "+" button above the main panel in the form of a `.sql` or a `.tar` file. - Users can also make a copy of an existing schema, with or without data included. - Newly uploaded schemas are displayed as tabs, which can be activated to run tests against during application session. -
- -
+
+ +
- Generate Dummy Data + - MacOS users can generate up to 500,000 rows of foreign-key compliant dummy-data - Users can generate dummy data to fill in a selected scheama's tables—currently supported data types are: - INT - SMALLINT - BIGINT - VARCHAR - - Dummy data is foreign-key compliant. - - Columns with key constraints are dropped and replaced with new primary and foreign-key integer columns -
- -
+ - Dummy data is foreign-key compliant. - Columns with key constraints are dropped and replaced with new primary and foreign-key integer columns +
+ +
- History - The history table shows the latest queries the user submitted irrespective of the database. - The history table also displays the total rows returned by the query and the total query execution time. -
- -
+
+ +
- Compare @@ -142,8 +142,8 @@ To get started on contributing to this project: ## Application Architecture and Logic -Containerization
-SeeQR streamlines the process of instantiating postgres databases by leveraging Docker to containerize an image of postgres. This means instances of databases are automatically created every time new schema data is uploaded or inputted via the SeeQR GUI. Electron communicates with the instantiated database’s URI’s by taking advantage of the `'pg'` npm package. +Sandbox Environment
+SeeQR streamlines the process of instantiating postgres databases by leveraging Postgres.app to import a copy of your database in postgres on your local machine. This means instances of databases are automatically created every time new schema data is uploaded or inputted via the SeeQR GUI. Electron communicates with the instantiated database’s URIs by taking advantage of the `'pg'` npm package. Cross-schema Comparisons
One of the key features of SeeQR is to compare the efficiency of executing user-inputted queries against different schemas. This allows customization of table scale, relationship, type, and the queries themselves within the context of each schema. This flexibility affords the user granular adjustments for testing every desired scenario. Please refer to “Interface & Functionality” for more details on execution. @@ -198,3 +198,22 @@ The outcome results from each query, both retrieved data and analytics, are stor Casey Walker + + + + + + +
+
+Chris Akinrinade +
+
+James Kolotouros +
+
+Jennifer Courtner +
+
+Katie Klochan +
diff --git a/backend/DummyD/dummyDataMain.ts b/backend/DummyD/dummyDataMain.ts index 0e897b3f..abbb2b18 100644 --- a/backend/DummyD/dummyDataMain.ts +++ b/backend/DummyD/dummyDataMain.ts @@ -1,5 +1,5 @@ -import faker from "faker"; -import execute from "../channels"; +import faker from 'faker'; +import execute from '../channels'; const db = require('../models'); ///////////////////////////////////////////////////////////////////// @@ -18,20 +18,20 @@ let keyObject: any; type schemaLayout = { tableNames: string[]; tables: any; -} +}; //this object is created on the front end in DummyDataModal type dummyDataRequest = { schemaName: string; dummyData: {}; -} +}; //helper function to generate random numbers that will ultimately represent a random date const getRandomInt = (min, max) => { min = Math.ceil(min); max = Math.floor(max); return Math.floor(Math.random() * (max - min) + min); //The maximum is exclusive and the minimum is inclusive -} +}; // this function generates data for a column // column data coming in is an object of the form @@ -43,16 +43,22 @@ const generateDataByType = (columnObj) => { //faker.js method to generate data by type switch (columnObj.dataInfo.data_type) { case 'smallint': - return faker.random.number({min: -32768, max: 32767}); + return faker.random.number({ min: -32768, max: 32767 }); case 'integer': - return faker.random.number({min: -2147483648, max: 2147483647}); + return faker.random.number({ min: -2147483648, max: 2147483647 }); case 'bigint': - return faker.random.number({min: -9223372036854775808, max: 9223372036854775807}); + return faker.random.number({ + min: -9223372036854775808, + max: 9223372036854775807, + }); case 'character varying': if (columnObj.dataInfo.character_maximum_length) { - return faker.lorem.character(Math.floor(Math.random() * columnObj.dataInfo.character_maximum_length)); - } - else return faker.lorem.word(); + return faker.lorem.character( + Math.floor( + Math.random() * 
columnObj.dataInfo.character_maximum_length + ) + ); + } else return faker.lorem.word(); case 'date': // generating a random date between 1500 and 2020 let result: string = ''; @@ -64,7 +70,7 @@ const generateDataByType = (columnObj) => { result += year + '-' + month + '-' + day; return result; default: - console.log('Error generating dummy data by type') + console.log('Error generating dummy data by type'); } }; @@ -72,7 +78,13 @@ const generateDataByType = (columnObj) => { let count: number = 0; module.exports = { - writeCSVFile: (tableObject, schemaLayout, keyObject, dummyDataRequest, event: any) => { + writeCSVFile: ( + tableObject, + schemaLayout, + keyObject, + dummyDataRequest, + event: any + ) => { // extracting variables const tableCount: number = Object.keys(dummyDataRequest.dummyData).length; const tableName: string = tableObject.tableName; @@ -80,21 +92,23 @@ module.exports = { const schemaName: string = dummyDataRequest.schemaName; // mapping column headers from getColumnObjects in models.ts to columnNames - const columnArray: string[] = schemaLayout.tables[tableName].map(columnObj => columnObj.columnName); + const columnArray: string[] = schemaLayout.tables[tableName].map( + (columnObj) => columnObj.columnName + ); // transpose the table-matrix to orient it as a table const table: any = []; - let row: any = []; - for(let i = 0; i < tableMatrix[0].length; i++) { - for(let j = 0; j < tableMatrix.length; j++) { - row.push(tableMatrix[j][i]); + let row: any = []; + for (let i = 0; i < tableMatrix[0].length; i++) { + for (let j = 0; j < tableMatrix.length; j++) { + row.push(tableMatrix[j][i]); } //join each subarray (which correspond to rows in our table) with a comma const rowString = row.join(','); table.push(rowString); //'1, luke, etc' row = []; } - + // Step 3 - this step adds back the PK constraints that we took off prior to copying the dummy data into the DB (using the db that is imported from models.ts) const step3 = () => { count += 1; @@ -103,46 
+117,47 @@ module.exports = { db.addPrimaryKeyConstraints(keyObject, dummyDataRequest) .then(() => { db.addForeignKeyConstraints(keyObject, dummyDataRequest) - .then(() => { - event.sender.send('async-complete'); - count = 0; - }) - .catch((err) => { - console.log(err); - count = 0; - }); + .then(() => { + event.sender.send('async-complete'); + count = 0; + }) + .catch((err) => { + console.log(err); + count = 0; + }); }) .catch((err) => { console.log(err); count = 0; }); - } - else return; - } + } else return; + }; // Step 2 - using the postgres COPY command, this step copies the contents of the csv file in the container file system into the appropriate postgres DB const step2 = () => { - let queryString: string = `COPY ${tableName} FROM '/${tableName}.csv' WITH CSV HEADER;`; - // run the query in the container using a docker command - execute(`docker exec postgres-1 psql -U postgres -d ${schemaName} -c "${queryString}" `, step3); - } + let queryString: string = `\\copy ${tableName} FROM '${tableName}.csv' WITH CSV HEADER;`; + + execute(`psql -U postgres -d ${schemaName} -c "${queryString}" `, step3); + }; let csvString: string; //join tableMatrix with a line break (different on mac and windows because of line breaks in the bash CLI) if (process.platform === 'win32') { - const tableDataString: string = table.join(`' >> ${tableName}.csv; echo '`); + const tableDataString: string = table.join( + `' >> ${tableName}.csv; echo '` + ); const columnString: string = columnArray.join(','); - csvString = columnString.concat(`' > ${tableName}.csv; echo '`).concat(tableDataString); - execute(`docker exec postgres-1 bash -c "echo '${csvString}' >> ${tableName}.csv;"`, step2); - } - else { + csvString = columnString + .concat(`' > ${tableName}.csv; echo '`) + .concat(tableDataString); + execute(`bash -c "echo '${csvString}' >> ${tableName}.csv;"`, step2); + } else { // we know we are not on Windows, thank god! 
const tableDataString: string = table.join('\n'); const columnString: string = columnArray.join(','); csvString = columnString.concat('\n').concat(tableDataString); - + // split csv string into an array of csv strings that each are of length 100,000 characters or less - // create upperLimit variable, which represents that max amount of character a bash shell command can handle let upperLimit: number; upperLimit = 100000; @@ -150,7 +165,7 @@ module.exports = { let stringCount: number = Math.ceil(csvString.length / upperLimit); // create csvArray that will hold our final csv strings let csvArray: string[] = []; - + let startIndex: number; let endIndex: number; // iterate over i from 0 to less than stringCount, each iteration pushing slices of original csvString into an array @@ -161,43 +176,54 @@ module.exports = { if (i === stringCount - 1) csvArray.push(csvString.slice(startIndex)); else csvArray.push(csvString.slice(startIndex, endIndex)); } - let index: number = 0 + let index: number = 0; // Step 1 - this writes a csv file to the postgres-1 file system, which contains all of the dummy data that will be copied into its corresponding postgres DB const step1 = () => { // NOTE: in order to rewrite the csv files in the container file system, we must use echo with a single angle bracket on the first element of csvArray AND then move on directly to step2 (and then also reset index) - + // if our csvArray contains only one element if (csvArray.length === 1) { - execute(`docker exec postgres-1 bash -c "echo '${csvArray[index]}' > ${tableName}.csv;"`, step2); + execute( + `bash -c "echo '${csvArray[index]}' > ${tableName}.csv;"`, + step2 + ); index = 0; } // otherwise if we are working with the first element in csvArray else if (index === 0) { - execute(`docker exec postgres-1 bash -c "echo -n '${csvArray[index]}' > ${tableName}.csv;"`, step1); + console.log('this is last else statement in step1 on line 211 '); + execute( + `bash -c "echo -n '${csvArray[index]}' > 
${tableName}.csv;"`, + step1 + ); + index += 1; + } // if working with last csvArray element, execute docker command but pass in step2 as second argument - else if (index === (csvArray.length - 1)) { - // console.log('FINAL STEP 1: ', csvArray[index]); - execute(`docker exec postgres-1 bash -c "echo '${csvArray[index]}' >> ${tableName}.csv;"`, step2); + else if (index === csvArray.length - 1) { + execute( + `bash -c "echo '${csvArray[index]}' >> ${tableName}.csv;"`, + step2 + ); + index = 0; + } // otherwise we know we are not working with the first OR the last element in csvArray, so execute docker command but pass in a recursive call to our step one function and then immediately increment our index variable else { - // console.log('STEP 1: ', index, csvArray[index]); - execute(`docker exec postgres-1 bash -c "echo -n '${csvArray[index]}' >> ${tableName}.csv;"`, step1); + console.log('this is last else statement in step1 on line 230 '); + execute( + `bash -c "echo -n '${csvArray[index]}' >> ${tableName}.csv;"`, + step1 + ); + index += 1; + } - } + }; step1(); } }, - //maps table names from schemaLayout to sql files generateDummyData: (schemaLayout, dummyDataRequest, keyObject) => { const returnArray: any = []; - + //iterate over schemaLayout.tableNames array for (const tableName of schemaLayout.tableNames) { const tableMatrix: any = []; @@ -209,7 +235,9 @@ module.exports = { let entry: any; //iterate over columnArray (i.e. 
an array of the column names for the table) - let columnArray: string[] = schemaLayout.tables[tableName].map(columnObj => columnObj.columnName) + let columnArray: string[] = schemaLayout.tables[tableName].map( + (columnObj) => columnObj.columnName + ); for (let i = 0; i < columnArray.length; i++) { // declare a variable j (to be used in while loops below), set equal to zero let j: number = 0; @@ -223,14 +251,16 @@ module.exports = { columnData.push(j); // increment j j += 1; - } + } } // if this is a FK column, add random number between 0 and n-1 (inclusive) into column (unordered) else if (keyObject[tableName].foreignKeyColumns[columnArray[i]]) { //while j < reqeusted number of rows while (j < dummyDataRequest.dummyData[tableName]) { //generate an entry - entry = Math.floor(Math.random() * (dummyDataRequest.dummyData[tableName])); + entry = Math.floor( + Math.random() * dummyDataRequest.dummyData[tableName] + ); //push into columnData columnData.push(entry); j += 1; @@ -244,7 +274,7 @@ module.exports = { //push into columnData columnData.push(entry); j += 1; - }; + } } } // otherwise, we'll just add data by the type to which the column is constrained @@ -255,19 +285,19 @@ module.exports = { //push into columnData columnData.push(entry); j += 1; - }; + } } //push columnData array into tableMatrix tableMatrix.push(columnData); //reset columnData array for next column columnData = []; - }; + } // only push something to the array if data was asked for for the specific table - returnArray.push({tableName, data: tableMatrix}); - }; - }; + returnArray.push({ tableName, data: tableMatrix }); + } + } // then return the returnArray return returnArray; - } -} \ No newline at end of file + }, +}; diff --git a/backend/DummyD/foreign_key_info.ts b/backend/DummyD/foreign_key_info.ts index 3c80592b..90b606c8 100644 --- a/backend/DummyD/foreign_key_info.ts +++ b/backend/DummyD/foreign_key_info.ts @@ -1,7 +1,6 @@ -module.exports= { +module.exports = { // This query lists each table 
that has a foreign key, the name of the table that key points to, and the name of the column at which the foreign key constraint resides - getForeignKeys: - `select kcu.table_name as foreign_table, + getForeignKeys: `select kcu.table_name as foreign_table, rel_kcu.table_name as primary_table, kcu.column_name as fk_column from information_schema.table_constraints tco @@ -17,33 +16,11 @@ module.exports= { kcu.ordinal_position;`, // This query lists each table and the column name at which there is a primary key - getPrimaryKeys: - `select kcu.table_name as table_name, + getPrimaryKeys: `select kcu.table_name as table_name, kcu.column_name as pk_column from information_schema.key_column_usage as kcu join information_schema.table_constraints as tco on tco.constraint_name = kcu.constraint_name where tco.constraint_type = 'PRIMARY KEY' order by kcu.table_name;`, -} - - - -// structure of the key object for generating key compliant data -// const KeyObject = { -// // people: -// Table_1: { -// primaryKeyColumns: { -// // id: true -// _id: true -// } -// foreignKeyColumns: { -// // species_id: n where n is the number of rows asked for in the primary table the key points to -// foreignKeyColumnName_1: numOfRows, -// foreignKeyColumnName_2: numOfRows -// } -// } -// . -// . -// . -// } \ No newline at end of file +}; diff --git a/backend/channels.ts b/backend/channels.ts index a8c6c09b..436caa7b 100644 --- a/backend/channels.ts +++ b/backend/channels.ts @@ -10,28 +10,39 @@ const db = require('./models'); ************************************************************/ // Generate CLI commands to be executed in child process. 
+// The electron app will access your terminal to execute these postgres commands + + // create a database const createDBFunc = (name) => { - return `docker exec postgres-1 psql -h localhost -p 5432 -U postgres -c "CREATE DATABASE ${name}"` -} -const importFileFunc = (file) => { - return `docker cp ${file} postgres-1:/data_dump`; -} -const runSQLFunc = (dbName) => { - return `docker exec postgres-1 psql -U postgres -d ${dbName} -f /data_dump`; -} -const runTARFunc = (dbName) => { - return `docker exec postgres-1 pg_restore -U postgres -d ${dbName} /data_dump`; -} -const runFullCopyFunc = (dbCopyName) => { - return `docker exec postgres-1 pg_dump -U postgres ${dbCopyName} -f /data_dump`; -} -const runHollowCopyFunc = (dbCopyName) => { - return `docker exec postgres-1 pg_dump -s -U postgres ${dbCopyName} -f /data_dump`; -} + return `psql -U postgres -c "CREATE DATABASE ${name}"`; +}; + +// import SQL file into new DB created +const runSQLFunc = (dbName, file) => { + return `psql -U postgres -d ${dbName} -f ${file}`; +}; + +// import TAR file into new DB created +const runTARFunc = (dbName, file) => { + return `pg_restore -U postgres -d ${dbName} ${file}`; +}; + +// make a full copy of the schema +const runFullCopyFunc = (dbCopyName, file) => { + let newFile = file[0]; + + return `pg_dump -U postgres -d ${dbCopyName} -f ${newFile}`; +}; + +// make a hollow copy of the schema +const runHollowCopyFunc = (dbCopyName, file) => { + return `pg_dump -s -U postgres ${dbCopyName} -f ${file}`; +}; // Function to execute commands in the child process. 
const execute = (str: string, nextStep: any) => { exec(str, (error, stdout, stderr) => { + console.log('exec func', `${stdout}`); if (error) { //this shows the console error in an error message on the frontend dialog.showErrorBox(`${error.message}`, ''); @@ -44,7 +55,7 @@ const execute = (str: string, nextStep: any) => { console.log(`stderr: ${stderr}`); return; } - // console.log('exec func', `${stdout}`); + if (nextStep) nextStep(); }); }; @@ -56,36 +67,48 @@ const execute = (str: string, nextStep: any) => { // Global variable to store list of databases and tables to provide to frontend upon refreshing view. let listObj: any; -ipcMain.on('return-db-list', (event, args) => { - db.getLists().then(data => event.sender.send('db-lists', data)); +ipcMain.on('return-db-list', (event, dbName) => { + // DB query to get the database size + let dbSize: string; + db.query(`SELECT pg_size_pretty(pg_database_size('${dbName}'));`).then( + (queryStats) => { + dbSize = queryStats.rows[0].pg_size_pretty; + } + ); + db.getLists().then((data) => event.sender.send('db-lists', data, dbSize)); }); // Listen for skip button on Splash page. -ipcMain.on('skip-file-upload', (event) => { }); +ipcMain.on('skip-file-upload', (event) => {}); // Listen for database changes sent from the renderer upon changing tabs. ipcMain.on('change-db', (event, dbName) => { - db.changeDB(dbName) + db.changeDB(dbName); }); // Listen for file upload. Create an instance of database from pre-made .tar or .sql file. 
ipcMain.on('upload-file', (event, filePath: string) => { - // send notice to the frontend that async process has begun event.sender.send('async-started'); let dbName: string; if (process.platform === 'darwin') { - dbName = filePath[0].slice(filePath[0].lastIndexOf('/') + 1, filePath[0].lastIndexOf('.')); + dbName = filePath[0].slice( + filePath[0].lastIndexOf('/') + 1, + filePath[0].lastIndexOf('.') + ); } else { - dbName = filePath[0].slice(filePath[0].lastIndexOf('\\') + 1, filePath[0].lastIndexOf('.')); + dbName = filePath[0].slice( + filePath[0].lastIndexOf('\\') + 1, + filePath[0].lastIndexOf('.') + ); } const createDB: string = createDBFunc(dbName); - const importFile: string = importFileFunc(filePath); - const runSQL: string = runSQLFunc(dbName); - const runTAR: string = runTARFunc(dbName); + const runSQL: string = runSQLFunc(dbName, filePath); + const runTAR: string = runTARFunc(dbName, filePath); const extension: string = filePath[0].slice(filePath[0].lastIndexOf('.')); + let dbSize: string; // SEQUENCE OF EXECUTING COMMANDS // Steps are in reverse order because each step is a callback function that requires the following step to be defined. @@ -93,15 +116,15 @@ ipcMain.on('upload-file', (event, filePath: string) => { // Step 5: Changes the pg URI the newly created database, queries new database, then sends list of tables and list of databases to frontend. async function sendLists() { listObj = await db.getLists(); - console.log('channels: ', listObj); - event.sender.send('db-lists', listObj); + // Send list of databases and tables, as well as database size to frontend. + event.sender.send('db-lists', listObj, dbSize); // Send schema name back to frontend, so frontend can load tab name. 
event.sender.send('return-schema-name', dbName); // tell the front end to switch tabs to the newly created database event.sender.send('switch-to-new', null); // notify frontend that async process has been completed event.sender.send('async-complete'); - }; + } // Step 4: Given the file path extension, run the appropriate command in postgres to populate db. const step4 = () => { @@ -109,16 +132,24 @@ ipcMain.on('upload-file', (event, filePath: string) => { if (extension === '.sql') runCmd = runSQL; else if (extension === '.tar') runCmd = runTAR; execute(runCmd, sendLists); + + // DB query to get the database size + db.query(`SELECT pg_size_pretty(pg_database_size('${dbName}'));`).then( + (queryStats) => { + dbSize = queryStats.rows[0].pg_size_pretty; + } + ); }; // Step 3: Import database file from file path into docker container - const step3 = () => execute(importFile, step4); + // Edit: We changed the functionality to create a file on the local machine instead of adding it to the docker container + // const step3 = () => execute(importFile, step4); - // Step 2: Change curent URI to match newly created DB + // Step 2: Change current URI to match newly created DB const step2 = () => { db.changeDB(dbName); - return step3(); - } + return step4(); //changing step3 to step4 to test removal of importFile func + }; // Step 1: Create empty db if (extension === '.sql' || extension === '.tar') execute(createDB, step2); @@ -137,27 +168,41 @@ interface SchemaType { // OR // Listens for and handles DB copying events ipcMain.on('input-schema', (event, data: SchemaType) => { - // send notice to the frontend that async process has begun event.sender.send('async-started'); const { schemaName: dbName, dbCopyName, copy } = data; let { schemaFilePath: filePath } = data; - + console.log( + 'Schema name: ', + data.schemaName, + 'data.schemaFilePath: ', + data.schemaFilePath, + 'filepath: ', + filePath, + 'dbCopyName: ', + dbCopyName + ); + + // conditional to get the correct 
schemaFilePath name from the Load Schema Modal + if (!data.schemaFilePath) { + filePath = [data.schemaName + '.sql']; + } else { + filePath = data.schemaFilePath; + } // generate strings that are fed into execute functions later const createDB: string = createDBFunc(dbName); - const importFile: string = importFileFunc(filePath); - const runSQL: string = runSQLFunc(dbName); - const runTAR: string = runTARFunc(dbName); - const runFullCopy: string = runFullCopyFunc(dbCopyName); - const runHollowCopy: string = runHollowCopyFunc(dbCopyName); + + const runSQL: string = runSQLFunc(dbName, filePath); + const runTAR: string = runTARFunc(dbName, filePath); + const runFullCopy: string = runFullCopyFunc(dbCopyName, filePath); + const runHollowCopy: string = runHollowCopyFunc(dbCopyName, filePath); // determine if the file is a sql or a tar file, in the case of a copy, we will not have a filepath so we just hard-code the extension to be sql let extension: string = ''; if (filePath.length > 0) { extension = filePath[0].slice(filePath[0].lastIndexOf('.')); - } - else extension = '.sql'; + } else extension = '.sql'; // SEQUENCE OF EXECUTING COMMANDS // Steps are in reverse order because each step is a callback function that requires the following step to be defined. @@ -165,12 +210,13 @@ ipcMain.on('input-schema', (event, data: SchemaType) => { // Step 5: Changes the pg URI to look to the newly created database and queries all the tables in that database and sends it to frontend. 
async function sendLists() { listObj = await db.getLists(); + event.sender.send('db-lists', listObj); // tell the front end to switch tabs to the newly created database event.sender.send('switch-to-new', null); // notify frontend that async process has been completed event.sender.send('async-complete'); - }; + } // Step 4: Given the file path extension, run the appropriate command in postgres to build the db const step4 = () => { @@ -180,13 +226,11 @@ ipcMain.on('input-schema', (event, data: SchemaType) => { execute(runCmd, sendLists); }; - // Step 3: Import database file from file path into docker container - const step3 = () => execute(importFile, step4); - // skip step three which is only for importing files and instead change the current db to the newly created one + // Step 3: Change the database you're referencing const step3Copy = () => { db.changeDB(dbName); return step4(); - } + }; // Step 2: Change curent URI to match newly created DB const step2 = () => { @@ -209,9 +253,9 @@ ipcMain.on('input-schema', (event, data: SchemaType) => { // change the current database back to the newly created one // and now that we have changed to the new db, we can move on to importing the data file db.changeDB(dbName); - return step3(); - } - } + return step4(); + } + }; // Step 1 : Create empty db execute(createDB, step2); @@ -226,12 +270,12 @@ interface QueryType { } ipcMain.on('execute-query-untracked', (event, data: QueryType) => { - // send notice to front end that query has been started event.sender.send('async-started'); // destructure object from frontend const { queryString } = data; + // run query on db db.query(queryString) .then(() => { @@ -249,7 +293,6 @@ ipcMain.on('execute-query-untracked', (event, data: QueryType) => { // Listen for queries being sent from renderer ipcMain.on('execute-query-tracked', (event, data: QueryType) => { - // send notice to front end that query has been started event.sender.send('async-started'); @@ -272,18 +315,19 @@ 
ipcMain.on('execute-query-tracked', (event, data: QueryType) => { frontendData.queryData = queryData.rows; if (!queryString.match(/create/i)) { // Run EXPLAIN (FORMAT JSON, ANALYZE) - db.query('EXPLAIN (FORMAT JSON, ANALYZE) ' + queryString) - .then((queryStats) => { + db.query('EXPLAIN (FORMAT JSON, ANALYZE) ' + queryString).then( + (queryStats) => { frontendData.queryStatistics = queryStats.rows; (async function getListAsync() { listObj = await db.getLists(); frontendData.lists = listObj; - event.sender.send('db-lists', listObj) + event.sender.send('db-lists', listObj); event.sender.send('return-execute-query', frontendData); event.sender.send('async-complete'); })(); - }) + } + ); } else { // Handling for tracking a create table query, can't run explain/analyze on create statements (async function getListAsync() { @@ -297,7 +341,6 @@ ipcMain.on('execute-query-tracked', (event, data: QueryType) => { .catch((error: string) => { console.log('ERROR in execute-query-tracked channel in main.ts', error); }); - }); interface dummyDataRequest { @@ -306,37 +349,42 @@ interface dummyDataRequest { } ipcMain.on('generate-dummy-data', (event: any, data: dummyDataRequest) => { - // send notice to front end that DD generation has been started event.sender.send('async-started'); let schemaLayout: any; let dummyDataRequest: dummyDataRequest = data; let tableMatricesArray: any; - let keyObject: any = "Unresolved"; - - db.createKeyObject() - .then((result) => { - // set keyObject equal to the result of this query - keyObject = result; - db.dropKeyColumns(keyObject) - .then(() => { - db.addNewKeyColumns(keyObject) - .then(() => { - db.getSchemaLayout() - .then((result) => { - schemaLayout = result; - // generate the dummy data and save it into matrices associated with table names - tableMatricesArray = generateDummyData(schemaLayout, dummyDataRequest, keyObject); - //iterate through tableMatricesArray to write individual .csv files - for (const tableObject of tableMatricesArray) { - 
// write all entries in tableMatrix to csv file - writeCSVFile(tableObject, schemaLayout, keyObject, dummyDataRequest, event); - } - }); - }); + let keyObject: any = 'Unresolved'; + + db.createKeyObject().then((result) => { + // set keyObject equal to the result of this query + keyObject = result; + db.dropKeyColumns(keyObject).then(() => { + db.addNewKeyColumns(keyObject).then(() => { + db.getSchemaLayout().then((result) => { + schemaLayout = result; + // generate the dummy data and save it into matrices associated with table names + tableMatricesArray = generateDummyData( + schemaLayout, + dummyDataRequest, + keyObject + ); + //iterate through tableMatricesArray to write individual .csv files + for (const tableObject of tableMatricesArray) { + // write all entries in tableMatrix to csv file + writeCSVFile( + tableObject, + schemaLayout, + keyObject, + dummyDataRequest, + event + ); + } }); - }) -}) + }); + }); + }); +}); -export default execute; \ No newline at end of file +export default execute; diff --git a/backend/main.ts b/backend/main.ts index 4ecd600d..6c454ded 100644 --- a/backend/main.ts +++ b/backend/main.ts @@ -3,8 +3,7 @@ import { app, BrowserWindow, ipcMain, Menu } from 'electron'; import { appendFile } from 'fs/promises'; import { join } from 'path'; import { format } from 'url'; -//import './channels' // all channels live here -import execute from './channels'; +import './channels'; // all channels live here - this format signals that we want to import the code even if we're not calling any of the functions. If we were to import an object from channels and not call any of the functions in this file, webpack thinks we're not using it and skips the import. 
const { exec } = require('child_process'); const appMenu = require('./mainMenu'); // use appMenu to add options in top menu bar of app @@ -31,7 +30,10 @@ let pruned: boolean = false; let mainMenu = Menu.buildFromTemplate(require('./mainMenu')); // Keep a reference for dev mode let dev = false; -if (process.env.NODE_ENV !== undefined && process.env.NODE_ENV === 'development') { +if ( + process.env.NODE_ENV !== undefined && + process.env.NODE_ENV === 'development' +) { dev = true; } @@ -49,7 +51,9 @@ function createWindow() { }); if (process.platform === 'darwin') { - app.dock.setIcon(path.join(__dirname, '../../frontend/assets/images/seeqr_dock.png')); + app.dock.setIcon( + path.join(__dirname, '../../frontend/assets/images/seeqr_dock.png') + ); } // Load index.html of the app @@ -77,49 +81,12 @@ function createWindow() { // Don't show until we are ready and loaded mainWindow.once('ready-to-show', (event) => { mainWindow.show(); - // uncomment code below before running production build and packaging - // const yamlPath = join(__dirname, '../../docker-compose.yml') - // const runDocker: string = `docker-compose -f '${yamlPath}' up -d`; - const runDocker: string = `docker-compose up -d`; - exec(runDocker, (error, stdout, stderr) => { - if (error) { - console.log(`error: ${error.message}`); - return; - } - if (stderr) { - console.log(`stderr: ${stderr}`); - return; - } - console.log(`${stdout}`); - }) }); } app.on('before-quit', (event: any) => { - // check if containers have already been pruned--else, continue with default behavior to terminate application - if (!pruned) { - event.preventDefault(); - // Stop and remove postgres-1 and busybox-1 Docker containers upon window exit. 
- const stopContainers: string = 'docker stop postgres-1 busybox-1'; - const pruneContainers: string = 'docker rm -f postgres-1 busybox-1'; - // this command removes the volume which stores the session data for the postgres instance - // comment this out for dev - const pruneVolumes: string = 'docker volume rm -f seeqr_database-data'; - - // use this string for production build - // const pruneVolumes: string = 'docker volume rm -f app_database-data' - - const step4 = () => { - pruned = true; - app.quit() - }; - const step3 = () => execute(pruneVolumes, step4); - const step2 = () => execute(pruneContainers, step3); - - execute(stopContainers, step2); - } -}) - + // future iterations should add functionality to delete .sql and .csv files from a user's computer before quitting the app +}); // Invoke createWindow to create browser windows after Electron has been initialized. // Some APIs can only be used after this event occurs. @@ -142,4 +109,4 @@ app.on('activate', () => { } }); -export default mainWindow; \ No newline at end of file +export default mainWindow; diff --git a/backend/models.ts b/backend/models.ts index 772d0b9b..0a753849 100644 --- a/backend/models.ts +++ b/backend/models.ts @@ -1,5 +1,5 @@ const { Pool } = require('pg'); -const { getPrimaryKeys, getForeignKeys } = require('./DummyD/foreign_key_info') +const { getPrimaryKeys, getForeignKeys } = require('./DummyD/foreign_key_info'); // Initialize to a default db. 
// URI Format: postgres://username:password@hostname:port/databasename @@ -9,50 +9,52 @@ let pool: any = new Pool({ connectionString: PG_URI }); //helper function that creates the column objects, which are saved to the schemaLayout object //this function returns a promise to be resolved with Promise.all syntax const getColumnObjects = (tableName: string) => { - const queryString = "SELECT column_name, data_type, character_maximum_length FROM information_schema.columns WHERE table_name = $1;"; + const queryString = + 'SELECT column_name, data_type, character_maximum_length FROM information_schema.columns WHERE table_name = $1;'; const value = [tableName]; - return new Promise ((resolve) => { - pool - .query(queryString, value) - .then((result) => { - const columnInfoArray: any = []; - for (let i = 0; i < result.rows.length; i++) { - const columnObj: any = { - columnName: result.rows[i].column_name, - dataInfo: { - data_type: result.rows[i].data_type, - character_maxiumum_length: result.rows[i].character_maxiumum_length - } - } - columnInfoArray.push(columnObj) - } - resolve(columnInfoArray); - }) - }) -} + return new Promise((resolve) => { + pool.query(queryString, value).then((result) => { + const columnInfoArray: any = []; + for (let i = 0; i < result.rows.length; i++) { + const columnObj: any = { + columnName: result.rows[i].column_name, + dataInfo: { + data_type: result.rows[i].data_type, + character_maxiumum_length: result.rows[i].character_maxiumum_length, + }, + }; + columnInfoArray.push(columnObj); + } + resolve(columnInfoArray); + }); + }); +}; // gets all the names of the current postgres instances const getDBNames = () => { - return new Promise((resolve) =>{ - pool - .query('SELECT datname FROM pg_database;') - .then((databases) => { - let dbList: any = []; - for (let i = 0; i < databases.rows.length; ++i) { - let curName = databases.rows[i].datname; - if (curName !== 'postgres' && curName !== 'template0' && curName !== 'template1') - 
dbList.push(databases.rows[i].datname); - } - resolve(dbList); - }) - }) -} + return new Promise((resolve) => { + pool.query('SELECT datname FROM pg_database;').then((databases) => { + let dbList: any = []; + for (let i = 0; i < databases.rows.length; ++i) { + let curName = databases.rows[i].datname; + if ( + curName !== 'postgres' && + curName !== 'template0' && + curName !== 'template1' + ) + dbList.push(databases.rows[i].datname); + } + resolve(dbList); + }); + }); +}; // gets all tablenames from currentschema const getDBLists = () => { return new Promise((resolve) => { pool - .query("SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' ORDER BY table_name;" + .query( + "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' ORDER BY table_name;" ) .then((tables) => { let tableList: any = []; @@ -60,12 +62,11 @@ const getDBLists = () => { tableList.push(tables.rows[i].table_name); } resolve(tableList); - }) - }) -} + }); + }); +}; module.exports = { - query: (text, params, callback) => { console.log('Executed query: ', text); return pool.query(text, params, callback); @@ -84,57 +85,61 @@ module.exports = { tableList: [], // current database's tables databaseList: [], }; - Promise.all([getDBNames(), getDBLists()]) - .then((data) => { - console.log('models: ', data); - listObj.databaseList = data[0]; - listObj.tableList = data[1]; - resolve(listObj); - }) - }) - }, + Promise.all([getDBNames(), getDBLists()]).then((data) => { + console.log('models: ', data); + listObj.databaseList = data[0]; + listObj.tableList = data[1]; + resolve(listObj); + }); + }); + }, - createKeyObject: () => { return new Promise((resolve) => { // initialize the keyObject we eventually want to return out - const keyObject: any = {}; + const keyObject: any = {}; pool .query(getPrimaryKeys, null) .then((result) => { let table; - let pkColumn + let pkColumn; // iterate over the primary key table, adding info to our keyObject for (let i = 0; i < 
result.rows.length; i++) { table = result.rows[i].table_name; pkColumn = result.rows[i].pk_column; // if the table is not yet initialized within the keyObject, then initialize it - if (!keyObject[table]) keyObject[table] = {primaryKeyColumns: {}, foreignKeyColumns: {}}; + if (!keyObject[table]) + keyObject[table] = { + primaryKeyColumns: {}, + foreignKeyColumns: {}, + }; // then just set the value at the pk column name to true for later checking keyObject[table].primaryKeyColumns[pkColumn] = true; } - }) - .then(() => { - pool - .query(getForeignKeys, null) - .then((result) => { - let table; - let primaryTable; - let fkColumn; - // iterate over the foreign key table, adding info to our keyObject - for (let i = 0; i < result.rows.length; i++) { - table = result.rows[i].foreign_table; - primaryTable = result.rows[i].primary_table - fkColumn = result.rows[i].fk_column; - // if the table is not yet initialized within the keyObject, then initialize it - if (!keyObject[table]) keyObject[table] = {primaryKeyColumns: {}, foreignKeyColumns: {}}; - // then set the value at the fk column name to the number of rows asked for in the primary table to which it points - keyObject[table].foreignKeyColumns[fkColumn] = primaryTable; - } - resolve(keyObject); - }) }) - }) + .then(() => { + pool.query(getForeignKeys, null).then((result) => { + let table; + let primaryTable; + let fkColumn; + // iterate over the foreign key table, adding info to our keyObject + for (let i = 0; i < result.rows.length; i++) { + table = result.rows[i].foreign_table; + primaryTable = result.rows[i].primary_table; + fkColumn = result.rows[i].fk_column; + // if the table is not yet initialized within the keyObject, then initialize it + if (!keyObject[table]) + keyObject[table] = { + primaryKeyColumns: {}, + foreignKeyColumns: {}, + }; + // then set the value at the fk column name to the number of rows asked for in the primary table to which it points + keyObject[table].foreignKeyColumns[fkColumn] = 
primaryTable; + } + resolve(keyObject); + }); + }); + }); }, dropKeyColumns: async (keyObject: any) => { @@ -143,21 +148,21 @@ module.exports = { let queryString = `ALTER TABLE ${table}`; let count: number = 2; - for (const pkc in keyObject[table].primaryKeyColumns){ + for (const pkc in keyObject[table].primaryKeyColumns) { if (count > 2) queryString += ','; queryString += ` DROP COLUMN ${pkc} CASCADE`; count += 1; } - for (const fkc in keyObject[table].foreignKeyColumns){ + for (const fkc in keyObject[table].foreignKeyColumns) { if (count > 2) queryString += ','; - queryString += ` DROP COLUMN ${fkc}` + queryString += ` DROP COLUMN ${fkc}`; count += 1; } - queryString += ';' - + queryString += ';'; + return Promise.resolve(pool.query(queryString)); - } - + }; + // iterate over tables, running drop queries, and pushing a new promise to promise array for (const table in keyObject) { await generateAndRunDropQuery(table); @@ -166,29 +171,29 @@ module.exports = { return; }, - addNewKeyColumns: async (keyObject: any) => { + addNewKeyColumns: async (keyObject: any) => { // define helper function to generate and run query const generateAndRunAddQuery = (table: string) => { let queryString = `ALTER TABLE ${table}`; let count: number = 2; - for (const pkc in keyObject[table].primaryKeyColumns){ + for (const pkc in keyObject[table].primaryKeyColumns) { if (count > 2) queryString += ','; queryString += ` ADD COLUMN ${pkc} INT`; count += 1; } - for (const fkc in keyObject[table].foreignKeyColumns){ + for (const fkc in keyObject[table].foreignKeyColumns) { if (count > 2) queryString += ','; - queryString += ` ADD COLUMN ${fkc} INT` + queryString += ` ADD COLUMN ${fkc} INT`; count += 1; } - queryString += ';' - + queryString += ';'; + return Promise.resolve(pool.query(queryString)); - } - + }; + // iterate over tables, running drop queries, and pushing a new promise to promise array - for (const table in keyObject){ + for (const table in keyObject) { await 
generateAndRunAddQuery(table); } @@ -202,7 +207,7 @@ module.exports = { tableNames: [], tables: { // tableName: [columnObj array] - } + }, }; pool // This query returns the names of all the tables in the database @@ -216,21 +221,20 @@ module.exports = { } const promiseArray: any = []; for (let tableName of schemaLayout.tableNames) { - promiseArray.push(getColumnObjects(tableName)) + promiseArray.push(getColumnObjects(tableName)); } //we resolve all of the promises for the data info, and are returned an array of column data objects - Promise.all(promiseArray) - .then((columnInfo) => { - //here, we create a key for each table name and assign the array of column objects to the corresponding table name - for (let i = 0; i < columnInfo.length; i++) { - schemaLayout.tables[schemaLayout.tableNames[i]] = columnInfo[i]; - } - resolve(schemaLayout); - }) + Promise.all(promiseArray).then((columnInfo) => { + //here, we create a key for each table name and assign the array of column objects to the corresponding table name + for (let i = 0; i < columnInfo.length; i++) { + schemaLayout.tables[schemaLayout.tableNames[i]] = columnInfo[i]; + } + resolve(schemaLayout); + }); }) .catch(() => { - console.log('error in models.ts') - }) + console.log('error in models.ts'); + }); }); }, @@ -241,17 +245,17 @@ module.exports = { if (Object.keys(keyObject[tableName].primaryKeyColumns).length) { let queryString: string = `ALTER TABLE ${tableName} `; let count: number = 0; - + for (const pk in keyObject[tableName].primaryKeyColumns) { if (count > 0) queryString += `, `; queryString += `ADD CONSTRAINT "${tableName}_pk${count}" PRIMARY KEY ("${pk}")`; count += 1; } - + queryString += `;`; // wait for the previous query to return before moving on to the next table await pool.query(queryString); - } + } } } return; @@ -261,18 +265,21 @@ module.exports = { // iterate over table's keyObject property, add foreign key constraints for (const tableName of Object.keys(dummyDataRequest.dummyData)) { if 
(keyObject[tableName]) { - if (Object.keys(keyObject[tableName].foreignKeyColumns).length) { + if (Object.keys(keyObject[tableName].foreignKeyColumns).length) { let queryString: string = `ALTER TABLE ${tableName} `; let count: number = 0; for (const fk in keyObject[tableName].foreignKeyColumns) { - let primaryTable: string = keyObject[tableName].foreignKeyColumns[fk]; - let primaryKey: any = Object.keys(keyObject[primaryTable].primaryKeyColumns)[0]; + let primaryTable: string = + keyObject[tableName].foreignKeyColumns[fk]; + let primaryKey: any = Object.keys( + keyObject[primaryTable].primaryKeyColumns + )[0]; if (count > 0) queryString += `, `; queryString += `ADD CONSTRAINT "${tableName}_fk${count}" FOREIGN KEY ("${fk}") REFERENCES ${primaryTable}("${primaryKey}")`; count += 1; } - + queryString += `;`; // wait for the previous query to return before moving on to the next table await pool.query(queryString); @@ -280,5 +287,5 @@ module.exports = { } } return; - } -} \ No newline at end of file + }, +}; diff --git a/docker-compose.yml b/docker-compose.yml deleted file mode 100644 index c59eccfe..00000000 --- a/docker-compose.yml +++ /dev/null @@ -1,28 +0,0 @@ -version: '3.7' -services: - bb: - image: busybox - volumes: - - database-data:/var/lib/postgresql/data - ports: - - 5001:5001 - container_name: busybox-1 - db: - image: postgres:12 - environment: - POSTGRES_PASSWORD: postgres - POSTGRES_USER: postgres - POSTGRES_DB: defaultDB - volumes: - - database-data:/var/lib/postgresql/data - depends_on: - - bb - container_name: postgres-1 - ports: - - 5432:5432 - hostname: localhost - networks: - - default - -volumes: - database-data: # named volumes can be managed easier using docker-compose \ No newline at end of file diff --git a/frontend/assets/images/chrisakinrinade.png b/frontend/assets/images/chrisakinrinade.png new file mode 100644 index 00000000..e446fc9d Binary files /dev/null and b/frontend/assets/images/chrisakinrinade.png differ diff --git 
a/frontend/assets/images/interface.png b/frontend/assets/images/interface.png deleted file mode 100644 index bb2be6f1..00000000 Binary files a/frontend/assets/images/interface.png and /dev/null differ diff --git a/frontend/assets/images/jameskolotouros.png b/frontend/assets/images/jameskolotouros.png new file mode 100644 index 00000000..4234bb66 Binary files /dev/null and b/frontend/assets/images/jameskolotouros.png differ diff --git a/frontend/assets/images/jennifercourtner.png b/frontend/assets/images/jennifercourtner.png new file mode 100644 index 00000000..66686962 Binary files /dev/null and b/frontend/assets/images/jennifercourtner.png differ diff --git a/frontend/assets/images/katieklochan.png b/frontend/assets/images/katieklochan.png new file mode 100644 index 00000000..49e388f0 Binary files /dev/null and b/frontend/assets/images/katieklochan.png differ diff --git a/frontend/assets/images/seeqr_desktop.jpg b/frontend/assets/images/seeqr_desktop.jpg new file mode 100644 index 00000000..642a999a Binary files /dev/null and b/frontend/assets/images/seeqr_desktop.jpg differ diff --git a/frontend/assets/stylesheets/css/style.css b/frontend/assets/stylesheets/css/style.css index 2cf60152..c83e3794 100644 --- a/frontend/assets/stylesheets/css/style.css +++ b/frontend/assets/stylesheets/css/style.css @@ -1,11 +1,11 @@ -@import url("https://fonts.googleapis.com/css2?family=PT+Sans:ital,wght@0,400;0,700;1,400;1,700&display=swap"); -@import url("https://fonts.googleapis.com/css2?family=PT+Mono&display=swap"); -@import url("https://fonts.googleapis.com/css2?family=PT+Sans:ital,wght@0,400;0,700;1,400;1,700&display=swap"); -@import url("https://fonts.googleapis.com/css2?family=PT+Mono&display=swap"); -@import url("https://fonts.googleapis.com/css2?family=PT+Sans:ital,wght@0,400;0,700;1,400;1,700&display=swap"); -@import url("https://fonts.googleapis.com/css2?family=PT+Mono&display=swap"); -@import 
url("https://fonts.googleapis.com/css2?family=PT+Sans:ital,wght@0,400;0,700;1,400;1,700&display=swap"); -@import url("https://fonts.googleapis.com/css2?family=PT+Mono&display=swap"); +@import url('https://fonts.googleapis.com/css2?family=PT+Sans:ital,wght@0,400;0,700;1,400;1,700&display=swap'); +@import url('https://fonts.googleapis.com/css2?family=PT+Mono&display=swap'); +@import url('https://fonts.googleapis.com/css2?family=PT+Sans:ital,wght@0,400;0,700;1,400;1,700&display=swap'); +@import url('https://fonts.googleapis.com/css2?family=PT+Mono&display=swap'); +@import url('https://fonts.googleapis.com/css2?family=PT+Sans:ital,wght@0,400;0,700;1,400;1,700&display=swap'); +@import url('https://fonts.googleapis.com/css2?family=PT+Mono&display=swap'); +@import url('https://fonts.googleapis.com/css2?family=PT+Sans:ital,wght@0,400;0,700;1,400;1,700&display=swap'); +@import url('https://fonts.googleapis.com/css2?family=PT+Mono&display=swap'); #query-window .text-field { width: 100%; height: 200px; @@ -13,7 +13,7 @@ .label-field, .schema-label { - font-family: "PT Mono", monospace; + font-family: 'PT Mono', monospace; color: #6cbba9; margin-left: 8px; } @@ -71,7 +71,7 @@ textarea { outline: none; border: none; color: #6cbba9; - font-family: "PT Mono", monospace; + font-family: 'PT Mono', monospace; } table.scroll-box { @@ -113,12 +113,12 @@ input *:focus { display: flex; -webkit-box-orient: horizontal; -webkit-box-direction: normal; - -ms-flex-direction: row; - flex-direction: row; + -ms-flex-direction: row; + flex-direction: row; padding: 1rem; -webkit-box-align: center; - -ms-flex-align: center; - align-items: center; + -ms-flex-align: center; + align-items: center; } #dummy-rows-input { @@ -138,7 +138,8 @@ input *:focus { border: 1px solid #444c50; } -.dummy-data-table th, .dummy-data-table tr { +.dummy-data-table th, +.dummy-data-table tr { padding: 0.5rem; border-bottom: 1px solid #444c50; } @@ -163,14 +164,14 @@ input *:focus { display: flex; -webkit-box-orient: 
horizontal; -webkit-box-direction: normal; - -ms-flex-direction: row; - flex-direction: row; + -ms-flex-direction: row; + flex-direction: row; -webkit-box-align: center; - -ms-flex-align: center; - align-items: center; + -ms-flex-align: center; + align-items: center; -webkit-box-pack: justify; - -ms-flex-pack: justify; - justify-content: space-between; + -ms-flex-pack: justify; + justify-content: space-between; } #track { @@ -186,13 +187,13 @@ input *:focus { display: flex; -webkit-box-orient: horizontal; -webkit-box-direction: normal; - -ms-flex-direction: row; - flex-direction: row; + -ms-flex-direction: row; + flex-direction: row; -webkit-box-align: center; - -ms-flex-align: center; - align-items: center; + -ms-flex-align: center; + align-items: center; -ms-flex-pack: distribute; - justify-content: space-around; + justify-content: space-around; } #data-table { @@ -242,7 +243,7 @@ input *:focus { /* Track */ ::-webkit-scrollbar-track { -webkit-box-shadow: inset 0px 0px 5px grey; - box-shadow: inset 0px 0px 5px grey; + box-shadow: inset 0px 0px 5px grey; border-radius: 10px; } @@ -264,18 +265,18 @@ input *:focus { display: -ms-flexbox; display: flex; -webkit-box-flex: 1; - -ms-flex: 1; - flex: 1; + -ms-flex: 1; + flex: 1; -webkit-box-orient: vertical; -webkit-box-direction: normal; - -ms-flex-direction: column; - flex-direction: column; + -ms-flex-direction: column; + flex-direction: column; -webkit-box-pack: center; - -ms-flex-pack: center; - justify-content: center; + -ms-flex-pack: center; + justify-content: center; -webkit-box-align: center; - -ms-flex-align: center; - align-items: center; + -ms-flex-align: center; + align-items: center; height: 100vh; color: #c6d2d5; } @@ -286,8 +287,8 @@ input *:focus { display: flex; -webkit-box-orient: vertical; -webkit-box-direction: normal; - -ms-flex-direction: column; - flex-direction: column; + -ms-flex-direction: column; + flex-direction: column; margin-top: 30px; text-align: center; } @@ -298,11 +299,11 @@ input 
*:focus { display: flex; -webkit-box-orient: horizontal; -webkit-box-direction: normal; - -ms-flex-direction: row; - flex-direction: row; + -ms-flex-direction: row; + flex-direction: row; margin-top: 50px; -ms-flex-pack: distribute; - justify-content: space-around; + justify-content: space-around; } #splash-page .splash-buttons button { @@ -329,7 +330,7 @@ input *:focus { } #splash-page .logo { - background-image: url("../../images/logo_color.png"); + background-image: url('../../images/logo_color.png'); width: 360px; height: 362px; } @@ -340,11 +341,11 @@ input *:focus { display: flex; -webkit-box-orient: vertical; -webkit-box-direction: normal; - -ms-flex-direction: column; - flex-direction: column; + -ms-flex-direction: column; + flex-direction: column; -webkit-box-align: center; - -ms-flex-align: center; - align-items: center; + -ms-flex-align: center; + align-items: center; margin: 25px; } @@ -354,11 +355,11 @@ input *:focus { display: flex; -webkit-box-orient: vertical; -webkit-box-direction: normal; - -ms-flex-direction: column; - flex-direction: column; + -ms-flex-direction: column; + flex-direction: column; -webkit-box-align: center; - -ms-flex-align: center; - align-items: center; + -ms-flex-align: center; + align-items: center; margin: 25px; } @@ -368,11 +369,11 @@ input *:focus { display: flex; -webkit-box-orient: horizontal; -webkit-box-direction: normal; - -ms-flex-direction: row; - flex-direction: row; + -ms-flex-direction: row; + flex-direction: row; height: 100vh; overflow: hidden; - background-image: url("../../images/logo_monochrome.png"); + background-image: url('../../images/logo_monochrome.png'); background-repeat: no-repeat; background-position-x: right; background-position-y: bottom; @@ -385,8 +386,8 @@ input *:focus { display: flex; -webkit-box-orient: vertical; -webkit-box-direction: normal; - -ms-flex-direction: column; - flex-direction: column; + -ms-flex-direction: column; + flex-direction: column; padding: 15px; background-color: 
#292a30; } @@ -398,8 +399,8 @@ input *:focus { display: flex; -webkit-box-orient: vertical; -webkit-box-direction: normal; - -ms-flex-direction: column; - flex-direction: column; + -ms-flex-direction: column; + flex-direction: column; } .history-container { @@ -408,8 +409,8 @@ input *:focus { display: flex; -webkit-box-orient: vertical; -webkit-box-direction: normal; - -ms-flex-direction: column; - flex-direction: column; + -ms-flex-direction: column; + flex-direction: column; height: 250px; overflow-y: auto; } @@ -419,8 +420,8 @@ input *:focus { display: -ms-flexbox; display: flex; -webkit-box-flex: 1; - -ms-flex-positive: 1; - flex-grow: 1; + -ms-flex-positive: 1; + flex-grow: 1; } #main-right { @@ -429,11 +430,11 @@ input *:focus { display: flex; -webkit-box-orient: vertical; -webkit-box-direction: normal; - -ms-flex-direction: column; - flex-direction: column; + -ms-flex-direction: column; + flex-direction: column; -webkit-box-flex: 1; - -ms-flex-positive: 1; - flex-grow: 1; + -ms-flex-positive: 1; + flex-grow: 1; height: 100%; } @@ -443,8 +444,8 @@ input *:focus { display: flex; -webkit-box-orient: horizontal; -webkit-box-direction: normal; - -ms-flex-direction: row; - flex-direction: row; + -ms-flex-direction: row; + flex-direction: row; height: 100%; } @@ -454,12 +455,12 @@ input *:focus { display: flex; -webkit-box-orient: vertical; -webkit-box-direction: normal; - -ms-flex-direction: column; - flex-direction: column; + -ms-flex-direction: column; + flex-direction: column; width: 50%; -webkit-box-flex: 1; - -ms-flex: 1; - flex: 1; + -ms-flex: 1; + flex: 1; padding: 15px; border-right: 0.5px solid #444c50; } @@ -470,8 +471,8 @@ input *:focus { display: flex; -webkit-box-orient: vertical; -webkit-box-direction: normal; - -ms-flex-direction: column; - flex-direction: column; + -ms-flex-direction: column; + flex-direction: column; height: 50%; z-index: 1000; } @@ -482,11 +483,11 @@ input *:focus { display: flex; -webkit-box-orient: vertical; 
-webkit-box-direction: normal; - -ms-flex-direction: column; - flex-direction: column; + -ms-flex-direction: column; + flex-direction: column; -webkit-box-flex: 1; - -ms-flex-positive: 1; - flex-grow: 1; + -ms-flex-positive: 1; + flex-grow: 1; } #schema-right { @@ -495,8 +496,8 @@ input *:focus { display: flex; -webkit-box-orient: vertical; -webkit-box-direction: normal; - -ms-flex-direction: column; - flex-direction: column; + -ms-flex-direction: column; + flex-direction: column; width: 50%; padding: 15px; height: 100%; @@ -507,12 +508,12 @@ input *:focus { display: -ms-flexbox; display: flex; -webkit-box-flex: 1; - -ms-flex-positive: 1; - flex-grow: 1; + -ms-flex-positive: 1; + flex-grow: 1; -webkit-box-orient: vertical; -webkit-box-direction: normal; - -ms-flex-direction: column; - flex-direction: column; + -ms-flex-direction: column; + flex-direction: column; } .results-container { @@ -521,8 +522,8 @@ input *:focus { display: flex; -webkit-box-orient: vertical; -webkit-box-direction: normal; - -ms-flex-direction: column; - flex-direction: column; + -ms-flex-direction: column; + flex-direction: column; height: 300px; overflow-y: auto; } @@ -535,11 +536,11 @@ input *:focus { -webkit-transition: 1.1s ease-out; transition: 1.1s ease-out; -webkit-box-shadow: -1rem 1rem 1rem rgba(0, 0, 0, 0.2); - box-shadow: -1rem 1rem 1rem rgba(0, 0, 0, 0.2); + box-shadow: -1rem 1rem 1rem rgba(0, 0, 0, 0.2); -webkit-filter: blur(0); - filter: blur(0); + filter: blur(0); -webkit-transform: scale(1); - transform: scale(1); + transform: scale(1); opacity: 1; visibility: visible; padding: 40px; @@ -557,11 +558,11 @@ input *:focus { opacity: 0; visibility: hidden; -webkit-filter: blur(8px); - filter: blur(8px); + filter: blur(8px); -webkit-transform: scale(0.33); - transform: scale(0.33); + transform: scale(0.33); -webkit-box-shadow: 1rem 0 0 rgba(0, 0, 0, 0.2); - box-shadow: 1rem 0 0 rgba(0, 0, 0, 0.2); + box-shadow: 1rem 0 0 rgba(0, 0, 0, 0.2); } .modal h2 { @@ -578,7 +579,7 @@ input 
*:focus { margin: 10px 0; display: block; color: #6cbba9; - font-family: "PT Mono", monospace; + font-family: 'PT Mono', monospace; } .schema-text-field { @@ -592,8 +593,8 @@ input *:focus { display: flex; -webkit-box-orient: horizontal; -webkit-box-direction: normal; - -ms-flex-direction: row; - flex-direction: row; + -ms-flex-direction: row; + flex-direction: row; } #horizontal { @@ -607,11 +608,11 @@ input *:focus { display: flex; -webkit-box-orient: horizontal; -webkit-box-direction: normal; - -ms-flex-direction: row; - flex-direction: row; + -ms-flex-direction: row; + flex-direction: row; -webkit-box-align: baseline; - -ms-flex-align: baseline; - align-items: baseline; + -ms-flex-align: baseline; + align-items: baseline; } #load-button { @@ -623,25 +624,26 @@ input *:focus { display: -ms-flexbox; display: flex; -webkit-box-align: center; - -ms-flex-align: center; - align-items: center; + -ms-flex-align: center; + align-items: center; text-align: center; } -.separator::before, .separator::after { +.separator::before, +.separator::after { content: ''; -webkit-box-flex: 1; - -ms-flex: 1; - flex: 1; + -ms-flex: 1; + flex: 1; border-bottom: 1px solid #ccc; } .separator::before { - margin-right: .25em; + margin-right: 0.25em; } .separator::after { - margin-left: .25em; + margin-left: 0.25em; } .copy-instance { @@ -650,11 +652,11 @@ input *:focus { display: flex; -webkit-box-orient: horizontal; -webkit-box-direction: normal; - -ms-flex-direction: row; - flex-direction: row; + -ms-flex-direction: row; + flex-direction: row; -webkit-box-align: baseline; - -ms-flex-align: baseline; - align-items: baseline; + -ms-flex-align: baseline; + align-items: baseline; } #select-dropdown { @@ -674,11 +676,11 @@ input *:focus { display: flex; -webkit-box-orient: horizontal; -webkit-box-direction: normal; - -ms-flex-direction: row; - flex-direction: row; + -ms-flex-direction: row; + flex-direction: row; -webkit-box-align: center; - -ms-flex-align: center; - align-items: center; + 
-ms-flex-align: center; + align-items: center; } #copy-button { @@ -691,14 +693,14 @@ input *:focus { display: flex; -webkit-box-orient: vertical; -webkit-box-direction: normal; - -ms-flex-direction: column; - flex-direction: column; + -ms-flex-direction: column; + flex-direction: column; -webkit-box-align: center; - -ms-flex-align: center; - align-items: center; + -ms-flex-align: center; + align-items: center; -webkit-box-pack: center; - -ms-flex-pack: center; - justify-content: center; + -ms-flex-pack: center; + justify-content: center; width: 100px; height: 50px; position: absolute; @@ -716,7 +718,7 @@ input *:focus { body { background-color: #2b2d35; - font-family: "PT Sans", sans-serif; + font-family: 'PT Sans', sans-serif; font-weight: 100; font-size: 1em; line-height: 1/5; @@ -789,7 +791,7 @@ h4 { outline: none; color: #c6d2d5; -webkit-box-shadow: none; - box-shadow: none; + box-shadow: none; background-repeat: no-repeat; border: none; cursor: pointer; @@ -806,7 +808,7 @@ h4 { color: #c6d2d5; display: block; -ms-flex-line-pack: center; - align-content: center; + align-content: center; padding: 10px; text-decoration: none; font-family: 'PT Mono', monospace; diff --git a/frontend/assets/stylesheets/scss/modal.scss b/frontend/assets/stylesheets/scss/modal.scss index 3d679762..d6be172f 100644 --- a/frontend/assets/stylesheets/scss/modal.scss +++ b/frontend/assets/stylesheets/scss/modal.scss @@ -58,7 +58,7 @@ } .modal-buttons { display: flex; - flex-direction: row; + flex-direction: row; } #horizontal { @@ -70,7 +70,6 @@ display: flex; flex-direction: row; align-items: baseline; - } #load-button { @@ -82,16 +81,17 @@ align-items: center; text-align: center; } -.separator::before, .separator::after { +.separator::before, +.separator::after { content: ''; flex: 1; border-bottom: 1px solid #ccc; } .separator::before { - margin-right: .25em; + margin-right: 0.25em; } .separator::after { - margin-left: .25em; + margin-left: 0.25em; } .copy-instance { @@ -107,6 +107,7 
@@ #copy-data-checkbox { min-width: 10px; margin-left: 0.5rem; + margin-left: 1rem; width: 1.2rem; height: 1.2rem; } @@ -119,6 +120,7 @@ #copy-button { margin-top: 20px; + margin-left: 40px; } #loading-modal { @@ -134,4 +136,4 @@ bottom: 50%; left: 50%; z-index: 1020; -} \ No newline at end of file +} diff --git a/frontend/assets/stylesheets/scss/style.scss b/frontend/assets/stylesheets/scss/style.scss index c463020c..3d202a7c 100644 --- a/frontend/assets/stylesheets/scss/style.scss +++ b/frontend/assets/stylesheets/scss/style.scss @@ -60,7 +60,6 @@ h4 { height: 200px; } - #add-query-button { width: 120px; margin-bottom: 15px; @@ -115,11 +114,11 @@ h4 { .bar-chart { margin-top: 3rem; - height: 300px; - display: block; + height: 300px; + display: block; } - -.tab-list { + +.tab-list { border-bottom: 2px solid #c6d2d5; // padding-left: 0 30 0 0; margin-right: 10px; @@ -133,7 +132,7 @@ h4 { margin-bottom: -1px; padding: 0.5rem 0.75rem; font-weight: 500; - letter-spacing: 0.5px; + letter-spacing: 0.5px; cursor: pointer; border: solid #ccc; @@ -141,13 +140,13 @@ h4 { border-radius: 15px 15px 0px 0px; } -.tab-list-item:hover{ - // background-color: #c6d2d5; - color:#c6d2d5; - } - +.tab-list-item:hover { + // background-color: #c6d2d5; + color: #c6d2d5; +} + .tab-list-active { - background-color:rgb(108, 187, 169); + background-color: rgb(108, 187, 169); border: solid #ccc; border-width: 1px 1px 0 1px; border-radius: 15px 15px 0px 0px; @@ -172,14 +171,14 @@ h4 { } #input-schema-button:hover { - // background-color: #c6d2d5; - color:#c6d2d5; -} - + // background-color: #c6d2d5; + color: #c6d2d5; +} + button:hover { cursor: pointer; } // #generate-data-button { // margin-top: 30px; -// } \ No newline at end of file +// } diff --git a/frontend/components/App.tsx b/frontend/components/App.tsx index d8f6f730..aca28e0f 100644 --- a/frontend/components/App.tsx +++ b/frontend/components/App.tsx @@ -27,15 +27,14 @@ export class App extends Component { handleFileClick(event: 
ClickEvent) { dialog - .showOpenDialog( - { - properties: ['openFile'], - filters: [{ name: 'Custom File Type', extensions: ['tar', 'sql'] }], - message: 'Please upload .sql or .tar database file' - }, - ) + .showOpenDialog({ + properties: ['openFile'], + filters: [{ name: 'Custom File Type', extensions: ['tar', 'sql'] }], + message: 'Please upload .sql or .tar database file', + }) .then((result: object) => { const filePathArr = result['filePaths']; + // send via channel to main process if (!result['canceled']) { ipcRenderer.send('upload-file', filePathArr); @@ -68,8 +67,8 @@ export class App extends Component { handleSkipClick={this.handleSkipClick} /> ) : ( - - )} + + )} ); } diff --git a/frontend/components/MainPanel.tsx b/frontend/components/MainPanel.tsx index 75e57a65..db2cc2e5 100644 --- a/frontend/components/MainPanel.tsx +++ b/frontend/components/MainPanel.tsx @@ -1,8 +1,8 @@ import { dialog } from 'electron'; import React, { Component } from 'react'; -import { Compare } from './leftPanel/Compare'; -import History from './leftPanel/History'; -import { Tabs } from './rightPanel/Tabs'; +import { Compare } from './rightPanel/Compare'; +import History from './rightPanel/History'; +import { Tabs } from './leftPanel/Tabs'; import LoadingModal from './LoadingModal'; const { ipcRenderer } = window.require('electron'); @@ -11,13 +11,14 @@ type MainState = { queries: { queryString: string; queryData: {}[]; - queryStatistics: any + queryStatistics: any; querySchema: string; queryLabel: string; }[]; currentSchema: string; lists: any; loading: boolean; + dbSize: string; }; type MainProps = {}; @@ -34,18 +35,25 @@ class MainPanel extends Component { databaseList: ['defaultDB'], tableList: [], }, - loading: false + loading: false, + dbSize: '', }; componentDidMount() { ipcRenderer.send('return-db-list'); - + // Listening for returnedData from executing Query // Update state with new object (containing query data, query statistics, query schema // inside of state.queries 
array ipcRenderer.on('return-execute-query', (event: any, returnedData: any) => { // destructure from returnedData from backend - const { queryString, queryData, queryStatistics, queryCurrentSchema, queryLabel } = returnedData; + const { + queryString, + queryData, + queryStatistics, + queryCurrentSchema, + queryLabel, + } = returnedData; // create new query object with returnedData const newQuery = { queryString, @@ -53,32 +61,38 @@ class MainPanel extends Component { queryStatistics, querySchema: queryCurrentSchema, queryLabel, - } + }; // create copy of current queries array let queries = this.state.queries.slice(); // push new query object into copy of queries array - queries.push(newQuery) - this.setState({ queries }) + queries.push(newQuery); + this.setState({ queries }); }); - ipcRenderer.on('db-lists', (event: any, returnedLists: any) => { - this.setState(prevState => ({ - ...prevState, - lists: { - databaseList: returnedLists.databaseList, - tableList: returnedLists.tableList - } - })) - }); + ipcRenderer.on( + 'db-lists', + (event: any, returnedLists: any, returnedDbSize: string) => { + this.setState((prevState) => ({ + ...prevState, + lists: { + databaseList: returnedLists.databaseList, + tableList: returnedLists.tableList, + }, + dbSize: returnedDbSize, + })); + } + ); ipcRenderer.on('switch-to-new', (event: any) => { const newSchemaIndex = this.state.lists.databaseList.length - 1; - this.setState({currentSchema: this.state.lists.databaseList[newSchemaIndex]}); + this.setState({ + currentSchema: this.state.lists.databaseList[newSchemaIndex], + }); }); // Renders the loading modal during async functions. 
ipcRenderer.on('async-started', (event: any) => { - this.setState({ loading: true }); + this.setState({ loading: false }); // ** James/Katie - changing to false for now to avoid loading modal until we can figure out later why the async complete listener isnt kicking in }); ipcRenderer.on('async-complete', (event: any) => { @@ -88,22 +102,36 @@ class MainPanel extends Component { onClickTabItem(tabName) { ipcRenderer.send('change-db', tabName); - ipcRenderer.send('return-db-list'); + ipcRenderer.send('return-db-list', tabName); this.setState({ currentSchema: tabName }); } render() { - return (
- +
- - + +
+
+ +
-
); } diff --git a/frontend/components/rightPanel/SchemaContainer.tsx b/frontend/components/leftPanel/SchemaContainer.tsx similarity index 77% rename from frontend/components/rightPanel/SchemaContainer.tsx rename to frontend/components/leftPanel/SchemaContainer.tsx index 7495d1d3..28f39371 100644 --- a/frontend/components/rightPanel/SchemaContainer.tsx +++ b/frontend/components/leftPanel/SchemaContainer.tsx @@ -6,6 +6,7 @@ type SchemaContainerProps = { queries: any; currentSchema: string; tableList: string[]; + databaseSize: string; }; type state = { @@ -27,7 +28,11 @@ export class SchemaContainer extends Component {
- +
diff --git a/frontend/components/rightPanel/Tabs.tsx b/frontend/components/leftPanel/Tabs.tsx similarity index 69% rename from frontend/components/rightPanel/Tabs.tsx rename to frontend/components/leftPanel/Tabs.tsx index 6573120b..01bcbbb6 100644 --- a/frontend/components/rightPanel/Tabs.tsx +++ b/frontend/components/leftPanel/Tabs.tsx @@ -6,12 +6,13 @@ import { Tab } from './tabsChildren/Tab'; const { ipcRenderer } = window.require('electron'); type TabsProps = { - currentSchema: string, - tabList: string[], - queries: any, - onClickTabItem: any, - tableList: string[] -} + currentSchema: string; + tabList: string[]; + queries: any; + onClickTabItem: any; + tableList: string[]; + databaseSize: string; +}; type state = { show: boolean; @@ -22,27 +23,28 @@ export class Tabs extends Component { this.showModal = this.showModal.bind(this); } state: state = { - show: false + show: false, }; showModal = (event: any) => { this.setState({ show: true }); }; - componentDidMount() { // After schema is successfully sent to backend, backend spins up new database with inputted schemaName. // It will send the frontend an updated variable 'lists' that is an array of updated lists of all the tabs (which is the same - // thing as all the databases). We open a channel to listen for it here inside of componendDidMount, then + // thing as all the databases). We open a channel to listen for it here inside of componentDidMount, then // we invoke onClose to close schemaModal ONLY after we are sure that backend has created that channel. 
ipcRenderer.on('db-lists', (event: any, returnedLists: any) => { - this.setState({currentSchema: returnedLists}) + this.setState({ + currentSchema: returnedLists, + }); this.onClose(event); - }) + }); } onClose = (event: any) => { - this.setState({ show: false }) + this.setState({ show: false }); }; render() { @@ -51,9 +53,12 @@ export class Tabs extends Component { tabList, currentSchema, queries, + databaseSize, } = this.props; - const activeTabQueries = queries.filter((query) => query.querySchema === currentSchema); + const activeTabQueries = queries.filter( + (query) => query.querySchema === currentSchema + ); return (
@@ -78,17 +83,30 @@ export class Tabs extends Component { }} > + - + - +
{tabList.map((tab, index) => { if (tab !== currentSchema) return undefined; - return ; + return ( + + ); })}
); } -} \ No newline at end of file +} diff --git a/frontend/components/rightPanel/schemaChildren/Data.tsx b/frontend/components/leftPanel/schemaChildren/Data.tsx similarity index 100% rename from frontend/components/rightPanel/schemaChildren/Data.tsx rename to frontend/components/leftPanel/schemaChildren/Data.tsx diff --git a/frontend/components/rightPanel/schemaChildren/DummyDataPanel.tsx b/frontend/components/leftPanel/schemaChildren/DummyDataPanel.tsx similarity index 100% rename from frontend/components/rightPanel/schemaChildren/DummyDataPanel.tsx rename to frontend/components/leftPanel/schemaChildren/DummyDataPanel.tsx diff --git a/frontend/components/rightPanel/schemaChildren/Query.tsx b/frontend/components/leftPanel/schemaChildren/Query.tsx similarity index 83% rename from frontend/components/rightPanel/schemaChildren/Query.tsx rename to frontend/components/leftPanel/schemaChildren/Query.tsx index 18f24ce4..6afda0d0 100644 --- a/frontend/components/rightPanel/schemaChildren/Query.tsx +++ b/frontend/components/leftPanel/schemaChildren/Query.tsx @@ -15,9 +15,10 @@ import CodeMirror from '@skidding/react-codemirror'; *********************** TYPESCRIPT: TYPES *********************** ************************************************************/ -type QueryProps = { +type QueryProps = { currentSchema: string; - tableList: string[]; + tableList: string[]; + dbSize: string; }; type state = { @@ -33,22 +34,20 @@ class Query extends Component { super(props); this.handleQuerySubmit = this.handleQuerySubmit.bind(this); this.updateCode = this.updateCode.bind(this); - this.handleTrackQuery = this.handleTrackQuery.bind(this); + this.handleTrackQuery = this.handleTrackQuery.bind(this); } state: state = { queryString: '', queryLabel: '', show: false, - trackQuery: false + trackQuery: false, }; componentDidMount() { ipcRenderer.on('query-error', (event: any, message: string) => { - console.log('Query error: '); // dialog.showErrorBox('Error', message); - - }) + }); } // 
Updates state.queryString as user inputs query label @@ -74,7 +73,7 @@ class Query extends Component { // if query string is empty, show error if (!this.state.queryString) { dialog.showErrorBox('Please enter a Query.', ''); - } + } if (!this.state.trackQuery) { //functionality to send query but not return stats and track const queryAndSchema = { @@ -88,8 +87,7 @@ class Query extends Component { } if (this.state.trackQuery && !this.state.queryLabel) { dialog.showErrorBox('Please enter a label for the Query.', ''); - } - else if (this.state.trackQuery) { + } else if (this.state.trackQuery) { // send query and return stats from explain/analyze const queryAndSchema = { queryString: this.state.queryString, @@ -112,31 +110,35 @@ class Query extends Component { return (
+
Database Size: {this.props.dbSize}
- +

Query

track on chart: - + > +
+
+ + this.handleLabelEntry(e)} + />
-
- - this.handleLabelEntry(e)} - /> -

diff --git a/frontend/components/rightPanel/schemaChildren/SchemaModal.tsx b/frontend/components/leftPanel/schemaChildren/SchemaModal.tsx similarity index 61% rename from frontend/components/rightPanel/schemaChildren/SchemaModal.tsx rename to frontend/components/leftPanel/schemaChildren/SchemaModal.tsx index bfe168e7..bfa961ef 100644 --- a/frontend/components/rightPanel/schemaChildren/SchemaModal.tsx +++ b/frontend/components/leftPanel/schemaChildren/SchemaModal.tsx @@ -20,7 +20,7 @@ type state = { schemaEntry: string; redirect: boolean; dbCopyName: string; - copy: boolean + copy: boolean; }; class SchemaModal extends Component { @@ -45,10 +45,9 @@ class SchemaModal extends Component { schemaEntry: '', redirect: false, dbCopyName: 'Select Instance', - copy: false + copy: false, }; - // Set schema name handleSchemaName(event: any) { // convert input label name to lowercase only with no spacing to comply with db naming convention. @@ -62,6 +61,7 @@ class SchemaModal extends Component { // When file path is uploaded, query entry is cleared. handleSchemaFilePath(event: ClickEvent) { event.preventDefault(); + dialog .showOpenDialog({ properties: ['openFile'], @@ -78,20 +78,23 @@ class SchemaModal extends Component { }; if (!result['canceled']) { ipcRenderer.send('input-schema', schemaObj); - this.setState({ schemaName: ''}); + this.setState({ schemaName: '' }); } - this.setState({ dbCopyName: 'Select Instance'}); + this.setState({ dbCopyName: 'Select Instance' }); this.props.showModal(event); }) + .catch((err: object) => { - console.log('Error in handleSchemaFilePath method of SchemaModal.tsx.', err); + console.log( + 'Error in handleSchemaFilePath method of SchemaModal.tsx.', + err + ); }); } // When schema script is inserted, file path is cleared set dialog to warn user. 
handleSchemaEntry(event: any) { this.setState({ schemaEntry: event.target.value, schemaFilePath: '' }); - // this.setState({ schemaFilePath: '' }); } handleSchemaSubmit(event: any) { @@ -102,37 +105,39 @@ class SchemaModal extends Component { schemaFilePath: this.state.schemaFilePath, schemaEntry: this.state.schemaEntry, }; - ipcRenderer.send('input-schema', schemaObj); } - selectHandler = (eventKey, e: React.SyntheticEvent) => { - this.setState({ dbCopyName: eventKey }); - } + selectHandler = (eventKey, e: React.SyntheticEvent) => { + this.setState({ dbCopyName: eventKey }); // + }; handleCopyData(event: any) { - if(!this.state.copy) this.setState({ copy: true }); + if (!this.state.copy) this.setState({ copy: true }); else this.setState({ copy: false }); } dropDownList = () => { - return this.props.tabList.map((db, index) => {db}); + return this.props.tabList.map((db, index) => ( + + {db} + + )); }; handleCopyFilePath(event: any) { event.preventDefault(); - const schemaObj = { schemaName: this.state.schemaName, schemaFilePath: '', schemaEntry: '', dbCopyName: this.state.dbCopyName, - copy: this.state.copy - } + copy: this.state.copy, + }; ipcRenderer.send('input-schema', schemaObj); - this.setState({ dbCopyName: 'Select Instance'}); - this.setState({ schemaName: ''}); + this.setState({ dbCopyName: `Select Instance` }); + this.setState({ schemaName: '' }); this.props.showModal(event); } @@ -143,58 +148,65 @@ class SchemaModal extends Component { return (