From b449733c0f2764b9a94ac24f1816588cc0054f9c Mon Sep 17 00:00:00 2001 From: Jennifer Courtner Date: Thu, 10 Dec 2020 09:31:05 -0500 Subject: [PATCH 01/34] changes to channels to use psql without docker --- backend/channels.ts | 131 ++++++++-------- backend/channels.txt | 353 +++++++++++++++++++++++++++++++++++++++++++ backend/models.ts | 229 ++++++++++++++-------------- package.json | 2 +- 4 files changed, 543 insertions(+), 172 deletions(-) create mode 100644 backend/channels.txt diff --git a/backend/channels.ts b/backend/channels.ts index a8c6c09b..50bff6e5 100644 --- a/backend/channels.ts +++ b/backend/channels.ts @@ -11,27 +11,32 @@ const db = require('./models'); // Generate CLI commands to be executed in child process. const createDBFunc = (name) => { - return `docker exec postgres-1 psql -h localhost -p 5432 -U postgres -c "CREATE DATABASE ${name}"` -} -const importFileFunc = (file) => { - return `docker cp ${file} postgres-1:/data_dump`; -} + return `psql -U postgres -c "CREATE DATABASE ${name}"`; + + // return `docker exec postgres-1 psql -h localhost -p 5432 -U postgres -c "CREATE DATABASE ${name}"`; +}; +const importFileFunc = (name, file) => { + return `psql -U postgres ${name} < ${file}`; + + // return `docker cp ${file} postgres-1:/data_dump`; +}; const runSQLFunc = (dbName) => { return `docker exec postgres-1 psql -U postgres -d ${dbName} -f /data_dump`; -} +}; const runTARFunc = (dbName) => { return `docker exec postgres-1 pg_restore -U postgres -d ${dbName} /data_dump`; -} +}; const runFullCopyFunc = (dbCopyName) => { return `docker exec postgres-1 pg_dump -U postgres ${dbCopyName} -f /data_dump`; -} +}; const runHollowCopyFunc = (dbCopyName) => { return `docker exec postgres-1 pg_dump -s -U postgres ${dbCopyName} -f /data_dump`; -} +}; // Function to execute commands in the child process. const execute = (str: string, nextStep: any) => { exec(str, (error, stdout, stderr) => { + console.log('exec func', `${stdout}`); if (error) { //this shows the console error in an error message on the frontend dialog.showErrorBox(`${error.message}`, ''); @@ -44,7 +49,7 @@ const execute = (str: string, nextStep: any) => { console.log(`stderr: ${stderr}`); return; } - // console.log('exec func', `${stdout}`); + if (nextStep) nextStep(); }); }; @@ -57,32 +62,37 @@ const execute = (str: string, nextStep: any) => { let listObj: any; ipcMain.on('return-db-list', (event, args) => { - db.getLists().then(data => event.sender.send('db-lists', data)); + db.getLists().then((data) => event.sender.send('db-lists', data)); }); // Listen for skip button on Splash page. -ipcMain.on('skip-file-upload', (event) => { }); +ipcMain.on('skip-file-upload', (event) => {}); // Listen for database changes sent from the renderer upon changing tabs. ipcMain.on('change-db', (event, dbName) => { - db.changeDB(dbName) + db.changeDB(dbName); }); // Listen for file upload. Create an instance of database from pre-made .tar or .sql file. 
ipcMain.on('upload-file', (event, filePath: string) => { - // send notice to the frontend that async process has begun event.sender.send('async-started'); let dbName: string; if (process.platform === 'darwin') { - dbName = filePath[0].slice(filePath[0].lastIndexOf('/') + 1, filePath[0].lastIndexOf('.')); + dbName = filePath[0].slice( + filePath[0].lastIndexOf('/') + 1, + filePath[0].lastIndexOf('.') + ); } else { - dbName = filePath[0].slice(filePath[0].lastIndexOf('\\') + 1, filePath[0].lastIndexOf('.')); + dbName = filePath[0].slice( + filePath[0].lastIndexOf('\\') + 1, + filePath[0].lastIndexOf('.') + ); } const createDB: string = createDBFunc(dbName); - const importFile: string = importFileFunc(filePath); + const importFile: string = importFileFunc(dbName, filePath); const runSQL: string = runSQLFunc(dbName); const runTAR: string = runTARFunc(dbName); const extension: string = filePath[0].slice(filePath[0].lastIndexOf('.')); @@ -101,7 +111,7 @@ ipcMain.on('upload-file', (event, filePath: string) => { event.sender.send('switch-to-new', null); // notify frontend that async process has been completed event.sender.send('async-complete'); - }; + } // Step 4: Given the file path extension, run the appropriate command in postgres to populate db. const step4 = () => { @@ -118,7 +128,7 @@ ipcMain.on('upload-file', (event, filePath: string) => { const step2 = () => { db.changeDB(dbName); return step3(); - } + }; // Step 1: Create empty db if (extension === '.sql' || extension === '.tar') execute(createDB, step2); @@ -137,7 +147,6 @@ interface SchemaType { // OR // Listens for and handles DB copying events ipcMain.on('input-schema', (event, data: SchemaType) => { - // send notice to the frontend that async process has begun event.sender.send('async-started'); @@ -146,7 +155,7 @@ ipcMain.on('input-schema', (event, data: SchemaType) => { // generate strings that are fed into execute functions later const createDB: string = createDBFunc(dbName); - const importFile: string = importFileFunc(filePath); + const importFile: string = importFileFunc(dbName, filePath); const runSQL: string = runSQLFunc(dbName); const runTAR: string = runTARFunc(dbName); const runFullCopy: string = runFullCopyFunc(dbCopyName); @@ -156,8 +165,7 @@ ipcMain.on('input-schema', (event, data: SchemaType) => { let extension: string = ''; if (filePath.length > 0) { extension = filePath[0].slice(filePath[0].lastIndexOf('.')); - } - else extension = '.sql'; + } else extension = '.sql'; // SEQUENCE OF EXECUTING COMMANDS // Steps are in reverse order because each step is a callback function that requires the following step to be defined. 
@@ -170,7 +178,7 @@ ipcMain.on('input-schema', (event, data: SchemaType) => { event.sender.send('switch-to-new', null); // notify frontend that async process has been completed event.sender.send('async-complete'); - }; + } // Step 4: Given the file path extension, run the appropriate command in postgres to build the db const step4 = () => { @@ -186,7 +194,7 @@ ipcMain.on('input-schema', (event, data: SchemaType) => { const step3Copy = () => { db.changeDB(dbName); return step4(); - } + }; // Step 2: Change curent URI to match newly created DB const step2 = () => { @@ -210,8 +218,8 @@ ipcMain.on('input-schema', (event, data: SchemaType) => { // and now that we have changed to the new db, we can move on to importing the data file db.changeDB(dbName); return step3(); - } - } + } + }; // Step 1 : Create empty db execute(createDB, step2); @@ -226,7 +234,6 @@ interface QueryType { } ipcMain.on('execute-query-untracked', (event, data: QueryType) => { - // send notice to front end that query has been started event.sender.send('async-started'); @@ -249,7 +256,6 @@ ipcMain.on('execute-query-untracked', (event, data: QueryType) => { // Listen for queries being sent from renderer ipcMain.on('execute-query-tracked', (event, data: QueryType) => { - // send notice to front end that query has been started event.sender.send('async-started'); @@ -272,18 +278,19 @@ ipcMain.on('execute-query-tracked', (event, data: QueryType) => { frontendData.queryData = queryData.rows; if (!queryString.match(/create/i)) { // Run EXPLAIN (FORMAT JSON, ANALYZE) - db.query('EXPLAIN (FORMAT JSON, ANALYZE) ' + queryString) - .then((queryStats) => { + db.query('EXPLAIN (FORMAT JSON, ANALYZE) ' + queryString).then( + (queryStats) => { frontendData.queryStatistics = queryStats.rows; (async function getListAsync() { listObj = await db.getLists(); frontendData.lists = listObj; - event.sender.send('db-lists', listObj) + event.sender.send('db-lists', listObj); event.sender.send('return-execute-query', frontendData); event.sender.send('async-complete'); })(); - }) + } + ); } else { // Handling for tracking a create table query, can't run explain/analyze on create statements (async function getListAsync() { @@ -297,7 +304,6 @@ ipcMain.on('execute-query-tracked', (event, data: QueryType) => { .catch((error: string) => { console.log('ERROR in execute-query-tracked channel in main.ts', error); }); - }); interface dummyDataRequest { @@ -306,37 +312,42 @@ interface dummyDataRequest { } ipcMain.on('generate-dummy-data', (event: any, data: dummyDataRequest) => { - // send notice to front end that DD generation has been started event.sender.send('async-started'); let schemaLayout: any; let dummyDataRequest: dummyDataRequest = data; let tableMatricesArray: any; - let keyObject: any = "Unresolved"; - - db.createKeyObject() - .then((result) => { - // set keyObject equal to the result of this query - keyObject = result; - db.dropKeyColumns(keyObject) - .then(() => { - db.addNewKeyColumns(keyObject) - .then(() => { - db.getSchemaLayout() - .then((result) => { - schemaLayout = result; - // generate the dummy data and save it into matrices associated with table names - tableMatricesArray = generateDummyData(schemaLayout, dummyDataRequest, keyObject); - //iterate through tableMatricesArray to write individual .csv files - for (const tableObject of tableMatricesArray) { - // write all entries in tableMatrix to csv file - writeCSVFile(tableObject, schemaLayout, keyObject, dummyDataRequest, event); - } - }); - }); + let keyObject: any = 'Unresolved'; + + 
+  db.createKeyObject().then((result) => {
+    // set keyObject equal to the result of this query
+    keyObject = result;
+    db.dropKeyColumns(keyObject).then(() => {
+      db.addNewKeyColumns(keyObject).then(() => {
+        db.getSchemaLayout().then((result) => {
+          schemaLayout = result;
+          // generate the dummy data and save it into matrices associated with table names
+          tableMatricesArray = generateDummyData(
+            schemaLayout,
+            dummyDataRequest,
+            keyObject
+          );
+          //iterate through tableMatricesArray to write individual .csv files
+          for (const tableObject of tableMatricesArray) {
+            // write all entries in tableMatrix to csv file
+            writeCSVFile(
+              tableObject,
+              schemaLayout,
+              keyObject,
+              dummyDataRequest,
+              event
+            );
+          }
+        });
+      });
+    });
+  });
+});
+
-export default execute;
\ No newline at end of file
+export default execute;
diff --git a/backend/channels.txt b/backend/channels.txt
new file mode 100644
index 00000000..50bff6e5
--- /dev/null
+++ b/backend/channels.txt
@@ -0,0 +1,353 @@
+// Import parts of electron to use
+import { dialog, ipcMain } from 'electron';
+
+const { generateDummyData, writeCSVFile } = require('./DummyD/dummyDataMain');
+const { exec } = require('child_process');
+const db = require('./models');
+
+/************************************************************
+ *********************** Helper functions *******************
+ ************************************************************/
+
+// Generate CLI commands to be executed in child process.
+const createDBFunc = (name) => {
+  return `psql -U postgres -c "CREATE DATABASE ${name}"`;
+
+  // return `docker exec postgres-1 psql -h localhost -p 5432 -U postgres -c "CREATE DATABASE ${name}"`;
+};
+const importFileFunc = (name, file) => {
+  return `psql -U postgres ${name} < ${file}`;
+
+  // return `docker cp ${file} postgres-1:/data_dump`;
+};
+const runSQLFunc = (dbName) => {
+  return `docker exec postgres-1 psql -U postgres -d ${dbName} -f /data_dump`;
+};
+const runTARFunc = (dbName) => {
+  return `docker exec postgres-1 pg_restore -U postgres -d ${dbName} /data_dump`;
+};
+const runFullCopyFunc = (dbCopyName) => {
+  return `docker exec postgres-1 pg_dump -U postgres ${dbCopyName} -f /data_dump`;
+};
+const runHollowCopyFunc = (dbCopyName) => {
+  return `docker exec postgres-1 pg_dump -s -U postgres ${dbCopyName} -f /data_dump`;
+};
+
+// Function to execute commands in the child process.
+const execute = (str: string, nextStep: any) => {
+  exec(str, (error, stdout, stderr) => {
+    console.log('exec func', `${stdout}`);
+    if (error) {
+      //this shows the console error in an error message on the frontend
+      dialog.showErrorBox(`${error.message}`, '');
+      console.log(`error: ${error.message}`);
+      return;
+    }
+    if (stderr) {
+      //this shows the console error in an error message on the frontend
+      dialog.showErrorBox(`${stderr}`, '');
+      console.log(`stderr: ${stderr}`);
+      return;
+    }
+
+    if (nextStep) nextStep();
+  });
+};
+
+/************************************************************
+ *********************** IPC CHANNELS ***********************
+ ************************************************************/
+
+// Global variable to store list of databases and tables to provide to frontend upon refreshing view.
+let listObj: any;
+
+ipcMain.on('return-db-list', (event, args) => {
+  db.getLists().then((data) => event.sender.send('db-lists', data));
+});
+
+// Listen for skip button on Splash page.
+ipcMain.on('skip-file-upload', (event) => {});
+
+// Listen for database changes sent from the renderer upon changing tabs.
+ipcMain.on('change-db', (event, dbName) => { + db.changeDB(dbName); +}); + +// Listen for file upload. Create an instance of database from pre-made .tar or .sql file. +ipcMain.on('upload-file', (event, filePath: string) => { + // send notice to the frontend that async process has begun + event.sender.send('async-started'); + + let dbName: string; + if (process.platform === 'darwin') { + dbName = filePath[0].slice( + filePath[0].lastIndexOf('/') + 1, + filePath[0].lastIndexOf('.') + ); + } else { + dbName = filePath[0].slice( + filePath[0].lastIndexOf('\\') + 1, + filePath[0].lastIndexOf('.') + ); + } + + const createDB: string = createDBFunc(dbName); + const importFile: string = importFileFunc(dbName, filePath); + const runSQL: string = runSQLFunc(dbName); + const runTAR: string = runTARFunc(dbName); + const extension: string = filePath[0].slice(filePath[0].lastIndexOf('.')); + + // SEQUENCE OF EXECUTING COMMANDS + // Steps are in reverse order because each step is a callback function that requires the following step to be defined. + + // Step 5: Changes the pg URI the newly created database, queries new database, then sends list of tables and list of databases to frontend. + async function sendLists() { + listObj = await db.getLists(); + console.log('channels: ', listObj); + event.sender.send('db-lists', listObj); + // Send schema name back to frontend, so frontend can load tab name. + event.sender.send('return-schema-name', dbName); + // tell the front end to switch tabs to the newly created database + event.sender.send('switch-to-new', null); + // notify frontend that async process has been completed + event.sender.send('async-complete'); + } + + // Step 4: Given the file path extension, run the appropriate command in postgres to populate db. + const step4 = () => { + let runCmd: string = ''; + if (extension === '.sql') runCmd = runSQL; + else if (extension === '.tar') runCmd = runTAR; + execute(runCmd, sendLists); + }; + + // Step 3: Import database file from file path into docker container + const step3 = () => execute(importFile, step4); + + // Step 2: Change curent URI to match newly created DB + const step2 = () => { + db.changeDB(dbName); + return step3(); + }; + + // Step 1: Create empty db + if (extension === '.sql' || extension === '.tar') execute(createDB, step2); + else console.log('INVALID FILE TYPE: Please use .tar or .sql extensions.'); +}); + +interface SchemaType { + schemaName: string; + schemaFilePath: string[]; + schemaEntry: string; + dbCopyName: string; + copy: boolean; +} + +// The following function creates an instance of database from pre-made .tar or .sql file. 
+// OR +// Listens for and handles DB copying events +ipcMain.on('input-schema', (event, data: SchemaType) => { + // send notice to the frontend that async process has begun + event.sender.send('async-started'); + + const { schemaName: dbName, dbCopyName, copy } = data; + let { schemaFilePath: filePath } = data; + + // generate strings that are fed into execute functions later + const createDB: string = createDBFunc(dbName); + const importFile: string = importFileFunc(dbName, filePath); + const runSQL: string = runSQLFunc(dbName); + const runTAR: string = runTARFunc(dbName); + const runFullCopy: string = runFullCopyFunc(dbCopyName); + const runHollowCopy: string = runHollowCopyFunc(dbCopyName); + + // determine if the file is a sql or a tar file, in the case of a copy, we will not have a filepath so we just hard-code the extension to be sql + let extension: string = ''; + if (filePath.length > 0) { + extension = filePath[0].slice(filePath[0].lastIndexOf('.')); + } else extension = '.sql'; + + // SEQUENCE OF EXECUTING COMMANDS + // Steps are in reverse order because each step is a callback function that requires the following step to be defined. + + // Step 5: Changes the pg URI to look to the newly created database and queries all the tables in that database and sends it to frontend. + async function sendLists() { + listObj = await db.getLists(); + event.sender.send('db-lists', listObj); + // tell the front end to switch tabs to the newly created database + event.sender.send('switch-to-new', null); + // notify frontend that async process has been completed + event.sender.send('async-complete'); + } + + // Step 4: Given the file path extension, run the appropriate command in postgres to build the db + const step4 = () => { + let runCmd: string = ''; + if (extension === '.sql') runCmd = runSQL; + else if (extension === '.tar') runCmd = runTAR; + execute(runCmd, sendLists); + }; + + // Step 3: Import database file from file path into docker container + const step3 = () => execute(importFile, step4); + // skip step three which is only for importing files and instead change the current db to the newly created one + const step3Copy = () => { + db.changeDB(dbName); + return step4(); + }; + + // Step 2: Change curent URI to match newly created DB + const step2 = () => { + // if we are copying + if (copy !== undefined) { + // first, we need to change the current DB instance to that of the one we need to copy, so we'll head to the changeDB function in the models file + db.changeDB(dbCopyName); + // now that our DB has been changed to the one we wish to copy, we need to either make an exact copy or a hollow copy using pg_dump OR pg_dump -s + // this generates a pg_dump file from the specified db and saves it to a location in the container. 
+ // Full copy case + if (copy) { + execute(runFullCopy, step3Copy); + } + // Hollow copy case + else execute(runHollowCopy, step3Copy); + return; + } + // if we are not copying + else { + // change the current database back to the newly created one + // and now that we have changed to the new db, we can move on to importing the data file + db.changeDB(dbName); + return step3(); + } + }; + + // Step 1 : Create empty db + execute(createDB, step2); +}); + +interface QueryType { + queryCurrentSchema: string; + queryString: string; + queryLabel: string; + queryData: string; + queryStatistics: string; +} + +ipcMain.on('execute-query-untracked', (event, data: QueryType) => { + // send notice to front end that query has been started + event.sender.send('async-started'); + + // destructure object from frontend + const { queryString } = data; + // run query on db + db.query(queryString) + .then(() => { + (async function getListAsync() { + listObj = await db.getLists(); + event.sender.send('db-lists', listObj); + event.sender.send('async-complete'); + })(); + }) + .catch((error: string) => { + console.log('ERROR in execute-query-untracked channel in main.ts', error); + event.sender.send('query-error', 'Error executing query.'); + }); +}); + +// Listen for queries being sent from renderer +ipcMain.on('execute-query-tracked', (event, data: QueryType) => { + // send notice to front end that query has been started + event.sender.send('async-started'); + + // destructure object from frontend + const { queryString, queryCurrentSchema, queryLabel } = data; + + // initialize object to store all data to send to frontend + let frontendData = { + queryString, + queryCurrentSchema, + queryLabel, + queryData: '', + queryStatistics: '', + lists: {}, + }; + + // Run select * from actors; + db.query(queryString) + .then((queryData) => { + frontendData.queryData = queryData.rows; + if (!queryString.match(/create/i)) { + // Run EXPLAIN (FORMAT JSON, ANALYZE) + db.query('EXPLAIN (FORMAT JSON, ANALYZE) ' + queryString).then( + (queryStats) => { + frontendData.queryStatistics = queryStats.rows; + + (async function getListAsync() { + listObj = await db.getLists(); + frontendData.lists = listObj; + event.sender.send('db-lists', listObj); + event.sender.send('return-execute-query', frontendData); + event.sender.send('async-complete'); + })(); + } + ); + } else { + // Handling for tracking a create table query, can't run explain/analyze on create statements + (async function getListAsync() { + listObj = await db.getLists(); + frontendData.lists = listObj; + event.sender.send('db-lists', listObj); + event.sender.send('async-complete'); + })(); + } + }) + .catch((error: string) => { + console.log('ERROR in execute-query-tracked channel in main.ts', error); + }); +}); + +interface dummyDataRequest { + schemaName: string; + dummyData: {}; +} + +ipcMain.on('generate-dummy-data', (event: any, data: dummyDataRequest) => { + // send notice to front end that DD generation has been started + event.sender.send('async-started'); + + let schemaLayout: any; + let dummyDataRequest: dummyDataRequest = data; + let tableMatricesArray: any; + let keyObject: any = 'Unresolved'; + + db.createKeyObject().then((result) => { + // set keyObject equal to the result of this query + keyObject = result; + db.dropKeyColumns(keyObject).then(() => { + db.addNewKeyColumns(keyObject).then(() => { + db.getSchemaLayout().then((result) => { + schemaLayout = result; + // generate the dummy data and save it into matrices associated with table names + 
+          tableMatricesArray = generateDummyData(
+            schemaLayout,
+            dummyDataRequest,
+            keyObject
+          );
+          //iterate through tableMatricesArray to write individual .csv files
+          for (const tableObject of tableMatricesArray) {
+            // write all entries in tableMatrix to csv file
+            writeCSVFile(
+              tableObject,
+              schemaLayout,
+              keyObject,
+              dummyDataRequest,
+              event
+            );
+          }
+        });
+      });
+    });
+  });
+});
+
+export default execute;
diff --git a/backend/models.ts b/backend/models.ts
index 772d0b9b..0a753849 100644
--- a/backend/models.ts
+++ b/backend/models.ts
@@ -1,5 +1,5 @@
 const { Pool } = require('pg');
-const { getPrimaryKeys, getForeignKeys } = require('./DummyD/foreign_key_info')
+const { getPrimaryKeys, getForeignKeys } = require('./DummyD/foreign_key_info');
 
 // Initialize to a default db.
 // URI Format: postgres://username:password@hostname:port/databasename
@@ -9,50 +9,52 @@ let pool: any = new Pool({ connectionString: PG_URI });
 //helper function that creates the column objects, which are saved to the schemaLayout object
 //this function returns a promise to be resolved with Promise.all syntax
 const getColumnObjects = (tableName: string) => {
-  const queryString = "SELECT column_name, data_type, character_maximum_length FROM information_schema.columns WHERE table_name = $1;";
+  const queryString =
+    'SELECT column_name, data_type, character_maximum_length FROM information_schema.columns WHERE table_name = $1;';
   const value = [tableName];
-  return new Promise ((resolve) => {
-    pool
-      .query(queryString, value)
-      .then((result) => {
-        const columnInfoArray: any = [];
-        for (let i = 0; i < result.rows.length; i++) {
-          const columnObj: any = {
-            columnName: result.rows[i].column_name,
-            dataInfo: {
-              data_type: result.rows[i].data_type,
-              character_maxiumum_length: result.rows[i].character_maxiumum_length
-            }
-          }
-          columnInfoArray.push(columnObj)
-        }
-        resolve(columnInfoArray);
-      })
-  })
-}
+  return new Promise((resolve) => {
+    pool.query(queryString, value).then((result) => {
+      const columnInfoArray: any = [];
+      for (let i = 0; i < result.rows.length; i++) {
+        const columnObj: any = {
+          columnName: result.rows[i].column_name,
+          dataInfo: {
+            data_type: result.rows[i].data_type,
+            character_maxiumum_length: result.rows[i].character_maximum_length,
+          },
+        };
+        columnInfoArray.push(columnObj);
+      }
+      resolve(columnInfoArray);
+    });
+  });
+};
 
 // gets all the names of the current postgres instances
 const getDBNames = () => {
-  return new Promise((resolve) =>{
-    pool
-      .query('SELECT datname FROM pg_database;')
-      .then((databases) => {
-        let dbList: any = [];
-        for (let i = 0; i < databases.rows.length; ++i) {
-          let curName = databases.rows[i].datname;
-          if (curName !== 'postgres' && curName !== 'template0' && curName !== 'template1')
-            dbList.push(databases.rows[i].datname);
-        }
-        resolve(dbList);
-      })
-  })
-}
+  return new Promise((resolve) => {
+    pool.query('SELECT datname FROM pg_database;').then((databases) => {
+      let dbList: any = [];
+      for (let i = 0; i < databases.rows.length; ++i) {
+        let curName = databases.rows[i].datname;
+        if (
+          curName !== 'postgres' &&
+          curName !== 'template0' &&
+          curName !== 'template1'
+        )
+          dbList.push(databases.rows[i].datname);
+      }
+      resolve(dbList);
+    });
+  });
+};
 
 // gets all tablenames from currentschema
 const getDBLists = () => {
   return new Promise((resolve) => {
     pool
-      .query("SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' ORDER BY table_name;"
+      .query(
+        "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' ORDER BY table_name;"
+      )
       .then((tables) => {
         let tableList: any = [];
@@ -60,12 +62,11 @@ const getDBLists = () => {
         tableList.push(tables.rows[i].table_name);
       }
       resolve(tableList);
-      })
-  })
-}
+    });
+  });
+};
 
 module.exports = {
-
   query: (text, params, callback) => {
     console.log('Executed query: ', text);
     return pool.query(text, params, callback);
@@ -84,57 +85,61 @@ module.exports = {
       tableList: [], // current database's tables
       databaseList: [],
     };
-    Promise.all([getDBNames(), getDBLists()])
-      .then((data) => {
-        console.log('models: ', data);
-        listObj.databaseList = data[0];
-        listObj.tableList = data[1];
-        resolve(listObj);
-      })
-    })
-  },
+    Promise.all([getDBNames(), getDBLists()]).then((data) => {
+      console.log('models: ', data);
+      listObj.databaseList = data[0];
+      listObj.tableList = data[1];
+      resolve(listObj);
+    });
+  });
+  },
 
-
   createKeyObject: () => {
     return new Promise((resolve) => {
       // initialize the keyObject we eventually want to return out
-      const keyObject: any = {}; 
+      const keyObject: any = {};
       pool
         .query(getPrimaryKeys, null)
         .then((result) => {
           let table;
-          let pkColumn
+          let pkColumn;
           // iterate over the primary key table, adding info to our keyObject
           for (let i = 0; i < result.rows.length; i++) {
             table = result.rows[i].table_name;
            pkColumn = result.rows[i].pk_column;
             // if the table is not yet initialized within the keyObject, then initialize it
-            if (!keyObject[table]) keyObject[table] = {primaryKeyColumns: {}, foreignKeyColumns: {}};
+            if (!keyObject[table])
+              keyObject[table] = {
+                primaryKeyColumns: {},
+                foreignKeyColumns: {},
+              };
             // then just set the value at the pk column name to true for later checking
             keyObject[table].primaryKeyColumns[pkColumn] = true;
           }
-        })
-        .then(() => {
-          pool
-            .query(getForeignKeys, null)
-            .then((result) => {
-              let table;
-              let primaryTable;
-              let fkColumn;
-              // iterate over the foreign key table, adding info to our keyObject
-              for (let i = 0; i < result.rows.length; i++) {
-                table = result.rows[i].foreign_table;
-                primaryTable = result.rows[i].primary_table
-                fkColumn = result.rows[i].fk_column;
-                // if the table is not yet initialized within the keyObject, then initialize it
-                if (!keyObject[table]) keyObject[table] = {primaryKeyColumns: {}, foreignKeyColumns: {}};
-                // then set the value at the fk column name to the number of rows asked for in the primary table to which it points
-                keyObject[table].foreignKeyColumns[fkColumn] = primaryTable;
-              }
-              resolve(keyObject);
-            })
         })
-      })
+        .then(() => {
+          pool.query(getForeignKeys, null).then((result) => {
+            let table;
+            let primaryTable;
+            let fkColumn;
+            // iterate over the foreign key table, adding info to our keyObject
+            for (let i = 0; i < result.rows.length; i++) {
+              table = result.rows[i].foreign_table;
+              primaryTable = result.rows[i].primary_table;
+              fkColumn = result.rows[i].fk_column;
+              // if the table is not yet initialized within the keyObject, then initialize it
+              if (!keyObject[table])
+                keyObject[table] = {
+                  primaryKeyColumns: {},
+                  foreignKeyColumns: {},
+                };
+              // then set the value at the fk column name to the number of rows asked for in the primary table to which it points
+              keyObject[table].foreignKeyColumns[fkColumn] = primaryTable;
+            }
+            resolve(keyObject);
+          });
+        });
+    });
   },
 
   dropKeyColumns: async (keyObject: any) => {
@@ -143,21 +148,21 @@ module.exports = {
       let queryString = `ALTER TABLE ${table}`;
       let count: number = 2;
 
-      for (const pkc in keyObject[table].primaryKeyColumns){
+      for (const pkc in keyObject[table].primaryKeyColumns) {
         if (count > 2) queryString += ',';
         queryString += ` DROP COLUMN ${pkc} CASCADE`;
         count += 1;
       }
-      for (const fkc in keyObject[table].foreignKeyColumns){
+      for (const fkc in keyObject[table].foreignKeyColumns) {
         if (count > 2) queryString += ',';
-        queryString += ` DROP COLUMN ${fkc}`
+        queryString += ` DROP COLUMN ${fkc}`;
         count += 1;
       }
-      queryString += ';'
-
+      queryString += ';';
+
       return Promise.resolve(pool.query(queryString));
-    }
-
+    };
+
     // iterate over tables, running drop queries, and pushing a new promise to promise array
     for (const table in keyObject) {
       await generateAndRunDropQuery(table);
     }
 
     return;
   },
 
-  addNewKeyColumns: async (keyObject: any) => {
+  addNewKeyColumns: async (keyObject: any) => {
     // define helper function to generate and run query
     const generateAndRunAddQuery = (table: string) => {
       let queryString = `ALTER TABLE ${table}`;
       let count: number = 2;
 
-      for (const pkc in keyObject[table].primaryKeyColumns){
+      for (const pkc in keyObject[table].primaryKeyColumns) {
         if (count > 2) queryString += ',';
         queryString += ` ADD COLUMN ${pkc} INT`;
         count += 1;
       }
-      for (const fkc in keyObject[table].foreignKeyColumns){
+      for (const fkc in keyObject[table].foreignKeyColumns) {
         if (count > 2) queryString += ',';
-        queryString += ` ADD COLUMN ${fkc} INT`
+        queryString += ` ADD COLUMN ${fkc} INT`;
         count += 1;
       }
-      queryString += ';'
-
+      queryString += ';';
+
       return Promise.resolve(pool.query(queryString));
-    }
-
+    };
+
     // iterate over tables, running add queries, and pushing a new promise to promise array
-    for (const table in keyObject){
+    for (const table in keyObject) {
       await generateAndRunAddQuery(table);
     }
 
     return;
   },
@@ -202,7 +207,7 @@ module.exports = {
       tableNames: [],
       tables: {
         // tableName: [columnObj array]
-      }
+      },
     };
     pool
       // This query returns the names of all the tables in the database
@@ -216,21 +221,20 @@ module.exports = {
       }
       const promiseArray: any = [];
       for (let tableName of schemaLayout.tableNames) {
-        promiseArray.push(getColumnObjects(tableName))
+        promiseArray.push(getColumnObjects(tableName));
       }
       //we resolve all of the promises for the data info, and are returned an array of column data objects
-      Promise.all(promiseArray)
-        .then((columnInfo) => {
-          //here, we create a key for each table name and assign the array of column objects to the corresponding table name
-          for (let i = 0; i < columnInfo.length; i++) {
-            schemaLayout.tables[schemaLayout.tableNames[i]] = columnInfo[i];
-          }
-          resolve(schemaLayout);
-        })
+      Promise.all(promiseArray).then((columnInfo) => {
+        //here, we create a key for each table name and assign the array of column objects to the corresponding table name
+        for (let i = 0; i < columnInfo.length; i++) {
+          schemaLayout.tables[schemaLayout.tableNames[i]] = columnInfo[i];
+        }
+        resolve(schemaLayout);
+      });
     })
     .catch(() => {
-      console.log('error in models.ts')
-    })
+      console.log('error in models.ts');
+    });
   });
 },
 
@@ -241,17 +245,17 @@ module.exports = {
       if (Object.keys(keyObject[tableName].primaryKeyColumns).length) {
         let queryString: string = `ALTER TABLE ${tableName} `;
         let count: number = 0;
-
+
         for (const pk in keyObject[tableName].primaryKeyColumns) {
           if (count > 0) queryString += `, `;
           queryString += `ADD CONSTRAINT "${tableName}_pk${count}" PRIMARY KEY ("${pk}")`;
           count += 1;
         }
-
+
         queryString += `;`;
         // wait for the previous query to return before moving on to the next table
         await pool.query(queryString);
-      } 
+      }
     }
   }
   return;
@@ -261,18 +265,21 @@
     // iterate over table's keyObject property, add foreign key constraints
     for (const tableName of Object.keys(dummyDataRequest.dummyData)) {
       if (keyObject[tableName]) {
-        if (Object.keys(keyObject[tableName].foreignKeyColumns).length) {
+        if (Object.keys(keyObject[tableName].foreignKeyColumns).length) {
           let queryString: string = `ALTER TABLE ${tableName} `;
           let count: number = 0;
 
           for (const fk in keyObject[tableName].foreignKeyColumns) {
-            let primaryTable: string = keyObject[tableName].foreignKeyColumns[fk];
-            let primaryKey: any = Object.keys(keyObject[primaryTable].primaryKeyColumns)[0];
+            let primaryTable: string =
+              keyObject[tableName].foreignKeyColumns[fk];
+            let primaryKey: any = Object.keys(
+              keyObject[primaryTable].primaryKeyColumns
+            )[0];
             if (count > 0) queryString += `, `;
             queryString += `ADD CONSTRAINT "${tableName}_fk${count}" FOREIGN KEY ("${fk}") REFERENCES ${primaryTable}("${primaryKey}")`;
             count += 1;
           }
-
+
           queryString += `;`;
           // wait for the previous query to return before moving on to the next table
           await pool.query(queryString);
@@ -280,5 +287,5 @@
       }
     }
     return;
-  }
-}
\ No newline at end of file
+  },
+};
diff --git a/package.json b/package.json
index 6992a92f..1327a46a 100644
--- a/package.json
+++ b/package.json
@@ -32,7 +32,7 @@
     "chart.js": "^2.9.3",
     "codemirror": "^5.57.0",
     "concurrently": "^5.3.0",
-    "cross-env": "^7.0.2",
+    "cross-env": "^7.0.3",
     "electron-store": "^6.0.0",
     "faker": "^5.1.0",
     "fix-path": "^3.0.0",

From 89a17891bf6d1330fb07cdd19fd9af9b22f4e5ac Mon Sep 17 00:00:00 2001
From: Jennifer Courtner
Date: Thu, 10 Dec 2020 12:19:05 -0500
Subject: [PATCH 02/34] added comments about changes

---
 backend/channels.ts | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/backend/channels.ts b/backend/channels.ts
index 50bff6e5..f0dbc77c 100644
--- a/backend/channels.ts
+++ b/backend/channels.ts
@@ -15,6 +15,8 @@ const createDBFunc = (name) => {
 
   // return `docker exec postgres-1 psql -h localhost -p 5432 -U postgres -c "CREATE DATABASE ${name}"`;
 };
+
+// added "name" as a parameter for importFileFunc
 const importFileFunc = (name, file) => {
   return `psql -U postgres ${name} < ${file}`;
 
   // return `docker cp ${file} postgres-1:/data_dump`;
 };
@@ -92,6 +94,8 @@ ipcMain.on('upload-file', (event, filePath: string) => {
   }
 
   const createDB: string = createDBFunc(dbName);
+
+  // added dbName to importFile
   const importFile: string = importFileFunc(dbName, filePath);
   const runSQL: string = runSQLFunc(dbName);
   const runTAR: string = runTARFunc(dbName);
@@ -155,6 +159,7 @@ ipcMain.on('input-schema', (event, data: SchemaType) => {
 
   // generate strings that are fed into execute functions later
   const createDB: string = createDBFunc(dbName);
+  //added dbName to importFile
   const importFile: string = importFileFunc(dbName, filePath);
   const runSQL: string = runSQLFunc(dbName);
   const runTAR: string = runTARFunc(dbName);

From 1a30c29544e16eac7ce4a9a7917feffc076db414 Mon Sep 17 00:00:00 2001
From: kklochan <32072621+kklochan@users.noreply.github.com>
Date: Thu, 10 Dec 2020 12:22:56 -0500
Subject: [PATCH 03/34] Delete channels.txt

---
 backend/channels.txt | 353 -------------------------------------------
 1 file changed, 353 deletions(-)
 delete mode 100644 backend/channels.txt

diff --git a/backend/channels.txt b/backend/channels.txt
deleted file mode 100644
index 50bff6e5..00000000
--- a/backend/channels.txt
+++ /dev/null
@@ -1,353 +0,0 @@
-// Import parts of electron to use
-import { dialog, ipcMain } from 'electron';
-
-const { generateDummyData, writeCSVFile } = require('./DummyD/dummyDataMain');
-const { exec } = require('child_process');
-const db = require('./models');
-
-/************************************************************ - *********************** Helper functions ******************* - ************************************************************/ - -// Generate CLI commands to be executed in child process. -const createDBFunc = (name) => { - return `psql -U postgres -c "CREATE DATABASE ${name}"`; - - // return `docker exec postgres-1 psql -h localhost -p 5432 -U postgres -c "CREATE DATABASE ${name}"`; -}; -const importFileFunc = (name, file) => { - return `psql -U postgres ${name} < ${file}`; - - // return `docker cp ${file} postgres-1:/data_dump`; -}; -const runSQLFunc = (dbName) => { - return `docker exec postgres-1 psql -U postgres -d ${dbName} -f /data_dump`; -}; -const runTARFunc = (dbName) => { - return `docker exec postgres-1 pg_restore -U postgres -d ${dbName} /data_dump`; -}; -const runFullCopyFunc = (dbCopyName) => { - return `docker exec postgres-1 pg_dump -U postgres ${dbCopyName} -f /data_dump`; -}; -const runHollowCopyFunc = (dbCopyName) => { - return `docker exec postgres-1 pg_dump -s -U postgres ${dbCopyName} -f /data_dump`; -}; - -// Function to execute commands in the child process. -const execute = (str: string, nextStep: any) => { - exec(str, (error, stdout, stderr) => { - console.log('exec func', `${stdout}`); - if (error) { - //this shows the console error in an error message on the frontend - dialog.showErrorBox(`${error.message}`, ''); - console.log(`error: ${error.message}`); - return; - } - if (stderr) { - //this shows the console error in an error message on the frontend - dialog.showErrorBox(`${stderr}`, ''); - console.log(`stderr: ${stderr}`); - return; - } - - if (nextStep) nextStep(); - }); -}; - -/************************************************************ - *********************** IPC CHANNELS *********************** - ************************************************************/ - -// Global variable to store list of databases and tables to provide to frontend upon refreshing view. -let listObj: any; - -ipcMain.on('return-db-list', (event, args) => { - db.getLists().then((data) => event.sender.send('db-lists', data)); -}); - -// Listen for skip button on Splash page. -ipcMain.on('skip-file-upload', (event) => {}); - -// Listen for database changes sent from the renderer upon changing tabs. -ipcMain.on('change-db', (event, dbName) => { - db.changeDB(dbName); -}); - -// Listen for file upload. Create an instance of database from pre-made .tar or .sql file. -ipcMain.on('upload-file', (event, filePath: string) => { - // send notice to the frontend that async process has begun - event.sender.send('async-started'); - - let dbName: string; - if (process.platform === 'darwin') { - dbName = filePath[0].slice( - filePath[0].lastIndexOf('/') + 1, - filePath[0].lastIndexOf('.') - ); - } else { - dbName = filePath[0].slice( - filePath[0].lastIndexOf('\\') + 1, - filePath[0].lastIndexOf('.') - ); - } - - const createDB: string = createDBFunc(dbName); - const importFile: string = importFileFunc(dbName, filePath); - const runSQL: string = runSQLFunc(dbName); - const runTAR: string = runTARFunc(dbName); - const extension: string = filePath[0].slice(filePath[0].lastIndexOf('.')); - - // SEQUENCE OF EXECUTING COMMANDS - // Steps are in reverse order because each step is a callback function that requires the following step to be defined. - - // Step 5: Changes the pg URI the newly created database, queries new database, then sends list of tables and list of databases to frontend. 
- async function sendLists() { - listObj = await db.getLists(); - console.log('channels: ', listObj); - event.sender.send('db-lists', listObj); - // Send schema name back to frontend, so frontend can load tab name. - event.sender.send('return-schema-name', dbName); - // tell the front end to switch tabs to the newly created database - event.sender.send('switch-to-new', null); - // notify frontend that async process has been completed - event.sender.send('async-complete'); - } - - // Step 4: Given the file path extension, run the appropriate command in postgres to populate db. - const step4 = () => { - let runCmd: string = ''; - if (extension === '.sql') runCmd = runSQL; - else if (extension === '.tar') runCmd = runTAR; - execute(runCmd, sendLists); - }; - - // Step 3: Import database file from file path into docker container - const step3 = () => execute(importFile, step4); - - // Step 2: Change curent URI to match newly created DB - const step2 = () => { - db.changeDB(dbName); - return step3(); - }; - - // Step 1: Create empty db - if (extension === '.sql' || extension === '.tar') execute(createDB, step2); - else console.log('INVALID FILE TYPE: Please use .tar or .sql extensions.'); -}); - -interface SchemaType { - schemaName: string; - schemaFilePath: string[]; - schemaEntry: string; - dbCopyName: string; - copy: boolean; -} - -// The following function creates an instance of database from pre-made .tar or .sql file. -// OR -// Listens for and handles DB copying events -ipcMain.on('input-schema', (event, data: SchemaType) => { - // send notice to the frontend that async process has begun - event.sender.send('async-started'); - - const { schemaName: dbName, dbCopyName, copy } = data; - let { schemaFilePath: filePath } = data; - - // generate strings that are fed into execute functions later - const createDB: string = createDBFunc(dbName); - const importFile: string = importFileFunc(dbName, filePath); - const runSQL: string = runSQLFunc(dbName); - const runTAR: string = runTARFunc(dbName); - const runFullCopy: string = runFullCopyFunc(dbCopyName); - const runHollowCopy: string = runHollowCopyFunc(dbCopyName); - - // determine if the file is a sql or a tar file, in the case of a copy, we will not have a filepath so we just hard-code the extension to be sql - let extension: string = ''; - if (filePath.length > 0) { - extension = filePath[0].slice(filePath[0].lastIndexOf('.')); - } else extension = '.sql'; - - // SEQUENCE OF EXECUTING COMMANDS - // Steps are in reverse order because each step is a callback function that requires the following step to be defined. - - // Step 5: Changes the pg URI to look to the newly created database and queries all the tables in that database and sends it to frontend. 
- async function sendLists() { - listObj = await db.getLists(); - event.sender.send('db-lists', listObj); - // tell the front end to switch tabs to the newly created database - event.sender.send('switch-to-new', null); - // notify frontend that async process has been completed - event.sender.send('async-complete'); - } - - // Step 4: Given the file path extension, run the appropriate command in postgres to build the db - const step4 = () => { - let runCmd: string = ''; - if (extension === '.sql') runCmd = runSQL; - else if (extension === '.tar') runCmd = runTAR; - execute(runCmd, sendLists); - }; - - // Step 3: Import database file from file path into docker container - const step3 = () => execute(importFile, step4); - // skip step three which is only for importing files and instead change the current db to the newly created one - const step3Copy = () => { - db.changeDB(dbName); - return step4(); - }; - - // Step 2: Change curent URI to match newly created DB - const step2 = () => { - // if we are copying - if (copy !== undefined) { - // first, we need to change the current DB instance to that of the one we need to copy, so we'll head to the changeDB function in the models file - db.changeDB(dbCopyName); - // now that our DB has been changed to the one we wish to copy, we need to either make an exact copy or a hollow copy using pg_dump OR pg_dump -s - // this generates a pg_dump file from the specified db and saves it to a location in the container. - // Full copy case - if (copy) { - execute(runFullCopy, step3Copy); - } - // Hollow copy case - else execute(runHollowCopy, step3Copy); - return; - } - // if we are not copying - else { - // change the current database back to the newly created one - // and now that we have changed to the new db, we can move on to importing the data file - db.changeDB(dbName); - return step3(); - } - }; - - // Step 1 : Create empty db - execute(createDB, step2); -}); - -interface QueryType { - queryCurrentSchema: string; - queryString: string; - queryLabel: string; - queryData: string; - queryStatistics: string; -} - -ipcMain.on('execute-query-untracked', (event, data: QueryType) => { - // send notice to front end that query has been started - event.sender.send('async-started'); - - // destructure object from frontend - const { queryString } = data; - // run query on db - db.query(queryString) - .then(() => { - (async function getListAsync() { - listObj = await db.getLists(); - event.sender.send('db-lists', listObj); - event.sender.send('async-complete'); - })(); - }) - .catch((error: string) => { - console.log('ERROR in execute-query-untracked channel in main.ts', error); - event.sender.send('query-error', 'Error executing query.'); - }); -}); - -// Listen for queries being sent from renderer -ipcMain.on('execute-query-tracked', (event, data: QueryType) => { - // send notice to front end that query has been started - event.sender.send('async-started'); - - // destructure object from frontend - const { queryString, queryCurrentSchema, queryLabel } = data; - - // initialize object to store all data to send to frontend - let frontendData = { - queryString, - queryCurrentSchema, - queryLabel, - queryData: '', - queryStatistics: '', - lists: {}, - }; - - // Run select * from actors; - db.query(queryString) - .then((queryData) => { - frontendData.queryData = queryData.rows; - if (!queryString.match(/create/i)) { - // Run EXPLAIN (FORMAT JSON, ANALYZE) - db.query('EXPLAIN (FORMAT JSON, ANALYZE) ' + queryString).then( - (queryStats) => { - 
frontendData.queryStatistics = queryStats.rows; - - (async function getListAsync() { - listObj = await db.getLists(); - frontendData.lists = listObj; - event.sender.send('db-lists', listObj); - event.sender.send('return-execute-query', frontendData); - event.sender.send('async-complete'); - })(); - } - ); - } else { - // Handling for tracking a create table query, can't run explain/analyze on create statements - (async function getListAsync() { - listObj = await db.getLists(); - frontendData.lists = listObj; - event.sender.send('db-lists', listObj); - event.sender.send('async-complete'); - })(); - } - }) - .catch((error: string) => { - console.log('ERROR in execute-query-tracked channel in main.ts', error); - }); -}); - -interface dummyDataRequest { - schemaName: string; - dummyData: {}; -} - -ipcMain.on('generate-dummy-data', (event: any, data: dummyDataRequest) => { - // send notice to front end that DD generation has been started - event.sender.send('async-started'); - - let schemaLayout: any; - let dummyDataRequest: dummyDataRequest = data; - let tableMatricesArray: any; - let keyObject: any = 'Unresolved'; - - db.createKeyObject().then((result) => { - // set keyObject equal to the result of this query - keyObject = result; - db.dropKeyColumns(keyObject).then(() => { - db.addNewKeyColumns(keyObject).then(() => { - db.getSchemaLayout().then((result) => { - schemaLayout = result; - // generate the dummy data and save it into matrices associated with table names - tableMatricesArray = generateDummyData( - schemaLayout, - dummyDataRequest, - keyObject - ); - //iterate through tableMatricesArray to write individual .csv files - for (const tableObject of tableMatricesArray) { - // write all entries in tableMatrix to csv file - writeCSVFile( - tableObject, - schemaLayout, - keyObject, - dummyDataRequest, - event - ); - } - }); - }); - }); - }); -}); - -export default execute; From 4939737cca9eec68344284233a66cb8ab079f827 Mon Sep 17 00:00:00 2001 From: Jennifer Courtner Date: Thu, 10 Dec 2020 14:44:26 -0500 Subject: [PATCH 04/34] deleted channels.txt --- backend/channels.txt | 353 ------------------------------------------- 1 file changed, 353 deletions(-) delete mode 100644 backend/channels.txt diff --git a/backend/channels.txt b/backend/channels.txt deleted file mode 100644 index 50bff6e5..00000000 --- a/backend/channels.txt +++ /dev/null @@ -1,353 +0,0 @@ -// Import parts of electron to use -import { dialog, ipcMain } from 'electron'; - -const { generateDummyData, writeCSVFile } = require('./DummyD/dummyDataMain'); -const { exec } = require('child_process'); -const db = require('./models'); - -/************************************************************ - *********************** Helper functions ******************* - ************************************************************/ - -// Generate CLI commands to be executed in child process. 
-const createDBFunc = (name) => { - return `psql -U postgres -c "CREATE DATABASE ${name}"`; - - // return `docker exec postgres-1 psql -h localhost -p 5432 -U postgres -c "CREATE DATABASE ${name}"`; -}; -const importFileFunc = (name, file) => { - return `psql -U postgres ${name} < ${file}`; - - // return `docker cp ${file} postgres-1:/data_dump`; -}; -const runSQLFunc = (dbName) => { - return `docker exec postgres-1 psql -U postgres -d ${dbName} -f /data_dump`; -}; -const runTARFunc = (dbName) => { - return `docker exec postgres-1 pg_restore -U postgres -d ${dbName} /data_dump`; -}; -const runFullCopyFunc = (dbCopyName) => { - return `docker exec postgres-1 pg_dump -U postgres ${dbCopyName} -f /data_dump`; -}; -const runHollowCopyFunc = (dbCopyName) => { - return `docker exec postgres-1 pg_dump -s -U postgres ${dbCopyName} -f /data_dump`; -}; - -// Function to execute commands in the child process. -const execute = (str: string, nextStep: any) => { - exec(str, (error, stdout, stderr) => { - console.log('exec func', `${stdout}`); - if (error) { - //this shows the console error in an error message on the frontend - dialog.showErrorBox(`${error.message}`, ''); - console.log(`error: ${error.message}`); - return; - } - if (stderr) { - //this shows the console error in an error message on the frontend - dialog.showErrorBox(`${stderr}`, ''); - console.log(`stderr: ${stderr}`); - return; - } - - if (nextStep) nextStep(); - }); -}; - -/************************************************************ - *********************** IPC CHANNELS *********************** - ************************************************************/ - -// Global variable to store list of databases and tables to provide to frontend upon refreshing view. -let listObj: any; - -ipcMain.on('return-db-list', (event, args) => { - db.getLists().then((data) => event.sender.send('db-lists', data)); -}); - -// Listen for skip button on Splash page. -ipcMain.on('skip-file-upload', (event) => {}); - -// Listen for database changes sent from the renderer upon changing tabs. -ipcMain.on('change-db', (event, dbName) => { - db.changeDB(dbName); -}); - -// Listen for file upload. Create an instance of database from pre-made .tar or .sql file. -ipcMain.on('upload-file', (event, filePath: string) => { - // send notice to the frontend that async process has begun - event.sender.send('async-started'); - - let dbName: string; - if (process.platform === 'darwin') { - dbName = filePath[0].slice( - filePath[0].lastIndexOf('/') + 1, - filePath[0].lastIndexOf('.') - ); - } else { - dbName = filePath[0].slice( - filePath[0].lastIndexOf('\\') + 1, - filePath[0].lastIndexOf('.') - ); - } - - const createDB: string = createDBFunc(dbName); - const importFile: string = importFileFunc(dbName, filePath); - const runSQL: string = runSQLFunc(dbName); - const runTAR: string = runTARFunc(dbName); - const extension: string = filePath[0].slice(filePath[0].lastIndexOf('.')); - - // SEQUENCE OF EXECUTING COMMANDS - // Steps are in reverse order because each step is a callback function that requires the following step to be defined. - - // Step 5: Changes the pg URI the newly created database, queries new database, then sends list of tables and list of databases to frontend. - async function sendLists() { - listObj = await db.getLists(); - console.log('channels: ', listObj); - event.sender.send('db-lists', listObj); - // Send schema name back to frontend, so frontend can load tab name. 
- event.sender.send('return-schema-name', dbName); - // tell the front end to switch tabs to the newly created database - event.sender.send('switch-to-new', null); - // notify frontend that async process has been completed - event.sender.send('async-complete'); - } - - // Step 4: Given the file path extension, run the appropriate command in postgres to populate db. - const step4 = () => { - let runCmd: string = ''; - if (extension === '.sql') runCmd = runSQL; - else if (extension === '.tar') runCmd = runTAR; - execute(runCmd, sendLists); - }; - - // Step 3: Import database file from file path into docker container - const step3 = () => execute(importFile, step4); - - // Step 2: Change curent URI to match newly created DB - const step2 = () => { - db.changeDB(dbName); - return step3(); - }; - - // Step 1: Create empty db - if (extension === '.sql' || extension === '.tar') execute(createDB, step2); - else console.log('INVALID FILE TYPE: Please use .tar or .sql extensions.'); -}); - -interface SchemaType { - schemaName: string; - schemaFilePath: string[]; - schemaEntry: string; - dbCopyName: string; - copy: boolean; -} - -// The following function creates an instance of database from pre-made .tar or .sql file. -// OR -// Listens for and handles DB copying events -ipcMain.on('input-schema', (event, data: SchemaType) => { - // send notice to the frontend that async process has begun - event.sender.send('async-started'); - - const { schemaName: dbName, dbCopyName, copy } = data; - let { schemaFilePath: filePath } = data; - - // generate strings that are fed into execute functions later - const createDB: string = createDBFunc(dbName); - const importFile: string = importFileFunc(dbName, filePath); - const runSQL: string = runSQLFunc(dbName); - const runTAR: string = runTARFunc(dbName); - const runFullCopy: string = runFullCopyFunc(dbCopyName); - const runHollowCopy: string = runHollowCopyFunc(dbCopyName); - - // determine if the file is a sql or a tar file, in the case of a copy, we will not have a filepath so we just hard-code the extension to be sql - let extension: string = ''; - if (filePath.length > 0) { - extension = filePath[0].slice(filePath[0].lastIndexOf('.')); - } else extension = '.sql'; - - // SEQUENCE OF EXECUTING COMMANDS - // Steps are in reverse order because each step is a callback function that requires the following step to be defined. - - // Step 5: Changes the pg URI to look to the newly created database and queries all the tables in that database and sends it to frontend. 
- async function sendLists() { - listObj = await db.getLists(); - event.sender.send('db-lists', listObj); - // tell the front end to switch tabs to the newly created database - event.sender.send('switch-to-new', null); - // notify frontend that async process has been completed - event.sender.send('async-complete'); - } - - // Step 4: Given the file path extension, run the appropriate command in postgres to build the db - const step4 = () => { - let runCmd: string = ''; - if (extension === '.sql') runCmd = runSQL; - else if (extension === '.tar') runCmd = runTAR; - execute(runCmd, sendLists); - }; - - // Step 3: Import database file from file path into docker container - const step3 = () => execute(importFile, step4); - // skip step three which is only for importing files and instead change the current db to the newly created one - const step3Copy = () => { - db.changeDB(dbName); - return step4(); - }; - - // Step 2: Change curent URI to match newly created DB - const step2 = () => { - // if we are copying - if (copy !== undefined) { - // first, we need to change the current DB instance to that of the one we need to copy, so we'll head to the changeDB function in the models file - db.changeDB(dbCopyName); - // now that our DB has been changed to the one we wish to copy, we need to either make an exact copy or a hollow copy using pg_dump OR pg_dump -s - // this generates a pg_dump file from the specified db and saves it to a location in the container. - // Full copy case - if (copy) { - execute(runFullCopy, step3Copy); - } - // Hollow copy case - else execute(runHollowCopy, step3Copy); - return; - } - // if we are not copying - else { - // change the current database back to the newly created one - // and now that we have changed to the new db, we can move on to importing the data file - db.changeDB(dbName); - return step3(); - } - }; - - // Step 1 : Create empty db - execute(createDB, step2); -}); - -interface QueryType { - queryCurrentSchema: string; - queryString: string; - queryLabel: string; - queryData: string; - queryStatistics: string; -} - -ipcMain.on('execute-query-untracked', (event, data: QueryType) => { - // send notice to front end that query has been started - event.sender.send('async-started'); - - // destructure object from frontend - const { queryString } = data; - // run query on db - db.query(queryString) - .then(() => { - (async function getListAsync() { - listObj = await db.getLists(); - event.sender.send('db-lists', listObj); - event.sender.send('async-complete'); - })(); - }) - .catch((error: string) => { - console.log('ERROR in execute-query-untracked channel in main.ts', error); - event.sender.send('query-error', 'Error executing query.'); - }); -}); - -// Listen for queries being sent from renderer -ipcMain.on('execute-query-tracked', (event, data: QueryType) => { - // send notice to front end that query has been started - event.sender.send('async-started'); - - // destructure object from frontend - const { queryString, queryCurrentSchema, queryLabel } = data; - - // initialize object to store all data to send to frontend - let frontendData = { - queryString, - queryCurrentSchema, - queryLabel, - queryData: '', - queryStatistics: '', - lists: {}, - }; - - // Run select * from actors; - db.query(queryString) - .then((queryData) => { - frontendData.queryData = queryData.rows; - if (!queryString.match(/create/i)) { - // Run EXPLAIN (FORMAT JSON, ANALYZE) - db.query('EXPLAIN (FORMAT JSON, ANALYZE) ' + queryString).then( - (queryStats) => { - 
frontendData.queryStatistics = queryStats.rows; - - (async function getListAsync() { - listObj = await db.getLists(); - frontendData.lists = listObj; - event.sender.send('db-lists', listObj); - event.sender.send('return-execute-query', frontendData); - event.sender.send('async-complete'); - })(); - } - ); - } else { - // Handling for tracking a create table query, can't run explain/analyze on create statements - (async function getListAsync() { - listObj = await db.getLists(); - frontendData.lists = listObj; - event.sender.send('db-lists', listObj); - event.sender.send('async-complete'); - })(); - } - }) - .catch((error: string) => { - console.log('ERROR in execute-query-tracked channel in main.ts', error); - }); -}); - -interface dummyDataRequest { - schemaName: string; - dummyData: {}; -} - -ipcMain.on('generate-dummy-data', (event: any, data: dummyDataRequest) => { - // send notice to front end that DD generation has been started - event.sender.send('async-started'); - - let schemaLayout: any; - let dummyDataRequest: dummyDataRequest = data; - let tableMatricesArray: any; - let keyObject: any = 'Unresolved'; - - db.createKeyObject().then((result) => { - // set keyObject equal to the result of this query - keyObject = result; - db.dropKeyColumns(keyObject).then(() => { - db.addNewKeyColumns(keyObject).then(() => { - db.getSchemaLayout().then((result) => { - schemaLayout = result; - // generate the dummy data and save it into matrices associated with table names - tableMatricesArray = generateDummyData( - schemaLayout, - dummyDataRequest, - keyObject - ); - //iterate through tableMatricesArray to write individual .csv files - for (const tableObject of tableMatricesArray) { - // write all entries in tableMatrix to csv file - writeCSVFile( - tableObject, - schemaLayout, - keyObject, - dummyDataRequest, - event - ); - } - }); - }); - }); - }); -}); - -export default execute; From d606e7b9c2c70cf89c9d4d0ee40a91e4049f4093 Mon Sep 17 00:00:00 2001 From: Katie Klochan Date: Thu, 10 Dec 2020 14:45:24 -0500 Subject: [PATCH 05/34] package.json diff --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 6992a92f..1327a46a 100644 --- a/package.json +++ b/package.json @@ -32,7 +32,7 @@ "chart.js": "^2.9.3", "codemirror": "^5.57.0", "concurrently": "^5.3.0", - "cross-env": "^7.0.2", + "cross-env": "^7.0.3", "electron-store": "^6.0.0", "faker": "^5.1.0", "fix-path": "^3.0.0", From 8a215136d349a1682228fe98a238355dafa56e7d Mon Sep 17 00:00:00 2001 From: Katie Klochan Date: Thu, 10 Dec 2020 20:06:25 -0500 Subject: [PATCH 06/34] commented out docker functionality so the app is spinning up without docker --- backend/channels.ts | 12 ++++--- backend/main.ts | 82 +++++++++++++++++++++++---------------------- 2 files changed, 50 insertions(+), 44 deletions(-) diff --git a/backend/channels.ts b/backend/channels.ts index f0dbc77c..ae0f0ffc 100644 --- a/backend/channels.ts +++ b/backend/channels.ts @@ -23,16 +23,20 @@ const importFileFunc = (name, file) => { // return `docker cp ${file} postgres-1:/data_dump`; }; const runSQLFunc = (dbName) => { - return `docker exec postgres-1 psql -U postgres -d ${dbName} -f /data_dump`; + return `psql -U postgres -d ${dbName} -f /data_dump`; + // return `docker exec postgres-1 psql -U postgres -d ${dbName} -f /data_dump`; }; const runTARFunc = (dbName) => { - return `docker exec postgres-1 pg_restore -U postgres -d ${dbName} /data_dump`; + return `psql -U postgres -d ${dbName} -f /data_dump`; + // return `docker exec 
postgres-1 pg_restore -U postgres -d ${dbName} /data_dump`; }; const runFullCopyFunc = (dbCopyName) => { - return `docker exec postgres-1 pg_dump -U postgres ${dbCopyName} -f /data_dump`; + return `pg_dump -U postgres ${dbCopyName} -f /data_dump`; + // `docker exec postgres-1 pg_dump -U postgres ${dbCopyName} -f /data_dump`; }; const runHollowCopyFunc = (dbCopyName) => { - return `docker exec postgres-1 pg_dump -s -U postgres ${dbCopyName} -f /data_dump`; + return `pg_dump -s -U postgres ${dbCopyName} -f /data_dump`; + //`docker exec postgres-1 pg_dump -s -U postgres ${dbCopyName} -f /data_dump`; }; // Function to execute commands in the child process. diff --git a/backend/main.ts b/backend/main.ts index 4ecd600d..687e48fc 100644 --- a/backend/main.ts +++ b/backend/main.ts @@ -77,50 +77,52 @@ function createWindow() { // Don't show until we are ready and loaded mainWindow.once('ready-to-show', (event) => { mainWindow.show(); - // uncomment code below before running production build and packaging - // const yamlPath = join(__dirname, '../../docker-compose.yml') - // const runDocker: string = `docker-compose -f '${yamlPath}' up -d`; - const runDocker: string = `docker-compose up -d`; - exec(runDocker, (error, stdout, stderr) => { - if (error) { - console.log(`error: ${error.message}`); - return; - } - if (stderr) { - console.log(`stderr: ${stderr}`); - return; - } - console.log(`${stdout}`); - }) - }); -} -app.on('before-quit', (event: any) => { - // check if containers have already been pruned--else, continue with default behavior to terminate application - if (!pruned) { - event.preventDefault(); - // Stop and remove postgres-1 and busybox-1 Docker containers upon window exit. - const stopContainers: string = 'docker stop postgres-1 busybox-1'; - const pruneContainers: string = 'docker rm -f postgres-1 busybox-1'; - // this command removes the volume which stores the session data for the postgres instance - // comment this out for dev - const pruneVolumes: string = 'docker volume rm -f seeqr_database-data'; - - // use this string for production build - // const pruneVolumes: string = 'docker volume rm -f app_database-data' - - const step4 = () => { - pruned = true; - app.quit() - }; - const step3 = () => execute(pruneVolumes, step4); - const step2 = () => execute(pruneContainers, step3); - - execute(stopContainers, step2); - } +// // uncomment code below before running production build and packaging +// // const yamlPath = join(__dirname, '../../docker-compose.yml') +// // const runDocker: string = `docker-compose -f '${yamlPath}' up -d`; +// const runDocker: string = `docker-compose up -d`; +// exec(runDocker, (error, stdout, stderr) => { +// if (error) { +// console.log(`error: ${error.message}`); +// return; +// } +// if (stderr) { +// console.log(`stderr: ${stderr}`); +// return; +// } +// console.log(`${stdout}`); +// }) +// }); + }) +// app.on('before-quit', (event: any) => { +// // check if containers have already been pruned--else, continue with default behavior to terminate application +// if (!pruned) { +// event.preventDefault(); +// // Stop and remove postgres-1 and busybox-1 Docker containers upon window exit. 
+// const stopContainers: string = 'docker stop postgres-1 busybox-1'; +// const pruneContainers: string = 'docker rm -f postgres-1 busybox-1'; +// // this command removes the volume which stores the session data for the postgres instance +// // comment this out for dev +// const pruneVolumes: string = 'docker volume rm -f seeqr_database-data'; + +// // use this string for production build +// // const pruneVolumes: string = 'docker volume rm -f app_database-data' + +// const step4 = () => { +// pruned = true; +// app.quit() +// }; +// const step3 = () => execute(pruneVolumes, step4); +// const step2 = () => execute(pruneContainers, step3); + +// execute(stopContainers, step2); +// } +// }) +} // Invoke createWindow to create browser windows after Electron has been initialized. // Some APIs can only be used after this event occurs. app.on('ready', createWindow); From cfb126d647c36f2c5ae9bd3aeaa65ac34ac1a9c1 Mon Sep 17 00:00:00 2001 From: Jennifer Courtner Date: Fri, 11 Dec 2020 09:33:01 -0500 Subject: [PATCH 07/34] figuring out data analytics --- backend/channels.ts | 3 ++- backend/main.ts | 18 +++++++++++------- package.json | 1 + 3 files changed, 14 insertions(+), 8 deletions(-) diff --git a/backend/channels.ts b/backend/channels.ts index f0dbc77c..42cf4709 100644 --- a/backend/channels.ts +++ b/backend/channels.ts @@ -286,7 +286,8 @@ ipcMain.on('execute-query-tracked', (event, data: QueryType) => { db.query('EXPLAIN (FORMAT JSON, ANALYZE) ' + queryString).then( (queryStats) => { frontendData.queryStatistics = queryStats.rows; - + console.log('query stats ROWS: '); + console.table(queryStats.rows[0]['QUERY PLAN']); (async function getListAsync() { listObj = await db.getLists(); frontendData.lists = listObj; diff --git a/backend/main.ts b/backend/main.ts index 4ecd600d..563fbc81 100644 --- a/backend/main.ts +++ b/backend/main.ts @@ -31,7 +31,10 @@ let pruned: boolean = false; let mainMenu = Menu.buildFromTemplate(require('./mainMenu')); // Keep a reference for dev mode let dev = false; -if (process.env.NODE_ENV !== undefined && process.env.NODE_ENV === 'development') { +if ( + process.env.NODE_ENV !== undefined && + process.env.NODE_ENV === 'development' +) { dev = true; } @@ -49,7 +52,9 @@ function createWindow() { }); if (process.platform === 'darwin') { - app.dock.setIcon(path.join(__dirname, '../../frontend/assets/images/seeqr_dock.png')); + app.dock.setIcon( + path.join(__dirname, '../../frontend/assets/images/seeqr_dock.png') + ); } // Load index.html of the app @@ -91,7 +96,7 @@ function createWindow() { return; } console.log(`${stdout}`); - }) + }); }); } @@ -111,15 +116,14 @@ app.on('before-quit', (event: any) => { const step4 = () => { pruned = true; - app.quit() + app.quit(); }; const step3 = () => execute(pruneVolumes, step4); const step2 = () => execute(pruneContainers, step3); execute(stopContainers, step2); } -}) - +}); // Invoke createWindow to create browser windows after Electron has been initialized. // Some APIs can only be used after this event occurs. 
@@ -142,4 +146,4 @@ app.on('activate', () => { } }); -export default mainWindow; \ No newline at end of file +export default mainWindow; diff --git a/package.json b/package.json index 1327a46a..5cddb318 100644 --- a/package.json +++ b/package.json @@ -66,6 +66,7 @@ "electron": "^9.3.1", "electron-devtools-installer": "^3.0.0", "electron-packager": "^14.2.1", + "electron-reloader": "^1.1.0", "enzyme": "^3.11.0", "enzyme-adapter-react-16": "^1.15.5", "file-loader": "^6.0.0", From 10f7f23a7933a3d76366379a0b2f5cd30d2c1174 Mon Sep 17 00:00:00 2001 From: Katie Klochan Date: Sat, 12 Dec 2020 16:40:44 -0500 Subject: [PATCH 08/34] added Docker back in for testing. Need to figure out how to make it work without Docker --- backend/channels.ts | 31 ++++++++------ backend/main.ts | 100 +++++++++++++++++++++++--------------------- 2 files changed, 72 insertions(+), 59 deletions(-) diff --git a/backend/channels.ts b/backend/channels.ts index ae0f0ffc..7e2dc2a9 100644 --- a/backend/channels.ts +++ b/backend/channels.ts @@ -13,30 +13,35 @@ const db = require('./models'); const createDBFunc = (name) => { return `psql -U postgres -c "CREATE DATABASE ${name}"`; - // return `docker exec postgres-1 psql -h localhost -p 5432 -U postgres -c "CREATE DATABASE ${name}"`; + //return `docker exec postgres-1 psql -h localhost -p 5432 -U postgres -c "CREATE DATABASE ${name}"`; }; // added "name" as a parameter for importFileFunc const importFileFunc = (name, file) => { + console.log('inside importFile Func'); return `psql -U postgres ${name} < ${file}`; // return `docker cp ${file} postgres-1:/data_dump`; }; -const runSQLFunc = (dbName) => { - return `psql -U postgres -d ${dbName} -f /data_dump`; +// added file param: +const runSQLFunc = (dbName, file) => { + return `psql -U postgres -d ${dbName} -f ${file}`; // replaced /data_dump with ${file}; + // return `docker exec postgres-1 psql -U postgres -d ${dbName} -f /data_dump`; }; -const runTARFunc = (dbName) => { - return `psql -U postgres -d ${dbName} -f /data_dump`; - // return `docker exec postgres-1 pg_restore -U postgres -d ${dbName} /data_dump`; +// added file param: +const runTARFunc = (dbName, file) => { + return `pg_restore -U postgres -d ${dbName} -f ${file}`; // replaced /data_dump with ${file}`; + // docker exec postgres-1 pg_restore -U postgres -d ${dbName} /data_dump`; }; const runFullCopyFunc = (dbCopyName) => { return `pg_dump -U postgres ${dbCopyName} -f /data_dump`; - // `docker exec postgres-1 pg_dump -U postgres ${dbCopyName} -f /data_dump`; + // docker exec postgres-1 pg_dump -U postgres ${dbCopyName} -f /data_dump`; + // }; const runHollowCopyFunc = (dbCopyName) => { return `pg_dump -s -U postgres ${dbCopyName} -f /data_dump`; - //`docker exec postgres-1 pg_dump -s -U postgres ${dbCopyName} -f /data_dump`; + // docker exec postgres-1 pg_dump -s -U postgres ${dbCopyName} -f /data_dump`; }; // Function to execute commands in the child process. 
@@ -101,8 +106,8 @@ ipcMain.on('upload-file', (event, filePath: string) => { // added dbName to importFile const importFile: string = importFileFunc(dbName, filePath); - const runSQL: string = runSQLFunc(dbName); - const runTAR: string = runTARFunc(dbName); + const runSQL: string = runSQLFunc(dbName, filePath); // added filepath + const runTAR: string = runTARFunc(dbName, filePath); //added filepath const extension: string = filePath[0].slice(filePath[0].lastIndexOf('.')); // SEQUENCE OF EXECUTING COMMANDS @@ -130,6 +135,7 @@ ipcMain.on('upload-file', (event, filePath: string) => { }; // Step 3: Import database file from file path into docker container + // Edit: We changed the functionality to create a file on the local machine instead of adding it to the docker container const step3 = () => execute(importFile, step4); // Step 2: Change curent URI to match newly created DB @@ -165,8 +171,8 @@ ipcMain.on('input-schema', (event, data: SchemaType) => { const createDB: string = createDBFunc(dbName); //added dbName to importFile const importFile: string = importFileFunc(dbName, filePath); - const runSQL: string = runSQLFunc(dbName); - const runTAR: string = runTARFunc(dbName); + const runSQL: string = runSQLFunc(dbName, filePath); // added filePath + const runTAR: string = runTARFunc(dbName, filePath); // added filePath const runFullCopy: string = runFullCopyFunc(dbCopyName); const runHollowCopy: string = runHollowCopyFunc(dbCopyName); @@ -198,6 +204,7 @@ ipcMain.on('input-schema', (event, data: SchemaType) => { }; // Step 3: Import database file from file path into docker container + const step3 = () => execute(importFile, step4); // skip step three which is only for importing files and instead change the current db to the newly created one const step3Copy = () => { diff --git a/backend/main.ts b/backend/main.ts index 687e48fc..d1d15020 100644 --- a/backend/main.ts +++ b/backend/main.ts @@ -31,7 +31,10 @@ let pruned: boolean = false; let mainMenu = Menu.buildFromTemplate(require('./mainMenu')); // Keep a reference for dev mode let dev = false; -if (process.env.NODE_ENV !== undefined && process.env.NODE_ENV === 'development') { +if ( + process.env.NODE_ENV !== undefined && + process.env.NODE_ENV === 'development' +) { dev = true; } @@ -49,7 +52,9 @@ function createWindow() { }); if (process.platform === 'darwin') { - app.dock.setIcon(path.join(__dirname, '../../frontend/assets/images/seeqr_dock.png')); + app.dock.setIcon( + path.join(__dirname, '../../frontend/assets/images/seeqr_dock.png') + ); } // Load index.html of the app @@ -78,51 +83,52 @@ function createWindow() { mainWindow.once('ready-to-show', (event) => { mainWindow.show(); -// // uncomment code below before running production build and packaging -// // const yamlPath = join(__dirname, '../../docker-compose.yml') -// // const runDocker: string = `docker-compose -f '${yamlPath}' up -d`; -// const runDocker: string = `docker-compose up -d`; -// exec(runDocker, (error, stdout, stderr) => { -// if (error) { -// console.log(`error: ${error.message}`); -// return; -// } -// if (stderr) { -// console.log(`stderr: ${stderr}`); -// return; -// } -// console.log(`${stdout}`); -// }) -// }); - -}) - -// app.on('before-quit', (event: any) => { -// // check if containers have already been pruned--else, continue with default behavior to terminate application -// if (!pruned) { -// event.preventDefault(); -// // Stop and remove postgres-1 and busybox-1 Docker containers upon window exit. 
-// const stopContainers: string = 'docker stop postgres-1 busybox-1'; -// const pruneContainers: string = 'docker rm -f postgres-1 busybox-1'; -// // this command removes the volume which stores the session data for the postgres instance -// // comment this out for dev -// const pruneVolumes: string = 'docker volume rm -f seeqr_database-data'; - -// // use this string for production build -// // const pruneVolumes: string = 'docker volume rm -f app_database-data' - -// const step4 = () => { -// pruned = true; -// app.quit() -// }; -// const step3 = () => execute(pruneVolumes, step4); -// const step2 = () => execute(pruneContainers, step3); - -// execute(stopContainers, step2); -// } -// }) - + // // uncomment code below before running production build and packaging + // // const yamlPath = join(__dirname, '../../docker-compose.yml') + // // const runDocker: string = `docker-compose -f '${yamlPath}' up -d`; + const runDocker: string = `docker-compose up -d`; + exec(runDocker, (error, stdout, stderr) => { + if (error) { + console.log(`error: ${error.message}`); + return; + } + if (stderr) { + console.log(`stderr: ${stderr}`); + return; + } + console.log(`${stdout}`); + }); + }); } +// ---- +app.on('before-quit', (event: any) => { + console.log('is this the problem?'); + + // check if containers have already been pruned--else, continue with default behavior to terminate application + if (!pruned) { + console.log('what the hell?'); + event.preventDefault(); + // Stop and remove postgres-1 and busybox-1 Docker containers upon window exit. + const stopContainers: string = 'docker stop postgres-1 busybox-1'; + const pruneContainers: string = 'docker rm -f postgres-1 busybox-1'; + // this command removes the volume which stores the session data for the postgres instance + // comment this out for dev + const pruneVolumes: string = 'docker volume rm -f seeqr_database-data'; + + // use this string for production build + // const pruneVolumes: string = 'docker volume rm -f app_database-data' + + const step4 = () => { + pruned = true; + app.quit(); + }; + const step3 = () => execute(pruneVolumes, step4); + const step2 = () => execute(pruneContainers, step3); + + execute(stopContainers, step2); + } +}); +// ---- // Invoke createWindow to create browser windows after Electron has been initialized. // Some APIs can only be used after this event occurs. 
app.on('ready', createWindow); @@ -144,4 +150,4 @@ app.on('activate', () => { } }); -export default mainWindow; \ No newline at end of file +export default mainWindow; From dfe97c594a5faaf528d6f81fc7660ccf6f445080 Mon Sep 17 00:00:00 2001 From: Jennifer Courtner Date: Mon, 14 Dec 2020 10:34:34 -0500 Subject: [PATCH 09/34] changed main.ts to import ./channels instead of execute from ./channels --- backend/main.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/main.ts b/backend/main.ts index 563fbc81..f5668048 100644 --- a/backend/main.ts +++ b/backend/main.ts @@ -3,8 +3,8 @@ import { app, BrowserWindow, ipcMain, Menu } from 'electron'; import { appendFile } from 'fs/promises'; import { join } from 'path'; import { format } from 'url'; -//import './channels' // all channels live here -import execute from './channels'; +import './channels'; // all channels live here +// import execute from './channels'; const { exec } = require('child_process'); const appMenu = require('./mainMenu'); // use appMenu to add options in top menu bar of app From 76ea171818f0ab8757dd96746d8236584a18ffb2 Mon Sep 17 00:00:00 2001 From: Jennifer Courtner Date: Mon, 14 Dec 2020 16:37:37 -0500 Subject: [PATCH 10/34] testing db.query in channels.ts to get database size --- backend/channels.ts | 8 ++++ backend/main.ts | 68 +++++++++++++++---------------- frontend/components/MainPanel.tsx | 53 ++++++++++++++++-------- 3 files changed, 78 insertions(+), 51 deletions(-) diff --git a/backend/channels.ts b/backend/channels.ts index ff04c57b..b413b1c0 100644 --- a/backend/channels.ts +++ b/backend/channels.ts @@ -298,7 +298,10 @@ ipcMain.on('execute-query-tracked', (event, data: QueryType) => { (queryStats) => { frontendData.queryStatistics = queryStats.rows; console.log('query stats ROWS: '); + console.log(queryStats.rows[0]['QUERY PLAN']); + console.log('console.table of queryStats.row[0]'); console.table(queryStats.rows[0]['QUERY PLAN']); + (async function getListAsync() { listObj = await db.getLists(); frontendData.lists = listObj; @@ -308,6 +311,11 @@ ipcMain.on('execute-query-tracked', (event, data: QueryType) => { })(); } ); + db.query( + `SELECT pg_size_pretty(pg_database_size('${queryCurrentSchema}'));` + ).then((queryStats) => { + console.log('this is the size of the DB: ', queryStats); + }); } else { // Handling for tracking a create table query, can't run explain/analyze on create statements (async function getListAsync() { diff --git a/backend/main.ts b/backend/main.ts index b5fc8269..d99daf4d 100644 --- a/backend/main.ts +++ b/backend/main.ts @@ -86,18 +86,18 @@ function createWindow() { // // uncomment code below before running production build and packaging // // const yamlPath = join(__dirname, '../../docker-compose.yml') // // const runDocker: string = `docker-compose -f '${yamlPath}' up -d`; - const runDocker: string = `docker-compose up -d`; - exec(runDocker, (error, stdout, stderr) => { - if (error) { - console.log(`error: ${error.message}`); - return; - } - if (stderr) { - console.log(`stderr: ${stderr}`); - return; - } - console.log(`${stdout}`); - }); + // const runDocker: string = `docker-compose up -d`; + // exec(runDocker, (error, stdout, stderr) => { + // if (error) { + // console.log(`error: ${error.message}`); + // return; + // } + // if (stderr) { + // console.log(`stderr: ${stderr}`); + // return; + // } + // console.log(`${stdout}`); + // }); }); } // ---- @@ -105,28 +105,28 @@ app.on('before-quit', (event: any) => { console.log('is this the problem?'); // check if 
containers have already been pruned--else, continue with default behavior to terminate application - if (!pruned) { - console.log('what the hell?'); - event.preventDefault(); - // Stop and remove postgres-1 and busybox-1 Docker containers upon window exit. - const stopContainers: string = 'docker stop postgres-1 busybox-1'; - const pruneContainers: string = 'docker rm -f postgres-1 busybox-1'; - // this command removes the volume which stores the session data for the postgres instance - // comment this out for dev - const pruneVolumes: string = 'docker volume rm -f seeqr_database-data'; - - // use this string for production build - // const pruneVolumes: string = 'docker volume rm -f app_database-data' - - const step4 = () => { - pruned = true; - app.quit(); - }; - const step3 = () => execute(pruneVolumes, step4); - const step2 = () => execute(pruneContainers, step3); - - execute(stopContainers, step2); - } + // if (!pruned) { + // console.log('what the hell?'); + // event.preventDefault(); + // // Stop and remove postgres-1 and busybox-1 Docker containers upon window exit. + // const stopContainers: string = 'docker stop postgres-1 busybox-1'; + // const pruneContainers: string = 'docker rm -f postgres-1 busybox-1'; + // // this command removes the volume which stores the session data for the postgres instance + // // comment this out for dev + // const pruneVolumes: string = 'docker volume rm -f seeqr_database-data'; + + // // use this string for production build + // // const pruneVolumes: string = 'docker volume rm -f app_database-data' + + // const step4 = () => { + // pruned = true; + // app.quit(); + // }; + // const step3 = () => execute(pruneVolumes, step4); + // const step2 = () => execute(pruneContainers, step3); + + // execute(stopContainers, step2); + // } }); // ---- // Invoke createWindow to create browser windows after Electron has been initialized. 
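For context on the two probes this patch adds in channels.ts above, a rough sketch of the shapes they return (database name illustrative; node-postgres parses the json and name columns into plain JS values):

// EXPLAIN (FORMAT JSON, ANALYZE) comes back as a single row whose
// 'QUERY PLAN' column holds an array containing a 'Plan' tree plus
// timing fields contributed by ANALYZE:
const [plan] = queryStats.rows[0]['QUERY PLAN'];
console.log(plan['Plan']['Node Type'], plan['Execution Time']);

// pg_database_size reports bytes; pg_size_pretty renders them readable,
// which is what the rows[0].pg_size_pretty read in the next patch relies on:
db.query(`SELECT pg_size_pretty(pg_database_size('mydb'));`)
  .then((res) => console.log(res.rows[0].pg_size_pretty)); // e.g. '7453 kB'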
diff --git a/frontend/components/MainPanel.tsx b/frontend/components/MainPanel.tsx index 75e57a65..07729b81 100644 --- a/frontend/components/MainPanel.tsx +++ b/frontend/components/MainPanel.tsx @@ -11,7 +11,7 @@ type MainState = { queries: { queryString: string; queryData: {}[]; - queryStatistics: any + queryStatistics: any; querySchema: string; queryLabel: string; }[]; @@ -34,18 +34,24 @@ class MainPanel extends Component { databaseList: ['defaultDB'], tableList: [], }, - loading: false + loading: false, }; componentDidMount() { ipcRenderer.send('return-db-list'); - + // Listening for returnedData from executing Query // Update state with new object (containing query data, query statistics, query schema // inside of state.queries array ipcRenderer.on('return-execute-query', (event: any, returnedData: any) => { // destructure from returnedData from backend - const { queryString, queryData, queryStatistics, queryCurrentSchema, queryLabel } = returnedData; + const { + queryString, + queryData, + queryStatistics, + queryCurrentSchema, + queryLabel, + } = returnedData; // create new query object with returnedData const newQuery = { queryString, @@ -53,27 +59,29 @@ class MainPanel extends Component { queryStatistics, querySchema: queryCurrentSchema, queryLabel, - } + }; // create copy of current queries array let queries = this.state.queries.slice(); // push new query object into copy of queries array - queries.push(newQuery) - this.setState({ queries }) + queries.push(newQuery); + this.setState({ queries }); }); ipcRenderer.on('db-lists', (event: any, returnedLists: any) => { - this.setState(prevState => ({ + this.setState((prevState) => ({ ...prevState, lists: { databaseList: returnedLists.databaseList, - tableList: returnedLists.tableList - } - })) + tableList: returnedLists.tableList, + }, + })); }); ipcRenderer.on('switch-to-new', (event: any) => { const newSchemaIndex = this.state.lists.databaseList.length - 1; - this.setState({currentSchema: this.state.lists.databaseList[newSchemaIndex]}); + this.setState({ + currentSchema: this.state.lists.databaseList[newSchemaIndex], + }); }); // Renders the loading modal during async functions. @@ -93,17 +101,28 @@ class MainPanel extends Component { } render() { - return (
(MainPanel render JSX diff lost in extraction; only the +/- diff markers survive)
); } From cd365f37a5a15f76214a3faf9b9b33462707cc3c Mon Sep 17 00:00:00 2001 From: Jennifer Courtner Date: Mon, 14 Dec 2020 19:54:18 -0500 Subject: [PATCH 11/34] added db size to event sender in channels.ts and data was returned in mainPanel.tsx --- backend/channels.ts | 21 ++++---- frontend/components/MainPanel.tsx | 22 +++++---- .../rightPanel/schemaChildren/Query.tsx | 48 ++++++++++--------- 3 files changed, 51 insertions(+), 40 deletions(-) diff --git a/backend/channels.ts b/backend/channels.ts index b413b1c0..ef303f8d 100644 --- a/backend/channels.ts +++ b/backend/channels.ts @@ -109,6 +109,7 @@ ipcMain.on('upload-file', (event, filePath: string) => { const runSQL: string = runSQLFunc(dbName, filePath); // added filepath const runTAR: string = runTARFunc(dbName, filePath); //added filepath const extension: string = filePath[0].slice(filePath[0].lastIndexOf('.')); + let dbSize: string; // SEQUENCE OF EXECUTING COMMANDS // Steps are in reverse order because each step is a callback function that requires the following step to be defined. @@ -117,7 +118,8 @@ ipcMain.on('upload-file', (event, filePath: string) => { async function sendLists() { listObj = await db.getLists(); console.log('channels: ', listObj); - event.sender.send('db-lists', listObj); + // Send list of databases and tables, as well as database size to frontend. + event.sender.send('db-lists', listObj, dbSize); // Send schema name back to frontend, so frontend can load tab name. event.sender.send('return-schema-name', dbName); // tell the front end to switch tabs to the newly created database @@ -132,16 +134,24 @@ ipcMain.on('upload-file', (event, filePath: string) => { if (extension === '.sql') runCmd = runSQL; else if (extension === '.tar') runCmd = runTAR; execute(runCmd, sendLists); + + // DB query to get the database size + db.query(`SELECT pg_size_pretty(pg_database_size('${dbName}'));`).then( + (queryStats) => { + console.log('this is the size of the DB: ', queryStats); + dbSize = queryStats.rows[0].pg_size_pretty; + } + ); }; // Step 3: Import database file from file path into docker container // Edit: We changed the functionality to create a file on the local machine instead of adding it to the docker container - const step3 = () => execute(importFile, step4); + // const step3 = () => execute(importFile, step4); // Step 2: Change curent URI to match newly created DB const step2 = () => { db.changeDB(dbName); - return step3(); + return step4(); }; // Step 1: Create empty db @@ -311,11 +321,6 @@ ipcMain.on('execute-query-tracked', (event, data: QueryType) => { })(); } ); - db.query( - `SELECT pg_size_pretty(pg_database_size('${queryCurrentSchema}'));` - ).then((queryStats) => { - console.log('this is the size of the DB: ', queryStats); - }); } else { // Handling for tracking a create table query, can't run explain/analyze on create statements (async function getListAsync() { diff --git a/frontend/components/MainPanel.tsx b/frontend/components/MainPanel.tsx index 07729b81..b3e08335 100644 --- a/frontend/components/MainPanel.tsx +++ b/frontend/components/MainPanel.tsx @@ -67,15 +67,19 @@ class MainPanel extends Component { this.setState({ queries }); }); - ipcRenderer.on('db-lists', (event: any, returnedLists: any) => { - this.setState((prevState) => ({ - ...prevState, - lists: { - databaseList: returnedLists.databaseList, - tableList: returnedLists.tableList, - }, - })); - }); + ipcRenderer.on( + 'db-lists', + (event: any, returnedLists: any, returnedDbSize: string) => { + console.log('database size in FE: ', 
returnedDbSize); + this.setState((prevState) => ({ + ...prevState, + lists: { + databaseList: returnedLists.databaseList, + tableList: returnedLists.tableList, + }, + })); + } + ); ipcRenderer.on('switch-to-new', (event: any) => { const newSchemaIndex = this.state.lists.databaseList.length - 1; diff --git a/frontend/components/rightPanel/schemaChildren/Query.tsx b/frontend/components/rightPanel/schemaChildren/Query.tsx index 18f24ce4..e52bde5f 100644 --- a/frontend/components/rightPanel/schemaChildren/Query.tsx +++ b/frontend/components/rightPanel/schemaChildren/Query.tsx @@ -15,9 +15,9 @@ import CodeMirror from '@skidding/react-codemirror'; *********************** TYPESCRIPT: TYPES *********************** ************************************************************/ -type QueryProps = { +type QueryProps = { currentSchema: string; - tableList: string[]; + tableList: string[]; }; type state = { @@ -33,22 +33,21 @@ class Query extends Component { super(props); this.handleQuerySubmit = this.handleQuerySubmit.bind(this); this.updateCode = this.updateCode.bind(this); - this.handleTrackQuery = this.handleTrackQuery.bind(this); + this.handleTrackQuery = this.handleTrackQuery.bind(this); } state: state = { queryString: '', queryLabel: '', show: false, - trackQuery: false + trackQuery: false, }; componentDidMount() { ipcRenderer.on('query-error', (event: any, message: string) => { console.log('Query error: '); // dialog.showErrorBox('Error', message); - - }) + }); } // Updates state.queryString as user inputs query label @@ -74,7 +73,7 @@ class Query extends Component { // if query string is empty, show error if (!this.state.queryString) { dialog.showErrorBox('Please enter a Query.', ''); - } + } if (!this.state.trackQuery) { //functionality to send query but not return stats and track const queryAndSchema = { @@ -88,8 +87,7 @@ class Query extends Component { } if (this.state.trackQuery && !this.state.queryLabel) { dialog.showErrorBox('Please enter a label for the Query.', ''); - } - else if (this.state.trackQuery) { + } else if (this.state.trackQuery) { // send query and return stats from explain/analyze const queryAndSchema = { queryString: this.state.queryString, @@ -112,31 +110,35 @@ class Query extends Component { return (
(Query render JSX garbled in extraction; the surviving fragments show the updated layout: a "Database Size:" readout, the "Query" heading, a "track on chart:" checkbox, and a query-label input wired to this.handleLabelEntry(e) that replaces the previous label input)
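Taken together with the Query.tsx changes above, the renderer-to-main contract now splits on the track-on-chart checkbox. A sketch of the channels named in these patches, with payload fields abridged:

// renderer (Query.tsx): untracked runs skip the EXPLAIN/ANALYZE statistics
ipcRenderer.send('execute-query-untracked', { queryString });
ipcRenderer.send('execute-query-tracked', {
  queryString,
  queryCurrentSchema, // this.props.currentSchema
  queryLabel,
});

// main (channels.ts) answers on shared channels:
//   'return-execute-query' -> rows plus queryStatistics for tracked runs
//   'db-lists'             -> refreshed database/table lists (plus dbSize as of this patch)
//   'async-complete'       -> signals the frontend to clear its loading state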
From 0617ce9d7a5e084bb80f505150d90fdaf6586881 Mon Sep 17 00:00:00 2001 From: Katie Klochan Date: Mon, 14 Dec 2020 20:23:38 -0500 Subject: [PATCH 12/34] pushing fixes to schema loading--still in progress --- backend/channels.ts | 60 ++++--- backend/main.ts | 87 +++++----- backend/models.ts | 2 + frontend/components/App.tsx | 25 +-- frontend/components/MainPanel.tsx | 56 ++++--- .../rightPanel/schemaChildren/SchemaModal.tsx | 152 +++++++++++------- undefined | 22 +++ 7 files changed, 252 insertions(+), 152 deletions(-) create mode 100644 undefined diff --git a/backend/channels.ts b/backend/channels.ts index 7e2dc2a9..bc1aa8de 100644 --- a/backend/channels.ts +++ b/backend/channels.ts @@ -10,37 +10,44 @@ const db = require('./models'); ************************************************************/ // Generate CLI commands to be executed in child process. +// updated commands to use postgres without docker (commented out docker code) const createDBFunc = (name) => { return `psql -U postgres -c "CREATE DATABASE ${name}"`; //return `docker exec postgres-1 psql -h localhost -p 5432 -U postgres -c "CREATE DATABASE ${name}"`; }; -// added "name" as a parameter for importFileFunc -const importFileFunc = (name, file) => { - console.log('inside importFile Func'); - return `psql -U postgres ${name} < ${file}`; +//commenting out the importFileFunc to test duplicate import errors +// const importFileFunc = (name, file) => { // added "name" as a parameter for importFileFunc +// console.log('inside importFile Func'); +// return `psql -U postgres ${name} < ${file}`; + +// // return `docker cp ${file} postgres-1:/data_dump`; +// }; - // return `docker cp ${file} postgres-1:/data_dump`; -}; -// added file param: const runSQLFunc = (dbName, file) => { + // added file param: return `psql -U postgres -d ${dbName} -f ${file}`; // replaced /data_dump with ${file}; // return `docker exec postgres-1 psql -U postgres -d ${dbName} -f /data_dump`; }; -// added file param: + const runTARFunc = (dbName, file) => { + // added file param: return `pg_restore -U postgres -d ${dbName} -f ${file}`; // replaced /data_dump with ${file}`; // docker exec postgres-1 pg_restore -U postgres -d ${dbName} /data_dump`; }; -const runFullCopyFunc = (dbCopyName) => { - return `pg_dump -U postgres ${dbCopyName} -f /data_dump`; +const runFullCopyFunc = (dbCopyName, filePath) => { + console.log('this is the runFullCopyFunc code'); + return `pg_dump -U postgres -d ${dbCopyName} -f ${filePath}`; + // docker exec postgres-1 pg_dump -U postgres ${dbCopyName} -f /data_dump`; // }; -const runHollowCopyFunc = (dbCopyName) => { - return `pg_dump -s -U postgres ${dbCopyName} -f /data_dump`; +const runHollowCopyFunc = (dbCopyName, file) => { + //added file as param + console.log('this is the runHollowCopyFunc'); + return `pg_dump -s -U postgres ${dbCopyName} -f ${file}`; // replaced /data_dump with ${file}`; // docker exec postgres-1 pg_dump -s -U postgres ${dbCopyName} -f /data_dump`; }; @@ -104,8 +111,7 @@ ipcMain.on('upload-file', (event, filePath: string) => { const createDB: string = createDBFunc(dbName); - // added dbName to importFile - const importFile: string = importFileFunc(dbName, filePath); + //const importFile: string = importFileFunc(dbName, filePath); // added dbName to importFile // commenting out to test removal of importFile func const runSQL: string = runSQLFunc(dbName, filePath); // added filepath const runTAR: string = runTARFunc(dbName, filePath); //added filepath const extension: string = 
filePath[0].slice(filePath[0].lastIndexOf('.')); @@ -136,12 +142,12 @@ ipcMain.on('upload-file', (event, filePath: string) => { // Step 3: Import database file from file path into docker container // Edit: We changed the functionality to create a file on the local machine instead of adding it to the docker container - const step3 = () => execute(importFile, step4); + // const step3 = () => execute(importFile, step4); // Step 2: Change curent URI to match newly created DB const step2 = () => { db.changeDB(dbName); - return step3(); + return step4(); //changing step3 to step4 to test removal of importFile func }; // Step 1: Create empty db @@ -166,15 +172,23 @@ ipcMain.on('input-schema', (event, data: SchemaType) => { const { schemaName: dbName, dbCopyName, copy } = data; let { schemaFilePath: filePath } = data; + console.log( + 'Schema name: ', + data.schemaName, + 'data[schemaFilePath: ', + data.schemaFilePath, + 'filepath: ', + filePath + ); // generate strings that are fed into execute functions later const createDB: string = createDBFunc(dbName); - //added dbName to importFile - const importFile: string = importFileFunc(dbName, filePath); + + // const importFile: string = importFileFunc(dbName, filePath); //added dbName to importFile //commenting out to test removal of importFile func const runSQL: string = runSQLFunc(dbName, filePath); // added filePath const runTAR: string = runTARFunc(dbName, filePath); // added filePath - const runFullCopy: string = runFullCopyFunc(dbCopyName); - const runHollowCopy: string = runHollowCopyFunc(dbCopyName); + const runFullCopy: string = runFullCopyFunc(dbCopyName, filePath); + const runHollowCopy: string = runHollowCopyFunc(dbCopyName, filePath); // determine if the file is a sql or a tar file, in the case of a copy, we will not have a filepath so we just hard-code the extension to be sql let extension: string = ''; @@ -205,7 +219,8 @@ ipcMain.on('input-schema', (event, data: SchemaType) => { // Step 3: Import database file from file path into docker container - const step3 = () => execute(importFile, step4); + //const step3 = () => execute(importFile, step4); + // skip step three which is only for importing files and instead change the current db to the newly created one const step3Copy = () => { db.changeDB(dbName); @@ -222,6 +237,7 @@ ipcMain.on('input-schema', (event, data: SchemaType) => { // this generates a pg_dump file from the specified db and saves it to a location in the container. // Full copy case if (copy) { + console.log('this is a console log'); execute(runFullCopy, step3Copy); } // Hollow copy case @@ -233,7 +249,7 @@ ipcMain.on('input-schema', (event, data: SchemaType) => { // change the current database back to the newly created one // and now that we have changed to the new db, we can move on to importing the data file db.changeDB(dbName); - return step3(); + return step4(); //changing step3 to step4 to test removal of importFile func } }; diff --git a/backend/main.ts b/backend/main.ts index d1d15020..6782bd41 100644 --- a/backend/main.ts +++ b/backend/main.ts @@ -3,8 +3,8 @@ import { app, BrowserWindow, ipcMain, Menu } from 'electron'; import { appendFile } from 'fs/promises'; import { join } from 'path'; import { format } from 'url'; -//import './channels' // all channels live here -import execute from './channels'; +import './channels' // all channels live here - this format signals that we want to import the code even if we're not calling any of the functions. 
If we were to import an object from channels and not call any of the functions in this file, webpack thinks we're not using it and skips the import. +//import execute from './channels'; const { exec } = require('child_process'); const appMenu = require('./mainMenu'); // use appMenu to add options in top menu bar of app @@ -82,52 +82,51 @@ function createWindow() { // Don't show until we are ready and loaded mainWindow.once('ready-to-show', (event) => { mainWindow.show(); - + } + )} // // uncomment code below before running production build and packaging // // const yamlPath = join(__dirname, '../../docker-compose.yml') // // const runDocker: string = `docker-compose -f '${yamlPath}' up -d`; - const runDocker: string = `docker-compose up -d`; - exec(runDocker, (error, stdout, stderr) => { - if (error) { - console.log(`error: ${error.message}`); - return; - } - if (stderr) { - console.log(`stderr: ${stderr}`); - return; - } - console.log(`${stdout}`); - }); - }); -} + // const runDocker: string = `docker-compose up -d`; + // exec(runDocker, (error, stdout, stderr) => { + // if (error) { + // console.log(`error: ${error.message}`); + // return; + // } + // if (stderr) { + // console.log(`stderr: ${stderr}`); + // return; + // } + // console.log(`${stdout}`); + // }); + // }); +//} // ---- -app.on('before-quit', (event: any) => { - console.log('is this the problem?'); - - // check if containers have already been pruned--else, continue with default behavior to terminate application - if (!pruned) { - console.log('what the hell?'); - event.preventDefault(); - // Stop and remove postgres-1 and busybox-1 Docker containers upon window exit. - const stopContainers: string = 'docker stop postgres-1 busybox-1'; - const pruneContainers: string = 'docker rm -f postgres-1 busybox-1'; - // this command removes the volume which stores the session data for the postgres instance - // comment this out for dev - const pruneVolumes: string = 'docker volume rm -f seeqr_database-data'; - - // use this string for production build - // const pruneVolumes: string = 'docker volume rm -f app_database-data' - - const step4 = () => { - pruned = true; - app.quit(); - }; - const step3 = () => execute(pruneVolumes, step4); - const step2 = () => execute(pruneContainers, step3); - - execute(stopContainers, step2); - } -}); +// app.on('before-quit', (event: any) => { +// // check if containers have already been pruned--else, continue with default behavior to terminate application +// if (!pruned) { +// event.preventDefault(); +// // Stop and remove postgres-1 and busybox-1 Docker containers upon window exit. +// const stopContainers: string = 'docker stop postgres-1 busybox-1'; +// const pruneContainers: string = 'docker rm -f postgres-1 busybox-1'; +// // this command removes the volume which stores the session data for the postgres instance +// // comment this out for dev +// const pruneVolumes: string = 'docker volume rm -f seeqr_database-data'; + +// // use this string for production build +// // const pruneVolumes: string = 'docker volume rm -f app_database-data' + +// const step4 = () => { +// pruned = true; +// app.quit(); +// }; +// const step3 = () => execute(pruneVolumes, step4); +// const step2 = () => execute(pruneContainers, step3); + + // execute(stopContainers, step2); + +// } +// }); // ---- // Invoke createWindow to create browser windows after Electron has been initialized. // Some APIs can only be used after this event occurs. 
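The inline comment on the './channels' import above captures why the import style changed back in patch 09: a bare side-effect import keeps the module in the bundle even though nothing from it is called directly. A minimal illustration of the difference, restating the patch comment:

import './channels';
// side-effect import: webpack always executes the module, so every
// ipcMain.on(...) listener defined in channels.ts gets registered.

// import execute from './channels';
// named import with `execute` never called: the bundler can treat the
// module as unused and skip it, so those listeners never attach.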
diff --git a/backend/models.ts b/backend/models.ts index 0a753849..c3dce03f 100644 --- a/backend/models.ts +++ b/backend/models.ts @@ -9,6 +9,7 @@ let pool: any = new Pool({ connectionString: PG_URI }); //helper function that creates the column objects, which are saved to the schemaLayout object //this function returns a promise to be resolved with Promise.all syntax const getColumnObjects = (tableName: string) => { + console.log('this is the getColumnObjects function'); const queryString = 'SELECT column_name, data_type, character_maximum_length FROM information_schema.columns WHERE table_name = $1;'; const value = [tableName]; @@ -32,6 +33,7 @@ const getColumnObjects = (tableName: string) => { // gets all the names of the current postgres instances const getDBNames = () => { + console.log('this is the getDBNames function'); return new Promise((resolve) => { pool.query('SELECT datname FROM pg_database;').then((databases) => { let dbList: any = []; diff --git a/frontend/components/App.tsx b/frontend/components/App.tsx index d8f6f730..6498ebfc 100644 --- a/frontend/components/App.tsx +++ b/frontend/components/App.tsx @@ -27,19 +27,25 @@ export class App extends Component { handleFileClick(event: ClickEvent) { dialog - .showOpenDialog( - { - properties: ['openFile'], - filters: [{ name: 'Custom File Type', extensions: ['tar', 'sql'] }], - message: 'Please upload .sql or .tar database file' - }, - ) + .showOpenDialog({ + properties: ['openFile'], + filters: [{ name: 'Custom File Type', extensions: ['tar', 'sql'] }], + message: 'Please upload .sql or .tar database file', + }) .then((result: object) => { const filePathArr = result['filePaths']; + console.log( + 'this is the handleFileClick function - first dot then', + this.state + ); // send via channel to main process if (!result['canceled']) { ipcRenderer.send('upload-file', filePathArr); this.setState({ openSplash: false }); + console.log( + 'this is the handleFileClick function - second dot then', + this.state + ); } }) .catch((err: object) => { @@ -51,6 +57,7 @@ export class App extends Component { handleSkipClick(event: ClickEvent) { ipcRenderer.send('skip-file-upload'); this.setState({ openSplash: false }); + console.log('this is the handleSkipClick function', this.state); } render() { @@ -68,8 +75,8 @@ export class App extends Component { handleSkipClick={this.handleSkipClick} /> ) : ( - - )} + + )}
); } diff --git a/frontend/components/MainPanel.tsx b/frontend/components/MainPanel.tsx index 75e57a65..c03ed443 100644 --- a/frontend/components/MainPanel.tsx +++ b/frontend/components/MainPanel.tsx @@ -11,7 +11,7 @@ type MainState = { queries: { queryString: string; queryData: {}[]; - queryStatistics: any + queryStatistics: any; querySchema: string; queryLabel: string; }[]; @@ -34,18 +34,24 @@ class MainPanel extends Component { databaseList: ['defaultDB'], tableList: [], }, - loading: false + loading: false, }; componentDidMount() { ipcRenderer.send('return-db-list'); - + // Listening for returnedData from executing Query // Update state with new object (containing query data, query statistics, query schema // inside of state.queries array ipcRenderer.on('return-execute-query', (event: any, returnedData: any) => { // destructure from returnedData from backend - const { queryString, queryData, queryStatistics, queryCurrentSchema, queryLabel } = returnedData; + const { + queryString, + queryData, + queryStatistics, + queryCurrentSchema, + queryLabel, + } = returnedData; // create new query object with returnedData const newQuery = { queryString, @@ -53,32 +59,34 @@ class MainPanel extends Component { queryStatistics, querySchema: queryCurrentSchema, queryLabel, - } + }; // create copy of current queries array let queries = this.state.queries.slice(); // push new query object into copy of queries array - queries.push(newQuery) - this.setState({ queries }) + queries.push(newQuery); + this.setState({ queries }); }); ipcRenderer.on('db-lists', (event: any, returnedLists: any) => { - this.setState(prevState => ({ + this.setState((prevState) => ({ ...prevState, lists: { databaseList: returnedLists.databaseList, - tableList: returnedLists.tableList - } - })) + tableList: returnedLists.tableList, + }, + })); }); ipcRenderer.on('switch-to-new', (event: any) => { const newSchemaIndex = this.state.lists.databaseList.length - 1; - this.setState({currentSchema: this.state.lists.databaseList[newSchemaIndex]}); + this.setState({ + currentSchema: this.state.lists.databaseList[newSchemaIndex], + }); }); // Renders the loading modal during async functions. ipcRenderer.on('async-started', (event: any) => { - this.setState({ loading: true }); + this.setState({ loading: false }); // ** James/Katie - changing to false for now to avoid loading modal until we can figure out later why the async complete listener isnt kicking in }); ipcRenderer.on('async-complete', (event: any) => { @@ -90,20 +98,32 @@ class MainPanel extends Component { ipcRenderer.send('change-db', tabName); ipcRenderer.send('return-db-list'); this.setState({ currentSchema: tabName }); + console.log('this is the onClickTabItem func', this.state); } render() { - return (
(MainPanel render JSX diff lost in extraction; only the +/- diff markers survive)
); } diff --git a/frontend/components/rightPanel/schemaChildren/SchemaModal.tsx b/frontend/components/rightPanel/schemaChildren/SchemaModal.tsx index bfe168e7..4b3026c3 100644 --- a/frontend/components/rightPanel/schemaChildren/SchemaModal.tsx +++ b/frontend/components/rightPanel/schemaChildren/SchemaModal.tsx @@ -20,7 +20,7 @@ type state = { schemaEntry: string; redirect: boolean; dbCopyName: string; - copy: boolean + copy: boolean; }; class SchemaModal extends Component { @@ -45,10 +45,9 @@ class SchemaModal extends Component { schemaEntry: '', redirect: false, dbCopyName: 'Select Instance', - copy: false + copy: false, }; - // Set schema name handleSchemaName(event: any) { // convert input label name to lowercase only with no spacing to comply with db naming convention. @@ -56,12 +55,17 @@ class SchemaModal extends Component { let dbSafeName = schemaNameInput.toLowerCase(); dbSafeName = dbSafeName.replace(/[^A-Z0-9]/gi, ''); this.setState({ schemaName: dbSafeName }); + console.log('this is the handleSchemaName function', this.state); } // Load schema file path // When file path is uploaded, query entry is cleared. handleSchemaFilePath(event: ClickEvent) { event.preventDefault(); + console.log( + 'this is state in the top of the handleSchemaFilePath func', + this.state + ); dialog .showOpenDialog({ properties: ['openFile'], @@ -70,7 +74,9 @@ class SchemaModal extends Component { }) .then((result: object) => { const filePath = result['filePaths']; + console.log('result: ', result); this.setState({ schemaFilePath: filePath }); + console.log('state: ', this.state); const schemaObj = { schemaName: this.state.schemaName, schemaFilePath: this.state.schemaFilePath, @@ -78,19 +84,25 @@ class SchemaModal extends Component { }; if (!result['canceled']) { ipcRenderer.send('input-schema', schemaObj); - this.setState({ schemaName: ''}); + this.setState({ schemaName: '' }); } - this.setState({ dbCopyName: 'Select Instance'}); + this.setState({ dbCopyName: 'Select Instance' }); this.props.showModal(event); }) + .catch((err: object) => { - console.log('Error in handleSchemaFilePath method of SchemaModal.tsx.', err); + console.log( + 'Error in handleSchemaFilePath method of SchemaModal.tsx.', + err + ); }); + console.log('this is state in the handleSchemaFilePath func', this.state); } // When schema script is inserted, file path is cleared set dialog to warn user. 
handleSchemaEntry(event: any) { this.setState({ schemaEntry: event.target.value, schemaFilePath: '' }); + console.log('this is state in the handleSchemaEntry func', this.state); // this.setState({ schemaFilePath: '' }); } @@ -102,21 +114,28 @@ class SchemaModal extends Component { schemaFilePath: this.state.schemaFilePath, schemaEntry: this.state.schemaEntry, }; - + console.log(schemaObj); + console.log('this is state in the handleSchemaSubmit func', this.state); ipcRenderer.send('input-schema', schemaObj); } - selectHandler = (eventKey, e: React.SyntheticEvent) => { - this.setState({ dbCopyName: eventKey }); - } + selectHandler = (eventKey, e: React.SyntheticEvent) => { + this.setState({ dbCopyName: eventKey += '_copy' }); // added += _copy to append copy to the dbCopyName + console.log('this is state in the selectHandler func', this.state); + }; handleCopyData(event: any) { - if(!this.state.copy) this.setState({ copy: true }); + if (!this.state.copy) this.setState({ copy: true }); else this.setState({ copy: false }); + console.log('this is state in the handleCopyData func', this.state); } dropDownList = () => { - return this.props.tabList.map((db, index) => {db}); + return this.props.tabList.map((db, index) => ( + + {db} + + )); }; handleCopyFilePath(event: any) { @@ -127,13 +146,15 @@ class SchemaModal extends Component { schemaFilePath: '', schemaEntry: '', dbCopyName: this.state.dbCopyName, - copy: this.state.copy - } + copy: this.state.copy, + }; ipcRenderer.send('input-schema', schemaObj); - this.setState({ dbCopyName: 'Select Instance'}); - this.setState({ schemaName: ''}); + this.setState({ dbCopyName: 'Select Instance' }); + this.setState({ schemaName: '' }); this.props.showModal(event); + + console.log('this is state in the handleCopyFilePath func', this.state); } render() { @@ -145,49 +166,60 @@ class SchemaModal extends Component { ); diff --git a/frontend/components/rightPanel/Tabs.tsx b/frontend/components/rightPanel/Tabs.tsx index 6573120b..0945e7f3 100644 --- a/frontend/components/rightPanel/Tabs.tsx +++ b/frontend/components/rightPanel/Tabs.tsx @@ -10,7 +10,8 @@ type TabsProps = { tabList: string[], queries: any, onClickTabItem: any, - tableList: string[] + tableList: string[], + databaseSize: string } type state = { @@ -33,7 +34,7 @@ export class Tabs extends Component { componentDidMount() { // After schema is successfully sent to backend, backend spins up new database with inputted schemaName. // It will send the frontend an updated variable 'lists' that is an array of updated lists of all the tabs (which is the same - // thing as all the databases). We open a channel to listen for it here inside of componendDidMount, then + // thing as all the databases). We open a channel to listen for it here inside of componentDidMount, then // we invoke onClose to close schemaModal ONLY after we are sure that backend has created that channel. 
ipcRenderer.on('db-lists', (event: any, returnedLists: any) => { this.setState({currentSchema: returnedLists}) From 11a9ac490d4191a81a6b4956547b404d61bd345f Mon Sep 17 00:00:00 2001 From: Jennifer Courtner Date: Wed, 16 Dec 2020 11:13:53 -0500 Subject: [PATCH 18/34] pass db size from mainpanel to tabs to schemaContainer to query.tsx --- .../components/rightPanel/SchemaContainer.tsx | 2 + frontend/components/rightPanel/Tabs.tsx | 59 +++++++++++++------ .../rightPanel/schemaChildren/Query.tsx | 3 +- 3 files changed, 45 insertions(+), 19 deletions(-) diff --git a/frontend/components/rightPanel/SchemaContainer.tsx b/frontend/components/rightPanel/SchemaContainer.tsx index 36c22cb2..28f39371 100644 --- a/frontend/components/rightPanel/SchemaContainer.tsx +++ b/frontend/components/rightPanel/SchemaContainer.tsx @@ -6,6 +6,7 @@ type SchemaContainerProps = { queries: any; currentSchema: string; tableList: string[]; + databaseSize: string; }; type state = { @@ -30,6 +31,7 @@ export class SchemaContainer extends Component {
diff --git a/frontend/components/rightPanel/Tabs.tsx b/frontend/components/rightPanel/Tabs.tsx index 0945e7f3..cc172a6b 100644 --- a/frontend/components/rightPanel/Tabs.tsx +++ b/frontend/components/rightPanel/Tabs.tsx @@ -6,13 +6,13 @@ import { Tab } from './tabsChildren/Tab'; const { ipcRenderer } = window.require('electron'); type TabsProps = { - currentSchema: string, - tabList: string[], - queries: any, - onClickTabItem: any, - tableList: string[], - databaseSize: string -} + currentSchema: string; + tabList: string[]; + queries: any; + onClickTabItem: any; + tableList: string[]; + databaseSize: string; +}; type state = { show: boolean; @@ -23,27 +23,33 @@ export class Tabs extends Component { this.showModal = this.showModal.bind(this); } state: state = { - show: false + show: false, }; showModal = (event: any) => { this.setState({ show: true }); }; - componentDidMount() { // After schema is successfully sent to backend, backend spins up new database with inputted schemaName. // It will send the frontend an updated variable 'lists' that is an array of updated lists of all the tabs (which is the same // thing as all the databases). We open a channel to listen for it here inside of componentDidMount, then // we invoke onClose to close schemaModal ONLY after we are sure that backend has created that channel. - ipcRenderer.on('db-lists', (event: any, returnedLists: any) => { - this.setState({currentSchema: returnedLists}) + ipcRenderer.on('db-lists', ( + event: any, + returnedLists: any /*returnedDbSize: string*/ + ) => { + this.setState({ + currentSchema: returnedLists, + // databaseSize: returnedDbSize, + }); + // console.log('db size was returned in component did mount in tabs.tsx'); this.onClose(event); - }) + }); } onClose = (event: any) => { - this.setState({ show: false }) + this.setState({ show: false }); }; render() { @@ -52,9 +58,13 @@ export class Tabs extends Component { tabList, currentSchema, queries, + databaseSize, } = this.props; + console.log('this is this.props in tabs.tsx: ', this.props); - const activeTabQueries = queries.filter((query) => query.querySchema === currentSchema); + const activeTabQueries = queries.filter( + (query) => query.querySchema === currentSchema + ); return (
@@ -79,17 +89,30 @@ export class Tabs extends Component
(render JSX garbled in extraction; the surviving fragments show the tab bar markup and the {tabList.map((tab, index) => { if (tab !== currentSchema) return undefined; ... })} body being reworked to pass additional props, databaseSize among them, through to the per-tab content)
); } -} \ No newline at end of file +} diff --git a/frontend/components/rightPanel/schemaChildren/Query.tsx b/frontend/components/rightPanel/schemaChildren/Query.tsx index e52bde5f..695246a0 100644 --- a/frontend/components/rightPanel/schemaChildren/Query.tsx +++ b/frontend/components/rightPanel/schemaChildren/Query.tsx @@ -18,6 +18,7 @@ import CodeMirror from '@skidding/react-codemirror'; type QueryProps = { currentSchema: string; tableList: string[]; + dbSize: string; }; type state = { @@ -110,7 +111,7 @@ class Query extends Component { return (
-            Database Size:
+            Database Size: {this.props.dbSize}
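Patch 18 threads databaseSize from MainPanel down through Tabs and SchemaContainer to Query, but the 'db-lists' payload that would carry the size is still commented out in Tabs above. A sketch of the backend half under stated assumptions — pg_size_pretty and pg_database_size are real Postgres functions, while the getDBSize helper and the two-argument 'db-lists' payload are ours:

    // models.ts — hypothetical helper; query is assumed to wrap pool.query
    const getDBSize = (dbName: string): Promise<string> =>
      query('SELECT pg_size_pretty(pg_database_size($1)) AS size;', [
        dbName,
      ]).then((res) => res.rows[0].size);

    // channels.ts — send the size alongside the lists so Tabs can set both
    ipcMain.on('return-db-list', (event, dbName) => {
      Promise.all([db.getLists(), db.getDBSize(dbName)]).then(
        ([lists, size]) => event.sender.send('db-lists', lists, size)
      );
    });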
Date: Wed, 16 Dec 2020 19:48:30 -0500 Subject: [PATCH 19/34] changes to dummyDataMain.ts to generate dummy data to csv and psql --- backend/DummyD/dummyDataMain.ts | 174 ++++++++++++++++++++------------ 1 file changed, 112 insertions(+), 62 deletions(-) diff --git a/backend/DummyD/dummyDataMain.ts b/backend/DummyD/dummyDataMain.ts index 0e897b3f..78166d0d 100644 --- a/backend/DummyD/dummyDataMain.ts +++ b/backend/DummyD/dummyDataMain.ts @@ -1,5 +1,5 @@ -import faker from "faker"; -import execute from "../channels"; +import faker from 'faker'; +import execute from '../channels'; const db = require('../models'); ///////////////////////////////////////////////////////////////////// @@ -18,20 +18,20 @@ let keyObject: any; type schemaLayout = { tableNames: string[]; tables: any; -} +}; //this object is created on the front end in DummyDataModal type dummyDataRequest = { schemaName: string; dummyData: {}; -} +}; //helper function to generate random numbers that will ultimately represent a random date const getRandomInt = (min, max) => { min = Math.ceil(min); max = Math.floor(max); return Math.floor(Math.random() * (max - min) + min); //The maximum is exclusive and the minimum is inclusive -} +}; // this function generates data for a column // column data coming in is an object of the form @@ -43,16 +43,22 @@ const generateDataByType = (columnObj) => { //faker.js method to generate data by type switch (columnObj.dataInfo.data_type) { case 'smallint': - return faker.random.number({min: -32768, max: 32767}); + return faker.random.number({ min: -32768, max: 32767 }); case 'integer': - return faker.random.number({min: -2147483648, max: 2147483647}); + return faker.random.number({ min: -2147483648, max: 2147483647 }); case 'bigint': - return faker.random.number({min: -9223372036854775808, max: 9223372036854775807}); + return faker.random.number({ + min: -9223372036854775808, + max: 9223372036854775807, + }); case 'character varying': if (columnObj.dataInfo.character_maximum_length) { - return faker.lorem.character(Math.floor(Math.random() * columnObj.dataInfo.character_maximum_length)); - } - else return faker.lorem.word(); + return faker.lorem.character( + Math.floor( + Math.random() * columnObj.dataInfo.character_maximum_length + ) + ); + } else return faker.lorem.word(); case 'date': // generating a random date between 1500 and 2020 let result: string = ''; @@ -64,7 +70,7 @@ const generateDataByType = (columnObj) => { result += year + '-' + month + '-' + day; return result; default: - console.log('Error generating dummy data by type') + console.log('Error generating dummy data by type'); } }; @@ -72,29 +78,48 @@ const generateDataByType = (columnObj) => { let count: number = 0; module.exports = { - writeCSVFile: (tableObject, schemaLayout, keyObject, dummyDataRequest, event: any) => { + writeCSVFile: ( + tableObject, + schemaLayout, + keyObject, + dummyDataRequest, + event: any + ) => { // extracting variables const tableCount: number = Object.keys(dummyDataRequest.dummyData).length; const tableName: string = tableObject.tableName; const tableMatrix: any = tableObject.data; const schemaName: string = dummyDataRequest.schemaName; + console.log( + 'tableCount: ', + tableCount, + 'tableName: ', + tableName, + 'tableMatrix: ', + tableMatrix, + 'schemaName: ', + schemaName + ); + // mapping column headers from getColumnObjects in models.ts to columnNames - const columnArray: string[] = schemaLayout.tables[tableName].map(columnObj => columnObj.columnName); + const columnArray: string[] = 
schemaLayout.tables[tableName].map( + (columnObj) => columnObj.columnName + ); // transpose the table-matrix to orient it as a table const table: any = []; - let row: any = []; - for(let i = 0; i < tableMatrix[0].length; i++) { - for(let j = 0; j < tableMatrix.length; j++) { - row.push(tableMatrix[j][i]); + let row: any = []; + for (let i = 0; i < tableMatrix[0].length; i++) { + for (let j = 0; j < tableMatrix.length; j++) { + row.push(tableMatrix[j][i]); } //join each subarray (which correspond to rows in our table) with a comma const rowString = row.join(','); table.push(rowString); //'1, luke, etc' row = []; } - + // Step 3 - this step adds back the PK constraints that we took off prior to copying the dummy data into the DB (using the db that is imported from models.ts) const step3 = () => { count += 1; @@ -103,46 +128,52 @@ module.exports = { db.addPrimaryKeyConstraints(keyObject, dummyDataRequest) .then(() => { db.addForeignKeyConstraints(keyObject, dummyDataRequest) - .then(() => { - event.sender.send('async-complete'); - count = 0; - }) - .catch((err) => { - console.log(err); - count = 0; - }); + .then(() => { + event.sender.send('async-complete'); + count = 0; + }) + .catch((err) => { + console.log(err); + count = 0; + }); }) .catch((err) => { console.log(err); count = 0; }); - } - else return; - } + } else return; + }; // Step 2 - using the postgres COPY command, this step copies the contents of the csv file in the container file system into the appropriate postgres DB const step2 = () => { - let queryString: string = `COPY ${tableName} FROM '/${tableName}.csv' WITH CSV HEADER;`; + let queryString: string = `\\copy ${tableName} FROM '${tableName}.csv' WITH CSV HEADER;`; // run the query in the container using a docker command - execute(`docker exec postgres-1 psql -U postgres -d ${schemaName} -c "${queryString}" `, step3); - } + // docker exec postgres-1 psql -U postgres -d ${schemaName} -c "${queryString}" + execute(`psql -U postgres -d ${schemaName} -c "${queryString}" `, step3); + }; let csvString: string; //join tableMatrix with a line break (different on mac and windows because of line breaks in the bash CLI) if (process.platform === 'win32') { - const tableDataString: string = table.join(`' >> ${tableName}.csv; echo '`); + const tableDataString: string = table.join( + `' >> ${tableName}.csv; echo '` + ); const columnString: string = columnArray.join(','); - csvString = columnString.concat(`' > ${tableName}.csv; echo '`).concat(tableDataString); - execute(`docker exec postgres-1 bash -c "echo '${csvString}' >> ${tableName}.csv;"`, step2); - } - else { + csvString = columnString + .concat(`' > ${tableName}.csv; echo '`) + .concat(tableDataString); + execute( + `docker exec postgres-1 bash -c "echo '${csvString}' >> ${tableName}.csv;"`, + step2 + ); + } else { // we know we are not on Windows, thank god! 
const tableDataString: string = table.join('\n');
      const columnString: string = columnArray.join(',');
      csvString = columnString.concat('\n').concat(tableDataString);
-
+
      // split csv string into an array of csv strings that each are of length 100,000 characters or less
-
+
      // create upperLimit variable, which represents the max number of characters a bash shell command can handle
      let upperLimit: number;
      upperLimit = 100000;
@@ -150,7 +181,7 @@ module.exports = {
      let stringCount: number = Math.ceil(csvString.length / upperLimit);
      // create csvArray that will hold our final csv strings
      let csvArray: string[] = [];
-
+
      let startIndex: number;
      let endIndex: number;
      // iterate over i from 0 to less than stringCount, each iteration pushing slices of original csvString into an array
@@ -161,43 +192,58 @@ module.exports = {
        if (i === stringCount - 1) csvArray.push(csvString.slice(startIndex));
        else csvArray.push(csvString.slice(startIndex, endIndex));
      }
-      let index: number = 0
+      let index: number = 0;
      // Step 1 - this writes a csv file to the postgres-1 file system, which contains all of the dummy data that will be copied into its corresponding postgres DB
      const step1 = () => {
        // NOTE: in order to rewrite the csv files in the container file system, we must use echo with a single angle bracket on the first element of csvArray AND then move on directly to step2 (and then also reset index)
-
+
        // if our csvArray contains only one element
        if (csvArray.length === 1) {
-          execute(`docker exec postgres-1 bash -c "echo '${csvArray[index]}' > ${tableName}.csv;"`, step2);
+          // `docker exec postgres-1 bash -c "echo '${csvArray[index]}' > ${tableName}.csv;"`
+          execute(
+            `bash -c "echo '${csvArray[index]}' > ${tableName}.csv;"`,
+            step2
+          );
          index = 0;
        }
        // otherwise if we are working with the first element in csvArray
        else if (index === 0) {
+          console.log('this is last else statement in step1 on line 211 ');
          execute(
            `bash -c "echo -n '${csvArray[index]}' > ${tableName}.csv;"`,
            step1
          );
          index += 1;
        }
        // if working with last csvArray element, execute docker command but pass in step2 as second argument
-        else if (index === (csvArray.length - 1)) {
          // console.log('FINAL STEP 1: ', csvArray[index]);
-          execute(`docker exec postgres-1 bash -c "echo '${csvArray[index]}' >> ${tableName}.csv;"`, step2);
+        else if (index === csvArray.length - 1) {
+          execute(
+            `bash -c "echo '${csvArray[index]}' >> ${tableName}.csv;"`,
+            step2
+          );
          index = 0;
        }
        // otherwise we know we are not working with the first OR the last element in csvArray, so execute docker command but pass in a recursive call to our step one function and then immediately increment our index variable
        else {
          // console.log('STEP 1: ', index, csvArray[index]);
          // `docker exec postgres-1 bash -c "echo -n '${csvArray[index]}' >> ${tableName}.csv;"`
+          console.log('this is last else statement in step1 on line 230 ');
+          execute(
+            `bash -c "echo -n '${csvArray[index]}' >> ${tableName}.csv;"`,
+            step1
+          );
          index += 1;
        }
-      }
+      };
      step1();
    }
  },
-
  //maps table names from schemaLayout to sql files
  generateDummyData: (schemaLayout, dummyDataRequest, keyObject) => {
    const returnArray: any = [];
-
+
    //iterate over schemaLayout.tableNames array
    for (const tableName of schemaLayout.tableNames) {
      const tableMatrix: any = [];
let entry: any;
      //iterate over columnArray (i.e. an array of the column names for the table)
-      let columnArray: string[] = schemaLayout.tables[tableName].map(columnObj => columnObj.columnName)
+      let columnArray: string[] = schemaLayout.tables[tableName].map(
+        (columnObj) => columnObj.columnName
+      );
      for (let i = 0; i < columnArray.length; i++) {
        // declare a variable j (to be used in while loops below), set equal to zero
        let j: number = 0;
            columnData.push(j);
            // increment j
            j += 1;
-          }
+          }
        }
        // if this is a FK column, add random number between 0 and n-1 (inclusive) into column (unordered)
        else if (keyObject[tableName].foreignKeyColumns[columnArray[i]]) {
          //while j < requested number of rows
          while (j < dummyDataRequest.dummyData[tableName]) {
            //generate an entry
-            entry = Math.floor(Math.random() * (dummyDataRequest.dummyData[tableName]));
+            entry = Math.floor(
+              Math.random() * dummyDataRequest.dummyData[tableName]
+            );
            //push into columnData
            columnData.push(entry);
            j += 1;
          }
        }
        else {
          //while j < requested number of rows
          while (j < dummyDataRequest.dummyData[tableName]) {
            //generate an entry
            //push into columnData
            columnData.push(entry);
            j += 1;
-          };
+          }
        }
      }
      // otherwise, we'll just add data by the type to which the column is constrained
      else {
        while (j < dummyDataRequest.dummyData[tableName]) {
          //generate an entry
          //push into columnData
          columnData.push(entry);
          j += 1;
-        };
+        }
      }
      //push columnData array into tableMatrix
      tableMatrix.push(columnData);
      //reset columnData array for next column
      columnData = [];
-    };
+    }
      // only push something to the array if data was asked for for the specific table
      returnArray.push({tableName, data: tableMatrix});
-    };
-  };
+    }
+  }
    // then return the returnArray
    return returnArray;
-  }
-} \ No newline at end of file
+  },
+};
+ +
+
{ currentSchema={this.state.currentSchema} />
-
); } diff --git a/frontend/components/rightPanel/schemaChildren/SchemaModal.tsx b/frontend/components/rightPanel/schemaChildren/SchemaModal.tsx index 063a8058..ed15ab4c 100644 --- a/frontend/components/rightPanel/schemaChildren/SchemaModal.tsx +++ b/frontend/components/rightPanel/schemaChildren/SchemaModal.tsx @@ -55,7 +55,6 @@ class SchemaModal extends Component { let dbSafeName = schemaNameInput.toLowerCase(); dbSafeName = dbSafeName.replace(/[^A-Z0-9]/gi, ''); this.setState({ schemaName: dbSafeName }); - console.log('this is the handleSchemaName function', this.state); } // Load schema file path @@ -118,7 +117,7 @@ class SchemaModal extends Component { } selectHandler = (eventKey, e: React.SyntheticEvent) => { - this.setState({ dbCopyName: eventKey }); // + this.setState({ dbCopyName: eventKey }); // console.log('this is state in the selectHandler func', this.state); }; @@ -147,7 +146,7 @@ class SchemaModal extends Component { }; ipcRenderer.send('input-schema', schemaObj); - this.setState({ dbCopyName: `Select Instance` }); + this.setState({ dbCopyName: `Select Instance` }); this.setState({ schemaName: '' }); this.props.showModal(event); @@ -161,19 +160,19 @@ class SchemaModal extends Component { return ( - +
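The handleSchemaName hunk above is the one guard this series has against unsafe names now that psql commands run directly on the host shell: the input is lowercased and stripped to alphanumerics before it is ever interpolated into a CREATE DATABASE string. Pulled into a pure helper, the logic is easy to unit-test — a sketch (the toDbSafeName name is ours):

    // Hypothetical pure equivalent of handleSchemaName's sanitization.
    const toDbSafeName = (input: string): string =>
      input.toLowerCase().replace(/[^a-z0-9]/g, '');

    // e.g. toDbSafeName('My Test-DB!') === 'mytestdb'

One side effect worth knowing: the regex also strips underscores, which are legal in Postgres identifiers, so 'my_db' becomes 'mydb'.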