From b449733c0f2764b9a94ac24f1816588cc0054f9c Mon Sep 17 00:00:00 2001 From: Jennifer Courtner Date: Thu, 10 Dec 2020 09:31:05 -0500 Subject: [PATCH 1/3] changes to channels to use psql without docker --- backend/channels.ts | 131 ++++++++-------- backend/channels.txt | 353 +++++++++++++++++++++++++++++++++++++++++++ backend/models.ts | 229 ++++++++++++++-------------- package.json | 2 +- 4 files changed, 543 insertions(+), 172 deletions(-) create mode 100644 backend/channels.txt diff --git a/backend/channels.ts b/backend/channels.ts index a8c6c09b..50bff6e5 100644 --- a/backend/channels.ts +++ b/backend/channels.ts @@ -11,27 +11,32 @@ const db = require('./models'); // Generate CLI commands to be executed in child process. const createDBFunc = (name) => { - return `docker exec postgres-1 psql -h localhost -p 5432 -U postgres -c "CREATE DATABASE ${name}"` -} -const importFileFunc = (file) => { - return `docker cp ${file} postgres-1:/data_dump`; -} + return `psql -U postgres -c "CREATE DATABASE ${name}"`; + + // return `docker exec postgres-1 psql -h localhost -p 5432 -U postgres -c "CREATE DATABASE ${name}"`; +}; +const importFileFunc = (name, file) => { + return `psql -U postgres ${name} < ${file}`; + + // return `docker cp ${file} postgres-1:/data_dump`; +}; const runSQLFunc = (dbName) => { return `docker exec postgres-1 psql -U postgres -d ${dbName} -f /data_dump`; -} +}; const runTARFunc = (dbName) => { return `docker exec postgres-1 pg_restore -U postgres -d ${dbName} /data_dump`; -} +}; const runFullCopyFunc = (dbCopyName) => { return `docker exec postgres-1 pg_dump -U postgres ${dbCopyName} -f /data_dump`; -} +}; const runHollowCopyFunc = (dbCopyName) => { return `docker exec postgres-1 pg_dump -s -U postgres ${dbCopyName} -f /data_dump`; -} +}; // Function to execute commands in the child process. const execute = (str: string, nextStep: any) => { exec(str, (error, stdout, stderr) => { + console.log('exec func', `${stdout}`); if (error) { //this shows the console error in an error message on the frontend dialog.showErrorBox(`${error.message}`, ''); @@ -44,7 +49,7 @@ const execute = (str: string, nextStep: any) => { console.log(`stderr: ${stderr}`); return; } - // console.log('exec func', `${stdout}`); + if (nextStep) nextStep(); }); }; @@ -57,32 +62,37 @@ const execute = (str: string, nextStep: any) => { let listObj: any; ipcMain.on('return-db-list', (event, args) => { - db.getLists().then(data => event.sender.send('db-lists', data)); + db.getLists().then((data) => event.sender.send('db-lists', data)); }); // Listen for skip button on Splash page. -ipcMain.on('skip-file-upload', (event) => { }); +ipcMain.on('skip-file-upload', (event) => {}); // Listen for database changes sent from the renderer upon changing tabs. ipcMain.on('change-db', (event, dbName) => { - db.changeDB(dbName) + db.changeDB(dbName); }); // Listen for file upload. Create an instance of database from pre-made .tar or .sql file. 
ipcMain.on('upload-file', (event, filePath: string) => { - // send notice to the frontend that async process has begun event.sender.send('async-started'); let dbName: string; if (process.platform === 'darwin') { - dbName = filePath[0].slice(filePath[0].lastIndexOf('/') + 1, filePath[0].lastIndexOf('.')); + dbName = filePath[0].slice( + filePath[0].lastIndexOf('/') + 1, + filePath[0].lastIndexOf('.') + ); } else { - dbName = filePath[0].slice(filePath[0].lastIndexOf('\\') + 1, filePath[0].lastIndexOf('.')); + dbName = filePath[0].slice( + filePath[0].lastIndexOf('\\') + 1, + filePath[0].lastIndexOf('.') + ); } const createDB: string = createDBFunc(dbName); - const importFile: string = importFileFunc(filePath); + const importFile: string = importFileFunc(dbName, filePath); const runSQL: string = runSQLFunc(dbName); const runTAR: string = runTARFunc(dbName); const extension: string = filePath[0].slice(filePath[0].lastIndexOf('.')); @@ -101,7 +111,7 @@ ipcMain.on('upload-file', (event, filePath: string) => { event.sender.send('switch-to-new', null); // notify frontend that async process has been completed event.sender.send('async-complete'); - }; + } // Step 4: Given the file path extension, run the appropriate command in postgres to populate db. const step4 = () => { @@ -118,7 +128,7 @@ ipcMain.on('upload-file', (event, filePath: string) => { const step2 = () => { db.changeDB(dbName); return step3(); - } + }; // Step 1: Create empty db if (extension === '.sql' || extension === '.tar') execute(createDB, step2); @@ -137,7 +147,6 @@ interface SchemaType { // OR // Listens for and handles DB copying events ipcMain.on('input-schema', (event, data: SchemaType) => { - // send notice to the frontend that async process has begun event.sender.send('async-started'); @@ -146,7 +155,7 @@ ipcMain.on('input-schema', (event, data: SchemaType) => { // generate strings that are fed into execute functions later const createDB: string = createDBFunc(dbName); - const importFile: string = importFileFunc(filePath); + const importFile: string = importFileFunc(dbName, filePath); const runSQL: string = runSQLFunc(dbName); const runTAR: string = runTARFunc(dbName); const runFullCopy: string = runFullCopyFunc(dbCopyName); @@ -156,8 +165,7 @@ ipcMain.on('input-schema', (event, data: SchemaType) => { let extension: string = ''; if (filePath.length > 0) { extension = filePath[0].slice(filePath[0].lastIndexOf('.')); - } - else extension = '.sql'; + } else extension = '.sql'; // SEQUENCE OF EXECUTING COMMANDS // Steps are in reverse order because each step is a callback function that requires the following step to be defined. 
@@ -170,7 +178,7 @@ ipcMain.on('input-schema', (event, data: SchemaType) => { event.sender.send('switch-to-new', null); // notify frontend that async process has been completed event.sender.send('async-complete'); - }; + } // Step 4: Given the file path extension, run the appropriate command in postgres to build the db const step4 = () => { @@ -186,7 +194,7 @@ ipcMain.on('input-schema', (event, data: SchemaType) => { const step3Copy = () => { db.changeDB(dbName); return step4(); - } + }; // Step 2: Change curent URI to match newly created DB const step2 = () => { @@ -210,8 +218,8 @@ ipcMain.on('input-schema', (event, data: SchemaType) => { // and now that we have changed to the new db, we can move on to importing the data file db.changeDB(dbName); return step3(); - } - } + } + }; // Step 1 : Create empty db execute(createDB, step2); @@ -226,7 +234,6 @@ interface QueryType { } ipcMain.on('execute-query-untracked', (event, data: QueryType) => { - // send notice to front end that query has been started event.sender.send('async-started'); @@ -249,7 +256,6 @@ ipcMain.on('execute-query-untracked', (event, data: QueryType) => { // Listen for queries being sent from renderer ipcMain.on('execute-query-tracked', (event, data: QueryType) => { - // send notice to front end that query has been started event.sender.send('async-started'); @@ -272,18 +278,19 @@ ipcMain.on('execute-query-tracked', (event, data: QueryType) => { frontendData.queryData = queryData.rows; if (!queryString.match(/create/i)) { // Run EXPLAIN (FORMAT JSON, ANALYZE) - db.query('EXPLAIN (FORMAT JSON, ANALYZE) ' + queryString) - .then((queryStats) => { + db.query('EXPLAIN (FORMAT JSON, ANALYZE) ' + queryString).then( + (queryStats) => { frontendData.queryStatistics = queryStats.rows; (async function getListAsync() { listObj = await db.getLists(); frontendData.lists = listObj; - event.sender.send('db-lists', listObj) + event.sender.send('db-lists', listObj); event.sender.send('return-execute-query', frontendData); event.sender.send('async-complete'); })(); - }) + } + ); } else { // Handling for tracking a create table query, can't run explain/analyze on create statements (async function getListAsync() { @@ -297,7 +304,6 @@ ipcMain.on('execute-query-tracked', (event, data: QueryType) => { .catch((error: string) => { console.log('ERROR in execute-query-tracked channel in main.ts', error); }); - }); interface dummyDataRequest { @@ -306,37 +312,42 @@ interface dummyDataRequest { } ipcMain.on('generate-dummy-data', (event: any, data: dummyDataRequest) => { - // send notice to front end that DD generation has been started event.sender.send('async-started'); let schemaLayout: any; let dummyDataRequest: dummyDataRequest = data; let tableMatricesArray: any; - let keyObject: any = "Unresolved"; - - db.createKeyObject() - .then((result) => { - // set keyObject equal to the result of this query - keyObject = result; - db.dropKeyColumns(keyObject) - .then(() => { - db.addNewKeyColumns(keyObject) - .then(() => { - db.getSchemaLayout() - .then((result) => { - schemaLayout = result; - // generate the dummy data and save it into matrices associated with table names - tableMatricesArray = generateDummyData(schemaLayout, dummyDataRequest, keyObject); - //iterate through tableMatricesArray to write individual .csv files - for (const tableObject of tableMatricesArray) { - // write all entries in tableMatrix to csv file - writeCSVFile(tableObject, schemaLayout, keyObject, dummyDataRequest, event); - } - }); - }); + let keyObject: any = 'Unresolved'; + + 
db.createKeyObject().then((result) => { + // set keyObject equal to the result of this query + keyObject = result; + db.dropKeyColumns(keyObject).then(() => { + db.addNewKeyColumns(keyObject).then(() => { + db.getSchemaLayout().then((result) => { + schemaLayout = result; + // generate the dummy data and save it into matrices associated with table names + tableMatricesArray = generateDummyData( + schemaLayout, + dummyDataRequest, + keyObject + ); + //iterate through tableMatricesArray to write individual .csv files + for (const tableObject of tableMatricesArray) { + // write all entries in tableMatrix to csv file + writeCSVFile( + tableObject, + schemaLayout, + keyObject, + dummyDataRequest, + event + ); + } }); - }) -}) + }); + }); + }); +}); -export default execute; \ No newline at end of file +export default execute; diff --git a/backend/channels.txt b/backend/channels.txt new file mode 100644 index 00000000..50bff6e5 --- /dev/null +++ b/backend/channels.txt @@ -0,0 +1,353 @@ +// Import parts of electron to use +import { dialog, ipcMain } from 'electron'; + +const { generateDummyData, writeCSVFile } = require('./DummyD/dummyDataMain'); +const { exec } = require('child_process'); +const db = require('./models'); + +/************************************************************ + *********************** Helper functions ******************* + ************************************************************/ + +// Generate CLI commands to be executed in child process. +const createDBFunc = (name) => { + return `psql -U postgres -c "CREATE DATABASE ${name}"`; + + // return `docker exec postgres-1 psql -h localhost -p 5432 -U postgres -c "CREATE DATABASE ${name}"`; +}; +const importFileFunc = (name, file) => { + return `psql -U postgres ${name} < ${file}`; + + // return `docker cp ${file} postgres-1:/data_dump`; +}; +const runSQLFunc = (dbName) => { + return `docker exec postgres-1 psql -U postgres -d ${dbName} -f /data_dump`; +}; +const runTARFunc = (dbName) => { + return `docker exec postgres-1 pg_restore -U postgres -d ${dbName} /data_dump`; +}; +const runFullCopyFunc = (dbCopyName) => { + return `docker exec postgres-1 pg_dump -U postgres ${dbCopyName} -f /data_dump`; +}; +const runHollowCopyFunc = (dbCopyName) => { + return `docker exec postgres-1 pg_dump -s -U postgres ${dbCopyName} -f /data_dump`; +}; + +// Function to execute commands in the child process. +const execute = (str: string, nextStep: any) => { + exec(str, (error, stdout, stderr) => { + console.log('exec func', `${stdout}`); + if (error) { + //this shows the console error in an error message on the frontend + dialog.showErrorBox(`${error.message}`, ''); + console.log(`error: ${error.message}`); + return; + } + if (stderr) { + //this shows the console error in an error message on the frontend + dialog.showErrorBox(`${stderr}`, ''); + console.log(`stderr: ${stderr}`); + return; + } + + if (nextStep) nextStep(); + }); +}; + +/************************************************************ + *********************** IPC CHANNELS *********************** + ************************************************************/ + +// Global variable to store list of databases and tables to provide to frontend upon refreshing view. +let listObj: any; + +ipcMain.on('return-db-list', (event, args) => { + db.getLists().then((data) => event.sender.send('db-lists', data)); +}); + +// Listen for skip button on Splash page. +ipcMain.on('skip-file-upload', (event) => {}); + +// Listen for database changes sent from the renderer upon changing tabs. 
+ipcMain.on('change-db', (event, dbName) => { + db.changeDB(dbName); +}); + +// Listen for file upload. Create an instance of database from pre-made .tar or .sql file. +ipcMain.on('upload-file', (event, filePath: string) => { + // send notice to the frontend that async process has begun + event.sender.send('async-started'); + + let dbName: string; + if (process.platform === 'darwin') { + dbName = filePath[0].slice( + filePath[0].lastIndexOf('/') + 1, + filePath[0].lastIndexOf('.') + ); + } else { + dbName = filePath[0].slice( + filePath[0].lastIndexOf('\\') + 1, + filePath[0].lastIndexOf('.') + ); + } + + const createDB: string = createDBFunc(dbName); + const importFile: string = importFileFunc(dbName, filePath); + const runSQL: string = runSQLFunc(dbName); + const runTAR: string = runTARFunc(dbName); + const extension: string = filePath[0].slice(filePath[0].lastIndexOf('.')); + + // SEQUENCE OF EXECUTING COMMANDS + // Steps are in reverse order because each step is a callback function that requires the following step to be defined. + + // Step 5: Changes the pg URI the newly created database, queries new database, then sends list of tables and list of databases to frontend. + async function sendLists() { + listObj = await db.getLists(); + console.log('channels: ', listObj); + event.sender.send('db-lists', listObj); + // Send schema name back to frontend, so frontend can load tab name. + event.sender.send('return-schema-name', dbName); + // tell the front end to switch tabs to the newly created database + event.sender.send('switch-to-new', null); + // notify frontend that async process has been completed + event.sender.send('async-complete'); + } + + // Step 4: Given the file path extension, run the appropriate command in postgres to populate db. + const step4 = () => { + let runCmd: string = ''; + if (extension === '.sql') runCmd = runSQL; + else if (extension === '.tar') runCmd = runTAR; + execute(runCmd, sendLists); + }; + + // Step 3: Import database file from file path into docker container + const step3 = () => execute(importFile, step4); + + // Step 2: Change curent URI to match newly created DB + const step2 = () => { + db.changeDB(dbName); + return step3(); + }; + + // Step 1: Create empty db + if (extension === '.sql' || extension === '.tar') execute(createDB, step2); + else console.log('INVALID FILE TYPE: Please use .tar or .sql extensions.'); +}); + +interface SchemaType { + schemaName: string; + schemaFilePath: string[]; + schemaEntry: string; + dbCopyName: string; + copy: boolean; +} + +// The following function creates an instance of database from pre-made .tar or .sql file. 
+// OR +// Listens for and handles DB copying events +ipcMain.on('input-schema', (event, data: SchemaType) => { + // send notice to the frontend that async process has begun + event.sender.send('async-started'); + + const { schemaName: dbName, dbCopyName, copy } = data; + let { schemaFilePath: filePath } = data; + + // generate strings that are fed into execute functions later + const createDB: string = createDBFunc(dbName); + const importFile: string = importFileFunc(dbName, filePath); + const runSQL: string = runSQLFunc(dbName); + const runTAR: string = runTARFunc(dbName); + const runFullCopy: string = runFullCopyFunc(dbCopyName); + const runHollowCopy: string = runHollowCopyFunc(dbCopyName); + + // determine if the file is a sql or a tar file, in the case of a copy, we will not have a filepath so we just hard-code the extension to be sql + let extension: string = ''; + if (filePath.length > 0) { + extension = filePath[0].slice(filePath[0].lastIndexOf('.')); + } else extension = '.sql'; + + // SEQUENCE OF EXECUTING COMMANDS + // Steps are in reverse order because each step is a callback function that requires the following step to be defined. + + // Step 5: Changes the pg URI to look to the newly created database and queries all the tables in that database and sends it to frontend. + async function sendLists() { + listObj = await db.getLists(); + event.sender.send('db-lists', listObj); + // tell the front end to switch tabs to the newly created database + event.sender.send('switch-to-new', null); + // notify frontend that async process has been completed + event.sender.send('async-complete'); + } + + // Step 4: Given the file path extension, run the appropriate command in postgres to build the db + const step4 = () => { + let runCmd: string = ''; + if (extension === '.sql') runCmd = runSQL; + else if (extension === '.tar') runCmd = runTAR; + execute(runCmd, sendLists); + }; + + // Step 3: Import database file from file path into docker container + const step3 = () => execute(importFile, step4); + // skip step three which is only for importing files and instead change the current db to the newly created one + const step3Copy = () => { + db.changeDB(dbName); + return step4(); + }; + + // Step 2: Change curent URI to match newly created DB + const step2 = () => { + // if we are copying + if (copy !== undefined) { + // first, we need to change the current DB instance to that of the one we need to copy, so we'll head to the changeDB function in the models file + db.changeDB(dbCopyName); + // now that our DB has been changed to the one we wish to copy, we need to either make an exact copy or a hollow copy using pg_dump OR pg_dump -s + // this generates a pg_dump file from the specified db and saves it to a location in the container. 
+ // Full copy case + if (copy) { + execute(runFullCopy, step3Copy); + } + // Hollow copy case + else execute(runHollowCopy, step3Copy); + return; + } + // if we are not copying + else { + // change the current database back to the newly created one + // and now that we have changed to the new db, we can move on to importing the data file + db.changeDB(dbName); + return step3(); + } + }; + + // Step 1 : Create empty db + execute(createDB, step2); +}); + +interface QueryType { + queryCurrentSchema: string; + queryString: string; + queryLabel: string; + queryData: string; + queryStatistics: string; +} + +ipcMain.on('execute-query-untracked', (event, data: QueryType) => { + // send notice to front end that query has been started + event.sender.send('async-started'); + + // destructure object from frontend + const { queryString } = data; + // run query on db + db.query(queryString) + .then(() => { + (async function getListAsync() { + listObj = await db.getLists(); + event.sender.send('db-lists', listObj); + event.sender.send('async-complete'); + })(); + }) + .catch((error: string) => { + console.log('ERROR in execute-query-untracked channel in main.ts', error); + event.sender.send('query-error', 'Error executing query.'); + }); +}); + +// Listen for queries being sent from renderer +ipcMain.on('execute-query-tracked', (event, data: QueryType) => { + // send notice to front end that query has been started + event.sender.send('async-started'); + + // destructure object from frontend + const { queryString, queryCurrentSchema, queryLabel } = data; + + // initialize object to store all data to send to frontend + let frontendData = { + queryString, + queryCurrentSchema, + queryLabel, + queryData: '', + queryStatistics: '', + lists: {}, + }; + + // Run select * from actors; + db.query(queryString) + .then((queryData) => { + frontendData.queryData = queryData.rows; + if (!queryString.match(/create/i)) { + // Run EXPLAIN (FORMAT JSON, ANALYZE) + db.query('EXPLAIN (FORMAT JSON, ANALYZE) ' + queryString).then( + (queryStats) => { + frontendData.queryStatistics = queryStats.rows; + + (async function getListAsync() { + listObj = await db.getLists(); + frontendData.lists = listObj; + event.sender.send('db-lists', listObj); + event.sender.send('return-execute-query', frontendData); + event.sender.send('async-complete'); + })(); + } + ); + } else { + // Handling for tracking a create table query, can't run explain/analyze on create statements + (async function getListAsync() { + listObj = await db.getLists(); + frontendData.lists = listObj; + event.sender.send('db-lists', listObj); + event.sender.send('async-complete'); + })(); + } + }) + .catch((error: string) => { + console.log('ERROR in execute-query-tracked channel in main.ts', error); + }); +}); + +interface dummyDataRequest { + schemaName: string; + dummyData: {}; +} + +ipcMain.on('generate-dummy-data', (event: any, data: dummyDataRequest) => { + // send notice to front end that DD generation has been started + event.sender.send('async-started'); + + let schemaLayout: any; + let dummyDataRequest: dummyDataRequest = data; + let tableMatricesArray: any; + let keyObject: any = 'Unresolved'; + + db.createKeyObject().then((result) => { + // set keyObject equal to the result of this query + keyObject = result; + db.dropKeyColumns(keyObject).then(() => { + db.addNewKeyColumns(keyObject).then(() => { + db.getSchemaLayout().then((result) => { + schemaLayout = result; + // generate the dummy data and save it into matrices associated with table names + 
tableMatricesArray = generateDummyData( + schemaLayout, + dummyDataRequest, + keyObject + ); + //iterate through tableMatricesArray to write individual .csv files + for (const tableObject of tableMatricesArray) { + // write all entries in tableMatrix to csv file + writeCSVFile( + tableObject, + schemaLayout, + keyObject, + dummyDataRequest, + event + ); + } + }); + }); + }); + }); +}); + +export default execute; diff --git a/backend/models.ts b/backend/models.ts index 772d0b9b..0a753849 100644 --- a/backend/models.ts +++ b/backend/models.ts @@ -1,5 +1,5 @@ const { Pool } = require('pg'); -const { getPrimaryKeys, getForeignKeys } = require('./DummyD/foreign_key_info') +const { getPrimaryKeys, getForeignKeys } = require('./DummyD/foreign_key_info'); // Initialize to a default db. // URI Format: postgres://username:password@hostname:port/databasename @@ -9,50 +9,52 @@ let pool: any = new Pool({ connectionString: PG_URI }); //helper function that creates the column objects, which are saved to the schemaLayout object //this function returns a promise to be resolved with Promise.all syntax const getColumnObjects = (tableName: string) => { - const queryString = "SELECT column_name, data_type, character_maximum_length FROM information_schema.columns WHERE table_name = $1;"; + const queryString = + 'SELECT column_name, data_type, character_maximum_length FROM information_schema.columns WHERE table_name = $1;'; const value = [tableName]; - return new Promise ((resolve) => { - pool - .query(queryString, value) - .then((result) => { - const columnInfoArray: any = []; - for (let i = 0; i < result.rows.length; i++) { - const columnObj: any = { - columnName: result.rows[i].column_name, - dataInfo: { - data_type: result.rows[i].data_type, - character_maxiumum_length: result.rows[i].character_maxiumum_length - } - } - columnInfoArray.push(columnObj) - } - resolve(columnInfoArray); - }) - }) -} + return new Promise((resolve) => { + pool.query(queryString, value).then((result) => { + const columnInfoArray: any = []; + for (let i = 0; i < result.rows.length; i++) { + const columnObj: any = { + columnName: result.rows[i].column_name, + dataInfo: { + data_type: result.rows[i].data_type, + character_maxiumum_length: result.rows[i].character_maxiumum_length, + }, + }; + columnInfoArray.push(columnObj); + } + resolve(columnInfoArray); + }); + }); +}; // gets all the names of the current postgres instances const getDBNames = () => { - return new Promise((resolve) =>{ - pool - .query('SELECT datname FROM pg_database;') - .then((databases) => { - let dbList: any = []; - for (let i = 0; i < databases.rows.length; ++i) { - let curName = databases.rows[i].datname; - if (curName !== 'postgres' && curName !== 'template0' && curName !== 'template1') - dbList.push(databases.rows[i].datname); - } - resolve(dbList); - }) - }) -} + return new Promise((resolve) => { + pool.query('SELECT datname FROM pg_database;').then((databases) => { + let dbList: any = []; + for (let i = 0; i < databases.rows.length; ++i) { + let curName = databases.rows[i].datname; + if ( + curName !== 'postgres' && + curName !== 'template0' && + curName !== 'template1' + ) + dbList.push(databases.rows[i].datname); + } + resolve(dbList); + }); + }); +}; // gets all tablenames from currentschema const getDBLists = () => { return new Promise((resolve) => { pool - .query("SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' ORDER BY table_name;" + .query( + "SELECT table_name FROM information_schema.tables WHERE table_schema = 
'public' ORDER BY table_name;" ) .then((tables) => { let tableList: any = []; @@ -60,12 +62,11 @@ const getDBLists = () => { tableList.push(tables.rows[i].table_name); } resolve(tableList); - }) - }) -} + }); + }); +}; module.exports = { - query: (text, params, callback) => { console.log('Executed query: ', text); return pool.query(text, params, callback); @@ -84,57 +85,61 @@ module.exports = { tableList: [], // current database's tables databaseList: [], }; - Promise.all([getDBNames(), getDBLists()]) - .then((data) => { - console.log('models: ', data); - listObj.databaseList = data[0]; - listObj.tableList = data[1]; - resolve(listObj); - }) - }) - }, + Promise.all([getDBNames(), getDBLists()]).then((data) => { + console.log('models: ', data); + listObj.databaseList = data[0]; + listObj.tableList = data[1]; + resolve(listObj); + }); + }); + }, - createKeyObject: () => { return new Promise((resolve) => { // initialize the keyObject we eventually want to return out - const keyObject: any = {}; + const keyObject: any = {}; pool .query(getPrimaryKeys, null) .then((result) => { let table; - let pkColumn + let pkColumn; // iterate over the primary key table, adding info to our keyObject for (let i = 0; i < result.rows.length; i++) { table = result.rows[i].table_name; pkColumn = result.rows[i].pk_column; // if the table is not yet initialized within the keyObject, then initialize it - if (!keyObject[table]) keyObject[table] = {primaryKeyColumns: {}, foreignKeyColumns: {}}; + if (!keyObject[table]) + keyObject[table] = { + primaryKeyColumns: {}, + foreignKeyColumns: {}, + }; // then just set the value at the pk column name to true for later checking keyObject[table].primaryKeyColumns[pkColumn] = true; } - }) - .then(() => { - pool - .query(getForeignKeys, null) - .then((result) => { - let table; - let primaryTable; - let fkColumn; - // iterate over the foreign key table, adding info to our keyObject - for (let i = 0; i < result.rows.length; i++) { - table = result.rows[i].foreign_table; - primaryTable = result.rows[i].primary_table - fkColumn = result.rows[i].fk_column; - // if the table is not yet initialized within the keyObject, then initialize it - if (!keyObject[table]) keyObject[table] = {primaryKeyColumns: {}, foreignKeyColumns: {}}; - // then set the value at the fk column name to the number of rows asked for in the primary table to which it points - keyObject[table].foreignKeyColumns[fkColumn] = primaryTable; - } - resolve(keyObject); - }) }) - }) + .then(() => { + pool.query(getForeignKeys, null).then((result) => { + let table; + let primaryTable; + let fkColumn; + // iterate over the foreign key table, adding info to our keyObject + for (let i = 0; i < result.rows.length; i++) { + table = result.rows[i].foreign_table; + primaryTable = result.rows[i].primary_table; + fkColumn = result.rows[i].fk_column; + // if the table is not yet initialized within the keyObject, then initialize it + if (!keyObject[table]) + keyObject[table] = { + primaryKeyColumns: {}, + foreignKeyColumns: {}, + }; + // then set the value at the fk column name to the number of rows asked for in the primary table to which it points + keyObject[table].foreignKeyColumns[fkColumn] = primaryTable; + } + resolve(keyObject); + }); + }); + }); }, dropKeyColumns: async (keyObject: any) => { @@ -143,21 +148,21 @@ module.exports = { let queryString = `ALTER TABLE ${table}`; let count: number = 2; - for (const pkc in keyObject[table].primaryKeyColumns){ + for (const pkc in keyObject[table].primaryKeyColumns) { if (count > 2) 
queryString += ','; queryString += ` DROP COLUMN ${pkc} CASCADE`; count += 1; } - for (const fkc in keyObject[table].foreignKeyColumns){ + for (const fkc in keyObject[table].foreignKeyColumns) { if (count > 2) queryString += ','; - queryString += ` DROP COLUMN ${fkc}` + queryString += ` DROP COLUMN ${fkc}`; count += 1; } - queryString += ';' - + queryString += ';'; + return Promise.resolve(pool.query(queryString)); - } - + }; + // iterate over tables, running drop queries, and pushing a new promise to promise array for (const table in keyObject) { await generateAndRunDropQuery(table); @@ -166,29 +171,29 @@ module.exports = { return; }, - addNewKeyColumns: async (keyObject: any) => { + addNewKeyColumns: async (keyObject: any) => { // define helper function to generate and run query const generateAndRunAddQuery = (table: string) => { let queryString = `ALTER TABLE ${table}`; let count: number = 2; - for (const pkc in keyObject[table].primaryKeyColumns){ + for (const pkc in keyObject[table].primaryKeyColumns) { if (count > 2) queryString += ','; queryString += ` ADD COLUMN ${pkc} INT`; count += 1; } - for (const fkc in keyObject[table].foreignKeyColumns){ + for (const fkc in keyObject[table].foreignKeyColumns) { if (count > 2) queryString += ','; - queryString += ` ADD COLUMN ${fkc} INT` + queryString += ` ADD COLUMN ${fkc} INT`; count += 1; } - queryString += ';' - + queryString += ';'; + return Promise.resolve(pool.query(queryString)); - } - + }; + // iterate over tables, running drop queries, and pushing a new promise to promise array - for (const table in keyObject){ + for (const table in keyObject) { await generateAndRunAddQuery(table); } @@ -202,7 +207,7 @@ module.exports = { tableNames: [], tables: { // tableName: [columnObj array] - } + }, }; pool // This query returns the names of all the tables in the database @@ -216,21 +221,20 @@ module.exports = { } const promiseArray: any = []; for (let tableName of schemaLayout.tableNames) { - promiseArray.push(getColumnObjects(tableName)) + promiseArray.push(getColumnObjects(tableName)); } //we resolve all of the promises for the data info, and are returned an array of column data objects - Promise.all(promiseArray) - .then((columnInfo) => { - //here, we create a key for each table name and assign the array of column objects to the corresponding table name - for (let i = 0; i < columnInfo.length; i++) { - schemaLayout.tables[schemaLayout.tableNames[i]] = columnInfo[i]; - } - resolve(schemaLayout); - }) + Promise.all(promiseArray).then((columnInfo) => { + //here, we create a key for each table name and assign the array of column objects to the corresponding table name + for (let i = 0; i < columnInfo.length; i++) { + schemaLayout.tables[schemaLayout.tableNames[i]] = columnInfo[i]; + } + resolve(schemaLayout); + }); }) .catch(() => { - console.log('error in models.ts') - }) + console.log('error in models.ts'); + }); }); }, @@ -241,17 +245,17 @@ module.exports = { if (Object.keys(keyObject[tableName].primaryKeyColumns).length) { let queryString: string = `ALTER TABLE ${tableName} `; let count: number = 0; - + for (const pk in keyObject[tableName].primaryKeyColumns) { if (count > 0) queryString += `, `; queryString += `ADD CONSTRAINT "${tableName}_pk${count}" PRIMARY KEY ("${pk}")`; count += 1; } - + queryString += `;`; // wait for the previous query to return before moving on to the next table await pool.query(queryString); - } + } } } return; @@ -261,18 +265,21 @@ module.exports = { // iterate over table's keyObject property, add foreign key 
constraints for (const tableName of Object.keys(dummyDataRequest.dummyData)) { if (keyObject[tableName]) { - if (Object.keys(keyObject[tableName].foreignKeyColumns).length) { + if (Object.keys(keyObject[tableName].foreignKeyColumns).length) { let queryString: string = `ALTER TABLE ${tableName} `; let count: number = 0; for (const fk in keyObject[tableName].foreignKeyColumns) { - let primaryTable: string = keyObject[tableName].foreignKeyColumns[fk]; - let primaryKey: any = Object.keys(keyObject[primaryTable].primaryKeyColumns)[0]; + let primaryTable: string = + keyObject[tableName].foreignKeyColumns[fk]; + let primaryKey: any = Object.keys( + keyObject[primaryTable].primaryKeyColumns + )[0]; if (count > 0) queryString += `, `; queryString += `ADD CONSTRAINT "${tableName}_fk${count}" FOREIGN KEY ("${fk}") REFERENCES ${primaryTable}("${primaryKey}")`; count += 1; } - + queryString += `;`; // wait for the previous query to return before moving on to the next table await pool.query(queryString); @@ -280,5 +287,5 @@ module.exports = { } } return; - } -} \ No newline at end of file + }, +}; diff --git a/package.json b/package.json index 6992a92f..1327a46a 100644 --- a/package.json +++ b/package.json @@ -32,7 +32,7 @@ "chart.js": "^2.9.3", "codemirror": "^5.57.0", "concurrently": "^5.3.0", - "cross-env": "^7.0.2", + "cross-env": "^7.0.3", "electron-store": "^6.0.0", "faker": "^5.1.0", "fix-path": "^3.0.0", From 89a17891bf6d1330fb07cdd19fd9af9b22f4e5ac Mon Sep 17 00:00:00 2001 From: Jennifer Courtner Date: Thu, 10 Dec 2020 12:19:05 -0500 Subject: [PATCH 2/3] added comments about changes --- backend/channels.ts | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/backend/channels.ts b/backend/channels.ts index 50bff6e5..f0dbc77c 100644 --- a/backend/channels.ts +++ b/backend/channels.ts @@ -15,6 +15,8 @@ const createDBFunc = (name) => { // return `docker exec postgres-1 psql -h localhost -p 5432 -U postgres -c "CREATE DATABASE ${name}"`; }; + +// added "name" as a parameter for importFileFunc const importFileFunc = (name, file) => { return `psql -U postgres ${name} < ${file}`; @@ -92,6 +94,8 @@ ipcMain.on('upload-file', (event, filePath: string) => { } const createDB: string = createDBFunc(dbName); + + // added dbName to importFile const importFile: string = importFileFunc(dbName, filePath); const runSQL: string = runSQLFunc(dbName); const runTAR: string = runTARFunc(dbName); @@ -155,6 +159,7 @@ ipcMain.on('input-schema', (event, data: SchemaType) => { // generate strings that are fed into execute functions later const createDB: string = createDBFunc(dbName); + //added dbName to importFile const importFile: string = importFileFunc(dbName, filePath); const runSQL: string = runSQLFunc(dbName); const runTAR: string = runTARFunc(dbName); From 1a30c29544e16eac7ce4a9a7917feffc076db414 Mon Sep 17 00:00:00 2001 From: kklochan <32072621+kklochan@users.noreply.github.com> Date: Thu, 10 Dec 2020 12:22:56 -0500 Subject: [PATCH 3/3] Delete channels.txt --- backend/channels.txt | 353 ------------------------------------------- 1 file changed, 353 deletions(-) delete mode 100644 backend/channels.txt diff --git a/backend/channels.txt b/backend/channels.txt deleted file mode 100644 index 50bff6e5..00000000 --- a/backend/channels.txt +++ /dev/null @@ -1,353 +0,0 @@ -// Import parts of electron to use -import { dialog, ipcMain } from 'electron'; - -const { generateDummyData, writeCSVFile } = require('./DummyD/dummyDataMain'); -const { exec } = require('child_process'); -const db = require('./models'); - 
-/************************************************************ - *********************** Helper functions ******************* - ************************************************************/ - -// Generate CLI commands to be executed in child process. -const createDBFunc = (name) => { - return `psql -U postgres -c "CREATE DATABASE ${name}"`; - - // return `docker exec postgres-1 psql -h localhost -p 5432 -U postgres -c "CREATE DATABASE ${name}"`; -}; -const importFileFunc = (name, file) => { - return `psql -U postgres ${name} < ${file}`; - - // return `docker cp ${file} postgres-1:/data_dump`; -}; -const runSQLFunc = (dbName) => { - return `docker exec postgres-1 psql -U postgres -d ${dbName} -f /data_dump`; -}; -const runTARFunc = (dbName) => { - return `docker exec postgres-1 pg_restore -U postgres -d ${dbName} /data_dump`; -}; -const runFullCopyFunc = (dbCopyName) => { - return `docker exec postgres-1 pg_dump -U postgres ${dbCopyName} -f /data_dump`; -}; -const runHollowCopyFunc = (dbCopyName) => { - return `docker exec postgres-1 pg_dump -s -U postgres ${dbCopyName} -f /data_dump`; -}; - -// Function to execute commands in the child process. -const execute = (str: string, nextStep: any) => { - exec(str, (error, stdout, stderr) => { - console.log('exec func', `${stdout}`); - if (error) { - //this shows the console error in an error message on the frontend - dialog.showErrorBox(`${error.message}`, ''); - console.log(`error: ${error.message}`); - return; - } - if (stderr) { - //this shows the console error in an error message on the frontend - dialog.showErrorBox(`${stderr}`, ''); - console.log(`stderr: ${stderr}`); - return; - } - - if (nextStep) nextStep(); - }); -}; - -/************************************************************ - *********************** IPC CHANNELS *********************** - ************************************************************/ - -// Global variable to store list of databases and tables to provide to frontend upon refreshing view. -let listObj: any; - -ipcMain.on('return-db-list', (event, args) => { - db.getLists().then((data) => event.sender.send('db-lists', data)); -}); - -// Listen for skip button on Splash page. -ipcMain.on('skip-file-upload', (event) => {}); - -// Listen for database changes sent from the renderer upon changing tabs. -ipcMain.on('change-db', (event, dbName) => { - db.changeDB(dbName); -}); - -// Listen for file upload. Create an instance of database from pre-made .tar or .sql file. -ipcMain.on('upload-file', (event, filePath: string) => { - // send notice to the frontend that async process has begun - event.sender.send('async-started'); - - let dbName: string; - if (process.platform === 'darwin') { - dbName = filePath[0].slice( - filePath[0].lastIndexOf('/') + 1, - filePath[0].lastIndexOf('.') - ); - } else { - dbName = filePath[0].slice( - filePath[0].lastIndexOf('\\') + 1, - filePath[0].lastIndexOf('.') - ); - } - - const createDB: string = createDBFunc(dbName); - const importFile: string = importFileFunc(dbName, filePath); - const runSQL: string = runSQLFunc(dbName); - const runTAR: string = runTARFunc(dbName); - const extension: string = filePath[0].slice(filePath[0].lastIndexOf('.')); - - // SEQUENCE OF EXECUTING COMMANDS - // Steps are in reverse order because each step is a callback function that requires the following step to be defined. - - // Step 5: Changes the pg URI the newly created database, queries new database, then sends list of tables and list of databases to frontend. 
- async function sendLists() { - listObj = await db.getLists(); - console.log('channels: ', listObj); - event.sender.send('db-lists', listObj); - // Send schema name back to frontend, so frontend can load tab name. - event.sender.send('return-schema-name', dbName); - // tell the front end to switch tabs to the newly created database - event.sender.send('switch-to-new', null); - // notify frontend that async process has been completed - event.sender.send('async-complete'); - } - - // Step 4: Given the file path extension, run the appropriate command in postgres to populate db. - const step4 = () => { - let runCmd: string = ''; - if (extension === '.sql') runCmd = runSQL; - else if (extension === '.tar') runCmd = runTAR; - execute(runCmd, sendLists); - }; - - // Step 3: Import database file from file path into docker container - const step3 = () => execute(importFile, step4); - - // Step 2: Change curent URI to match newly created DB - const step2 = () => { - db.changeDB(dbName); - return step3(); - }; - - // Step 1: Create empty db - if (extension === '.sql' || extension === '.tar') execute(createDB, step2); - else console.log('INVALID FILE TYPE: Please use .tar or .sql extensions.'); -}); - -interface SchemaType { - schemaName: string; - schemaFilePath: string[]; - schemaEntry: string; - dbCopyName: string; - copy: boolean; -} - -// The following function creates an instance of database from pre-made .tar or .sql file. -// OR -// Listens for and handles DB copying events -ipcMain.on('input-schema', (event, data: SchemaType) => { - // send notice to the frontend that async process has begun - event.sender.send('async-started'); - - const { schemaName: dbName, dbCopyName, copy } = data; - let { schemaFilePath: filePath } = data; - - // generate strings that are fed into execute functions later - const createDB: string = createDBFunc(dbName); - const importFile: string = importFileFunc(dbName, filePath); - const runSQL: string = runSQLFunc(dbName); - const runTAR: string = runTARFunc(dbName); - const runFullCopy: string = runFullCopyFunc(dbCopyName); - const runHollowCopy: string = runHollowCopyFunc(dbCopyName); - - // determine if the file is a sql or a tar file, in the case of a copy, we will not have a filepath so we just hard-code the extension to be sql - let extension: string = ''; - if (filePath.length > 0) { - extension = filePath[0].slice(filePath[0].lastIndexOf('.')); - } else extension = '.sql'; - - // SEQUENCE OF EXECUTING COMMANDS - // Steps are in reverse order because each step is a callback function that requires the following step to be defined. - - // Step 5: Changes the pg URI to look to the newly created database and queries all the tables in that database and sends it to frontend. 
- async function sendLists() { - listObj = await db.getLists(); - event.sender.send('db-lists', listObj); - // tell the front end to switch tabs to the newly created database - event.sender.send('switch-to-new', null); - // notify frontend that async process has been completed - event.sender.send('async-complete'); - } - - // Step 4: Given the file path extension, run the appropriate command in postgres to build the db - const step4 = () => { - let runCmd: string = ''; - if (extension === '.sql') runCmd = runSQL; - else if (extension === '.tar') runCmd = runTAR; - execute(runCmd, sendLists); - }; - - // Step 3: Import database file from file path into docker container - const step3 = () => execute(importFile, step4); - // skip step three which is only for importing files and instead change the current db to the newly created one - const step3Copy = () => { - db.changeDB(dbName); - return step4(); - }; - - // Step 2: Change curent URI to match newly created DB - const step2 = () => { - // if we are copying - if (copy !== undefined) { - // first, we need to change the current DB instance to that of the one we need to copy, so we'll head to the changeDB function in the models file - db.changeDB(dbCopyName); - // now that our DB has been changed to the one we wish to copy, we need to either make an exact copy or a hollow copy using pg_dump OR pg_dump -s - // this generates a pg_dump file from the specified db and saves it to a location in the container. - // Full copy case - if (copy) { - execute(runFullCopy, step3Copy); - } - // Hollow copy case - else execute(runHollowCopy, step3Copy); - return; - } - // if we are not copying - else { - // change the current database back to the newly created one - // and now that we have changed to the new db, we can move on to importing the data file - db.changeDB(dbName); - return step3(); - } - }; - - // Step 1 : Create empty db - execute(createDB, step2); -}); - -interface QueryType { - queryCurrentSchema: string; - queryString: string; - queryLabel: string; - queryData: string; - queryStatistics: string; -} - -ipcMain.on('execute-query-untracked', (event, data: QueryType) => { - // send notice to front end that query has been started - event.sender.send('async-started'); - - // destructure object from frontend - const { queryString } = data; - // run query on db - db.query(queryString) - .then(() => { - (async function getListAsync() { - listObj = await db.getLists(); - event.sender.send('db-lists', listObj); - event.sender.send('async-complete'); - })(); - }) - .catch((error: string) => { - console.log('ERROR in execute-query-untracked channel in main.ts', error); - event.sender.send('query-error', 'Error executing query.'); - }); -}); - -// Listen for queries being sent from renderer -ipcMain.on('execute-query-tracked', (event, data: QueryType) => { - // send notice to front end that query has been started - event.sender.send('async-started'); - - // destructure object from frontend - const { queryString, queryCurrentSchema, queryLabel } = data; - - // initialize object to store all data to send to frontend - let frontendData = { - queryString, - queryCurrentSchema, - queryLabel, - queryData: '', - queryStatistics: '', - lists: {}, - }; - - // Run select * from actors; - db.query(queryString) - .then((queryData) => { - frontendData.queryData = queryData.rows; - if (!queryString.match(/create/i)) { - // Run EXPLAIN (FORMAT JSON, ANALYZE) - db.query('EXPLAIN (FORMAT JSON, ANALYZE) ' + queryString).then( - (queryStats) => { - 
frontendData.queryStatistics = queryStats.rows; - - (async function getListAsync() { - listObj = await db.getLists(); - frontendData.lists = listObj; - event.sender.send('db-lists', listObj); - event.sender.send('return-execute-query', frontendData); - event.sender.send('async-complete'); - })(); - } - ); - } else { - // Handling for tracking a create table query, can't run explain/analyze on create statements - (async function getListAsync() { - listObj = await db.getLists(); - frontendData.lists = listObj; - event.sender.send('db-lists', listObj); - event.sender.send('async-complete'); - })(); - } - }) - .catch((error: string) => { - console.log('ERROR in execute-query-tracked channel in main.ts', error); - }); -}); - -interface dummyDataRequest { - schemaName: string; - dummyData: {}; -} - -ipcMain.on('generate-dummy-data', (event: any, data: dummyDataRequest) => { - // send notice to front end that DD generation has been started - event.sender.send('async-started'); - - let schemaLayout: any; - let dummyDataRequest: dummyDataRequest = data; - let tableMatricesArray: any; - let keyObject: any = 'Unresolved'; - - db.createKeyObject().then((result) => { - // set keyObject equal to the result of this query - keyObject = result; - db.dropKeyColumns(keyObject).then(() => { - db.addNewKeyColumns(keyObject).then(() => { - db.getSchemaLayout().then((result) => { - schemaLayout = result; - // generate the dummy data and save it into matrices associated with table names - tableMatricesArray = generateDummyData( - schemaLayout, - dummyDataRequest, - keyObject - ); - //iterate through tableMatricesArray to write individual .csv files - for (const tableObject of tableMatricesArray) { - // write all entries in tableMatrix to csv file - writeCSVFile( - tableObject, - schemaLayout, - keyObject, - dummyDataRequest, - event - ); - } - }); - }); - }); - }); -}); - -export default execute;
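
A few review notes on the psql conversion, with a short sketch; the sketch is illustrative and not part of the commits above.

Only createDBFunc and importFileFunc drop the Docker wrapper in this series; runSQLFunc, runTARFunc, runFullCopyFunc, and runHollowCopyFunc still shell out to the postgres-1 container and its fixed /data_dump path. A minimal sketch of Docker-free equivalents, assuming psql, pg_dump, and pg_restore are on the PATH and that the postgres user can authenticate without a prompt (the added file parameter is hypothetical, since without a container there is no fixed /data_dump location):

// Hypothetical Docker-free helpers; the names mirror the existing ones and
// the extra `file` parameter is illustrative, not part of these commits.
const runSQLFunc = (dbName: string, file: string) =>
  `psql -U postgres -d ${dbName} -f ${file}`; // replay a plain .sql dump
const runTARFunc = (dbName: string, file: string) =>
  `pg_restore -U postgres -d ${dbName} ${file}`; // restore a .tar archive dump
const runFullCopyFunc = (dbCopyName: string, file: string) =>
  `pg_dump -U postgres -f ${file} ${dbCopyName}`; // dump schema and rows
const runHollowCopyFunc = (dbCopyName: string, file: string) =>
  `pg_dump -s -U postgres -f ${file} ${dbCopyName}`; // -s dumps schema only

Two caveats that the direct psql commands surface. First, execute() treats any stderr output as fatal and never calls nextStep, yet psql and pg_dump routinely write notices and progress messages to stderr even when they succeed, so the step chain may abort on imports that actually worked; branching only on the error argument (a non-zero exit) would likely be more robust. Second, importFileFunc interpolates filePath as-is, but the handlers receive it as an array (filePath[0] is used everywhere else), so the generated "psql -U postgres dbName < file" command only behaves correctly for a single selected file. Separately, in the models.ts hunks, getColumnObjects reads result.rows[i].character_maxiumum_length while the query selects character_maximum_length, so that field is always undefined as written.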