From bcbd6ce72fe00c068f32f5879f32a1c23acc89b7 Mon Sep 17 00:00:00 2001
From: Mercer Stronck
Date: Wed, 2 Sep 2020 15:19:24 -0700
Subject: [PATCH 1/4] Attempting to solve scalability bug in algorithm with Frank

---
 backend/dummy_db/dummy_handler.ts |   8 +--
 backend/main.ts                   | 109 ++++++++++++++++++++++++++++++
 2 files changed, 113 insertions(+), 4 deletions(-)

diff --git a/backend/dummy_db/dummy_handler.ts b/backend/dummy_db/dummy_handler.ts
index 8b4aa931..feccbc98 100644
--- a/backend/dummy_db/dummy_handler.ts
+++ b/backend/dummy_db/dummy_handler.ts
@@ -310,7 +310,7 @@ const valuesList = (columns : any, scale : number) => {
   const columnTypes = createRecordFunc(columns, scale);
   const valuesArray : any = [];
   // determine maximum number of records Postgres will allow per insert query - with buffer
-  let maxRecords : number = 90000 / columns.length;
+  let maxRecords : number = 10; // columns.length;
   let list : string = '';
   // create the number of records equal to the scale of the table
   for (let i : number = 0; i < scale; i += 1) {
@@ -325,7 +325,7 @@ const valuesList = (columns : any, scale : number) => {
       if (k < columns.length - 1) record += ', ';
     })
     list += `(${record})`;
-    if (i === maxRecords || i === scale - 1) {
+    if (i % maxRecords === 0 || i === scale - 1) {
       valuesArray.push(list);
       list = '';
     }
@@ -398,5 +398,5 @@ const fromApp = {
   ]
 };
 
-
-console.log(createInsertQuery(fromApp));
\ No newline at end of file
+module.exports = createInsertQuery;
+// console.log(createInsertQuery(fromApp));
\ No newline at end of file
diff --git a/backend/main.ts b/backend/main.ts
index 3e21fc57..e0570a31 100644
--- a/backend/main.ts
+++ b/backend/main.ts
@@ -9,6 +9,10 @@ const { exec } = require('child_process');
 const appMenu = require('./mainMenu');
 const db = require('./modal');
 const path = require('path');
+const createInsertQuery = require('./dummy_db/dummy_handler')
+
+
+
 /************************************************************
  ********* CREATE & CLOSE WINDOW UPON INITIALIZATION *********
  ************************************************************/
@@ -310,3 +314,108 @@ ipcMain.on('input-schema', (event, data: SchemaType) => {
   else addDB(createDB, step3);
   // else console.log('INVALID FILE TYPE: Please use .tar or .sql extensions.');
 });
+
+
+// Temporary!!!
+const fromApp = {
+  schema : 'public', //used to be schema1
+  table : 'table1',
+  scale : 20,
+  columns : [
+    {
+      name : '_id',
+      dataCategory : 'unique', // random, repeating, unique, combo, foreign
+      dataType : 'num',
+      data : {
+        serial: true,
+      }
+    },
+    {
+      name : 'username',
+      dataCategory : 'unique', // random, repeating, unique, combo, foreign
+      dataType : 'str',
+      data : {
+        length : [10, 15],
+        inclAlphaLow : true,
+        inclAlphaUp : true,
+        inclNum : true,
+        inclSpaces : true,
+        inclSpecChar : true,
+        include : ["include", "these", "aReplace"],
+      },
+    },
+    {
+      name : 'first_name',
+      dataCategory : 'random', // random, repeating, unique, combo, foreign
+      dataType : 'name.firstName',
+      data : {
+      }
+    },
+    {
+      name : 'company_name',
+      dataCategory : 'random',
+      dataType : 'company.companyName',
+      data : {
+      }
+    }
+  ]
+};
+
+
+// Generating Dummy Data from parameters sent from the frontend
+(function dummFunc(paramsObj) { // This code will move into the ipcMain handler once the frontend is ready
+  // Need addDB in this context
+  const addDB = (str: string, nextStep: any) => {
+    exec(str, (error, stdout, stderr) => {
+      if (error) {
+        console.log(`error: ${error.message}`);
+        return;
+      }
+      if (stderr) {
+        console.log(`stderr: ${stderr}`);
+        return;
+      }
+      // console.log(`stdout: ${stdout}`);
+      console.log(`${stdout}`);
+      if (nextStep) nextStep();
+    });
+  };
+
+
+
+  const db_name : string = 'defaultDB';
+  const schemaStr : string = `CREATE TABLE "table1"(
+    "_id" integer NOT NULL,
+    "username" VARCHAR(255) NOT NULL,
+    "first_name" VARCHAR(255),
+    "company_name" VARCHAR(255),
+    CONSTRAINT "tabl1_pk" PRIMARY KEY ("_id")
+  ) WITH (
+    OIDS=FALSE
+  );`
+  const insertArray : Array<string> = createInsertQuery(paramsObj);
+  console.log(insertArray)
+
+
+  for(let i = 0; i < insertArray.length; ++i){
+    let currentInsert = insertArray[i];
+    const dummyScript: string = `docker exec postgres-1 psql -U postgres -d ${db_name} -c "${currentInsert}"`;
+    db.query(schemaStr)
+      .then((returnedData) => {
+        console.log("In then")
+        addDB(dummyScript, () => console.log(`Dummied Database: ${db_name}`));
+      })
+    console.log("In loop")
+  }
+  // Need a setTimeout because query would run before any data gets uploaded to the database from the runTAR or runSQL commands
+  // setTimeout(async () => {
+  //   let listObj;
+  //   listObj = await db.getLists();
+  //   console.log('Temp log until channel is made', listObj);
+  //   //event.sender.send('db-lists', listObj);
+  // }, 1000);
+
+})(fromApp);
+
+ipcMain.on('dummy_handler', (event, paramObj: any) => {});
+

From 4b3643881edca57507782e759d4f4f20a46b5028 Mon Sep 17 00:00:00 2001
From: Mercer Stronck
Date: Wed, 2 Sep 2020 15:39:22 -0700
Subject: [PATCH 2/4] Worked out splitting the dummy data load into multiple insert queries

---
 backend/main.ts | 39 +++++++++++++++------------------------
 1 file changed, 15 insertions(+), 24 deletions(-)

diff --git a/backend/main.ts b/backend/main.ts
index e0570a31..5e7e7654 100644
--- a/backend/main.ts
+++ b/backend/main.ts
@@ -320,7 +320,7 @@ ipcMain.on('input-schema', (event, data: SchemaType) => {
 const fromApp = {
   schema : 'public', //used to be schema1
   table : 'table1',
-  scale : 20,
+  scale : 1000,
   columns : [
     {
       name : '_id',
@@ -387,34 +387,25 @@ const fromApp = {
   const schemaStr : string = `CREATE TABLE "table1"(
     "_id" integer NOT NULL,
     "username" VARCHAR(255) NOT NULL,
-    "first_name" VARCHAR(255),
-    "company_name" VARCHAR(255),
+    "first_name" VARCHAR(255) NOT NULL,
+    "company_name" VARCHAR(255) NOT NULL,
     CONSTRAINT "tabl1_pk" PRIMARY KEY ("_id")
   ) WITH (
     OIDS=FALSE
   );`
   const insertArray : Array<string> = createInsertQuery(paramsObj);
-  console.log(insertArray)
-
-
-  for(let i = 0; i < insertArray.length; ++i){
-    let currentInsert = insertArray[i];
-    const dummyScript: string = `docker exec postgres-1 psql -U postgres -d ${db_name} -c "${currentInsert}"`;
-    db.query(schemaStr)
-      .then((returnedData) => {
-        console.log("In then")
-        addDB(dummyScript, () => console.log(`Dummied Database: ${db_name}`));
-      })
-    console.log("In loop")
-  }
-  // Need a setTimeout because query would run before any data gets uploaded to the database from the runTAR or runSQL commands
-  // setTimeout(async () => {
-  //   let listObj;
-  //   listObj = await db.getLists();
-  //   console.log('Temp log until channel is made', listObj);
-  //   //event.sender.send('db-lists', listObj);
-  // }, 1000);
-
+  console.log(insertArray);
+
+  db.query(schemaStr)
+    .then((returnedData) => {
+      console.log("In then for setup table1")
+      for(let i = 0; i < insertArray.length; ++i){
+        console.log(i)
+        let currentInsert = insertArray[i];
+        const dummyScript: string = `docker exec postgres-1 psql -U postgres -d ${db_name} -c "${currentInsert}"`;
+        addDB(dummyScript, () => console.log(`Dummied Database: ${db_name}`))
+      }
+    })
 })(fromApp);
 
 ipcMain.on('dummy_handler', (event, paramObj: any) => {});

From e24539c990e0089183bf1236483adb309abb665c Mon Sep 17 00:00:00 2001
From: Mercer Stronck
Date: Wed, 2 Sep 2020 20:04:27 -0700
Subject: [PATCH 3/4] Application generates dummy data and inserts it into defaultDB upon application load

---
 backend/dummy_db/dummy_handler.ts |  2 +-
 backend/main.ts                   | 65 ++++++++++++++++++------------
 backend/modal.ts                  |  2 +
 3 files changed, 40 insertions(+), 29 deletions(-)

diff --git a/backend/dummy_db/dummy_handler.ts b/backend/dummy_db/dummy_handler.ts
index feccbc98..3a9aafc6 100644
--- a/backend/dummy_db/dummy_handler.ts
+++ b/backend/dummy_db/dummy_handler.ts
@@ -310,7 +310,7 @@ const valuesList = (columns : any, scale : number) => {
   const columnTypes = createRecordFunc(columns, scale);
   const valuesArray : any = [];
   // determine maximum number of records Postgres will allow per insert query - with buffer
-  let maxRecords : number = 10; // columns.length;
+  let maxRecords : number = 20; // columns.length;
   let list : string = '';
   // create the number of records equal to the scale of the table
   for (let i : number = 0; i < scale; i += 1) {
diff --git a/backend/main.ts b/backend/main.ts
index 5e7e7654..ab39efda 100644
--- a/backend/main.ts
+++ b/backend/main.ts
@@ -151,26 +151,22 @@ ipcMain.on('upload-file', (event, filePaths: string) => {
     let runCmd: string = '';
     if (extension === '.sql') runCmd = runSQL;
     else if (extension === '.tar') runCmd = runTAR;
-    addDB(runCmd, () => console.log(`Created Database: ${db_name}`));
-    redirectModal();
+    addDB(runCmd, redirectModal);
   };
 
   // Step 2 : Import database file from file path into docker container
   const step2 = () => addDB(importFile, step3);
 
   // Changes the pg URI to look to the newly created database and queries all the tables in that database and sends it to frontend.
-  const redirectModal = () => {
-    // Redirects modal towards new imported database
-    db.changeDB(db_name);
-    console.log(`Connected to database ${db_name}`);
-
-    // Need a setTimeout because query would run before any data gets uploaded to the database from the runTAR or runSQL commands
-    setTimeout(async () => {
-      let listObj;
-      listObj = await db.getLists();
-      console.log('Temp log until channel is made', listObj);
-      event.sender.send('db-lists', listObj);
-    }, 1000);
+  async function redirectModal() {
+    // Redirects modal towards new imported database, used before we added tabs. Not so much needed now
+    // db.changeDB(db_name);
+    // console.log(`Connected to database ${db_name}`);
+
+    let listObj;
+    listObj = await db.getLists();
+    console.log('Temp log until channel is made', listObj);
+    event.sender.send('db-lists', listObj);
   };
 
   // Step 1 : Create empty db
@@ -220,7 +216,9 @@ ipcMain.on('execute-query', (event, data: QueryType) => {
   async function getListAsync() {
     let listObj;
     listObj = await db.getLists();
+    console.log("Should be my lists", listObj)
     frontendData.lists = listObj;
+    event.sender.send('db-lists', listObj)
     event.sender.send('return-execute-query', frontendData);
   }
   getListAsync();
@@ -243,6 +241,7 @@ ipcMain.on('input-schema', (event, data: SchemaType) => {
   db_name = data.schemaName;
   let filePath = data.schemaFilePath;
   let schemaEntry = data.schemaEntry.trim();
+  console.log("schema entry", schemaEntry)
   console.log('filePath', filePath);
 
   // command strings
@@ -281,29 +280,39 @@ ipcMain.on('input-schema', (event, data: SchemaType) => {
     let runCmd: string = '';
     if (extension === '.sql') runCmd = runSQL;
     else if (extension === '.tar') runCmd = runTAR;
-    else runCmd = runScript;
-    addDB(runCmd, () => console.log(`Created Database: ${db_name}`));
-    // Redirects modal towards new imported database
-    redirectModal();
+    addDB(runCmd, redirectModal);
   };
 
   // Step 2 : Import database file from file path into docker container
   const step2 = () => addDB(importFile, step3);
 
-  const redirectModal = () => {
-    // Redirects modal towards new imported database
+  // Changes the pg URI to look to the newly created database and queries all the tables in that database and sends it to frontend.
+  async function redirectModal() {
+    // Redirects modal towards new imported database, used before we added tabs. Not so much needed now
     db.changeDB(db_name);
     console.log(`Connected to database ${db_name}`);
 
-    // Need a setTimeout because query would run before any data gets uploaded to the database from the runTAR or runSQL commands
-    setTimeout(async () => {
-      let listObj;
-      listObj = await db.getLists();
-      console.log('Temp log until channel is made', listObj);
-      event.sender.send('db-lists', listObj);
-    }, 1000);
+    let listObj;
+    listObj = await db.getLists();
+    console.log('Temp log until channel is made', listObj);
+    event.sender.send('db-lists', listObj);
   };
+
+  // const redirectModal = () => {
+  //   // Redirects modal towards new imported database
+  //   db.changeDB(db_name);
+  //   console.log(`Connected to database ${db_name}`);
+
+  //   // Need a setTimeout because query would run before any data gets uploaded to the database from the runTAR or runSQL commands
+  //   setTimeout(async () => {
+  //     let listObj;
+  //     listObj = await db.getLists();
+  //     console.log('Temp log until channel is made', listObj);
+  //     event.sender.send('db-lists', listObj);
+  //   }, 1000);
+  // };
+
 
   // Step 1 : Create empty db
   if (extension === '.sql' || extension === '.tar') {
     console.log('extension is sql tar');
@@ -320,7 +329,7 @@ ipcMain.on('input-schema', (event, data: SchemaType) => {
 const fromApp = {
   schema : 'public', //used to be schema1
   table : 'table1',
-  scale : 1000,
+  scale : 20,
   columns : [
     {
       name : '_id',
diff --git a/backend/modal.ts b/backend/modal.ts
index 0749bb6a..886636c0 100644
--- a/backend/modal.ts
+++ b/backend/modal.ts
@@ -27,6 +27,7 @@ module.exports = {
       "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' ORDER BY table_name;"
     )
     .then((tables) => {
+      console.log("Should be table1", tables.rows)
       let tableList: any = [];
       for (let i = 0; i < tables.rows.length; ++i) {
         tableList.push(tables.rows[i].table_name);
@@ -35,6 +36,7 @@ module.exports = {
 
     pool.query('SELECT datname FROM pg_database;').then((databases) => {
       let dbList: any = [];
+      console.log("Should be defaultDB", databases.rows)
       for (let i = 0; i < databases.rows.length; ++i) {
         let curName = databases.rows[i].datname;
         if (curName !== 'postgres' && curName !== 'template0' && curName !== 'template1')

From 704ec3b5e25d535f8ac251a114b5896324880362 Mon Sep 17 00:00:00 2001
From: Mercer Stronck
Date: Wed, 2 Sep 2020 20:18:06 -0700
Subject: [PATCH 4/4] Removed debugging console logs and commented out db.changeDB methods

---
 backend/main.ts  | 5 ++---
 backend/modal.ts | 2 --
 2 files changed, 2 insertions(+), 5 deletions(-)

diff --git a/backend/main.ts b/backend/main.ts
index ab39efda..262e3f41 100644
--- a/backend/main.ts
+++ b/backend/main.ts
@@ -289,12 +289,11 @@ ipcMain.on('input-schema', (event, data: SchemaType) => {
   // Changes the pg URI to look to the newly created database and queries all the tables in that database and sends it to frontend.
   async function redirectModal() {
     // Redirects modal towards new imported database, used before we added tabs. Not so much needed now
-    db.changeDB(db_name);
-    console.log(`Connected to database ${db_name}`);
+    // db.changeDB(db_name);
+    // console.log(`Connected to database ${db_name}`);
 
     let listObj;
     listObj = await db.getLists();
-    console.log('Temp log until channel is made', listObj);
     event.sender.send('db-lists', listObj);
   };
 
diff --git a/backend/modal.ts b/backend/modal.ts
index 886636c0..0749bb6a 100644
--- a/backend/modal.ts
+++ b/backend/modal.ts
@@ -27,7 +27,6 @@ module.exports = {
       "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' ORDER BY table_name;"
     )
    .then((tables) => {
-      console.log("Should be table1", tables.rows)
       let tableList: any = [];
       for (let i = 0; i < tables.rows.length; ++i) {
         tableList.push(tables.rows[i].table_name);
@@ -36,7 +35,6 @@ module.exports = {
 
     pool.query('SELECT datname FROM pg_database;').then((databases) => {
       let dbList: any = [];
-      console.log("Should be defaultDB", databases.rows)
       for (let i = 0; i < databases.rows.length; ++i) {
         let curName = databases.rows[i].datname;
         if (curName !== 'postgres' && curName !== 'template0' && curName !== 'template1')
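
Note on the batching idea behind patches 1-3: valuesList caps how many generated records land in any single INSERT, which is the scalability fix the series is chasing. The sketch below is a standalone illustration of that chunking, not the project's actual dummy_handler implementation; buildInsertQueries, MakeRecord, and their parameters are invented for the example. One observable quirk of the patch's flush condition (i % maxRecords === 0) is that the first batch holds a single row; flushing when the batch reaches the cap, as below, keeps batch sizes even while still honouring the limit.

// Hypothetical sketch: split `scale` generated records into INSERT statements
// of at most `maxRecords` rows each. Record generation is stubbed out.
type MakeRecord = (rowIndex: number) => string; // returns "val1, val2, ..." for one row

function buildInsertQueries(
  schema: string,
  table: string,
  scale: number,
  maxRecords: number,
  makeRecord: MakeRecord,
): string[] {
  const queries: string[] = [];
  let batch: string[] = [];

  for (let i = 0; i < scale; i += 1) {
    batch.push(`(${makeRecord(i)})`);
    // flush once the batch is full, or when we run out of rows
    if (batch.length === maxRecords || i === scale - 1) {
      queries.push(`INSERT INTO ${schema}.${table} VALUES ${batch.join(', ')};`);
      batch = [];
    }
  }
  return queries;
}

// e.g. 1000 rows with at most 20 rows per statement -> 50 INSERT statements
const statements = buildInsertQueries('public', 'table1', 1000, 20, (i) => `${i}, 'user${i}'`);
console.log(statements.length); // 50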
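
The loading step that patches 2 and 3 restructure follows the same shape: create the table first, then ship each chunked INSERT through docker exec / psql. The sketch below is one way to express that ordering; it assumes db.query returns a Promise (as node-postgres does) and uses a promisified exec so the statements run one at a time. loadDummyData, Queryable, and the argument names are illustrative and do not exist in the repo.

import { exec } from 'child_process';
import { promisify } from 'util';

const execAsync = promisify(exec);

type Queryable = { query: (sql: string) => Promise<unknown> };

async function loadDummyData(db: Queryable, dbName: string, schemaStr: string, inserts: string[]) {
  // the table has to exist before any of the chunked INSERTs run
  await db.query(schemaStr);

  for (const insert of inserts) {
    // one psql invocation per chunk, awaited so the statements run in order
    const cmd = `docker exec postgres-1 psql -U postgres -d ${dbName} -c "${insert}"`;
    const { stderr } = await execAsync(cmd);
    if (stderr) console.log(`stderr: ${stderr}`);
  }
  console.log(`Dummied Database: ${dbName}`);
}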
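
Patches 3 and 4 also drop the setTimeout workaround in favour of an async redirectModal that only calls db.getLists() once the docker command's callback has fired. A compact sketch of that ordering follows, assuming getLists() returns a Promise; createThenNotify and ListSource are illustrative names, not functions in main.ts.

import { exec } from 'child_process';
import { IpcMainEvent } from 'electron';

type ListSource = { getLists: () => Promise<unknown> };

// Run the docker/psql command; once its callback confirms completion,
// fetch the lists and push them to the renderer over the 'db-lists' channel.
function createThenNotify(event: IpcMainEvent, runCmd: string, db: ListSource) {
  exec(runCmd, async (error, stdout, stderr) => {
    if (error) return console.log(`error: ${error.message}`);
    if (stderr) return console.log(`stderr: ${stderr}`);
    console.log(stdout);

    const listObj = await db.getLists(); // no setTimeout: the command has already finished
    event.sender.send('db-lists', listObj);
  });
}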