diff --git a/backend/dummy_db/dataGenHandler.ts b/backend/dummy_db/dataGenHandler.ts
index 36ea565e..0a76f87c 100644
--- a/backend/dummy_db/dataGenHandler.ts
+++ b/backend/dummy_db/dataGenHandler.ts
@@ -1,13 +1,15 @@
 const faker = require('faker');
-const fakerLink = require('./fakerLink');
-const types = require('./dataTypeLibrary');
+const {fakerLink} = require('./fakerLink');
+const {types} = require('./dataTypeLibrary');
+
+/* --- MAIN FUNCTION --- */

 // GENERATE 'INSERT INTO' QUERY STRING
 // Populate an array of INSERT queries to add the data for the table to the database.
 // An array is used to break the insert into smaller pieces if needed.
 // Postgres limits insert queries to 100,000 entry values: 100,000 / # of columns = Max number of rows per query.
 // Arguments: form = DB generation form object submitted by user - from front end
-function createInsertQuery (form : any) : string {
+export const createInsertQuery = (form : any) : string => {
   const values = valuesList(form.columns, form.scale);
   const cols = columnList(form.columns);
   const queryArray : any = [];
@@ -15,6 +17,9 @@ function createInsertQuery (form : any) : string {
   return queryArray;
 }

+
+/* --- CALLBACK FUNCTIONS --- */
+
 // CREATE 'COLUMN' STRING FOR QUERY
 // Called by createInsertQuery()
 // deconstruct and convert the column names to a single string
@@ -35,7 +40,7 @@ const valuesList = (columns : any, scale : number) => {
   const columnTypes = createRecordFunc(columns, scale);
   const valuesArray : any = [];
   // determine maximum number of records Postgres will allow per insert query - with buffer
-  let maxRecords : number = 20; // columns.length;
+  let maxRecords : number = 10; // columns.length;
   let list : string = '';
   // create the number of records equal to the scale of the table
   for (let i : number = 0; i < scale; i += 1) {
@@ -78,9 +83,6 @@ const createRecordFunc = (columns : any, scale : number) => {
   return output;
 };

-
-module.exports = createInsertQuery;
-
 /* UNCOMMENT BELOW FOR TESTING OBJECT AND FUNCTION */
 // const fromApp = {
 //   schema : 'schema1', // Not currrently relevant: when multiple schemas per db are added, add this after INTO in createInsertQuery = |"${form.schema}".|
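Note on the hunks above: `createInsertQuery` becomes a named ES export (so callers would use `import { createInsertQuery } from './dataGenHandler'` instead of the old `module.exports` default), and `maxRecords` stays hardcoded (now 10) even though the inline comment describes the intended math: 100,000 values / number of columns = max rows per INSERT. A minimal sketch of that chunking idea, using hypothetical names (`chunkInsertQueries`, `Row`) rather than the project's actual helpers:

```ts
// Sketch of the row-chunking math described in the comments above.
// chunkInsertQueries and Row are illustrative names, not part of dataGenHandler.ts.
const POSTGRES_VALUE_LIMIT = 100_000; // max values per INSERT, per the comment in the diff

type Row = string[]; // one formatted value per column, e.g. ["1", "'alice'"]

function chunkInsertQueries(table: string, columns: string[], rows: Row[]): string[] {
  // 100,000 values / number of columns = max rows per INSERT statement
  const maxRecords = Math.floor(POSTGRES_VALUE_LIMIT / columns.length);
  const queries: string[] = [];
  for (let i = 0; i < rows.length; i += maxRecords) {
    // build one INSERT statement per chunk of rows
    const chunk = rows
      .slice(i, i + maxRecords)
      .map((row) => `(${row.join(', ')})`)
      .join(', ');
    queries.push(`INSERT INTO "${table}" (${columns.join(', ')}) VALUES ${chunk};`);
  }
  return queries;
}
```

Deriving `maxRecords` from `columns.length` as sketched (instead of a fixed 10) would keep each generated INSERT just under the value limit regardless of table width.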
diff --git a/backend/dummy_db/temp_connectToDB.ts b/backend/dummy_db/temp_connectToDB.ts
index 41c9c150..905c769d 100644
--- a/backend/dummy_db/temp_connectToDB.ts
+++ b/backend/dummy_db/temp_connectToDB.ts
@@ -1,105 +1,105 @@
-// Temporary Hardcoded database scaling
-// To test this code:
-// 1. Copy and paste this code into the bottom of main.ts
-// 2. Create a button on the frontend that activates the route 'generate-data'
-// 3. Test the button out out on the defaultDB tab, should only work in that tab because 'defaultDB' is hardcoded below
+// // Temporary Hardcoded database scaling
+// // To test this code:
+// // 1. Copy and paste this code into the bottom of main.ts
+// // 2. Create a button on the frontend that activates the route 'generate-data'
+// // 3. Test the button out out on the defaultDB tab, should only work in that tab because 'defaultDB' is hardcoded below

-/*=== SAMPLE OBJECT TO BE SENT FROM USER INTERFACE TO DATA GENERATOR ===*/
-const fromApp = {
-  schema: 'public', //used to be schema1
-  table: 'table1',
-  scale: 40,
-  columns: [
-    {
-      name: '_id',
-      dataCategory: 'unique', // random, repeating, unique, combo, foreign
-      dataType: 'num',
-      data: {
-        serial: true,
-      }
-    },
-    {
-      name: 'username',
-      dataCategory: 'unique', // random, repeating, unique, combo, foreign
-      dataType: 'str',
-      data: {
-        length: [10, 15],
-        inclAlphaLow: true,
-        inclAlphaUp: true,
-        inclNum: true,
-        inclSpaces: true,
-        inclSpecChar: true,
-        include: ["include", "these", "aReplace"],
-      },
-    },
-    {
-      name: 'first_name',
-      dataCategory: 'random', // random, repeating, unique, combo, foreign
-      dataType: 'Name - firstName',
-      data: {
-      }
-    },
-    {
-      name: 'company_name',
-      dataCategory: 'random',
-      dataType: 'Company - companyName',
-      data: {
-      }
-    }
-  ]
-};
+// /*=== SAMPLE OBJECT TO BE SENT FROM USER INTERFACE TO DATA GENERATOR ===*/
+// const fromApp = {
+//   schema: 'public', //used to be schema1
+//   table: 'table1',
+//   scale: 40,
+//   columns: [
+//     {
+//       name: '_id',
+//       dataCategory: 'unique', // random, repeating, unique, combo, foreign
+//       dataType: 'num',
+//       data: {
+//         serial: true,
+//       }
+//     },
+//     {
+//       name: 'username',
+//       dataCategory: 'unique', // random, repeating, unique, combo, foreign
+//       dataType: 'str',
+//       data: {
+//         length: [10, 15],
+//         inclAlphaLow: true,
+//         inclAlphaUp: true,
+//         inclNum: true,
+//         inclSpaces: true,
+//         inclSpecChar: true,
+//         include: ["include", "these", "aReplace"],
+//       },
+//     },
+//     {
+//       name: 'first_name',
+//       dataCategory: 'random', // random, repeating, unique, combo, foreign
+//       dataType: 'Name - firstName',
+//       data: {
+//       }
+//     },
+//     {
+//       name: 'company_name',
+//       dataCategory: 'random',
+//       dataType: 'Company - companyName',
+//       data: {
+//       }
+//     }
+//   ]
+// };

-ipcMain.on('generate-data', (event, paramObj: any) => {
-  // Generating Dummy Data from parameters sent from the frontend
-  (function dummyFunc(paramsObj) { // paramsObj === fromApp
-    // Need addDB in this context
-    const addDB = (str: string, nextStep: any) => {
-      exec(str, (error, stdout, stderr) => {
-        if (error) {
-          console.log(`error: ${error.message}`);
-          return;
-        }
-        if (stderr) {
-          console.log(`stderr: ${stderr}`);
-          return;
-        }
-        // console.log(`stdout: ${stdout}`);
-        console.log(`${stdout}`);
-        if (nextStep) nextStep();
-      });
-    };
-    const db_name: string = 'defaultDB';
-    // This is based off of the fromApp hard coded object,
-    // In theory this would be given to SeeQR from the user
-    const schemaStr: string = `CREATE TABLE "table1"(
-      "_id" integer NOT NULL,
-      "username" VARCHAR(255) NOT NULL,
-      "first_name" VARCHAR(255) NOT NULL,
-      "company_name" VARCHAR(255) NOT NULL,
-      CONSTRAINT "tabl1_pk" PRIMARY KEY ("_id")
-    ) WITH (
-      OIDS=FALSE
-    );`
-    // This is where createInsertQuery function is invoked
-    const insertArray: Array = createInsertQuery(paramsObj);
-    console.log(insertArray);
-    // Important part !!!!
-    // takes in an array of insert query strings: insertArray
-    // this insertArray is the output of the createInsertQuery function from dataGenHandler.ts
-    // db_name is whatever tab they're currently on
-    // scemaStr is the hard coded table for the fromApp hard coded object
-    db.query(schemaStr) // this makes hard coded table in database
-      .then((returnedData) => {
-        // ====== BELOW IS MAIN FUNCTIONALITY FOR SUBMITTING DUMMY DATA TO THE DATABASE ======= AKA looping insert queries into the node child process
-        // USE THIS ALONG WITH THE addDB(node childprocess) FUNCTION FOR FINAL PRODUCT
-        // THE CODE FROM ABOVE IS FOR TESTING THIS WITHOUT THE INTERFACE
-        for (let i = 0; i < insertArray.length; ++i) {
-          console.log(i)
-          let currentInsert = insertArray[i];
-          const dummyScript: string = `docker exec postgres-1 psql -U postgres -d ${db_name} -c "${currentInsert}"`;
-          addDB(dummyScript, () => console.log(`Dummied Database: ${db_name}`)) //using the Node childprocess to access postgres for each INSERT query in the insertArray
-        }
-      })
-  })(fromApp);
-});
\ No newline at end of file
+// ipcMain.on('generate-data', (event, paramObj: any) => {
+//   // Generating Dummy Data from parameters sent from the frontend
+//   (function dummyFunc(paramsObj) { // paramsObj === fromApp
+//     // Need addDB in this context
+//     const addDB = (str: string, nextStep: any) => {
+//       exec(str, (error, stdout, stderr) => {
+//         if (error) {
+//           console.log(`error: ${error.message}`);
+//           return;
+//         }
+//         if (stderr) {
+//           console.log(`stderr: ${stderr}`);
+//           return;
+//         }
+//         // console.log(`stdout: ${stdout}`);
+//         console.log(`${stdout}`);
+//         if (nextStep) nextStep();
+//       });
+//     };
+//     const db_name: string = 'defaultDB';
+//     // This is based off of the fromApp hard coded object,
+//     // In theory this would be given to SeeQR from the user
+//     const schemaStr: string = `CREATE TABLE "table1"(
+//       "_id" integer NOT NULL,
+//       "username" VARCHAR(255) NOT NULL,
+//       "first_name" VARCHAR(255) NOT NULL,
+//       "company_name" VARCHAR(255) NOT NULL,
+//       CONSTRAINT "tabl1_pk" PRIMARY KEY ("_id")
+//     ) WITH (
+//       OIDS=FALSE
+//     );`
+//     // This is where createInsertQuery function is invoked
+//     const insertArray: Array = createInsertQuery(paramsObj);
+//     console.log(insertArray);
+//     // Important part !!!!
+//     // takes in an array of insert query strings: insertArray
+//     // this insertArray is the output of the createInsertQuery function from dataGenHandler.ts
+//     // db_name is whatever tab they're currently on
+//     // scemaStr is the hard coded table for the fromApp hard coded object
+//     db.query(schemaStr) // this makes hard coded table in database
+//       .then((returnedData) => {
+//         // ====== BELOW IS MAIN FUNCTIONALITY FOR SUBMITTING DUMMY DATA TO THE DATABASE ======= AKA looping insert queries into the node child process
+//         // USE THIS ALONG WITH THE addDB(node childprocess) FUNCTION FOR FINAL PRODUCT
+//         // THE CODE FROM ABOVE IS FOR TESTING THIS WITHOUT THE INTERFACE
+//         for (let i = 0; i < insertArray.length; ++i) {
+//           console.log(i)
+//           let currentInsert = insertArray[i];
+//           const dummyScript: string = `docker exec postgres-1 psql -U postgres -d ${db_name} -c "${currentInsert}"`;
+//           addDB(dummyScript, () => console.log(`Dummied Database: ${db_name}`)) //using the Node childprocess to access postgres for each INSERT query in the insertArray
+//         }
+//       })
+//   })(fromApp);
+// });
\ No newline at end of file
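Since temp_connectToDB.ts is now fully commented out, here is a rough sketch of what the disabled harness was doing: it builds one `docker exec ... psql -c` command per generated INSERT string and runs each through a Node child process. The sequential `await` below is an assumption added for illustration (the original fires `exec` inside a plain `for` loop), and `runInserts` is a hypothetical helper, not code from the repo:

```ts
// Sketch of the commented-out insert loop above, run one query at a time.
// runInserts is an illustrative name, not part of temp_connectToDB.ts.
import { exec } from 'child_process';
import { promisify } from 'util';

const execAsync = promisify(exec);

async function runInserts(dbName: string, insertArray: string[]): Promise<void> {
  for (const insert of insertArray) {
    // Same docker/psql command the temp file builds for each INSERT query
    const script = `docker exec postgres-1 psql -U postgres -d ${dbName} -c "${insert}"`;
    const { stdout, stderr } = await execAsync(script);
    if (stderr) console.log(`stderr: ${stderr}`);
    else console.log(stdout);
  }
  console.log(`Dummied Database: ${dbName}`);
}
```

Awaiting each command keeps only one child process alive at a time, which matters when `insertArray` holds many chunked INSERT statements.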