From bd42bf7263d3a7d1b3f76a6bcda5edebb9dc148f Mon Sep 17 00:00:00 2001 From: Bennett Toftner Date: Sun, 6 Oct 2024 17:15:23 -0400 Subject: [PATCH 001/198] add api endpoint to get experiment data from mongodb --- apps/frontend/lib/mongodb.ts | 1 + .../pages/api/experiments/[expIdToGet].tsx | 33 +++++++++++++++++++ 2 files changed, 34 insertions(+) create mode 100644 apps/frontend/pages/api/experiments/[expIdToGet].tsx diff --git a/apps/frontend/lib/mongodb.ts b/apps/frontend/lib/mongodb.ts index 884b58e6..e24f3a44 100644 --- a/apps/frontend/lib/mongodb.ts +++ b/apps/frontend/lib/mongodb.ts @@ -15,6 +15,7 @@ export const DB_NAME = 'gladosdb'; export const COLLECTION_LOGS = 'logs'; export const COLLECTION_ZIPS = 'zips'; export const COLLECTION_RESULTS_CSVS = 'results'; +export const COLLECTION_EXPERIMENTS = 'experiments'; let client: MongoClient; let clientPromise: Promise = new Promise((success) => { diff --git a/apps/frontend/pages/api/experiments/[expIdToGet].tsx b/apps/frontend/pages/api/experiments/[expIdToGet].tsx new file mode 100644 index 00000000..bef2589a --- /dev/null +++ b/apps/frontend/pages/api/experiments/[expIdToGet].tsx @@ -0,0 +1,33 @@ +import clientPromise, { DB_NAME, COLLECTION_EXPERIMENTS } from '../../../lib/mongodb'; +import { NextApiHandler } from 'next'; +import { ExperimentData } from '../../../firebase/db_types'; + +const mongoExpHandler: NextApiHandler = async (req, res) => { + const { expIdToGet } = req.query; + if (typeof expIdToGet !== 'string') { + res.status(400).json({ response: 'Missing experiment ID' } as any); + return; + } + + let results; + try { + const client = await clientPromise; + const db = client.db(DB_NAME); + + const experiment = await db + .collection(COLLECTION_EXPERIMENTS) + .findOne({ '_id': expIdToGet as any }); // Assuming expId is the unique identifier in the collection + + if (!experiment) { + return res.status(404).json({ response: 'Experiment not found' } as any); + } + + res.status(200).json(experiment as unknown as ExperimentData); + } catch (error) { + const message = 'Failed to download the experiment data'; + console.error('Error contacting server: ', error); + res.status(500).json({ response: message } as any); + } +}; + +export default mongoExpHandler; From 7a45fa91bfc803d19195d02fc460a5d8388dd7da Mon Sep 17 00:00:00 2001 From: Bennett Toftner Date: Sun, 6 Oct 2024 18:38:25 -0400 Subject: [PATCH 002/198] storeExp api handler added --- apps/frontend/pages/api/[storeExp].tsx | 39 ++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 apps/frontend/pages/api/[storeExp].tsx diff --git a/apps/frontend/pages/api/[storeExp].tsx b/apps/frontend/pages/api/[storeExp].tsx new file mode 100644 index 00000000..406c3b0b --- /dev/null +++ b/apps/frontend/pages/api/[storeExp].tsx @@ -0,0 +1,39 @@ +import clientPromise, { DB_NAME, COLLECTION_EXPERIMENTS } from '../../lib/mongodb'; +import { NextApiHandler } from 'next'; +import { ExperimentData } from '../../firebase/db_types'; + +const mongoExpHandler: NextApiHandler = async (req, res) => { + if (req.method !== 'POST') { + res.status(405).json({ response: 'Method not allowed, use POST' } as any); + return; + } + + const experimentData: ExperimentData = req.body; + + if (!experimentData || typeof experimentData !== 'object') { + res.status(400).json({ response: 'Invalid experiment data provided' } as any); + return; + } + + try { + const client = await clientPromise; + const db = client.db(DB_NAME); + + // Insert the new experiment data + const result = await db + 
.collection(COLLECTION_EXPERIMENTS) + .insertOne(experimentData); + + if (result.insertedId) { + res.status(201).json({ response: 'Experiment data stored successfully', id: result.insertedId }); + } else { + throw new Error('Insert operation failed'); + } + } catch (error) { + const message = 'Failed to store the experiment data'; + console.error('Error contacting server: ', error); + res.status(500).json({ response: message } as any); + } +}; + +export default mongoExpHandler; From c4292346c572eca71ff27e0de9391ef89d1ae28c Mon Sep 17 00:00:00 2001 From: rhit-johnsoz2 Date: Wed, 9 Oct 2024 13:37:17 -0400 Subject: [PATCH 003/198] Changed the name of the storeExp file to [... storeExp].tsx to accommodate for storing the experiments properly. --- .../pages/api/{[storeExp].tsx => storage/[... storeExp].tsx} | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) rename apps/frontend/pages/api/{[storeExp].tsx => storage/[... storeExp].tsx} (93%) diff --git a/apps/frontend/pages/api/[storeExp].tsx b/apps/frontend/pages/api/storage/[... storeExp].tsx similarity index 93% rename from apps/frontend/pages/api/[storeExp].tsx rename to apps/frontend/pages/api/storage/[... storeExp].tsx index 406c3b0b..61fb93a7 100644 --- a/apps/frontend/pages/api/[storeExp].tsx +++ b/apps/frontend/pages/api/storage/[... storeExp].tsx @@ -1,6 +1,6 @@ -import clientPromise, { DB_NAME, COLLECTION_EXPERIMENTS } from '../../lib/mongodb'; +import clientPromise, { DB_NAME, COLLECTION_EXPERIMENTS } from '../../../lib/mongodb'; import { NextApiHandler } from 'next'; -import { ExperimentData } from '../../firebase/db_types'; +import { ExperimentData } from '../../../firebase/db_types'; const mongoExpHandler: NextApiHandler = async (req, res) => { if (req.method !== 'POST') { From 5cc93c7bf908e310ac7becc5f91873aebb930b09 Mon Sep 17 00:00:00 2001 From: rhit-johnsoz2 Date: Wed, 9 Oct 2024 13:52:38 -0400 Subject: [PATCH 004/198] Redid the name upon doing more research into JSON.stringify --- .../pages/api/storage/{[... storeExp].tsx => storeExp.tsx} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename apps/frontend/pages/api/storage/{[... storeExp].tsx => storeExp.tsx} (100%) diff --git a/apps/frontend/pages/api/storage/[... storeExp].tsx b/apps/frontend/pages/api/storage/storeExp.tsx similarity index 100% rename from apps/frontend/pages/api/storage/[... 
storeExp].tsx rename to apps/frontend/pages/api/storage/storeExp.tsx From 79eacd5dcc09c2cb47421cbfccaea89c411ee9af Mon Sep 17 00:00:00 2001 From: rhit-johnsoz2 Date: Wed, 9 Oct 2024 13:56:28 -0400 Subject: [PATCH 005/198] Put down (in a comment) what we plan to incorporate into DispatchStep.tsx --- apps/frontend/pages/api/storage/storeExp.tsx | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/apps/frontend/pages/api/storage/storeExp.tsx b/apps/frontend/pages/api/storage/storeExp.tsx index 61fb93a7..51cdf41d 100644 --- a/apps/frontend/pages/api/storage/storeExp.tsx +++ b/apps/frontend/pages/api/storage/storeExp.tsx @@ -36,4 +36,13 @@ const mongoExpHandler: NextApiHandler = async (req, res) => { } }; +// TODO: In DispatchStep.tsx: +// fetch('/api/storage/storeExp', { +// method: 'POST', +// headers: { +// 'Content-Type': 'application/json', +// }, +// body: JSON.stringify(objectWithData) +// }) + export default mongoExpHandler; From b3f473be36c07bbba15db685558412defbf4da5a Mon Sep 17 00:00:00 2001 From: rhit-johnsoz2 Date: Mon, 14 Oct 2024 20:58:59 -0400 Subject: [PATCH 006/198] Refactored the updateProjectNameInFirebase method in firebase/db.ts to be called updateExperimentName and now does an api call to the new [... expIdToUpdateName].tsx and utilises MongoDB. Needs to be tested. --- .../ViewExperiment/ExperimentListing.tsx | 4 +-- apps/frontend/firebase/db.ts | 31 ++++++++++------ .../experiments/{ => get}/[expIdToGet].tsx | 5 ++- .../{ => start}/[expIdToStart].tsx | 2 +- .../api/{storage => experiments}/storeExp.tsx | 0 .../update/[... expIdToUpdateName].tsx | 35 +++++++++++++++++++ 6 files changed, 60 insertions(+), 17 deletions(-) rename apps/frontend/pages/api/experiments/{ => get}/[expIdToGet].tsx (91%) rename apps/frontend/pages/api/experiments/{ => start}/[expIdToStart].tsx (95%) rename apps/frontend/pages/api/{storage => experiments}/storeExp.tsx (100%) create mode 100644 apps/frontend/pages/api/experiments/update/[... 
expIdToUpdateName].tsx diff --git a/apps/frontend/components/flows/ViewExperiment/ExperimentListing.tsx b/apps/frontend/components/flows/ViewExperiment/ExperimentListing.tsx index 41c92eda..4697d3f6 100644 --- a/apps/frontend/components/flows/ViewExperiment/ExperimentListing.tsx +++ b/apps/frontend/components/flows/ViewExperiment/ExperimentListing.tsx @@ -1,7 +1,7 @@ /* eslint-disable no-mixed-spaces-and-tabs */ import { ChevronRightIcon } from '@heroicons/react/24/solid'; import { useEffect, useState } from 'react'; -import { ExperimentDocumentId, subscribeToExp, updateProjectNameInFirebase, getCurrentProjectName } from '../../../firebase/db'; +import { ExperimentDocumentId, subscribeToExp, updateExperimentName, getCurrentProjectName } from '../../../firebase/db'; import { ExperimentData } from '../../../firebase/db_types'; import { MdEdit, MdPadding } from 'react-icons/md'; import { Timestamp } from 'mongodb'; @@ -46,7 +46,7 @@ export const ExperimentListing = ({ projectinit, onCopyExperiment, onDownloadRes const handleSave = (newProjectName) => { // Update the project name in Firebase with the edited name - updateProjectNameInFirebase(project.expId, projectName); + updateExperimentName(project.expId, projectName); // Exit the editing mode setIsEditing(false); diff --git a/apps/frontend/firebase/db.ts b/apps/frontend/firebase/db.ts index f060bf88..e14b9118 100644 --- a/apps/frontend/firebase/db.ts +++ b/apps/frontend/firebase/db.ts @@ -19,6 +19,7 @@ export type FirebaseUserId = FirebaseId; export type ExperimentDocumentId = FirebaseDocumentId; +// Should be working with MongoDB in pages\api\storage\storeExp.tsx export const submitExperiment = async (values: Partial, userId: FirebaseUserId): Promise => { const newExperimentDocument = doc(experiments); console.log('Experiment submitted. 
Values:', values); @@ -49,7 +50,7 @@ export const submitExperiment = async (values: Partial, userId: return newExperimentDocument.id; }; - +// TODO: Change to use MongoDB export const uploadExec = async (id: ExperimentDocumentId, file) => { const fileRef = ref(storage, `experiment${id}`); return await uploadBytes(fileRef, file).then((snapshot) => { @@ -129,6 +130,7 @@ export interface ExperimentSubscribeCallback { (data: Partial): any; } +// TODO: Convert from Firestore to MongoDB export const subscribeToExp = (id: ExperimentDocumentId, callback: ExperimentSubscribeCallback) => { const unsubscribe = onSnapshot(doc(db, DB_COLLECTION_EXPERIMENTS, id), (doc) => { console.log(`exp ${id} data updated: `, doc.data()); @@ -142,6 +144,7 @@ export interface MultipleExperimentSubscribeCallback { (data: Partial[]): any; } +// TODO: Convert from Firestore MongoDB export const listenToExperiments = (uid: FirebaseUserId, callback: MultipleExperimentSubscribeCallback) => { const q = query(experiments, where('creator', '==', uid)); const unsubscribe = onSnapshot(q, (snapshot) => { @@ -152,6 +155,7 @@ export const listenToExperiments = (uid: FirebaseUserId, callback: MultipleExper return unsubscribe; }; +// TODO: Convert from Firestore to MongoDB export const deleteExperiment = async (expId: ExperimentDocumentId) => { const experimentRef = doc(db, DB_COLLECTION_EXPERIMENTS, expId); console.log(`Deleting ${expId} from firestore...`); @@ -162,20 +166,25 @@ export const deleteExperiment = async (expId: ExperimentDocumentId) => { return false; }; -export const updateProjectNameInFirebase = async (projectId, updatedName) => { - try { - // Reference the project document in Firebase - const experimentRef = doc(db, DB_COLLECTION_EXPERIMENTS, projectId); - - // Update the project name - await updateDoc(experimentRef, { name: updatedName }); - } catch (error) { - console.error('Error updating project name:', error); - } +// Done: Convert from Firestore to MongoDB +// TODO: Test this! +export const updateExperimentName = async (expId, updatedName) => { + await fetch(`/api/experiments/update/${expId}/${updatedName}`).then((response) => { + if (response?.ok) { + return response.json(); + } + return Promise.reject(response); + }).then((expId: String) => { + console.log(expId); + }).catch((response: Response) => { + // might need this + }); }; // Function to get the project name from Firebase +// TODO: Convert from Firestore to MongoDB +// Not being used right now; we have [expIdToGet].tsx, which might render this useless anyway. 
export const getCurrentProjectName = async (projectId) => { try { // Reference the project document in Firebase diff --git a/apps/frontend/pages/api/experiments/[expIdToGet].tsx b/apps/frontend/pages/api/experiments/get/[expIdToGet].tsx similarity index 91% rename from apps/frontend/pages/api/experiments/[expIdToGet].tsx rename to apps/frontend/pages/api/experiments/get/[expIdToGet].tsx index bef2589a..4c2407a4 100644 --- a/apps/frontend/pages/api/experiments/[expIdToGet].tsx +++ b/apps/frontend/pages/api/experiments/get/[expIdToGet].tsx @@ -1,6 +1,6 @@ -import clientPromise, { DB_NAME, COLLECTION_EXPERIMENTS } from '../../../lib/mongodb'; +import clientPromise, { DB_NAME, COLLECTION_EXPERIMENTS } from '../../../../lib/mongodb'; import { NextApiHandler } from 'next'; -import { ExperimentData } from '../../../firebase/db_types'; +import { ExperimentData } from '../../../../firebase/db_types'; const mongoExpHandler: NextApiHandler = async (req, res) => { const { expIdToGet } = req.query; @@ -9,7 +9,6 @@ const mongoExpHandler: NextApiHandler = async (req, res) => { return; } - let results; try { const client = await clientPromise; const db = client.db(DB_NAME); diff --git a/apps/frontend/pages/api/experiments/[expIdToStart].tsx b/apps/frontend/pages/api/experiments/start/[expIdToStart].tsx similarity index 95% rename from apps/frontend/pages/api/experiments/[expIdToStart].tsx rename to apps/frontend/pages/api/experiments/start/[expIdToStart].tsx index 62a4d101..d7193a5d 100644 --- a/apps/frontend/pages/api/experiments/[expIdToStart].tsx +++ b/apps/frontend/pages/api/experiments/start/[expIdToStart].tsx @@ -1,5 +1,5 @@ import { NextApiHandler } from 'next'; -import { getEnvVar } from '../../../utils/env'; +import { getEnvVar } from '../../../../utils/env'; const BACKEND_PORT = getEnvVar('BACKEND_PORT'); diff --git a/apps/frontend/pages/api/storage/storeExp.tsx b/apps/frontend/pages/api/experiments/storeExp.tsx similarity index 100% rename from apps/frontend/pages/api/storage/storeExp.tsx rename to apps/frontend/pages/api/experiments/storeExp.tsx diff --git a/apps/frontend/pages/api/experiments/update/[... expIdToUpdateName].tsx b/apps/frontend/pages/api/experiments/update/[... expIdToUpdateName].tsx new file mode 100644 index 00000000..b339c15e --- /dev/null +++ b/apps/frontend/pages/api/experiments/update/[... 
expIdToUpdateName].tsx @@ -0,0 +1,35 @@ +import clientPromise, { DB_NAME, COLLECTION_EXPERIMENTS } from '../../../../lib/mongodb'; +import { NextApiHandler } from 'next'; +import { ExperimentData } from '../../../../firebase/db_types'; + +const mongoExpHandler: NextApiHandler = async (req, res) => { + const { expIdToUpdateName } = req.query; + if (!(Array.isArray(expIdToUpdateName)) || expIdToUpdateName?.length == 0) { + res.status(400).json({ response: 'Missing experiment ID' } as any); + return; + } + + try { + const client = await clientPromise; + const db = client.db(DB_NAME); + + const expId = expIdToUpdateName[0] + const newName = expIdToUpdateName[1] + + const experiment = await db + .collection(COLLECTION_EXPERIMENTS) + .updateOne({'_id': expId as any}, {'name': newName as any}); + + if (!experiment) { + return res.status(404).json({ response: 'Experiment not found' } as any); + } + + res.status(200).json(experiment as unknown as ExperimentData); + } catch (error) { + const message = 'Failed to rename the experiment'; + console.error('Error contacting server: ', error); + res.status(500).json({ response: message } as any); + } +}; + +export default mongoExpHandler; From 1e305608dd219bc5b19095389bb79b45b6a9afd9 Mon Sep 17 00:00:00 2001 From: Bennett Toftner Date: Wed, 16 Oct 2024 12:22:28 -0400 Subject: [PATCH 007/198] deleteExperiment converted --- apps/frontend/firebase/db.ts | 17 ++++++---- .../experiments/delete/[expIdToDelete].tsx | 32 +++++++++++++++++++ 2 files changed, 42 insertions(+), 7 deletions(-) create mode 100644 apps/frontend/pages/api/experiments/delete/[expIdToDelete].tsx diff --git a/apps/frontend/firebase/db.ts b/apps/frontend/firebase/db.ts index e14b9118..73dff02f 100644 --- a/apps/frontend/firebase/db.ts +++ b/apps/frontend/firebase/db.ts @@ -157,13 +157,16 @@ export const listenToExperiments = (uid: FirebaseUserId, callback: MultipleExper // TODO: Convert from Firestore to MongoDB export const deleteExperiment = async (expId: ExperimentDocumentId) => { - const experimentRef = doc(db, DB_COLLECTION_EXPERIMENTS, expId); - console.log(`Deleting ${expId} from firestore...`); - deleteDoc(experimentRef).then(() => { - console.log(`Deleted experiment ${expId}`); - return true; - }).catch((error) => console.log('Delete experiment error: ', error)); - return false; + await fetch(`/api/experiments/delete/${expId}`).then((response) => { + if (response?.ok) { + return response.json(); + } + return Promise.reject(response); + }).then((expId: String) => { + console.log(expId); + }).catch((response: Response) => { + // might need this + }); }; // Done: Convert from Firestore to MongoDB diff --git a/apps/frontend/pages/api/experiments/delete/[expIdToDelete].tsx b/apps/frontend/pages/api/experiments/delete/[expIdToDelete].tsx new file mode 100644 index 00000000..fc4d8ff1 --- /dev/null +++ b/apps/frontend/pages/api/experiments/delete/[expIdToDelete].tsx @@ -0,0 +1,32 @@ +import clientPromise, { DB_NAME, COLLECTION_EXPERIMENTS } from '../../../../lib/mongodb'; +import { NextApiHandler } from 'next'; +import { ExperimentData } from '../../../../firebase/db_types'; + +const mongoExpHandler: NextApiHandler = async (req, res) => { + const { expIdToDelete } = req.query; + if (typeof expIdToDelete !== 'string') { + res.status(400).json({ response: 'Missing experiment ID' } as any); + return; + } + + try { + const client = await clientPromise; + const db = client.db(DB_NAME); + + const experiment = await db + .collection(COLLECTION_EXPERIMENTS) + .deleteOne({ '_id': expIdToDelete as 
any }); // Assuming expId is the unique identifier in the collection + + if (!experiment) { + return res.status(404).json({ response: 'Experiment not found' } as any); + } + + res.status(200).json(experiment as unknown as ExperimentData); + } catch (error) { + const message = 'Failed to delete the experiment'; + console.error('Error contacting server: ', error); + res.status(500).json({ response: message } as any); + } +}; + +export default mongoExpHandler; From 83f07f4c38583242483efb7bf8e2b4a860077743 Mon Sep 17 00:00:00 2001 From: Bennett Toftner Date: Wed, 16 Oct 2024 12:33:19 -0400 Subject: [PATCH 008/198] converted submitExperiment to mongo --- apps/frontend/firebase/db.ts | 47 +++++++++++++----------------------- 1 file changed, 17 insertions(+), 30 deletions(-) diff --git a/apps/frontend/firebase/db.ts b/apps/frontend/firebase/db.ts index 73dff02f..c97904de 100644 --- a/apps/frontend/firebase/db.ts +++ b/apps/frontend/firebase/db.ts @@ -19,35 +19,23 @@ export type FirebaseUserId = FirebaseId; export type ExperimentDocumentId = FirebaseDocumentId; -// Should be working with MondoDB in pages\api\storage\storeExp.tsx -export const submitExperiment = async (values: Partial, userId: FirebaseUserId): Promise => { - const newExperimentDocument = doc(experiments); - console.log('Experiment submitted. Values:', values); - setDoc(newExperimentDocument, { - creator: userId, - name: values.name, - description: values.description, - verbose: values.verbose, - workers: values.workers, - expId: newExperimentDocument.id, - trialExtraFile: values.trialExtraFile, - trialResult: values.trialResult, - timeout: values.timeout, - keepLogs: values.keepLogs, - scatter: values.scatter, - scatterIndVar: values.scatterIndVar, - scatterDepVar: values.scatterDepVar, - dumbTextArea: values.dumbTextArea, - created: Date.now(), - hyperparameters: JSON.stringify({ - hyperparameters: values.hyperparameters, - }), - finished: false, - estimatedTotalTimeMinutes: 0, - totalExperimentRuns: 0, +// test +export const submitExperiment = async (values: Partial, userId: FirebaseUserId) => { + await fetch(`/api/experiments/storeExp`, + { + method: "POST", + body: JSON.stringify(values) + } + ).then((response) => { + if (response?.ok) { + return response.json(); + } + return Promise.reject(response); + }).then((expId: String) => { + console.log(expId); + }).catch((response: Response) => { + // might need this }); - console.log(`Created Experiment: ${newExperimentDocument.id}`); - return newExperimentDocument.id; }; // TODO: Change to use MongoDB @@ -155,7 +143,7 @@ export const listenToExperiments = (uid: FirebaseUserId, callback: MultipleExper return unsubscribe; }; -// TODO: Convert from Firestore to MongoDB +// TODO: Test this! export const deleteExperiment = async (expId: ExperimentDocumentId) => { await fetch(`/api/experiments/delete/${expId}`).then((response) => { if (response?.ok) { @@ -169,7 +157,6 @@ export const deleteExperiment = async (expId: ExperimentDocumentId) => { }); }; -// Done: Convert from Firestore to MongoDB // TODO: Test this! 
export const updateExperimentName = async (expId, updatedName) => { await fetch(`/api/experiments/update/${expId}/${updatedName}`).then((response) => { From d49c7e85e959c055a7394279e96fa6c76886ffe6 Mon Sep 17 00:00:00 2001 From: Bennett Toftner Date: Wed, 16 Oct 2024 12:40:10 -0400 Subject: [PATCH 009/198] Added comment to uploadExec --- apps/frontend/firebase/db.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/frontend/firebase/db.ts b/apps/frontend/firebase/db.ts index c97904de..ff538529 100644 --- a/apps/frontend/firebase/db.ts +++ b/apps/frontend/firebase/db.ts @@ -38,7 +38,7 @@ export const submitExperiment = async (values: Partial, userId: }); }; -// TODO: Change to use MongoDB +// TODO: will use mongo gridfs export const uploadExec = async (id: ExperimentDocumentId, file) => { const fileRef = ref(storage, `experiment${id}`); return await uploadBytes(fileRef, file).then((snapshot) => { From f454af9456c0b0baf6cbed105543ca138d711ac5 Mon Sep 17 00:00:00 2001 From: Bennett Toftner Date: Wed, 23 Oct 2024 13:19:44 -0400 Subject: [PATCH 010/198] Fixed issues pertaining to reviewer comments --- .../flows/AddExperiment/stepComponents/DispatchStep.tsx | 2 +- apps/frontend/firebase/db.ts | 5 +++++ .../pages/api/experiments/delete/[expIdToDelete].tsx | 6 +++--- apps/frontend/pages/api/experiments/storeExp.tsx | 9 --------- 4 files changed, 9 insertions(+), 13 deletions(-) diff --git a/apps/frontend/components/flows/AddExperiment/stepComponents/DispatchStep.tsx b/apps/frontend/components/flows/AddExperiment/stepComponents/DispatchStep.tsx index 9c33dcfe..901869dc 100644 --- a/apps/frontend/components/flows/AddExperiment/stepComponents/DispatchStep.tsx +++ b/apps/frontend/components/flows/AddExperiment/stepComponents/DispatchStep.tsx @@ -27,7 +27,7 @@ export const DispatchStep = ({ id, form, ...props }) => { const uploadResponse = await uploadExec(expId, files[0]); if (uploadResponse) { console.log(`Handing experiment ${expId} to the backend`); - const response = await fetch(`/api/experiments/${expId}`, { + const response = await fetch(`/api/experiments/start/${expId}`, { method: 'POST', headers: new Headers({ 'Content-Type': 'application/json' }), credentials: 'same-origin', diff --git a/apps/frontend/firebase/db.ts b/apps/frontend/firebase/db.ts index ff538529..fb728f6a 100644 --- a/apps/frontend/firebase/db.ts +++ b/apps/frontend/firebase/db.ts @@ -21,6 +21,11 @@ export type ExperimentDocumentId = FirebaseDocumentId; // test export const submitExperiment = async (values: Partial, userId: FirebaseUserId) => { + values.creator = userId; + values.created = Date.now(); + values.finished = false; + values.estimatedTotalTimeMinutes = 0; + values.totalExperimentRuns = 0; await fetch(`/api/experiments/storeExp`, { method: "POST", diff --git a/apps/frontend/pages/api/experiments/delete/[expIdToDelete].tsx b/apps/frontend/pages/api/experiments/delete/[expIdToDelete].tsx index fc4d8ff1..b9aa7675 100644 --- a/apps/frontend/pages/api/experiments/delete/[expIdToDelete].tsx +++ b/apps/frontend/pages/api/experiments/delete/[expIdToDelete].tsx @@ -13,15 +13,15 @@ const mongoExpHandler: NextApiHandler = async (req, res) => { const client = await clientPromise; const db = client.db(DB_NAME); - const experiment = await db + const result = await db .collection(COLLECTION_EXPERIMENTS) .deleteOne({ '_id': expIdToDelete as any }); // Assuming expId is the unique identifier in the collection - if (!experiment) { + if (result.deletedCount === 0) { return res.status(404).json({ response: 
'Experiment not found' } as any); } - res.status(200).json(experiment as unknown as ExperimentData); + res.status(200); } catch (error) { const message = 'Failed to delete the experiment'; console.error('Error contacting server: ', error); diff --git a/apps/frontend/pages/api/experiments/storeExp.tsx b/apps/frontend/pages/api/experiments/storeExp.tsx index 51cdf41d..61fb93a7 100644 --- a/apps/frontend/pages/api/experiments/storeExp.tsx +++ b/apps/frontend/pages/api/experiments/storeExp.tsx @@ -36,13 +36,4 @@ const mongoExpHandler: NextApiHandler = async (req, res) => { } }; -// TODO: In DispatchStep.tsx: -// fetch('/api/storage/storeExp', { -// method: 'POST', -// headers: { -// 'Content-Type': 'application/json', -// }, -// body: JSON.stringify(objectWithData) -// }) - export default mongoExpHandler; From c25f79452b5592e6b757f312a2253d60131807b2 Mon Sep 17 00:00:00 2001 From: rhit-windsors Date: Wed, 23 Oct 2024 13:25:56 -0400 Subject: [PATCH 011/198] Update mongodb.ts --- apps/frontend/lib/mongodb.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/apps/frontend/lib/mongodb.ts b/apps/frontend/lib/mongodb.ts index 8d37e79b..d026ba05 100644 --- a/apps/frontend/lib/mongodb.ts +++ b/apps/frontend/lib/mongodb.ts @@ -18,6 +18,7 @@ export const COLLECTION_RESULTS_CSVS = 'results'; export const COLLECTION_EXPERIMENTS = 'experiments'; export const COLLECTION_EXPERIMENT_FILES = 'files'; + let client: MongoClient; let clientPromise: Promise = new Promise((success) => { return true; From 00fcb9eb02602c74d1350d5ad1e6c90ac49201bd Mon Sep 17 00:00:00 2001 From: rhit-windsors Date: Wed, 23 Oct 2024 13:36:20 -0400 Subject: [PATCH 012/198] try casting --- apps/frontend/lib/mongodb.ts | 1 - apps/frontend/pages/api/experiments/storeExp.tsx | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/apps/frontend/lib/mongodb.ts b/apps/frontend/lib/mongodb.ts index d026ba05..8d37e79b 100644 --- a/apps/frontend/lib/mongodb.ts +++ b/apps/frontend/lib/mongodb.ts @@ -18,7 +18,6 @@ export const COLLECTION_RESULTS_CSVS = 'results'; export const COLLECTION_EXPERIMENTS = 'experiments'; export const COLLECTION_EXPERIMENT_FILES = 'files'; - let client: MongoClient; let clientPromise: Promise = new Promise((success) => { return true; diff --git a/apps/frontend/pages/api/experiments/storeExp.tsx b/apps/frontend/pages/api/experiments/storeExp.tsx index 61fb93a7..4239c136 100644 --- a/apps/frontend/pages/api/experiments/storeExp.tsx +++ b/apps/frontend/pages/api/experiments/storeExp.tsx @@ -8,7 +8,7 @@ const mongoExpHandler: NextApiHandler = async (req, res) => { return; } - const experimentData: ExperimentData = req.body; + const experimentData: Partial = JSON.parse(req.body) as Partial; if (!experimentData || typeof experimentData !== 'object') { res.status(400).json({ response: 'Invalid experiment data provided' } as any); From 1752191dee461340dfe999f53708ead33762b12d Mon Sep 17 00:00:00 2001 From: rhit-windsors Date: Wed, 23 Oct 2024 13:37:58 -0400 Subject: [PATCH 013/198] Update frontend.Dockerfile --- apps/frontend/frontend.Dockerfile | 70 +++++++++++++++---------------- 1 file changed, 35 insertions(+), 35 deletions(-) diff --git a/apps/frontend/frontend.Dockerfile b/apps/frontend/frontend.Dockerfile index c7c44bb5..ce47a8a2 100644 --- a/apps/frontend/frontend.Dockerfile +++ b/apps/frontend/frontend.Dockerfile @@ -1,59 +1,59 @@ -FROM node:20.6 AS base +# FROM node:20.6 AS base -WORKDIR /app +# WORKDIR /app -COPY . /app +# COPY . 
/app -RUN npm install +# RUN npm install -RUN npm run build +# RUN npm run build -EXPOSE $FRONTEND_WEBSERVER_PORT +# EXPOSE $FRONTEND_WEBSERVER_PORT -CMD ["npm", "start"] +# CMD ["npm", "start"] -# FROM node:20-alpine AS base +FROM node:20-alpine AS base -# FROM base AS deps +FROM base AS deps -# RUN apk add --no-cache libc6-compat -# WORKDIR /app +RUN apk add --no-cache libc6-compat +WORKDIR /app -# COPY package.json ./ +COPY package.json ./ -# RUN npm update && npm install +RUN npm update && npm install -# # Install this to optimize images -# RUN npm i sharp +# Install this to optimize images +RUN npm i sharp -# # If you want yarn update and install uncomment the bellow +# If you want yarn update and install uncomment the bellow -# # RUN yarn install && yarn upgrade +# RUN yarn install && yarn upgrade -# FROM base AS builder -# WORKDIR /app -# COPY --from=deps /app/node_modules ./node_modules -# COPY . . +FROM base AS builder +WORKDIR /app +COPY --from=deps /app/node_modules ./node_modules +COPY . . -# RUN npm run build +RUN npm run build -# FROM base AS runner -# WORKDIR /app +FROM base AS runner +WORKDIR /app -# ENV NODE_ENV=production -# RUN addgroup --system --gid 1001 nodejs -# RUN adduser --system --uid 1001 nextjs +ENV NODE_ENV=production +RUN addgroup --system --gid 1001 nodejs +RUN adduser --system --uid 1001 nextjs -# COPY --from=builder /app/public ./public +COPY --from=builder /app/public ./public -# RUN mkdir .next -# RUN chown nextjs:nodejs .next +RUN mkdir .next +RUN chown nextjs:nodejs .next -# COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./ -# COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static +COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./ +COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static -# USER nextjs +USER nextjs -# EXPOSE $FRONTEND_WEBSERVER_PORT +EXPOSE $FRONTEND_WEBSERVER_PORT -# CMD ["node", "server.js"] \ No newline at end of file +CMD ["node", "server.js"] \ No newline at end of file From da1607d66ad9c0313f56dbdbb1871a61dd40facb Mon Sep 17 00:00:00 2001 From: rhit-windsors Date: Wed, 23 Oct 2024 13:46:58 -0400 Subject: [PATCH 014/198] Update db.ts --- apps/frontend/firebase/db.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/apps/frontend/firebase/db.ts b/apps/frontend/firebase/db.ts index fb728f6a..57e4c163 100644 --- a/apps/frontend/firebase/db.ts +++ b/apps/frontend/firebase/db.ts @@ -31,9 +31,9 @@ export const submitExperiment = async (values: Partial, userId: method: "POST", body: JSON.stringify(values) } - ).then((response) => { + ).then(async (response) => { if (response?.ok) { - return response.json(); + return JSON.parse(await response.json()).id; } return Promise.reject(response); }).then((expId: String) => { From 1ebbe19feb3b38672fb1a218bf59338f0b543c1a Mon Sep 17 00:00:00 2001 From: rhit-windsors Date: Wed, 23 Oct 2024 13:48:53 -0400 Subject: [PATCH 015/198] Update db.ts --- apps/frontend/firebase/db.ts | 27 +++++++++++++++------------ 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/apps/frontend/firebase/db.ts b/apps/frontend/firebase/db.ts index 57e4c163..1ab6510d 100644 --- a/apps/frontend/firebase/db.ts +++ b/apps/frontend/firebase/db.ts @@ -26,21 +26,24 @@ export const submitExperiment = async (values: Partial, userId: values.finished = false; values.estimatedTotalTimeMinutes = 0; values.totalExperimentRuns = 0; - await fetch(`/api/experiments/storeExp`, + const response = await fetch(`/api/experiments/storeExp`, { 
method: "POST", body: JSON.stringify(values) - } - ).then(async (response) => { - if (response?.ok) { - return JSON.parse(await response.json()).id; - } - return Promise.reject(response); - }).then((expId: String) => { - console.log(expId); - }).catch((response: Response) => { - // might need this - }); + }); + if (response.ok){ + return JSON.parse(await response.json()).id; + } + // ).then(async (response) => { + // if (response?.ok) { + // return JSON.parse(await response.json()).id; + // } + // return Promise.reject(response); + // }).then((expId: String) => { + // console.log(expId); + // }).catch((response: Response) => { + // // might need this + // }); }; // TODO: will use mongo gridfs From b13b560c5c3ffdd2e93086f74908692efb3a4373 Mon Sep 17 00:00:00 2001 From: rhit-windsors Date: Wed, 23 Oct 2024 13:57:37 -0400 Subject: [PATCH 016/198] Update db.ts --- apps/frontend/firebase/db.ts | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/apps/frontend/firebase/db.ts b/apps/frontend/firebase/db.ts index 1ab6510d..40b239cf 100644 --- a/apps/frontend/firebase/db.ts +++ b/apps/frontend/firebase/db.ts @@ -32,18 +32,8 @@ export const submitExperiment = async (values: Partial, userId: body: JSON.stringify(values) }); if (response.ok){ - return JSON.parse(await response.json()).id; + return (await response.json()).id; } - // ).then(async (response) => { - // if (response?.ok) { - // return JSON.parse(await response.json()).id; - // } - // return Promise.reject(response); - // }).then((expId: String) => { - // console.log(expId); - // }).catch((response: Response) => { - // // might need this - // }); }; // TODO: will use mongo gridfs From 4d1d1595a540646a5ac55e4b51b5d4b5c92b89db Mon Sep 17 00:00:00 2001 From: rhit-windsors Date: Wed, 23 Oct 2024 20:46:30 -0400 Subject: [PATCH 017/198] working on backend for exps --- apps/backend/app.py | 10 +++++++++- apps/backend/modules/mongo.py | 7 +++++-- apps/runner/modules/utils.py | 13 ++++++++++--- apps/runner/runner.py | 4 +++- 4 files changed, 27 insertions(+), 7 deletions(-) diff --git a/apps/backend/app.py b/apps/backend/app.py index ad7238ee..4d830ad0 100644 --- a/apps/backend/app.py +++ b/apps/backend/app.py @@ -7,7 +7,7 @@ from flask import Flask, Response, request, jsonify from kubernetes import client, config import pymongo -from modules.mongo import upload_experiment_aggregated_results, upload_experiment_zip, upload_log_file, verify_mongo_connection, check_insert_default_experiments, download_experiment_file +from modules.mongo import upload_experiment_aggregated_results, upload_experiment_zip, upload_log_file, verify_mongo_connection, check_insert_default_experiments, download_experiment_file, get_experiment from spawn_runner import create_job, create_job_object flaskApp = Flask(__name__) @@ -104,6 +104,14 @@ def download_exp_file(): return {'contents': download_experiment_file(experiment_id, mongoClient)} except Exception: return Response(status=500) + +@flaskApp.post("/getExperiment") +def get_experiment_post(): + try: + experiment_id = request.args.get('expId', default='', type=str) + return {'contents': get_experiment(experiment_id, mongoClient)} + except Exception: + return Response(status=500) if __name__ == '__main__': flaskApp.run() diff --git a/apps/backend/modules/mongo.py b/apps/backend/modules/mongo.py index 63a58aee..2a157163 100644 --- a/apps/backend/modules/mongo.py +++ b/apps/backend/modules/mongo.py @@ -28,7 +28,6 @@ def upload_experiment_zip(experimentId: str, encoded: Binary, mongoClient: pymon 
experimentZipEntry = {"_id": experimentId, "fileContent": encoded} zipCollection = mongoClient["gladosdb"].zips try: - # TODO: Refactor to call the backend resultZipId = zipCollection.insert_one(experimentZipEntry).inserted_id return resultZipId except Exception as err: @@ -89,4 +88,8 @@ def download_experiment_file(expId: str, mongoClient: pymongo.MongoClient): file = bucket.open_download_stream_by_name(file_name) contents = file.read() return contents.decode("utf-8") - \ No newline at end of file + +def get_experiment(expId: str, mongoClient: pymongo.MongoClient): + experimentsCollection = mongoClient["gladosdb"].experiments + experiment = experimentsCollection.find_one({"_id": expId}) + return experiment diff --git a/apps/runner/modules/utils.py b/apps/runner/modules/utils.py index ad1fed10..bbaf59a8 100644 --- a/apps/runner/modules/utils.py +++ b/apps/runner/modules/utils.py @@ -41,7 +41,7 @@ def upload_experiment_aggregated_results(experiment: ExperimentData, resultConte payload = { "experimentId": experiment.expId, "results": resultContent - } + } _callBackend(url, payload, "inserted result csv into mongodb with id") @@ -51,7 +51,7 @@ def upload_experiment_zip(experiment: ExperimentData, encoded: Binary): payload = { "experimentId": experiment.expId, "encoded": base64.b64encode(encoded).decode("utf-8") - } + } _callBackend(url, payload, "inserted zip into mongodb with id") def upload_experiment_log(experimentId: DocumentId): @@ -73,8 +73,15 @@ def upload_experiment_log(experimentId: DocumentId): payload = { "experimentId": experimentId, "logContents": contents - } + } _callBackend(url, payload, "inserted log file into mongodb with id") + +def get_experiment_with_id(experimentId: str): + url = f'http://glados-service-backend:{os.getenv("BACKEND_PORT")}/getExperiment' + payload = { + "experimentId": experimentId + } + _callBackend(url, payload, "got experiment data") def _callBackend(url, payload, logMsg): try: diff --git a/apps/runner/runner.py b/apps/runner/runner.py index 11f054a3..23e486d8 100644 --- a/apps/runner/runner.py +++ b/apps/runner/runner.py @@ -81,11 +81,13 @@ def run_batch(data: IncomingStartRequest): open_experiment_logger(expId) - # Retrieve experiment details from firebase + # Retrieve experiment details from the backend api try: experiments = firebaseDb.collection(DB_COLLECTION_EXPERIMENTS) expRef = experiments.document(expId) experimentData = expRef.get().to_dict() + + except Exception as err: # pylint: disable=broad-exception-caught explogger.error("Error retrieving experiment data from firebase, aborting") explogger.exception(err) From 5f67214243f64d1760d6c3e92a34a9e085a91285 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Wed, 23 Oct 2024 20:58:57 -0400 Subject: [PATCH 018/198] Update app.py --- apps/backend/app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/backend/app.py b/apps/backend/app.py index 4d830ad0..085b8a9e 100644 --- a/apps/backend/app.py +++ b/apps/backend/app.py @@ -108,7 +108,7 @@ def download_exp_file(): @flaskApp.post("/getExperiment") def get_experiment_post(): try: - experiment_id = request.args.get('expId', default='', type=str) + experiment_id = request.get_json()['experimentId'] return {'contents': get_experiment(experiment_id, mongoClient)} except Exception: return Response(status=500) From 0422e18bad2d7fce4620762e5cadcb4fe16088df Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Wed, 23 Oct 2024 21:03:21 -0400 Subject: [PATCH 019/198] Update mongo.py --- apps/backend/modules/mongo.py | 4 ++-- 1 file 
changed, 2 insertions(+), 2 deletions(-) diff --git a/apps/backend/modules/mongo.py b/apps/backend/modules/mongo.py index 2a157163..83edaa50 100644 --- a/apps/backend/modules/mongo.py +++ b/apps/backend/modules/mongo.py @@ -1,7 +1,7 @@ import json import pymongo from pymongo.errors import ConnectionFailure -from bson import Binary +from bson import Binary, ObjectId from gridfs import GridFSBucket def verify_mongo_connection(mongoClient: pymongo.MongoClient): @@ -91,5 +91,5 @@ def download_experiment_file(expId: str, mongoClient: pymongo.MongoClient): def get_experiment(expId: str, mongoClient: pymongo.MongoClient): experimentsCollection = mongoClient["gladosdb"].experiments - experiment = experimentsCollection.find_one({"_id": expId}) + experiment = experimentsCollection.find_one({"_id": ObjectId(expId)}) return experiment From 0ae2c16dfac5ece9696a21bb0a0a3ac3c1eb136c Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Wed, 23 Oct 2024 21:07:49 -0400 Subject: [PATCH 020/198] Update mongo.py --- apps/backend/modules/mongo.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/backend/modules/mongo.py b/apps/backend/modules/mongo.py index 83edaa50..1ac259ac 100644 --- a/apps/backend/modules/mongo.py +++ b/apps/backend/modules/mongo.py @@ -92,4 +92,4 @@ def download_experiment_file(expId: str, mongoClient: pymongo.MongoClient): def get_experiment(expId: str, mongoClient: pymongo.MongoClient): experimentsCollection = mongoClient["gladosdb"].experiments experiment = experimentsCollection.find_one({"_id": ObjectId(expId)}) - return experiment + return "found exp!" From 0a0862e83016ce77c26d5afd9a32dce8f65c8cc9 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Wed, 23 Oct 2024 21:12:12 -0400 Subject: [PATCH 021/198] Update mongo.py --- apps/backend/modules/mongo.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/apps/backend/modules/mongo.py b/apps/backend/modules/mongo.py index 1ac259ac..eb509edc 100644 --- a/apps/backend/modules/mongo.py +++ b/apps/backend/modules/mongo.py @@ -92,4 +92,7 @@ def download_experiment_file(expId: str, mongoClient: pymongo.MongoClient): def get_experiment(expId: str, mongoClient: pymongo.MongoClient): experimentsCollection = mongoClient["gladosdb"].experiments experiment = experimentsCollection.find_one({"_id": ObjectId(expId)}) + if experiment is None: + raise Exception("Could not find experiment!") + print(experiment) return "found exp!" From 48e14df5dcf39ddad89e4b266a61500b849d4593 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Wed, 23 Oct 2024 21:16:12 -0400 Subject: [PATCH 022/198] Update mongo.py --- apps/backend/modules/mongo.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/apps/backend/modules/mongo.py b/apps/backend/modules/mongo.py index eb509edc..7025e705 100644 --- a/apps/backend/modules/mongo.py +++ b/apps/backend/modules/mongo.py @@ -94,5 +94,4 @@ def get_experiment(expId: str, mongoClient: pymongo.MongoClient): experiment = experimentsCollection.find_one({"_id": ObjectId(expId)}) if experiment is None: raise Exception("Could not find experiment!") - print(experiment) - return "found exp!" 
+ return experiment["name"] From 8e371ec12adbcde70fd0086f25cc1a226640ee63 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Wed, 23 Oct 2024 21:27:00 -0400 Subject: [PATCH 023/198] Update mongo.py --- apps/backend/modules/mongo.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/apps/backend/modules/mongo.py b/apps/backend/modules/mongo.py index 7025e705..5da80491 100644 --- a/apps/backend/modules/mongo.py +++ b/apps/backend/modules/mongo.py @@ -91,7 +91,7 @@ def download_experiment_file(expId: str, mongoClient: pymongo.MongoClient): def get_experiment(expId: str, mongoClient: pymongo.MongoClient): experimentsCollection = mongoClient["gladosdb"].experiments - experiment = experimentsCollection.find_one({"_id": ObjectId(expId)}) + experiment = experimentsCollection.find_one({"_id": ObjectId(expId)}, {"_id": 0}) if experiment is None: raise Exception("Could not find experiment!") - return experiment["name"] + return experiment From c38335734488b9f7d09b37dff1329a6a6785f5d5 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Wed, 23 Oct 2024 21:29:21 -0400 Subject: [PATCH 024/198] Update runner.py --- apps/runner/runner.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/apps/runner/runner.py b/apps/runner/runner.py index 23e486d8..8c0c6e35 100644 --- a/apps/runner/runner.py +++ b/apps/runner/runner.py @@ -23,7 +23,7 @@ from modules.exceptions import CustomFlaskError, DatabaseConnectionError, GladosInternalError, ExperimentAbort, GladosUserError from modules.output.plots import generateScatterPlot from modules.configs import generate_config_files -from modules.utils import _get_env, upload_experiment_aggregated_results, upload_experiment_log, upload_experiment_zip, verify_mongo_connection +from modules.utils import _get_env, upload_experiment_aggregated_results, upload_experiment_log, upload_experiment_zip, verify_mongo_connection, get_experiment_with_id try: import magic # Crashes on windows if you're missing the 'python-magic-bin' python package @@ -83,9 +83,10 @@ def run_batch(data: IncomingStartRequest): # Retrieve experiment details from the backend api try: - experiments = firebaseDb.collection(DB_COLLECTION_EXPERIMENTS) - expRef = experiments.document(expId) - experimentData = expRef.get().to_dict() + # experiments = firebaseDb.collection(DB_COLLECTION_EXPERIMENTS) + # expRef = experiments.document(expId) + # experimentData = expRef.get().to_dict() + experimentData = get_experiment_with_id(expId).to_dict() except Exception as err: # pylint: disable=broad-exception-caught From c9ddfc7d0ab183ea5eb951ef4835cde3d3d47264 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Wed, 23 Oct 2024 21:35:46 -0400 Subject: [PATCH 025/198] Update mongo.py --- apps/backend/modules/mongo.py | 1 + 1 file changed, 1 insertion(+) diff --git a/apps/backend/modules/mongo.py b/apps/backend/modules/mongo.py index 5da80491..d8cb850c 100644 --- a/apps/backend/modules/mongo.py +++ b/apps/backend/modules/mongo.py @@ -94,4 +94,5 @@ def get_experiment(expId: str, mongoClient: pymongo.MongoClient): experiment = experimentsCollection.find_one({"_id": ObjectId(expId)}, {"_id": 0}) if experiment is None: raise Exception("Could not find experiment!") + experiment["id"] = expId return experiment From 997aa26d6618cdbf0c2dda613df33c9de7824b8f Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Wed, 23 Oct 2024 21:43:36 -0400 Subject: [PATCH 026/198] update ids --- apps/backend/modules/mongo.py | 6 +++--- .../pages/api/download/csv/[expIdToCsvDownload].tsx | 2 +- 
apps/frontend/pages/api/download/logs/[idOfLogFile].tsx | 2 +- .../pages/api/download/zip/[expIdToZipDownload].tsx | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/apps/backend/modules/mongo.py b/apps/backend/modules/mongo.py index d8cb850c..615a84d3 100644 --- a/apps/backend/modules/mongo.py +++ b/apps/backend/modules/mongo.py @@ -12,7 +12,7 @@ def verify_mongo_connection(mongoClient: pymongo.MongoClient): raise Exception("MongoDB server not available/unreachable") from err def upload_experiment_aggregated_results(experimentId: str, results: str, mongoClient: pymongo.MongoClient): - experimentResultEntry = {"_id": experimentId, "resultContent": results} + experimentResultEntry = {"experimentId": experimentId, "resultContent": results} # Get the results connection resultsCollection = mongoClient["gladosdb"].results try: @@ -25,7 +25,7 @@ def upload_experiment_aggregated_results(experimentId: str, results: str, mongoC raise Exception("Encountered error while storing aggregated results in MongoDB") from err def upload_experiment_zip(experimentId: str, encoded: Binary, mongoClient: pymongo.MongoClient): - experimentZipEntry = {"_id": experimentId, "fileContent": encoded} + experimentZipEntry = {"experimentId": experimentId, "fileContent": encoded} zipCollection = mongoClient["gladosdb"].zips try: resultZipId = zipCollection.insert_one(experimentZipEntry).inserted_id @@ -34,7 +34,7 @@ def upload_experiment_zip(experimentId: str, encoded: Binary, mongoClient: pymon raise Exception("Encountered error while storing results zip in MongoDB") from err def upload_log_file(experimentId: str, contents: str, mongoClient: pymongo.MongoClient): - logFileEntry = {"_id": experimentId, "fileContent": contents} + logFileEntry = {"experimentId": experimentId, "fileContent": contents} logCollection = mongoClient["gladosdb"].logs try: resultId = logCollection.insert_one(logFileEntry).inserted_id diff --git a/apps/frontend/pages/api/download/csv/[expIdToCsvDownload].tsx b/apps/frontend/pages/api/download/csv/[expIdToCsvDownload].tsx index 2fa40549..a9a5ef51 100644 --- a/apps/frontend/pages/api/download/csv/[expIdToCsvDownload].tsx +++ b/apps/frontend/pages/api/download/csv/[expIdToCsvDownload].tsx @@ -19,7 +19,7 @@ const mongoCSVHandler: NextApiHandler = async (req, res) => { results = await db .collection(COLLECTION_RESULTS_CSVS) // TODO correct mongodb typescript type for id - .find({ '_id': expIdToCsvDownload as any }).toArray(); + .find({ 'experimentId': expIdToCsvDownload as any }).toArray(); } catch (error) { const message = 'Failed to download the csv'; console.error('Error contacting server: ', error); diff --git a/apps/frontend/pages/api/download/logs/[idOfLogFile].tsx b/apps/frontend/pages/api/download/logs/[idOfLogFile].tsx index 39475ed0..5e8dcab7 100644 --- a/apps/frontend/pages/api/download/logs/[idOfLogFile].tsx +++ b/apps/frontend/pages/api/download/logs/[idOfLogFile].tsx @@ -18,7 +18,7 @@ const mongoLogHandler: NextApiHandler = async (req, res) => { results = await db .collection(COLLECTION_LOGS) // TODO correct mongodb typescript type for id - .find({ '_id': idOfLogFile as any }).toArray(); + .find({ 'experimentId': idOfLogFile as any }).toArray(); } catch (error) { const message = 'Failed to download the log file'; console.error('Error contacting server: ', error); diff --git a/apps/frontend/pages/api/download/zip/[expIdToZipDownload].tsx b/apps/frontend/pages/api/download/zip/[expIdToZipDownload].tsx index 13d71b43..d346b2ad 100644 --- 
a/apps/frontend/pages/api/download/zip/[expIdToZipDownload].tsx +++ b/apps/frontend/pages/api/download/zip/[expIdToZipDownload].tsx @@ -19,7 +19,7 @@ const mongoZipHandler: NextApiHandler = async (req, res) => { results = await db .collection(COLLECTION_ZIPS) // TODO correct mongodb typescript type for id - .find({ '_id': expIdToZipDownload as any }).toArray(); + .find({ 'experimentId': expIdToZipDownload as any }).toArray(); } catch (error) { const message = 'Failed to download the zip'; console.error('Error contacting server: ', error); From e432a094df3fe4e22dd6d59307d0ddb0e344dbff Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Wed, 23 Oct 2024 21:45:41 -0400 Subject: [PATCH 027/198] Update utils.py --- apps/runner/modules/utils.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/apps/runner/modules/utils.py b/apps/runner/modules/utils.py index bbaf59a8..d142ebb1 100644 --- a/apps/runner/modules/utils.py +++ b/apps/runner/modules/utils.py @@ -87,11 +87,11 @@ def _callBackend(url, payload, logMsg): try: response = requests.post(url, json=payload) if response.status_code == 200: - resultId = response.json().get('id') + resultId = response.json().get('_id') if resultId: explogger.info(f"{logMsg}: {resultId}") else: - raise DatabaseConnectionError("Encountered error while writing document to MongoDB") + raise DatabaseConnectionError("Encountered error while contacting the backend!") except Exception as err: - raise DatabaseConnectionError("Encountered error while writing document to MongoDB") from err + raise DatabaseConnectionError("Encountered error while contacting the backend!") from err From 23f109e07a32dc0c56f62138921280789273f630 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Wed, 23 Oct 2024 22:01:24 -0400 Subject: [PATCH 028/198] Update utils.py --- apps/runner/modules/utils.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/apps/runner/modules/utils.py b/apps/runner/modules/utils.py index d142ebb1..5beef636 100644 --- a/apps/runner/modules/utils.py +++ b/apps/runner/modules/utils.py @@ -81,7 +81,11 @@ def get_experiment_with_id(experimentId: str): payload = { "experimentId": experimentId } - _callBackend(url, payload, "got experiment data") + response = requests.post(url, json=payload) + if response.status_code == 200: + return response + else: + raise DatabaseConnectionError("Error while getting experiment from backend!") def _callBackend(url, payload, logMsg): try: From 00dcdf3c8f782e2b8595ec0ea5894df754b6c4f7 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Wed, 23 Oct 2024 22:08:23 -0400 Subject: [PATCH 029/198] Update utils.py --- apps/runner/modules/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/runner/modules/utils.py b/apps/runner/modules/utils.py index 5beef636..a935f6ee 100644 --- a/apps/runner/modules/utils.py +++ b/apps/runner/modules/utils.py @@ -83,7 +83,7 @@ def get_experiment_with_id(experimentId: str): } response = requests.post(url, json=payload) if response.status_code == 200: - return response + return response.json() else: raise DatabaseConnectionError("Error while getting experiment from backend!") From 7dac82e7fbaf58f50a90f8d62235173d63e9f12f Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Wed, 23 Oct 2024 22:16:34 -0400 Subject: [PATCH 030/198] Update runner.py --- apps/runner/runner.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/runner/runner.py b/apps/runner/runner.py index 8c0c6e35..30f713b0 100644 --- a/apps/runner/runner.py +++ 
b/apps/runner/runner.py @@ -86,7 +86,7 @@ def run_batch(data: IncomingStartRequest): # experiments = firebaseDb.collection(DB_COLLECTION_EXPERIMENTS) # expRef = experiments.document(expId) # experimentData = expRef.get().to_dict() - experimentData = get_experiment_with_id(expId).to_dict() + experimentData = get_experiment_with_id(expId) except Exception as err: # pylint: disable=broad-exception-caught From 0ecdd0a270b859f479f04777080a46797a615341 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Wed, 23 Oct 2024 22:35:51 -0400 Subject: [PATCH 031/198] Update runner.py --- apps/runner/runner.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/apps/runner/runner.py b/apps/runner/runner.py index 30f713b0..bcc2fee7 100644 --- a/apps/runner/runner.py +++ b/apps/runner/runner.py @@ -92,7 +92,7 @@ def run_batch(data: IncomingStartRequest): except Exception as err: # pylint: disable=broad-exception-caught explogger.error("Error retrieving experiment data from firebase, aborting") explogger.exception(err) - close_experiment_run(expId, None) + # close_experiment_run(expId, None) return # Parse hyperaparameters into their datatype. Required to parse the rest of the experiment data @@ -104,7 +104,7 @@ def run_batch(data: IncomingStartRequest): else: explogger.error("Error generating hyperparameters - Validation error") explogger.exception(err) - close_experiment_run(expId, expRef) + # close_experiment_run(expId, expRef) return experimentData['hyperparameters'] = hyperparameters @@ -115,7 +115,7 @@ def run_batch(data: IncomingStartRequest): except ValueError as err: explogger.error("Experiment data was not formatted correctly, aborting") explogger.exception(err) - close_experiment_run(expId, expRef) + # close_experiment_run(expId, expRef) return #Downloading Experiment File @@ -130,7 +130,7 @@ def run_batch(data: IncomingStartRequest): explogger.error("This is not a supported experiment file type, aborting") explogger.exception(err) os.chdir('../..') - close_experiment_run(expId, expRef) + # close_experiment_run(expId, expRef) return explogger.info(f"Generating configs and downloading to ExperimentFiles/{expId}/configFiles") @@ -139,7 +139,7 @@ def run_batch(data: IncomingStartRequest): if totalExperimentRuns == 0: os.chdir('../..') explogger.exception(GladosInternalError("Error generating configs - somehow no config files were produced?")) - close_experiment_run(expId, expRef) + # close_experiment_run(expId, expRef) return experiment.totalExperimentRuns = totalExperimentRuns @@ -159,7 +159,7 @@ def run_batch(data: IncomingStartRequest): finally: # We need to be out of the dir for the log file upload to work os.chdir('../..') - close_experiment_run(expId, expRef) + # close_experiment_run(expId, expRef) def close_experiment_run(expId: DocumentId, expRef: "typing.Any | None"): explogger.info(f'Exiting experiment {expId}') From e3c73893766dad5015f3e37aed9ecf85f68624d4 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Wed, 23 Oct 2024 22:40:42 -0400 Subject: [PATCH 032/198] Update utils.py --- apps/runner/modules/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/runner/modules/utils.py b/apps/runner/modules/utils.py index a935f6ee..18ac4330 100644 --- a/apps/runner/modules/utils.py +++ b/apps/runner/modules/utils.py @@ -83,7 +83,7 @@ def get_experiment_with_id(experimentId: str): } response = requests.post(url, json=payload) if response.status_code == 200: - return response.json() + return response.json()['contents'] else: raise 
DatabaseConnectionError("Error while getting experiment from backend!") From a372f0818fbbb504162b7ad825d6532961f7ccb1 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Wed, 23 Oct 2024 22:52:07 -0400 Subject: [PATCH 033/198] Update runner.py --- apps/runner/runner.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/runner/runner.py b/apps/runner/runner.py index bcc2fee7..9a1dec4a 100644 --- a/apps/runner/runner.py +++ b/apps/runner/runner.py @@ -97,7 +97,7 @@ def run_batch(data: IncomingStartRequest): # Parse hyperaparameters into their datatype. Required to parse the rest of the experiment data try: - hyperparameters: "dict[str,Parameter]" = parseRawHyperparameterData(json.loads(experimentData['hyperparameters'])['hyperparameters']) + hyperparameters: "dict[str,Parameter]" = parseRawHyperparameterData(experimentData['hyperparameters']) except (KeyError, ValueError) as err: if isinstance(err, KeyError): explogger.error("Error generating hyperparameters - hyperparameters not found in experiment object, aborting") From 11efd8dbd58e0dc49c20ae88a66c297dcdb4a963 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Wed, 23 Oct 2024 22:55:36 -0400 Subject: [PATCH 034/198] Update mongo.py --- apps/backend/modules/mongo.py | 1 + 1 file changed, 1 insertion(+) diff --git a/apps/backend/modules/mongo.py b/apps/backend/modules/mongo.py index 615a84d3..4707b0a8 100644 --- a/apps/backend/modules/mongo.py +++ b/apps/backend/modules/mongo.py @@ -95,4 +95,5 @@ def get_experiment(expId: str, mongoClient: pymongo.MongoClient): if experiment is None: raise Exception("Could not find experiment!") experiment["id"] = expId + experiment["expId"] = expId return experiment From 5bb999152d0162e18b32dc78bdb300badd3e2cbe Mon Sep 17 00:00:00 2001 From: rhit-windsors Date: Thu, 24 Oct 2024 14:19:56 -0400 Subject: [PATCH 035/198] get rid of expref --- apps/backend/app.py | 14 +++++++++++++- apps/backend/modules/mongo.py | 5 +++++ apps/runner/modules/runner.py | 25 +++++++++++++++---------- apps/runner/modules/utils.py | 13 +++++++++++++ apps/runner/runner.py | 28 +++++++++++++++------------- 5 files changed, 61 insertions(+), 24 deletions(-) diff --git a/apps/backend/app.py b/apps/backend/app.py index 085b8a9e..996e9b1f 100644 --- a/apps/backend/app.py +++ b/apps/backend/app.py @@ -7,7 +7,7 @@ from flask import Flask, Response, request, jsonify from kubernetes import client, config import pymongo -from modules.mongo import upload_experiment_aggregated_results, upload_experiment_zip, upload_log_file, verify_mongo_connection, check_insert_default_experiments, download_experiment_file, get_experiment +from modules.mongo import upload_experiment_aggregated_results, upload_experiment_zip, upload_log_file, verify_mongo_connection, check_insert_default_experiments, download_experiment_file, get_experiment, update_exp_value from spawn_runner import create_job, create_job_object flaskApp = Flask(__name__) @@ -112,6 +112,18 @@ def get_experiment_post(): return {'contents': get_experiment(experiment_id, mongoClient)} except Exception: return Response(status=500) + +@flaskApp.post("/updateExperiment") +def update_experiment(): + try: + json = request.get_json() + experiment_id = json['experimentId'] + field = json['field'] + newVal = json['newValue'] + update_exp_value(experiment_id, field, newVal, mongoClient) + return Response(status=200) + except Exception: + return Response(status=500) if __name__ == '__main__': flaskApp.run() diff --git a/apps/backend/modules/mongo.py b/apps/backend/modules/mongo.py index 
4707b0a8..ada93b56 100644 --- a/apps/backend/modules/mongo.py +++ b/apps/backend/modules/mongo.py @@ -97,3 +97,8 @@ def get_experiment(expId: str, mongoClient: pymongo.MongoClient): experiment["id"] = expId experiment["expId"] = expId return experiment + +def update_exp_value(expId: str, field: str, newValue: str, mongoClient: pymongo.MongoClient): + experimentsCollection = mongoClient["gladosdb"].experiments + experimentsCollection.update_one({"_id": ObjectId(expId)}, {"$set": {field: newValue}}) + return \ No newline at end of file diff --git a/apps/runner/modules/runner.py b/apps/runner/modules/runner.py index 86ff44fe..c7e28c37 100644 --- a/apps/runner/modules/runner.py +++ b/apps/runner/modules/runner.py @@ -11,6 +11,7 @@ from modules.exceptions import InternalTrialFailedError from modules.configs import get_config_paramNames from modules.logging.gladosLogging import get_experiment_logger +from modules.utils import update_exp_value PROCESS_OUT_STREAM = 0 PROCESS_ERROR_STREAM = 1 @@ -85,7 +86,7 @@ def _add_to_output_batch(trialExtraFile, ExpRun: int): raise FileHandlingError("Failed to copy results csv. Maybe there was a typo in the filepath?") from err -def conduct_experiment(experiment: ExperimentData, expRef): +def conduct_experiment(experiment: ExperimentData): """ Call this function when inside the experiment folder! """ @@ -100,7 +101,8 @@ def conduct_experiment(experiment: ExperimentData, expRef): for trialNum in range(0, experiment.totalExperimentRuns): startSeconds = time.time() if trialNum == 0: - expRef.update({"startedAtEpochMillis": int(startSeconds * 1000)}) + # expRef.update({"startedAtEpochMillis": int(startSeconds * 1000)}) + update_exp_value(experiment.expId, "startedAtEpochMillis", int(startSeconds * 1000)) try: configFileName = create_config_from_data(experiment, trialNum) @@ -111,7 +113,7 @@ def conduct_experiment(experiment: ExperimentData, expRef): try: _run_trial(experiment, f'configFiles/{configFileName}', trialNum) except (TrialTimeoutError, InternalTrialFailedError) as err: - _handle_trial_error(experiment, expRef, numOutputs, paramNames, writer, trialNum, err) + _handle_trial_error(experiment, numOutputs, paramNames, writer, trialNum, err) continue endSeconds = time.time() @@ -120,12 +122,13 @@ def conduct_experiment(experiment: ExperimentData, expRef): if trialNum == 0: estimatedTotalTimeMinutes = timeTakenMinutes * experiment.totalExperimentRuns explogger.info(f"Estimated minutes to run: {estimatedTotalTimeMinutes}") - expRef.update({'estimatedTotalTimeMinutes': estimatedTotalTimeMinutes}) + # expRef.update({'estimatedTotalTimeMinutes': estimatedTotalTimeMinutes}) + update_exp_value(experiment.expId, 'estimatedTotalTimeMinutes', estimatedTotalTimeMinutes) try: csvHeader = _get_line_n_of_trial_results_csv(0, experiment.trialResult) except GladosUserError as err: - _handle_trial_error(experiment, expRef, numOutputs, paramNames, writer, trialNum, err) + _handle_trial_error(experiment, numOutputs, paramNames, writer, trialNum, err) return numOutputs = len(csvHeader) writer.writerow(["Experiment Run"] + csvHeader + paramNames) @@ -134,23 +137,24 @@ def conduct_experiment(experiment: ExperimentData, expRef): try: _add_to_output_batch(experiment.trialExtraFile, trialNum) except FileHandlingError as err: - _handle_trial_error(experiment, expRef, numOutputs, paramNames, writer, trialNum, err) + _handle_trial_error(experiment, numOutputs, paramNames, writer, trialNum, err) continue try: output = _get_line_n_of_trial_results_csv(1, experiment.trialResult) except 
GladosUserError as err: - _handle_trial_error(experiment, expRef, numOutputs, paramNames, writer, trialNum, err) + _handle_trial_error(experiment, numOutputs, paramNames, writer, trialNum, err) continue writer.writerow([trialNum] + output + get_configs_ordered(f'configFiles/{trialNum}.ini', paramNames)) explogger.info(f'Trial#{trialNum} completed') experiment.passes += 1 - expRef.update({'passes': experiment.passes}) + # expRef.update({'passes': experiment.passes}) + update_exp_value(experiment.expId, 'passes', experiment.passes) explogger.info("Finished running Trials") -def _handle_trial_error(experiment: ExperimentData, expRef, numOutputs: int, paramNames: "list", writer, trialNum: int, err: BaseException): +def _handle_trial_error(experiment: ExperimentData, numOutputs: int, paramNames: "list", writer, trialNum: int, err: BaseException): csvErrorValue = None if isinstance(err, TrialTimeoutError): csvErrorValue = "TIMEOUT" @@ -160,7 +164,8 @@ def _handle_trial_error(experiment: ExperimentData, expRef, numOutputs: int, par explogger.error(f'Trial#{trialNum} Encountered an Error') explogger.exception(err) experiment.fails += 1 - expRef.update({'fails': experiment.fails}) + # expRef.update({'fails': experiment.fails}) + update_exp_value(experiment.expId, 'fails', experiment.fails) if trialNum == 0: message = f"First trial of {experiment.expId} ran into an error while running, aborting the whole experiment. Read the traceback to find out what the actual cause of this problem is (it will not necessarily be at the top of the stack trace)." raise ExperimentAbort(message) from err diff --git a/apps/runner/modules/utils.py b/apps/runner/modules/utils.py index 18ac4330..f58823d1 100644 --- a/apps/runner/modules/utils.py +++ b/apps/runner/modules/utils.py @@ -86,6 +86,19 @@ def get_experiment_with_id(experimentId: str): return response.json()['contents'] else: raise DatabaseConnectionError("Error while getting experiment from backend!") + +def update_exp_value(experimentId: str, field: str, newValue): + url = f'http://glados-service-backend:{os.getenv("BACKEND_PORT")}/updateExperiment' + payload = { + "experimentId": experimentId, + "field": field, + "newValue": newValue + } + response = requests.post(url, json=payload) + if response.status_code == 200: + return + else: + raise DatabaseConnectionError("Error updating experiment document!") def _callBackend(url, payload, logMsg): try: diff --git a/apps/runner/runner.py b/apps/runner/runner.py index 9a1dec4a..4a326d9b 100644 --- a/apps/runner/runner.py +++ b/apps/runner/runner.py @@ -23,7 +23,7 @@ from modules.exceptions import CustomFlaskError, DatabaseConnectionError, GladosInternalError, ExperimentAbort, GladosUserError from modules.output.plots import generateScatterPlot from modules.configs import generate_config_files -from modules.utils import _get_env, upload_experiment_aggregated_results, upload_experiment_log, upload_experiment_zip, verify_mongo_connection, get_experiment_with_id +from modules.utils import _get_env, upload_experiment_aggregated_results, upload_experiment_log, upload_experiment_zip, verify_mongo_connection, get_experiment_with_id, update_exp_value try: import magic # Crashes on windows if you're missing the 'python-magic-bin' python package @@ -104,7 +104,7 @@ def run_batch(data: IncomingStartRequest): else: explogger.error("Error generating hyperparameters - Validation error") explogger.exception(err) - # close_experiment_run(expId, expRef) + close_experiment_run(expId) return experimentData['hyperparameters'] = 
hyperparameters @@ -115,7 +115,7 @@ def run_batch(data: IncomingStartRequest): except ValueError as err: explogger.error("Experiment data was not formatted correctly, aborting") explogger.exception(err) - # close_experiment_run(expId, expRef) + close_experiment_run(expId) return #Downloading Experiment File @@ -130,7 +130,7 @@ def run_batch(data: IncomingStartRequest): explogger.error("This is not a supported experiment file type, aborting") explogger.exception(err) os.chdir('../..') - # close_experiment_run(expId, expRef) + close_experiment_run(expId) return explogger.info(f"Generating configs and downloading to ExperimentFiles/{expId}/configFiles") @@ -139,15 +139,16 @@ def run_batch(data: IncomingStartRequest): if totalExperimentRuns == 0: os.chdir('../..') explogger.exception(GladosInternalError("Error generating configs - somehow no config files were produced?")) - # close_experiment_run(expId, expRef) + close_experiment_run(expId) return experiment.totalExperimentRuns = totalExperimentRuns - expRef.update({"totalExperimentRuns": experiment.totalExperimentRuns}) + # expRef.update({"totalExperimentRuns": experiment.totalExperimentRuns}) + update_exp_value(expId, "totalExperimentRuns", experiment.totalExperimentRuns) try: - conduct_experiment(experiment, expRef) + conduct_experiment(experiment) post_process_experiment(experiment) upload_experiment_results(experiment) except ExperimentAbort as err: @@ -159,14 +160,15 @@ def run_batch(data: IncomingStartRequest): finally: # We need to be out of the dir for the log file upload to work os.chdir('../..') - # close_experiment_run(expId, expRef) + # close_experiment_run(expId) -def close_experiment_run(expId: DocumentId, expRef: "typing.Any | None"): +def close_experiment_run(expId: DocumentId): explogger.info(f'Exiting experiment {expId}') - if expRef: - expRef.update({'finished': True, 'finishedAtEpochMillis': int(time.time() * 1000)}) - else: - syslogger.warning(f'No experiment ref supplied when closing {expId} , could not update it to finished') + # if expRef: + # expRef.update({'finished': True, 'finishedAtEpochMillis': int(time.time() * 1000)}) + # else: + # syslogger.warning(f'No experiment ref supplied when closing {expId} , could not update it to finished') + update_exp_value(expId, 'finished', True) close_experiment_logger() upload_experiment_log(expId) remove_downloaded_directory(expId) From c6872ef4b6ac1047aada637f170e5bb91a2c64c7 Mon Sep 17 00:00:00 2001 From: rhit-windsors Date: Thu, 24 Oct 2024 14:36:18 -0400 Subject: [PATCH 036/198] cleanup --- apps/runner/modules/utils.py | 91 +++++++++++++++++++++++++++++------- apps/runner/runner.py | 44 +++++++---------- 2 files changed, 91 insertions(+), 44 deletions(-) diff --git a/apps/runner/modules/utils.py b/apps/runner/modules/utils.py index f58823d1..e7975060 100644 --- a/apps/runner/modules/utils.py +++ b/apps/runner/modules/utils.py @@ -26,45 +26,69 @@ def _get_env(key: str): return value def verify_mongo_connection(): + """Verify the connection to the mongo database + + Raises: + DatabaseConnectionError: error that connection was unsuccessful + """ url = f'http://glados-service-backend:{os.getenv("BACKEND_PORT")}/mongoPulse' - response = requests.get(url) + response = requests.get(url, timeout=10) if response.ok: return else: raise DatabaseConnectionError("MongoDB server not available/unreachable") - - def upload_experiment_aggregated_results(experiment: ExperimentData, resultContent: str): + """Upload experiment results to the database + + Args: + experiment (ExperimentData): 
experiment data + resultContent (str): csv data + """ # Call the backend url = f'http://glados-service-backend:{os.getenv("BACKEND_PORT")}/uploadResults' payload = { "experimentId": experiment.expId, "results": resultContent } - _callBackend(url, payload, "inserted result csv into mongodb with id") + _call_backend(url, payload, "inserted result csv into mongodb with id") def upload_experiment_zip(experiment: ExperimentData, encoded: Binary): + """Upload experiment zip + + Args: + experiment (ExperimentData): experiment data + encoded (Binary): encoded binary of zip for mongo + """ # Call the backend url = f'http://glados-service-backend:{os.getenv("BACKEND_PORT")}/uploadZip' payload = { "experimentId": experiment.expId, "encoded": base64.b64encode(encoded).decode("utf-8") } - _callBackend(url, payload, "inserted zip into mongodb with id") + _call_backend(url, payload, "inserted zip into mongodb with id") def upload_experiment_log(experimentId: DocumentId): - filePath = get_filepath_for_experiment_log(experimentId) - get_system_logger().info('Uploading log file to the database: %s', filePath) + """Upload the experiment log + + Args: + experimentId (DocumentId): experiment data + + Raises: + GladosInternalError: error raised + GladosInternalError: error raised + """ + file_path = get_filepath_for_experiment_log(experimentId) + get_system_logger().info('Uploading log file to the database: %s', file_path) if len(explogger.handlers) != 0: raise GladosInternalError("Experiment logger still has a handler open at upload time. Close it first.") contents = None try: - with open(filePath, 'r', encoding="utf8") as logFile: - contents = logFile.read() + with open(file_path, 'r', encoding="utf8") as log_file: + contents = log_file.read() except Exception as err: raise GladosInternalError(f"Failed to read log file for experiment {experimentId}") from err @@ -74,20 +98,41 @@ def upload_experiment_log(experimentId: DocumentId): "experimentId": experimentId, "logContents": contents } - _callBackend(url, payload, "inserted log file into mongodb with id") + _call_backend(url, payload, "inserted log file into mongodb with id") def get_experiment_with_id(experimentId: str): + """Get the experiment data from the ID + + Args: + experimentId (str): experiment id in mongo + + Raises: + DatabaseConnectionError: couldn't connect to db + + Returns: + json: json contents + """ url = f'http://glados-service-backend:{os.getenv("BACKEND_PORT")}/getExperiment' payload = { "experimentId": experimentId } - response = requests.post(url, json=payload) + response = requests.post(url, json=payload, timeout=10) if response.status_code == 200: return response.json()['contents'] else: raise DatabaseConnectionError("Error while getting experiment from backend!") - + def update_exp_value(experimentId: str, field: str, newValue): + """Update an experiment dynamically + + Args: + experimentId (str): _description_ + field (str): _description_ + newValue (_type_): _description_ + + Raises: + DatabaseConnectionError: _description_ + """ url = f'http://glados-service-backend:{os.getenv("BACKEND_PORT")}/updateExperiment' payload = { "experimentId": experimentId, @@ -100,15 +145,25 @@ def update_exp_value(experimentId: str, field: str, newValue): else: raise DatabaseConnectionError("Error updating experiment document!") -def _callBackend(url, payload, logMsg): +def _call_backend(url, payload, log_msg): + """calls the backend with provided args + + Args: + url (str): backend url to be called + payload (json): payload to send to backend + 
logMsg (str): message to log + + Raises: + DatabaseConnectionError: _description_ + DatabaseConnectionError: _description_ + """ try: - response = requests.post(url, json=payload) + response = requests.post(url, json=payload, timeout=10) if response.status_code == 200: - resultId = response.json().get('_id') - if resultId: - explogger.info(f"{logMsg}: {resultId}") + result_id = response.json().get('_id') + if result_id: + explogger.info(f"{log_msg}: {result_id}") else: raise DatabaseConnectionError("Encountered error while contacting the backend!") except Exception as err: raise DatabaseConnectionError("Encountered error while contacting the backend!") from err - diff --git a/apps/runner/runner.py b/apps/runner/runner.py index 4a326d9b..defda87f 100644 --- a/apps/runner/runner.py +++ b/apps/runner/runner.py @@ -76,52 +76,49 @@ def run_batch(data: IncomingStartRequest): syslogger.info('Run_Batch starting with data %s', data) # Obtain most basic experiment info - expId = data['experiment']['id'] - syslogger.debug('received %s', expId) + exp_id = data['experiment']['id'] + syslogger.debug('received %s', exp_id) - open_experiment_logger(expId) + open_experiment_logger(exp_id) # Retrieve experiment details from the backend api try: - # experiments = firebaseDb.collection(DB_COLLECTION_EXPERIMENTS) - # expRef = experiments.document(expId) - # experimentData = expRef.get().to_dict() - experimentData = get_experiment_with_id(expId) + experiment_data = get_experiment_with_id(exp_id) except Exception as err: # pylint: disable=broad-exception-caught explogger.error("Error retrieving experiment data from firebase, aborting") explogger.exception(err) - # close_experiment_run(expId, None) + close_experiment_run(exp_id) return # Parse hyperaparameters into their datatype. 
Required to parse the rest of the experiment data try: - hyperparameters: "dict[str,Parameter]" = parseRawHyperparameterData(experimentData['hyperparameters']) + hyperparameters: "dict[str,Parameter]" = parseRawHyperparameterData(experiment_data['hyperparameters']) except (KeyError, ValueError) as err: if isinstance(err, KeyError): explogger.error("Error generating hyperparameters - hyperparameters not found in experiment object, aborting") else: explogger.error("Error generating hyperparameters - Validation error") explogger.exception(err) - close_experiment_run(expId) + close_experiment_run(exp_id) return - experimentData['hyperparameters'] = hyperparameters + experiment_data['hyperparameters'] = hyperparameters # Parsing into Datatype try: - experiment = ExperimentData(**experimentData) + experiment = ExperimentData(**experiment_data) experiment.postProcess = experiment.scatter except ValueError as err: explogger.error("Experiment data was not formatted correctly, aborting") explogger.exception(err) - close_experiment_run(expId) + close_experiment_run(exp_id) return #Downloading Experiment File # If the program errors here after you just deleted the ExperimentFiles on your dev machine, restart the docker container, seems to be volume mount weirdness - os.makedirs(f'ExperimentFiles/{expId}') - os.chdir(f'ExperimentFiles/{expId}') + os.makedirs(f'ExperimentFiles/{exp_id}') + os.chdir(f'ExperimentFiles/{exp_id}') filepath = download_experiment_files(experiment) try: @@ -130,29 +127,28 @@ def run_batch(data: IncomingStartRequest): explogger.error("This is not a supported experiment file type, aborting") explogger.exception(err) os.chdir('../..') - close_experiment_run(expId) + close_experiment_run(exp_id) return - explogger.info(f"Generating configs and downloading to ExperimentFiles/{expId}/configFiles") + explogger.info(f"Generating configs and downloading to ExperimentFiles/{exp_id}/configFiles") totalExperimentRuns = generate_config_files(experiment) if totalExperimentRuns == 0: os.chdir('../..') explogger.exception(GladosInternalError("Error generating configs - somehow no config files were produced?")) - close_experiment_run(expId) + close_experiment_run(exp_id) return experiment.totalExperimentRuns = totalExperimentRuns - # expRef.update({"totalExperimentRuns": experiment.totalExperimentRuns}) - update_exp_value(expId, "totalExperimentRuns", experiment.totalExperimentRuns) + update_exp_value(exp_id, "totalExperimentRuns", experiment.totalExperimentRuns) try: conduct_experiment(experiment) post_process_experiment(experiment) upload_experiment_results(experiment) except ExperimentAbort as err: - explogger.error(f'Experiment {expId} critical failure, not doing any result uploading or post processing') + explogger.error(f'Experiment {exp_id} critical failure, not doing any result uploading or post processing') explogger.exception(err) except Exception as err: # pylint: disable=broad-exception-caught explogger.error('Uncaught exception while running an experiment. 
The GLADOS code needs to be changed to handle this in a cleaner manner') @@ -160,14 +156,10 @@ def run_batch(data: IncomingStartRequest): finally: # We need to be out of the dir for the log file upload to work os.chdir('../..') - # close_experiment_run(expId) + close_experiment_run(exp_id) def close_experiment_run(expId: DocumentId): explogger.info(f'Exiting experiment {expId}') - # if expRef: - # expRef.update({'finished': True, 'finishedAtEpochMillis': int(time.time() * 1000)}) - # else: - # syslogger.warning(f'No experiment ref supplied when closing {expId} , could not update it to finished') update_exp_value(expId, 'finished', True) close_experiment_logger() upload_experiment_log(expId) From 008437bb8f24bb9c53356c00c7a7d6d1c6304322 Mon Sep 17 00:00:00 2001 From: Bennett Toftner Date: Thu, 24 Oct 2024 17:35:11 -0400 Subject: [PATCH 037/198] Added subscribeToExp() in mongodb.ts --- apps/frontend/firebase/db.ts | 13 +----------- apps/frontend/lib/mongodb.ts | 20 +++++++++++++++++++ .../pages/api/experiments/storeExp.tsx | 2 +- .../[... expIdToUpdateName].tsx | 0 4 files changed, 22 insertions(+), 13 deletions(-) rename apps/frontend/pages/api/experiments/{update => updatename}/[... expIdToUpdateName].tsx (100%) diff --git a/apps/frontend/firebase/db.ts b/apps/frontend/firebase/db.ts index fb728f6a..424c8519 100644 --- a/apps/frontend/firebase/db.ts +++ b/apps/frontend/firebase/db.ts @@ -119,18 +119,7 @@ export const downloadExperimentProjectZip = async (expId: ExperimentDocumentId) }); }; -export interface ExperimentSubscribeCallback { - (data: Partial): any; -} -// TODO: Convert from Firestore to MongoDB -export const subscribeToExp = (id: ExperimentDocumentId, callback: ExperimentSubscribeCallback) => { - const unsubscribe = onSnapshot(doc(db, DB_COLLECTION_EXPERIMENTS, id), (doc) => { - console.log(`exp ${id} data updated: `, doc.data()); - callback(doc.data() as Partial); - }); - return unsubscribe; -}; export interface MultipleExperimentSubscribeCallback { @@ -164,7 +153,7 @@ export const deleteExperiment = async (expId: ExperimentDocumentId) => { // TODO: Test this! 
export const updateExperimentName = async (expId, updatedName) => { - await fetch(`/api/experiments/update/${expId}/${updatedName}`).then((response) => { + await fetch(`/api/experiments/updatename/${expId}/${updatedName}`).then((response) => { if (response?.ok) { return response.json(); } diff --git a/apps/frontend/lib/mongodb.ts b/apps/frontend/lib/mongodb.ts index 8d37e79b..59274eb9 100644 --- a/apps/frontend/lib/mongodb.ts +++ b/apps/frontend/lib/mongodb.ts @@ -1,6 +1,8 @@ // THIS IS CURRENTLY UNUSED, FIGURE OUT HOW TO IMPORT IT INTO api/experiments/ import { MongoClient } from 'mongodb'; import { getEnvVar } from '../utils/env'; +import { ExperimentData } from '../firebase/db_types'; +import { ExperimentDocumentId } from '../firebase/db'; // Adapted from https://github.com/vercel/next.js/tree/canary/examples/with-mongodb @@ -44,6 +46,24 @@ if (process.env.NODE_ENV === 'development') { clientPromise = client.connect(); } +export interface ExperimentSubscribeCallback { + (data: Partial): any; +} + +// TODO: Convert from Firestore to MongoDB +export const subscribeToExp = async (id: ExperimentDocumentId, callback: ExperimentSubscribeCallback) => { + const db = client.db(DB_NAME); + const collection = db.collection(COLLECTION_EXPERIMENTS); + const changeStream = collection.watch(); + changeStream.on('change', next => { + if (next.operationType === 'update' && next.documentKey._id.toString() === id) + { + const data = collection.findOne({ '_id': id as any }) as Partial; + callback(data); + } + }); +}; + // Export a module-scoped MongoClient promise. By doing this in a // separate module, the client can be shared across functions. export default clientPromise; diff --git a/apps/frontend/pages/api/experiments/storeExp.tsx b/apps/frontend/pages/api/experiments/storeExp.tsx index 61fb93a7..09566bc3 100644 --- a/apps/frontend/pages/api/experiments/storeExp.tsx +++ b/apps/frontend/pages/api/experiments/storeExp.tsx @@ -8,7 +8,7 @@ const mongoExpHandler: NextApiHandler = async (req, res) => { return; } - const experimentData: ExperimentData = req.body; + const experimentData: Partial = JSON.parse(req.body); if (!experimentData || typeof experimentData !== 'object') { res.status(400).json({ response: 'Invalid experiment data provided' } as any); diff --git a/apps/frontend/pages/api/experiments/update/[... expIdToUpdateName].tsx b/apps/frontend/pages/api/experiments/updatename/[... expIdToUpdateName].tsx similarity index 100% rename from apps/frontend/pages/api/experiments/update/[... expIdToUpdateName].tsx rename to apps/frontend/pages/api/experiments/updatename/[... 
expIdToUpdateName].tsx From 71e3db2068db51f5ae9885f044e962b3c132fc06 Mon Sep 17 00:00:00 2001 From: rhit-windsors Date: Wed, 30 Oct 2024 13:04:05 -0400 Subject: [PATCH 038/198] add listener --- .../ViewExperiment/ExperimentListing.tsx | 3 +- apps/frontend/app/dashboard/page.tsx | 9 ++++- apps/frontend/lib/mongo_listener.ts | 39 +++++++++++++++++++ apps/frontend/lib/mongodb.ts | 3 +- 4 files changed, 50 insertions(+), 4 deletions(-) create mode 100644 apps/frontend/lib/mongo_listener.ts diff --git a/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx b/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx index f11e23ef..8c10ea00 100644 --- a/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx +++ b/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx @@ -1,9 +1,10 @@ /* eslint-disable no-mixed-spaces-and-tabs */ import { ChevronRightIcon } from '@heroicons/react/24/solid'; import { useEffect, useState } from 'react'; -import { ExperimentDocumentId, subscribeToExp, updateExperimentName, getCurrentProjectName } from '../../../../firebase/db'; +import { ExperimentDocumentId, updateExperimentName, getCurrentProjectName } from '../../../../firebase/db'; import { ExperimentData } from '../../../../firebase/db_types'; import { MdEdit, MdPadding } from 'react-icons/md'; +import { subscribeToExp } from '../../../../lib/mongodb'; import { Timestamp } from 'mongodb'; export interface ExperimentListingProps { diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index 21908279..ad03141b 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -3,7 +3,8 @@ import NewExperiment, { FormStates } from '../components/flows/AddExperiment/NewExperiment'; import { useAuth } from '../../firebase/fbAuth'; import { deleteExperiment } from '../../firebase/db'; -import { listenToExperiments, downloadExperimentResults, downloadExperimentProjectZip, ExperimentDocumentId } from '../../firebase/db'; +import { downloadExperimentResults, downloadExperimentProjectZip, ExperimentDocumentId } from '../../firebase/db'; +import { listenToExperiments } from '../../lib/mongo_listener'; import { Fragment, useState, useEffect } from 'react'; import { Disclosure, Menu, Transition } from '@headlessui/react'; import { @@ -189,7 +190,11 @@ export default function DashboardPage() { if (!userId) { return; } - return listenToExperiments(userId, (newExperimentList) => setExperiments(newExperimentList as ExperimentData[])); // TODO this assumes that all values will be present, which is not true + const fetchExps = async () =>{ + return await listenToExperiments(userId, (newExperimentList) => setExperiments(newExperimentList as ExperimentData[])); // TODO this assumes that all values will be present, which is not true + } + fetchExps(); + }, [userId]); const QUEUE_UNKNOWN_LENGTH = -1; diff --git a/apps/frontend/lib/mongo_listener.ts b/apps/frontend/lib/mongo_listener.ts new file mode 100644 index 00000000..73c043f0 --- /dev/null +++ b/apps/frontend/lib/mongo_listener.ts @@ -0,0 +1,39 @@ +'use client'; +import { ChangeStream, WithId, Document } from "mongodb"; +import { ExperimentData } from "../firebase/db_types"; +import clientPromise, { COLLECTION_EXPERIMENTS, DB_NAME } from "./mongodb"; + +export const listenToExperiments = async ( + uid: string, + callback: (experiments: Partial[]) => void +) => { + const client = await clientPromise; + const db = client.db(DB_NAME); + const 
experimentsCollection = db.collection(COLLECTION_EXPERIMENTS); + + // Match documents where 'creator' field equals 'uid' + const pipeline = [{ $match: { "fullDocument.creator": uid } }]; + + // Listen to changes in the experiments collection + const changeStream: ChangeStream = experimentsCollection.watch(pipeline); + + changeStream.on("change", async () => { + const updatedDocuments = await experimentsCollection + .find({ creator: uid }) + .toArray(); + + // Map documents to Partial[] + const result: Partial[] = updatedDocuments.map((doc: WithId) => { + const { _id, ...rest } = doc; + return { id: _id.toString(), ...rest } as Partial; + }); + + callback(result); + }); + + // Return function to close the change stream and client connection + return () => { + changeStream.close(); + client.close(); + }; +}; \ No newline at end of file diff --git a/apps/frontend/lib/mongodb.ts b/apps/frontend/lib/mongodb.ts index 59274eb9..20e10046 100644 --- a/apps/frontend/lib/mongodb.ts +++ b/apps/frontend/lib/mongodb.ts @@ -1,5 +1,5 @@ // THIS IS CURRENTLY UNUSED, FIGURE OUT HOW TO IMPORT IT INTO api/experiments/ -import { MongoClient } from 'mongodb'; +import { MongoClient, ChangeStream, WithId, Document } from 'mongodb'; import { getEnvVar } from '../utils/env'; import { ExperimentData } from '../firebase/db_types'; import { ExperimentDocumentId } from '../firebase/db'; @@ -64,6 +64,7 @@ export const subscribeToExp = async (id: ExperimentDocumentId, callback: Experim }); }; + // Export a module-scoped MongoClient promise. By doing this in a // separate module, the client can be shared across functions. export default clientPromise; From 9b21abe8e239dd8c5055f3a0712d016876fe0a3b Mon Sep 17 00:00:00 2001 From: rhit-windsors Date: Wed, 30 Oct 2024 13:06:58 -0400 Subject: [PATCH 039/198] force action run --- apps/backend/README.md | 2 +- apps/runner/runner.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/apps/backend/README.md b/apps/backend/README.md index a2fae73c..ffdc92aa 100644 --- a/apps/backend/README.md +++ b/apps/backend/README.md @@ -2,4 +2,4 @@ To consume secrets in python use: ``` environ.get("SECRET_NAME") -``` \ No newline at end of file +``` diff --git a/apps/runner/runner.py b/apps/runner/runner.py index 11f054a3..d43f69a5 100644 --- a/apps/runner/runner.py +++ b/apps/runner/runner.py @@ -167,6 +167,7 @@ def close_experiment_run(expId: DocumentId, expRef: "typing.Any | None"): close_experiment_logger() upload_experiment_log(expId) remove_downloaded_directory(expId) + def determine_experiment_file_type(filepath: str): rawfiletype = magic.from_file(filepath) From 8fb6be1dffc96cd4ac8be37cc5f081e44eee24f2 Mon Sep 17 00:00:00 2001 From: rhit-windsors Date: Wed, 30 Oct 2024 14:11:57 -0400 Subject: [PATCH 040/198] Revert "force action run" This reverts commit 9b21abe8e239dd8c5055f3a0712d016876fe0a3b. 
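A note on the change-stream listener added two patches above: MongoDB only attaches `fullDocument` to update events when the stream is opened with `{ fullDocument: 'updateLookup' }`, so the `$match` on `fullDocument.creator` will silently drop plain update operations as written, and the cleanup function returned by `listenToExperiments` has to be kept and invoked by the caller (the dashboard effect above discards it). The following is a minimal sketch under those assumptions, reusing the same `clientPromise`, `DB_NAME`, and `COLLECTION_EXPERIMENTS` exports; `listenToExperimentsWithUpdates` is a hypothetical name, not part of these patches:

```
// Hedged sketch, not part of these patches: a server-side listener that also
// surfaces update events by enabling fullDocument lookup on the change stream.
import { ChangeStream, WithId, Document } from 'mongodb';
import clientPromise, { DB_NAME, COLLECTION_EXPERIMENTS } from './mongodb';
import { ExperimentData } from '../firebase/db_types';

export const listenToExperimentsWithUpdates = async (
    uid: string,
    callback: (experiments: Partial<ExperimentData>[]) => void
) => {
    const client = await clientPromise;
    const collection = client.db(DB_NAME).collection(COLLECTION_EXPERIMENTS);

    // Without { fullDocument: 'updateLookup' } update events carry no fullDocument,
    // so the $match stage below would silently drop them.
    const changeStream: ChangeStream = collection.watch(
        [{ $match: { 'fullDocument.creator': uid } }],
        { fullDocument: 'updateLookup' }
    );

    changeStream.on('change', async () => {
        // Re-query and hand the caller plain objects with the ObjectId flattened to a string id.
        const docs = await collection.find({ creator: uid }).toArray();
        callback(docs.map((doc: WithId<Document>) => {
            const { _id, ...rest } = doc;
            return { id: _id.toString(), ...rest } as Partial<ExperimentData>;
        }));
    });

    // The caller must keep this and invoke it on unmount; only the stream is closed,
    // never the shared client behind clientPromise.
    return () => changeStream.close();
};
```

Closing only the stream (rather than the shared client, as the committed cleanup does) avoids tearing down the connection that other routes rely on.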
--- apps/backend/README.md | 2 +- apps/runner/runner.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/apps/backend/README.md b/apps/backend/README.md index ffdc92aa..a2fae73c 100644 --- a/apps/backend/README.md +++ b/apps/backend/README.md @@ -2,4 +2,4 @@ To consume secrets in python use: ``` environ.get("SECRET_NAME") -``` +``` \ No newline at end of file diff --git a/apps/runner/runner.py b/apps/runner/runner.py index d43f69a5..11f054a3 100644 --- a/apps/runner/runner.py +++ b/apps/runner/runner.py @@ -167,7 +167,6 @@ def close_experiment_run(expId: DocumentId, expRef: "typing.Any | None"): close_experiment_logger() upload_experiment_log(expId) remove_downloaded_directory(expId) - def determine_experiment_file_type(filepath: str): rawfiletype = magic.from_file(filepath) From 4e26a4dedea4df23cf06d2c37f7fee55bc03a65f Mon Sep 17 00:00:00 2001 From: rhit-windsors Date: Wed, 30 Oct 2024 14:12:04 -0400 Subject: [PATCH 041/198] Revert "add listener" This reverts commit 71e3db2068db51f5ae9885f044e962b3c132fc06. --- .../ViewExperiment/ExperimentListing.tsx | 3 +- apps/frontend/app/dashboard/page.tsx | 9 +---- apps/frontend/lib/mongo_listener.ts | 39 ------------------- apps/frontend/lib/mongodb.ts | 3 +- 4 files changed, 4 insertions(+), 50 deletions(-) delete mode 100644 apps/frontend/lib/mongo_listener.ts diff --git a/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx b/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx index 8c10ea00..f11e23ef 100644 --- a/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx +++ b/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx @@ -1,10 +1,9 @@ /* eslint-disable no-mixed-spaces-and-tabs */ import { ChevronRightIcon } from '@heroicons/react/24/solid'; import { useEffect, useState } from 'react'; -import { ExperimentDocumentId, updateExperimentName, getCurrentProjectName } from '../../../../firebase/db'; +import { ExperimentDocumentId, subscribeToExp, updateExperimentName, getCurrentProjectName } from '../../../../firebase/db'; import { ExperimentData } from '../../../../firebase/db_types'; import { MdEdit, MdPadding } from 'react-icons/md'; -import { subscribeToExp } from '../../../../lib/mongodb'; import { Timestamp } from 'mongodb'; export interface ExperimentListingProps { diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index ad03141b..21908279 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -3,8 +3,7 @@ import NewExperiment, { FormStates } from '../components/flows/AddExperiment/NewExperiment'; import { useAuth } from '../../firebase/fbAuth'; import { deleteExperiment } from '../../firebase/db'; -import { downloadExperimentResults, downloadExperimentProjectZip, ExperimentDocumentId } from '../../firebase/db'; -import { listenToExperiments } from '../../lib/mongo_listener'; +import { listenToExperiments, downloadExperimentResults, downloadExperimentProjectZip, ExperimentDocumentId } from '../../firebase/db'; import { Fragment, useState, useEffect } from 'react'; import { Disclosure, Menu, Transition } from '@headlessui/react'; import { @@ -190,11 +189,7 @@ export default function DashboardPage() { if (!userId) { return; } - const fetchExps = async () =>{ - return await listenToExperiments(userId, (newExperimentList) => setExperiments(newExperimentList as ExperimentData[])); // TODO this assumes that all values will be present, which is not true - } - fetchExps(); - + 
return listenToExperiments(userId, (newExperimentList) => setExperiments(newExperimentList as ExperimentData[])); // TODO this assumes that all values will be present, which is not true }, [userId]); const QUEUE_UNKNOWN_LENGTH = -1; diff --git a/apps/frontend/lib/mongo_listener.ts b/apps/frontend/lib/mongo_listener.ts deleted file mode 100644 index 73c043f0..00000000 --- a/apps/frontend/lib/mongo_listener.ts +++ /dev/null @@ -1,39 +0,0 @@ -'use client'; -import { ChangeStream, WithId, Document } from "mongodb"; -import { ExperimentData } from "../firebase/db_types"; -import clientPromise, { COLLECTION_EXPERIMENTS, DB_NAME } from "./mongodb"; - -export const listenToExperiments = async ( - uid: string, - callback: (experiments: Partial[]) => void -) => { - const client = await clientPromise; - const db = client.db(DB_NAME); - const experimentsCollection = db.collection(COLLECTION_EXPERIMENTS); - - // Match documents where 'creator' field equals 'uid' - const pipeline = [{ $match: { "fullDocument.creator": uid } }]; - - // Listen to changes in the experiments collection - const changeStream: ChangeStream = experimentsCollection.watch(pipeline); - - changeStream.on("change", async () => { - const updatedDocuments = await experimentsCollection - .find({ creator: uid }) - .toArray(); - - // Map documents to Partial[] - const result: Partial[] = updatedDocuments.map((doc: WithId) => { - const { _id, ...rest } = doc; - return { id: _id.toString(), ...rest } as Partial; - }); - - callback(result); - }); - - // Return function to close the change stream and client connection - return () => { - changeStream.close(); - client.close(); - }; -}; \ No newline at end of file diff --git a/apps/frontend/lib/mongodb.ts b/apps/frontend/lib/mongodb.ts index 20e10046..59274eb9 100644 --- a/apps/frontend/lib/mongodb.ts +++ b/apps/frontend/lib/mongodb.ts @@ -1,5 +1,5 @@ // THIS IS CURRENTLY UNUSED, FIGURE OUT HOW TO IMPORT IT INTO api/experiments/ -import { MongoClient, ChangeStream, WithId, Document } from 'mongodb'; +import { MongoClient } from 'mongodb'; import { getEnvVar } from '../utils/env'; import { ExperimentData } from '../firebase/db_types'; import { ExperimentDocumentId } from '../firebase/db'; @@ -64,7 +64,6 @@ export const subscribeToExp = async (id: ExperimentDocumentId, callback: Experim }); }; - // Export a module-scoped MongoClient promise. By doing this in a // separate module, the client can be shared across functions. 
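The revert here is consistent with the fact that the official MongoDB Node driver cannot run in the browser: `mongo_listener.ts` was marked `'use client'` yet opened connections and change streams, which only work in a Node process, and the next patch reintroduces the same logic behind `'use server'`. Below is a minimal sketch of a server-only query helper a client component could call for a one-shot read; `getExperimentsForUser` is a hypothetical name and not part of these patches:

```
'use server';
// Hedged sketch, not part of these patches: getExperimentsForUser is a hypothetical
// helper showing the server-only data access the later patch settles on.
import { WithId, Document } from 'mongodb';
import clientPromise, { DB_NAME, COLLECTION_EXPERIMENTS } from './mongodb';
import { ExperimentData } from '../firebase/db_types';

export async function getExperimentsForUser(uid: string): Promise<Partial<ExperimentData>[]> {
    const client = await clientPromise;
    const collection = client.db(DB_NAME).collection(COLLECTION_EXPERIMENTS);
    const docs = await collection.find({ creator: uid }).toArray();
    // Return plain serialisable objects so a client component can consume the result.
    return docs.map((doc: WithId<Document>) => {
        const { _id, ...rest } = doc;
        return { id: _id.toString(), ...rest } as Partial<ExperimentData>;
    });
}
```

Plain objects with `_id` flattened to a string cross the server/client boundary safely, whereas live cursors and change streams do not.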
export default clientPromise; From 100079091ac373818d04d2e4f9fd1de8085ce897 Mon Sep 17 00:00:00 2001 From: rhit-windsors Date: Wed, 30 Oct 2024 14:28:49 -0400 Subject: [PATCH 042/198] update frontend --- .../ViewExperiment/ExperimentListing.tsx | 3 +- apps/frontend/app/dashboard/page.tsx | 8 +- apps/frontend/lib/mongo_funcs.ts | 61 ++ apps/frontend/lib/mongodb.ts | 18 - apps/frontend/package-lock.json | 668 ++++++++++++++---- apps/frontend/package.json | 2 +- 6 files changed, 586 insertions(+), 174 deletions(-) create mode 100644 apps/frontend/lib/mongo_funcs.ts diff --git a/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx b/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx index f11e23ef..0e760121 100644 --- a/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx +++ b/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx @@ -1,7 +1,8 @@ /* eslint-disable no-mixed-spaces-and-tabs */ import { ChevronRightIcon } from '@heroicons/react/24/solid'; import { useEffect, useState } from 'react'; -import { ExperimentDocumentId, subscribeToExp, updateExperimentName, getCurrentProjectName } from '../../../../firebase/db'; +import { ExperimentDocumentId, updateExperimentName, getCurrentProjectName } from '../../../../firebase/db'; +import { subscribeToExp } from '../../../../lib/mongo_funcs'; import { ExperimentData } from '../../../../firebase/db_types'; import { MdEdit, MdPadding } from 'react-icons/md'; import { Timestamp } from 'mongodb'; diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index 21908279..b6063339 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -3,7 +3,8 @@ import NewExperiment, { FormStates } from '../components/flows/AddExperiment/NewExperiment'; import { useAuth } from '../../firebase/fbAuth'; import { deleteExperiment } from '../../firebase/db'; -import { listenToExperiments, downloadExperimentResults, downloadExperimentProjectZip, ExperimentDocumentId } from '../../firebase/db'; +import { downloadExperimentResults, downloadExperimentProjectZip, ExperimentDocumentId } from '../../firebase/db'; +import { listenToExperiments } from '../../lib/mongo_funcs'; import { Fragment, useState, useEffect } from 'react'; import { Disclosure, Menu, Transition } from '@headlessui/react'; import { @@ -189,7 +190,10 @@ export default function DashboardPage() { if (!userId) { return; } - return listenToExperiments(userId, (newExperimentList) => setExperiments(newExperimentList as ExperimentData[])); // TODO this assumes that all values will be present, which is not true + const run = async () => { + return await listenToExperiments(userId, (newExperimentList) => setExperiments(newExperimentList as ExperimentData[])); // TODO this assumes that all values will be present, which is not true + } + }, [userId]); const QUEUE_UNKNOWN_LENGTH = -1; diff --git a/apps/frontend/lib/mongo_funcs.ts b/apps/frontend/lib/mongo_funcs.ts new file mode 100644 index 00000000..45a78d2b --- /dev/null +++ b/apps/frontend/lib/mongo_funcs.ts @@ -0,0 +1,61 @@ +'use server' +import { ExperimentDocumentId } from "../firebase/db"; +import { ExperimentData } from "../firebase/db_types"; +import clientPromise, { COLLECTION_EXPERIMENTS, DB_NAME } from "./mongodb"; +import { ChangeStream, WithId, Document } from "mongodb"; + +export interface ExperimentSubscribeCallback { + (data: Partial): any; +} + +// TODO: Convert from Firestore to MongoDB +export const 
subscribeToExp = async (id: ExperimentDocumentId, callback: ExperimentSubscribeCallback) => { + const client = await clientPromise; + const db = client.db(DB_NAME); + const collection = db.collection(COLLECTION_EXPERIMENTS); + const changeStream = collection.watch(); + changeStream.on('change', next => { + if (next.operationType === 'update' && next.documentKey._id.toString() === id) + { + const data = collection.findOne({ '_id': id as any }) as Partial; + callback(data); + } + }); +}; + + +export const listenToExperiments = async ( + uid: string, + callback: (experiments: Partial[]) => void +) => { + const client = await clientPromise; + const db = client.db(DB_NAME); + const experimentsCollection = db.collection(COLLECTION_EXPERIMENTS); + + // Match documents where 'creator' field equals 'uid' + const pipeline = [{ $match: { "fullDocument.creator": uid } }]; + + // Listen to changes in the experiments collection + const changeStream: ChangeStream = experimentsCollection.watch(pipeline); + + changeStream.on("change", async () => { + const updatedDocuments = await experimentsCollection + .find({ creator: uid }) + .toArray(); + + // Map documents to Partial[] + const result: Partial[] = updatedDocuments.map((doc: WithId) => { + const { _id, ...rest } = doc; + return { id: _id.toString(), ...rest } as Partial; + }); + + callback(result); + }); + + // Return function to close the change stream and client connection + return () => { + changeStream.close(); + client.close(); + }; +}; + diff --git a/apps/frontend/lib/mongodb.ts b/apps/frontend/lib/mongodb.ts index 59274eb9..4003b5d6 100644 --- a/apps/frontend/lib/mongodb.ts +++ b/apps/frontend/lib/mongodb.ts @@ -46,24 +46,6 @@ if (process.env.NODE_ENV === 'development') { clientPromise = client.connect(); } -export interface ExperimentSubscribeCallback { - (data: Partial): any; -} - -// TODO: Convert from Firestore to MongoDB -export const subscribeToExp = async (id: ExperimentDocumentId, callback: ExperimentSubscribeCallback) => { - const db = client.db(DB_NAME); - const collection = db.collection(COLLECTION_EXPERIMENTS); - const changeStream = collection.watch(); - changeStream.on('change', next => { - if (next.operationType === 'update' && next.documentKey._id.toString() === id) - { - const data = collection.findOne({ '_id': id as any }) as Partial; - callback(data); - } - }); -}; - // Export a module-scoped MongoClient promise. By doing this in a // separate module, the client can be shared across functions. 
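One further note on `subscribeToExp` as committed above: `collection.findOne()` returns a Promise that is never awaited, so the callback receives a pending Promise rather than a document, and the lookup filters `_id` by the raw string id even though the backend in this same series (`apps/backend/modules/mongo.py`) addresses these documents with `ObjectId(expId)`. A minimal corrected sketch under the same module layout follows; `subscribeToExpChecked` is a hypothetical name, not part of these patches:

```
// Hedged sketch, not part of these patches: subscribeToExpChecked is a hypothetical
// variant that awaits the lookup and matches the ObjectId primary key.
import { ObjectId } from 'mongodb';
import clientPromise, { DB_NAME, COLLECTION_EXPERIMENTS } from './mongodb';
import { ExperimentData } from '../firebase/db_types';
import { ExperimentDocumentId } from '../firebase/db';

export const subscribeToExpChecked = async (
    id: ExperimentDocumentId,
    callback: (data: Partial<ExperimentData>) => void
) => {
    const client = await clientPromise;
    const collection = client.db(DB_NAME).collection(COLLECTION_EXPERIMENTS);
    const changeStream = collection.watch();

    changeStream.on('change', async (next) => {
        if (next.operationType === 'update' && next.documentKey._id.toString() === id) {
            // findOne returns a Promise; without await the callback would receive
            // a pending Promise instead of the experiment document.
            const data = await collection.findOne({ _id: new ObjectId(id) });
            if (data) callback(data as unknown as Partial<ExperimentData>);
        }
    });

    return () => changeStream.close();
};
```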
export default clientPromise; diff --git a/apps/frontend/package-lock.json b/apps/frontend/package-lock.json index 7ae2e655..1d780b8f 100644 --- a/apps/frontend/package-lock.json +++ b/apps/frontend/package-lock.json @@ -26,7 +26,7 @@ "firebase": "^9.12.1", "joi": "^17.6.4", "mongodb": "^5.2.0", - "next": "^13.4.19", + "next": "^15.0.2", "pino": "^7.11.0", "react": "^18.2.0", "react-beautiful-dnd": "^13.1.1", @@ -281,6 +281,15 @@ "node": ">=6.9.0" } }, + "node_modules/@emnapi/runtime": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.3.1.tgz", + "integrity": "sha512-kEBmG8KyqtxJZv+ygbEim+KCGtIq1fC22Ms3S4ziXmYKm8uyoLX0MHONVKwp+9opg390VaKRNt4a7A9NwmpNhw==", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, "node_modules/@emotion/babel-plugin": { "version": "11.11.0", "resolved": "https://registry.npmjs.org/@emotion/babel-plugin/-/babel-plugin-11.11.0.tgz", @@ -1203,6 +1212,348 @@ "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", "dev": true }, + "node_modules/@img/sharp-darwin-arm64": { + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.33.5.tgz", + "integrity": "sha512-UT4p+iz/2H4twwAoLCqfA9UH5pI6DggwKEGuaPy7nCVQ8ZsiY5PIcrRvD1DzuY3qYL07NtIQcWnBSY/heikIFQ==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-darwin-arm64": "1.0.4" + } + }, + "node_modules/@img/sharp-darwin-x64": { + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-darwin-x64/-/sharp-darwin-x64-0.33.5.tgz", + "integrity": "sha512-fyHac4jIc1ANYGRDxtiqelIbdWkIuQaI84Mv45KvGRRxSAa7o7d1ZKAOBaYbnepLC1WqxfpimdeWfvqqSGwR2Q==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-darwin-x64": "1.0.4" + } + }, + "node_modules/@img/sharp-libvips-darwin-arm64": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-arm64/-/sharp-libvips-darwin-arm64-1.0.4.tgz", + "integrity": "sha512-XblONe153h0O2zuFfTAbQYAX2JhYmDHeWikp1LM9Hul9gVPjFY427k6dFEcOL72O01QxQsWi761svJ/ev9xEDg==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "darwin" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-darwin-x64": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-x64/-/sharp-libvips-darwin-x64-1.0.4.tgz", + "integrity": "sha512-xnGR8YuZYfJGmWPvmlunFaWJsb9T/AO2ykoP3Fz/0X5XV2aoYBPkX6xqCQvUTKKiLddarLaxpzNe+b1hjeWHAQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "darwin" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-arm": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm/-/sharp-libvips-linux-arm-1.0.5.tgz", + "integrity": "sha512-gvcC4ACAOPRNATg/ov8/MnbxFDJqf/pDePbBnuBDcjsI8PssmjoKMAz4LtLaVi+OnSb5FK/yIOamqDwGmXW32g==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + 
"node_modules/@img/sharp-libvips-linux-arm64": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm64/-/sharp-libvips-linux-arm64-1.0.4.tgz", + "integrity": "sha512-9B+taZ8DlyyqzZQnoeIvDVR/2F4EbMepXMc/NdVbkzsJbzkUjhXv/70GQJ7tdLA4YJgNP25zukcxpX2/SueNrA==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-s390x": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-s390x/-/sharp-libvips-linux-s390x-1.0.4.tgz", + "integrity": "sha512-u7Wz6ntiSSgGSGcjZ55im6uvTrOxSIS8/dgoVMoiGE9I6JAfU50yH5BoDlYA1tcuGS7g/QNtetJnxA6QEsCVTA==", + "cpu": [ + "s390x" + ], + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-x64": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-x64/-/sharp-libvips-linux-x64-1.0.4.tgz", + "integrity": "sha512-MmWmQ3iPFZr0Iev+BAgVMb3ZyC4KeFc3jFxnNbEPas60e1cIfevbtuyf9nDGIzOaW9PdnDciJm+wFFaTlj5xYw==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linuxmusl-arm64": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-arm64/-/sharp-libvips-linuxmusl-arm64-1.0.4.tgz", + "integrity": "sha512-9Ti+BbTYDcsbp4wfYib8Ctm1ilkugkA/uscUn6UXK1ldpC1JjiXbLfFZtRlBhjPZ5o1NCLiDbg8fhUPKStHoTA==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linuxmusl-x64": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-x64/-/sharp-libvips-linuxmusl-x64-1.0.4.tgz", + "integrity": "sha512-viYN1KX9m+/hGkJtvYYp+CCLgnJXwiQB39damAO7WMdKWlIhmYTfHjwSbQeUK/20vY154mwezd9HflVFM1wVSw==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-linux-arm": { + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm/-/sharp-linux-arm-0.33.5.tgz", + "integrity": "sha512-JTS1eldqZbJxjvKaAkxhZmBqPRGmxgu+qFKSInv8moZ2AmT5Yib3EQ1c6gp493HvrvV8QgdOXdyaIBrhvFhBMQ==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-arm": "1.0.5" + } + }, + "node_modules/@img/sharp-linux-arm64": { + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm64/-/sharp-linux-arm64-0.33.5.tgz", + "integrity": "sha512-JMVv+AMRyGOHtO1RFBiJy/MBsgz0x4AWrT6QoEVVTyh1E39TrCUpTRI7mx9VksGX4awWASxqCYLCV4wBZHAYxA==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-arm64": "1.0.4" + } + }, + "node_modules/@img/sharp-linux-s390x": { + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-s390x/-/sharp-linux-s390x-0.33.5.tgz", + "integrity": 
"sha512-y/5PCd+mP4CA/sPDKl2961b+C9d+vPAveS33s6Z3zfASk2j5upL6fXVPZi7ztePZ5CuH+1kW8JtvxgbuXHRa4Q==", + "cpu": [ + "s390x" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-s390x": "1.0.4" + } + }, + "node_modules/@img/sharp-linux-x64": { + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-x64/-/sharp-linux-x64-0.33.5.tgz", + "integrity": "sha512-opC+Ok5pRNAzuvq1AG0ar+1owsu842/Ab+4qvU879ippJBHvyY5n2mxF1izXqkPYlGuP/M556uh53jRLJmzTWA==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-x64": "1.0.4" + } + }, + "node_modules/@img/sharp-linuxmusl-arm64": { + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-arm64/-/sharp-linuxmusl-arm64-0.33.5.tgz", + "integrity": "sha512-XrHMZwGQGvJg2V/oRSUfSAfjfPxO+4DkiRh6p2AFjLQztWUuY/o8Mq0eMQVIY7HJ1CDQUJlxGGZRw1a5bqmd1g==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linuxmusl-arm64": "1.0.4" + } + }, + "node_modules/@img/sharp-linuxmusl-x64": { + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-x64/-/sharp-linuxmusl-x64-0.33.5.tgz", + "integrity": "sha512-WT+d/cgqKkkKySYmqoZ8y3pxx7lx9vVejxW/W4DOFMYVSkErR+w7mf2u8m/y4+xHe7yY9DAXQMWQhpnMuFfScw==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linuxmusl-x64": "1.0.4" + } + }, + "node_modules/@img/sharp-wasm32": { + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-wasm32/-/sharp-wasm32-0.33.5.tgz", + "integrity": "sha512-ykUW4LVGaMcU9lu9thv85CbRMAwfeadCJHRsg2GmeRa/cJxsVY9Rbd57JcMxBkKHag5U/x7TSBpScF4U8ElVzg==", + "cpu": [ + "wasm32" + ], + "optional": true, + "dependencies": { + "@emnapi/runtime": "^1.2.0" + }, + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-win32-ia32": { + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-ia32/-/sharp-win32-ia32-0.33.5.tgz", + "integrity": "sha512-T36PblLaTwuVJ/zw/LaH0PdZkRz5rd3SmMHX8GSmR7vtNSP5Z6bQkExdSK7xGWyxLw4sUknBuugTelgw2faBbQ==", + "cpu": [ + "ia32" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-win32-x64": { + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-x64/-/sharp-win32-x64-0.33.5.tgz", + "integrity": "sha512-MpY/o8/8kj+EcnxwvrP4aTJSWw/aZ7JIGR4aBeZkZw5B7/Jn+tY9/VNwtcoGmdT7GfggGIU4kygOMSbYnOrAbg==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, 
"node_modules/@jest/expect-utils": { "version": "29.6.4", "resolved": "https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-29.6.4.tgz", @@ -1389,9 +1740,9 @@ } }, "node_modules/@next/env": { - "version": "13.4.19", - "resolved": "https://registry.npmjs.org/@next/env/-/env-13.4.19.tgz", - "integrity": "sha512-FsAT5x0jF2kkhNkKkukhsyYOrRqtSxrEhfliniIq0bwWbuXLgyt3Gv0Ml+b91XwjwArmuP7NxCiGd++GGKdNMQ==" + "version": "15.0.2", + "resolved": "https://registry.npmjs.org/@next/env/-/env-15.0.2.tgz", + "integrity": "sha512-c0Zr0ModK5OX7D4ZV8Jt/wqoXtitLNPwUfG9zElCZztdaZyNVnN40rDXVZ/+FGuR4CcNV5AEfM6N8f+Ener7Dg==" }, "node_modules/@next/eslint-plugin-next": { "version": "13.4.19", @@ -1403,9 +1754,9 @@ } }, "node_modules/@next/swc-darwin-arm64": { - "version": "13.4.19", - "resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-13.4.19.tgz", - "integrity": "sha512-vv1qrjXeGbuF2mOkhkdxMDtv9np7W4mcBtaDnHU+yJG+bBwa6rYsYSCI/9Xm5+TuF5SbZbrWO6G1NfTh1TMjvQ==", + "version": "15.0.2", + "resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-15.0.2.tgz", + "integrity": "sha512-GK+8w88z+AFlmt+ondytZo2xpwlfAR8U6CRwXancHImh6EdGfHMIrTSCcx5sOSBei00GyLVL0ioo1JLKTfprgg==", "cpu": [ "arm64" ], @@ -1418,9 +1769,9 @@ } }, "node_modules/@next/swc-darwin-x64": { - "version": "13.4.19", - "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-13.4.19.tgz", - "integrity": "sha512-jyzO6wwYhx6F+7gD8ddZfuqO4TtpJdw3wyOduR4fxTUCm3aLw7YmHGYNjS0xRSYGAkLpBkH1E0RcelyId6lNsw==", + "version": "15.0.2", + "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-15.0.2.tgz", + "integrity": "sha512-KUpBVxIbjzFiUZhiLIpJiBoelqzQtVZbdNNsehhUn36e2YzKHphnK8eTUW1s/4aPy5kH/UTid8IuVbaOpedhpw==", "cpu": [ "x64" ], @@ -1433,9 +1784,9 @@ } }, "node_modules/@next/swc-linux-arm64-gnu": { - "version": "13.4.19", - "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-13.4.19.tgz", - "integrity": "sha512-vdlnIlaAEh6H+G6HrKZB9c2zJKnpPVKnA6LBwjwT2BTjxI7e0Hx30+FoWCgi50e+YO49p6oPOtesP9mXDRiiUg==", + "version": "15.0.2", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-15.0.2.tgz", + "integrity": "sha512-9J7TPEcHNAZvwxXRzOtiUvwtTD+fmuY0l7RErf8Yyc7kMpE47MIQakl+3jecmkhOoIyi/Rp+ddq7j4wG6JDskQ==", "cpu": [ "arm64" ], @@ -1448,9 +1799,9 @@ } }, "node_modules/@next/swc-linux-arm64-musl": { - "version": "13.4.19", - "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-13.4.19.tgz", - "integrity": "sha512-aU0HkH2XPgxqrbNRBFb3si9Ahu/CpaR5RPmN2s9GiM9qJCiBBlZtRTiEca+DC+xRPyCThTtWYgxjWHgU7ZkyvA==", + "version": "15.0.2", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-15.0.2.tgz", + "integrity": "sha512-BjH4ZSzJIoTTZRh6rG+a/Ry4SW0HlizcPorqNBixBWc3wtQtj4Sn9FnRZe22QqrPnzoaW0ctvSz4FaH4eGKMww==", "cpu": [ "arm64" ], @@ -1463,9 +1814,9 @@ } }, "node_modules/@next/swc-linux-x64-gnu": { - "version": "13.4.19", - "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-13.4.19.tgz", - "integrity": "sha512-htwOEagMa/CXNykFFeAHHvMJeqZfNQEoQvHfsA4wgg5QqGNqD5soeCer4oGlCol6NGUxknrQO6VEustcv+Md+g==", + "version": "15.0.2", + "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-15.0.2.tgz", + "integrity": "sha512-i3U2TcHgo26sIhcwX/Rshz6avM6nizrZPvrDVDY1bXcLH1ndjbO8zuC7RoHp0NSK7wjJMPYzm7NYL1ksSKFreA==", "cpu": [ "x64" ], @@ -1478,9 +1829,9 @@ } }, 
"node_modules/@next/swc-linux-x64-musl": { - "version": "13.4.19", - "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-13.4.19.tgz", - "integrity": "sha512-4Gj4vvtbK1JH8ApWTT214b3GwUh9EKKQjY41hH/t+u55Knxi/0wesMzwQRhppK6Ddalhu0TEttbiJ+wRcoEj5Q==", + "version": "15.0.2", + "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-15.0.2.tgz", + "integrity": "sha512-AMfZfSVOIR8fa+TXlAooByEF4OB00wqnms1sJ1v+iu8ivwvtPvnkwdzzFMpsK5jA2S9oNeeQ04egIWVb4QWmtQ==", "cpu": [ "x64" ], @@ -1493,9 +1844,9 @@ } }, "node_modules/@next/swc-win32-arm64-msvc": { - "version": "13.4.19", - "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-13.4.19.tgz", - "integrity": "sha512-bUfDevQK4NsIAHXs3/JNgnvEY+LRyneDN788W2NYiRIIzmILjba7LaQTfihuFawZDhRtkYCv3JDC3B4TwnmRJw==", + "version": "15.0.2", + "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-15.0.2.tgz", + "integrity": "sha512-JkXysDT0/hEY47O+Hvs8PbZAeiCQVxKfGtr4GUpNAhlG2E0Mkjibuo8ryGD29Qb5a3IOnKYNoZlh/MyKd2Nbww==", "cpu": [ "arm64" ], @@ -1507,25 +1858,10 @@ "node": ">= 10" } }, - "node_modules/@next/swc-win32-ia32-msvc": { - "version": "13.4.19", - "resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-13.4.19.tgz", - "integrity": "sha512-Y5kikILFAr81LYIFaw6j/NrOtmiM4Sf3GtOc0pn50ez2GCkr+oejYuKGcwAwq3jiTKuzF6OF4iT2INPoxRycEA==", - "cpu": [ - "ia32" - ], - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10" - } - }, "node_modules/@next/swc-win32-x64-msvc": { - "version": "13.4.19", - "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-13.4.19.tgz", - "integrity": "sha512-YzA78jBDXMYiINdPdJJwGgPNT3YqBNNGhsthsDoWHL9p24tEJn9ViQf/ZqTbwSpX/RrkPupLfuuTH2sf73JBAw==", + "version": "15.0.2", + "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-15.0.2.tgz", + "integrity": "sha512-foaUL0NqJY/dX0Pi/UcZm5zsmSk5MtP/gxx3xOPyREkMFN+CTjctPfu3QaqrQHinaKdPnMWPJDKt4VjDfTBe/Q==", "cpu": [ "x64" ], @@ -1786,10 +2122,15 @@ "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==" }, + "node_modules/@swc/counter": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz", + "integrity": "sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==" + }, "node_modules/@swc/helpers": { - "version": "0.5.1", - "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.1.tgz", - "integrity": "sha512-sJ902EfIzn1Fa+qYmjdQqh8tPsoxyBz+8yBKC2HKUxyezKJFwPGOn7pv4WY6QuQW//ySQi5lJjA/ZT9sNWWNTg==", + "version": "0.5.13", + "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.13.tgz", + "integrity": "sha512-UoKGxQ3r5kYI9dALKJapMmuK+1zWM/H17Z1+iwnNmzcJRnfFuevZs375TA5rW31pu4BS4NoSy1fRsexDXfWn5w==", "dependencies": { "tslib": "^2.4.0" } @@ -2632,9 +2973,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001527", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001527.tgz", - "integrity": "sha512-YkJi7RwPgWtXVSgK4lG9AHH57nSzvvOp9MesgXmw4Q7n0C3H04L0foHqfxcmSAm5AcWb8dW9AYj2tR7/5GnddQ==", + "version": "1.0.30001675", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001675.tgz", + "integrity": 
"sha512-/wV1bQwPrkLiQMjaJF5yUMVM/VdRPOCU8QZ+PmG6uW6DvYSrNY1bpwHI/3mOcUosLaJCzYDi5o91IQB51ft6cg==", "funding": [ { "type": "opencollective", @@ -2743,6 +3084,19 @@ "node": ">=6" } }, + "node_modules/color": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/color/-/color-4.2.3.tgz", + "integrity": "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==", + "optional": true, + "dependencies": { + "color-convert": "^2.0.1", + "color-string": "^1.9.0" + }, + "engines": { + "node": ">=12.5.0" + } + }, "node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", @@ -2759,6 +3113,16 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, + "node_modules/color-string": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz", + "integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==", + "optional": true, + "dependencies": { + "color-name": "^1.0.0", + "simple-swizzle": "^0.2.2" + } + }, "node_modules/commander": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", @@ -2923,6 +3287,15 @@ "node": ">=6" } }, + "node_modules/detect-libc": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz", + "integrity": "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==", + "optional": true, + "engines": { + "node": ">=8" + } + }, "node_modules/didyoumean": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz", @@ -3982,11 +4355,6 @@ "node": ">=10.13.0" } }, - "node_modules/glob-to-regexp": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz", - "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==" - }, "node_modules/globals": { "version": "13.21.0", "resolved": "https://registry.npmjs.org/globals/-/globals-13.21.0.tgz", @@ -5000,18 +5368,6 @@ "loose-envify": "cli.js" } }, - "node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/lz-string": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz", @@ -5180,72 +5536,56 @@ "dev": true }, "node_modules/next": { - "version": "13.4.19", - "resolved": "https://registry.npmjs.org/next/-/next-13.4.19.tgz", - "integrity": "sha512-HuPSzzAbJ1T4BD8e0bs6B9C1kWQ6gv8ykZoRWs5AQoiIuqbGHHdQO7Ljuvg05Q0Z24E2ABozHe6FxDvI6HfyAw==", + "version": "15.0.2", + "resolved": "https://registry.npmjs.org/next/-/next-15.0.2.tgz", + "integrity": "sha512-rxIWHcAu4gGSDmwsELXacqAPUk+j8dV/A9cDF5fsiCMpkBDYkO2AEaL1dfD+nNmDiU6QMCFN8Q30VEKapT9UHQ==", "dependencies": { - "@next/env": "13.4.19", - "@swc/helpers": "0.5.1", + "@next/env": "15.0.2", + "@swc/counter": "0.1.3", + "@swc/helpers": "0.5.13", "busboy": "1.6.0", - "caniuse-lite": "^1.0.30001406", - "postcss": "8.4.14", - "styled-jsx": "5.1.1", - "watchpack": "2.4.0", 
- "zod": "3.21.4" + "caniuse-lite": "^1.0.30001579", + "postcss": "8.4.31", + "styled-jsx": "5.1.6" }, "bin": { "next": "dist/bin/next" }, "engines": { - "node": ">=16.8.0" + "node": ">=18.18.0" }, "optionalDependencies": { - "@next/swc-darwin-arm64": "13.4.19", - "@next/swc-darwin-x64": "13.4.19", - "@next/swc-linux-arm64-gnu": "13.4.19", - "@next/swc-linux-arm64-musl": "13.4.19", - "@next/swc-linux-x64-gnu": "13.4.19", - "@next/swc-linux-x64-musl": "13.4.19", - "@next/swc-win32-arm64-msvc": "13.4.19", - "@next/swc-win32-ia32-msvc": "13.4.19", - "@next/swc-win32-x64-msvc": "13.4.19" + "@next/swc-darwin-arm64": "15.0.2", + "@next/swc-darwin-x64": "15.0.2", + "@next/swc-linux-arm64-gnu": "15.0.2", + "@next/swc-linux-arm64-musl": "15.0.2", + "@next/swc-linux-x64-gnu": "15.0.2", + "@next/swc-linux-x64-musl": "15.0.2", + "@next/swc-win32-arm64-msvc": "15.0.2", + "@next/swc-win32-x64-msvc": "15.0.2", + "sharp": "^0.33.5" }, "peerDependencies": { "@opentelemetry/api": "^1.1.0", - "react": "^18.2.0", - "react-dom": "^18.2.0", + "@playwright/test": "^1.41.2", + "babel-plugin-react-compiler": "*", + "react": "^18.2.0 || 19.0.0-rc-02c0e824-20241028", + "react-dom": "^18.2.0 || 19.0.0-rc-02c0e824-20241028", "sass": "^1.3.0" }, "peerDependenciesMeta": { "@opentelemetry/api": { "optional": true }, - "sass": { + "@playwright/test": { "optional": true - } - } - }, - "node_modules/next/node_modules/postcss": { - "version": "8.4.14", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.14.tgz", - "integrity": "sha512-E398TUmfAYFPBSdzgeieK2Y1+1cpdxJx8yXbK/m57nRhKSmk1GB2tO4lbLBtlkfPQTDKfe4Xqv1ASWPpayPEig==", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/postcss" + "babel-plugin-react-compiler": { + "optional": true + }, + "sass": { + "optional": true } - ], - "dependencies": { - "nanoid": "^3.3.4", - "picocolors": "^1.0.0", - "source-map-js": "^1.0.2" - }, - "engines": { - "node": "^10 || ^12 || >=14" } }, "node_modules/node-fetch": { @@ -5646,10 +5986,9 @@ } }, "node_modules/postcss": { - "version": "8.4.29", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.29.tgz", - "integrity": "sha512-cbI+jaqIeu/VGqXEarWkRCCffhjgXc0qjBtXpqJhTBohMUjUQnbBr0xqX3vEKudc4iviTewcJo5ajcec5+wdJw==", - "dev": true, + "version": "8.4.31", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz", + "integrity": "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==", "funding": [ { "type": "opencollective", @@ -6281,13 +6620,10 @@ } }, "node_modules/semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", - "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "devOptional": true, "bin": { "semver": "bin/semver.js" }, @@ -6295,6 +6631,45 @@ "node": ">=10" } }, + "node_modules/sharp": { + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.33.5.tgz", + "integrity": "sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw==", + "hasInstallScript": true, + "optional": true, + "dependencies": { + 
"color": "^4.2.3", + "detect-libc": "^2.0.3", + "semver": "^7.6.3" + }, + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-darwin-arm64": "0.33.5", + "@img/sharp-darwin-x64": "0.33.5", + "@img/sharp-libvips-darwin-arm64": "1.0.4", + "@img/sharp-libvips-darwin-x64": "1.0.4", + "@img/sharp-libvips-linux-arm": "1.0.5", + "@img/sharp-libvips-linux-arm64": "1.0.4", + "@img/sharp-libvips-linux-s390x": "1.0.4", + "@img/sharp-libvips-linux-x64": "1.0.4", + "@img/sharp-libvips-linuxmusl-arm64": "1.0.4", + "@img/sharp-libvips-linuxmusl-x64": "1.0.4", + "@img/sharp-linux-arm": "0.33.5", + "@img/sharp-linux-arm64": "0.33.5", + "@img/sharp-linux-s390x": "0.33.5", + "@img/sharp-linux-x64": "0.33.5", + "@img/sharp-linuxmusl-arm64": "0.33.5", + "@img/sharp-linuxmusl-x64": "0.33.5", + "@img/sharp-wasm32": "0.33.5", + "@img/sharp-win32-ia32": "0.33.5", + "@img/sharp-win32-x64": "0.33.5" + } + }, "node_modules/shebang-command": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", @@ -6329,6 +6704,21 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/simple-swizzle": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", + "integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==", + "optional": true, + "dependencies": { + "is-arrayish": "^0.3.1" + } + }, + "node_modules/simple-swizzle/node_modules/is-arrayish": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", + "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==", + "optional": true + }, "node_modules/slash": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", @@ -6577,9 +6967,9 @@ } }, "node_modules/styled-jsx": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/styled-jsx/-/styled-jsx-5.1.1.tgz", - "integrity": "sha512-pW7uC1l4mBZ8ugbiZrcIsiIvVx1UmTfw7UkC3Um2tmfUq9Bhk8IiyEIPl6F8agHgjzku6j0xQEZbfA5uSgSaCw==", + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/styled-jsx/-/styled-jsx-5.1.6.tgz", + "integrity": "sha512-qSVyDTeMotdvQYoHWLNGwRFJHC+i+ZvdBRYosOFgC+Wg1vx4frN2/RG/NA7SYqqvKNLf39P2LSRA2pu6n0XYZA==", "dependencies": { "client-only": "0.0.1" }, @@ -6587,7 +6977,7 @@ "node": ">= 12.0.0" }, "peerDependencies": { - "react": ">= 16.8.0 || 17.x.x || ^18.0.0-0" + "react": ">= 16.8.0 || 17.x.x || ^18.0.0-0 || ^19.0.0-0" }, "peerDependenciesMeta": { "@babel/core": { @@ -7051,18 +7441,6 @@ "uuid": "dist/bin/uuid" } }, - "node_modules/watchpack": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.0.tgz", - "integrity": "sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg==", - "dependencies": { - "glob-to-regexp": "^0.4.1", - "graceful-fs": "^4.1.2" - }, - "engines": { - "node": ">=10.13.0" - } - }, "node_modules/webidl-conversions": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", @@ -7221,12 +7599,6 @@ "node": ">=10" } }, - "node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": 
"sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/yaml": { "version": "1.10.2", "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", @@ -7271,14 +7643,6 @@ "funding": { "url": "https://github.com/sponsors/sindresorhus" } - }, - "node_modules/zod": { - "version": "3.21.4", - "resolved": "https://registry.npmjs.org/zod/-/zod-3.21.4.tgz", - "integrity": "sha512-m46AKbrzKVzOzs/DZgVnG5H55N1sv1M8qZU3A8RIKbs3mrACDNeIOeilDymVb2HdmP8uwshOCF4uJ8uM9rCqJw==", - "funding": { - "url": "https://github.com/sponsors/colinhacks" - } } } } diff --git a/apps/frontend/package.json b/apps/frontend/package.json index 802074de..73a9f10c 100644 --- a/apps/frontend/package.json +++ b/apps/frontend/package.json @@ -29,7 +29,7 @@ "firebase": "^9.12.1", "joi": "^17.6.4", "mongodb": "^5.2.0", - "next": "^13.4.19", + "next": "^15.0.2", "pino": "^7.11.0", "react": "^18.2.0", "react-beautiful-dnd": "^13.1.1", From 895dada0b52adfe03fb65d1efcd695891be2df52 Mon Sep 17 00:00:00 2001 From: rhit-windsors Date: Wed, 30 Oct 2024 14:37:12 -0400 Subject: [PATCH 043/198] Update page.tsx --- apps/frontend/app/dashboard/page.tsx | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index b6063339..08b38cfd 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -190,7 +190,7 @@ export default function DashboardPage() { if (!userId) { return; } - const run = async () => { + async () => { return await listenToExperiments(userId, (newExperimentList) => setExperiments(newExperimentList as ExperimentData[])); // TODO this assumes that all values will be present, which is not true } @@ -454,6 +454,7 @@ const ExperimentList = ({ experiments, onCopyExperiment, onDeleteExperiment }: E // Sort the experiments based on the selected sorting option useEffect(() => { + console.log(experiments); switch (sortBy) { case SortingOptions.NAME: setSortedExperiments([...experiments].sort(sortByName)); From c330fd0c1882e88e024d13978942780b597fd35a Mon Sep 17 00:00:00 2001 From: rhit-windsors Date: Wed, 30 Oct 2024 14:46:05 -0400 Subject: [PATCH 044/198] Update page.tsx --- apps/frontend/app/dashboard/page.tsx | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index 08b38cfd..29253776 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -191,7 +191,11 @@ export default function DashboardPage() { return; } async () => { - return await listenToExperiments(userId, (newExperimentList) => setExperiments(newExperimentList as ExperimentData[])); // TODO this assumes that all values will be present, which is not true + console.log("getting exps"); + const toReturn = await listenToExperiments(userId, (newExperimentList) => setExperiments(newExperimentList as ExperimentData[])); // TODO this assumes that all values will be present, which is not true + console.log(experiments); + console.log(toReturn); + return toReturn; } }, [userId]); From b9a6d0c81d5ef3384156ba8305a48bcbe8fd7aaa Mon Sep 17 00:00:00 2001 From: rhit-windsors Date: Wed, 30 Oct 2024 14:56:14 -0400 Subject: [PATCH 045/198] Update page.tsx --- apps/frontend/app/dashboard/page.tsx | 160 +++++++++++++-------------- 1 file changed, 78 insertions(+), 82 deletions(-) diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx 
index 29253776..c860171f 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -70,7 +70,7 @@ const Navbar = (props) => { + }} /> {/* Links section */}
@@ -190,14 +190,10 @@ export default function DashboardPage() { if (!userId) { return; } - async () => { - console.log("getting exps"); - const toReturn = await listenToExperiments(userId, (newExperimentList) => setExperiments(newExperimentList as ExperimentData[])); // TODO this assumes that all values will be present, which is not true - console.log(experiments); - console.log(toReturn); - return toReturn; - } - + console.log("getting exps"); + listenToExperiments(userId, (newExperimentList) => setExperiments(newExperimentList as ExperimentData[])); // TODO this assumes that all values will be present, which is not true + console.log(experiments); + }, [userId]); const QUEUE_UNKNOWN_LENGTH = -1; @@ -288,7 +284,7 @@ export default function DashboardPage() {
- { authService.userEmail } + {authService.userEmail}
@@ -300,7 +296,7 @@ export default function DashboardPage() { onClick={() => { setFormState(1); }} - // onClick + // onClick > {label} @@ -345,7 +341,7 @@ export default function DashboardPage() { `${queueLength} experiment${queueLength == 1 ? '' : 's'} in queue` } -
From 29486b049705c3a94025cce062cbf0c985781eb3 Mon Sep 17 00:00:00 2001 From: rhit-windsors Date: Tue, 5 Nov 2024 20:34:51 -0500 Subject: [PATCH 095/198] Update db.ts --- apps/frontend/firebase/db.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/apps/frontend/firebase/db.ts b/apps/frontend/firebase/db.ts index 861ccfc2..d938b447 100644 --- a/apps/frontend/firebase/db.ts +++ b/apps/frontend/firebase/db.ts @@ -33,6 +33,7 @@ export const submitExperiment = async (values: Partial, userId: } ).then((response) => { if (response?.ok) { + console.log(response.json()['id']); return response.json()['id']; } return Promise.reject(response); From eace80e2801e906f9d06a737785d3a23c4e86e9b Mon Sep 17 00:00:00 2001 From: rhit-windsors Date: Tue, 5 Nov 2024 20:41:19 -0500 Subject: [PATCH 096/198] Update db.ts --- apps/frontend/firebase/db.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/frontend/firebase/db.ts b/apps/frontend/firebase/db.ts index d938b447..faaf2ee1 100644 --- a/apps/frontend/firebase/db.ts +++ b/apps/frontend/firebase/db.ts @@ -33,7 +33,7 @@ export const submitExperiment = async (values: Partial, userId: } ).then((response) => { if (response?.ok) { - console.log(response.json()['id']); + console.log(response.json()); return response.json()['id']; } return Promise.reject(response); From 12dad8bb7c62b70bedea78c85030a3ac650ec410 Mon Sep 17 00:00:00 2001 From: rhit-windsors Date: Tue, 5 Nov 2024 20:49:46 -0500 Subject: [PATCH 097/198] Update db.ts --- apps/frontend/firebase/db.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/apps/frontend/firebase/db.ts b/apps/frontend/firebase/db.ts index faaf2ee1..9c721733 100644 --- a/apps/frontend/firebase/db.ts +++ b/apps/frontend/firebase/db.ts @@ -31,9 +31,9 @@ export const submitExperiment = async (values: Partial, userId: method: "POST", body: JSON.stringify(values) } - ).then((response) => { + ).then(async (response) => { if (response?.ok) { - console.log(response.json()); + console.log(await response.json()); return response.json()['id']; } return Promise.reject(response); From 31a2d15a196b51cfc02e06cb7a52f0e6bca8a849 Mon Sep 17 00:00:00 2001 From: rhit-windsors Date: Tue, 5 Nov 2024 20:55:08 -0500 Subject: [PATCH 098/198] Update db.ts --- apps/frontend/firebase/db.ts | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/apps/frontend/firebase/db.ts b/apps/frontend/firebase/db.ts index 9c721733..8ac86bf6 100644 --- a/apps/frontend/firebase/db.ts +++ b/apps/frontend/firebase/db.ts @@ -33,8 +33,7 @@ export const submitExperiment = async (values: Partial, userId: } ).then(async (response) => { if (response?.ok) { - console.log(await response.json()); - return response.json()['id']; + return await response.json()['id']; } return Promise.reject(response); }).then((expId: String) => { From 3a28c8d7dd7107f92ad5817f8f278e93b9864be4 Mon Sep 17 00:00:00 2001 From: rhit-windsors Date: Tue, 5 Nov 2024 21:04:31 -0500 Subject: [PATCH 099/198] Update subscribe.tsx --- apps/frontend/pages/api/experiments/subscribe.tsx | 3 --- 1 file changed, 3 deletions(-) diff --git a/apps/frontend/pages/api/experiments/subscribe.tsx b/apps/frontend/pages/api/experiments/subscribe.tsx index 0a596e96..26ec190f 100644 --- a/apps/frontend/pages/api/experiments/subscribe.tsx +++ b/apps/frontend/pages/api/experiments/subscribe.tsx @@ -30,12 +30,9 @@ export default async function handler(req, res) { const initDocs = await experimentsCollection .find({ '_id': new ObjectId(expId) }) .toArray(); - 
console.log(initDocs.length); const initArray = convertToExpsArray(initDocs)[0]; res.write(`data: ${JSON.stringify(initArray)}\n\n`); - console.log("creating change stream"); - console.log(changeStream); // Listen to changes in the collection changeStream.on("change", async () => { const updatedDocuments = await experimentsCollection From 2f4573127dae6285070e1374372fa7bc22cca90f Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Wed, 6 Nov 2024 07:57:19 -0500 Subject: [PATCH 100/198] add some logging --- .../flows/AddExperiment/stepComponents/DispatchStep.tsx | 4 +++- apps/frontend/app/dashboard/page.tsx | 6 +++--- apps/frontend/firebase/db.ts | 2 +- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/apps/frontend/app/components/flows/AddExperiment/stepComponents/DispatchStep.tsx b/apps/frontend/app/components/flows/AddExperiment/stepComponents/DispatchStep.tsx index 601f9711..212bde46 100644 --- a/apps/frontend/app/components/flows/AddExperiment/stepComponents/DispatchStep.tsx +++ b/apps/frontend/app/components/flows/AddExperiment/stepComponents/DispatchStep.tsx @@ -24,7 +24,9 @@ export const DispatchStep = ({ id, form, ...props }) => { const onDropFile = (files: Parameters[0]) => { setLoading(true); console.log('Submitting Experiment'); - submitExperiment(form.values, userId as string).then(async (expId) => { + submitExperiment(form.values, userId as string).then(async (json) => { + console.log(json); + const expId = json['id']; console.log(`Uploading file for ${expId}:`, files); const uploadResponse = await fetch('/api/files/uploadFile', { method: 'POST', diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index 581a06af..c92d9360 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -223,9 +223,9 @@ export default function DashboardPage() { } } - eventSource.onerror = (event) => { - console.error('SSE Error:', event); - }; + // eventSource.onerror = (event) => { + // console.error('SSE Error:', event); + // }; return () => eventSource.close(); diff --git a/apps/frontend/firebase/db.ts b/apps/frontend/firebase/db.ts index 8ac86bf6..7d7c2b7e 100644 --- a/apps/frontend/firebase/db.ts +++ b/apps/frontend/firebase/db.ts @@ -33,7 +33,7 @@ export const submitExperiment = async (values: Partial, userId: } ).then(async (response) => { if (response?.ok) { - return await response.json()['id']; + return response.json(); } return Promise.reject(response); }).then((expId: String) => { From 1ad6f52e266c8536db2e3fb6db2769fec6258aec Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Wed, 6 Nov 2024 10:51:48 -0500 Subject: [PATCH 101/198] make sure connection gets closed --- apps/frontend/frontend.Dockerfile | 70 +++++++++---------- .../frontend/pages/api/experiments/listen.tsx | 2 +- .../pages/api/experiments/subscribe.tsx | 2 +- 3 files changed, 37 insertions(+), 37 deletions(-) diff --git a/apps/frontend/frontend.Dockerfile b/apps/frontend/frontend.Dockerfile index c7c44bb5..ce47a8a2 100644 --- a/apps/frontend/frontend.Dockerfile +++ b/apps/frontend/frontend.Dockerfile @@ -1,59 +1,59 @@ -FROM node:20.6 AS base +# FROM node:20.6 AS base -WORKDIR /app +# WORKDIR /app -COPY . /app +# COPY . 
/app -RUN npm install +# RUN npm install -RUN npm run build +# RUN npm run build -EXPOSE $FRONTEND_WEBSERVER_PORT +# EXPOSE $FRONTEND_WEBSERVER_PORT -CMD ["npm", "start"] +# CMD ["npm", "start"] -# FROM node:20-alpine AS base +FROM node:20-alpine AS base -# FROM base AS deps +FROM base AS deps -# RUN apk add --no-cache libc6-compat -# WORKDIR /app +RUN apk add --no-cache libc6-compat +WORKDIR /app -# COPY package.json ./ +COPY package.json ./ -# RUN npm update && npm install +RUN npm update && npm install -# # Install this to optimize images -# RUN npm i sharp +# Install this to optimize images +RUN npm i sharp -# # If you want yarn update and install uncomment the bellow +# If you want yarn update and install uncomment the bellow -# # RUN yarn install && yarn upgrade +# RUN yarn install && yarn upgrade -# FROM base AS builder -# WORKDIR /app -# COPY --from=deps /app/node_modules ./node_modules -# COPY . . +FROM base AS builder +WORKDIR /app +COPY --from=deps /app/node_modules ./node_modules +COPY . . -# RUN npm run build +RUN npm run build -# FROM base AS runner -# WORKDIR /app +FROM base AS runner +WORKDIR /app -# ENV NODE_ENV=production -# RUN addgroup --system --gid 1001 nodejs -# RUN adduser --system --uid 1001 nextjs +ENV NODE_ENV=production +RUN addgroup --system --gid 1001 nodejs +RUN adduser --system --uid 1001 nextjs -# COPY --from=builder /app/public ./public +COPY --from=builder /app/public ./public -# RUN mkdir .next -# RUN chown nextjs:nodejs .next +RUN mkdir .next +RUN chown nextjs:nodejs .next -# COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./ -# COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static +COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./ +COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static -# USER nextjs +USER nextjs -# EXPOSE $FRONTEND_WEBSERVER_PORT +EXPOSE $FRONTEND_WEBSERVER_PORT -# CMD ["node", "server.js"] \ No newline at end of file +CMD ["node", "server.js"] \ No newline at end of file diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index 297cd797..d828b54b 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -57,7 +57,7 @@ export default async function handler(req, res) { }); // Close the change stream and client connection when the request ends - req.socket.on("close", () => { + req.on("close", () => { changeStream.close(); clearInterval(intervalId); res.end() diff --git a/apps/frontend/pages/api/experiments/subscribe.tsx b/apps/frontend/pages/api/experiments/subscribe.tsx index 26ec190f..134796e9 100644 --- a/apps/frontend/pages/api/experiments/subscribe.tsx +++ b/apps/frontend/pages/api/experiments/subscribe.tsx @@ -45,7 +45,7 @@ export default async function handler(req, res) { }); // Close the change stream and client connection when the request ends - req.socket.on("close", () => { + req.on("close", () => { changeStream.close(); clearInterval(intervalId); res.end() From 65848c573d6e479fecda6e2009e602f77efa1266 Mon Sep 17 00:00:00 2001 From: rhit-windsors Date: Thu, 7 Nov 2024 10:43:23 -0500 Subject: [PATCH 102/198] add small delay --- apps/backend/job-runner.yaml | 2 +- apps/frontend/pages/api/experiments/listen.tsx | 5 ++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/apps/backend/job-runner.yaml b/apps/backend/job-runner.yaml index cbd45305..0ff57460 100644 --- a/apps/backend/job-runner.yaml +++ b/apps/backend/job-runner.yaml 
@@ -10,7 +10,7 @@ spec: spec: containers: - name: runner - image: gladospipeline/glados-runner:main + image: gladospipeline/glados-runner:development imagePullPolicy: Always command: [] env: diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index d828b54b..c73a9377 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -43,7 +43,10 @@ export default async function handler(req, res) { .find({ 'creator': uid }) .toArray(); const initArray = convertToExpsArray(initDocs); - res.write(`data: ${JSON.stringify(initArray)}\n\n`); + setTimeout(() => { + res.write(`data: ${JSON.stringify(initArray)}\n\n`); + }, 500); + // Listen to changes in the collection changeStream.on("change", async () => { From b5e42d77ce44c0d1cd6aac12148e5c0d0c0097ac Mon Sep 17 00:00:00 2001 From: rhit-windsors Date: Thu, 7 Nov 2024 10:49:48 -0500 Subject: [PATCH 103/198] Update listen.tsx --- apps/frontend/pages/api/experiments/listen.tsx | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index c73a9377..cd30a343 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -43,10 +43,8 @@ export default async function handler(req, res) { .find({ 'creator': uid }) .toArray(); const initArray = convertToExpsArray(initDocs); - setTimeout(() => { - res.write(`data: ${JSON.stringify(initArray)}\n\n`); - }, 500); - + res.write(': heartbeat\n\n'); + res.write(`data: ${JSON.stringify(initArray)}\n\n`); // Listen to changes in the collection changeStream.on("change", async () => { From 6e48a40d43f19f892b6517e3a1d3d9d402d15148 Mon Sep 17 00:00:00 2001 From: rhit-windsors Date: Thu, 7 Nov 2024 10:56:45 -0500 Subject: [PATCH 104/198] Update page.tsx --- apps/frontend/app/dashboard/page.tsx | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index c92d9360..b204ef21 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -211,14 +211,15 @@ export default function DashboardPage() { const eventSource = new EventSource(`/api/experiments/listen?uid=${userId}`) console.log(eventSource); - eventSource.onopen = () => { + eventSource.onopen = (event) => { console.log("SSE open!"); + console.log(event); } eventSource.onmessage = (event) => { console.log("received change!"); console.log("data was:" + event.data); - if (JSON.parse(event.data) !== ':heartbeat') { + if (JSON.parse(event.data) !== ': heartbeat') { setExperiments(JSON.parse(event.data) as ExperimentData[]); } } From c954470fc2fc0a00aec9a18c6884243748f713af Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 11:17:50 -0500 Subject: [PATCH 105/198] change logging and update listen --- apps/frontend/app/dashboard/page.tsx | 8 ++++---- apps/frontend/pages/api/experiments/listen.tsx | 6 ++++-- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index b204ef21..85629f1d 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -211,17 +211,17 @@ export default function DashboardPage() { const eventSource = new EventSource(`/api/experiments/listen?uid=${userId}`) console.log(eventSource); - eventSource.onopen = (event) => { + 
eventSource.onopen = () => { console.log("SSE open!"); - console.log(event); } eventSource.onmessage = (event) => { console.log("received change!"); - console.log("data was:" + event.data); - if (JSON.parse(event.data) !== ': heartbeat') { + // console.log("data was:" + event.data); + if (event.data) { setExperiments(JSON.parse(event.data) as ExperimentData[]); } + } // eventSource.onerror = (event) => { diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index cd30a343..dbaba68f 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -39,11 +39,13 @@ export default async function handler(req, res) { res.write(': heartbeat\n\n'); }, HEARTBEAT_INTERVAL); + //Create function to listen + + const initDocs = await experimentsCollection .find({ 'creator': uid }) .toArray(); const initArray = convertToExpsArray(initDocs); - res.write(': heartbeat\n\n'); res.write(`data: ${JSON.stringify(initArray)}\n\n`); // Listen to changes in the collection @@ -95,4 +97,4 @@ function convertToExpsArray(arr: WithId[]) { fails: doc.fails ?? 0, totalExperimentRuns: doc.totalExperimentRuns ?? 0, })); -} +} \ No newline at end of file From 01784e58e833f629d28f1b7c0d080bc7e0d91395 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 11:33:41 -0500 Subject: [PATCH 106/198] update --- apps/frontend/frontend.Dockerfile | 4 ++-- apps/frontend/pages/api/experiments/listen.tsx | 2 +- apps/frontend/pages/api/experiments/subscribe.tsx | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/apps/frontend/frontend.Dockerfile b/apps/frontend/frontend.Dockerfile index ce47a8a2..219d73ea 100644 --- a/apps/frontend/frontend.Dockerfile +++ b/apps/frontend/frontend.Dockerfile @@ -12,11 +12,11 @@ # CMD ["npm", "start"] -FROM node:20-alpine AS base +FROM node:latest AS base FROM base AS deps -RUN apk add --no-cache libc6-compat +# RUN apk add --no-cache bash libc6-compat WORKDIR /app COPY package.json ./ diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index dbaba68f..287dd5d9 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -36,7 +36,7 @@ export default async function handler(req, res) { const HEARTBEAT_INTERVAL = 2500; // 5 seconds (adjust this as needed) const intervalId = setInterval(() => { // Send a heartbeat message to keep the connection alive - res.write(': heartbeat\n\n'); + res.write('data: heartbeat\n\n'); }, HEARTBEAT_INTERVAL); //Create function to listen diff --git a/apps/frontend/pages/api/experiments/subscribe.tsx b/apps/frontend/pages/api/experiments/subscribe.tsx index 134796e9..93ee91d6 100644 --- a/apps/frontend/pages/api/experiments/subscribe.tsx +++ b/apps/frontend/pages/api/experiments/subscribe.tsx @@ -21,10 +21,10 @@ export default async function handler(req, res) { 'Content-Type': 'text/event-stream', }); - const HEARTBEAT_INTERVAL = 5000; // 5 seconds (adjust this as needed) + const HEARTBEAT_INTERVAL = 2500; // 5 seconds (adjust this as needed) const intervalId = setInterval(() => { // Send a heartbeat message to keep the connection alive - res.write(': heartbeat\n\n'); + res.write('data: heartbeat\n\n'); }, HEARTBEAT_INTERVAL); const initDocs = await experimentsCollection From 9a2889d828fb76b959065178eeca180259a68838 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 14:59:18 -0500 Subject: [PATCH 107/198] update --- 
apps/frontend/frontend.Dockerfile | 4 ++-- apps/frontend/pages/api/experiments/listen.tsx | 4 +++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/apps/frontend/frontend.Dockerfile b/apps/frontend/frontend.Dockerfile index 219d73ea..1fa7c33d 100644 --- a/apps/frontend/frontend.Dockerfile +++ b/apps/frontend/frontend.Dockerfile @@ -12,11 +12,11 @@ # CMD ["npm", "start"] -FROM node:latest AS base +FROM node:20-alpine AS base FROM base AS deps -# RUN apk add --no-cache bash libc6-compat +RUN apk add --no-cache bash libc6-compat WORKDIR /app COPY package.json ./ diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index 287dd5d9..66ed6c0d 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -36,7 +36,8 @@ export default async function handler(req, res) { const HEARTBEAT_INTERVAL = 2500; // 5 seconds (adjust this as needed) const intervalId = setInterval(() => { // Send a heartbeat message to keep the connection alive - res.write('data: heartbeat\n\n'); + res.write(': heartbeat\n\n'); + res.flush(); }, HEARTBEAT_INTERVAL); //Create function to listen @@ -47,6 +48,7 @@ export default async function handler(req, res) { .toArray(); const initArray = convertToExpsArray(initDocs); res.write(`data: ${JSON.stringify(initArray)}\n\n`); + res.flush(); // Listen to changes in the collection changeStream.on("change", async () => { From c6415b5aee95df1a97ddfd462f45b28714ab8939 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 15:08:14 -0500 Subject: [PATCH 108/198] switch to websockets --- apps/frontend/app/dashboard/page.tsx | 32 +++-- apps/frontend/next.config.mjs | 11 +- apps/frontend/package-lock.json | 24 +++- apps/frontend/package.json | 3 +- .../frontend/pages/api/experiments/listen.tsx | 120 +++++++++--------- 5 files changed, 110 insertions(+), 80 deletions(-) diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index 85629f1d..8c7f1c69 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -208,27 +208,25 @@ export default function DashboardPage() { // setExperiments(experiments); // }, 2500) - const eventSource = new EventSource(`/api/experiments/listen?uid=${userId}`) - console.log(eventSource); + const socket = new WebSocket(`ws://${window.location.host}/api/experiments/listen?uid=${userId}`); - eventSource.onopen = () => { - console.log("SSE open!"); - } + socket.onopen = () => { + console.log('Connected to WebSocket server'); + }; - eventSource.onmessage = (event) => { - console.log("received change!"); - // console.log("data was:" + event.data); - if (event.data) { - setExperiments(JSON.parse(event.data) as ExperimentData[]); - } + socket.onmessage = (event) => { + const data = JSON.parse(event.data); + console.log('Received data:', data); + // Here you can update your component state with the received data + }; - } - - // eventSource.onerror = (event) => { - // console.error('SSE Error:', event); - // }; + socket.onclose = () => { + console.log('WebSocket connection closed'); + }; - return () => eventSource.close(); + return () => { + socket.close(); + }; // return () => clearInterval(interval); }, [userId]); diff --git a/apps/frontend/next.config.mjs b/apps/frontend/next.config.mjs index 4a3c2ea8..aaffbc0e 100644 --- a/apps/frontend/next.config.mjs +++ b/apps/frontend/next.config.mjs @@ -1,4 +1,7 @@ /** @type {import('next').NextConfig} */ + +const { 
webSocketUpgrade } = require('./pages/api/experiments/listen'); + const nextConfig = { reactStrictMode: true, output: 'standalone', // For deployment, https://nextjs.org/docs/advanced-features/output-file-tracing @@ -11,7 +14,13 @@ const nextConfig = { pathname: '/img/**' }, ], - } + }, + webpack: (config, { isServer }) => { + if (isServer) { + require('./pages/api/experiments/listen').webSocketUpgrade(config); + } + return config; + }, }; export default nextConfig; diff --git a/apps/frontend/package-lock.json b/apps/frontend/package-lock.json index 706a1fde..beb13338 100644 --- a/apps/frontend/package-lock.json +++ b/apps/frontend/package-lock.json @@ -35,7 +35,8 @@ "react-dom": "^18.2.0", "react-icons": "^4.6.0", "tabler-icons-react": "^1.55.0", - "uuid": "^9.0.0" + "uuid": "^9.0.0", + "ws": "^8.18.0" }, "devDependencies": { "@tailwindcss/forms": "^0.5.3", @@ -7636,6 +7637,27 @@ "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, + "node_modules/ws": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz", + "integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==", + "license": "MIT", + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, "node_modules/y18n": { "version": "5.0.8", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", diff --git a/apps/frontend/package.json b/apps/frontend/package.json index cf0932d0..e262bfed 100644 --- a/apps/frontend/package.json +++ b/apps/frontend/package.json @@ -38,7 +38,8 @@ "react-dom": "^18.2.0", "react-icons": "^4.6.0", "tabler-icons-react": "^1.55.0", - "uuid": "^9.0.0" + "uuid": "^9.0.0", + "ws": "^8.18.0" }, "devDependencies": { "@tailwindcss/forms": "^0.5.3", diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index 66ed6c0d..de680867 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -1,77 +1,77 @@ +// pages/api/experiments/listen.js import clientPromise, { COLLECTION_EXPERIMENTS, DB_NAME } from "../../../lib/mongodb"; -import { WithId, Document } from "mongodb"; +import { WebSocketServer } from 'ws'; -export const runtime = 'nodejs'; -export const dynamic = "force-dynamic"; +let wss; // WebSocket server instance -export default async function handler(req, res) { - const { uid } = req.query; +export const config = { + api: { + bodyParser: false, // Disable body parsing for WebSocket handling + }, +}; - if (!uid){ - return; - } - - // Connect to MongoDB - const client = await clientPromise; - const db = client.db(DB_NAME); - const experimentsCollection = db.collection(COLLECTION_EXPERIMENTS); +async function handler(req, res) { + if (!wss) { + // Initialize WebSocket server + wss = new WebSocketServer({ noServer: true }); - // Set up a Change Stream for real-time updates - const pipeline = [{ $match: { "fullDocument.creator": uid } }]; - const options = { fullDocument: "updateLookup" }; - const changeStream = experimentsCollection.watch(pipeline, options); + wss.on('connection', async (ws, request) => { + const uid = new URL(request.url, `http://${request.headers.host}`).searchParams.get('uid'); + if (!uid) { 
+ ws.close(); + return; + } - // Set up real-time streaming of changes to the client using SSE - // res.setHeader("Access-Control-Allow-Origin", "*"); - // res.setHeader("Cache-Control", "no-cache"); - // res.setHeader("Connection", "keep-alive"); - // res.setHeader("Content-Type", "text/event-stream"); - res.writeHead(200, { - Connection: 'keep-alive', - 'Content-Encoding': 'none', - 'Cache-Control': 'no-cache', - 'Content-Type': 'text/event-stream', - }); + console.log(`WebSocket connection established for user: ${uid}`); - const HEARTBEAT_INTERVAL = 2500; // 5 seconds (adjust this as needed) - const intervalId = setInterval(() => { - // Send a heartbeat message to keep the connection alive - res.write(': heartbeat\n\n'); - res.flush(); - }, HEARTBEAT_INTERVAL); + // MongoDB Change Stream setup + const client = await clientPromise; + const db = client.db(DB_NAME); + const experimentsCollection = db.collection(COLLECTION_EXPERIMENTS); - //Create function to listen + const pipeline = [{ $match: { "fullDocument.creator": uid } }]; + const options = { fullDocument: "updateLookup" }; + const changeStream = experimentsCollection.watch(pipeline, options); + // Initial data fetch and send to client + const initDocs = await experimentsCollection.find({ creator: uid }).toArray(); + ws.send(JSON.stringify(initDocs.map(doc => formatExperiment(doc)))); - const initDocs = await experimentsCollection - .find({ 'creator': uid }) - .toArray(); - const initArray = convertToExpsArray(initDocs); - res.write(`data: ${JSON.stringify(initArray)}\n\n`); - res.flush(); + // Listen for MongoDB changes + changeStream.on('change', async () => { + const updatedDocuments = await experimentsCollection.find({ creator: uid }).toArray(); + ws.send(JSON.stringify(updatedDocuments.map(doc => formatExperiment(doc)))); + }); - // Listen to changes in the collection - changeStream.on("change", async () => { - const updatedDocuments = await experimentsCollection - .find({ 'creator': uid }) - .toArray(); + ws.on('close', () => { + console.log(`WebSocket connection closed for user: ${uid}`); + changeStream.close(); + }); + }); + } - const result = convertToExpsArray(updatedDocuments); - // Send the updated experiments to the client - res.write(`data: ${JSON.stringify(result)}\n\n`); - }); + if (req.method === 'GET') { + res.status(200).send('WebSocket server is running'); + } else { + res.status(405).end(); // Method Not Allowed + } +} - // Close the change stream and client connection when the request ends - req.on("close", () => { - changeStream.close(); - clearInterval(intervalId); - res.end() +// Upgrade WebSocket connection +export function webSocketUpgrade(server) { + server.on('upgrade', (req, socket, head) => { + if (req.url.startsWith('/api/experiments/listen')) { + wss.handleUpgrade(req, socket, head, (ws) => { + wss.emit('connection', ws, req); + }); + } else { + socket.destroy(); + } }); - } -function convertToExpsArray(arr: WithId[]) { - return arr.map((doc: WithId) => ({ +function formatExperiment(doc) { + return { id: doc._id.toString(), name: doc.name || "Untitled", creator: doc.creator || "Unknown", @@ -98,5 +98,5 @@ function convertToExpsArray(arr: WithId[]) { passes: doc.passes ?? 0, fails: doc.fails ?? 0, totalExperimentRuns: doc.totalExperimentRuns ?? 
0, - })); -} \ No newline at end of file + }; +} From 50422cc4e177c0c0c740236ad01a2601c3f98c38 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 15:20:45 -0500 Subject: [PATCH 109/198] update socket server --- apps/frontend/next.config.mjs | 11 +- .../frontend/pages/api/experiments/listen.tsx | 139 +++++++++--------- 2 files changed, 72 insertions(+), 78 deletions(-) diff --git a/apps/frontend/next.config.mjs b/apps/frontend/next.config.mjs index aaffbc0e..4a3c2ea8 100644 --- a/apps/frontend/next.config.mjs +++ b/apps/frontend/next.config.mjs @@ -1,7 +1,4 @@ /** @type {import('next').NextConfig} */ - -const { webSocketUpgrade } = require('./pages/api/experiments/listen'); - const nextConfig = { reactStrictMode: true, output: 'standalone', // For deployment, https://nextjs.org/docs/advanced-features/output-file-tracing @@ -14,13 +11,7 @@ const nextConfig = { pathname: '/img/**' }, ], - }, - webpack: (config, { isServer }) => { - if (isServer) { - require('./pages/api/experiments/listen').webSocketUpgrade(config); - } - return config; - }, + } }; export default nextConfig; diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index de680867..d1e3b2a0 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -1,77 +1,80 @@ -// pages/api/experiments/listen.js import clientPromise, { COLLECTION_EXPERIMENTS, DB_NAME } from "../../../lib/mongodb"; -import { WebSocketServer } from 'ws'; - -let wss; // WebSocket server instance - -export const config = { - api: { - bodyParser: false, // Disable body parsing for WebSocket handling - }, -}; - -async function handler(req, res) { - if (!wss) { - // Initialize WebSocket server - wss = new WebSocketServer({ noServer: true }); - - wss.on('connection', async (ws, request) => { - const uid = new URL(request.url, `http://${request.headers.host}`).searchParams.get('uid'); - if (!uid) { - ws.close(); - return; - } - - console.log(`WebSocket connection established for user: ${uid}`); - - // MongoDB Change Stream setup - const client = await clientPromise; - const db = client.db(DB_NAME); - const experimentsCollection = db.collection(COLLECTION_EXPERIMENTS); - - const pipeline = [{ $match: { "fullDocument.creator": uid } }]; - const options = { fullDocument: "updateLookup" }; - const changeStream = experimentsCollection.watch(pipeline, options); - - // Initial data fetch and send to client - const initDocs = await experimentsCollection.find({ creator: uid }).toArray(); - ws.send(JSON.stringify(initDocs.map(doc => formatExperiment(doc)))); - - // Listen for MongoDB changes - changeStream.on('change', async () => { - const updatedDocuments = await experimentsCollection.find({ creator: uid }).toArray(); - ws.send(JSON.stringify(updatedDocuments.map(doc => formatExperiment(doc)))); - }); - - ws.on('close', () => { - console.log(`WebSocket connection closed for user: ${uid}`); - changeStream.close(); - }); - }); +import { WithId, Document } from "mongodb"; +import WebSocket from 'ws'; + +export default async function handler(req, res) { + const { uid } = req.query; + + if (!uid) { + return; } - if (req.method === 'GET') { - res.status(200).send('WebSocket server is running'); - } else { - res.status(405).end(); // Method Not Allowed + const wss = new WebSocket.Server({ noServer: true }); + wss.on('connection'), () => { + console.log("Made Websocket connection!"); } -} -// Upgrade WebSocket connection -export function 
webSocketUpgrade(server) { - server.on('upgrade', (req, socket, head) => { - if (req.url.startsWith('/api/experiments/listen')) { - wss.handleUpgrade(req, socket, head, (ws) => { - wss.emit('connection', ws, req); - }); - } else { - socket.destroy(); - } + // Connect to MongoDB + const client = await clientPromise; + const db = client.db(DB_NAME); + const experimentsCollection = db.collection(COLLECTION_EXPERIMENTS); + + // Set up a Change Stream for real-time updates + const pipeline = [{ $match: { "fullDocument.creator": uid } }]; + const options = { fullDocument: "updateLookup" }; + const changeStream = experimentsCollection.watch(pipeline, options); + + if (!res.writableEnded) { + res.writeHead(101, { + Connection: 'upgrade', + 'Content-Encoding': 'none', + 'Cache-Control': 'no-cache', + 'Content-Type': 'text/plain', + 'Upgrade': 'websocket' + }); + } + + wss.handleUpgrade(req, req.socket, Buffer.alloc(0), function done(ws) { + wss.emit('connection', ws, req); }); + + + const HEARTBEAT_INTERVAL = 2500; // 5 seconds (adjust this as needed) + const intervalId = setInterval(() => { + // Send a heartbeat message to keep the connection alive + wss.send(":heartbeat"); + }, HEARTBEAT_INTERVAL); + + //Create function to listen + + + const initDocs = await experimentsCollection + .find({ 'creator': uid }) + .toArray(); + const initArray = convertToExpsArray(initDocs); + wss.send(`data: ${JSON.stringify(initArray)}\n\n`); + + // Listen to changes in the collection + changeStream.on("change", async () => { + const updatedDocuments = await experimentsCollection + .find({ 'creator': uid }) + .toArray(); + + const result = convertToExpsArray(updatedDocuments); + // Send the updated experiments to the client + wss.send(`data: ${JSON.stringify(result)}\n\n`); + }); + + // Close the change stream and client connection when the request ends + req.on("close", () => { + changeStream.close(); + clearInterval(intervalId); + }); + } -function formatExperiment(doc) { - return { +function convertToExpsArray(arr: WithId[]) { + return arr.map((doc: WithId) => ({ id: doc._id.toString(), name: doc.name || "Untitled", creator: doc.creator || "Unknown", @@ -98,5 +101,5 @@ function formatExperiment(doc) { passes: doc.passes ?? 0, fails: doc.fails ?? 0, totalExperimentRuns: doc.totalExperimentRuns ?? 
0, - }; -} + })); +} \ No newline at end of file From 0d261ef85e1bd9e1cd40398b4672826e97ad178e Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 15:30:52 -0500 Subject: [PATCH 110/198] fix socket --- apps/frontend/package-lock.json | 11 ++++ apps/frontend/package.json | 1 + .../frontend/pages/api/experiments/listen.tsx | 60 ++++++++++--------- 3 files changed, 44 insertions(+), 28 deletions(-) diff --git a/apps/frontend/package-lock.json b/apps/frontend/package-lock.json index beb13338..ac23d6df 100644 --- a/apps/frontend/package-lock.json +++ b/apps/frontend/package-lock.json @@ -44,6 +44,7 @@ "@types/react": "^18.0.26", "@types/react-dom": "^18.0.10", "@types/uuid": "^9.0.0", + "@types/ws": "^8.5.13", "autoprefixer": "^10.4.12", "eslint": "^8.26.0", "eslint-config-google": "^0.14.0", @@ -2403,6 +2404,16 @@ "@types/webidl-conversions": "*" } }, + "node_modules/@types/ws": { + "version": "8.5.13", + "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.13.tgz", + "integrity": "sha512-osM/gWBTPKgHV8XkTunnegTRIsvF6owmf5w+JtAfOw472dptdm0dlGv4xCt6GwQRcC2XVOvvRE/0bAoQcL2QkA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/yargs": { "version": "17.0.24", "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.24.tgz", diff --git a/apps/frontend/package.json b/apps/frontend/package.json index e262bfed..6539ad40 100644 --- a/apps/frontend/package.json +++ b/apps/frontend/package.json @@ -47,6 +47,7 @@ "@types/react": "^18.0.26", "@types/react-dom": "^18.0.10", "@types/uuid": "^9.0.0", + "@types/ws": "^8.5.13", "autoprefixer": "^10.4.12", "eslint": "^8.26.0", "eslint-config-google": "^0.14.0", diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index d1e3b2a0..d65028da 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -10,9 +10,38 @@ export default async function handler(req, res) { } const wss = new WebSocket.Server({ noServer: true }); - wss.on('connection'), () => { + + wss.on('connection', async (ws) => { console.log("Made Websocket connection!"); - } + + const HEARTBEAT_INTERVAL = 2500; // 5 seconds (adjust this as needed) + const intervalId = setInterval(() => { + // Send a heartbeat message to keep the connection alive + ws.send(":heartbeat"); + }, HEARTBEAT_INTERVAL); + + const initDocs = await experimentsCollection + .find({ 'creator': uid }) + .toArray(); + const initArray = convertToExpsArray(initDocs); + ws.send(`data: ${JSON.stringify(initArray)}\n\n`); + + // Listen to changes in the collection + changeStream.on("change", async () => { + const updatedDocuments = await experimentsCollection + .find({ 'creator': uid }) + .toArray(); + + const result = convertToExpsArray(updatedDocuments); + // Send the updated experiments to the client + ws.send(`data: ${JSON.stringify(result)}\n\n`); + }); + + ws.on('close', () => { + changeStream.close(); + clearInterval(intervalId); + }); + }); // Connect to MongoDB const client = await clientPromise; @@ -39,36 +68,11 @@ export default async function handler(req, res) { }); - const HEARTBEAT_INTERVAL = 2500; // 5 seconds (adjust this as needed) - const intervalId = setInterval(() => { - // Send a heartbeat message to keep the connection alive - wss.send(":heartbeat"); - }, HEARTBEAT_INTERVAL); - - //Create function to listen - - - const initDocs = await experimentsCollection - .find({ 'creator': uid }) - .toArray(); - const initArray = 
convertToExpsArray(initDocs); - wss.send(`data: ${JSON.stringify(initArray)}\n\n`); - // Listen to changes in the collection - changeStream.on("change", async () => { - const updatedDocuments = await experimentsCollection - .find({ 'creator': uid }) - .toArray(); - - const result = convertToExpsArray(updatedDocuments); - // Send the updated experiments to the client - wss.send(`data: ${JSON.stringify(result)}\n\n`); - }); // Close the change stream and client connection when the request ends req.on("close", () => { - changeStream.close(); - clearInterval(intervalId); + wss.close(); }); } From ed2b29d7b2f16be496030dc05888e6ba42da6bc8 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 15:32:03 -0500 Subject: [PATCH 111/198] Update page.tsx --- apps/frontend/app/dashboard/page.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index 8c7f1c69..28e96be2 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -207,8 +207,8 @@ export default function DashboardPage() { // var experiments = await fetchExperiments(userId); // setExperiments(experiments); // }, 2500) - - const socket = new WebSocket(`ws://${window.location.host}/api/experiments/listen?uid=${userId}`); + const protocol = window.location.protocol === "https:" ? "wss:" : "ws:"; // Use wss if HTTPS + const socket = new WebSocket(`${protocol}//${window.location.host}/api/experiments/listen?uid=${userId}`); socket.onopen = () => { console.log('Connected to WebSocket server'); From 80c5c8b5fb0ddc3786bd24bce4df67c9ae9f382d Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 15:40:15 -0500 Subject: [PATCH 112/198] Update listen.tsx --- .../frontend/pages/api/experiments/listen.tsx | 75 ++++++++++--------- 1 file changed, 39 insertions(+), 36 deletions(-) diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index d65028da..ddf9796a 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -2,81 +2,84 @@ import clientPromise, { COLLECTION_EXPERIMENTS, DB_NAME } from "../../../lib/mon import { WithId, Document } from "mongodb"; import WebSocket from 'ws'; +export const config = { + api: { + bodyParser: false, // Disable body parser to handle WebSocket upgrade + }, +}; + export default async function handler(req, res) { const { uid } = req.query; if (!uid) { + res.status(400).send('User ID is required'); return; } + // Connect to MongoDB + const client = await clientPromise; + const db = client.db(DB_NAME); + const experimentsCollection = db.collection(COLLECTION_EXPERIMENTS); + + // Set up a Change Stream for real-time updates + const pipeline = [{ $match: { "fullDocument.creator": uid } }]; + const options = { fullDocument: "updateLookup" }; + const changeStream = experimentsCollection.watch(pipeline, options); + + // Create a new WebSocket server with `noServer` set to true const wss = new WebSocket.Server({ noServer: true }); wss.on('connection', async (ws) => { - console.log("Made Websocket connection!"); + console.log("Made WebSocket connection!"); - const HEARTBEAT_INTERVAL = 2500; // 5 seconds (adjust this as needed) + // Set up heartbeat to keep connection alive + const HEARTBEAT_INTERVAL = 2500; // Adjust this as needed (in milliseconds) const intervalId = setInterval(() => { - // Send a heartbeat message to keep the connection alive - ws.send(":heartbeat"); 
+ ws.send("heartbeat"); // Send heartbeat message }, HEARTBEAT_INTERVAL); - const initDocs = await experimentsCollection - .find({ 'creator': uid }) - .toArray(); + // Initial data fetch and send it to the client + const initDocs = await experimentsCollection.find({ creator: uid }).toArray(); const initArray = convertToExpsArray(initDocs); - ws.send(`data: ${JSON.stringify(initArray)}\n\n`); + ws.send(JSON.stringify(initArray)); // Listen to changes in the collection - changeStream.on("change", async () => { - const updatedDocuments = await experimentsCollection - .find({ 'creator': uid }) - .toArray(); - + changeStream.on('change', async () => { + const updatedDocuments = await experimentsCollection.find({ creator: uid }).toArray(); const result = convertToExpsArray(updatedDocuments); // Send the updated experiments to the client - ws.send(`data: ${JSON.stringify(result)}\n\n`); + ws.send(JSON.stringify(result)); }); + // Clean up when WebSocket closes ws.on('close', () => { - changeStream.close(); - clearInterval(intervalId); + clearInterval(intervalId); // Clear heartbeat interval + changeStream.close(); // Close change stream when connection is closed }); }); - // Connect to MongoDB - const client = await clientPromise; - const db = client.db(DB_NAME); - const experimentsCollection = db.collection(COLLECTION_EXPERIMENTS); - - // Set up a Change Stream for real-time updates - const pipeline = [{ $match: { "fullDocument.creator": uid } }]; - const options = { fullDocument: "updateLookup" }; - const changeStream = experimentsCollection.watch(pipeline, options); - + // Handle WebSocket upgrade request if (!res.writableEnded) { res.writeHead(101, { - Connection: 'upgrade', - 'Content-Encoding': 'none', + 'Connection': 'Upgrade', + 'Upgrade': 'websocket', 'Cache-Control': 'no-cache', 'Content-Type': 'text/plain', - 'Upgrade': 'websocket' }); } - wss.handleUpgrade(req, req.socket, Buffer.alloc(0), function done(ws) { + // Handle the WebSocket upgrade and pass the request to the WebSocket server + wss.handleUpgrade(req, req.socket, Buffer.alloc(0), (ws) => { wss.emit('connection', ws, req); }); - - - - // Close the change stream and client connection when the request ends + // Clean up when request ends req.on("close", () => { wss.close(); }); - } +// Utility function to convert MongoDB documents to the desired structure function convertToExpsArray(arr: WithId[]) { return arr.map((doc: WithId) => ({ id: doc._id.toString(), @@ -106,4 +109,4 @@ function convertToExpsArray(arr: WithId[]) { fails: doc.fails ?? 0, totalExperimentRuns: doc.totalExperimentRuns ?? 
0, })); -} \ No newline at end of file +} From ee68182e15f6236693f9fab286d7f427b3395741 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 15:45:51 -0500 Subject: [PATCH 113/198] Update listen.tsx --- apps/frontend/pages/api/experiments/listen.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index ddf9796a..30ac899e 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -1,6 +1,6 @@ import clientPromise, { COLLECTION_EXPERIMENTS, DB_NAME } from "../../../lib/mongodb"; import { WithId, Document } from "mongodb"; -import WebSocket from 'ws'; +import { WebSocket } from 'ws'; export const config = { api: { From d22df45d814eb6111b031cf38d509eb78248e72b Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 15:47:56 -0500 Subject: [PATCH 114/198] Update listen.tsx --- apps/frontend/pages/api/experiments/listen.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index 30ac899e..98c9c96f 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -1,6 +1,6 @@ import clientPromise, { COLLECTION_EXPERIMENTS, DB_NAME } from "../../../lib/mongodb"; import { WithId, Document } from "mongodb"; -import { WebSocket } from 'ws'; +import { Server } from 'ws' export const config = { api: { @@ -27,7 +27,7 @@ export default async function handler(req, res) { const changeStream = experimentsCollection.watch(pipeline, options); // Create a new WebSocket server with `noServer` set to true - const wss = new WebSocket.Server({ noServer: true }); + const wss = new Server({ noServer: true }); wss.on('connection', async (ws) => { console.log("Made WebSocket connection!"); From aed558601c2b8ffdab18992d1b3692bc2e8585ae Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 15:58:52 -0500 Subject: [PATCH 115/198] try next-ws --- apps/frontend/app/dashboard/page.tsx | 1 + apps/frontend/package-lock.json | 12 ++ apps/frontend/package.json | 1 + .../frontend/pages/api/experiments/listen.tsx | 151 ++++++++++-------- 4 files changed, 98 insertions(+), 67 deletions(-) diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index 28e96be2..60c957bc 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -212,6 +212,7 @@ export default function DashboardPage() { socket.onopen = () => { console.log('Connected to WebSocket server'); + socket.send("hello socket!"); }; socket.onmessage = (event) => { diff --git a/apps/frontend/package-lock.json b/apps/frontend/package-lock.json index ac23d6df..a48ba1f2 100644 --- a/apps/frontend/package-lock.json +++ b/apps/frontend/package-lock.json @@ -29,6 +29,7 @@ "joi": "^17.6.4", "mongodb": "^5.2.0", "next": "^15.0.2", + "next-ws": "^1.1.1", "pino": "^7.11.0", "react": "^18.2.0", "react-beautiful-dnd": "^13.1.1", @@ -5645,6 +5646,17 @@ } } }, + "node_modules/next-ws": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/next-ws/-/next-ws-1.1.1.tgz", + "integrity": "sha512-J/wNgcd1lXIkrFbRNP60lNVeAVieIDlxahkQ5jpfX3QJhuLPYLBh/CFvX82nkLUBlXANQbTqg9F9yU4XxjmUIg==", + "license": "MIT", + "peerDependencies": { + "next": ">=13.1.1", + "react": "*", + "ws": "*" + } + }, "node_modules/node-fetch": { "version": "2.6.7", "resolved": 
"https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", diff --git a/apps/frontend/package.json b/apps/frontend/package.json index 6539ad40..e737b9d9 100644 --- a/apps/frontend/package.json +++ b/apps/frontend/package.json @@ -32,6 +32,7 @@ "joi": "^17.6.4", "mongodb": "^5.2.0", "next": "^15.0.2", + "next-ws": "^1.1.1", "pino": "^7.11.0", "react": "^18.2.0", "react-beautiful-dnd": "^13.1.1", diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index 98c9c96f..4e815582 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -8,76 +8,93 @@ export const config = { }, }; -export default async function handler(req, res) { - const { uid } = req.query; - - if (!uid) { - res.status(400).send('User ID is required'); - return; - } - - // Connect to MongoDB - const client = await clientPromise; - const db = client.db(DB_NAME); - const experimentsCollection = db.collection(COLLECTION_EXPERIMENTS); - - // Set up a Change Stream for real-time updates - const pipeline = [{ $match: { "fullDocument.creator": uid } }]; - const options = { fullDocument: "updateLookup" }; - const changeStream = experimentsCollection.watch(pipeline, options); - - // Create a new WebSocket server with `noServer` set to true - const wss = new Server({ noServer: true }); - - wss.on('connection', async (ws) => { - console.log("Made WebSocket connection!"); - - // Set up heartbeat to keep connection alive - const HEARTBEAT_INTERVAL = 2500; // Adjust this as needed (in milliseconds) - const intervalId = setInterval(() => { - ws.send("heartbeat"); // Send heartbeat message - }, HEARTBEAT_INTERVAL); - - // Initial data fetch and send it to the client - const initDocs = await experimentsCollection.find({ creator: uid }).toArray(); - const initArray = convertToExpsArray(initDocs); - ws.send(JSON.stringify(initArray)); - - // Listen to changes in the collection - changeStream.on('change', async () => { - const updatedDocuments = await experimentsCollection.find({ creator: uid }).toArray(); - const result = convertToExpsArray(updatedDocuments); - // Send the updated experiments to the client - ws.send(JSON.stringify(result)); - }); - - // Clean up when WebSocket closes - ws.on('close', () => { - clearInterval(intervalId); // Clear heartbeat interval - changeStream.close(); // Close change stream when connection is closed - }); +export function SOCKET( + client: import('ws').WebSocket, + request: import('http').IncomingMessage, + server: import('ws').WebSocketServer + ) { + console.log('A client connected'); + + client.on('message', (message) => { + console.log('Received message:', message); + client.send(message); }); - - // Handle WebSocket upgrade request - if (!res.writableEnded) { - res.writeHead(101, { - 'Connection': 'Upgrade', - 'Upgrade': 'websocket', - 'Cache-Control': 'no-cache', - 'Content-Type': 'text/plain', - }); - } - - // Handle the WebSocket upgrade and pass the request to the WebSocket server - wss.handleUpgrade(req, req.socket, Buffer.alloc(0), (ws) => { - wss.emit('connection', ws, req); + + client.on('close', () => { + console.log('A client disconnected'); }); + } - // Clean up when request ends - req.on("close", () => { - wss.close(); - }); -} +// export default async function handler(req, res) { +// const { uid } = req.query; + +// if (!uid) { +// res.status(400).send('User ID is required'); +// return; +// } + +// // Connect to MongoDB +// const client = await clientPromise; +// const db = 
client.db(DB_NAME); +// const experimentsCollection = db.collection(COLLECTION_EXPERIMENTS); + +// // Set up a Change Stream for real-time updates +// const pipeline = [{ $match: { "fullDocument.creator": uid } }]; +// const options = { fullDocument: "updateLookup" }; +// const changeStream = experimentsCollection.watch(pipeline, options); + +// // Create a new WebSocket server with `noServer` set to true +// const wss = new Server({ noServer: true }); + +// wss.on('connection', async (ws) => { +// console.log("Made WebSocket connection!"); + +// // Set up heartbeat to keep connection alive +// const HEARTBEAT_INTERVAL = 2500; // Adjust this as needed (in milliseconds) +// const intervalId = setInterval(() => { +// ws.send("heartbeat"); // Send heartbeat message +// }, HEARTBEAT_INTERVAL); + +// // Initial data fetch and send it to the client +// const initDocs = await experimentsCollection.find({ creator: uid }).toArray(); +// const initArray = convertToExpsArray(initDocs); +// ws.send(JSON.stringify(initArray)); + +// // Listen to changes in the collection +// changeStream.on('change', async () => { +// const updatedDocuments = await experimentsCollection.find({ creator: uid }).toArray(); +// const result = convertToExpsArray(updatedDocuments); +// // Send the updated experiments to the client +// ws.send(JSON.stringify(result)); +// }); + +// // Clean up when WebSocket closes +// ws.on('close', () => { +// clearInterval(intervalId); // Clear heartbeat interval +// changeStream.close(); // Close change stream when connection is closed +// }); +// }); + +// // Handle WebSocket upgrade request +// if (!res.writableEnded) { +// res.writeHead(101, { +// 'Connection': 'Upgrade', +// 'Upgrade': 'websocket', +// 'Cache-Control': 'no-cache', +// 'Content-Type': 'text/plain', +// }); +// } + +// // Handle the WebSocket upgrade and pass the request to the WebSocket server +// wss.handleUpgrade(req, req.socket, Buffer.alloc(0), (ws) => { +// wss.emit('connection', ws, req); +// }); + +// // Clean up when request ends +// req.on("close", () => { +// wss.close(); +// }); +// } // Utility function to convert MongoDB documents to the desired structure function convertToExpsArray(arr: WithId[]) { From b0dd25ac38f6df19bf32388fa1629dfe5f4a4799 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 16:05:50 -0500 Subject: [PATCH 116/198] Update listen.tsx --- apps/frontend/pages/api/experiments/listen.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index 4e815582..f5fde260 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -8,7 +8,7 @@ export const config = { }, }; -export function SOCKET( +export default function SOCKET( client: import('ws').WebSocket, request: import('http').IncomingMessage, server: import('ws').WebSocketServer From 0a5525e4743f0edeee106f4c98df9c998b41b4a2 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 16:10:54 -0500 Subject: [PATCH 117/198] Update page.tsx --- apps/frontend/app/dashboard/page.tsx | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index 60c957bc..ff9b7e9f 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -207,8 +207,7 @@ export default function DashboardPage() { // var experiments = await fetchExperiments(userId); // 
setExperiments(experiments); // }, 2500) - const protocol = window.location.protocol === "https:" ? "wss:" : "ws:"; // Use wss if HTTPS - const socket = new WebSocket(`${protocol}//${window.location.host}/api/experiments/listen?uid=${userId}`); + const socket = new WebSocket(`http://${window.location.host}/api/experiments/listen?uid=${userId}`); socket.onopen = () => { console.log('Connected to WebSocket server'); From ae9b7e49b81dc85be73d24bc9fbeb3d85c916d6a Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 16:14:32 -0500 Subject: [PATCH 118/198] Update page.tsx --- apps/frontend/app/dashboard/page.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index ff9b7e9f..a0752dac 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -207,7 +207,7 @@ export default function DashboardPage() { // var experiments = await fetchExperiments(userId); // setExperiments(experiments); // }, 2500) - const socket = new WebSocket(`http://${window.location.host}/api/experiments/listen?uid=${userId}`); + const socket = new WebSocket(`https://${window.location.host}/api/experiments/listen?uid=${userId}`); socket.onopen = () => { console.log('Connected to WebSocket server'); From 4813b4250d4c1323c7581ebd99f97650f2e7f486 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 16:20:27 -0500 Subject: [PATCH 119/198] Update page.tsx --- apps/frontend/app/dashboard/page.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index a0752dac..ff9b7e9f 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -207,7 +207,7 @@ export default function DashboardPage() { // var experiments = await fetchExperiments(userId); // setExperiments(experiments); // }, 2500) - const socket = new WebSocket(`https://${window.location.host}/api/experiments/listen?uid=${userId}`); + const socket = new WebSocket(`http://${window.location.host}/api/experiments/listen?uid=${userId}`); socket.onopen = () => { console.log('Connected to WebSocket server'); From 2abc2f139da454c0d671c7db2f78beb1117a8919 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 16:25:31 -0500 Subject: [PATCH 120/198] move to apps folder --- apps/frontend/{pages => app}/api/experiments/listen.tsx | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) rename apps/frontend/{pages => app}/api/experiments/listen.tsx (96%) diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/app/api/experiments/listen.tsx similarity index 96% rename from apps/frontend/pages/api/experiments/listen.tsx rename to apps/frontend/app/api/experiments/listen.tsx index f5fde260..4bff5d78 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/app/api/experiments/listen.tsx @@ -1,14 +1,7 @@ import clientPromise, { COLLECTION_EXPERIMENTS, DB_NAME } from "../../../lib/mongodb"; import { WithId, Document } from "mongodb"; -import { Server } from 'ws' -export const config = { - api: { - bodyParser: false, // Disable body parser to handle WebSocket upgrade - }, -}; - -export default function SOCKET( +export function SOCKET( client: import('ws').WebSocket, request: import('http').IncomingMessage, server: import('ws').WebSocketServer From 87ebe757265f894320e0ff037840554c365f34cc Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 16:34:19 
-0500 Subject: [PATCH 121/198] add socket test --- apps/frontend/app/api/socket.tsx | 40 ++++++++++++++++++++++++++++ apps/frontend/app/dashboard/page.tsx | 2 +- 2 files changed, 41 insertions(+), 1 deletion(-) create mode 100644 apps/frontend/app/api/socket.tsx diff --git a/apps/frontend/app/api/socket.tsx b/apps/frontend/app/api/socket.tsx new file mode 100644 index 00000000..9684e8c6 --- /dev/null +++ b/apps/frontend/app/api/socket.tsx @@ -0,0 +1,40 @@ +export function GET() { + const headers = new Headers(); + headers.set('Connection', 'Upgrade'); + headers.set('Upgrade', 'websocket'); + return new Response('Upgrade Required', { status: 426, headers }); + } + + export function SOCKET( + client: import('ws').WebSocket, + _request: import('node:http').IncomingMessage, + server: import('ws').WebSocketServer, + ) { + const { send, broadcast } = createHelpers(client, server); + + // When a new client connects broadcast a connect message + broadcast({ author: 'Server', content: 'A new client has connected.' }); + send({ author: 'Server', content: 'Welcome!' }); + + // Relay any message back to other clients + client.on('message', broadcast); + + // When this client disconnects broadcast a disconnect message + client.on('close', () => { + broadcast({ author: 'Server', content: 'A client has disconnected.' }); + }); + } + + function createHelpers( + client: import('ws').WebSocket, + server: import('ws').WebSocketServer, + ) { + const send = (payload: unknown) => client.send(JSON.stringify(payload)); + const broadcast = (payload: unknown) => { + if (payload instanceof Buffer) payload = payload.toString(); + if (typeof payload !== 'string') payload = JSON.stringify(payload); + for (const other of server.clients) + if (other !== client) other.send(String(payload)); + }; + return { send, broadcast }; + } \ No newline at end of file diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index ff9b7e9f..26914953 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -207,7 +207,7 @@ export default function DashboardPage() { // var experiments = await fetchExperiments(userId); // setExperiments(experiments); // }, 2500) - const socket = new WebSocket(`http://${window.location.host}/api/experiments/listen?uid=${userId}`); + const socket = new WebSocket(`ws://${window.location.host}/api/experiments/listen?uid=${userId}`); socket.onopen = () => { console.log('Connected to WebSocket server'); From b11b637f62d03d4f6f829754025ceae0bf9554ef Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 16:38:39 -0500 Subject: [PATCH 122/198] move --- apps/frontend/{app => pages}/api/experiments/listen.tsx | 0 apps/frontend/{app => pages}/api/socket.tsx | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename apps/frontend/{app => pages}/api/experiments/listen.tsx (100%) rename apps/frontend/{app => pages}/api/socket.tsx (100%) diff --git a/apps/frontend/app/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx similarity index 100% rename from apps/frontend/app/api/experiments/listen.tsx rename to apps/frontend/pages/api/experiments/listen.tsx diff --git a/apps/frontend/app/api/socket.tsx b/apps/frontend/pages/api/socket.tsx similarity index 100% rename from apps/frontend/app/api/socket.tsx rename to apps/frontend/pages/api/socket.tsx From fd4504d181e51e488ee67a11b1a070b862851d15 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 16:42:55 -0500 Subject: [PATCH 123/198] Revert "move" This 
reverts commit b11b637f62d03d4f6f829754025ceae0bf9554ef. --- apps/frontend/{pages => app}/api/experiments/listen.tsx | 0 apps/frontend/{pages => app}/api/socket.tsx | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename apps/frontend/{pages => app}/api/experiments/listen.tsx (100%) rename apps/frontend/{pages => app}/api/socket.tsx (100%) diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/app/api/experiments/listen.tsx similarity index 100% rename from apps/frontend/pages/api/experiments/listen.tsx rename to apps/frontend/app/api/experiments/listen.tsx diff --git a/apps/frontend/pages/api/socket.tsx b/apps/frontend/app/api/socket.tsx similarity index 100% rename from apps/frontend/pages/api/socket.tsx rename to apps/frontend/app/api/socket.tsx From ad83febfbaa06db59da46ba3c6ed50f8ec69f483 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 16:42:58 -0500 Subject: [PATCH 124/198] Revert "add socket test" This reverts commit 87ebe757265f894320e0ff037840554c365f34cc. --- apps/frontend/app/api/socket.tsx | 40 ---------------------------- apps/frontend/app/dashboard/page.tsx | 2 +- 2 files changed, 1 insertion(+), 41 deletions(-) delete mode 100644 apps/frontend/app/api/socket.tsx diff --git a/apps/frontend/app/api/socket.tsx b/apps/frontend/app/api/socket.tsx deleted file mode 100644 index 9684e8c6..00000000 --- a/apps/frontend/app/api/socket.tsx +++ /dev/null @@ -1,40 +0,0 @@ -export function GET() { - const headers = new Headers(); - headers.set('Connection', 'Upgrade'); - headers.set('Upgrade', 'websocket'); - return new Response('Upgrade Required', { status: 426, headers }); - } - - export function SOCKET( - client: import('ws').WebSocket, - _request: import('node:http').IncomingMessage, - server: import('ws').WebSocketServer, - ) { - const { send, broadcast } = createHelpers(client, server); - - // When a new client connects broadcast a connect message - broadcast({ author: 'Server', content: 'A new client has connected.' }); - send({ author: 'Server', content: 'Welcome!' }); - - // Relay any message back to other clients - client.on('message', broadcast); - - // When this client disconnects broadcast a disconnect message - client.on('close', () => { - broadcast({ author: 'Server', content: 'A client has disconnected.' 
}); - }); - } - - function createHelpers( - client: import('ws').WebSocket, - server: import('ws').WebSocketServer, - ) { - const send = (payload: unknown) => client.send(JSON.stringify(payload)); - const broadcast = (payload: unknown) => { - if (payload instanceof Buffer) payload = payload.toString(); - if (typeof payload !== 'string') payload = JSON.stringify(payload); - for (const other of server.clients) - if (other !== client) other.send(String(payload)); - }; - return { send, broadcast }; - } \ No newline at end of file diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index 26914953..ff9b7e9f 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -207,7 +207,7 @@ export default function DashboardPage() { // var experiments = await fetchExperiments(userId); // setExperiments(experiments); // }, 2500) - const socket = new WebSocket(`ws://${window.location.host}/api/experiments/listen?uid=${userId}`); + const socket = new WebSocket(`http://${window.location.host}/api/experiments/listen?uid=${userId}`); socket.onopen = () => { console.log('Connected to WebSocket server'); From de040dc909eba2a9b3888a0ad1e92c2d4e962afc Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 16:43:01 -0500 Subject: [PATCH 125/198] Revert "move to apps folder" This reverts commit 2abc2f139da454c0d671c7db2f78beb1117a8919. --- apps/frontend/{app => pages}/api/experiments/listen.tsx | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) rename apps/frontend/{app => pages}/api/experiments/listen.tsx (96%) diff --git a/apps/frontend/app/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx similarity index 96% rename from apps/frontend/app/api/experiments/listen.tsx rename to apps/frontend/pages/api/experiments/listen.tsx index 4bff5d78..f5fde260 100644 --- a/apps/frontend/app/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -1,7 +1,14 @@ import clientPromise, { COLLECTION_EXPERIMENTS, DB_NAME } from "../../../lib/mongodb"; import { WithId, Document } from "mongodb"; +import { Server } from 'ws' -export function SOCKET( +export const config = { + api: { + bodyParser: false, // Disable body parser to handle WebSocket upgrade + }, +}; + +export default function SOCKET( client: import('ws').WebSocket, request: import('http').IncomingMessage, server: import('ws').WebSocketServer From cf209173771503d3b72924072eda734ddc6168ab Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 16:43:18 -0500 Subject: [PATCH 126/198] Revert "Update page.tsx" This reverts commit 4813b4250d4c1323c7581ebd99f97650f2e7f486. 
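The scheme that this revert restores is part of a longer back-and-forth in these commits: in browsers the WebSocket constructor has traditionally accepted only ws: or wss: URLs (an http: or https: scheme is usually rejected with a SyntaxError), and a page served over HTTPS has to use wss: or the connection is blocked as mixed content. A small sketch of scheme-aware client code, reusing userId, setExperiments and ExperimentData from the surrounding dashboard diffs, would look roughly like this:

    const wsProtocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
    const socket = new WebSocket(`${wsProtocol}//${window.location.host}/api/experiments/listen?uid=${userId}`);

    socket.onmessage = (event) => {
      if (event.data === 'heartbeat') return; // ignore keep-alive frames
      setExperiments(JSON.parse(event.data) as ExperimentData[]);
    };
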
--- apps/frontend/app/dashboard/page.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index ff9b7e9f..a0752dac 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -207,7 +207,7 @@ export default function DashboardPage() { // var experiments = await fetchExperiments(userId); // setExperiments(experiments); // }, 2500) - const socket = new WebSocket(`http://${window.location.host}/api/experiments/listen?uid=${userId}`); + const socket = new WebSocket(`https://${window.location.host}/api/experiments/listen?uid=${userId}`); socket.onopen = () => { console.log('Connected to WebSocket server'); From fd16b5b78034646dd0f1d4e2daf658d0fd7e447b Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 16:43:20 -0500 Subject: [PATCH 127/198] Revert "Update page.tsx" This reverts commit ae9b7e49b81dc85be73d24bc9fbeb3d85c916d6a. --- apps/frontend/app/dashboard/page.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index a0752dac..ff9b7e9f 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -207,7 +207,7 @@ export default function DashboardPage() { // var experiments = await fetchExperiments(userId); // setExperiments(experiments); // }, 2500) - const socket = new WebSocket(`https://${window.location.host}/api/experiments/listen?uid=${userId}`); + const socket = new WebSocket(`http://${window.location.host}/api/experiments/listen?uid=${userId}`); socket.onopen = () => { console.log('Connected to WebSocket server'); From 85338bfe1a430b62f459cf0049842837304c26f3 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 16:43:22 -0500 Subject: [PATCH 128/198] Revert "Update page.tsx" This reverts commit 0a5525e4743f0edeee106f4c98df9c998b41b4a2. --- apps/frontend/app/dashboard/page.tsx | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index ff9b7e9f..60c957bc 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -207,7 +207,8 @@ export default function DashboardPage() { // var experiments = await fetchExperiments(userId); // setExperiments(experiments); // }, 2500) - const socket = new WebSocket(`http://${window.location.host}/api/experiments/listen?uid=${userId}`); + const protocol = window.location.protocol === "https:" ? "wss:" : "ws:"; // Use wss if HTTPS + const socket = new WebSocket(`${protocol}//${window.location.host}/api/experiments/listen?uid=${userId}`); socket.onopen = () => { console.log('Connected to WebSocket server'); From a7bb6ca0cd572962043e39099e1fa9c54feb761b Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 16:43:24 -0500 Subject: [PATCH 129/198] Revert "Update listen.tsx" This reverts commit b0dd25ac38f6df19bf32388fa1629dfe5f4a4799. 
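The export shape matters here because of how next-ws discovers handlers: as far as its examples show, the plugin looks for a route file that exports a function literally named SOCKET (with an ordinary GET answering non-upgrade requests), and that file is expected to live under the App Router rather than pages/api, which is presumably why the surrounding commits keep toggling the export and moving the file. A hypothetical route file in that shape, with the path and the payload being assumptions rather than anything committed here:

    // app/api/experiments/listen/route.ts (hypothetical placement)
    export function GET() {
      return new Response('Upgrade Required', { status: 426 }); // plain HTTP requests are refused
    }

    export function SOCKET(
      client: import('ws').WebSocket,
      request: import('http').IncomingMessage,
      _server: import('ws').WebSocketServer,
    ) {
      const uid = new URL(request.url ?? '/', 'http://localhost').searchParams.get('uid');
      client.send(JSON.stringify({ connected: true, uid })); // placeholder payload
      client.on('close', () => {
        // tear down any change streams or timers tied to this connection
      });
    }
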
--- apps/frontend/pages/api/experiments/listen.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index f5fde260..4e815582 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -8,7 +8,7 @@ export const config = { }, }; -export default function SOCKET( +export function SOCKET( client: import('ws').WebSocket, request: import('http').IncomingMessage, server: import('ws').WebSocketServer From 2f371b1089b141917d998f3cdfcf2684b4d34d5f Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 16:43:26 -0500 Subject: [PATCH 130/198] Revert "try next-ws" This reverts commit aed558601c2b8ffdab18992d1b3692bc2e8585ae. --- apps/frontend/app/dashboard/page.tsx | 1 - apps/frontend/package-lock.json | 12 -- apps/frontend/package.json | 1 - .../frontend/pages/api/experiments/listen.tsx | 151 ++++++++---------- 4 files changed, 67 insertions(+), 98 deletions(-) diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index 60c957bc..28e96be2 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -212,7 +212,6 @@ export default function DashboardPage() { socket.onopen = () => { console.log('Connected to WebSocket server'); - socket.send("hello socket!"); }; socket.onmessage = (event) => { diff --git a/apps/frontend/package-lock.json b/apps/frontend/package-lock.json index a48ba1f2..ac23d6df 100644 --- a/apps/frontend/package-lock.json +++ b/apps/frontend/package-lock.json @@ -29,7 +29,6 @@ "joi": "^17.6.4", "mongodb": "^5.2.0", "next": "^15.0.2", - "next-ws": "^1.1.1", "pino": "^7.11.0", "react": "^18.2.0", "react-beautiful-dnd": "^13.1.1", @@ -5646,17 +5645,6 @@ } } }, - "node_modules/next-ws": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/next-ws/-/next-ws-1.1.1.tgz", - "integrity": "sha512-J/wNgcd1lXIkrFbRNP60lNVeAVieIDlxahkQ5jpfX3QJhuLPYLBh/CFvX82nkLUBlXANQbTqg9F9yU4XxjmUIg==", - "license": "MIT", - "peerDependencies": { - "next": ">=13.1.1", - "react": "*", - "ws": "*" - } - }, "node_modules/node-fetch": { "version": "2.6.7", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", diff --git a/apps/frontend/package.json b/apps/frontend/package.json index e737b9d9..6539ad40 100644 --- a/apps/frontend/package.json +++ b/apps/frontend/package.json @@ -32,7 +32,6 @@ "joi": "^17.6.4", "mongodb": "^5.2.0", "next": "^15.0.2", - "next-ws": "^1.1.1", "pino": "^7.11.0", "react": "^18.2.0", "react-beautiful-dnd": "^13.1.1", diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index 4e815582..98c9c96f 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -8,93 +8,76 @@ export const config = { }, }; -export function SOCKET( - client: import('ws').WebSocket, - request: import('http').IncomingMessage, - server: import('ws').WebSocketServer - ) { - console.log('A client connected'); - - client.on('message', (message) => { - console.log('Received message:', message); - client.send(message); +export default async function handler(req, res) { + const { uid } = req.query; + + if (!uid) { + res.status(400).send('User ID is required'); + return; + } + + // Connect to MongoDB + const client = await clientPromise; + const db = client.db(DB_NAME); + const experimentsCollection = db.collection(COLLECTION_EXPERIMENTS); 
+ + // Set up a Change Stream for real-time updates + const pipeline = [{ $match: { "fullDocument.creator": uid } }]; + const options = { fullDocument: "updateLookup" }; + const changeStream = experimentsCollection.watch(pipeline, options); + + // Create a new WebSocket server with `noServer` set to true + const wss = new Server({ noServer: true }); + + wss.on('connection', async (ws) => { + console.log("Made WebSocket connection!"); + + // Set up heartbeat to keep connection alive + const HEARTBEAT_INTERVAL = 2500; // Adjust this as needed (in milliseconds) + const intervalId = setInterval(() => { + ws.send("heartbeat"); // Send heartbeat message + }, HEARTBEAT_INTERVAL); + + // Initial data fetch and send it to the client + const initDocs = await experimentsCollection.find({ creator: uid }).toArray(); + const initArray = convertToExpsArray(initDocs); + ws.send(JSON.stringify(initArray)); + + // Listen to changes in the collection + changeStream.on('change', async () => { + const updatedDocuments = await experimentsCollection.find({ creator: uid }).toArray(); + const result = convertToExpsArray(updatedDocuments); + // Send the updated experiments to the client + ws.send(JSON.stringify(result)); + }); + + // Clean up when WebSocket closes + ws.on('close', () => { + clearInterval(intervalId); // Clear heartbeat interval + changeStream.close(); // Close change stream when connection is closed + }); }); - - client.on('close', () => { - console.log('A client disconnected'); - }); - } - -// export default async function handler(req, res) { -// const { uid } = req.query; - -// if (!uid) { -// res.status(400).send('User ID is required'); -// return; -// } - -// // Connect to MongoDB -// const client = await clientPromise; -// const db = client.db(DB_NAME); -// const experimentsCollection = db.collection(COLLECTION_EXPERIMENTS); - -// // Set up a Change Stream for real-time updates -// const pipeline = [{ $match: { "fullDocument.creator": uid } }]; -// const options = { fullDocument: "updateLookup" }; -// const changeStream = experimentsCollection.watch(pipeline, options); - -// // Create a new WebSocket server with `noServer` set to true -// const wss = new Server({ noServer: true }); - -// wss.on('connection', async (ws) => { -// console.log("Made WebSocket connection!"); - -// // Set up heartbeat to keep connection alive -// const HEARTBEAT_INTERVAL = 2500; // Adjust this as needed (in milliseconds) -// const intervalId = setInterval(() => { -// ws.send("heartbeat"); // Send heartbeat message -// }, HEARTBEAT_INTERVAL); -// // Initial data fetch and send it to the client -// const initDocs = await experimentsCollection.find({ creator: uid }).toArray(); -// const initArray = convertToExpsArray(initDocs); -// ws.send(JSON.stringify(initArray)); - -// // Listen to changes in the collection -// changeStream.on('change', async () => { -// const updatedDocuments = await experimentsCollection.find({ creator: uid }).toArray(); -// const result = convertToExpsArray(updatedDocuments); -// // Send the updated experiments to the client -// ws.send(JSON.stringify(result)); -// }); - -// // Clean up when WebSocket closes -// ws.on('close', () => { -// clearInterval(intervalId); // Clear heartbeat interval -// changeStream.close(); // Close change stream when connection is closed -// }); -// }); - -// // Handle WebSocket upgrade request -// if (!res.writableEnded) { -// res.writeHead(101, { -// 'Connection': 'Upgrade', -// 'Upgrade': 'websocket', -// 'Cache-Control': 'no-cache', -// 'Content-Type': 
'text/plain', -// }); -// } - -// // Handle the WebSocket upgrade and pass the request to the WebSocket server -// wss.handleUpgrade(req, req.socket, Buffer.alloc(0), (ws) => { -// wss.emit('connection', ws, req); -// }); + // Handle WebSocket upgrade request + if (!res.writableEnded) { + res.writeHead(101, { + 'Connection': 'Upgrade', + 'Upgrade': 'websocket', + 'Cache-Control': 'no-cache', + 'Content-Type': 'text/plain', + }); + } + + // Handle the WebSocket upgrade and pass the request to the WebSocket server + wss.handleUpgrade(req, req.socket, Buffer.alloc(0), (ws) => { + wss.emit('connection', ws, req); + }); -// // Clean up when request ends -// req.on("close", () => { -// wss.close(); -// }); -// } + // Clean up when request ends + req.on("close", () => { + wss.close(); + }); +} // Utility function to convert MongoDB documents to the desired structure function convertToExpsArray(arr: WithId[]) { From b270d37c23e0777f1e993521bc25fe93f1d21988 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 16:43:28 -0500 Subject: [PATCH 131/198] Revert "Update listen.tsx" This reverts commit d22df45d814eb6111b031cf38d509eb78248e72b. --- apps/frontend/pages/api/experiments/listen.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index 98c9c96f..30ac899e 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -1,6 +1,6 @@ import clientPromise, { COLLECTION_EXPERIMENTS, DB_NAME } from "../../../lib/mongodb"; import { WithId, Document } from "mongodb"; -import { Server } from 'ws' +import { WebSocket } from 'ws'; export const config = { api: { @@ -27,7 +27,7 @@ export default async function handler(req, res) { const changeStream = experimentsCollection.watch(pipeline, options); // Create a new WebSocket server with `noServer` set to true - const wss = new Server({ noServer: true }); + const wss = new WebSocket.Server({ noServer: true }); wss.on('connection', async (ws) => { console.log("Made WebSocket connection!"); From 377a24cff9631d62090eea954206678f32e5a360 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 16:43:30 -0500 Subject: [PATCH 132/198] Revert "Update listen.tsx" This reverts commit ee68182e15f6236693f9fab286d7f427b3395741. --- apps/frontend/pages/api/experiments/listen.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index 30ac899e..ddf9796a 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -1,6 +1,6 @@ import clientPromise, { COLLECTION_EXPERIMENTS, DB_NAME } from "../../../lib/mongodb"; import { WithId, Document } from "mongodb"; -import { WebSocket } from 'ws'; +import WebSocket from 'ws'; export const config = { api: { From 0ff3652fa8599e15d28412279fe58d93cbd76416 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 16:43:32 -0500 Subject: [PATCH 133/198] Revert "Update listen.tsx" This reverts commit 80c5c8b5fb0ddc3786bd24bce4df67c9ae9f382d. 
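The handler that this revert restores (shown in full below) completes the WebSocket handshake from inside a pages/api route by writing a 101 response and calling wss.handleUpgrade against the request socket. That approach is fragile in Next.js, since the framework has already started treating the request as ordinary HTTP by the time the route executes; the pattern the ws package documents instead is to intercept the HTTP server's upgrade event before Next.js answers, which normally means a custom server entry point. A rough sketch of that wiring, where server.ts, the port and the route prefix are assumptions and not part of this repository:

    import { createServer } from 'node:http';
    import next from 'next';
    import { WebSocketServer } from 'ws';

    const app = next({ dev: process.env.NODE_ENV !== 'production' });
    const handle = app.getRequestHandler();
    const wss = new WebSocketServer({ noServer: true });

    app.prepare().then(() => {
      const server = createServer((req, res) => handle(req, res));

      server.on('upgrade', (req, socket, head) => {
        if (req.url?.startsWith('/api/experiments/listen')) {
          // ws writes the 101 response itself; no res.writeHead(101) is involved
          wss.handleUpgrade(req, socket, head, (ws) => wss.emit('connection', ws, req));
        } else {
          socket.destroy();
        }
      });

      server.listen(3000);
    });
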
--- .../frontend/pages/api/experiments/listen.tsx | 75 +++++++++---------- 1 file changed, 36 insertions(+), 39 deletions(-) diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index ddf9796a..d65028da 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -2,84 +2,81 @@ import clientPromise, { COLLECTION_EXPERIMENTS, DB_NAME } from "../../../lib/mon import { WithId, Document } from "mongodb"; import WebSocket from 'ws'; -export const config = { - api: { - bodyParser: false, // Disable body parser to handle WebSocket upgrade - }, -}; - export default async function handler(req, res) { const { uid } = req.query; if (!uid) { - res.status(400).send('User ID is required'); return; } - // Connect to MongoDB - const client = await clientPromise; - const db = client.db(DB_NAME); - const experimentsCollection = db.collection(COLLECTION_EXPERIMENTS); - - // Set up a Change Stream for real-time updates - const pipeline = [{ $match: { "fullDocument.creator": uid } }]; - const options = { fullDocument: "updateLookup" }; - const changeStream = experimentsCollection.watch(pipeline, options); - - // Create a new WebSocket server with `noServer` set to true const wss = new WebSocket.Server({ noServer: true }); wss.on('connection', async (ws) => { - console.log("Made WebSocket connection!"); + console.log("Made Websocket connection!"); - // Set up heartbeat to keep connection alive - const HEARTBEAT_INTERVAL = 2500; // Adjust this as needed (in milliseconds) + const HEARTBEAT_INTERVAL = 2500; // 5 seconds (adjust this as needed) const intervalId = setInterval(() => { - ws.send("heartbeat"); // Send heartbeat message + // Send a heartbeat message to keep the connection alive + ws.send(":heartbeat"); }, HEARTBEAT_INTERVAL); - // Initial data fetch and send it to the client - const initDocs = await experimentsCollection.find({ creator: uid }).toArray(); + const initDocs = await experimentsCollection + .find({ 'creator': uid }) + .toArray(); const initArray = convertToExpsArray(initDocs); - ws.send(JSON.stringify(initArray)); + ws.send(`data: ${JSON.stringify(initArray)}\n\n`); // Listen to changes in the collection - changeStream.on('change', async () => { - const updatedDocuments = await experimentsCollection.find({ creator: uid }).toArray(); + changeStream.on("change", async () => { + const updatedDocuments = await experimentsCollection + .find({ 'creator': uid }) + .toArray(); + const result = convertToExpsArray(updatedDocuments); // Send the updated experiments to the client - ws.send(JSON.stringify(result)); + ws.send(`data: ${JSON.stringify(result)}\n\n`); }); - // Clean up when WebSocket closes ws.on('close', () => { - clearInterval(intervalId); // Clear heartbeat interval - changeStream.close(); // Close change stream when connection is closed + changeStream.close(); + clearInterval(intervalId); }); }); - // Handle WebSocket upgrade request + // Connect to MongoDB + const client = await clientPromise; + const db = client.db(DB_NAME); + const experimentsCollection = db.collection(COLLECTION_EXPERIMENTS); + + // Set up a Change Stream for real-time updates + const pipeline = [{ $match: { "fullDocument.creator": uid } }]; + const options = { fullDocument: "updateLookup" }; + const changeStream = experimentsCollection.watch(pipeline, options); + if (!res.writableEnded) { res.writeHead(101, { - 'Connection': 'Upgrade', - 'Upgrade': 'websocket', + Connection: 'upgrade', + 'Content-Encoding': 
'none', 'Cache-Control': 'no-cache', 'Content-Type': 'text/plain', + 'Upgrade': 'websocket' }); } - // Handle the WebSocket upgrade and pass the request to the WebSocket server - wss.handleUpgrade(req, req.socket, Buffer.alloc(0), (ws) => { + wss.handleUpgrade(req, req.socket, Buffer.alloc(0), function done(ws) { wss.emit('connection', ws, req); }); - // Clean up when request ends + + + + // Close the change stream and client connection when the request ends req.on("close", () => { wss.close(); }); + } -// Utility function to convert MongoDB documents to the desired structure function convertToExpsArray(arr: WithId[]) { return arr.map((doc: WithId) => ({ id: doc._id.toString(), @@ -109,4 +106,4 @@ function convertToExpsArray(arr: WithId[]) { fails: doc.fails ?? 0, totalExperimentRuns: doc.totalExperimentRuns ?? 0, })); -} +} \ No newline at end of file From a8277e7cae8ef35153675a9ab03f1a9c4b17b4d9 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 16:43:34 -0500 Subject: [PATCH 134/198] Revert "Update page.tsx" This reverts commit ed2b29d7b2f16be496030dc05888e6ba42da6bc8. --- apps/frontend/app/dashboard/page.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index 28e96be2..8c7f1c69 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -207,8 +207,8 @@ export default function DashboardPage() { // var experiments = await fetchExperiments(userId); // setExperiments(experiments); // }, 2500) - const protocol = window.location.protocol === "https:" ? "wss:" : "ws:"; // Use wss if HTTPS - const socket = new WebSocket(`${protocol}//${window.location.host}/api/experiments/listen?uid=${userId}`); + + const socket = new WebSocket(`ws://${window.location.host}/api/experiments/listen?uid=${userId}`); socket.onopen = () => { console.log('Connected to WebSocket server'); From 8d4fcc6d5a2cef66ecb079b496223cfbbb8952aa Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 16:43:37 -0500 Subject: [PATCH 135/198] Revert "fix socket" This reverts commit 0d261ef85e1bd9e1cd40398b4672826e97ad178e. 
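The code brought back by this revert (below) sends data on the server object itself, but in the ws package only individual connections have a send method; the WebSocketServer exposes a clients set to iterate over, and the connection callback has to be passed as an argument to on(). A minimal sketch of that shape, with broadcast being an illustrative helper name:

    import { WebSocketServer, WebSocket } from 'ws';

    function broadcast(wss: WebSocketServer, payload: unknown) {
      const message = typeof payload === 'string' ? payload : JSON.stringify(payload);
      for (const client of wss.clients) {
        if (client.readyState === WebSocket.OPEN) client.send(message); // per-socket send
      }
    }

    const wss = new WebSocketServer({ noServer: true });
    wss.on('connection', (ws) => { // the callback goes inside on()
      ws.send('heartbeat');
    });
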
--- apps/frontend/package-lock.json | 11 ---- apps/frontend/package.json | 1 - .../frontend/pages/api/experiments/listen.tsx | 60 +++++++++---------- 3 files changed, 28 insertions(+), 44 deletions(-) diff --git a/apps/frontend/package-lock.json b/apps/frontend/package-lock.json index ac23d6df..beb13338 100644 --- a/apps/frontend/package-lock.json +++ b/apps/frontend/package-lock.json @@ -44,7 +44,6 @@ "@types/react": "^18.0.26", "@types/react-dom": "^18.0.10", "@types/uuid": "^9.0.0", - "@types/ws": "^8.5.13", "autoprefixer": "^10.4.12", "eslint": "^8.26.0", "eslint-config-google": "^0.14.0", @@ -2404,16 +2403,6 @@ "@types/webidl-conversions": "*" } }, - "node_modules/@types/ws": { - "version": "8.5.13", - "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.13.tgz", - "integrity": "sha512-osM/gWBTPKgHV8XkTunnegTRIsvF6owmf5w+JtAfOw472dptdm0dlGv4xCt6GwQRcC2XVOvvRE/0bAoQcL2QkA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, "node_modules/@types/yargs": { "version": "17.0.24", "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.24.tgz", diff --git a/apps/frontend/package.json b/apps/frontend/package.json index 6539ad40..e262bfed 100644 --- a/apps/frontend/package.json +++ b/apps/frontend/package.json @@ -47,7 +47,6 @@ "@types/react": "^18.0.26", "@types/react-dom": "^18.0.10", "@types/uuid": "^9.0.0", - "@types/ws": "^8.5.13", "autoprefixer": "^10.4.12", "eslint": "^8.26.0", "eslint-config-google": "^0.14.0", diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index d65028da..d1e3b2a0 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -10,38 +10,9 @@ export default async function handler(req, res) { } const wss = new WebSocket.Server({ noServer: true }); - - wss.on('connection', async (ws) => { + wss.on('connection'), () => { console.log("Made Websocket connection!"); - - const HEARTBEAT_INTERVAL = 2500; // 5 seconds (adjust this as needed) - const intervalId = setInterval(() => { - // Send a heartbeat message to keep the connection alive - ws.send(":heartbeat"); - }, HEARTBEAT_INTERVAL); - - const initDocs = await experimentsCollection - .find({ 'creator': uid }) - .toArray(); - const initArray = convertToExpsArray(initDocs); - ws.send(`data: ${JSON.stringify(initArray)}\n\n`); - - // Listen to changes in the collection - changeStream.on("change", async () => { - const updatedDocuments = await experimentsCollection - .find({ 'creator': uid }) - .toArray(); - - const result = convertToExpsArray(updatedDocuments); - // Send the updated experiments to the client - ws.send(`data: ${JSON.stringify(result)}\n\n`); - }); - - ws.on('close', () => { - changeStream.close(); - clearInterval(intervalId); - }); - }); + } // Connect to MongoDB const client = await clientPromise; @@ -68,11 +39,36 @@ export default async function handler(req, res) { }); + const HEARTBEAT_INTERVAL = 2500; // 5 seconds (adjust this as needed) + const intervalId = setInterval(() => { + // Send a heartbeat message to keep the connection alive + wss.send(":heartbeat"); + }, HEARTBEAT_INTERVAL); + + //Create function to listen + + + const initDocs = await experimentsCollection + .find({ 'creator': uid }) + .toArray(); + const initArray = convertToExpsArray(initDocs); + wss.send(`data: ${JSON.stringify(initArray)}\n\n`); + // Listen to changes in the collection + changeStream.on("change", async () => { + const updatedDocuments = await 
experimentsCollection + .find({ 'creator': uid }) + .toArray(); + + const result = convertToExpsArray(updatedDocuments); + // Send the updated experiments to the client + wss.send(`data: ${JSON.stringify(result)}\n\n`); + }); // Close the change stream and client connection when the request ends req.on("close", () => { - wss.close(); + changeStream.close(); + clearInterval(intervalId); }); } From 904fa67edb178f3eafc0e35ca96684c95f1c5baa Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 16:43:39 -0500 Subject: [PATCH 136/198] Revert "update socket server" This reverts commit 50422cc4e177c0c0c740236ad01a2601c3f98c38. --- apps/frontend/next.config.mjs | 11 +- .../frontend/pages/api/experiments/listen.tsx | 139 +++++++++--------- 2 files changed, 78 insertions(+), 72 deletions(-) diff --git a/apps/frontend/next.config.mjs b/apps/frontend/next.config.mjs index 4a3c2ea8..aaffbc0e 100644 --- a/apps/frontend/next.config.mjs +++ b/apps/frontend/next.config.mjs @@ -1,4 +1,7 @@ /** @type {import('next').NextConfig} */ + +const { webSocketUpgrade } = require('./pages/api/experiments/listen'); + const nextConfig = { reactStrictMode: true, output: 'standalone', // For deployment, https://nextjs.org/docs/advanced-features/output-file-tracing @@ -11,7 +14,13 @@ const nextConfig = { pathname: '/img/**' }, ], - } + }, + webpack: (config, { isServer }) => { + if (isServer) { + require('./pages/api/experiments/listen').webSocketUpgrade(config); + } + return config; + }, }; export default nextConfig; diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index d1e3b2a0..de680867 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -1,80 +1,77 @@ +// pages/api/experiments/listen.js import clientPromise, { COLLECTION_EXPERIMENTS, DB_NAME } from "../../../lib/mongodb"; -import { WithId, Document } from "mongodb"; -import WebSocket from 'ws'; - -export default async function handler(req, res) { - const { uid } = req.query; - - if (!uid) { - return; - } - - const wss = new WebSocket.Server({ noServer: true }); - wss.on('connection'), () => { - console.log("Made Websocket connection!"); - } - - // Connect to MongoDB - const client = await clientPromise; - const db = client.db(DB_NAME); - const experimentsCollection = db.collection(COLLECTION_EXPERIMENTS); - - // Set up a Change Stream for real-time updates - const pipeline = [{ $match: { "fullDocument.creator": uid } }]; - const options = { fullDocument: "updateLookup" }; - const changeStream = experimentsCollection.watch(pipeline, options); - - if (!res.writableEnded) { - res.writeHead(101, { - Connection: 'upgrade', - 'Content-Encoding': 'none', - 'Cache-Control': 'no-cache', - 'Content-Type': 'text/plain', - 'Upgrade': 'websocket' +import { WebSocketServer } from 'ws'; + +let wss; // WebSocket server instance + +export const config = { + api: { + bodyParser: false, // Disable body parsing for WebSocket handling + }, +}; + +async function handler(req, res) { + if (!wss) { + // Initialize WebSocket server + wss = new WebSocketServer({ noServer: true }); + + wss.on('connection', async (ws, request) => { + const uid = new URL(request.url, `http://${request.headers.host}`).searchParams.get('uid'); + if (!uid) { + ws.close(); + return; + } + + console.log(`WebSocket connection established for user: ${uid}`); + + // MongoDB Change Stream setup + const client = await clientPromise; + const db = client.db(DB_NAME); + const 
experimentsCollection = db.collection(COLLECTION_EXPERIMENTS); + + const pipeline = [{ $match: { "fullDocument.creator": uid } }]; + const options = { fullDocument: "updateLookup" }; + const changeStream = experimentsCollection.watch(pipeline, options); + + // Initial data fetch and send to client + const initDocs = await experimentsCollection.find({ creator: uid }).toArray(); + ws.send(JSON.stringify(initDocs.map(doc => formatExperiment(doc)))); + + // Listen for MongoDB changes + changeStream.on('change', async () => { + const updatedDocuments = await experimentsCollection.find({ creator: uid }).toArray(); + ws.send(JSON.stringify(updatedDocuments.map(doc => formatExperiment(doc)))); + }); + + ws.on('close', () => { + console.log(`WebSocket connection closed for user: ${uid}`); + changeStream.close(); + }); }); } - wss.handleUpgrade(req, req.socket, Buffer.alloc(0), function done(ws) { - wss.emit('connection', ws, req); - }); - - - const HEARTBEAT_INTERVAL = 2500; // 5 seconds (adjust this as needed) - const intervalId = setInterval(() => { - // Send a heartbeat message to keep the connection alive - wss.send(":heartbeat"); - }, HEARTBEAT_INTERVAL); - - //Create function to listen - - - const initDocs = await experimentsCollection - .find({ 'creator': uid }) - .toArray(); - const initArray = convertToExpsArray(initDocs); - wss.send(`data: ${JSON.stringify(initArray)}\n\n`); - - // Listen to changes in the collection - changeStream.on("change", async () => { - const updatedDocuments = await experimentsCollection - .find({ 'creator': uid }) - .toArray(); - - const result = convertToExpsArray(updatedDocuments); - // Send the updated experiments to the client - wss.send(`data: ${JSON.stringify(result)}\n\n`); - }); + if (req.method === 'GET') { + res.status(200).send('WebSocket server is running'); + } else { + res.status(405).end(); // Method Not Allowed + } +} - // Close the change stream and client connection when the request ends - req.on("close", () => { - changeStream.close(); - clearInterval(intervalId); +// Upgrade WebSocket connection +export function webSocketUpgrade(server) { + server.on('upgrade', (req, socket, head) => { + if (req.url.startsWith('/api/experiments/listen')) { + wss.handleUpgrade(req, socket, head, (ws) => { + wss.emit('connection', ws, req); + }); + } else { + socket.destroy(); + } }); - } -function convertToExpsArray(arr: WithId[]) { - return arr.map((doc: WithId) => ({ +function formatExperiment(doc) { + return { id: doc._id.toString(), name: doc.name || "Untitled", creator: doc.creator || "Unknown", @@ -101,5 +98,5 @@ function convertToExpsArray(arr: WithId[]) { passes: doc.passes ?? 0, fails: doc.fails ?? 0, totalExperimentRuns: doc.totalExperimentRuns ?? 0, - })); -} \ No newline at end of file + }; +} From 24d80585dc466c78c76c2b6936bd1bab8685c5d0 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 16:43:41 -0500 Subject: [PATCH 137/198] Revert "switch to websockets" This reverts commit c6415b5aee95df1a97ddfd462f45b28714ab8939. 
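This revert drops the WebSocket attempt entirely and returns to Server-Sent Events, which is the approach the later commits then tune by disabling the body parser, removing the res.flush() calls and filtering heartbeats on the client. The protocol involved is small: the response stays open with Content-Type: text/event-stream, each message is a data: line terminated by a blank line, and lines starting with a colon are comments that the browser's EventSource silently ignores, which is what makes them usable as keep-alives. A sketch of those pieces over a pages-API response object, with the helper names being illustrative:

    import type { NextApiResponse } from 'next';

    function startStream(res: NextApiResponse) {
      res.writeHead(200, {
        'Content-Type': 'text/event-stream',
        'Cache-Control': 'no-cache, no-transform', // no-transform discourages proxy buffering
        Connection: 'keep-alive',
      });
    }

    function sendEvent(res: NextApiResponse, payload: unknown) {
      res.write(`data: ${JSON.stringify(payload)}\n\n`); // data line plus blank line = one event
    }

    function sendHeartbeat(res: NextApiResponse) {
      res.write(': heartbeat\n\n'); // comment line; never reaches EventSource.onmessage
    }
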
--- apps/frontend/app/dashboard/page.tsx | 32 ++--- apps/frontend/next.config.mjs | 11 +- apps/frontend/package-lock.json | 24 +--- apps/frontend/package.json | 3 +- .../frontend/pages/api/experiments/listen.tsx | 120 +++++++++--------- 5 files changed, 80 insertions(+), 110 deletions(-) diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index 8c7f1c69..85629f1d 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -208,25 +208,27 @@ export default function DashboardPage() { // setExperiments(experiments); // }, 2500) - const socket = new WebSocket(`ws://${window.location.host}/api/experiments/listen?uid=${userId}`); + const eventSource = new EventSource(`/api/experiments/listen?uid=${userId}`) + console.log(eventSource); - socket.onopen = () => { - console.log('Connected to WebSocket server'); - }; + eventSource.onopen = () => { + console.log("SSE open!"); + } - socket.onmessage = (event) => { - const data = JSON.parse(event.data); - console.log('Received data:', data); - // Here you can update your component state with the received data - }; + eventSource.onmessage = (event) => { + console.log("received change!"); + // console.log("data was:" + event.data); + if (event.data) { + setExperiments(JSON.parse(event.data) as ExperimentData[]); + } - socket.onclose = () => { - console.log('WebSocket connection closed'); - }; + } + + // eventSource.onerror = (event) => { + // console.error('SSE Error:', event); + // }; - return () => { - socket.close(); - }; + return () => eventSource.close(); // return () => clearInterval(interval); }, [userId]); diff --git a/apps/frontend/next.config.mjs b/apps/frontend/next.config.mjs index aaffbc0e..4a3c2ea8 100644 --- a/apps/frontend/next.config.mjs +++ b/apps/frontend/next.config.mjs @@ -1,7 +1,4 @@ /** @type {import('next').NextConfig} */ - -const { webSocketUpgrade } = require('./pages/api/experiments/listen'); - const nextConfig = { reactStrictMode: true, output: 'standalone', // For deployment, https://nextjs.org/docs/advanced-features/output-file-tracing @@ -14,13 +11,7 @@ const nextConfig = { pathname: '/img/**' }, ], - }, - webpack: (config, { isServer }) => { - if (isServer) { - require('./pages/api/experiments/listen').webSocketUpgrade(config); - } - return config; - }, + } }; export default nextConfig; diff --git a/apps/frontend/package-lock.json b/apps/frontend/package-lock.json index beb13338..706a1fde 100644 --- a/apps/frontend/package-lock.json +++ b/apps/frontend/package-lock.json @@ -35,8 +35,7 @@ "react-dom": "^18.2.0", "react-icons": "^4.6.0", "tabler-icons-react": "^1.55.0", - "uuid": "^9.0.0", - "ws": "^8.18.0" + "uuid": "^9.0.0" }, "devDependencies": { "@tailwindcss/forms": "^0.5.3", @@ -7637,27 +7636,6 @@ "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, - "node_modules/ws": { - "version": "8.18.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz", - "integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==", - "license": "MIT", - "engines": { - "node": ">=10.0.0" - }, - "peerDependencies": { - "bufferutil": "^4.0.1", - "utf-8-validate": ">=5.0.2" - }, - "peerDependenciesMeta": { - "bufferutil": { - "optional": true - }, - "utf-8-validate": { - "optional": true - } - } - }, "node_modules/y18n": { "version": "5.0.8", "resolved": 
"https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", diff --git a/apps/frontend/package.json b/apps/frontend/package.json index e262bfed..cf0932d0 100644 --- a/apps/frontend/package.json +++ b/apps/frontend/package.json @@ -38,8 +38,7 @@ "react-dom": "^18.2.0", "react-icons": "^4.6.0", "tabler-icons-react": "^1.55.0", - "uuid": "^9.0.0", - "ws": "^8.18.0" + "uuid": "^9.0.0" }, "devDependencies": { "@tailwindcss/forms": "^0.5.3", diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index de680867..66ed6c0d 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -1,77 +1,77 @@ -// pages/api/experiments/listen.js import clientPromise, { COLLECTION_EXPERIMENTS, DB_NAME } from "../../../lib/mongodb"; -import { WebSocketServer } from 'ws'; +import { WithId, Document } from "mongodb"; -let wss; // WebSocket server instance +export const runtime = 'nodejs'; +export const dynamic = "force-dynamic"; -export const config = { - api: { - bodyParser: false, // Disable body parsing for WebSocket handling - }, -}; +export default async function handler(req, res) { + const { uid } = req.query; -async function handler(req, res) { - if (!wss) { - // Initialize WebSocket server - wss = new WebSocketServer({ noServer: true }); + if (!uid){ + return; + } - wss.on('connection', async (ws, request) => { - const uid = new URL(request.url, `http://${request.headers.host}`).searchParams.get('uid'); - if (!uid) { - ws.close(); - return; - } + // Connect to MongoDB + const client = await clientPromise; + const db = client.db(DB_NAME); + const experimentsCollection = db.collection(COLLECTION_EXPERIMENTS); - console.log(`WebSocket connection established for user: ${uid}`); + // Set up a Change Stream for real-time updates + const pipeline = [{ $match: { "fullDocument.creator": uid } }]; + const options = { fullDocument: "updateLookup" }; + const changeStream = experimentsCollection.watch(pipeline, options); - // MongoDB Change Stream setup - const client = await clientPromise; - const db = client.db(DB_NAME); - const experimentsCollection = db.collection(COLLECTION_EXPERIMENTS); + // Set up real-time streaming of changes to the client using SSE + // res.setHeader("Access-Control-Allow-Origin", "*"); + // res.setHeader("Cache-Control", "no-cache"); + // res.setHeader("Connection", "keep-alive"); + // res.setHeader("Content-Type", "text/event-stream"); + res.writeHead(200, { + Connection: 'keep-alive', + 'Content-Encoding': 'none', + 'Cache-Control': 'no-cache', + 'Content-Type': 'text/event-stream', + }); - const pipeline = [{ $match: { "fullDocument.creator": uid } }]; - const options = { fullDocument: "updateLookup" }; - const changeStream = experimentsCollection.watch(pipeline, options); + const HEARTBEAT_INTERVAL = 2500; // 5 seconds (adjust this as needed) + const intervalId = setInterval(() => { + // Send a heartbeat message to keep the connection alive + res.write(': heartbeat\n\n'); + res.flush(); + }, HEARTBEAT_INTERVAL); - // Initial data fetch and send to client - const initDocs = await experimentsCollection.find({ creator: uid }).toArray(); - ws.send(JSON.stringify(initDocs.map(doc => formatExperiment(doc)))); + //Create function to listen - // Listen for MongoDB changes - changeStream.on('change', async () => { - const updatedDocuments = await experimentsCollection.find({ creator: uid }).toArray(); - ws.send(JSON.stringify(updatedDocuments.map(doc => formatExperiment(doc)))); - }); - 
ws.on('close', () => { - console.log(`WebSocket connection closed for user: ${uid}`); - changeStream.close(); - }); - }); - } + const initDocs = await experimentsCollection + .find({ 'creator': uid }) + .toArray(); + const initArray = convertToExpsArray(initDocs); + res.write(`data: ${JSON.stringify(initArray)}\n\n`); + res.flush(); - if (req.method === 'GET') { - res.status(200).send('WebSocket server is running'); - } else { - res.status(405).end(); // Method Not Allowed - } -} + // Listen to changes in the collection + changeStream.on("change", async () => { + const updatedDocuments = await experimentsCollection + .find({ 'creator': uid }) + .toArray(); -// Upgrade WebSocket connection -export function webSocketUpgrade(server) { - server.on('upgrade', (req, socket, head) => { - if (req.url.startsWith('/api/experiments/listen')) { - wss.handleUpgrade(req, socket, head, (ws) => { - wss.emit('connection', ws, req); - }); - } else { - socket.destroy(); - } + const result = convertToExpsArray(updatedDocuments); + // Send the updated experiments to the client + res.write(`data: ${JSON.stringify(result)}\n\n`); }); + + // Close the change stream and client connection when the request ends + req.on("close", () => { + changeStream.close(); + clearInterval(intervalId); + res.end() + }); + } -function formatExperiment(doc) { - return { +function convertToExpsArray(arr: WithId[]) { + return arr.map((doc: WithId) => ({ id: doc._id.toString(), name: doc.name || "Untitled", creator: doc.creator || "Unknown", @@ -98,5 +98,5 @@ function formatExperiment(doc) { passes: doc.passes ?? 0, fails: doc.fails ?? 0, totalExperimentRuns: doc.totalExperimentRuns ?? 0, - }; -} + })); +} \ No newline at end of file From b136ba41d18d6297f9fe95f65d4c16c351aad2a1 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 16:46:46 -0500 Subject: [PATCH 138/198] disable body parser --- apps/frontend/pages/api/experiments/listen.tsx | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index 66ed6c0d..d39abb74 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -4,6 +4,12 @@ import { WithId, Document } from "mongodb"; export const runtime = 'nodejs'; export const dynamic = "force-dynamic"; +export const config = { + api: { + bodyParser: false, // Disables body parsing + }, +}; + export default async function handler(req, res) { const { uid } = req.query; From f1906c1bfc00491980cece8516be20a96c63c80d Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 17:04:02 -0500 Subject: [PATCH 139/198] update sse --- apps/frontend/app/dashboard/page.tsx | 2 +- apps/frontend/pages/api/experiments/listen.tsx | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index 85629f1d..f7f59ed5 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -216,7 +216,7 @@ export default function DashboardPage() { } eventSource.onmessage = (event) => { - console.log("received change!"); + console.log("event.data"); // console.log("data was:" + event.data); if (event.data) { setExperiments(JSON.parse(event.data) as ExperimentData[]); diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index d39abb74..b6e57e3c 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ 
b/apps/frontend/pages/api/experiments/listen.tsx @@ -43,7 +43,6 @@ export default async function handler(req, res) { const intervalId = setInterval(() => { // Send a heartbeat message to keep the connection alive res.write(': heartbeat\n\n'); - res.flush(); }, HEARTBEAT_INTERVAL); //Create function to listen @@ -54,7 +53,6 @@ export default async function handler(req, res) { .toArray(); const initArray = convertToExpsArray(initDocs); res.write(`data: ${JSON.stringify(initArray)}\n\n`); - res.flush(); // Listen to changes in the collection changeStream.on("change", async () => { From 6d2af2d0536125c9431c2a166e1a20535f73c314 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 17:04:43 -0500 Subject: [PATCH 140/198] Update page.tsx --- apps/frontend/app/dashboard/page.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index f7f59ed5..df42a37d 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -216,7 +216,7 @@ export default function DashboardPage() { } eventSource.onmessage = (event) => { - console.log("event.data"); + console.log(event.data); // console.log("data was:" + event.data); if (event.data) { setExperiments(JSON.parse(event.data) as ExperimentData[]); From f86a45ff49a519bc8c74fe0651b36c44609e57c2 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 17:10:43 -0500 Subject: [PATCH 141/198] Update page.tsx --- apps/frontend/app/dashboard/page.tsx | 19 +------------------ 1 file changed, 1 insertion(+), 18 deletions(-) diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index df42a37d..1dd399fe 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -191,23 +191,6 @@ export default function DashboardPage() { return; } console.log("getting exps"); - // listenToExperiments(userId, (newExperimentList) => setExperiments(newExperimentList as ExperimentData[])); // TODO this assumes that all values will be present, which is not true - // console.log(experiments); - - //Initial get of experiments - // async () => { - // var experiments = await fetchExperiments(userId); - // console.log("experiments are") - // console.log(experiments); - // setExperiments(experiments); - // } - - // // Create interval to get experiments - // const interval = setInterval( async () => { - // var experiments = await fetchExperiments(userId); - // setExperiments(experiments); - // }, 2500) - const eventSource = new EventSource(`/api/experiments/listen?uid=${userId}`) console.log(eventSource); @@ -218,7 +201,7 @@ export default function DashboardPage() { eventSource.onmessage = (event) => { console.log(event.data); // console.log("data was:" + event.data); - if (event.data) { + if (event.data !== 'heartbeat') { setExperiments(JSON.parse(event.data) as ExperimentData[]); } From 543acc5b344854fb3026f7a742b34d9551ec0d32 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 17:17:40 -0500 Subject: [PATCH 142/198] update log --- .../app/components/flows/ViewExperiment/ExperimentListing.tsx | 4 ++-- apps/frontend/pages/api/experiments/listen.tsx | 3 --- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx b/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx index 1d956bd5..69cb1834 100644 --- a/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx 
+++ b/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx @@ -66,8 +66,8 @@ export const ExperimentListing = ({ projectinit, onCopyExperiment, onDownloadRes } else { const eventSource = new EventSource(`/api/experiments/subscribe?expId=${project.expId}`); eventSource.onmessage = (event) => { - console.log("received change!"); - if (JSON.parse(event.data) !== ':heartbeat') { + console.log(event.data); + if (JSON.parse(event.data) !== 'heartbeat') { setProject(JSON.parse(event.data) as ExperimentData); } diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index b6e57e3c..0cb72b8a 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -45,9 +45,6 @@ export default async function handler(req, res) { res.write(': heartbeat\n\n'); }, HEARTBEAT_INTERVAL); - //Create function to listen - - const initDocs = await experimentsCollection .find({ 'creator': uid }) .toArray(); From ede1fb9595f7e9a51763020dd5fd454428ab2f81 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 17:22:02 -0500 Subject: [PATCH 143/198] Update ExperimentListing.tsx --- .../app/components/flows/ViewExperiment/ExperimentListing.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx b/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx index 69cb1834..fc57fb69 100644 --- a/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx +++ b/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx @@ -67,7 +67,7 @@ export const ExperimentListing = ({ projectinit, onCopyExperiment, onDownloadRes const eventSource = new EventSource(`/api/experiments/subscribe?expId=${project.expId}`); eventSource.onmessage = (event) => { console.log(event.data); - if (JSON.parse(event.data) !== 'heartbeat') { + if (event.data !== 'heartbeat') { setProject(JSON.parse(event.data) as ExperimentData); } From 160e69a06709fe73e11e4f31666798361d7d90dd Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 19:58:44 -0500 Subject: [PATCH 144/198] cleanup --- .../ViewExperiment/ExperimentListing.tsx | 4 -- apps/frontend/app/dashboard/page.tsx | 14 +---- apps/frontend/lib/mongo_funcs.ts | 51 ------------------- .../pages/api/experiments/subscribe.tsx | 2 +- 4 files changed, 2 insertions(+), 69 deletions(-) delete mode 100644 apps/frontend/lib/mongo_funcs.ts diff --git a/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx b/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx index fc57fb69..99c29aeb 100644 --- a/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx +++ b/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx @@ -66,15 +66,11 @@ export const ExperimentListing = ({ projectinit, onCopyExperiment, onDownloadRes } else { const eventSource = new EventSource(`/api/experiments/subscribe?expId=${project.expId}`); eventSource.onmessage = (event) => { - console.log(event.data); if (event.data !== 'heartbeat') { setProject(JSON.parse(event.data) as ExperimentData); } } - // subscribeToExp(project.expId, (data) => { - // setProject(data as ExperimentData); - // }); } }, [editingCanceled, originalProjectName, project.expId]); diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index 1dd399fe..e9cf273f 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ 
b/apps/frontend/app/dashboard/page.tsx @@ -190,13 +190,7 @@ export default function DashboardPage() { if (!userId) { return; } - console.log("getting exps"); - const eventSource = new EventSource(`/api/experiments/listen?uid=${userId}`) - console.log(eventSource); - - eventSource.onopen = () => { - console.log("SSE open!"); - } + const eventSource = new EventSource(`/api/experiments/listen?uid=${userId}`); eventSource.onmessage = (event) => { console.log(event.data); @@ -207,13 +201,7 @@ export default function DashboardPage() { } - // eventSource.onerror = (event) => { - // console.error('SSE Error:', event); - // }; - return () => eventSource.close(); - - // return () => clearInterval(interval); }, [userId]); const QUEUE_UNKNOWN_LENGTH = -1; diff --git a/apps/frontend/lib/mongo_funcs.ts b/apps/frontend/lib/mongo_funcs.ts deleted file mode 100644 index 778e52ad..00000000 --- a/apps/frontend/lib/mongo_funcs.ts +++ /dev/null @@ -1,51 +0,0 @@ -'use server' - -import clientPromise, { COLLECTION_EXPERIMENTS, DB_NAME } from "./mongodb"; -import { ExperimentData } from "../firebase/db_types"; // Adjust the path -import { Document, WithId } from "mongodb"; - -// Function to listen to all experiments for a specific user and get the latest data every time there's a change -export async function fetchExperiments( - uid: string // User ID to filter experiments by -) { - const client = await clientPromise; - const db = client.db(DB_NAME); - const experimentsCollection = db.collection(COLLECTION_EXPERIMENTS); - - // Fetch all experiments for the given user ID (creator field matches uid) on initial load - const userExperiments = await experimentsCollection - .find({ 'creator': uid }) // Filter experiments by user ID (creator) - .toArray(); - - // Map the initial experiments to the correct format - const experimentsData: ExperimentData[] = userExperiments.map((doc: WithId) => ({ - id: doc._id.toString(), - name: doc.name || "Untitled", - creator: doc.creator || "Unknown", - description: doc.description || "No description", - verbose: doc.verbose ?? false, - workers: doc.workers ?? 0, - expId: doc.expId || "", - trialExtraFile: doc.trialExtraFile || "", - trialResult: doc.trialResult || "", - timeout: doc.timeout ?? 0, - keepLogs: doc.keepLogs ?? false, - scatter: doc.scatter ?? false, - scatterIndVar: doc.scatterIndVar || "", - scatterDepVar: doc.scatterDepVar || "", - dumbTextArea: doc.dumbTextArea || "", - created: doc.created?.toString() || "0", - hyperparameters: doc.hyperparameters ?? {}, - finished: doc.finished ?? false, - estimatedTotalTimeMinutes: doc.estimatedTotalTimeMinutes ?? 0, - expToRun: doc.expToRun ?? 0, - file: doc.file || "", - startedAtEpochMillis: doc.startedAtEpochMillis ?? 0, - finishedAtEpochMilliseconds: doc.finishedAtEpochMilliseconds ?? 0, - passes: doc.passes ?? 0, - fails: doc.fails ?? 0, - totalExperimentRuns: doc.totalExperimentRuns ?? 
0, - })); - - return experimentsData -} diff --git a/apps/frontend/pages/api/experiments/subscribe.tsx b/apps/frontend/pages/api/experiments/subscribe.tsx index 93ee91d6..8bae39ff 100644 --- a/apps/frontend/pages/api/experiments/subscribe.tsx +++ b/apps/frontend/pages/api/experiments/subscribe.tsx @@ -24,7 +24,7 @@ export default async function handler(req, res) { const HEARTBEAT_INTERVAL = 2500; // 5 seconds (adjust this as needed) const intervalId = setInterval(() => { // Send a heartbeat message to keep the connection alive - res.write('data: heartbeat\n\n'); + res.write(': heartbeat\n\n'); }, HEARTBEAT_INTERVAL); const initDocs = await experimentsCollection From 4588d1d80ccacd6bb3088a47f7d649954c068325 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 20:04:30 -0500 Subject: [PATCH 145/198] fix import --- apps/frontend/app/dashboard/page.tsx | 1 - 1 file changed, 1 deletion(-) diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index e9cf273f..eff1966d 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -4,7 +4,6 @@ import NewExperiment, { FormStates } from '../components/flows/AddExperiment/New import { useAuth } from '../../firebase/fbAuth'; import { deleteExperiment } from '../../firebase/db'; import { downloadExperimentResults, downloadExperimentProjectZip, ExperimentDocumentId } from '../../firebase/db'; -import { fetchExperiments } from '../../lib/mongo_funcs'; import { Fragment, useState, useEffect } from 'react'; import { Disclosure, Menu, Transition } from '@headlessui/react'; import { From 9339e3bf6bcaf9c6116e64a015587d0f9bd50c66 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 20:12:16 -0500 Subject: [PATCH 146/198] fix returning of json --- apps/frontend/app/dashboard/page.tsx | 2 -- apps/frontend/firebase/db.ts | 4 ---- 2 files changed, 6 deletions(-) diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index eff1966d..465c6ac3 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -192,8 +192,6 @@ export default function DashboardPage() { const eventSource = new EventSource(`/api/experiments/listen?uid=${userId}`); eventSource.onmessage = (event) => { - console.log(event.data); - // console.log("data was:" + event.data); if (event.data !== 'heartbeat') { setExperiments(JSON.parse(event.data) as ExperimentData[]); } diff --git a/apps/frontend/firebase/db.ts b/apps/frontend/firebase/db.ts index 7d7c2b7e..a3e41b3e 100644 --- a/apps/frontend/firebase/db.ts +++ b/apps/frontend/firebase/db.ts @@ -36,10 +36,6 @@ export const submitExperiment = async (values: Partial, userId: return response.json(); } return Promise.reject(response); - }).then((expId: String) => { - console.log(expId); - }).catch((response: Response) => { - // might need this }); }; From be2648e3a4edddd7abe80e40616bc00be6cf6b79 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 20:17:32 -0500 Subject: [PATCH 147/198] Update db.ts --- apps/frontend/firebase/db.ts | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/apps/frontend/firebase/db.ts b/apps/frontend/firebase/db.ts index a3e41b3e..8c7b01c3 100644 --- a/apps/frontend/firebase/db.ts +++ b/apps/frontend/firebase/db.ts @@ -26,17 +26,13 @@ export const submitExperiment = async (values: Partial, userId: values.finished = false; values.estimatedTotalTimeMinutes = 0; values.totalExperimentRuns = 0; - await 
fetch(`/api/experiments/storeExp`, + const response = await fetch(`/api/experiments/storeExp`, { method: "POST", body: JSON.stringify(values) } - ).then(async (response) => { - if (response?.ok) { - return response.json(); - } - return Promise.reject(response); - }); + ) + return await response.json(); }; // TODO: will use mongo gridfs From e8d8333b958b95e7529df059700c764853946e1c Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 20:24:18 -0500 Subject: [PATCH 148/198] fix delete and cleanup --- .../stepComponents/DispatchStep.tsx | 14 ++------------ apps/frontend/firebase/db.ts | 18 ------------------ .../api/experiments/delete/[expIdToDelete].tsx | 3 ++- 3 files changed, 4 insertions(+), 31 deletions(-) diff --git a/apps/frontend/app/components/flows/AddExperiment/stepComponents/DispatchStep.tsx b/apps/frontend/app/components/flows/AddExperiment/stepComponents/DispatchStep.tsx index d509bf20..4929f374 100644 --- a/apps/frontend/app/components/flows/AddExperiment/stepComponents/DispatchStep.tsx +++ b/apps/frontend/app/components/flows/AddExperiment/stepComponents/DispatchStep.tsx @@ -1,7 +1,7 @@ 'use client' import { Dropzone, DropzoneProps } from '@mantine/dropzone'; -import { submitExperiment, uploadExec } from '../../../../../firebase/db'; +import { submitExperiment } from '../../../../../firebase/db'; import { Group, Text } from '@mantine/core'; import { useAuth } from '../../../../../firebase/fbAuth'; @@ -27,7 +27,7 @@ export const DispatchStep = ({ id, form, ...props }) => { submitExperiment(form.values, userId as string).then(async (json) => { console.log(json); const expId = json['id']; - console.log(`Uploading file for ${expId}:`, files); + console.log(`Uploading file for ${expId}:`); const formData = new FormData(); formData.set("file", files[0]); formData.set("expId", expId); @@ -109,13 +109,3 @@ export const DispatchStep = ({ id, form, ...props }) => { ); }; - -function arrayBufferToBase64(buffer) { - let binary = ''; - const bytes = new Uint8Array(buffer); - for (let i = 0; i < bytes.byteLength; i++) { - binary += String.fromCharCode(bytes[i]); - } - return Buffer.from(binary).toString("base64"); -} - diff --git a/apps/frontend/firebase/db.ts b/apps/frontend/firebase/db.ts index 8c7b01c3..bc67488f 100644 --- a/apps/frontend/firebase/db.ts +++ b/apps/frontend/firebase/db.ts @@ -35,24 +35,6 @@ export const submitExperiment = async (values: Partial, userId: return await response.json(); }; -// TODO: will use mongo gridfs -export const uploadExec = async (id: ExperimentDocumentId, file) => { - const fileRef = ref(storage, `experiment${id}`); - return await uploadBytes(fileRef, file).then((snapshot) => { - console.log('Uploaded file. 
Updating doc...'); - const experimentRef = doc(db, DB_COLLECTION_EXPERIMENTS, id); - updateDoc(experimentRef, { - file: `experiment${id}`, - }).then(() => { - console.log(`Uploaded file for experiment ${id}`); - return true; - }).catch((error) => console.log('Upload doc error: ', error)); - return true; - }).catch((error) => { - console.log('Upload bytes error: ', error); - return false; - }); -}; const downloadArbitraryFile = (url: string, name: string) => { const anchor = document.createElement('a'); diff --git a/apps/frontend/pages/api/experiments/delete/[expIdToDelete].tsx b/apps/frontend/pages/api/experiments/delete/[expIdToDelete].tsx index b9aa7675..1cccebb2 100644 --- a/apps/frontend/pages/api/experiments/delete/[expIdToDelete].tsx +++ b/apps/frontend/pages/api/experiments/delete/[expIdToDelete].tsx @@ -1,6 +1,7 @@ import clientPromise, { DB_NAME, COLLECTION_EXPERIMENTS } from '../../../../lib/mongodb'; import { NextApiHandler } from 'next'; import { ExperimentData } from '../../../../firebase/db_types'; +import { ObjectId } from 'mongodb'; const mongoExpHandler: NextApiHandler = async (req, res) => { const { expIdToDelete } = req.query; @@ -15,7 +16,7 @@ const mongoExpHandler: NextApiHandler = async (req, res) => { const result = await db .collection(COLLECTION_EXPERIMENTS) - .deleteOne({ '_id': expIdToDelete as any }); // Assuming expId is the unique identifier in the collection + .deleteOne({ '_id': new ObjectId(expIdToDelete) }); // Assuming expId is the unique identifier in the collection if (result.deletedCount === 0) { return res.status(404).json({ response: 'Experiment not found' } as any); From a47aac8700b2d776c7c5733fd0a298733779bf8d Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Thu, 7 Nov 2024 20:47:59 -0500 Subject: [PATCH 149/198] try server func for getting exp --- .../flows/AddExperiment/NewExperiment.tsx | 95 ++++++++++++------- apps/frontend/lib/mongodb_funcs.ts | 17 ++++ .../experiments/delete/[expIdToDelete].tsx | 1 + 3 files changed, 78 insertions(+), 35 deletions(-) create mode 100644 apps/frontend/lib/mongodb_funcs.ts diff --git a/apps/frontend/app/components/flows/AddExperiment/NewExperiment.tsx b/apps/frontend/app/components/flows/AddExperiment/NewExperiment.tsx index e2d9edc6..9d5eec3e 100644 --- a/apps/frontend/app/components/flows/AddExperiment/NewExperiment.tsx +++ b/apps/frontend/app/components/flows/AddExperiment/NewExperiment.tsx @@ -16,7 +16,9 @@ import { ConfirmationStep } from './stepComponents/ConfirmationStep'; import { DumbTextArea } from './stepComponents/DumbTextAreaStep'; import { DB_COLLECTION_EXPERIMENTS } from '../../../../firebase/db'; -const DEFAULT_TRIAL_TIMEOUT_SECONDS = 5*60*60; // 5 hours in seconds +import { getDocumentFromId } from '../../../../lib/mongodb_funcs'; + +const DEFAULT_TRIAL_TIMEOUT_SECONDS = 5 * 60 * 60; // 5 hours in seconds export const FormStates = { Closed: -1, @@ -85,39 +87,60 @@ const NewExperiment = ({ formState, setFormState, copyID, setCopyId, ...rest }) useEffect(() => { if (copyID != null) { - const db = getFirestore(firebaseApp); - getDoc(doc(db, DB_COLLECTION_EXPERIMENTS, copyID)).then((docSnap) => { - if (docSnap.exists()) { - const expInfo = docSnap.data(); - const hyperparameters = JSON.parse(expInfo['hyperparameters'])['hyperparameters']; - form.setValues({ - hyperparameters: formList(hyperparameters), - name: expInfo['name'], - description: expInfo['description'], - trialExtraFile: expInfo['trialExtraFile'], - trialResult: expInfo['trialResult'], - verbose: expInfo['verbose'], - workers: 
expInfo['workers'], - scatter: expInfo['scatter'], - dumbTextArea: expInfo['dumbTextArea'], - scatterIndVar: expInfo['scatterIndVar'], - scatterDepVar: expInfo['scatterDepVar'], - timeout: expInfo['timeout'], - keepLogs: expInfo['keepLogs'], - }); - setCopyId(null); - setStatus(FormStates.Info); - console.log('Copied!'); - } else { - console.log('No such document!'); - } - }); + const expInfo = getDocumentFromId(copyID); + if (expInfo) { + const hyperparameters = JSON.parse(expInfo['hyperparameters'])['hyperparameters']; + form.setValues({ + hyperparameters: formList(hyperparameters), + name: expInfo['name'], + description: expInfo['description'], + trialExtraFile: expInfo['trialExtraFile'], + trialResult: expInfo['trialResult'], + verbose: expInfo['verbose'], + workers: expInfo['workers'], + scatter: expInfo['scatter'], + dumbTextArea: expInfo['dumbTextArea'], + scatterIndVar: expInfo['scatterIndVar'], + scatterDepVar: expInfo['scatterDepVar'], + timeout: expInfo['timeout'], + keepLogs: expInfo['keepLogs'], + }); + setCopyId(null); + setStatus(FormStates.Info); + console.log('Copied!'); + } else { + console.log('No such document!'); + } + // getDocumentFromId(copyID).then((docSnap) => { + // if (docSnap.exists()) { + // const expInfo = docSnap.data(); + // const hyperparameters = JSON.parse(expInfo['hyperparameters'])['hyperparameters']; + // form.setValues({ + // hyperparameters: formList(hyperparameters), + // name: expInfo['name'], + // description: expInfo['description'], + // trialExtraFile: expInfo['trialExtraFile'], + // trialResult: expInfo['trialResult'], + // verbose: expInfo['verbose'], + // workers: expInfo['workers'], + // scatter: expInfo['scatter'], + // dumbTextArea: expInfo['dumbTextArea'], + // scatterIndVar: expInfo['scatterIndVar'], + // scatterDepVar: expInfo['scatterDepVar'], + // timeout: expInfo['timeout'], + // keepLogs: expInfo['keepLogs'], + // }); + // setCopyId(null); + // setStatus(FormStates.Info); + // console.log('Copied!'); + // } + // }); } }, [copyID]); // TODO adding form or setCopyId causes render loop? const fields = form.values.hyperparameters.map(({ type, ...rest }, index) => { - return ; + return ; }); const [open, setOpen] = useState(true); @@ -191,7 +214,7 @@ const NewExperiment = ({ formState, setFormState, copyID, setCopyId, ...rest }) ) : status === FormStates.Confirmation ? ( ) : ( - + )}
@@ -230,11 +253,13 @@ const NewExperiment = ({ formState, setFormState, copyID, setCopyId, ...rest })
diff --git a/apps/frontend/firebase/db.ts b/apps/frontend/firebase/db.ts index bc67488f..8d326296 100644 --- a/apps/frontend/firebase/db.ts +++ b/apps/frontend/firebase/db.ts @@ -114,14 +114,10 @@ export const listenToExperiments = (uid: FirebaseUserId, callback: MultipleExper // TODO: Test this! export const deleteExperiment = async (expId: ExperimentDocumentId) => { await fetch(`/api/experiments/delete/${expId}`).then((response) => { - if (response?.ok) { + if (response.ok) { return response.json(); } return Promise.reject(response); - }).then((expId: String) => { - console.log(expId); - }).catch((response: Response) => { - // might need this }); }; From d9acde17615f072164313c79201abf19685a8d29 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 15:43:50 -0500 Subject: [PATCH 155/198] work on copy --- .../app/components/flows/AddExperiment/NewExperiment.tsx | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/apps/frontend/app/components/flows/AddExperiment/NewExperiment.tsx b/apps/frontend/app/components/flows/AddExperiment/NewExperiment.tsx index e4c74189..9b489038 100644 --- a/apps/frontend/app/components/flows/AddExperiment/NewExperiment.tsx +++ b/apps/frontend/app/components/flows/AddExperiment/NewExperiment.tsx @@ -91,8 +91,9 @@ const NewExperiment = ({ formState, setFormState, copyID, setCopyId, ...rest }) getDocumentFromId(copyID).then((expInfo) => { console.log(expInfo); console.log("that was the exp info that was retrieved!"); + console.log(expInfo['hyperparamters']); if (expInfo) { - const hyperparameters = JSON.parse(expInfo['hyperparameters']); + const hyperparameters = expInfo['hyperparameters']; form.setValues({ hyperparameters: formList(hyperparameters), name: expInfo['name'], From 8ca2eef0f04dea7d31bff70a178e4e1822baf845 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 15:45:25 -0500 Subject: [PATCH 156/198] Update runner.py --- apps/runner/runner.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/runner/runner.py b/apps/runner/runner.py index d617a210..9f954ac0 100644 --- a/apps/runner/runner.py +++ b/apps/runner/runner.py @@ -94,7 +94,7 @@ def run_batch(data: IncomingStartRequest): # Parse hyperaparameters into their datatype. 
Required to parse the rest of the experiment data try: - hyperparameters: "dict[str,Parameter]" = parseRawHyperparameterData(json.loads(experimentData['hyperparameters'])['hyperparameters']) + hyperparameters: "dict[str,Parameter]" = parseRawHyperparameterData(experimentData['hyperparameters']) except (KeyError, ValueError) as err: if isinstance(err, KeyError): explogger.error("Error generating hyperparameters - hyperparameters not found in experiment object, aborting") From d4abbf7bb085e353982f8ee438b3e907244689a0 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 15:46:48 -0500 Subject: [PATCH 157/198] listen to all docs for now --- apps/frontend/pages/api/experiments/listen.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index 386fc94c..b5031505 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -23,7 +23,7 @@ export default async function handler(req, res) { const experimentsCollection = db.collection(COLLECTION_EXPERIMENTS); // Set up a Change Stream for real-time updates - const pipeline = [{ $match: { "creator": uid } }]; + const pipeline = []; const options = { fullDocument: "updateLookup" }; const changeStream = experimentsCollection.watch(pipeline, options); From 3981d243ded0c77f590b734188895c081efd5887 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 16:00:23 -0500 Subject: [PATCH 158/198] change to objectID --- apps/frontend/pages/api/experiments/subscribe.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/frontend/pages/api/experiments/subscribe.tsx b/apps/frontend/pages/api/experiments/subscribe.tsx index 8bae39ff..a2355b87 100644 --- a/apps/frontend/pages/api/experiments/subscribe.tsx +++ b/apps/frontend/pages/api/experiments/subscribe.tsx @@ -10,7 +10,7 @@ export default async function handler(req, res) { const experimentsCollection = db.collection(COLLECTION_EXPERIMENTS); // Set up a Change Stream for real-time updates - const pipeline = [{ $match: { "fullDocument._id": expId } }]; + const pipeline = [{ $match: { "fullDocument._id": new ObjectId(expId) } }]; const changeStream = experimentsCollection.watch(pipeline); // Set up real-time streaming of changes to the client using SSE From 407173b28ca8b057c57cec89dc4eb2b86655bafe Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 16:07:15 -0500 Subject: [PATCH 159/198] try this --- apps/frontend/pages/api/experiments/subscribe.tsx | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/apps/frontend/pages/api/experiments/subscribe.tsx b/apps/frontend/pages/api/experiments/subscribe.tsx index a2355b87..8e4d7cd4 100644 --- a/apps/frontend/pages/api/experiments/subscribe.tsx +++ b/apps/frontend/pages/api/experiments/subscribe.tsx @@ -10,7 +10,9 @@ export default async function handler(req, res) { const experimentsCollection = db.collection(COLLECTION_EXPERIMENTS); // Set up a Change Stream for real-time updates - const pipeline = [{ $match: { "fullDocument._id": new ObjectId(expId) } }]; + const pipeline = [ + { $match: { "documentKey._id": new ObjectId(expId) } } + ]; const changeStream = experimentsCollection.watch(pipeline); // Set up real-time streaming of changes to the client using SSE From e06b48c7254b0130f7f3d96e977c4a9efb1d24c6 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 16:33:58 -0500 Subject: [PATCH 160/198] try different 
pipeline --- apps/frontend/pages/api/experiments/listen.tsx | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index b5031505..1d058c51 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -23,7 +23,14 @@ export default async function handler(req, res) { const experimentsCollection = db.collection(COLLECTION_EXPERIMENTS); // Set up a Change Stream for real-time updates - const pipeline = []; + const pipeline = [ + { + $match: { + $expr: { $eq: ["$fullDocument.creator", uid] }, + operationType: { $in: ["insert", "update", "replace"] } + } + } + ]; const options = { fullDocument: "updateLookup" }; const changeStream = experimentsCollection.watch(pipeline, options); From 40c3e786369c3fdcdb72f262592a4541b3bae231 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 16:35:19 -0500 Subject: [PATCH 161/198] add logging --- apps/frontend/app/dashboard/page.tsx | 1 + 1 file changed, 1 insertion(+) diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index 4058d20b..97f29b78 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -717,6 +717,7 @@ const ExperimentList = ({ experiments, onCopyExperiment, onDeleteExperiment }: E > {sortedExperiments?.map((project: ExperimentData) => { + console.log(project.created); if (!includeCompleted && project.finished) { return null; } From 36654244e2f5ec01f62c5b4bb2b3f01436ae208d Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 16:38:58 -0500 Subject: [PATCH 162/198] make empty pipeline again --- apps/frontend/pages/api/experiments/listen.tsx | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index 1d058c51..b5031505 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -23,14 +23,7 @@ export default async function handler(req, res) { const experimentsCollection = db.collection(COLLECTION_EXPERIMENTS); // Set up a Change Stream for real-time updates - const pipeline = [ - { - $match: { - $expr: { $eq: ["$fullDocument.creator", uid] }, - operationType: { $in: ["insert", "update", "replace"] } - } - } - ]; + const pipeline = []; const options = { fullDocument: "updateLookup" }; const changeStream = experimentsCollection.watch(pipeline, options); From 184cd80fff580dbdf0d981ff68c25add9704621a Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 16:41:26 -0500 Subject: [PATCH 163/198] Do not return full doc --- apps/frontend/pages/api/experiments/listen.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index b5031505..4b577893 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -24,7 +24,7 @@ export default async function handler(req, res) { // Set up a Change Stream for real-time updates const pipeline = []; - const options = { fullDocument: "updateLookup" }; + const options = {}; const changeStream = experimentsCollection.watch(pipeline, options); // Set up real-time streaming of changes to the client using SSE From beda3aed98643448f0c14f648c9138d61dbc3ed4 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 
10 Nov 2024 16:44:46 -0500 Subject: [PATCH 164/198] handle deletion --- apps/frontend/pages/api/experiments/listen.tsx | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/apps/frontend/pages/api/experiments/listen.tsx b/apps/frontend/pages/api/experiments/listen.tsx index 4b577893..ba987678 100644 --- a/apps/frontend/pages/api/experiments/listen.tsx +++ b/apps/frontend/pages/api/experiments/listen.tsx @@ -23,7 +23,16 @@ export default async function handler(req, res) { const experimentsCollection = db.collection(COLLECTION_EXPERIMENTS); // Set up a Change Stream for real-time updates - const pipeline = []; + const pipeline = [ + { + $match: { + $or: [ + { "fullDocument.creator": uid }, // Match insert or update with the creator field + { operationType: "delete", "documentKey._id": { $exists: true } } // Handle deletion events + ] + } + } + ]; const options = {}; const changeStream = experimentsCollection.watch(pipeline, options); From 719debd2f1718bd7fcc513cdd95154e6848ed170 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 16:46:53 -0500 Subject: [PATCH 165/198] add some logging --- .../app/components/flows/ViewExperiment/ExperimentListing.tsx | 2 ++ apps/frontend/app/dashboard/page.tsx | 1 - 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx b/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx index 99c29aeb..2a14e255 100644 --- a/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx +++ b/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx @@ -60,6 +60,7 @@ export const ExperimentListing = ({ projectinit, onCopyExperiment, onDownloadRes }; useEffect(() => { + console.log(project.creator); if (editingCanceled) { setProjectName(originalProjectName); // Revert to the original name setEditingCanceled(true); @@ -68,6 +69,7 @@ export const ExperimentListing = ({ projectinit, onCopyExperiment, onDownloadRes eventSource.onmessage = (event) => { if (event.data !== 'heartbeat') { setProject(JSON.parse(event.data) as ExperimentData); + console.log(project.creator); } } diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index 97f29b78..4058d20b 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -717,7 +717,6 @@ const ExperimentList = ({ experiments, onCopyExperiment, onDeleteExperiment }: E > {sortedExperiments?.map((project: ExperimentData) => { - console.log(project.created); if (!includeCompleted && project.finished) { return null; } From d1a064e010c8ca643bf9319468f02aad815f5981 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 16:48:41 -0500 Subject: [PATCH 166/198] force to number --- .../app/components/flows/ViewExperiment/ExperimentListing.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx b/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx index 2a14e255..e99078fb 100644 --- a/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx +++ b/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx @@ -288,7 +288,7 @@ export const ExperimentListing = ({ projectinit, onCopyExperiment, onDownloadRes null }

- Uploaded at {new Date(project['created']).toLocaleString()}
+ Uploaded at {new Date(Number(project['created'])).toLocaleString()}

{project['startedAtEpochMillis'] ?

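A minimal sketch of why the Number() cast in the hunk above is needed, assuming 'created' reaches the client as a stringified epoch-milliseconds value (as produced by convertToExpsArray's doc.created?.toString() in listen.tsx); the sample value below is hypothetical:

    // 'created' as returned by the SSE endpoints: epoch milliseconds, but as a string (hypothetical value)
    const created = "1731272690000";
    new Date(created);                           // parsed as a date string, typically an Invalid Date rather than the timestamp
    new Date(Number(created));                   // a numeric argument is taken as epoch milliseconds, the intended date
    new Date(Number(created)).toLocaleString();  // what the listing renders after this change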
From 505f382ab72480bf4369c4073071e546a691000d Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 16:52:07 -0500 Subject: [PATCH 167/198] fix delete call --- apps/frontend/firebase/db.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/frontend/firebase/db.ts b/apps/frontend/firebase/db.ts index 8d326296..123dc145 100644 --- a/apps/frontend/firebase/db.ts +++ b/apps/frontend/firebase/db.ts @@ -115,7 +115,7 @@ export const listenToExperiments = (uid: FirebaseUserId, callback: MultipleExper export const deleteExperiment = async (expId: ExperimentDocumentId) => { await fetch(`/api/experiments/delete/${expId}`).then((response) => { if (response.ok) { - return response.json(); + return response; } return Promise.reject(response); }); From 8992cfa4eb35b503a276f2c1e91949b2414c6aba Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 17:04:21 -0500 Subject: [PATCH 168/198] change delete to use server action --- apps/frontend/app/dashboard/page.tsx | 6 +++++- apps/frontend/lib/mongodb_funcs.ts | 14 ++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index 4058d20b..1f667396 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -24,6 +24,7 @@ import { ExperimentListing as ExperimentListing } from '../components/flows/View import { ExperimentData } from '../../firebase/db_types'; import { Toggle } from '../components/Toggle'; import { QueueResponse } from '../../pages/api/queue'; +import { deleteDocumentById } from '../../lib/mongodb_funcs'; const navigation = [{ name: 'Admin', href: '#', current: false }]; const userNavigation = [ @@ -365,7 +366,10 @@ export default function DashboardPage() { setCopyId(experimentId); }} onDeleteExperiment={(experimentId) => { - deleteExperiment(experimentId); + // deleteExperiment(experimentId); + deleteDocumentById(experimentId).catch((reason) => { + console.log(`Failed delete, reason: ${reason}`); + }) }} />

{/* Activity feed */} diff --git a/apps/frontend/lib/mongodb_funcs.ts b/apps/frontend/lib/mongodb_funcs.ts index 2477be7e..e424bd9a 100644 --- a/apps/frontend/lib/mongodb_funcs.ts +++ b/apps/frontend/lib/mongodb_funcs.ts @@ -15,3 +15,17 @@ export async function getDocumentFromId(expId: string) { //just return the document return expDoc; } + +export async function deleteDocumentById(expId: string){ + 'use server'; + const client = await clientPromise; + const collection = client.db(DB_NAME).collection(COLLECTION_EXPERIMENTS); + + const deleted = await collection.deleteOne({"_id": new ObjectId(expId)}); + + if(deleted.deletedCount == 0){ + return Promise.reject(`Could not find document with id: ${expId}`); + } + + return Promise.resolve(); +} From 6a7f4b6440bafcb755245278f23f5e4c909788d2 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 17:09:25 -0500 Subject: [PATCH 169/198] add logging for json error --- .../app/components/flows/ViewExperiment/ExperimentListing.tsx | 2 +- apps/frontend/app/dashboard/page.tsx | 1 + apps/frontend/pages/api/experiments/storeExp.tsx | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx b/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx index e99078fb..96182a13 100644 --- a/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx +++ b/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx @@ -68,8 +68,8 @@ export const ExperimentListing = ({ projectinit, onCopyExperiment, onDownloadRes const eventSource = new EventSource(`/api/experiments/subscribe?expId=${project.expId}`); eventSource.onmessage = (event) => { if (event.data !== 'heartbeat') { + console.log(event.data); setProject(JSON.parse(event.data) as ExperimentData); - console.log(project.creator); } } diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index 1f667396..0c1b0c7c 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -194,6 +194,7 @@ export default function DashboardPage() { eventSource.onmessage = (event) => { if (event.data !== 'heartbeat') { + console.log(event.data); setExperiments(JSON.parse(event.data) as ExperimentData[]); } diff --git a/apps/frontend/pages/api/experiments/storeExp.tsx b/apps/frontend/pages/api/experiments/storeExp.tsx index 09566bc3..6e0d14fe 100644 --- a/apps/frontend/pages/api/experiments/storeExp.tsx +++ b/apps/frontend/pages/api/experiments/storeExp.tsx @@ -7,7 +7,7 @@ const mongoExpHandler: NextApiHandler = async (req, res) => { res.status(405).json({ response: 'Method not allowed, use POST' } as any); return; } - + console.log(req.body); const experimentData: Partial = JSON.parse(req.body); if (!experimentData || typeof experimentData !== 'object') { From 68177d9a6a1cf9ee2928aee4b4f9e998ede17feb Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 17:17:45 -0500 Subject: [PATCH 170/198] close when document is deleted --- apps/frontend/pages/api/experiments/subscribe.tsx | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/apps/frontend/pages/api/experiments/subscribe.tsx b/apps/frontend/pages/api/experiments/subscribe.tsx index 8e4d7cd4..08e7b49f 100644 --- a/apps/frontend/pages/api/experiments/subscribe.tsx +++ b/apps/frontend/pages/api/experiments/subscribe.tsx @@ -12,7 +12,7 @@ export default async function handler(req, res) { // Set up a Change Stream for real-time updates const 
pipeline = [ { $match: { "documentKey._id": new ObjectId(expId) } } - ]; + ]; const changeStream = experimentsCollection.watch(pipeline); // Set up real-time streaming of changes to the client using SSE @@ -36,7 +36,15 @@ export default async function handler(req, res) { res.write(`data: ${JSON.stringify(initArray)}\n\n`); // Listen to changes in the collection - changeStream.on("change", async () => { + changeStream.on("change", async (change) => { + if (change.operationType === "delete") { + res.write('event: close\n'); + res.write('data: Connection closed\n\n'); + res.end(); // Close the SSE connection + clearInterval(intervalId); + changeStream.close(); + } + const updatedDocuments = await experimentsCollection .find({ '_id': new ObjectId(expId) }) .toArray(); From f089107798085ee705c9c9d76b6812168fe5d8c1 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 17:25:28 -0500 Subject: [PATCH 171/198] add error handling and cleanup --- .../flows/AddExperiment/NewExperiment.tsx | 59 ------------------- apps/frontend/app/dashboard/page.tsx | 9 ++- .../pages/api/experiments/storeExp.tsx | 2 +- 3 files changed, 8 insertions(+), 62 deletions(-) diff --git a/apps/frontend/app/components/flows/AddExperiment/NewExperiment.tsx b/apps/frontend/app/components/flows/AddExperiment/NewExperiment.tsx index 9b489038..3c748cf8 100644 --- a/apps/frontend/app/components/flows/AddExperiment/NewExperiment.tsx +++ b/apps/frontend/app/components/flows/AddExperiment/NewExperiment.tsx @@ -87,11 +87,7 @@ const NewExperiment = ({ formState, setFormState, copyID, setCopyId, ...rest }) useEffect(() => { if (copyID != null) { - console.log("Getting expinfo!") getDocumentFromId(copyID).then((expInfo) => { - console.log(expInfo); - console.log("that was the exp info that was retrieved!"); - console.log(expInfo['hyperparamters']); if (expInfo) { const hyperparameters = expInfo['hyperparameters']; form.setValues({ @@ -111,66 +107,11 @@ const NewExperiment = ({ formState, setFormState, copyID, setCopyId, ...rest }) }); setCopyId(null); setStatus(FormStates.Info); - console.log("copied exp!"); } else { console.log("Could not get expInfo!!!"); } }) - // async () => { - // const expInfo = await getDocumentFromId(copyID); - // console.log("Getting expinfo!") - // console.log(expInfo); - // console.log("that was the exp info that was retrieved!"); - // if (expInfo) { - // const hyperparameters = JSON.parse(expInfo['hyperparameters']); - // form.setValues({ - // hyperparameters: formList(hyperparameters), - // name: expInfo['name'], - // description: expInfo['description'], - // trialExtraFile: expInfo['trialExtraFile'], - // trialResult: expInfo['trialResult'], - // verbose: expInfo['verbose'], - // workers: expInfo['workers'], - // scatter: expInfo['scatter'], - // dumbTextArea: expInfo['dumbTextArea'], - // scatterIndVar: expInfo['scatterIndVar'], - // scatterDepVar: expInfo['scatterDepVar'], - // timeout: expInfo['timeout'], - // keepLogs: expInfo['keepLogs'], - // }); - // setCopyId(null); - // setStatus(FormStates.Info); - // console.log('Copied!'); - // } else { - // console.log('No such document!'); - // } - // } - - // getDocumentFromId(copyID).then((docSnap) => { - // if (docSnap.exists()) { - // const expInfo = docSnap.data(); - // const hyperparameters = JSON.parse(expInfo['hyperparameters'])['hyperparameters']; - // form.setValues({ - // hyperparameters: formList(hyperparameters), - // name: expInfo['name'], - // description: expInfo['description'], - // trialExtraFile: expInfo['trialExtraFile'], - // 
trialResult: expInfo['trialResult'], - // verbose: expInfo['verbose'], - // workers: expInfo['workers'], - // scatter: expInfo['scatter'], - // dumbTextArea: expInfo['dumbTextArea'], - // scatterIndVar: expInfo['scatterIndVar'], - // scatterDepVar: expInfo['scatterDepVar'], - // timeout: expInfo['timeout'], - // keepLogs: expInfo['keepLogs'], - // }); - // setCopyId(null); - // setStatus(FormStates.Info); - // console.log('Copied!'); - // } - // }); } }, [copyID]); // TODO adding form or setCopyId causes render loop? diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index 0c1b0c7c..3a1e6f12 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -194,8 +194,13 @@ export default function DashboardPage() { eventSource.onmessage = (event) => { if (event.data !== 'heartbeat') { - console.log(event.data); - setExperiments(JSON.parse(event.data) as ExperimentData[]); + try { + setExperiments(JSON.parse(event.data) as ExperimentData[]); + } + catch{ + console.log(`${event.data} was not valid JSON!`); + } + } } diff --git a/apps/frontend/pages/api/experiments/storeExp.tsx b/apps/frontend/pages/api/experiments/storeExp.tsx index 6e0d14fe..09566bc3 100644 --- a/apps/frontend/pages/api/experiments/storeExp.tsx +++ b/apps/frontend/pages/api/experiments/storeExp.tsx @@ -7,7 +7,7 @@ const mongoExpHandler: NextApiHandler = async (req, res) => { res.status(405).json({ response: 'Method not allowed, use POST' } as any); return; } - console.log(req.body); + const experimentData: Partial = JSON.parse(req.body); if (!experimentData || typeof experimentData !== 'object') { From 2eb73508e60563c42a855aacc1c44bb688c042b6 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 17:29:24 -0500 Subject: [PATCH 172/198] change update name to server action --- .../ViewExperiment/ExperimentListing.tsx | 4 ++-- apps/frontend/lib/mongodb_funcs.ts | 22 +++++++++++++++---- 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx b/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx index 96182a13..826463af 100644 --- a/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx +++ b/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx @@ -5,6 +5,7 @@ import { ExperimentDocumentId, updateExperimentName, getCurrentProjectName } fro import { ExperimentData } from '../../../../firebase/db_types'; import { MdEdit, MdPadding } from 'react-icons/md'; import { Timestamp } from 'mongodb'; +import { updateExperimentNameById } from '../../../../lib/mongodb_funcs'; export interface ExperimentListingProps { projectinit: ExperimentData; @@ -46,8 +47,7 @@ export const ExperimentListing = ({ projectinit, onCopyExperiment, onDownloadRes const handleSave = (newProjectName) => { // Update the project name in Firebase with the edited name - updateExperimentName(project.expId, projectName); - + updateExperimentNameById(project.expId, newProjectName); // Exit the editing mode setIsEditing(false); }; diff --git a/apps/frontend/lib/mongodb_funcs.ts b/apps/frontend/lib/mongodb_funcs.ts index e424bd9a..a973fd99 100644 --- a/apps/frontend/lib/mongodb_funcs.ts +++ b/apps/frontend/lib/mongodb_funcs.ts @@ -11,21 +11,35 @@ export async function getDocumentFromId(expId: string) { if (!expDoc) { return Promise.reject(`Could not find document with id: ${expId}`); } - + //just return the document return expDoc; } -export async function 
deleteDocumentById(expId: string){ +export async function deleteDocumentById(expId: string) { 'use server'; const client = await clientPromise; const collection = client.db(DB_NAME).collection(COLLECTION_EXPERIMENTS); - const deleted = await collection.deleteOne({"_id": new ObjectId(expId)}); + const deleted = await collection.deleteOne({ "_id": new ObjectId(expId) }); - if(deleted.deletedCount == 0){ + if (deleted.deletedCount == 0) { return Promise.reject(`Could not find document with id: ${expId}`); } return Promise.resolve(); } + +export async function updateExperimentNameById(expId: string, newExpName: string) { + 'use server'; + const client = await clientPromise; + const collection = client.db(DB_NAME).collection(COLLECTION_EXPERIMENTS); + + const experiment = await collection.updateOne({ '_id': expId as any }, { 'name': newExpName }); + + if(experiment.modifiedCount == 0){ + return Promise.reject(`Could not update document with id: ${expId}`); + } + + return Promise.resolve(); +} From 9ac961b97b51e20ca16ff68092424a186fbfff75 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 17:37:11 -0500 Subject: [PATCH 173/198] update update --- apps/frontend/lib/mongodb_funcs.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/apps/frontend/lib/mongodb_funcs.ts b/apps/frontend/lib/mongodb_funcs.ts index a973fd99..4fbcc8d2 100644 --- a/apps/frontend/lib/mongodb_funcs.ts +++ b/apps/frontend/lib/mongodb_funcs.ts @@ -35,9 +35,9 @@ export async function updateExperimentNameById(expId: string, newExpName: string const client = await clientPromise; const collection = client.db(DB_NAME).collection(COLLECTION_EXPERIMENTS); - const experiment = await collection.updateOne({ '_id': expId as any }, { 'name': newExpName }); - - if(experiment.modifiedCount == 0){ + const experiment = await collection.updateOne({ '_id': expId as any }, { $set: { 'name': newExpName } }); + + if (experiment.modifiedCount == 0) { return Promise.reject(`Could not update document with id: ${expId}`); } From 2d8785bdbc2f76adee133312b00d0c00e4e5ef69 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 17:43:16 -0500 Subject: [PATCH 174/198] Update ExperimentListing.tsx --- .../app/components/flows/ViewExperiment/ExperimentListing.tsx | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx b/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx index 826463af..1aaa7a2c 100644 --- a/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx +++ b/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx @@ -47,7 +47,9 @@ export const ExperimentListing = ({ projectinit, onCopyExperiment, onDownloadRes const handleSave = (newProjectName) => { // Update the project name in Firebase with the edited name - updateExperimentNameById(project.expId, newProjectName); + updateExperimentNameById(project.expId, newProjectName).catch((reason) =>{ + console.log(`Failed to update experiment name, reason: ${reason}`); + }); // Exit the editing mode setIsEditing(false); }; From 8326ab84343ca2682bf76a468d973c248b8d17a0 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 17:46:50 -0500 Subject: [PATCH 175/198] Update frontend.Dockerfile --- apps/frontend/frontend.Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/frontend/frontend.Dockerfile b/apps/frontend/frontend.Dockerfile index 1fa7c33d..b9aa46bb 100644 --- 
a/apps/frontend/frontend.Dockerfile +++ b/apps/frontend/frontend.Dockerfile @@ -40,7 +40,7 @@ RUN npm run build FROM base AS runner WORKDIR /app -ENV NODE_ENV=production +ENV NODE_ENV=development RUN addgroup --system --gid 1001 nodejs RUN adduser --system --uid 1001 nextjs From 235f7998ea139a608b2cfe19b10db12496e5be63 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 17:53:17 -0500 Subject: [PATCH 176/198] fix update --- apps/frontend/frontend.Dockerfile | 2 +- apps/frontend/lib/mongodb_funcs.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/apps/frontend/frontend.Dockerfile b/apps/frontend/frontend.Dockerfile index b9aa46bb..1fa7c33d 100644 --- a/apps/frontend/frontend.Dockerfile +++ b/apps/frontend/frontend.Dockerfile @@ -40,7 +40,7 @@ RUN npm run build FROM base AS runner WORKDIR /app -ENV NODE_ENV=development +ENV NODE_ENV=production RUN addgroup --system --gid 1001 nodejs RUN adduser --system --uid 1001 nextjs diff --git a/apps/frontend/lib/mongodb_funcs.ts b/apps/frontend/lib/mongodb_funcs.ts index 4fbcc8d2..ebddfd28 100644 --- a/apps/frontend/lib/mongodb_funcs.ts +++ b/apps/frontend/lib/mongodb_funcs.ts @@ -35,7 +35,7 @@ export async function updateExperimentNameById(expId: string, newExpName: string const client = await clientPromise; const collection = client.db(DB_NAME).collection(COLLECTION_EXPERIMENTS); - const experiment = await collection.updateOne({ '_id': expId as any }, { $set: { 'name': newExpName } }); + const experiment = await collection.updateOne({ '_id': new ObjectId(expId) }, { $set: { 'name': newExpName } }); if (experiment.modifiedCount == 0) { return Promise.reject(`Could not update document with id: ${expId}`); From b576ff78bf299a05a7356ae25279d7c77aeb4b98 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 18:10:52 -0500 Subject: [PATCH 177/198] strip out firestore and firebase storage --- .../stepComponents/DispatchStep.tsx | 3 - .../ViewExperiment/ExperimentListing.tsx | 5 +- apps/frontend/app/dashboard/page.tsx | 10 +-- apps/frontend/firebase/db.ts | 88 +------------------ 4 files changed, 9 insertions(+), 97 deletions(-) diff --git a/apps/frontend/app/components/flows/AddExperiment/stepComponents/DispatchStep.tsx b/apps/frontend/app/components/flows/AddExperiment/stepComponents/DispatchStep.tsx index 4929f374..ddc3dc4e 100644 --- a/apps/frontend/app/components/flows/AddExperiment/stepComponents/DispatchStep.tsx +++ b/apps/frontend/app/components/flows/AddExperiment/stepComponents/DispatchStep.tsx @@ -23,11 +23,8 @@ export const DispatchStep = ({ id, form, ...props }) => { const onDropFile = (files: Parameters[0]) => { setLoading(true); - console.log('Submitting Experiment'); submitExperiment(form.values, userId as string).then(async (json) => { - console.log(json); const expId = json['id']; - console.log(`Uploading file for ${expId}:`); const formData = new FormData(); formData.set("file", files[0]); formData.set("expId", expId); diff --git a/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx b/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx index 1aaa7a2c..e49b4678 100644 --- a/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx +++ b/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx @@ -1,7 +1,7 @@ /* eslint-disable no-mixed-spaces-and-tabs */ import { ChevronRightIcon } from '@heroicons/react/24/solid'; import { useEffect, useState } from 'react'; -import { ExperimentDocumentId, updateExperimentName, 
getCurrentProjectName } from '../../../../firebase/db'; +import { ExperimentDocumentId} from '../../../../firebase/db'; import { ExperimentData } from '../../../../firebase/db_types'; import { MdEdit, MdPadding } from 'react-icons/md'; import { Timestamp } from 'mongodb'; @@ -69,8 +69,7 @@ export const ExperimentListing = ({ projectinit, onCopyExperiment, onDownloadRes } else { const eventSource = new EventSource(`/api/experiments/subscribe?expId=${project.expId}`); eventSource.onmessage = (event) => { - if (event.data !== 'heartbeat') { - console.log(event.data); + if (event.data !== 'heartbeat' && event.data) { setProject(JSON.parse(event.data) as ExperimentData); } diff --git a/apps/frontend/app/dashboard/page.tsx b/apps/frontend/app/dashboard/page.tsx index 3a1e6f12..5b8580d0 100644 --- a/apps/frontend/app/dashboard/page.tsx +++ b/apps/frontend/app/dashboard/page.tsx @@ -2,8 +2,7 @@ import NewExperiment, { FormStates } from '../components/flows/AddExperiment/NewExperiment'; import { useAuth } from '../../firebase/fbAuth'; -import { deleteExperiment } from '../../firebase/db'; -import { downloadExperimentResults, downloadExperimentProjectZip, ExperimentDocumentId } from '../../firebase/db'; +import { downloadExperimentResults, downloadExperimentProjectZip } from '../../firebase/db'; import { Fragment, useState, useEffect } from 'react'; import { Disclosure, Menu, Transition } from '@headlessui/react'; import { @@ -248,7 +247,7 @@ export default function DashboardPage() { }, []); - const [copyID, setCopyId] = useState(null as unknown as ExperimentDocumentId); // TODO refactor copy system to not need this middleman + const [copyID, setCopyId] = useState(null as unknown as string); // TODO refactor copy system to not need this middleman const [formState, setFormState] = useState(FormStates.Closed); const [label, setLabel] = useState('New Experiment'); useEffect(() => { @@ -436,8 +435,8 @@ export default function DashboardPage() { export interface ExperimentListProps { experiments: ExperimentData[]; - onCopyExperiment: (experiment: ExperimentDocumentId) => void; - onDeleteExperiment: (experiment: ExperimentDocumentId) => void; + onCopyExperiment: (experiment: string) => void; + onDeleteExperiment: (experiment: string) => void; } const SortingOptions = { @@ -469,7 +468,6 @@ const ExperimentList = ({ experiments, onCopyExperiment, onDeleteExperiment }: E // Sort the experiments based on the selected sorting option useEffect(() => { - console.log(experiments); switch (sortBy) { case SortingOptions.NAME: setSortedExperiments([...experiments].sort(sortByName)); diff --git a/apps/frontend/firebase/db.ts b/apps/frontend/firebase/db.ts index 123dc145..bcf42ac6 100644 --- a/apps/frontend/firebase/db.ts +++ b/apps/frontend/firebase/db.ts @@ -1,26 +1,11 @@ /* eslint-disable no-mixed-spaces-and-tabs */ -import { firebaseApp } from './firebaseClient'; -import { getFirestore, updateDoc, deleteDoc, getDoc } from 'firebase/firestore'; -import { collection, setDoc, doc, query, where, onSnapshot } from 'firebase/firestore'; -import { getDownloadURL, getStorage, ref, uploadBytes } from 'firebase/storage'; import { ExperimentData } from './db_types'; import { ResultsCsv, ProjectZip } from '../lib/mongodb_types'; export const DB_COLLECTION_EXPERIMENTS = 'Experiments'; -// Initialize Cloud Firestore and get a reference to the service -const db = getFirestore(firebaseApp); -const storage = getStorage(firebaseApp); -const experiments = collection(db, DB_COLLECTION_EXPERIMENTS); - -export type FirebaseId = string; 
-export type FirebaseDocumentId = FirebaseId; -export type FirebaseUserId = FirebaseId; - -export type ExperimentDocumentId = FirebaseDocumentId; - // test -export const submitExperiment = async (values: Partial, userId: FirebaseUserId) => { +export const submitExperiment = async (values: Partial, userId: string) => { values.creator = userId; values.created = Date.now(); values.finished = false; @@ -45,7 +30,7 @@ const downloadArbitraryFile = (url: string, name: string) => { document.body.removeChild(anchor); }; -export const downloadExperimentResults = async (expId: ExperimentDocumentId) => { +export const downloadExperimentResults = async (expId: string) => { console.log(`Downloading results for ${expId}...`); await fetch(`/api/download/csv/${expId}`).then((response) => { if (response?.ok) { @@ -69,7 +54,7 @@ export const downloadExperimentResults = async (expId: ExperimentDocumentId) => }); }; -export const downloadExperimentProjectZip = async (expId: ExperimentDocumentId) => { +export const downloadExperimentProjectZip = async (expId: string) => { console.log(`Downloading project zip for ${expId}...`); await fetch(`/api/download/zip/${expId}`).then((response) => { if (response?.ok) { @@ -92,70 +77,3 @@ export const downloadExperimentProjectZip = async (expId: ExperimentDocumentId) }); }); }; - - - - -export interface MultipleExperimentSubscribeCallback { - (data: Partial[]): any; -} - -// TODO: Convert from Firestore MongoDB -export const listenToExperiments = (uid: FirebaseUserId, callback: MultipleExperimentSubscribeCallback) => { - const q = query(experiments, where('creator', '==', uid)); - const unsubscribe = onSnapshot(q, (snapshot) => { - const result = [] as unknown as Partial[]; - snapshot.forEach((doc) => result.push(doc.data())); - callback(result); - }); - return unsubscribe; -}; - -// TODO: Test this! -export const deleteExperiment = async (expId: ExperimentDocumentId) => { - await fetch(`/api/experiments/delete/${expId}`).then((response) => { - if (response.ok) { - return response; - } - return Promise.reject(response); - }); -}; - -// TODO: Test this! -export const updateExperimentName = async (expId, updatedName) => { - await fetch(`/api/experiments/updatename/${expId}/${updatedName}`).then((response) => { - if (response?.ok) { - return response.json(); - } - return Promise.reject(response); - }).then((expId: String) => { - console.log(expId); - }).catch((response: Response) => { - // might need this - }); -}; - - -// Function to get the project name from Firebase -// TODO: Convert from Firestore to MongoDB -// Not being used right now; we have [expIdToGet].tsx, which might render this useless anyway. 
-export const getCurrentProjectName = async (projectId) => { - try { - // Reference the project document in Firebase - const experimentRef = doc(db, DB_COLLECTION_EXPERIMENTS, projectId); - - // Get the project document - const docSnapshot = await getDoc(experimentRef); - - if (docSnapshot.exists()) { - // Extract and return the project name - return docSnapshot.data().name; - } else { - console.error('Project document does not exist.'); - return null; - } - } catch (error) { - console.error('Error getting project name:', error); - return null; - } -}; From 6c1cbb34462f9d83f91e91b48c39d2ee691b5dbd Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 18:15:53 -0500 Subject: [PATCH 178/198] fix imports --- .../app/components/flows/AddExperiment/NewExperiment.tsx | 3 --- .../flows/ViewExperiment/ExperimentListing.tsx | 9 ++++----- apps/frontend/firebase/db_types.ts | 5 ++--- apps/frontend/lib/mongodb.ts | 2 -- 4 files changed, 6 insertions(+), 13 deletions(-) diff --git a/apps/frontend/app/components/flows/AddExperiment/NewExperiment.tsx b/apps/frontend/app/components/flows/AddExperiment/NewExperiment.tsx index 3c748cf8..0c6b4c82 100644 --- a/apps/frontend/app/components/flows/AddExperiment/NewExperiment.tsx +++ b/apps/frontend/app/components/flows/AddExperiment/NewExperiment.tsx @@ -5,9 +5,6 @@ import Parameter from '../../Parameter'; import { useForm, formList, joiResolver } from '@mantine/form'; import { experimentSchema } from '../../../../utils/validators'; -import { firebaseApp } from '../../../../firebase/firebaseClient'; -import { getDoc, getFirestore, doc } from 'firebase/firestore'; - import { DispatchStep } from './stepComponents/DispatchStep'; import { InformationStep } from './stepComponents/InformationStep'; import { ParamStep } from './stepComponents/ParamStep'; diff --git a/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx b/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx index e49b4678..1ef69359 100644 --- a/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx +++ b/apps/frontend/app/components/flows/ViewExperiment/ExperimentListing.tsx @@ -1,7 +1,6 @@ /* eslint-disable no-mixed-spaces-and-tabs */ import { ChevronRightIcon } from '@heroicons/react/24/solid'; import { useEffect, useState } from 'react'; -import { ExperimentDocumentId} from '../../../../firebase/db'; import { ExperimentData } from '../../../../firebase/db_types'; import { MdEdit, MdPadding } from 'react-icons/md'; import { Timestamp } from 'mongodb'; @@ -9,10 +8,10 @@ import { updateExperimentNameById } from '../../../../lib/mongodb_funcs'; export interface ExperimentListingProps { projectinit: ExperimentData; - onCopyExperiment: (experimentId: ExperimentDocumentId) => void; - onDownloadResults: (experimentId: ExperimentDocumentId) => Promise; - onDownloadProjectZip: (experimentId: ExperimentDocumentId) => Promise; - onDeleteExperiment: (experimentId: ExperimentDocumentId) => void; + onCopyExperiment: (experimentId: string) => void; + onDownloadResults: (experimentId: string) => Promise; + onDownloadProjectZip: (experimentId: string) => Promise; + onDeleteExperiment: (experimentId: string) => void; } diff --git a/apps/frontend/firebase/db_types.ts b/apps/frontend/firebase/db_types.ts index d64d4f2a..eb50603e 100644 --- a/apps/frontend/firebase/db_types.ts +++ b/apps/frontend/firebase/db_types.ts @@ -1,5 +1,4 @@ import { StorageReference } from 'firebase/storage'; -import { ExperimentDocumentId, FirebaseUserId } from './db'; export 
type FileName = string; @@ -53,12 +52,12 @@ export interface StringHyperparameter extends GenericHyperparameter { export interface ExperimentData { // TODO make sure these match what python expects as well - creator: FirebaseUserId; + creator: string; name: string; description: string; verbose: boolean; workers: number; - expId: ExperimentDocumentId; // TODO do we want to ensure this doesn't get stored in fb itself? + expId: string; // TODO do we want to ensure this doesn't get stored in fb itself? trialExtraFile: FileName; trialResult: FileName; timeout: number; diff --git a/apps/frontend/lib/mongodb.ts b/apps/frontend/lib/mongodb.ts index 4003b5d6..8d37e79b 100644 --- a/apps/frontend/lib/mongodb.ts +++ b/apps/frontend/lib/mongodb.ts @@ -1,8 +1,6 @@ // THIS IS CURRENTLY UNUSED, FIGURE OUT HOW TO IMPORT IT INTO api/experiments/ import { MongoClient } from 'mongodb'; import { getEnvVar } from '../utils/env'; -import { ExperimentData } from '../firebase/db_types'; -import { ExperimentDocumentId } from '../firebase/db'; // Adapted from https://github.com/vercel/next.js/tree/canary/examples/with-mongodb From aa5a8dcc4c981df3b7636a3a587097512024b143 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 20:48:21 -0500 Subject: [PATCH 179/198] add nfs server and mongodb helm --- helm_packages/README.md | 3 ++ helm_packages/mongodb-helm/deploy.sh | 5 ++ helm_packages/mongodb-helm/pvs.yaml | 47 +++++++++++++++++++ helm_packages/mongodb-helm/storage-class.yaml | 11 +++++ helm_packages/mongodb-helm/values.yaml | 30 ++++++++++++ helm_packages/nfs-server/deploy.sh | 12 +++++ helm_packages/nfs-server/nfs-server-pv.yaml | 24 ++++++++++ helm_packages/nfs-server/nfs-server.yaml | 35 ++++++++++++++ helm_packages/nfs-server/nfs-service.yaml | 12 +++++ .../nfs-server/nfs-storageclass.yaml | 9 ++++ 10 files changed, 188 insertions(+) create mode 100644 helm_packages/README.md create mode 100644 helm_packages/mongodb-helm/deploy.sh create mode 100644 helm_packages/mongodb-helm/pvs.yaml create mode 100644 helm_packages/mongodb-helm/storage-class.yaml create mode 100644 helm_packages/mongodb-helm/values.yaml create mode 100644 helm_packages/nfs-server/deploy.sh create mode 100644 helm_packages/nfs-server/nfs-server-pv.yaml create mode 100644 helm_packages/nfs-server/nfs-server.yaml create mode 100644 helm_packages/nfs-server/nfs-service.yaml create mode 100644 helm_packages/nfs-server/nfs-storageclass.yaml diff --git a/helm_packages/README.md b/helm_packages/README.md new file mode 100644 index 00000000..3ed7db43 --- /dev/null +++ b/helm_packages/README.md @@ -0,0 +1,3 @@ +# Helm + +This is a way for us to deploy easy to configure images to our kubernetes cluster. We will be using this to create our storage provider for the cluster as well as the MongoDB. diff --git a/helm_packages/mongodb-helm/deploy.sh b/helm_packages/mongodb-helm/deploy.sh new file mode 100644 index 00000000..83ac8282 --- /dev/null +++ b/helm_packages/mongodb-helm/deploy.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +echo 'Deploying helm package for MongoDB replica set!' 
+ +helm install glados-mongodb oci://registry-1.docker.io/bitnamicharts/mongodb -f ./mongodb-helm/values.yaml \ No newline at end of file diff --git a/helm_packages/mongodb-helm/pvs.yaml b/helm_packages/mongodb-helm/pvs.yaml new file mode 100644 index 00000000..d3a7da68 --- /dev/null +++ b/helm_packages/mongodb-helm/pvs.yaml @@ -0,0 +1,47 @@ +# kinda building off of this https://kubernetes.io/docs/concepts/storage/volumes/#local +apiVersion: v1 +kind: PersistentVolume +metadata: + name: mongo-pv-0 +spec: + storageClassName: local-storage + accessModes: + - ReadWriteOnce + persistentVolumeReclaimPolicy: Retain + capacity: + storage: 1Gi + volumeMode: Filesystem + hostPath: + path: "/srv/data/mongo-1" # change to where it is locally (For minikube, may need to mount: https://stackoverflow.com/questions/63559779/kubernetes-minikube-persistent-volume-local-filesystem-storage-location) +--- +# kinda building off of this https://kubernetes.io/docs/concepts/storage/volumes/#local +apiVersion: v1 +kind: PersistentVolume +metadata: + name: mongo-pv-1 +spec: + storageClassName: local-storage + accessModes: + - ReadWriteOnce + persistentVolumeReclaimPolicy: Retain + capacity: + storage: 1Gi + volumeMode: Filesystem + hostPath: + path: "/srv/data/mongo-2" # change to where it is locally (For minikube, may need to mount: https://stackoverflow.com/questions/63559779/kubernetes-minikube-persistent-volume-local-filesystem-storage-location) +--- +# kinda building off of this https://kubernetes.io/docs/concepts/storage/volumes/#local +apiVersion: v1 +kind: PersistentVolume +metadata: + name: mongo-pv-2 +spec: + storageClassName: local-storage + accessModes: + - ReadWriteOnce + persistentVolumeReclaimPolicy: Retain + capacity: + storage: 1Gi + volumeMode: Filesystem + hostPath: + path: "/srv/data/mongo-3" # change to where it is locally (For minikube, may need to mount: https://stackoverflow.com/questions/63559779/kubernetes-minikube-persistent-volume-local-filesystem-storage-location) diff --git a/helm_packages/mongodb-helm/storage-class.yaml b/helm_packages/mongodb-helm/storage-class.yaml new file mode 100644 index 00000000..4c1f46ec --- /dev/null +++ b/helm_packages/mongodb-helm/storage-class.yaml @@ -0,0 +1,11 @@ +# storage-class.yaml +apiVersion: storage.k8s.io/v1 +kind: StorageClass +metadata: + name: glados-storage +provisioner: gp2 +parameters: + type: gp2 +reclaimPolicy: Retain +volumeBindingMode: WaitForFirstConsumer + diff --git a/helm_packages/mongodb-helm/values.yaml b/helm_packages/mongodb-helm/values.yaml new file mode 100644 index 00000000..7d74d8a1 --- /dev/null +++ b/helm_packages/mongodb-helm/values.yaml @@ -0,0 +1,30 @@ +# values.yaml +architecture: replicaset + +replicaSet: + enabled: true + replicas: 3 # Set the number of replicas + waitForPrimary: true + +persistence: + enabled: true + storageClass: nfs-client # Use the storage class created earlier + accessModes: + - ReadWriteOnce + size: 10Gi # Adjust the size as needed + +auth: + enabled: true + rootPassword: 'password123' + usernames: ['adminuser'] + passwords: ['password123'] + databases: ['admin'] + +service: + nameOverride: glados-service-mongodb + +externalAccess: + enabled: true + service: + type: NodePort + nodePorts: [30000, 30001] diff --git a/helm_packages/nfs-server/deploy.sh b/helm_packages/nfs-server/deploy.sh new file mode 100644 index 00000000..881395eb --- /dev/null +++ b/helm_packages/nfs-server/deploy.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +echo 'Setting up nfs-server!' 
+ +kubectl apply -f nfs-server-pv.yaml +kubectl apply -f nfs-server.yaml +kubectl apply -f nfs-service.yaml + +helm repo add nfs-subdir-external-provisioner https://kubernetes-sigs.github.io/nfs-subdir-external-provisioner/ +helm install nfs-provisioner nfs-subdir-external-provisioner/nfs-subdir-external-provisioner \ + --set nfs.server=nfs-service.default.svc.cluster.local \ + --set nfs.path=/exports diff --git a/helm_packages/nfs-server/nfs-server-pv.yaml b/helm_packages/nfs-server/nfs-server-pv.yaml new file mode 100644 index 00000000..8d26624a --- /dev/null +++ b/helm_packages/nfs-server/nfs-server-pv.yaml @@ -0,0 +1,24 @@ +apiVersion: v1 +kind: PersistentVolume +metadata: + name: nfs-pv +spec: + capacity: + storage: 100Gi + accessModes: + - ReadWriteOnce + persistentVolumeReclaimPolicy: Retain + hostPath: + path: "/srv/data/nfs-server" # Adjust path as needed on your host + +--- +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: nfs-pvc +spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 100Gi diff --git a/helm_packages/nfs-server/nfs-server.yaml b/helm_packages/nfs-server/nfs-server.yaml new file mode 100644 index 00000000..e2639c64 --- /dev/null +++ b/helm_packages/nfs-server/nfs-server.yaml @@ -0,0 +1,35 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: nfs-server + labels: + app: nfs-server +spec: + replicas: 1 + selector: + matchLabels: + app: nfs-server + template: + metadata: + labels: + app: nfs-server + spec: + containers: + - name: nfs-server + image: itsthenetwork/nfs-server-alpine:latest + ports: + - name: nfs + containerPort: 2049 + securityContext: + privileged: true + env: + - name: SHARED_DIRECTORY + value: /exports + volumeMounts: + - name: nfs-storage + mountPath: /exports + volumes: + - name: nfs-storage + persistentVolumeClaim: + claimName: nfs-pvc + nodeName: glados-db diff --git a/helm_packages/nfs-server/nfs-service.yaml b/helm_packages/nfs-server/nfs-service.yaml new file mode 100644 index 00000000..9f6bd696 --- /dev/null +++ b/helm_packages/nfs-server/nfs-service.yaml @@ -0,0 +1,12 @@ +apiVersion: v1 +kind: Service +metadata: + name: nfs-service +spec: + selector: + app: nfs-server + ports: + - protocol: TCP + port: 2049 + targetPort: 2049 + clusterIP: None # Creates a headless service diff --git a/helm_packages/nfs-server/nfs-storageclass.yaml b/helm_packages/nfs-server/nfs-storageclass.yaml new file mode 100644 index 00000000..ad089eae --- /dev/null +++ b/helm_packages/nfs-server/nfs-storageclass.yaml @@ -0,0 +1,9 @@ +apiVersion: storage.k8s.io/v1 +kind: StorageClass +metadata: + name: nfs-client +provisioner: nfs-subdir-external-provisioner +parameters: + pathPattern: "${.PVC.namespace}/${.PVC.name}" # Organize storage by namespace and PVC name +reclaimPolicy: Retain +volumeBindingMode: Immediate From 995cbeab1429db53e6a144e4373bd528cbdced2f Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 20:49:17 -0500 Subject: [PATCH 180/198] disable mongodb from init.py --- kubernetes_init/init.py | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/kubernetes_init/init.py b/kubernetes_init/init.py index a3e7e84a..a6a642fc 100644 --- a/kubernetes_init/init.py +++ b/kubernetes_init/init.py @@ -12,8 +12,8 @@ def clean_up(): init_frontend.cleanup_deployment() print("- Backend") init_backend.cleanup_backend() - print("- MongoDB") - init_mongodb.cleanup_deployment() + # print("- MongoDB") + # init_mongodb.cleanup_deployment() print("Cleaning up: Endpoints") 
if "--production" in argv[1:]: @@ -21,18 +21,18 @@ def clean_up(): init_frontend.cleanup_service() print("- Backend") init_backend.cleanup_service() - print("- MongoDB") - init_mongodb.cleanup_service() + # print("- MongoDB") + # init_mongodb.cleanup_service() print("Cleaning up: Secrets") init_secret.cleanup_secret() - print("Cleaning up: MongoDB Storage Class") - init_mongodb.cleanup_storage_class() + # print("Cleaning up: MongoDB Storage Class") + # init_mongodb.cleanup_storage_class() - print("Cleaning up: MongoDB Persistent Volume") - init_mongodb.cleanup_persistent_volume() - init_mongodb.cleanup_persistent_volume_claim() + # print("Cleaning up: MongoDB Persistent Volume") + # init_mongodb.cleanup_persistent_volume() + # init_mongodb.cleanup_persistent_volume_claim() print("Cleaning up: Backend Job Creation Cluster Permissions") init_backend.cleanup_cluster_role() @@ -51,8 +51,8 @@ def set_up(): init_frontend.setup_deployment() print("- Backend") init_backend.setup_deployment() - print("- MongoDB") - init_mongodb.setup_deployment() + # print("- MongoDB") + # init_mongodb.setup_deployment() print("Setting up: Endpoints") if "--production" in argv[1:]: @@ -60,15 +60,15 @@ def set_up(): init_frontend.setup_service() print("- Backend") init_backend.setup_service() - print("- MongoDB") - init_mongodb.setup_service() + # print("- MongoDB") + # init_mongodb.setup_service() - print("Setting up: MongoDB Storage Class") - init_mongodb.setup_storage_class() + # print("Setting up: MongoDB Storage Class") + # init_mongodb.setup_storage_class() - print("Setting up: MongoDB Persistent Volume") - init_mongodb.setup_persistent_volume() - init_mongodb.setup_persistent_volume_claim() + # print("Setting up: MongoDB Persistent Volume") + # init_mongodb.setup_persistent_volume() + # init_mongodb.setup_persistent_volume_claim() def main(): """Function that gets called when the file is ran""" From 39ff4bb1fbb4b779c05b7149ee66c1b603e3681f Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 20:50:52 -0500 Subject: [PATCH 181/198] Update ipaddresspool.yaml --- kube_config/ipaddresspool.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kube_config/ipaddresspool.yaml b/kube_config/ipaddresspool.yaml index d0daf414..400c05ef 100644 --- a/kube_config/ipaddresspool.yaml +++ b/kube_config/ipaddresspool.yaml @@ -5,4 +5,4 @@ metadata: namespace: metallb-system spec: addresses: - - 137.112.104.33/32 \ No newline at end of file + - 137.112.104.87/32 \ No newline at end of file From 327cc765eaa47bd76561b0995e31cd7e786fc6ff Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 20:51:02 -0500 Subject: [PATCH 182/198] Update ipaddresspool.yaml --- kube_config/ipaddresspool.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/kube_config/ipaddresspool.yaml b/kube_config/ipaddresspool.yaml index 400c05ef..5dca45aa 100644 --- a/kube_config/ipaddresspool.yaml +++ b/kube_config/ipaddresspool.yaml @@ -5,4 +5,5 @@ metadata: namespace: metallb-system spec: addresses: + # dev is 137.112.104.86 - 137.112.104.87/32 \ No newline at end of file From a460cb86c43e3b81c2235c8db2d59c4ab0130179 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 21:32:40 -0500 Subject: [PATCH 183/198] update things --- helm_packages/nfs-server/deploy.sh | 2 +- helm_packages/nfs-server/nfs-server.yaml | 37 ++++++++++++----------- helm_packages/nfs-server/nfs-service.yaml | 1 - 3 files changed, 21 insertions(+), 19 deletions(-) diff --git a/helm_packages/nfs-server/deploy.sh 
b/helm_packages/nfs-server/deploy.sh index 881395eb..d03ec7b5 100644 --- a/helm_packages/nfs-server/deploy.sh +++ b/helm_packages/nfs-server/deploy.sh @@ -8,5 +8,5 @@ kubectl apply -f nfs-service.yaml helm repo add nfs-subdir-external-provisioner https://kubernetes-sigs.github.io/nfs-subdir-external-provisioner/ helm install nfs-provisioner nfs-subdir-external-provisioner/nfs-subdir-external-provisioner \ - --set nfs.server=nfs-service.default.svc.cluster.local \ + --set nfs.server=10.244.0.153:2049 \ --set nfs.path=/exports diff --git a/helm_packages/nfs-server/nfs-server.yaml b/helm_packages/nfs-server/nfs-server.yaml index e2639c64..c8aa3b37 100644 --- a/helm_packages/nfs-server/nfs-server.yaml +++ b/helm_packages/nfs-server/nfs-server.yaml @@ -2,8 +2,6 @@ apiVersion: apps/v1 kind: Deployment metadata: name: nfs-server - labels: - app: nfs-server spec: replicas: 1 selector: @@ -14,22 +12,27 @@ spec: labels: app: nfs-server spec: + initContainers: + - name: create-exports-dir + image: busybox + command: + - sh + - -c + - "mkdir -p /exports && chmod 777 /exports" + volumeMounts: + - mountPath: /exports + name: exports-volume containers: - name: nfs-server - image: itsthenetwork/nfs-server-alpine:latest - ports: - - name: nfs - containerPort: 2049 - securityContext: - privileged: true - env: - - name: SHARED_DIRECTORY - value: /exports + image: quay.io/external_storage/nfs-server-provisioner:latest volumeMounts: - - name: nfs-storage - mountPath: /exports + - mountPath: /exports + name: exports-volume + ports: + - containerPort: 2049 + - containerPort: 111 + - containerPort: 20048 + - containerPort: 32765 volumes: - - name: nfs-storage - persistentVolumeClaim: - claimName: nfs-pvc - nodeName: glados-db + - name: exports-volume + emptyDir: {} diff --git a/helm_packages/nfs-server/nfs-service.yaml b/helm_packages/nfs-server/nfs-service.yaml index 9f6bd696..b6bf4387 100644 --- a/helm_packages/nfs-server/nfs-service.yaml +++ b/helm_packages/nfs-server/nfs-service.yaml @@ -9,4 +9,3 @@ spec: - protocol: TCP port: 2049 targetPort: 2049 - clusterIP: None # Creates a headless service From 880ff82430dbb84d0a3444f7d6009e42dbf5137b Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 21:34:47 -0500 Subject: [PATCH 184/198] Update nfs-server.yaml --- helm_packages/nfs-server/nfs-server.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/helm_packages/nfs-server/nfs-server.yaml b/helm_packages/nfs-server/nfs-server.yaml index c8aa3b37..9bd6d4b8 100644 --- a/helm_packages/nfs-server/nfs-server.yaml +++ b/helm_packages/nfs-server/nfs-server.yaml @@ -24,7 +24,7 @@ spec: name: exports-volume containers: - name: nfs-server - image: quay.io/external_storage/nfs-server-provisioner:latest + image: itsthenetwork/nfs-server-alpine:latest volumeMounts: - mountPath: /exports name: exports-volume From 8302b65d67f3e2b112447fbada95300fedfb0cd3 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 21:35:39 -0500 Subject: [PATCH 185/198] Update nfs-server.yaml --- helm_packages/nfs-server/nfs-server.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/helm_packages/nfs-server/nfs-server.yaml b/helm_packages/nfs-server/nfs-server.yaml index 9bd6d4b8..0a20b9c5 100644 --- a/helm_packages/nfs-server/nfs-server.yaml +++ b/helm_packages/nfs-server/nfs-server.yaml @@ -25,6 +25,9 @@ spec: containers: - name: nfs-server image: itsthenetwork/nfs-server-alpine:latest + env: + - name: SHARED_DIRECTORY + value: "/exports" volumeMounts: - mountPath: /exports name: exports-volume 
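
The helm values introduced above deploy the Bitnami MongoDB chart as a replica set with auth enabled and NodePort external access. For reference, a client reaching such a deployment from outside the cluster typically uses a connection string along these lines; the node hostname and replica set name below are illustrative assumptions rather than values confirmed by these patches, while the NodePorts and credentials mirror the sample values.yaml:

import { MongoClient } from 'mongodb';

// Host name ("glados-db") and replica set name ("rs0") are assumptions for
// illustration; the NodePorts and credentials come from the sample values.yaml above.
const uri =
  'mongodb://adminuser:password123@glados-db:30000,glados-db:30001/admin?replicaSet=rs0';

async function checkConnection() {
  const client = new MongoClient(uri);
  try {
    await client.connect();
    // ping confirms the driver found a reachable primary in the replica set
    await client.db('admin').command({ ping: 1 });
    console.log('MongoDB replica set is reachable');
  } finally {
    await client.close();
  }
}

checkConnection().catch(console.error);

In-cluster clients would normally use the chart's service DNS name instead of NodePorts; the sketch only illustrates the external-access path that the values file enables.
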
From a029a26820a43501744bb60434845d66f7579bab Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 21:43:50 -0500 Subject: [PATCH 186/198] nfs server later --- helm_packages/nfs-server/deploy.sh | 12 ------ helm_packages/nfs-server/nfs-server-pv.yaml | 24 ----------- helm_packages/nfs-server/nfs-server.yaml | 41 ------------------- helm_packages/nfs-server/nfs-service.yaml | 11 ----- .../nfs-server/nfs-storageclass.yaml | 9 ---- 5 files changed, 97 deletions(-) delete mode 100644 helm_packages/nfs-server/deploy.sh delete mode 100644 helm_packages/nfs-server/nfs-server-pv.yaml delete mode 100644 helm_packages/nfs-server/nfs-server.yaml delete mode 100644 helm_packages/nfs-server/nfs-service.yaml delete mode 100644 helm_packages/nfs-server/nfs-storageclass.yaml diff --git a/helm_packages/nfs-server/deploy.sh b/helm_packages/nfs-server/deploy.sh deleted file mode 100644 index d03ec7b5..00000000 --- a/helm_packages/nfs-server/deploy.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash - -echo 'Setting up nfs-server!' - -kubectl apply -f nfs-server-pv.yaml -kubectl apply -f nfs-server.yaml -kubectl apply -f nfs-service.yaml - -helm repo add nfs-subdir-external-provisioner https://kubernetes-sigs.github.io/nfs-subdir-external-provisioner/ -helm install nfs-provisioner nfs-subdir-external-provisioner/nfs-subdir-external-provisioner \ - --set nfs.server=10.244.0.153:2049 \ - --set nfs.path=/exports diff --git a/helm_packages/nfs-server/nfs-server-pv.yaml b/helm_packages/nfs-server/nfs-server-pv.yaml deleted file mode 100644 index 8d26624a..00000000 --- a/helm_packages/nfs-server/nfs-server-pv.yaml +++ /dev/null @@ -1,24 +0,0 @@ -apiVersion: v1 -kind: PersistentVolume -metadata: - name: nfs-pv -spec: - capacity: - storage: 100Gi - accessModes: - - ReadWriteOnce - persistentVolumeReclaimPolicy: Retain - hostPath: - path: "/srv/data/nfs-server" # Adjust path as needed on your host - ---- -apiVersion: v1 -kind: PersistentVolumeClaim -metadata: - name: nfs-pvc -spec: - accessModes: - - ReadWriteOnce - resources: - requests: - storage: 100Gi diff --git a/helm_packages/nfs-server/nfs-server.yaml b/helm_packages/nfs-server/nfs-server.yaml deleted file mode 100644 index 0a20b9c5..00000000 --- a/helm_packages/nfs-server/nfs-server.yaml +++ /dev/null @@ -1,41 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: nfs-server -spec: - replicas: 1 - selector: - matchLabels: - app: nfs-server - template: - metadata: - labels: - app: nfs-server - spec: - initContainers: - - name: create-exports-dir - image: busybox - command: - - sh - - -c - - "mkdir -p /exports && chmod 777 /exports" - volumeMounts: - - mountPath: /exports - name: exports-volume - containers: - - name: nfs-server - image: itsthenetwork/nfs-server-alpine:latest - env: - - name: SHARED_DIRECTORY - value: "/exports" - volumeMounts: - - mountPath: /exports - name: exports-volume - ports: - - containerPort: 2049 - - containerPort: 111 - - containerPort: 20048 - - containerPort: 32765 - volumes: - - name: exports-volume - emptyDir: {} diff --git a/helm_packages/nfs-server/nfs-service.yaml b/helm_packages/nfs-server/nfs-service.yaml deleted file mode 100644 index b6bf4387..00000000 --- a/helm_packages/nfs-server/nfs-service.yaml +++ /dev/null @@ -1,11 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: - name: nfs-service -spec: - selector: - app: nfs-server - ports: - - protocol: TCP - port: 2049 - targetPort: 2049 diff --git a/helm_packages/nfs-server/nfs-storageclass.yaml b/helm_packages/nfs-server/nfs-storageclass.yaml deleted 
file mode 100644 index ad089eae..00000000 --- a/helm_packages/nfs-server/nfs-storageclass.yaml +++ /dev/null @@ -1,9 +0,0 @@ -apiVersion: storage.k8s.io/v1 -kind: StorageClass -metadata: - name: nfs-client -provisioner: nfs-subdir-external-provisioner -parameters: - pathPattern: "${.PVC.namespace}/${.PVC.name}" # Organize storage by namespace and PVC name -reclaimPolicy: Retain -volumeBindingMode: Immediate From c3eca94dff93686fc23dd63dea7efa34f16bbbd9 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 21:48:23 -0500 Subject: [PATCH 187/198] test --- helm_packages/README.md | 2 +- helm_packages/mongodb-helm/storage-class.yaml | 11 -------- helm_packages/mongodb-helm/values.yaml | 25 +++++++++++++------ 3 files changed, 19 insertions(+), 19 deletions(-) delete mode 100644 helm_packages/mongodb-helm/storage-class.yaml diff --git a/helm_packages/README.md b/helm_packages/README.md index 3ed7db43..862f2a71 100644 --- a/helm_packages/README.md +++ b/helm_packages/README.md @@ -1,3 +1,3 @@ # Helm -This is a way for us to deploy easy to configure images to our kubernetes cluster. We will be using this to create our storage provider for the cluster as well as the MongoDB. +This is a way for us to deploy easy to configure images to our kubernetes cluster. We will be using this for just MongoDB for now. diff --git a/helm_packages/mongodb-helm/storage-class.yaml b/helm_packages/mongodb-helm/storage-class.yaml deleted file mode 100644 index 4c1f46ec..00000000 --- a/helm_packages/mongodb-helm/storage-class.yaml +++ /dev/null @@ -1,11 +0,0 @@ -# storage-class.yaml -apiVersion: storage.k8s.io/v1 -kind: StorageClass -metadata: - name: glados-storage -provisioner: gp2 -parameters: - type: gp2 -reclaimPolicy: Retain -volumeBindingMode: WaitForFirstConsumer - diff --git a/helm_packages/mongodb-helm/values.yaml b/helm_packages/mongodb-helm/values.yaml index 7d74d8a1..533c293e 100644 --- a/helm_packages/mongodb-helm/values.yaml +++ b/helm_packages/mongodb-helm/values.yaml @@ -3,22 +3,22 @@ architecture: replicaset replicaSet: enabled: true - replicas: 3 # Set the number of replicas + replicas: 3 # Set the number of replicas waitForPrimary: true persistence: enabled: true - storageClass: nfs-client # Use the storage class created earlier + storageClass: nfs-client # Use the storage class created earlier accessModes: - ReadWriteOnce - size: 10Gi # Adjust the size as needed + size: 10Gi # Adjust the size as needed auth: enabled: true - rootPassword: 'password123' - usernames: ['adminuser'] - passwords: ['password123'] - databases: ['admin'] + rootPassword: "password123" + usernames: ["adminuser"] + passwords: ["password123"] + databases: ["admin"] service: nameOverride: glados-service-mongodb @@ -28,3 +28,14 @@ externalAccess: service: type: NodePort nodePorts: [30000, 30001] + +## Node affinity settings +nodeAffinity: + requiredDuringSchedulingIgnoredDuringExecution: + nodeSelectorTerms: + - matchExpressions: + - key: "kubernetes.io/hostname" # This is the label you want to match + operator: In + values: + - "glados-db" # Replace with the node name you want to run MongoDB + From ef52cd616b0103c0a3e42554eb4f2e1fcd453f67 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 21:48:52 -0500 Subject: [PATCH 188/198] Update deploy.sh --- helm_packages/mongodb-helm/deploy.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/helm_packages/mongodb-helm/deploy.sh b/helm_packages/mongodb-helm/deploy.sh index 83ac8282..9b1d174c 100644 --- a/helm_packages/mongodb-helm/deploy.sh 
+++ b/helm_packages/mongodb-helm/deploy.sh @@ -2,4 +2,4 @@ echo 'Deploying helm package for MongoDB replica set!' -helm install glados-mongodb oci://registry-1.docker.io/bitnamicharts/mongodb -f ./mongodb-helm/values.yaml \ No newline at end of file +helm install glados-mongodb oci://registry-1.docker.io/bitnamicharts/mongodb -f values.yaml \ No newline at end of file From 8a3b2d00ae7ed2d0f38f4d91142a0fac321c737f Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 21:53:27 -0500 Subject: [PATCH 189/198] Update values.yaml --- helm_packages/mongodb-helm/values.yaml | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/helm_packages/mongodb-helm/values.yaml b/helm_packages/mongodb-helm/values.yaml index 533c293e..5dde3a6e 100644 --- a/helm_packages/mongodb-helm/values.yaml +++ b/helm_packages/mongodb-helm/values.yaml @@ -29,13 +29,14 @@ externalAccess: type: NodePort nodePorts: [30000, 30001] -## Node affinity settings -nodeAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - nodeSelectorTerms: - - matchExpressions: - - key: "kubernetes.io/hostname" # This is the label you want to match - operator: In - values: - - "glados-db" # Replace with the node name you want to run MongoDB +affinity: + nodeAffinity: + requiredDuringSchedulingIgnoredDuringExecution: + nodeSelectorTerms: + - matchExpressions: + - key: "kubernetes.io/hostname" # Key for the node selector + operator: In + values: + - "glados-db" # Replace with the name of the node you want to use + From 5187992f5bebc80454d6d772e9492692d2826835 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 21:55:50 -0500 Subject: [PATCH 190/198] Update deploy.sh --- helm_packages/mongodb-helm/deploy.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/helm_packages/mongodb-helm/deploy.sh b/helm_packages/mongodb-helm/deploy.sh index 9b1d174c..63f0a9cd 100644 --- a/helm_packages/mongodb-helm/deploy.sh +++ b/helm_packages/mongodb-helm/deploy.sh @@ -2,4 +2,6 @@ echo 'Deploying helm package for MongoDB replica set!' 
+kubectl apply -f pvs.yaml + helm install glados-mongodb oci://registry-1.docker.io/bitnamicharts/mongodb -f values.yaml \ No newline at end of file From c7ab37b75a35f852b183ca50912e5b6f8e76673f Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 21:58:00 -0500 Subject: [PATCH 191/198] Update pvs.yaml --- helm_packages/mongodb-helm/pvs.yaml | 20 ++------------------ 1 file changed, 2 insertions(+), 18 deletions(-) diff --git a/helm_packages/mongodb-helm/pvs.yaml b/helm_packages/mongodb-helm/pvs.yaml index d3a7da68..f1f552c1 100644 --- a/helm_packages/mongodb-helm/pvs.yaml +++ b/helm_packages/mongodb-helm/pvs.yaml @@ -9,7 +9,7 @@ spec: - ReadWriteOnce persistentVolumeReclaimPolicy: Retain capacity: - storage: 1Gi + storage: 10Gi volumeMode: Filesystem hostPath: path: "/srv/data/mongo-1" # change to where it is locally (For minikube, may need to mount: https://stackoverflow.com/questions/63559779/kubernetes-minikube-persistent-volume-local-filesystem-storage-location) @@ -25,23 +25,7 @@ spec: - ReadWriteOnce persistentVolumeReclaimPolicy: Retain capacity: - storage: 1Gi + storage: 10Gi volumeMode: Filesystem hostPath: path: "/srv/data/mongo-2" # change to where it is locally (For minikube, may need to mount: https://stackoverflow.com/questions/63559779/kubernetes-minikube-persistent-volume-local-filesystem-storage-location) ---- -# kinda building off of this https://kubernetes.io/docs/concepts/storage/volumes/#local -apiVersion: v1 -kind: PersistentVolume -metadata: - name: mongo-pv-2 -spec: - storageClassName: local-storage - accessModes: - - ReadWriteOnce - persistentVolumeReclaimPolicy: Retain - capacity: - storage: 1Gi - volumeMode: Filesystem - hostPath: - path: "/srv/data/mongo-3" # change to where it is locally (For minikube, may need to mount: https://stackoverflow.com/questions/63559779/kubernetes-minikube-persistent-volume-local-filesystem-storage-location) From 7d768023a75916e076633f61e16d4bb1c5a0dd2f Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 21:59:21 -0500 Subject: [PATCH 192/198] Update values.yaml --- helm_packages/mongodb-helm/values.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/helm_packages/mongodb-helm/values.yaml b/helm_packages/mongodb-helm/values.yaml index 5dde3a6e..8f8a48d5 100644 --- a/helm_packages/mongodb-helm/values.yaml +++ b/helm_packages/mongodb-helm/values.yaml @@ -8,7 +8,7 @@ replicaSet: persistence: enabled: true - storageClass: nfs-client # Use the storage class created earlier + storageClass: local-storage # Use the storage class created earlier accessModes: - ReadWriteOnce size: 10Gi # Adjust the size as needed From 1b5e1d7cf8e887f46dcc4d875f8bb6e71d847843 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 22:10:07 -0500 Subject: [PATCH 193/198] update front end deployment --- kubernetes_init/frontend/deployment-frontend.yaml | 2 +- kubernetes_init/frontend/service-frontend.yaml | 7 ++----- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/kubernetes_init/frontend/deployment-frontend.yaml b/kubernetes_init/frontend/deployment-frontend.yaml index 52c912a6..56c9f497 100644 --- a/kubernetes_init/frontend/deployment-frontend.yaml +++ b/kubernetes_init/frontend/deployment-frontend.yaml @@ -20,7 +20,7 @@ spec: image: gladospipeline/glados-frontend:main imagePullPolicy: Always ports: - - containerPort: 0000 + - containerPort: 3000 # hostPort: 0000 env: - name: BACKEND_PORT diff --git a/kubernetes_init/frontend/service-frontend.yaml 
b/kubernetes_init/frontend/service-frontend.yaml index de2741f7..3215bcd8 100644 --- a/kubernetes_init/frontend/service-frontend.yaml +++ b/kubernetes_init/frontend/service-frontend.yaml @@ -9,11 +9,8 @@ spec: selector: app: GLADOS tier: frontend - type: LoadBalancer + # type: LoadBalancer ports: - - name: https - port: 80 - targetPort: 3000 - - name: http + - protocol: TCP port: 3000 targetPort: 3000 \ No newline at end of file From 17f572827cea8c9f89cad2403987da6e271848f9 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 22:18:45 -0500 Subject: [PATCH 194/198] update service --- helm_packages/mongodb-helm/values.yaml | 2 +- kubernetes_init/frontend/service-frontend.yaml | 15 ++++++--------- 2 files changed, 7 insertions(+), 10 deletions(-) diff --git a/helm_packages/mongodb-helm/values.yaml b/helm_packages/mongodb-helm/values.yaml index 8f8a48d5..0a7867fb 100644 --- a/helm_packages/mongodb-helm/values.yaml +++ b/helm_packages/mongodb-helm/values.yaml @@ -27,7 +27,7 @@ externalAccess: enabled: true service: type: NodePort - nodePorts: [30000, 30001] + nodePorts: [30001, 30002] affinity: nodeAffinity: diff --git a/kubernetes_init/frontend/service-frontend.yaml b/kubernetes_init/frontend/service-frontend.yaml index 3215bcd8..9d34a015 100644 --- a/kubernetes_init/frontend/service-frontend.yaml +++ b/kubernetes_init/frontend/service-frontend.yaml @@ -2,15 +2,12 @@ apiVersion: v1 kind: Service metadata: name: glados-service-frontend - namespace: default - labels: - app: GLADOS spec: selector: - app: GLADOS - tier: frontend - # type: LoadBalancer + app: GLADOS # This should match the pod labels ports: - - protocol: TCP - port: 3000 - targetPort: 3000 \ No newline at end of file + - protocol: TCP + port: 3000 # The service port (accessible inside the cluster) + targetPort: 3000 # The container's port + nodePort: 30000 # The external port + type: NodePort \ No newline at end of file From d0ca270bf5e621e4b40deb5902910f0a742a647d Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 22:26:54 -0500 Subject: [PATCH 195/198] fix --- helm_packages/mongodb-helm/values.yaml | 2 +- kubernetes_init/frontend/service-frontend.yaml | 9 ++++----- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/helm_packages/mongodb-helm/values.yaml b/helm_packages/mongodb-helm/values.yaml index 0a7867fb..8f8a48d5 100644 --- a/helm_packages/mongodb-helm/values.yaml +++ b/helm_packages/mongodb-helm/values.yaml @@ -27,7 +27,7 @@ externalAccess: enabled: true service: type: NodePort - nodePorts: [30001, 30002] + nodePorts: [30000, 30001] affinity: nodeAffinity: diff --git a/kubernetes_init/frontend/service-frontend.yaml b/kubernetes_init/frontend/service-frontend.yaml index 9d34a015..9cbbcb02 100644 --- a/kubernetes_init/frontend/service-frontend.yaml +++ b/kubernetes_init/frontend/service-frontend.yaml @@ -4,10 +4,9 @@ metadata: name: glados-service-frontend spec: selector: - app: GLADOS # This should match the pod labels + app: GLADOS # Ensure this matches the pod label ports: - protocol: TCP - port: 3000 # The service port (accessible inside the cluster) - targetPort: 3000 # The container's port - nodePort: 30000 # The external port - type: NodePort \ No newline at end of file + port: 3000 # The service port + targetPort: 3000 # The container port where Next.js listens + type: LoadBalancer # Expose service with a load balancer From 2164d5a7f8022938f02129455abd0e079f3ae5d8 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 22:34:59 -0500 Subject: [PATCH 196/198] 
Update deployment-frontend.yaml --- kubernetes_init/frontend/deployment-frontend.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kubernetes_init/frontend/deployment-frontend.yaml b/kubernetes_init/frontend/deployment-frontend.yaml index 56c9f497..164654ab 100644 --- a/kubernetes_init/frontend/deployment-frontend.yaml +++ b/kubernetes_init/frontend/deployment-frontend.yaml @@ -21,7 +21,7 @@ spec: imagePullPolicy: Always ports: - containerPort: 3000 - # hostPort: 0000 + hostPort: 3000 env: - name: BACKEND_PORT valueFrom: From 08f7565febc8db4597a7f885d1e087afcbfde5f0 Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 22:42:13 -0500 Subject: [PATCH 197/198] Update job-runner.yaml --- apps/backend/job-runner.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/backend/job-runner.yaml b/apps/backend/job-runner.yaml index 0ff57460..cbd45305 100644 --- a/apps/backend/job-runner.yaml +++ b/apps/backend/job-runner.yaml @@ -10,7 +10,7 @@ spec: spec: containers: - name: runner - image: gladospipeline/glados-runner:development + image: gladospipeline/glados-runner:main imagePullPolicy: Always command: [] env: From 1c6d77cd97f380113c661da51fe7d036a710e48a Mon Sep 17 00:00:00 2001 From: Riley Windsor Date: Sun, 10 Nov 2024 23:01:22 -0500 Subject: [PATCH 198/198] i hate this check --- apps/backend/.pylintrc | 1 + 1 file changed, 1 insertion(+) diff --git a/apps/backend/.pylintrc b/apps/backend/.pylintrc index 71665644..63c0d032 100644 --- a/apps/backend/.pylintrc +++ b/apps/backend/.pylintrc @@ -4,6 +4,7 @@ extension-pkg-allow-list=pydantic # Updated from 'extension-pkg-whitelist' disable= C0301, # line too long + C0303, # trailing whitespace C0114, # missing-module-docstring C0116, # missing-function-docstring C0115, # missing-class-docstring
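
Taken together, the updateExperimentNameById changes earlier in this series converge on the standard Node.js MongoDB driver pattern: filter by an ObjectId and wrap the changed fields in a $set update operator. A self-contained sketch of that pattern follows; the database and collection names are placeholders, and the connection handling is an illustrative assumption rather than the project's actual wiring:

import { MongoClient, ObjectId } from 'mongodb';

// Placeholder names for illustration; the real constants live in
// apps/frontend/lib/mongodb.ts.
const DB_NAME = 'mydb';
const COLLECTION_EXPERIMENTS = 'experiments';

export async function renameExperiment(client: MongoClient, expId: string, newName: string) {
  const collection = client.db(DB_NAME).collection(COLLECTION_EXPERIMENTS);

  // updateOne requires an update operator such as $set; recent driver versions
  // reject a bare replacement document passed as the second argument.
  const result = await collection.updateOne(
    { _id: new ObjectId(expId) },
    { $set: { name: newName } }
  );

  if (result.modifiedCount === 0) {
    throw new Error(`Could not update document with id: ${expId}`);
  }
}

The same filter-by-ObjectId convention is what lets deleteDocumentById and updateExperimentNameById in mongodb_funcs.ts agree on how experiment documents are addressed.
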