diff --git a/.eslintrc.json b/.eslintrc.json index 92f34228ca..112d716a26 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -4,7 +4,8 @@ "ecmaVersion": 2018, "sourceType": "module", "ecmaFeatures": { - "jsx": true + "jsx": true, + "js": true } }, "plugins": ["react-hooks"], @@ -29,12 +30,6 @@ "argsIgnorePattern": "^_" } ], - "new-cap": [ - "error", - { - "capIsNewExceptions": ["OneOf"] - } - ], "no-console": [ "warn", { @@ -49,10 +44,7 @@ "jsx-a11y/anchor-is-valid": 1, "import/extensions": [ "error", - "ignorePackages", - { - "js": "never" - } + "ignorePackages" ], "import/no-extraneous-dependencies": ["error", {"devDependencies": ["src/frontend/**/*.jsx", "src/south/**/*.jsx", "src/north/**/*.jsx"]}], "arrow-parens": ["error", "always"], diff --git a/package.json b/package.json index 80d22ae8f6..8b187f875e 100644 --- a/package.json +++ b/package.json @@ -4,6 +4,7 @@ "main": "index.js", "author": "Optimistik SAS", "license": "EUPL", + "type": "module", "engines": { "node": ">=v16.17.0", "npm": ">=8.15.0" diff --git a/src/engine/base-engine.js b/src/engine/base-engine.js index 02d4194335..c052dfbfa1 100644 --- a/src/engine/base-engine.js +++ b/src/engine/base-engine.js @@ -1,38 +1,62 @@ -const path = require('node:path') - -const VERSION = require('../../package.json').version - -const northList = {} -northList.OIAnalytics = require('../north/north-oianalytics/north-oianalytics') -northList.OIConnect = require('../north/north-oiconnect/north-oiconnect') -northList.FileWriter = require('../north/north-file-writer/north-file-writer') -northList.AmazonS3 = require('../north/north-amazon-s3/north-amazon-s3') -northList.InfluxDB = require('../north/north-influx-db/north-influx-db') -northList.TimescaleDB = require('../north/north-timescale-db/north-timescale-db') -northList.MongoDB = require('../north/north-mongo-db/north-mongo-db') -northList.MQTT = require('../north/north-mqtt/north-mqtt') -northList.Console = require('../north/north-console/north-console') -northList.WATSYConnect = require('../north/north-watsy/north-watsy') -northList.CsvToHttp = require('../north/north-csv-to-http/north-csv-to-http') - -const southList = {} -southList.SQL = require('../south/south-sql/south-sql') -southList.FolderScanner = require('../south/south-folder-scanner/south-folder-scanner') -southList.OPCUA_HA = require('../south/south-opcua-ha/south-opcua-ha') -southList.OPCUA_DA = require('../south/south-opcua-da/south-opcua-da') -southList.MQTT = require('../south/south-mqtt/south-mqtt') -southList.ADS = require('../south/south-ads/south-ads') -southList.Modbus = require('../south/south-modbus/south-modbus') -southList.OPCHDA = require('../south/south-opchda/south-opchda') -southList.RestApi = require('../south/south-rest/south-rest') - -const StatusService = require('../service/status.service') +import path from 'node:path' + +// North imports +import OIAnalytics from '../north/north-oianalytics/north-oianalytics.js' +import OIConnect from '../north/north-oiconnect/north-oiconnect.js' +import FileWriter from '../north/north-file-writer/north-file-writer.js' +import AmazonS3 from '../north/north-amazon-s3/north-amazon-s3.js' +import InfluxDB from '../north/north-influx-db/north-influx-db.js' +import TimescaleDB from '../north/north-timescale-db/north-timescale-db.js' +import MongoDB from '../north/north-mongo-db/north-mongo-db.js' +import MQTTNorth from '../north/north-mqtt/north-mqtt.js' +import Console from '../north/north-console/north-console.js' +import WATSYConnect from 
'../north/north-watsy/north-watsy.js' +import CsvToHttp from '../north/north-csv-to-http/north-csv-to-http.js' + +// South imports +import SQL from '../south/south-sql/south-sql.js' +import FolderScanner from '../south/south-folder-scanner/south-folder-scanner.js' +import OPCUA_HA from '../south/south-opcua-ha/south-opcua-ha.js' +import OPCUA_DA from '../south/south-opcua-da/south-opcua-da.js' +import MQTTSouth from '../south/south-mqtt/south-mqtt.js' +import ADS from '../south/south-ads/south-ads.js' +import Modbus from '../south/south-modbus/south-modbus.js' +import OPCHDA from '../south/south-opchda/south-opchda.js' +import RestApi from '../south/south-rest/south-rest.js' + +import StatusService from '../service/status.service.js' + +const northList = { + OIAnalytics, + OIConnect, + FileWriter, + AmazonS3, + InfluxDB, + TimescaleDB, + MongoDB, + MQTT: MQTTNorth, + Console, + WATSYConnect, + CsvToHttp, +} + +const southList = { + SQL, + FolderScanner, + OPCUA_HA, + OPCUA_DA, + MQTT: MQTTSouth, + ADS, + Modbus, + OPCHDA, + RestApi, +} /** * Abstract class used to manage North and South connectors * @class BaseEngine */ -class BaseEngine { +export default class BaseEngine { /** * Constructor for BaseEngine * @constructor @@ -48,7 +72,7 @@ class BaseEngine { loggerService, cacheFolder, ) { - this.version = VERSION + this.version = null this.cacheFolder = path.resolve(cacheFolder) this.installedNorthConnectors = northList @@ -69,6 +93,10 @@ class BaseEngine { * @returns {Promise} - The result promise */ async initEngineServices(engineConfig) { + // const packageJson = JSON.parse(await fs.readFile('package.json')) + // this.version = packageJson.version + // TODO + this.version = '2.4.0' this.oibusName = engineConfig.name this.defaultLogParameters = engineConfig.logParameters this.proxies = engineConfig.proxies @@ -178,5 +206,3 @@ class BaseEngine { })) } } - -module.exports = BaseEngine diff --git a/src/engine/health-signal.js b/src/engine/health-signal.js index c24e466139..a8defa4d7d 100644 --- a/src/engine/health-signal.js +++ b/src/engine/health-signal.js @@ -1,9 +1,9 @@ -const { httpSend, createProxyAgent, addAuthenticationToHeaders } = require('../service/http-request-static-functions') +import { httpSend, createProxyAgent, addAuthenticationToHeaders } from '../service/http-request-static-functions.js' /** * Class HealthSignal - sends health signal to a remote host or into the logs */ -class HealthSignal { +export default class HealthSignal { /** * Constructor for HealthSignal * @constructor @@ -162,5 +162,3 @@ class HealthSignal { } } } - -module.exports = HealthSignal diff --git a/src/engine/history-query-engine.js b/src/engine/history-query-engine.js index f9a1c9c638..8f7507a537 100644 --- a/src/engine/history-query-engine.js +++ b/src/engine/history-query-engine.js @@ -1,12 +1,12 @@ -const path = require('node:path') -const fs = require('node:fs/promises') +import path from 'node:path' +import fs from 'node:fs/promises' -const humanizeDuration = require('humanize-duration') +import humanizeDuration from 'humanize-duration' -const HistoryQuery = require('./history-query/history-query') -const BaseEngine = require('./base-engine') -const HistoryQueryRepository = require('./history-query/history-query-repository') -const databaseService = require('../service/database.service') +import HistoryQuery from './history-query/history-query.js' +import BaseEngine from './base-engine.js' +import HistoryQueryRepository from './history-query/history-query-repository.js' +import { 
getHistoryQuerySouthData } from '../service/database.service.js' const CACHE_FOLDER = './cache/history-query' const HISTORY_QUERIES_DB = './history-query.db' @@ -16,7 +16,7 @@ const HISTORY_TIMER_INTERVAL = 10000 * Manage history queries by running {@link HistoryQuery} one after another * @class HistoryQueryEngine */ -class HistoryQueryEngine extends BaseEngine { +export default class HistoryQueryEngine extends BaseEngine { /** * Constructor for Engine * Reads the config file and create the corresponding Object. @@ -229,7 +229,7 @@ class HistoryQueryEngine extends BaseEngine { const databasePath = `${folder}/${historyQueryConfig.southId}.db` try { await fs.stat(databasePath) - const entries = databaseService.getHistoryQuerySouthData(databasePath) + const entries = getHistoryQuerySouthData(databasePath) data.south = entries.map((entry) => ({ scanMode: entry.name.replace('lastCompletedAt-', ''), lastCompletedDate: entry.value, @@ -242,5 +242,3 @@ class HistoryQueryEngine extends BaseEngine { return data } } - -module.exports = HistoryQueryEngine diff --git a/src/engine/history-query/history-query-repository.js b/src/engine/history-query/history-query-repository.js index e97d0db21a..d18452c32a 100644 --- a/src/engine/history-query/history-query-repository.js +++ b/src/engine/history-query/history-query-repository.js @@ -1,7 +1,8 @@ -const db = require('better-sqlite3') -const HistoryQuery = require('./history-query') +import db from 'better-sqlite3' -class HistoryQueryRepository { +import HistoryQuery from './history-query.js' + +export default class HistoryQueryRepository { static TABLE = 'history_queries' constructor(databasePath) { @@ -202,5 +203,3 @@ class HistoryQueryRepository { return null } } - -module.exports = HistoryQueryRepository diff --git a/src/engine/history-query/history-query.js b/src/engine/history-query/history-query.js index 933f440425..1f559f4398 100644 --- a/src/engine/history-query/history-query.js +++ b/src/engine/history-query/history-query.js @@ -1,11 +1,11 @@ -const path = require('node:path') +import path from 'node:path' -const StatusService = require('../../service/status.service') -const { createFolder } = require('../../service/utils') +import StatusService from '../../service/status.service.js' +import { createFolder } from '../../service/utils.js' const FINISH_INTERVAL = 5000 -class HistoryQuery { +export default class HistoryQuery { // Waiting to be started static STATUS_PENDING = 'pending' @@ -172,5 +172,3 @@ class HistoryQuery { this.engine.historyQueryRepository.update(this.historyConfiguration) } } - -module.exports = HistoryQuery diff --git a/src/engine/oibus-engine.js b/src/engine/oibus-engine.js index eac4df316d..962c0edbc3 100644 --- a/src/engine/oibus-engine.js +++ b/src/engine/oibus-engine.js @@ -1,11 +1,11 @@ -const os = require('node:os') +import fs from 'node:fs/promises' +import os from 'node:os' -const timexe = require('timexe') -const humanizeDuration = require('humanize-duration') +import timexe from 'timexe' +import humanizeDuration from 'humanize-duration' -const fs = require('node:fs/promises') -const BaseEngine = require('./base-engine') -const HealthSignal = require('./health-signal') +import BaseEngine from './base-engine.js' +import HealthSignal from './health-signal.js' const CACHE_FOLDER = './cache/data-stream' @@ -13,7 +13,7 @@ const CACHE_FOLDER = './cache/data-stream' * At startup, handles initialization of configuration, North and South connectors. 
* @class OIBusEngine */ -class OIBusEngine extends BaseEngine { +export default class OIBusEngine extends BaseEngine { /** * Constructor for OIBusEngine * Reads the config file and create the corresponding Object. @@ -392,5 +392,3 @@ class OIBusEngine extends BaseEngine { }) } } - -module.exports = OIBusEngine diff --git a/src/index.js b/src/index.js index 9aaca7247f..fe3781b570 100644 --- a/src/index.js +++ b/src/index.js @@ -1,17 +1,15 @@ -const cluster = require('node:cluster') -const path = require('node:path') +import cluster from 'node:cluster' +import path from 'node:path' -const VERSION = require('../package.json').version +import migrationService from './migration/migration.service.js' +import ConfigurationService from './service/configuration.service.js' +import Server from './web-server/web-server.js' +import OIBusEngine from './engine/oibus-engine.js' +import HistoryQueryEngine from './engine/history-query-engine.js' +import LoggerService from './service/logger/logger.service.js' +import EncryptionService from './service/encryption.service.js' -const migrationService = require('./migration/migration.service') -const ConfigurationService = require('./service/configuration.service') -const Server = require('./web-server/web-server') -const OIBusEngine = require('./engine/oibus-engine') -const HistoryQueryEngine = require('./engine/history-query-engine') -const LoggerService = require('./service/logger/logger.service') -const EncryptionService = require('./service/encryption.service') - -const { getCommandLineArguments, createFolder } = require('./service/utils') +import { getCommandLineArguments, createFolder } from './service/utils.js' // In case there is an error the worker process will exit. // If this happens MAX_RESTART_COUNT times in less than MAX_INTERVAL_MILLISECOND interval @@ -44,6 +42,9 @@ if (cluster.isMaster) { const baseDir = path.resolve(path.extname(configFile) ? 
path.parse(configFile).dir : configFile) process.chdir(baseDir) createFolder(baseDir).then(async () => { + // TODO + const packageJson = '2.4.0' // JSON.parse(await fs.readFile('package.json')) + // Create the base cache folder await createFolder(CACHE_FOLDER) @@ -51,7 +52,7 @@ if (cluster.isMaster) { const mainLogger = loggerService.createChildLogger('main-thread') // Master role is nothing except launching a worker and relaunching another // one if exit is detected (typically to load a new configuration) - mainLogger.info(`Starting OIBus version ${VERSION}.`) + mainLogger.info(`Starting OIBus version ${packageJson}.`) let restartCount = 0 let startTime = (new Date()).getTime() @@ -115,7 +116,7 @@ if (cluster.isMaster) { const forkLogger = loggerService.createChildLogger('forked-thread') // Migrate config file, if needed - await migrationService.migrate(configFilePath, loggerService.createChildLogger('migration')) + await migrationService(configFilePath, loggerService.createChildLogger('migration')) const configService = new ConfigurationService(configFilePath, CACHE_FOLDER) const encryptionService = EncryptionService.getInstance() diff --git a/src/migration/database-migration.service.js b/src/migration/database-migration.service.js index 71edc4e623..1a82fe4d61 100644 --- a/src/migration/database-migration.service.js +++ b/src/migration/database-migration.service.js @@ -1,4 +1,4 @@ -const db = require('better-sqlite3') +import db from 'better-sqlite3' const CACHE_TABLE_NAME = 'cache' @@ -31,7 +31,7 @@ const changeColumnValue = (databasePath, columnName, oldValue, newValue) => { return true } -module.exports = { +export { changeColumnName, addColumn, removeColumn, diff --git a/src/migration/migration-rules.js b/src/migration/migration-rules.js index 89244e039d..0594257237 100644 --- a/src/migration/migration-rules.js +++ b/src/migration/migration-rules.js @@ -1,14 +1,21 @@ /* eslint-disable no-restricted-syntax, no-await-in-loop */ -const fs = require('node:fs/promises') -const path = require('node:path') -const db = require('better-sqlite3') -const { nanoid } = require('nanoid') -const databaseMigrationService = require('./database-migration.service') -const databaseService = require('../service/database.service') -const { createFolder, filesExists } = require('../service/utils') - -module.exports = { +import fs from 'node:fs/promises' +import path from 'node:path' + +import db from 'better-sqlite3' +import { nanoid } from 'nanoid' + +import { + changeColumnName, + addColumn, + removeColumn, + changeColumnValue, +} from './database-migration.service.js' +import { createConfigDatabase, getConfig, upsertConfig } from '../service/database.service.js' +import { createFolder, filesExists } from '../service/utils.js' + +export default { 2: (config, logger) => { config.south.dataSources.forEach((dataSource) => { if (dataSource.protocol === 'RawFile') { @@ -452,7 +459,7 @@ module.exports = { const logDatabase = config.engine.logParameters.sqliteLog.fileName try { await fs.access(logDatabase) - await databaseMigrationService.addColumn(logDatabase, 'logs', 'scope') + await addColumn(logDatabase, 'logs', 'scope') } catch { logger.info(`No log db file to migrate (file name: ${logDatabase})`) } @@ -1110,14 +1117,14 @@ module.exports = { if (dataSource.protocol === 'SQL') { logger.info(`Update lastCompletedAt key for ${dataSource.name}`) const databasePath = `${config.engine.caching.cacheFolder}/${dataSource.id}.db` - const database = databaseService.createConfigDatabase(databasePath) - const 
lastCompletedAt = databaseService.getConfig(database, 'lastCompletedAt') - databaseService.upsertConfig(database, `lastCompletedAt-${dataSource.scanMode}`, lastCompletedAt) + const database = createConfigDatabase(databasePath) + const lastCompletedAt = getConfig(database, 'lastCompletedAt') + upsertConfig(database, `lastCompletedAt-${dataSource.scanMode}`, lastCompletedAt) } if (['OPCUA_HA', 'OPCHDA'].includes(dataSource.protocol)) { const databasePath = `${config.engine.caching.cacheFolder}/${dataSource.id}.db` - const database = databaseService.createConfigDatabase(databasePath) + const database = createConfigDatabase(databasePath) if (!dataSource[dataSource.protocol].scanGroups) { dataSource[dataSource.protocol].scanGroups = [] } @@ -1125,10 +1132,10 @@ module.exports = { // eslint-disable-next-line no-restricted-syntax for (const scanMode of scanModes) { logger.info(`Update lastCompletedAt-${scanMode} value for ${dataSource.name}`) - const lastCompletedAtString = databaseService.getConfig(database, `lastCompletedAt-${scanMode}`) + const lastCompletedAtString = getConfig(database, `lastCompletedAt-${scanMode}`) if (lastCompletedAtString) { const lastCompletedAt = new Date(parseInt(lastCompletedAtString, 10)) - databaseService.upsertConfig(database, `lastCompletedAt-${scanMode}`, lastCompletedAt.toISOString()) + upsertConfig(database, `lastCompletedAt-${scanMode}`, lastCompletedAt.toISOString()) } } } @@ -1153,7 +1160,7 @@ module.exports = { // eslint-disable-next-line max-len logger.info(`Migration of value error database ${valueCacheErrorDbPath}: Renaming column name "application_id" into "application"`) try { - await databaseMigrationService.changeColumnName( + changeColumnName( valueCacheErrorDbPath, 'application_id', 'application', @@ -1205,7 +1212,7 @@ module.exports = { // eslint-disable-next-line max-len logger.info(`Migration of values database ${cachePath}/${application.id}.db: Renaming column name "data_source_id" into "data_source" for application ${application.name}`) try { - databaseMigrationService.changeColumnName( + changeColumnName( newApplicationPath, 'data_source_id', 'data_source', @@ -1218,7 +1225,7 @@ module.exports = { // eslint-disable-next-line max-len logger.info(`Migration of file database ${fileCacheDbPath}: Changing application value from ${application.name} to ${application.id}`) try { - databaseMigrationService.changeColumnValue( + changeColumnValue( fileCacheDbPath, 'application', application.name, @@ -1233,7 +1240,7 @@ module.exports = { // eslint-disable-next-line max-len logger.info(`Migration of file error database ${fileCacheErrorDbPath}: Changing application value from ${application.name} to ${application.id}`) try { - databaseMigrationService.changeColumnValue( + changeColumnValue( fileCacheErrorDbPath, 'application', application.name, @@ -1248,7 +1255,7 @@ module.exports = { // eslint-disable-next-line max-len logger.info(`Migration of value error database ${valueCacheErrorDbPath}: Changing application value from ${application.name} to ${application.id}`) try { - databaseMigrationService.changeColumnValue( + changeColumnValue( valueCacheErrorDbPath, 'application', application.name, @@ -1327,7 +1334,7 @@ module.exports = { await createFolder(path.resolve(northCache, 'errors')) try { await fs.rename(path.resolve('./cache', `${north.id}.db`), path.resolve(northCache, 'values.db')) - databaseMigrationService.changeColumnName( + changeColumnName( path.resolve(northCache, 'values.db'), 'data_source', 'south', @@ -1352,7 +1359,7 @@ module.exports = { + 
'WHERE application = ? ' + 'ORDER BY timestamp' const results = database.prepare(query).all(north.id) - databaseMigrationService.removeColumn(path.resolve(northCache, 'files.db'), 'cache', 'application') + removeColumn(path.resolve(northCache, 'files.db'), 'cache', 'application') // Empty the database to rewrite the new file path database.prepare('DELETE FROM cache;').run() for (const result of results) { diff --git a/src/migration/migration.service.js b/src/migration/migration.service.js index ec2b6bd658..20118c16de 100644 --- a/src/migration/migration.service.js +++ b/src/migration/migration.service.js @@ -1,7 +1,7 @@ -const fs = require('node:fs/promises') -const path = require('node:path') +import fs from 'node:fs/promises' +import path from 'node:path' -const migrationRules = require('./migration-rules') +import migrationRules from './migration-rules.js' const REQUIRED_SCHEMA_VERSION = 29 const DEFAULT_VERSION = 1 @@ -78,4 +78,4 @@ const migrate = async (configFilePath, logger) => { } } -module.exports = { migrate } +export default migrate diff --git a/src/north/north-amazon-s3/north-amazon-s3.js b/src/north/north-amazon-s3/north-amazon-s3.js index ab84707c51..e43df41f3f 100644 --- a/src/north/north-amazon-s3/north-amazon-s3.js +++ b/src/north/north-amazon-s3/north-amazon-s3.js @@ -1,10 +1,10 @@ -const fs = require('node:fs') -const path = require('node:path') +import fs from 'node:fs' +import path from 'node:path' -const { S3Client, PutObjectCommand } = require('@aws-sdk/client-s3') -const { NodeHttpHandler } = require('@aws-sdk/node-http-handler') +import { S3Client, PutObjectCommand } from '@aws-sdk/client-s3' +import { NodeHttpHandler } from '@aws-sdk/node-http-handler' -const NorthConnector = require('../north-connector') +import NorthConnector from '../north-connector.js' /** * Get filename without timestamp from file path. 
@@ -20,7 +20,7 @@ const getFilenameWithoutTimestamp = (filePath) => { /** * Class NorthAmazonS3 - sends files to Amazon AWS S3 */ -class NorthAmazonS3 extends NorthConnector { +export default class NorthAmazonS3 extends NorthConnector { static category = 'FileIn' /** @@ -87,5 +87,3 @@ class NorthAmazonS3 extends NorthConnector { await this.s3.send(new PutObjectCommand(params)) } } - -module.exports = NorthAmazonS3 diff --git a/src/north/north-connector.js b/src/north/north-connector.js index fd692c98de..bc8bf8b873 100644 --- a/src/north/north-connector.js +++ b/src/north/north-connector.js @@ -1,13 +1,13 @@ -const path = require('node:path') +import path from 'node:path' -const EncryptionService = require('../service/encryption.service') -const CertificateService = require('../service/certificate.service') -const StatusService = require('../service/status.service') -const ValueCache = require('../service/cache/value-cache.service') -const FileCache = require('../service/cache/file-cache.service') -const { createFolder } = require('../service/utils') -const { createProxyAgent } = require('../service/http-request-static-functions') -const ArchiveService = require('../service/cache/archive.service') +import EncryptionService from '../service/encryption.service.js' +import CertificateService from '../service/certificate.service.js' +import StatusService from '../service/status.service.js' +import ValueCache from '../service/cache/value-cache.service.js' +import FileCache from '../service/cache/file-cache.service.js' +import { createFolder } from '../service/utils.js' +import { createProxyAgent } from '../service/http-request-static-functions.js' +import ArchiveService from '../service/cache/archive.service.js' /** * Class NorthConnector : provides general attributes and methods for north connectors. 
@@ -26,7 +26,7 @@ const ArchiveService = require('../service/cache/archive.service') * - **getProxy**: get the proxy handler * - **logger**: to log an event with different levels (error,warning,info,debug,trace) */ -class NorthConnector { +export default class NorthConnector { /** * Constructor for NorthConnector * @constructor @@ -319,5 +319,3 @@ class NorthConnector { return this.fileCache.retryAllErrorFiles() } } - -module.exports = NorthConnector diff --git a/src/north/north-console/north-console.js b/src/north/north-console/north-console.js index 8fe6b77bd5..ee9dd0f488 100644 --- a/src/north/north-console/north-console.js +++ b/src/north/north-console/north-console.js @@ -1,11 +1,11 @@ -const fs = require('node:fs/promises') +import fs from 'node:fs/promises' -const NorthConnector = require('../north-connector') +import NorthConnector from '../north-connector.js' /** * Class Console - display values and file path into the console */ -class Console extends NorthConnector { +export default class Console extends NorthConnector { static category = 'Debug' /** @@ -65,5 +65,3 @@ class Console extends NorthConnector { } } } - -module.exports = Console diff --git a/src/north/north-csv-to-http/north-csv-to-http.js b/src/north/north-csv-to-http/north-csv-to-http.js index fc0108bf3d..26fd8d1fba 100644 --- a/src/north/north-csv-to-http/north-csv-to-http.js +++ b/src/north/north-csv-to-http/north-csv-to-http.js @@ -1,17 +1,17 @@ -const { createReadStream } = require('node:fs') +import { createReadStream } from 'node:fs' -const csv = require('papaparse') +import csv from 'papaparse' -const NorthConnector = require('../north-connector') -const { convertCSVRowIntoHttpBody, isHeaderValid } = require('./utils') -const { httpSend, addAuthenticationToHeaders } = require('../../service/http-request-static-functions') +import NorthConnector from '../north-connector.js' +import { convertCSVRowIntoHttpBody, isHeaderValid } from './utils.js' +import { httpSend, addAuthenticationToHeaders } from '../../service/http-request-static-functions.js' const ERROR_PRINT_SIZE = 5 /** * Class NorthCsvToHttp - convert a CSV file into JSON payload for HTTP requests (POST/PUT/PATCH) */ -class NorthCsvToHttp extends NorthConnector { +export default class NorthCsvToHttp extends NorthConnector { static category = 'API' /** @@ -231,5 +231,3 @@ class NorthCsvToHttp extends NorthConnector { } } } - -module.exports = NorthCsvToHttp diff --git a/src/north/north-csv-to-http/utils.js b/src/north/north-csv-to-http/utils.js index 9ae7c38c11..7f923e135f 100644 --- a/src/north/north-csv-to-http/utils.js +++ b/src/north/north-csv-to-http/utils.js @@ -142,4 +142,4 @@ const convertCSVRowIntoHttpBody = (csvRowInJson, mappingValues) => { return object } -module.exports = { convertCSVRowIntoHttpBody, isHeaderValid } +export { convertCSVRowIntoHttpBody, isHeaderValid } diff --git a/src/north/north-file-writer/north-file-writer.js b/src/north/north-file-writer/north-file-writer.js index 9d7338f1cb..deca0fc13a 100644 --- a/src/north/north-file-writer/north-file-writer.js +++ b/src/north/north-file-writer/north-file-writer.js @@ -1,12 +1,12 @@ -const fs = require('node:fs/promises') -const path = require('node:path') +import fs from 'node:fs/promises' +import path from 'node:path' -const NorthConnector = require('../north-connector') +import NorthConnector from '../north-connector.js' /** * Class NorthFileWriter - Write file in an output folder. 
Values are stored in JSON files */ -class NorthFileWriter extends NorthConnector { +export default class NorthFileWriter extends NorthConnector { static category = 'FileIn' /** @@ -70,5 +70,3 @@ class NorthFileWriter extends NorthConnector { this.logger.debug(`File "${filePath}" copied into "${fileName}".`) } } - -module.exports = NorthFileWriter diff --git a/src/north/north-influx-db/north-influx-db.js b/src/north/north-influx-db/north-influx-db.js index 9305629905..142663ff35 100644 --- a/src/north/north-influx-db/north-influx-db.js +++ b/src/north/north-influx-db/north-influx-db.js @@ -1,8 +1,8 @@ -const { vsprintf } = require('sprintf-js') -const objectPath = require('object-path') +import { vsprintf } from 'sprintf-js' +import objectPath from 'object-path' -const NorthConnector = require('../north-connector') -const { httpSend } = require('../../service/http-request-static-functions') +import NorthConnector from '../north-connector.js' +import { httpSend } from '../../service/http-request-static-functions.js' /** * Convert timestamp to the configured precision @@ -32,7 +32,7 @@ const getConvertedTimestamp = (timestampTime, precision) => { /** * Class NorthInfluxDB - Send data to InfluxDB */ -class NorthInfluxDB extends NorthConnector { +export default class NorthInfluxDB extends NorthConnector { static category = 'DatabaseIn' /** @@ -190,5 +190,3 @@ class NorthInfluxDB extends NorthConnector { ) } } - -module.exports = NorthInfluxDB diff --git a/src/north/north-mongo-db/north-mongo-db.js b/src/north/north-mongo-db/north-mongo-db.js index 549dfbc962..2ecf6d72d7 100644 --- a/src/north/north-mongo-db/north-mongo-db.js +++ b/src/north/north-mongo-db/north-mongo-db.js @@ -1,13 +1,13 @@ -const mongo = require('mongodb') -const { vsprintf } = require('sprintf-js') -const objectPath = require('object-path') +import mongo from 'mongodb' +import { vsprintf } from 'sprintf-js' +import objectPath from 'object-path' -const NorthConnector = require('../north-connector') +import NorthConnector from '../north-connector.js' /** * Class NorthMongoDB - Send data to MongoDB */ -class NorthMongoDB extends NorthConnector { +export default class NorthMongoDB extends NorthConnector { static category = 'DatabaseIn' /** @@ -240,5 +240,3 @@ class NorthMongoDB extends NorthConnector { this.collectionExists = true } } - -module.exports = NorthMongoDB diff --git a/src/north/north-mqtt/north-mqtt.js b/src/north/north-mqtt/north-mqtt.js index 3006d3ddee..9ca82fffaf 100644 --- a/src/north/north-mqtt/north-mqtt.js +++ b/src/north/north-mqtt/north-mqtt.js @@ -1,13 +1,13 @@ -const { vsprintf } = require('sprintf-js') -const mqtt = require('mqtt') -const objectPath = require('object-path') +import { vsprintf } from 'sprintf-js' +import mqtt from 'mqtt' +import objectPath from 'object-path' -const NorthConnector = require('../north-connector') +import NorthConnector from '../north-connector.js' /** * Class NorthMQTT - Publish data to a MQTT broker */ -class NorthMQTT extends NorthConnector { +export default class NorthMQTT extends NorthConnector { static category = 'IoT' /** @@ -184,5 +184,3 @@ class NorthMQTT extends NorthConnector { }) } } - -module.exports = NorthMQTT diff --git a/src/north/north-oianalytics/north-oianalytics.js b/src/north/north-oianalytics/north-oianalytics.js index be9e9cfed9..f023eeee5d 100644 --- a/src/north/north-oianalytics/north-oianalytics.js +++ b/src/north/north-oianalytics/north-oianalytics.js @@ -1,11 +1,11 @@ -const NorthConnector = require('../north-connector') -const { httpSend, 
addAuthenticationToHeaders } = require('../../service/http-request-static-functions') +import NorthConnector from '../north-connector.js' +import { httpSend, addAuthenticationToHeaders } from '../../service/http-request-static-functions.js' /** * Class NorthOIAnalytics - Send files to a POST Multipart HTTP request and values as JSON payload * OIAnalytics endpoints are set in this connector */ -class NorthOIAnalytics extends NorthConnector { +export default class NorthOIAnalytics extends NorthConnector { static category = 'Optimistik' /** @@ -119,5 +119,3 @@ class NorthOIAnalytics extends NorthConnector { return retry } } - -module.exports = NorthOIAnalytics diff --git a/src/north/north-oiconnect/north-oiconnect.js b/src/north/north-oiconnect/north-oiconnect.js index ae44790260..a2e20da954 100644 --- a/src/north/north-oiconnect/north-oiconnect.js +++ b/src/north/north-oiconnect/north-oiconnect.js @@ -1,5 +1,5 @@ -const NorthConnector = require('../north-connector') -const { httpSend, addAuthenticationToHeaders } = require('../../service/http-request-static-functions') +import NorthConnector from '../north-connector.js' +import { httpSend, addAuthenticationToHeaders } from '../../service/http-request-static-functions.js' /** * Class NorthOIConnect - Send files through a POST Multipart HTTP request and values as JSON payload @@ -8,7 +8,7 @@ const { httpSend, addAuthenticationToHeaders } = require('../../service/http-req * -files endpoint: /engine/addFile * -values endpoint: /engine/addValues */ -class NorthOIConnect extends NorthConnector { +export default class NorthOIConnect extends NorthConnector { static category = 'Optimistik' /** @@ -132,5 +132,3 @@ class NorthOIConnect extends NorthConnector { return retry } } - -module.exports = NorthOIConnect diff --git a/src/north/north-timescale-db/north-timescale-db.js b/src/north/north-timescale-db/north-timescale-db.js index 728dd44597..00421df646 100644 --- a/src/north/north-timescale-db/north-timescale-db.js +++ b/src/north/north-timescale-db/north-timescale-db.js @@ -1,13 +1,13 @@ -const { Client } = require('pg') -const { vsprintf } = require('sprintf-js') -const objectPath = require('object-path') +import * as pg from 'pg' +import { vsprintf } from 'sprintf-js' +import objectPath from 'object-path' -const NorthConnector = require('../north-connector') +import NorthConnector from '../north-connector.js' /** * Class NorthTimescaleDB - Send Data to Timescale DB */ -class NorthTimescaleDB extends NorthConnector { +export default class NorthTimescaleDB extends NorthConnector { static category = 'DatabaseIn' /** @@ -66,7 +66,7 @@ class NorthTimescaleDB extends NorthConnector { const url = `postgres://${this.user}:${await this.encryptionService.decryptText(this.password)}@${this.host}/${this.database}` - this.client = new Client(url) + this.client = new pg.Client(url) await this.client.connect() await super.connect(`url: ${url}`) @@ -192,5 +192,3 @@ class NorthTimescaleDB extends NorthConnector { await super.disconnect() } } - -module.exports = NorthTimescaleDB diff --git a/src/north/north-watsy/north-watsy.js b/src/north/north-watsy/north-watsy.js index b8d1675f45..31e976be16 100644 --- a/src/north/north-watsy/north-watsy.js +++ b/src/north/north-watsy/north-watsy.js @@ -1,8 +1,7 @@ -const mqtt = require('mqtt') +import mqtt from 'mqtt' -const NorthConnector = require('../north-connector') - -const { initMQTTTopic, recursiveSplitMessages } = require('./utils') +import NorthConnector from '../north-connector.js' +import { initMQTTTopic, 
recursiveSplitMessages } from './utils.js' /** * Class NorthWATSY - Send MQTT messages for WATSY application @@ -23,7 +22,7 @@ const { initMQTTTopic, recursiveSplitMessages } = require('./utils') 'token' : $token (can't be null) } */ -class NorthWATSY extends NorthConnector { +export default class NorthWATSY extends NorthConnector { static category = 'API' /** @@ -156,5 +155,3 @@ class NorthWATSY extends NorthConnector { }) } } - -module.exports = NorthWATSY diff --git a/src/north/north-watsy/utils.js b/src/north/north-watsy/utils.js index 4c498f35a7..50b59d2ed1 100644 --- a/src/north/north-watsy/utils.js +++ b/src/north/north-watsy/utils.js @@ -95,4 +95,4 @@ const recursiveSplitMessages = (allWATSYMessages, messages, host, token, splitMe return allWATSYMessages } -module.exports = { initMQTTTopic, recursiveSplitMessages } +export { initMQTTTopic, recursiveSplitMessages } diff --git a/src/service/cache/archive.service.js b/src/service/cache/archive.service.js index 49e460afd2..6640cdec04 100644 --- a/src/service/cache/archive.service.js +++ b/src/service/cache/archive.service.js @@ -1,7 +1,7 @@ -const fs = require('node:fs/promises') -const path = require('node:path') +import fs from 'node:fs/promises' +import path from 'node:path' -const { createFolder } = require('../utils') +import { createFolder } from '../utils.js' // Time between two checks of the Archive Folder const ARCHIVE_TIMEOUT = 3600000 // one hour @@ -11,7 +11,7 @@ const ARCHIVE_FOLDER = 'archive' * Archive service used to archive sent file and check periodically the archive folder to remove old files * Once a file is sent by a North connector, the archiveOrRemoveFile is called by the connector to manage the file */ -class ArchiveService { +export default class ArchiveService { /** * @param {String} northId - The North ID connector * @param {Object} logger - The logger @@ -143,5 +143,3 @@ class ArchiveService { } } } - -module.exports = ArchiveService diff --git a/src/service/cache/file-cache.service.js b/src/service/cache/file-cache.service.js index cd4ebd7364..f926de924a 100644 --- a/src/service/cache/file-cache.service.js +++ b/src/service/cache/file-cache.service.js @@ -1,8 +1,8 @@ -const fs = require('node:fs/promises') -const path = require('node:path') +import fs from 'node:fs/promises' +import path from 'node:path' -const { createFolder, asyncFilter } = require('../utils') -const DeferredPromise = require('../deferred-promise') +import { createFolder, asyncFilter } from '../utils.js' +import DeferredPromise from '../deferred-promise.js' const RESEND_IMMEDIATELY_TIMEOUT = 100 @@ -14,7 +14,7 @@ const ERROR_FOLDER = 'files-errors' /** * Local cache implementation to group events and store them when the communication with the North is down. 
*/ -class FileCacheService { +export default class FileCacheService { /** * @param {String} northId - The North ID connector * @param {Object} logger - The logger @@ -419,5 +419,3 @@ class FileCacheService { return dateIsBetween && filenameContains } } - -module.exports = FileCacheService diff --git a/src/service/cache/value-cache.service.js b/src/service/cache/value-cache.service.js index 04811a4cdd..f6f7bd80c5 100644 --- a/src/service/cache/value-cache.service.js +++ b/src/service/cache/value-cache.service.js @@ -1,10 +1,10 @@ -const fs = require('node:fs/promises') -const path = require('node:path') +import fs from 'node:fs/promises' +import path from 'node:path' -const { nanoid } = require('nanoid') +import { nanoid } from 'nanoid' -const { createFolder } = require('../utils') -const DeferredPromise = require('../deferred-promise') +import { createFolder } from '../utils.js' +import DeferredPromise from '../deferred-promise.js' const BUFFER_MAX = 250 const BUFFER_TIMEOUT = 300 @@ -16,7 +16,7 @@ const ERROR_FOLDER = 'values-errors' /** * Local cache implementation to group events and store them when the communication with the North is down. */ -class ValueCacheService { +export default class ValueCacheService { /** * @param {String} northId - The North ID connector * @param {Object} logger - The logger @@ -484,5 +484,3 @@ class ValueCacheService { clearTimeout(this.valuesTimeout) } } - -module.exports = ValueCacheService diff --git a/src/service/certificate.service.js b/src/service/certificate.service.js index 3880324739..a9b1d67367 100644 --- a/src/service/certificate.service.js +++ b/src/service/certificate.service.js @@ -1,10 +1,10 @@ -const fs = require('node:fs/promises') -const path = require('node:path') +import fs from 'node:fs/promises' +import path from 'node:path' /** * Class used to manage certificate files and their content */ -class CertificateService { +export default class CertificateService { constructor(logger) { this.logger = logger this.privateKey = null @@ -36,5 +36,3 @@ class CertificateService { } } } - -module.exports = CertificateService diff --git a/src/service/configuration.service.js b/src/service/configuration.service.js index ac8e61c2bf..fffdcebf07 100644 --- a/src/service/configuration.service.js +++ b/src/service/configuration.service.js @@ -1,8 +1,8 @@ -const path = require('node:path') -const fs = require('node:fs/promises') +import path from 'node:path' +import fs from 'node:fs/promises' -const EncryptionService = require('./encryption.service') -const { filesExists } = require('./utils') +import EncryptionService from './encryption.service.js' +import { filesExists } from './utils.js' const KEYS_FOLDER = './keys' const CERTS_FOLDER = './certs' @@ -11,7 +11,7 @@ const CERTS_FOLDER = './certs' * Class responsible for managing the configuration. 
* @class ConfigurationService */ -class ConfigurationService { +export default class ConfigurationService { /** * Constructor for ConfigService * @constructor @@ -128,5 +128,3 @@ class ConfigurationService { } } } - -module.exports = ConfigurationService diff --git a/src/service/database.service.js b/src/service/database.service.js index f931453d4e..85e257c407 100644 --- a/src/service/database.service.js +++ b/src/service/database.service.js @@ -1,4 +1,4 @@ -const db = require('better-sqlite3') +import db from 'better-sqlite3' const CACHE_TABLE_NAME = 'cache' const PAGE_SIZE = 50 @@ -129,7 +129,7 @@ const getHistoryQuerySouthData = (databasePath) => { return database.prepare(query).all() } -module.exports = { +export { createConfigDatabase, upsertConfig, getConfig, diff --git a/src/service/deferred-promise.js b/src/service/deferred-promise.js index ac055dbbe8..171501c58e 100644 --- a/src/service/deferred-promise.js +++ b/src/service/deferred-promise.js @@ -3,7 +3,7 @@ * It is used in OPCHDA to resolve the connection and disconnection when the * HDA Agent sends the associated messages */ -class DeferredPromise { +export default class DeferredPromise { constructor() { this.promise = new Promise((resolve, reject) => { this.reject = reject @@ -11,5 +11,3 @@ class DeferredPromise { }) } } - -module.exports = DeferredPromise diff --git a/src/service/encryption.service.js b/src/service/encryption.service.js index fe8dd3ad8c..92d3223fce 100644 --- a/src/service/encryption.service.js +++ b/src/service/encryption.service.js @@ -1,11 +1,11 @@ -const os = require('node:os') -const crypto = require('node:crypto') -const fs = require('node:fs/promises') -const path = require('node:path') +import os from 'node:os' +import crypto from 'node:crypto' +import fs from 'node:fs/promises' +import path from 'node:path' -const selfSigned = require('selfsigned') +import selfSigned from 'selfsigned' -const { createFolder, filesExists } = require('./utils') +import { createFolder, filesExists } from './utils.js' const CERT_PRIVATE_KEY_FILE_NAME = 'privateKey.pem' const CERT_PUBLIC_KEY_FILE_NAME = 'publicKey.pem' @@ -18,7 +18,7 @@ const OIBUS_PUBLIC_KEY_FILE_NAME = 'public.pem' * Service used to manage encryption and decryption of secrets in the config file * Also responsible to create private and public key used for encrypting the secrets */ -class EncryptionService { +export default class EncryptionService { static getInstance() { if (!EncryptionService.instance) { EncryptionService.instance = new EncryptionService() @@ -197,5 +197,3 @@ class EncryptionService { return decrypted.toString('utf8') } } - -module.exports = EncryptionService diff --git a/src/service/http-request-static-functions.js b/src/service/http-request-static-functions.js index 62b14b999b..c7bc675d55 100644 --- a/src/service/http-request-static-functions.js +++ b/src/service/http-request-static-functions.js @@ -1,10 +1,10 @@ -const url = require('node:url') -const { createReadStream } = require('node:fs') -const path = require('node:path') +import url from 'node:url' +import { createReadStream } from 'node:fs' +import path from 'node:path' -const FormData = require('form-data') -const ProxyAgent = require('proxy-agent') -const fetch = require('node-fetch') +import FormData from 'form-data' +import ProxyAgent from 'proxy-agent' +import fetch from 'node-fetch' /** * Create a proxy agent to use wih HTTP requests @@ -121,7 +121,7 @@ const httpSend = async ( } } -module.exports = { +export { createProxyAgent, addAuthenticationToHeaders, httpSend, diff 
--git a/src/service/logger/file-cleanup.service.js b/src/service/logger/file-cleanup.service.js index 3ede91bf2b..b30161f8dd 100644 --- a/src/service/logger/file-cleanup.service.js +++ b/src/service/logger/file-cleanup.service.js @@ -1,6 +1,7 @@ -const path = require('node:path') -const fs = require('node:fs/promises') -const { filesExists } = require('../utils') +import path from 'node:path' +import fs from 'node:fs/promises' + +import { filesExists } from '../utils.js' const CLEAN_UP_INTERVAL = 24 * 3600 * 1000 // One day @@ -8,7 +9,7 @@ const CLEAN_UP_INTERVAL = 24 * 3600 * 1000 // One day * Service used to clean up log files rolled by the pino-roll library * This service should be removed if pino-roll implements this feature one day */ -class FileCleanupService { +export default class FileCleanupService { /** * @param {String} logFolder - The path of the log folder * @param {Logger} logger - The logger @@ -98,5 +99,3 @@ class FileCleanupService { } } } - -module.exports = FileCleanupService diff --git a/src/service/logger/logger.service.js b/src/service/logger/logger.service.js index e843c95d45..64c21ac8b8 100644 --- a/src/service/logger/logger.service.js +++ b/src/service/logger/logger.service.js @@ -1,8 +1,10 @@ -const path = require('node:path') +import path from 'node:path' +import { fileURLToPath } from 'node:url' -const pino = require('pino') -const FileCleanupService = require('./file-cleanup.service') -const { createFolder } = require('../utils') +import pino from 'pino' + +import FileCleanupService from './file-cleanup.service.js' +import { createFolder } from '../utils.js' const LOG_FOLDER_NAME = 'logs' const LOG_FILE_NAME = 'journal.log' @@ -50,6 +52,9 @@ class LoggerService { const { consoleLog, fileLog, sqliteLog, lokiLog } = logParameters targets.push({ target: 'pino-pretty', options: { colorize: true, singleLine: true }, level: consoleLog.level }) + // Get current directory + const dirName = path.dirname(fileURLToPath(import.meta.url)) + const filePath = fileLog.fileName ? path.resolve(LOG_FOLDER_NAME, fileLog.fileName) : path.resolve(LOG_FOLDER_NAME, LOG_FILE_NAME) targets.push({ target: 'pino-roll', @@ -64,7 +69,7 @@ class LoggerService { const sqlDatabaseName = sqliteLog.fileName ? path.resolve(LOG_FOLDER_NAME, sqliteLog.fileName) : path.resolve(LOG_FOLDER_NAME, LOG_DB_NAME) targets.push({ - target: path.join(__dirname, 'sqlite-transport.js'), + target: path.join(dirName, 'sqlite-transport.js'), options: { fileName: sqlDatabaseName, maxNumberOfLogs: sqliteLog.maxNumberOfLogs, @@ -76,7 +81,7 @@ class LoggerService { if (lokiLog?.lokiAddress) { try { targets.push({ - target: path.join(__dirname, 'loki-transport.js'), + target: path.join(dirName, 'loki-transport.js'), options: { username: lokiLog.username, password: lokiLog.password ? 
await this.encryptionService.decryptText(lokiLog.password) : '', @@ -138,7 +143,7 @@ class LoggerService { Error.prepareStackTrace = (err, structuredStackTrace) => structuredStackTrace Error.captureStackTrace(this) // Get the first CallSite outside the logger and outside pino library - const callSite = this.stack.find((line) => line.getFileName().indexOf(path.basename(__filename)) === -1 + const callSite = this.stack.find((line) => line.getFileName().indexOf('logger.service.js') === -1 && line.getFileName().indexOf('pino') === -1) return `${path.parse(callSite.getFileName()).name}(${callSite.getLineNumber()})` } finally { @@ -155,4 +160,4 @@ class LoggerService { } } -module.exports = LoggerService +export default LoggerService diff --git a/src/service/logger/loki-transport.js b/src/service/logger/loki-transport.js index 7f581cefd4..c507c593b0 100644 --- a/src/service/logger/loki-transport.js +++ b/src/service/logger/loki-transport.js @@ -1,5 +1,5 @@ -const fetch = require('node-fetch') -const build = require('pino-abstract-transport') +import fetch from 'node-fetch' +import build from 'pino-abstract-transport' const MAX_BATCH_LOG = 500 const MAX_BATCH_INTERVAL_S = 60 @@ -177,4 +177,4 @@ const createTransport = async (opts) => { }) } -module.exports = createTransport +export default createTransport diff --git a/src/service/logger/sqlite-transport.js b/src/service/logger/sqlite-transport.js index 578f17da4d..3de0ba34e2 100644 --- a/src/service/logger/sqlite-transport.js +++ b/src/service/logger/sqlite-transport.js @@ -1,5 +1,5 @@ -const build = require('pino-abstract-transport') -const db = require('better-sqlite3') +import build from 'pino-abstract-transport' +import db from 'better-sqlite3' const LOGS_TABLE_NAME = 'logs' const DEFAULT_MAX_NUMBER_OF_LOGS = 2000000 @@ -118,4 +118,4 @@ const createTransport = async (opts) => { }) } -module.exports = createTransport +export default createTransport diff --git a/src/service/opcua.service.js b/src/service/opcua.service.js index e1d5e424b0..2a41c8447e 100644 --- a/src/service/opcua.service.js +++ b/src/service/opcua.service.js @@ -1,5 +1,5 @@ -const path = require('node:path') -const fs = require('node:fs/promises') +import path from 'node:path' +import fs from 'node:fs/promises' const mkdir = async (folderPath) => { try { @@ -34,4 +34,4 @@ const initOpcuaCertificateFolders = async (certFolder) => { await fs.copyFile(`${certFolder}/cert.pem`, `${rootFolder}/trusted/certs/oibus_client.pem`) } -module.exports = { initOpcuaCertificateFolders, MAX_NUMBER_OF_NODE_TO_LOG } +export { initOpcuaCertificateFolders, MAX_NUMBER_OF_NODE_TO_LOG } diff --git a/src/service/status.service.js b/src/service/status.service.js index e3620c38b8..4eab6ed8c1 100644 --- a/src/service/status.service.js +++ b/src/service/status.service.js @@ -1,10 +1,10 @@ -const EventEmitter = require('node:events') -const { PassThrough } = require('node:stream') +import EventEmitter from 'node:events' +import { PassThrough } from 'node:stream' /** * Class used to manage certificate files and their content */ -class StatusService { +export default class StatusService { constructor() { this.statusData = {} this.eventEmitter = new EventEmitter() @@ -69,5 +69,3 @@ class StatusService { this.stream?.destroy() } } - -module.exports = StatusService diff --git a/src/service/utils.js b/src/service/utils.js index b1780e5886..750d928350 100644 --- a/src/service/utils.js +++ b/src/service/utils.js @@ -1,10 +1,10 @@ -const fs = require('node:fs/promises') -const { createReadStream, createWriteStream 
} = require('node:fs') -const zlib = require('node:zlib') -const path = require('node:path') +import fs from 'node:fs/promises' +import { createReadStream, createWriteStream } from 'node:fs' +import zlib from 'node:zlib' +import path from 'node:path' -const minimist = require('minimist') -const { DateTime } = require('luxon') +import minimist from 'minimist' +import { DateTime } from 'luxon' const COMPRESSION_LEVEL = 9 @@ -142,7 +142,7 @@ const asyncFilter = async (array, predicate) => { return array.filter((item, index) => results[index]) } -module.exports = { +export { getCommandLineArguments, delay, generateIntervals, diff --git a/src/south/south-ads/south-ads.js b/src/south/south-ads/south-ads.js index 026e38f3f2..c1d0539a18 100644 --- a/src/south/south-ads/south-ads.js +++ b/src/south/south-ads/south-ads.js @@ -1,11 +1,11 @@ -const ads = require('ads-client') +import ads from 'ads-client' -const SouthConnector = require('../south-connector') +import SouthConnector from '../south-connector.js' /** * Class SouthADS - Provides instruction for TwinCAT ADS client connection */ -class SouthADS extends SouthConnector { +export default class SouthADS extends SouthConnector { static category = 'IoT' /** @@ -302,5 +302,3 @@ class SouthADS extends SouthConnector { await super.disconnect() } } - -module.exports = SouthADS diff --git a/src/south/south-connector.js b/src/south/south-connector.js index 063079ade9..589b9cf9e1 100644 --- a/src/south/south-connector.js +++ b/src/south/south-connector.js @@ -1,9 +1,10 @@ -const path = require('node:path') -const EncryptionService = require('../service/encryption.service') -const databaseService = require('../service/database.service') -const CertificateService = require('../service/certificate.service') -const StatusService = require('../service/status.service') -const { generateIntervals, delay, createFolder } = require('../service/utils') +import path from 'node:path' + +import EncryptionService from '../service/encryption.service.js' +import { createConfigDatabase, getConfig, upsertConfig } from '../service/database.service.js' +import CertificateService from '../service/certificate.service.js' +import StatusService from '../service/status.service.js' +import { generateIntervals, delay, createFolder } from '../service/utils.js' const CACHE_DB_FILE_NAME = 'cache.db' @@ -28,7 +29,7 @@ const CACHE_DB_FILE_NAME = 'cache.db' * All other operations (cache, store&forward, communication to North connectors) will be handled by the OIBus engine * and should not be taken care at the South level. 
*/ -class SouthConnector { +export default class SouthConnector { /** * Constructor for SouthConnector * @constructor @@ -98,7 +99,7 @@ class SouthConnector { await this.certificate.init(this.keyFile, this.certFile, this.caFile) await createFolder(this.baseFolder) - this.southDatabase = databaseService.createConfigDatabase(path.resolve(this.baseFolder, CACHE_DB_FILE_NAME)) + this.southDatabase = createConfigDatabase(path.resolve(this.baseFolder, CACHE_DB_FILE_NAME)) const { supportListen, @@ -395,7 +396,7 @@ class SouthConnector { * @returns {String} - The value of the key */ getConfig(configKey) { - return databaseService.getConfig(this.southDatabase, configKey) + return getConfig(this.southDatabase, configKey) } /** @@ -405,8 +406,6 @@ class SouthConnector { * @returns {void} */ setConfig(configKey, value) { - databaseService.upsertConfig(this.southDatabase, configKey, value) + upsertConfig(this.southDatabase, configKey, value) } } - -module.exports = SouthConnector diff --git a/src/south/south-folder-scanner/south-folder-scanner.js b/src/south/south-folder-scanner/south-folder-scanner.js index 31ea164d0c..c9f1b44cba 100644 --- a/src/south/south-folder-scanner/south-folder-scanner.js +++ b/src/south/south-folder-scanner/south-folder-scanner.js @@ -1,13 +1,13 @@ -const fs = require('node:fs/promises') -const path = require('node:path') +import fs from 'node:fs/promises' +import path from 'node:path' -const SouthConnector = require('../south-connector') -const { compress } = require('../../service/utils') +import SouthConnector from '../south-connector.js' +import { compress } from '../../service/utils.js' /** * Class SouthFolderScanner - Retrieve file from a local or remote folder */ -class SouthFolderScanner extends SouthConnector { +export default class SouthFolderScanner extends SouthConnector { static category = 'FileOut' /** @@ -156,5 +156,3 @@ class SouthFolderScanner extends SouthConnector { } } } - -module.exports = SouthFolderScanner diff --git a/src/south/south-modbus/south-modbus.js b/src/south/south-modbus/south-modbus.js index 127f61c38a..f7f2c09355 100644 --- a/src/south/south-modbus/south-modbus.js +++ b/src/south/south-modbus/south-modbus.js @@ -1,14 +1,14 @@ -const net = require('node:net') +import net from 'node:net' -const modbus = require('jsmodbus') +import modbus from 'jsmodbus' -const SouthConnector = require('../south-connector') -const { getNumberOfWords } = require('./utils') +import SouthConnector from '../south-connector.js' +import getNumberOfWords from './utils.js' /** * Class SouthModbus - Provides instruction for Modbus client connection */ -class SouthModbus extends SouthConnector { +export default class SouthModbus extends SouthConnector { static category = 'IoT' /** @@ -244,5 +244,3 @@ class SouthModbus extends SouthConnector { await super.disconnect() } } - -module.exports = SouthModbus diff --git a/src/south/south-modbus/utils.js b/src/south/south-modbus/utils.js index c53ad5aca2..cd9b8678b1 100644 --- a/src/south/south-modbus/utils.js +++ b/src/south/south-modbus/utils.js @@ -14,4 +14,4 @@ const getNumberOfWords = (dataType) => { return 1 } -module.exports = { getNumberOfWords } +export default getNumberOfWords diff --git a/src/south/south-mqtt/south-mqtt.js b/src/south/south-mqtt/south-mqtt.js index aa461c65b3..522c97ae21 100644 --- a/src/south/south-mqtt/south-mqtt.js +++ b/src/south/south-mqtt/south-mqtt.js @@ -1,13 +1,13 @@ -const mqtt = require('mqtt') -const { DateTime } = require('luxon') +import mqtt from 'mqtt' +import { DateTime } from 
'luxon'
-const SouthConnector = require('../south-connector')
-const { formatValue } = require('./utils')
+import SouthConnector from '../south-connector.js'
+import formatValue from './utils.js'

 /**
  * Class SouthMQTT - Subscribe to data topic from a MQTT broker
  */
-class SouthMQTT extends SouthConnector {
+export default class SouthMQTT extends SouthConnector {
   static category = 'IoT'

   /**
@@ -218,5 +218,3 @@ class SouthMQTT extends SouthConnector {
     await super.disconnect()
   }
 }
-
-module.exports = SouthMQTT
diff --git a/src/south/south-mqtt/utils.js b/src/south/south-mqtt/utils.js
index bf652c2652..116642ac9b 100644
--- a/src/south/south-mqtt/utils.js
+++ b/src/south/south-mqtt/utils.js
@@ -1,6 +1,6 @@
-const mqttWildcard = require('mqtt-wildcard')
-const { vsprintf } = require('sprintf-js')
-const { generateDateWithTimezone } = require('../../service/utils')
+import mqttWildcard from 'mqtt-wildcard'
+import { vsprintf } from 'sprintf-js'
+import { generateDateWithTimezone } from '../../service/utils.js'

 /**
  * Get pointId.
@@ -113,4 +113,4 @@ const formatValue = (data, topic, formatOptions, pointsList) => {
   }
 }

-module.exports = { formatValue }
+export default formatValue
diff --git a/src/south/south-opchda/socket-session.js b/src/south/south-opchda/socket-session.js
index 3b3149ddcb..f1340c4cf9 100644
--- a/src/south/south-opchda/socket-session.js
+++ b/src/south/south-opchda/socket-session.js
@@ -1,7 +1,7 @@
 /**
  * Class representing a connected socketSession.
  */
-class SocketSession {
+export default class SocketSession {
   /**
    * @param {Object} socket - The socket parameters
    * @param {Object} logger - The logger
@@ -76,5 +76,3 @@ class SocketSession {
     }
   }
 }
-
-module.exports = SocketSession
diff --git a/src/south/south-opchda/south-opchda.js b/src/south/south-opchda/south-opchda.js
index 7572af50b9..4259918455 100644
--- a/src/south/south-opchda/south-opchda.js
+++ b/src/south/south-opchda/south-opchda.js
@@ -1,8 +1,8 @@
-const { spawn } = require('node:child_process')
+import { spawn } from 'node:child_process'

-const SouthConnector = require('../south-connector')
-const TcpServer = require('./tcp-server')
-const DeferredPromise = require('../../service/deferred-promise')
+import SouthConnector from '../south-connector.js'
+import TcpServer from './tcp-server.js'
+import DeferredPromise from '../../service/deferred-promise.js'

 // Time to wait before closing the connection by timeout and killing the HDA Agent process
 const DISCONNECTION_TIMEOUT = 10000
@@ -12,7 +12,7 @@ const DISCONNECTION_TIMEOUT = 10000
  * This connector communicates with the Agent through a TCP connection thanks to the TCP server created on OIBus
  * and associated to this connector
  */
-class SouthOPCHDA extends SouthConnector {
+export default class SouthOPCHDA extends SouthConnector {
   static category = 'IoT'

   /**
@@ -494,5 +494,3 @@ class SouthOPCHDA extends SouthConnector {
     }
   }
 }
-
-module.exports = SouthOPCHDA
diff --git a/src/south/south-opchda/tcp-server.js b/src/south/south-opchda/tcp-server.js
index 4516860973..3412ad76a7 100644
--- a/src/south/south-opchda/tcp-server.js
+++ b/src/south/south-opchda/tcp-server.js
@@ -1,11 +1,11 @@
-const net = require('node:net')
+import net from 'node:net'

-const SocketSession = require('./socket-session')
+import SocketSession from './socket-session.js'

 /**
  * Class TcpServer - Create a TCP netServer to communicate with the HDA agent
  */
-class TcpServer {
+export default class TcpServer {
   /**
    * Create a TCP Server
    * @param {Number} port - The port to listen to
@@ -110,5 +110,3 @@ class TcpServer {
     }
   }
 }
-
-module.exports = TcpServer
diff --git a/src/south/south-opcua-da/south-opcua-da.js b/src/south/south-opcua-da/south-opcua-da.js
index 27204428c4..70e1b394ea 100644
--- a/src/south/south-opcua-da/south-opcua-da.js
+++ b/src/south/south-opcua-da/south-opcua-da.js
@@ -1,17 +1,18 @@
-const {
+import {
   OPCUAClient,
   MessageSecurityMode,
   SecurityPolicy,
   UserTokenType,
-} = require('node-opcua-client')
-const { OPCUACertificateManager } = require('node-opcua-certificate-manager')
-const SouthConnector = require('../south-connector')
-const { initOpcuaCertificateFolders } = require('../../service/opcua.service')
+} from 'node-opcua-client'
+import { OPCUACertificateManager } from 'node-opcua-certificate-manager'
+
+import SouthConnector from '../south-connector.js'
+import { initOpcuaCertificateFolders } from '../../service/opcua.service.js'

 /**
  * Class SouthOPCUADA - Connect to an OPCUA server in DA (Data Access) mode
  */
-class SouthOPCUADA extends SouthConnector {
+export default class SouthOPCUADA extends SouthConnector {
   static category = 'IoT'

   /**
@@ -269,5 +270,3 @@ class SouthOPCUADA extends SouthConnector {
     }
   }
 }
-
-module.exports = SouthOPCUADA
diff --git a/src/south/south-opcua-ha/south-opcua-ha.js b/src/south/south-opcua-ha/south-opcua-ha.js
index 7cac9bd443..74c82e2c67 100644
--- a/src/south/south-opcua-ha/south-opcua-ha.js
+++ b/src/south/south-opcua-ha/south-opcua-ha.js
@@ -1,4 +1,4 @@
-const {
+import {
   OPCUAClient,
   MessageSecurityMode,
   SecurityPolicy,
@@ -9,16 +9,16 @@ const {
   ReadRawModifiedDetails,
   HistoryReadRequest,
   AggregateFunction,
-} = require('node-opcua-client')
-const { OPCUACertificateManager } = require('node-opcua-certificate-manager')
+} from 'node-opcua-client'
+import { OPCUACertificateManager } from 'node-opcua-certificate-manager'

-const SouthConnector = require('../south-connector')
-const { initOpcuaCertificateFolders, MAX_NUMBER_OF_NODE_TO_LOG } = require('../../service/opcua.service')
+import SouthConnector from '../south-connector.js'
+import { initOpcuaCertificateFolders, MAX_NUMBER_OF_NODE_TO_LOG } from '../../service/opcua.service.js'

 /**
  * Class SouthOPCUAHA - Connect to an OPCUA server in HA (Historian Access) mode
  */
-class SouthOPCUAHA extends SouthConnector {
+export default class SouthOPCUAHA extends SouthConnector {
   static category = 'IoT'

   /**
@@ -496,5 +496,3 @@ class SouthOPCUAHA extends SouthConnector {
     }
   }
 }
-
-module.exports = SouthOPCUAHA
diff --git a/src/south/south-rest/formatters/oia-time-values.js b/src/south/south-rest/formatters/oia-time-values.js
index 2366c2a50e..c4eb28d1c9 100644
--- a/src/south/south-rest/formatters/oia-time-values.js
+++ b/src/south/south-rest/formatters/oia-time-values.js
@@ -1,5 +1,3 @@
-// eslint-disable-next-line max-len
-// http://localhost:4200/api/oianalytics/data/values?data-reference=DCS_CONC_O2_MCT&from=2022-01-01T00%3A00%3A00Z&aggregation=RAW_VALUES&to=2022-02-01T00%3A00%3A00Z&data-reference=DCS_PH_MCT
 /**
  * check data from OIAnalytics API for result of
  * For now, only 'time-values' type is accepted
@@ -76,4 +74,4 @@ const format = (httpResult) => {
   return { httpResults: formattedData, latestDateRetrieved }
 }

-module.exports = format
+export default format
diff --git a/src/south/south-rest/formatters/slims.js b/src/south/south-rest/formatters/slims.js
index 419b66fa79..3e35eb219a 100644
--- a/src/south/south-rest/formatters/slims.js
+++ b/src/south/south-rest/formatters/slims.js
@@ -41,4 +41,4 @@ const format = (httpResult) => {
   return { httpResults: formattedData, latestDateRetrieved: new Date(latestDateRetrieved.getTime() + 1) }
 }

-module.exports = format
+export default format
diff --git a/src/south/south-rest/south-rest.js b/src/south/south-rest/south-rest.js
index 9bee0a5a7f..c7a6b681d1 100644
--- a/src/south/south-rest/south-rest.js
+++ b/src/south/south-rest/south-rest.js
@@ -1,19 +1,19 @@
-const fs = require('node:fs/promises')
-const path = require('node:path')
+import fs from 'node:fs/promises'
+import path from 'node:path'

-const fetch = require('node-fetch')
-const https = require('https')
+import fetch from 'node-fetch'
+import https from 'https'

-const humanizeDuration = require('humanize-duration')
-const SouthConnector = require('../south-connector')
-const { parsers, httpGetWithBody, formatQueryParams, generateCSV } = require('./utils')
-const { replaceFilenameWithVariable, compress } = require('../../service/utils')
+import humanizeDuration from 'humanize-duration'
+import SouthConnector from '../south-connector.js'
+import { parsers, httpGetWithBody, formatQueryParams, generateCSV } from './utils.js'
+import { replaceFilenameWithVariable, compress } from '../../service/utils.js'

 /**
  * Class SouthRest - Retrieve data from REST API
  * The results are parsed through the available parsers
  */
-class SouthRest extends SouthConnector {
+export default class SouthRest extends SouthConnector {
   static category = 'API'

   /**
@@ -281,5 +281,3 @@ class SouthRest extends SouthConnector {
     return response.json()
   }
 }
-
-module.exports = SouthRest
diff --git a/src/south/south-rest/utils.js b/src/south/south-rest/utils.js
index 1464786f9a..fd0a460d55 100644
--- a/src/south/south-rest/utils.js
+++ b/src/south/south-rest/utils.js
@@ -1,10 +1,10 @@
-const https = require('node:https')
-const http = require('node:http')
+import https from 'node:https'
+import http from 'node:http'

-const csv = require('papaparse')
+import csv from 'papaparse'

-const oiaTimeValues = require('./formatters/oia-time-values')
-const slims = require('./formatters/slims')
+import oiaTimeValues from './formatters/oia-time-values.js'
+import slims from './formatters/slims.js'

 const parsers = {
   Raw: (httpResults) => ({ httpResults, latestDateRetrieved: new Date().toISOString() }),
@@ -90,4 +90,4 @@ const generateCSV = (results, delimiter) => {
   return csv.unparse(results, options)
 }

-module.exports = { parsers, httpGetWithBody, formatQueryParams, generateCSV }
+export { parsers, httpGetWithBody, formatQueryParams, generateCSV }
diff --git a/src/south/south-sql/south-sql.js b/src/south/south-sql/south-sql.js
index 451944045d..a24358afc2 100644
--- a/src/south/south-sql/south-sql.js
+++ b/src/south/south-sql/south-sql.js
@@ -1,16 +1,16 @@
-const fs = require('node:fs/promises')
-const path = require('node:path')
+import fs from 'node:fs/promises'
+import path from 'node:path'

-const db = require('better-sqlite3')
-const mssql = require('mssql')
-const mysql = require('mysql2/promise')
-const { Client, types } = require('pg')
-const { DateTime } = require('luxon')
-const humanizeDuration = require('humanize-duration')
+import db from 'better-sqlite3'
+import mssql from 'mssql'
+import mysql from 'mysql2/promise'
+import * as pg from 'pg'
+import { DateTime } from 'luxon'
+import humanizeDuration from 'humanize-duration'

-const SouthConnector = require('../south-connector')
-const { generateCSV, getMostRecentDate, generateReplacementParameters } = require('./utils')
-const { replaceFilenameWithVariable, compress } = require('../../service/utils')
+import SouthConnector from '../south-connector.js'
+import { generateCSV, getMostRecentDate, generateReplacementParameters } from './utils.js'
+import { replaceFilenameWithVariable, compress } from '../../service/utils.js'

 let oracledb
 /**
@@ -22,7 +22,7 @@ let oracledb
  * - PostgreSQL
  * - SQLite
  */
-class SouthSQL extends SouthConnector {
+export default class SouthSQL extends SouthConnector {
   static category = 'DatabaseOut'

   /**
@@ -323,8 +323,8 @@ class SouthSQL extends SouthConnector {
       query_timeout: this.requestTimeout,
     }

-    types.setTypeParser(1114, (str) => new Date(`${str}Z`))
-    const connection = new Client(config)
+    pg.types.setTypeParser(1114, (str) => new Date(`${str}Z`))
+    const connection = new pg.Client(config)
     let data = []
     try {
       await connection.connect()
@@ -433,5 +433,3 @@ class SouthSQL extends SouthConnector {
     this.statusService.updateStatusDataStream({ 'Last SQL request': `"${query}" with ${startTimeLog} ${endTimeLog}` })
   }
 }
-
-module.exports = SouthSQL
diff --git a/src/south/south-sql/utils.js b/src/south/south-sql/utils.js
index 904ad4abd1..d5607dfa47 100644
--- a/src/south/south-sql/utils.js
+++ b/src/south/south-sql/utils.js
@@ -1,5 +1,5 @@
-const csv = require('papaparse')
-const { DateTime } = require('luxon')
+import csv from 'papaparse'
+import { DateTime } from 'luxon'

 /**
  * Format date taking into account the timezone configuration.
@@ -111,4 +111,4 @@ const generateReplacementParameters = (query, startTime, endTime) => {
   return occurrences.map((occurrence) => occurrence.value)
 }

-module.exports = { generateCSV, getMostRecentDate, generateReplacementParameters }
+export { generateCSV, getMostRecentDate, generateReplacementParameters }
diff --git a/src/web-server/controllers/config.controller.js b/src/web-server/controllers/config.controller.js
index 71dc6e0559..8439f1d5b5 100644
--- a/src/web-server/controllers/config.controller.js
+++ b/src/web-server/controllers/config.controller.js
@@ -37,7 +37,7 @@ const activateConfiguration = async (ctx) => {
   }
 }

-module.exports = {
+export default {
   getActiveConfiguration,
   updateConfig,
   activateConfiguration,
diff --git a/src/web-server/controllers/engine.controller.js b/src/web-server/controllers/engine.controller.js
index 8451b5c6d5..37b33962b1 100644
--- a/src/web-server/controllers/engine.controller.js
+++ b/src/web-server/controllers/engine.controller.js
@@ -84,7 +84,7 @@ const aliveSignal = async (ctx) => {
   }
 }

-module.exports = {
+export default {
   getOIBusInfo,
   getNorthList,
   getSouthList,
diff --git a/src/web-server/controllers/file-cache.controller.js b/src/web-server/controllers/file-cache.controller.js
index 9a60d7d29d..0b69613859 100644
--- a/src/web-server/controllers/file-cache.controller.js
+++ b/src/web-server/controllers/file-cache.controller.js
@@ -93,7 +93,7 @@ const retryAllFileErrors = async (ctx) => {
   ctx.ok('All error files retried')
 }

-module.exports = {
+export default {
   getFileErrors,
   removeFileErrors,
   retryFileErrors,
diff --git a/src/web-server/controllers/history-query.controller.js b/src/web-server/controllers/history-query.controller.js
index f9f3bf552c..4cb0e7a957 100644
--- a/src/web-server/controllers/history-query.controller.js
+++ b/src/web-server/controllers/history-query.controller.js
@@ -1,4 +1,4 @@
-const { nanoid } = require('nanoid')
+import { nanoid } from 'nanoid'

 /**
  * Create a new HistoryQuery entry.
  * @param {Object} ctx - The KOA context
@@ -96,7 +96,7 @@ const getStatus = async (ctx) => {
   ctx.ok(status)
 }

-module.exports = {
+export default {
   createHistoryQuery,
   getHistoryQueries,
   getHistoryQueryById,
diff --git a/src/web-server/controllers/log.controller.js b/src/web-server/controllers/log.controller.js
index 6011d0da11..6ba4399953 100644
--- a/src/web-server/controllers/log.controller.js
+++ b/src/web-server/controllers/log.controller.js
@@ -1,5 +1,6 @@
-const path = require('node:path')
-const databaseService = require('../../service/database.service')
+import path from 'node:path'
+
+import { getLogs } from '../../service/database.service.js'

 const LOG_FOLDER = './logs'
 const LOG_DB_NAME = 'journal.db'
@@ -14,7 +15,7 @@ const LOG_DB_NAME = 'journal.db'
  * @param {function} ctx.ok - The context response
  * @return {void}
  */
-const getLogs = (ctx) => {
+const getLogsEndpoint = (ctx) => {
   const databasePath = path.resolve(LOG_FOLDER, LOG_DB_NAME)
   const now = Date.now()
   const dayAgo = new Date(now - 86400000)
@@ -22,7 +23,7 @@ const getLogs = (ctx) => {
   const toDate = ctx.query.toDate || new Date(now).toISOString()
   const verbosity = ctx.query.verbosity?.replace(/[[\]]/g, '').split(',') || 'info'

-  const logs = databaseService.getLogs(databasePath, fromDate, toDate, verbosity)
+  const logs = getLogs(databasePath, fromDate, toDate, verbosity)
   ctx.ok(logs)
 }

@@ -75,4 +76,4 @@ const addLogs = async (ctx) => {
   ctx.ok()
 }

-module.exports = { getLogs, addLogs }
+export default { getLogsEndpoint, addLogs }
diff --git a/src/web-server/controllers/oibus.controller.js b/src/web-server/controllers/oibus.controller.js
index 88b82cd2c3..1491c9e019 100644
--- a/src/web-server/controllers/oibus.controller.js
+++ b/src/web-server/controllers/oibus.controller.js
@@ -29,7 +29,7 @@ const shutdown = async (ctx) => {
   ctx.ok('Shutting down...')
 }

-module.exports = {
+export default {
   reload,
   shutdown,
 }
diff --git a/src/web-server/controllers/web-client.controller.js b/src/web-server/controllers/web-client.controller.js
deleted file mode 100644
index b84cd87c01..0000000000
--- a/src/web-server/controllers/web-client.controller.js
+++ /dev/null
@@ -1,13 +0,0 @@
-const koaSend = require('koa-send')
-
-const serveClient = async (ctx) => {
-  const root = `${__dirname}/../../../build/web-client`
-  const { path } = ctx
-  if (path?.match(/\.(js|js\.map|ico|ttf)$/)) {
-    await koaSend(ctx, path, { root, index: '/index.html' })
-  } else {
-    await koaSend(ctx, '/index.html', { root })
-  }
-}
-
-module.exports = { serveClient }
diff --git a/src/web-server/middlewares/auth.js b/src/web-server/middlewares/auth.js
index 1e92149c88..7ccf0c1a66 100644
--- a/src/web-server/middlewares/auth.js
+++ b/src/web-server/middlewares/auth.js
@@ -1,4 +1,4 @@
-const basicAuth = require('basic-auth')
+import basicAuth from 'basic-auth'

 const DEFAULT_PASSWORD = 'pass'

@@ -36,4 +36,4 @@ const auth = (opts = {}) => {
   }
 }

-module.exports = auth
+export default auth
diff --git a/src/web-server/middlewares/ip-filter.js b/src/web-server/middlewares/ip-filter.js
index 89a7a03a46..f9be03e69a 100644
--- a/src/web-server/middlewares/ip-filter.js
+++ b/src/web-server/middlewares/ip-filter.js
@@ -1,12 +1,8 @@
-/**
- * Module dependencies.
- */
-const micromatch = require('micromatch')
+import micromatch from 'micromatch'

 /**
  * Return ipFilter middleware:
- *
- * @param {string[]} filter - The filter
+ * @param {String[]} filter - The filter
  * @return {Function} - The middleware function
  * @api public
  */
@@ -20,4 +16,4 @@ const ipFilter = (filter) => async (ctx, next) => {
   }
 }

-module.exports = ipFilter
+export default ipFilter
diff --git a/src/web-server/middlewares/web-client.js b/src/web-server/middlewares/web-client.js
new file mode 100644
index 0000000000..9cf2078d5f
--- /dev/null
+++ b/src/web-server/middlewares/web-client.js
@@ -0,0 +1,16 @@
+import koaSend from 'koa-send'
+import path from 'node:path'
+import { fileURLToPath } from 'node:url'
+
+const serveClient = async (ctx) => {
+  const dirName = path.dirname(fileURLToPath(import.meta.url))
+
+  const root = `${dirName}/../../../build/web-client`
+  if (ctx.path?.match(/\.(js|js\.map|ico|ttf)$/)) {
+    await koaSend(ctx, ctx.path, { root, index: '/index.html' })
+  } else {
+    await koaSend(ctx, '/index.html', { root })
+  }
+}
+
+export default serveClient
diff --git a/src/web-server/routes/index.js b/src/web-server/routes/index.js
index 22749c1a62..4cdfdfaab9 100644
--- a/src/web-server/routes/index.js
+++ b/src/web-server/routes/index.js
@@ -1,12 +1,12 @@
-const Router = require('@koa/router')
-const multer = require('@koa/multer')
+import Router from '@koa/router'
+import multer from '@koa/multer'

-const configController = require('../controllers/config.controller')
-const logController = require('../controllers/log.controller')
-const engineController = require('../controllers/engine.controller')
-const historyQueryController = require('../controllers/history-query.controller')
-const oibusController = require('../controllers/oibus.controller')
-const fileCacheController = require('../controllers/file-cache.controller')
+import configController from '../controllers/config.controller.js'
+import logController from '../controllers/log.controller.js'
+import engineController from '../controllers/engine.controller.js'
+import historyQueryController from '../controllers/history-query.controller.js'
+import oibusController from '../controllers/oibus.controller.js'
+import fileCacheController from '../controllers/file-cache.controller.js'

 const router = new Router()

@@ -28,7 +28,7 @@ router.post('/engine/aliveSignal', engineController.aliveSignal)
 router.get('/info', engineController.getOIBusInfo)
 router.get('/reload', oibusController.reload)
 router.get('/shutdown', oibusController.shutdown)
-router.get('/logs', logController.getLogs)
+router.get('/logs', logController.getLogsEndpoint)
 router.post('/logs', logController.addLogs)

 router.post('/history-queries', historyQueryController.createHistoryQuery)
@@ -46,4 +46,4 @@ router.post('/north/:id/cache/file-errors/retry', fileCacheController.retryFileE
 router.delete('/north/:id/cache/file-errors/remove-all', fileCacheController.removeAllFileErrors)
 router.post('/north/:id/cache/file-errors/retry-all', fileCacheController.retryAllFileErrors)

-module.exports = router
+export default router
diff --git a/src/web-server/web-server.js b/src/web-server/web-server.js
index 7f23cdadff..33e7a4b775 100644
--- a/src/web-server/web-server.js
+++ b/src/web-server/web-server.js
@@ -1,13 +1,13 @@
-const Koa = require('koa')
-const cors = require('@koa/cors')
-const bodyParser = require('koa-bodyparser')
-const helmet = require('koa-helmet')
-const respond = require('koa-respond')
+import Koa from 'koa'
+import cors from '@koa/cors'
+import bodyParser from 'koa-bodyparser'
+import helmet from 'koa-helmet'
+import respond from 'koa-respond'

-const authCrypto = require('./middlewares/auth')
-const ipFilter = require('./middlewares/ip-filter')
-const clientController = require('./controllers/web-client.controller')
-const router = require('./routes')
+import router from './routes/index.js'
+import authCrypto from './middlewares/auth.js'
+import ipFilter from './middlewares/ip-filter.js'
+import webClient from './middlewares/web-client.js'

 /**
  * Add a socket to the Koa ctx
@@ -29,7 +29,7 @@ const createSocket = (ctx) => {
 /**
  * Class Server - Provides the web client and establish socket connections.
  */
-class Server {
+export default class Server {
   /**
   * Constructor for Server
   * @constructor
@@ -168,7 +168,7 @@ class Server {
     // Define routes
     this.app.use(router.routes())
    this.app.use(router.allowedMethods())
-    this.app.use(clientController.serveClient)
+    this.app.use(webClient)

     this.webServer = this.app.listen(this.port, () => {
       this.logger.info(`Web server started on ${this.port}`)
@@ -183,5 +183,3 @@ class Server {
     await this.webServer?.close()
   }
 }
-
-module.exports = Server
diff --git a/tests/db/perf.js b/tests/db/perf.js
deleted file mode 100644
index c61236d00d..0000000000
--- a/tests/db/perf.js
+++ /dev/null
@@ -1,38 +0,0 @@
-/* eslint-disable no-await-in-loop */
-/* eslint-disable no-console */
-const { createValuesDatabase, saveValues, getCount, getValuesToSend, removeSentValues } = require('../../src/service/database.service')
-
-const start = () => {
-  console.time('total')
-  const db = createValuesDatabase('./test.db', { wal: false, optimize: false })
-  const timestamp = new Date('01/01/2020')
-  const quality = 'OK'
-  const dataSourceId = 'datasourceid'
-  const received = []
-  const size = 1000000
-  console.time('create.array')
-  for (let i = 0; i < size; i += 1) {
-    received.push({ timestamp: timestamp + 10 * i, pointId: `pointId${i}`, data: { value: i, quality, id: dataSourceId } })
-  }
-  console.timeEnd('create.array')
-  for (let i = 0; i < 10; i += 1) {
-    console.time(`save${i}`)
-    saveValues(db, 'sourceid', received)
-    console.timeEnd(`save${i}`)
-    console.time(`-->count${i}`)
-    const count = getCount(db)
-    process.stdout.write(`${count}`)
-    console.timeEnd(`-->count${i}`)
-    console.time(`-> get${i}`)
-    for (let get = 0; get < 10; get += 1) {
-      const values = getValuesToSend(db, size / 10)
-      process.stdout.write('.')
-      removeSentValues(db, values)
-      process.stdout.write('-')
-    }
-    console.timeEnd(`-> get${i}`)
-  }
-  console.timeEnd('total')
-}
-
-start()
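
Note on the pg rework in src/south/south-sql/south-sql.js: pg ships as CommonJS, so the patch switches from destructuring the require to a namespace import and reaches the same objects through pg.types and pg.Client. A minimal standalone sketch of the pattern, assuming it runs as an ES module script; the connection options below are illustrative only and not taken from this repository:

import * as pg from 'pg'

// Parse TIMESTAMP WITHOUT TIME ZONE (OID 1114) as UTC, mirroring the setTypeParser call in south-sql.js
pg.types.setTypeParser(1114, (str) => new Date(`${str}Z`))

// Hypothetical connection settings, for illustration
const connection = new pg.Client({ host: 'localhost', database: 'example' })
await connection.connect()
await connection.end()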
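
Note on the new src/web-server/middlewares/web-client.js: ES modules do not expose __dirname, so the middleware derives the module directory from import.meta.url. The same pattern in isolation, as a minimal sketch:

import path from 'node:path'
import { fileURLToPath } from 'node:url'

// Equivalent of CommonJS __dirname inside an ES module
const dirName = path.dirname(fileURLToPath(import.meta.url))
console.log(dirName)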
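
Note on the controller modules: replacing module.exports = { ... } with export default { ... } keeps each controller as a single object, so call sites such as logController.getLogsEndpoint in src/web-server/routes/index.js stay unchanged. A minimal sketch of the shape, with a hypothetical module name and handler body used purely for illustration:

// example.controller.js (hypothetical): default-export an object of handlers
const getSomething = async (ctx) => {
  ctx.ok({ status: 'ok' })
}

export default { getSomething }

// A route file would then consume it the same way routes/index.js does:
// import exampleController from './example.controller.js'
// router.get('/something', exampleController.getSomething)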