diff --git a/.gitignore b/.gitignore
index f3f34e58a..7106ecba9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -28,8 +28,6 @@ dist
**/config/main.json
**/config/secret.json
-redis/
-
docs/.DS_Store
/packages/core/kusama-matrix.txt
/packages/core/polkadot-matrix.txt
@@ -43,4 +41,4 @@ docs/.DS_Store
build/**
dist/**
.next/**
-coverage/**
\ No newline at end of file
+coverage/**
diff --git a/Dockerfile-dev b/Dockerfile-dev
deleted file mode 100644
index 733599d1f..000000000
--- a/Dockerfile-dev
+++ /dev/null
@@ -1,16 +0,0 @@
-FROM node:18-slim AS builder
-RUN apt-get update && apt-get install -y curl
-ARG MATRIX_TOKEN
-ARG PACKAGE
-ENV PACKAGE ${PACKAGE}
-COPY . /code
-WORKDIR /code
-RUN echo "building ${PACKAGE}... " && \
- yarn install && \
- echo "yarn install done. Building...." && \
- yarn build && \
- echo "building ${PACKAGE} done." && \
- apt-get update && \
- apt-get install -y libssl-dev && \
- apt-get clean
-CMD yarn run start:dev:${PACKAGE}
diff --git a/README.md b/README.md
index 8aa5181ec..dad866e32 100644
--- a/README.md
+++ b/README.md
@@ -26,16 +26,13 @@ The nominating backend will routinely change its nominations at every era. The b
> A monorepo containing TypeScript microservices for the Thousand Validators Program.
-The following is a monorepo of packages for the Thousand Validators Program. Each package is a microservice that can be run independently or together with other microservices.
+The following is a monorepo of packages for the Thousand Validators Program.
The monorepo is managed using Yarn workspaces, and contains the following packages:
- [`packages/common`](packages/common): A package containing common code shared across all microservices.
- [`packages/core`](packages/core): A package containing the core logic of the Thousand Validators Program.
- [`packages/gateway`](packages/gateway): A package for an API gateway that exposes the backend with a REST API.
- [`packages/telemetry`](packages/telemetry): A package for a telemetry client that monitors uptime
-- [`packages/worker`](packages/worker): A packages for job queue workers that perform background tasks.
-
-
## Installation & Setup
@@ -43,16 +40,9 @@ The monorepo is managed using Yarn workspaces, and contains the following packag
There are a few ways of running the backend with docker containers, either in Kubernetes or with docker-compose.
-There is the `Current / Monolith` way of running instances, and the `Microservice` way of running instances.
-
-`Current / Monolith` Architecture:
-
-![Current / Monolith Architecture](./docs/architecture/monolith.png)
+Current architecture:
-
-`Microservice` Architecture:
-
-![Microservice Architecture](./docs/architecture/microservice.png)
+![Current Architecture](./docs/architecture/monolith.png)
The following are different ways of running in either `Current` or `Microservice` architecture with either `Kusama` or `Polkadot`, and either `Development` or `Production`:
@@ -60,18 +50,6 @@ The following are different ways of running in either `Current` or `Microservice
- Running as a monolith with production values
- `Polkadot Current`
- Running as a monolith with production values
-- `Kusama Microservice`
- - Running as microservices with production values
-- `Polkadot Microservice`
- - Running as microservices with production values
-- `Polkadot Current Dev`
- - Running as a monolith with development values
-- `Kusama Current Dev`
- - Running as a monolith with development values
-- `Kusama Microservice Dev`
- - Running as microservices with development values
-- `Polkadot Microservice Dev`
- - Running as microservices with development values
Each package contains a `Dockerfile`, which is used for running in production, and `Dockerfile-dev`, which is used for development. The development images run with `nodemon`, so each time a file is saved/changed the image is rebuilt and the container restarted. Any changes to the regular `Dockerfile` require a manual rebuild of the docker image.
@@ -86,8 +64,7 @@ cd 1k-validators-be
### Installing System Dependencies
Ensure the following are installed on your machine:
-- [Node.js](https://nodejs.org/en/) (v12 or higher)
-- [Yarn](https://yarnpkg.com/) (v1.22 or higher)
+- [Node.js](https://nodejs.org/en/) (v21 or higher)
- [Docker](https://www.docker.com/) (v19 or higher)
@@ -121,30 +98,6 @@ Polkadot Current / Monolith Dev:
yarn docker:polkadot-current-dev:start
```
-Kusama Microservice Production:
-
-```bash
-yarn docker:kusama-microscervice:start
-```
-
-Kusama Microservice Dev:
-
-```bash
-yarn docker:kusama-microservice-dev:start
-```
-
-Polkadot Microservice Production:
-
-```bash
-yarn docker:polkadot-current:start
-```
-
-Polkadot Microservice Dev:
-
-```bash
-yarn docker:polkadot-current-dev:start
-```
-
### Install Yarn Dependencies
```bash
yarn install
@@ -170,50 +123,6 @@ This will create a configuration file for a Kusama instance that mirrors what is
yarn create-config-polkadot-current
```
-Kusama Microservice Config:
-This will create configuration files for a Kusama instance for each microservice that runs with production values. This runs `core`, `gateway`, `telemetry`, and `worker` as separate processes in their own container - each one needs it's own configuration file.
-```bash
-yarn create-config-kusama-microservice
-```
-
-Polkadot Microservice Config:
-This will create configuration files for a Polkadot instance for each microservice that runs with production values. This runs `core`, `gateway`, `telemetry`, and `worker` as separate processes in their own container - each one needs it's own configuration file.
-```bash
-yarn create-config-polkadot-microservice
-```
-
-
-
-### Running the Microservices
-
-#### Running `Kusama Current` or `Polkadot Current`:
-
-Either is from the same `docker-compose.current.yml` file, and runs only the `core` container, `mongo` container, and `mongo-express` container.
-
-Build and run as detached daemon:
-```bash
-docker compose -f docker-compose.current.yml up -d --build
-```
-
-#### Running `Kusama Microservice` or `Polkadot Microservice`:
-
-Either is from the same `docker-compose.microservice.yml` file. This runs `core`, `gateway`, `telemetry`, and `worker` as separate processes in their own container - each one needs it's own configuration file. It additionally runs a `redis`, `mongo`, and `mongo-express` container.
-
-Build and run as detached daemon:
-```bash
-docker compose -f docker-compose.microservice.yml up -d --build
-```
-
-#### Running `Kusama Current Dev`, `Polkadot Current Dev`, `Kusama Microservice Dev`, or `Polkadot Microservice Dev`
-
-Either is from the same `docker-compose.yml` file.
-
-Build and run as detached daemon:
-```bash
-docker compose -f docker-compose.yml up -d --build
-```
-
-
### Viewing Logs
To view the aggregated logs of all the containers:
diff --git a/docker-compose.current.yml b/docker-compose.current.yml
deleted file mode 100644
index d692b1a9a..000000000
--- a/docker-compose.current.yml
+++ /dev/null
@@ -1,44 +0,0 @@
-version: '3'
-services:
- 1kv-core:
- build:
- context: .
- args:
- PACKAGE: core
- dockerfile: ./Dockerfile
- ports:
- - "127.0.0.1:3300:3300"
- networks:
- testing_net:
- ipv4_address: 172.28.1.1
-
- mongo:
- image: mongo
- restart: always
- ports:
- - "127.0.0.1:27017:27017"
- networks:
- testing_net:
- ipv4_address: 172.28.1.2
-
- mongo-express:
- image: mongo-express
- environment:
- - ME_CONFIG_MONGODB_SERVER=mongo
- - ME_CONFIG_MONGODB_PORT=27017
- - ME_CONFIG_MONGODB_ENABLE_ADMIN=true
- - ME_CONFIG_MONGODB_AUTH_DATABASE=admin
- depends_on:
- - mongo
- ports:
- - "127.0.0.1:8888:8081"
- networks:
- testing_net:
- ipv4_address: 172.28.1.3
-
-networks:
- testing_net:
- ipam:
- driver: default
- config:
- - subnet: 172.28.0.0/16
diff --git a/docker-compose.microservice.yml b/docker-compose.microservice.yml
deleted file mode 100644
index f6928d75c..000000000
--- a/docker-compose.microservice.yml
+++ /dev/null
@@ -1,168 +0,0 @@
-version: '3.8'
-services:
-
- 1kv-core:
- build:
- context: .
- args:
- PACKAGE: core
- dockerfile: ./Dockerfile
- depends_on:
- - redis
- ports:
- - "127.0.0.1:3300:3300"
- healthcheck:
- test: [ "CMD-SHELL", "curl -f http://localhost:3300/healthcheck" ]
- interval: 60s
- timeout: 60s
- retries: 3
- start_period: 180s
- deploy:
- restart_policy:
- condition: on-failure
- delay: 300s
- max_attempts: 3
- window: 120s
- networks:
- testing_net:
- ipv4_address: 172.28.1.1
-
- 1kv-gateway:
- build:
- context: .
- args:
- PACKAGE: gateway
- dockerfile: ./Dockerfile
- depends_on:
- - redis
- ports:
- - "127.0.0.1:3301:3301"
- healthcheck:
- test: ["CMD", "curl", "-f", "http://localhost:3301/healthcheck"]
- interval: 30s
- timeout: 10s
- retries: 3
- start_period: 60s
- deploy:
- restart_policy:
- condition: on-failure
- delay: 300s
- max_attempts: 3
- window: 120s
- networks:
- testing_net:
- ipv4_address: 172.28.1.2
-
-
- 1kv-telemetry:
- build:
- context: .
- args:
- PACKAGE: telemetry
- dockerfile: ./Dockerfile
- depends_on:
- - redis
- ports:
- - "127.0.0.1:3302:3302"
- healthcheck:
- test: [ "CMD-SHELL", "curl -f http://localhost:3302/healthcheck" ]
- interval: 30s
- timeout: 10s
- retries: 3
- start_period: 30s
- deploy:
- restart_policy:
- condition: on-failure
- delay: 300s
- max_attempts: 3
- window: 120s
- networks:
- testing_net:
- ipv4_address: 172.28.1.3
-
- 1kv-worker:
- build:
- context: .
- args:
- PACKAGE: worker
- dockerfile: ./Dockerfile
- deploy:
- # mode: replicated
- # replicas: 8
- restart_policy:
- condition: on-failure
- delay: 30s
- max_attempts: 3
- window: 120s
- depends_on:
- - redis
- - mongo
- ports:
- - "127.0.0.1:3303:3303"
- restart: on-failure
- healthcheck:
- test: [ "CMD-SHELL", "curl -f http://localhost:3303/healthcheck" ]
- interval: 30s
- timeout: 60s
- retries: 3
- start_period: 180s
- networks:
- testing_net:
-# ipv4_address: 172.28.1.3
-
-
- mongo:
- image: mongo
- restart: always
- ports:
- - "127.0.0.1:27017:27017"
- networks:
- testing_net:
- ipv4_address: 172.28.1.4
- logging:
- driver: "json-file"
- options:
- max-size: "50m"
-
- mongo-express:
- image: mongo-express
- environment:
- - ME_CONFIG_MONGODB_SERVER=mongo
- - ME_CONFIG_MONGODB_PORT=27017
- - ME_CONFIG_MONGODB_ENABLE_ADMIN=true
- - ME_CONFIG_MONGODB_AUTH_DATABASE=admin
- depends_on:
- - mongo
- ports:
- - "127.0.0.1:8888:8081"
- networks:
- testing_net:
- ipv4_address: 172.28.1.5
- logging:
- driver: "json-file"
- options:
- max-size: "50m"
-
- redis:
- image: redis:alpine
- expose:
- - "6379"
- volumes:
- - ./redis:/data
- restart: always
- command: ["redis-server", "--bind", "172.28.1.6", "--port", "6379"]
- logging:
- driver: "json-file"
- options:
- max-size: "50m"
- networks:
- testing_net:
- ipv4_address: 172.28.1.6
-
-
-networks:
- testing_net:
- ipam:
- driver: default
- config:
- - subnet: 172.28.0.0/16
diff --git a/docker-compose.yml b/docker-compose.yml
index f849e6b2e..d692b1a9a 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,218 +1,44 @@
-version: '3.8'
+version: '3'
services:
-
- autoheal:
- restart: always
- image: willfarrell/autoheal
- environment:
- - AUTOHEAL_CONTAINER_LABEL=all
- volumes:
- - /var/run/docker.sock:/var/run/docker.sock
-
- 1kv-core:
- environment:
- - PACKAGE=core
- image: 1kv-core:latest
- build:
- context: .
- args:
- PACKAGE: core
- dockerfile: ./Dockerfile-dev
- restart: on-failure
- depends_on:
- - redis
- - mongo
- volumes:
- - .:/code
- ports:
- - "127.0.0.1:3300:3300"
- healthcheck:
- test: [ "CMD-SHELL", "curl -f http://localhost:3300/healthcheck" ]
- interval: 60s
- timeout: 60s
- retries: 3
- start_period: 180s
- deploy:
- restart_policy:
- condition: on-failure
- delay: 300s
- max_attempts: 3
- window: 120s
- networks:
- testing_net:
- ipv4_address: 172.28.1.7
- logging:
- driver: "json-file"
- options:
- max-size: "50m"
-
- 1kv-gateway:
- environment:
- - PACKAGE=gateway
- image: 1kv-gateway:latest
- build:
- context: .
- args:
- PACKAGE: gateway
- dockerfile: ./Dockerfile-dev
- restart: on-failure
- depends_on:
- - redis
- - mongo
- volumes:
- - .:/code
- ports:
- - "127.0.0.1:3301:3301"
- healthcheck:
- test: ["CMD", "curl", "-f", "http://localhost:3301/healthcheck"]
- interval: 30s
- timeout: 10s
- retries: 3
- start_period: 60s
- deploy:
- restart_policy:
- condition: on-failure
- delay: 300s
- max_attempts: 3
- window: 120s
- networks:
- testing_net:
- ipv4_address: 172.28.1.8
- logging:
- driver: "json-file"
- options:
- max-size: "50m"
-
- 1kv-telemetry:
- environment:
- - PACKAGE=telemetry
- image: 1kv-telemetry:latest
- build:
- context: .
- args:
- PACKAGE: telemetry
- dockerfile: ./Dockerfile-dev
- restart: on-failure
- depends_on:
- - redis
- - mongo
- volumes:
- - .:/code
- ports:
- - "127.0.0.1:3302:3302"
- healthcheck:
- test: [ "CMD-SHELL", "curl -f http://localhost:3302/healthcheck" ]
- interval: 30s
- timeout: 10s
- retries: 3
- start_period: 30s
- deploy:
- restart_policy:
- condition: on-failure
- delay: 300s
- max_attempts: 3
- window: 120s
- networks:
- testing_net:
- ipv4_address: 172.28.1.9
- logging:
- driver: "json-file"
- options:
- max-size: "50m"
-
- 1kv-worker:
- environment:
- - PACKAGE=worker
- image: 1kv-worker:latest
- build:
- context: .
- args:
- PACKAGE: worker
- dockerfile: ./Dockerfile-dev
- volumes:
- - .:/code
- deploy:
-# mode: replicated
-# replicas: 8
- restart_policy:
- condition: on-failure
- delay: 30s
- max_attempts: 3
- window: 120s
- depends_on:
- - redis
- - mongo
- ports:
- - "127.0.0.1:3303:3303"
- restart: on-failure
- healthcheck:
- test: [ "CMD-SHELL", "curl -f http://localhost:3303/healthcheck" ]
- interval: 30s
- timeout: 60s
- retries: 3
- start_period: 180s
- networks:
- testing_net:
-# ipv4_address: 172.28.1.16
- logging:
- driver: "json-file"
- options:
- max-size: "50m"
-
- mongo:
- image: mongo
- restart: always
- ports:
- - "127.0.0.1:27017:27017"
- networks:
- testing_net:
- ipv4_address: 172.28.1.12
- logging:
- driver: "json-file"
- options:
- max-size: "50m"
-
- mongo-express:
- image: mongo-express
- environment:
- - ME_CONFIG_MONGODB_SERVER=mongo
- - ME_CONFIG_MONGODB_PORT=27017
- - ME_CONFIG_MONGODB_ENABLE_ADMIN=true
- - ME_CONFIG_MONGODB_AUTH_DATABASE=admin
- depends_on:
- - mongo
- ports:
- - "127.0.0.1:8888:8081"
- networks:
- testing_net:
- ipv4_address: 172.28.1.14
- logging:
- driver: "json-file"
- options:
- max-size: "50m"
-
- redis:
- image: redis:latest
- ports:
- - "127.0.0.1:6379:6379"
- expose:
- - "6379"
- volumes:
- - ./redis:/data
- restart: always
-# command: ["redis-server", "--bind", "172.28.1.13", "--port", "6379"]
- command: ["redis-server", "--port", "6379"]
- logging:
- driver: "json-file"
- options:
- max-size: "50m"
- networks:
- testing_net:
- ipv4_address: 172.28.1.13
+ 1kv-core:
+ build:
+ context: .
+ args:
+ PACKAGE: core
+ dockerfile: ./Dockerfile
+ ports:
+ - "127.0.0.1:3300:3300"
+ networks:
+ testing_net:
+ ipv4_address: 172.28.1.1
+
+ mongo:
+ image: mongo
+ restart: always
+ ports:
+ - "127.0.0.1:27017:27017"
+ networks:
+ testing_net:
+ ipv4_address: 172.28.1.2
+
+ mongo-express:
+ image: mongo-express
+ environment:
+ - ME_CONFIG_MONGODB_SERVER=mongo
+ - ME_CONFIG_MONGODB_PORT=27017
+ - ME_CONFIG_MONGODB_ENABLE_ADMIN=true
+ - ME_CONFIG_MONGODB_AUTH_DATABASE=admin
+ depends_on:
+ - mongo
+ ports:
+ - "127.0.0.1:8888:8081"
+ networks:
+ testing_net:
+ ipv4_address: 172.28.1.3
networks:
- testing_net:
- ipam:
- driver: default
- config:
- - subnet: 172.28.0.0/16
+  testing_net:
+    ipam:
+      driver: default
+      config:
+        - subnet: 172.28.0.0/16
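As a quick illustration of how the services above are reached (a minimal sketch, not part of this change): inside the compose network the backend connects to the `mongo` service by name, while from the host the published `127.0.0.1:27017` port is used. The snippet assumes the `mongoose` dependency already used elsewhere in the repo.

```ts
// Hedged sketch only - not part of this diff. Connects to the mongo service defined above.
import mongoose from "mongoose";

export async function connectToMongo(): Promise<void> {
  // Inside the compose network (as in the sample configs): "mongodb://mongo:27017".
  // From the host machine, use the published port instead.
  await mongoose.connect("mongodb://127.0.0.1:27017");
}
```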
diff --git a/docs/architecture/microservice.png b/docs/architecture/microservice.png
deleted file mode 100644
index 092be7645..000000000
Binary files a/docs/architecture/microservice.png and /dev/null differ
diff --git a/docs/docs/backend/backend.md b/docs/docs/backend/backend.md
index 2a6715774..e2c3da0be 100644
--- a/docs/docs/backend/backend.md
+++ b/docs/docs/backend/backend.md
@@ -18,15 +18,13 @@ A monorepo containing TypeScript microservices for the .
# Overview
-> A monorepo containing TypeScript microservices
+> A monorepo containing TypeScript packages
The monorepo is managed using Yarn workspaces, and contains the following packages:
-- [`packages/common`](packages/common): A package containing common code shared across all microservices.
+- [`packages/common`](packages/common): A package containing common code shared across all packages.
- [`packages/core`](packages/core): A package containing the core logic of the Thousand Validators Program.
- [`packages/gateway`](packages/gateway): A package for an API gateway that exposes the backend with a REST API.
- [`packages/telemetry`](packages/telemetry): A package for a telemetry client that monitors uptime
-- [`packages/worker`](packages/worker): A packages for job queue workers that perform background tasks.
-
## Installation & Setup
@@ -35,39 +33,10 @@ The monorepo is managed using Yarn workspaces, and contains the following packag
There are a few ways of running the backend with docker containers, either in Kubernetes or with docker-compose.
-There is the `Current / Monolith` way of running instances, and the `Microservice` way of running instances.
-
-`Current / Monolith` Architecture:
-
-![Current / Monolith Architecture](../../architecture/monolith.png)
-
-
-`Microservice` Architecture:
-
-![Microservice Architecture](../../architecture/microservice.png)
+Current Architecture:
-The following are different ways of running in either `Current` or `Microservice` architecture with either `Kusama` or `Polkadot`, and either `Development` or `Production`:
+![Current Architecture](../../architecture/monolith.png)
-- `Kusama Current`
- - Running as a monolith with production values
-- `Polkadot Current`
- - Running as a monolith with production values
-- `Kusama Microservice`
- - Running as microservices with production values
-- `Polkadot Microservice`
- - Running as microservices with production values
-- `Polkadot Current Dev`
- - Running as a monolith with development values
-- `Kusama Current Dev`
- - Running as a monolith with development values
-- `Kusama Microservice Dev`
- - Running as microservices with development values
-- `Polkadot Microservice Dev`
- - Running as microservices with development values
-
-Each package contains a `Dockerfile`, which is used for running in production, and `Dockerfile-dev`, which is used for development. The development images will use run with `nodemon` so that each time files is saved/changed it will rebuild the image and restart the container. Any changes for the regular run `Dockerfile` will need a manual rebuilding of the docker image.
-
-The difference of running as either `Current` or `Microservice` is in which docker containers get run with `docker-compose` (Microservices have services separated out as their own containers, and additionally rely on Redis for messages queues). Outside of this everything else (whether it's run as a Kusama or Polkadot instance) is determined by the JSON configuration files that get generated.
### Cloning the Repository
```bash
@@ -180,32 +149,13 @@ yarn create-config-polkadot-microservice
#### Running `Kusama Current` or `Polkadot Current`:
-Either is from the same `docker-compose.current.yml` file, and runs only the `core` container, `mongo` container, and `mongo-express` container.
-
-Build and run as detached daemon:
-```bash
-docker compose -f docker-compose.current.yml up -d --build
-```
-
-#### Running `Kusama Microservice` or `Polkadot Microservice`:
-
-Either is from the same `docker-compose.microservice.yml` file. This runs `core`, `gateway`, `telemetry`, and `worker` as separate processes in their own container - each one needs it's own configuration file. It additionally runs a `redis`, `mongo`, and `mongo-express` container.
-
-Build and run as detached daemon:
-```bash
-docker compose -f docker-compose.microservice.yml up -d --build
-```
-
-#### Running `Kusama Current Dev`, `Polkadot Current Dev`, `Kusama Microservice Dev`, or `Polkadot Microservice Dev`
-
-Either is from the same `docker-compose.yml` file.
+Both run from the same `docker-compose.yml` file, and start only the `core`, `mongo`, and `mongo-express` containers.
Build and run as detached daemon:
```bash
docker compose -f docker-compose.yml up -d --build
```
-
### Viewing Logs
To view the aggregated logs of all the containers:
diff --git a/docs/docs/backend/config.md b/docs/docs/backend/config.md
index 3488b36db..d8203d8bc 100644
--- a/docs/docs/backend/config.md
+++ b/docs/docs/backend/config.md
@@ -360,24 +360,6 @@ The format
- `forceRound`: Boolean. Upon `scorekeeper` starting, it will initiate new nominations immediately, regardless of the time since the last nomination. **required**, defaults to `false`. This can be useful for doing nominations when there are issues with proxy transactions getting stuck, for example.
- `nominating`: Boolean. Indicates whether the nominator account will create and submit transactions or not. **required**. Nominators will only submit transactions when this is set to `true`; when set to `false`, the process will not do anything when a nomination is supposed to occur.
-## Redis
-
-Configuration for Redis. Redis is used when run as microservices for messages queue passing. When run as a monolith it is not used and not required. When run as microservices, `core`, `gateway`, and `worker` will need to have their own redis parameters specified in their respective config files.
-
-An example config may look something like:
-
-```json
- "redis": {
- "enable": true,
- "host": "redis",
- "port": 6379
- },
-```
-
-- `enable`: Boolean. Enables or disables Redis. **optional**. defaults to `false if not specified
-- `host`: String. Redis host. **required** if run as microservices, **optional** if not.
-- `port`: Integer. Redis port. **required** if run as microservices, **optional** if not.
-
## Server
The `gateway` package uses Koa to serve various db queries from specified endpoints. `gateway` may either be run as a monolith or as a microservice. If run as a microservice, the `gateway` service will need to have its own `server` parameters specified in its config file.
@@ -508,11 +490,6 @@ An example `core` config run as microservices may look something like:
"forceRound": false,
"nominating": false
},
- "redis": {
- "enable": true,
- "host": "redis",
- "port": 6379
- },
"server": {
"enable": false,
"port": 3300
@@ -544,11 +521,6 @@ An example gateway config run as microservices may look something like:
"uri": "mongodb://mongo:27017"
}
},
- "redis": {
- "enable": true,
- "host": "redis",
- "port": 6379
- },
"server": {
"enable": true,
"port": 3301,
@@ -640,11 +612,6 @@ An example Worker config run as microservices may look something like:
"useOpenGovDelegation": true,
"useRpc": true,
"useClient": true
- },
- "redis": {
- "enable": true,
- "host": "redis",
- "port": 6379
}
}
diff --git a/package.json b/package.json
index 2b7c7171e..03bd3e6d3 100644
--- a/package.json
+++ b/package.json
@@ -6,7 +6,6 @@
"workspaces": [
"packages/scorekeeper-status-ui",
"packages/common",
- "packages/worker",
"packages/gateway",
"packages/telemetry",
"packages/core"
@@ -17,44 +16,21 @@
"scorekeeper-status-ui:dev": "yarn workspace @1kv/scorekeeper-status-ui dev",
"open:polkadot-apps": "open-cli https://polkadot.js.org/apps/?rpc=ws%3A%2F%2F127.0.0.1%3A9944#/staking",
"open:bull": "open-cli http://localhost:3301/bull",
- "open:mongo-express: ": "open-cli http://localhost:8888",
+ "open:mongo-express": "open-cli http://localhost:8888",
"create-config-kusama-current": "yarn workspace @1kv/core run create-config-kusama-current",
"create-config-polkadot-current": "yarn workspace @1kv/core run create-config-polkadot-current",
- "create-config-kusama-current-dev": "yarn workspace @1kv/core run create-config-kusama-current-dev",
- "create-config-polkadot-current-dev": "yarn workspace @1kv/core run create-config-polkadot-current-dev",
- "create-config-kusama-microservice": "yarn workspaces foreach run create-config-kusama-microservice",
- "create-config-polkadot-microservice": "yarn workspaces foreach run create-config-polkadot-microservice",
- "create-config-kusama-microservice-dev": "yarn workspaces foreach run create-config-kusama-microservice-dev",
- "create-config-polkadot-microservice-dev": "yarn workspaces foreach run create-config-polkadot-microservice-dev",
- "docker:kusama-current:start": "yarn run create-config-kusama-current && docker compose -f docker-compose.current.yml up -d --build && yarn run docker:logs",
- "docker:polkadot-current:start": "yarn run create-config-polkadot-current && docker compose -f docker-compose.current.yml up -d --build && yarn run docker:logs",
- "docker:kusama-current-dev:start": "yarn run create-config-kusama-current-dev && docker compose -f docker-compose.yml up -d --build && yarn run docker:logs",
- "docker:polkadot-current-dev:start": "yarn run create-config-polkadot-current-dev && docker compose -f docker-compose.yml up -d --build && yarn run docker:logs",
- "docker:kusama-microservice:start": "yarn run create-config-kusama-microservice && docker compose -f docker-compose.microservice.yml up -d --build && yarn run docker:logs",
- "docker:kusama-microservice-dev:start": "yarn run create-config-kusama-microservice-dev && docker compose -f docker-compose.yml up -d --build && yarn run docker:logs",
- "docker:polkadot-microservice:start": "yarn run create-config-polkadot-microservice && docker compose -f docker-compose.microservice.yml up -d --build && yarn run docker:logs",
- "docker:polkadot-microservice-dev:start": "yarn run create-config-polkadot-microservice-dev && docker compose -f docker-compose.yml up -d --build && yarn run docker:logs",
+ "docker:kusama-current:start": "yarn run create-config-kusama-current && docker compose -f docker-compose.yml up -d --build && yarn run docker:logs",
+ "docker:polkadot-current:start": "yarn run create-config-polkadot-current && docker compose -f docker-compose.yml up -d --build && yarn run docker:logs",
"docker:logs": "docker compose logs -f",
"docker:logs:core": "docker logs 1k-validators-be-1kv-core-1 -f",
- "docker:logs:gateway": "docker logs 1k-validators-be-1kv-gateway-1 -f",
- "docker:logs:telemetry": "docker logs 1k-validators-be-1kv-telemetry-1 -f",
- "docker:logs:worker": "docker logs 1k-validators-be-1kv-worker-1 -f",
"docker:stop": "docker compose down",
"lint": "yarn workspaces foreach run lint",
"lint:fix": "yarn workspaces foreach run lint:fix",
- "clean": "rm -rf redis/ && yarn workspaces foreach run clean",
+ "clean": "yarn workspaces foreach run clean",
"build": "yarn run docs && yarn workspaces foreach -t run build",
- "build:prod": "yarn workspaces foreach -pt run build:prod",
- "build:core": "yarn workspace @1kv/common run build run build && yarn workspace @1kv/gateway run build && yarn workspace @1kv/telemetry run build && yarn workspace @1kv/worker run build && yarn workspace @1kv/core run build",
"build:clean": "yarn workspaces foreach run clean:build",
- "start:dev:gateway": "yarn workspace @1kv/gateway run start:dev",
- "start:js:gateway": "NODE_OPTIONS='--max-old-space-size=10096' yarn workspace @1kv/gateway run js:start",
"start:dev:core": "yarn workspace @1kv/core run start:dev",
"start:js:core": "NODE_OPTIONS='--max-old-space-size=10096' yarn workspace @1kv/core run js:start",
- "start:dev:telemetry": "yarn workspace @1kv/telemetry run start:dev",
- "start:js:telemetry": "NODE_OPTIONS='--max-old-space-size=10096' yarn workspace @1kv/telemetry run js:start",
- "start:dev:worker": "yarn workspace @1kv/worker run start:dev",
- "start:js:worker": "NODE_OPTIONS='--max-old-space-size=10096' yarn workspace @1kv/worker run js:start",
"test:core": "yarn workspace @1kv/core run test",
"test:common:int": "yarn workspace @1kv/common run test:int",
"test:common:unit": "yarn workspace @1kv/common run test:unit",
@@ -99,7 +75,6 @@
"@1kv/common": "workspace:*",
"@1kv/gateway": "workspace:*",
"@1kv/telemetry": "workspace:*",
- "@1kv/worker": "workspace:*",
"@bull-board/api": "^5.15.1",
"@bull-board/koa": "^5.15.0",
"@koa/router": "^12.0.1",
diff --git a/packages/common/Dockerfile b/packages/common/Dockerfile
deleted file mode 100644
index 8fd6d5bed..000000000
--- a/packages/common/Dockerfile
+++ /dev/null
@@ -1,7 +0,0 @@
-FROM node:17-slim
-ARG MATRIX_TOKEN
-WORKDIR /code
-COPY . .
-RUN ["npm", "i"]
-RUN ["npm", "run", "build"]
-CMD ["npm", "run", "js:start"]
diff --git a/packages/common/Dockerfile-dev b/packages/common/Dockerfile-dev
deleted file mode 100644
index 7b2ec4f15..000000000
--- a/packages/common/Dockerfile-dev
+++ /dev/null
@@ -1,4 +0,0 @@
-FROM node:18 AS builder
-COPY . /app
-WORKDIR /app
-RUN npm install -g typescript && yarn set version 3.2.2 && yarn install && yarn workspace @1kv/common build
diff --git a/packages/common/esbuild.js b/packages/common/esbuild.js
deleted file mode 100644
index 8bd0743c8..000000000
--- a/packages/common/esbuild.js
+++ /dev/null
@@ -1,98 +0,0 @@
-import esbuild from "esbuild";
-
-const isProduction = process.argv.includes("--prod");
-
-const externalPackages = [
- "@polkadot/api-augment",
- "velocityjs",
- "dustjs-linkedin",
- "atpl",
- "liquor",
- "twig",
- "eco",
- "jazz",
- "jqtpl",
- "hamljs",
- "hamlet",
- "whiskers",
- "haml-coffee",
- "hogan.js",
- "templayed",
- "underscore",
- "walrus",
- "mustache",
- "just",
- "ect",
- "mote",
- "toffee",
- "dot",
- "bracket-template",
- "ractive",
- "htmling",
- "babel-core",
- "plates",
- "vash",
- "slm",
- "marko",
- "teacup/lib/express",
- "coffee-script",
- "squirrelly",
- "twing",
- "matris-js-sdk",
- "@1kv/telemetry",
- "@1kv/gateway",
- "@1kv/common",
- "mongoose",
- "logform",
- "winston",
- "ws",
- "form-data",
- "combined-stream",
- "proxy-from-env",
- "follow-redirects",
- "cron",
- "coingecko-api-v3",
- "matrix-js-sdk",
- "node-mongodb-native",
- "mongoose",
-];
-
-const buildOptions = {
- entryPoints: ["src/index.ts"],
- bundle: true,
- minify: isProduction,
- platform: "node",
- target: "node18",
- external: externalPackages,
- outdir: "build",
- tsconfig: "tsconfig.json",
- // splitting: true,
- format: "esm",
- sourcemap: !isProduction,
- logLevel: "info",
-};
-
-if (process.argv.includes("--watch")) {
- buildOptions.watch = {
- onRebuild(error, result) {
- if (error) console.error("watch build failed:", error);
- else
- console.log(
- "watch build succeeded at",
- new Date().toLocaleTimeString(),
- );
- },
- };
- console.log("watch mode enabled");
-}
-
-if (isProduction) {
- buildOptions.define = {
- "process.env.NODE_ENV": "'production'",
- };
-}
-
-esbuild.build(buildOptions).catch((error) => {
- console.error(error);
- process.exit(1);
-});
diff --git a/packages/common/package.json b/packages/common/package.json
index 0d49037e1..c2054dae7 100644
--- a/packages/common/package.json
+++ b/packages/common/package.json
@@ -6,7 +6,6 @@
"types": "build/index.d.ts",
"scripts": {
"build": "tsc --build tsconfig.json",
- "build:prod": "node esbuild.js --prod",
"docker": "docker-compose rm -f; docker-compose build --no-cache; docker-compose up -d",
"lint": "../../node_modules/.bin/eslint 'src/**/*.{js,ts,tsx}' --quiet",
"lint:fix": "../../node_modules/.bin/eslint 'src/**/*.{js,ts,tsx, json}' --quiet --fix",
diff --git a/packages/common/src/config.ts b/packages/common/src/config.ts
index 5f28061e1..b34c2e813 100644
--- a/packages/common/src/config.ts
+++ b/packages/common/src/config.ts
@@ -100,11 +100,6 @@ export type ConfigSchema = {
room: string;
userId: string;
};
- redis: {
- enable: boolean;
- host: string;
- port: number;
- };
proxy: {
timeDelayBlocks: number;
blacklistedAnnouncements: string[];
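For illustration (a hedged sketch, not part of this diff): once the `redis` block is dropped from `ConfigSchema`, a config literal that still carries a `redis` section no longer type-checks, while the remaining sections keep their shape. The import path and the values below are assumptions.

```ts
// Hedged sketch - illustrative values only, import path assumed.
import type { ConfigSchema } from "./config";

export const proxySection: Pick<ConfigSchema, "proxy"> = {
  proxy: {
    timeDelayBlocks: 10850,
    blacklistedAnnouncements: [],
  },
};
// A literal that still included `redis: { host: "redis", port: 6379 }` would now fail
// excess-property checking against ConfigSchema.
```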
diff --git a/packages/common/src/scorekeeper/jobs/JobsRunnerFactory.ts b/packages/common/src/scorekeeper/jobs/JobsRunnerFactory.ts
index 52912d8af..508e99134 100644
--- a/packages/common/src/scorekeeper/jobs/JobsRunnerFactory.ts
+++ b/packages/common/src/scorekeeper/jobs/JobsRunnerFactory.ts
@@ -1,4 +1,3 @@
-import { MicroserviceJobRunner } from "./MicroserviceJobRunner";
import { MonolithJobRunner } from "./MonolithJobRunner";
import { JobsRunner } from "./JobRunner";
import { JobRunnerMetadata } from "./JobsClass";
@@ -7,8 +6,6 @@ export class JobsRunnerFactory {
static makeJobs = async (
metadata: JobRunnerMetadata,
  ): Promise<JobsRunner> => {
- if (!metadata.config?.redis?.host && metadata.config?.redis?.port)
- return new MicroserviceJobRunner(metadata);
- else return new MonolithJobRunner(metadata);
+ return new MonolithJobRunner(metadata);
};
}
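A hedged usage sketch (the call site is not shown in this diff): with the microservice runner removed, `makeJobs` always resolves to a `MonolithJobRunner`, whatever the config contains.

```ts
// Hedged sketch - call-site shape assumed, not taken from the repo.
import { JobsRunnerFactory } from "./JobsRunnerFactory";
import type { JobRunnerMetadata } from "./JobsClass";

export async function startJobs(metadata: JobRunnerMetadata) {
  const runner = await JobsRunnerFactory.makeJobs(metadata);
  return runner; // always a MonolithJobRunner after this change
}
```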
diff --git a/packages/common/src/scorekeeper/jobs/MicroserviceJobRunner.ts b/packages/common/src/scorekeeper/jobs/MicroserviceJobRunner.ts
deleted file mode 100644
index 72502b537..000000000
--- a/packages/common/src/scorekeeper/jobs/MicroserviceJobRunner.ts
+++ /dev/null
@@ -1,83 +0,0 @@
-import { logger } from "../..//index";
-
-// import { otvWorker } from "@1kv/worker";
-import { scorekeeperLabel } from "../scorekeeper";
-import { JobsRunner } from "./JobRunner";
-import { Job } from "./JobsClass";
-
-export class MicroserviceJobRunner extends JobsRunner {
- _startSpecificJobs = async (): Promise => {
- const { config, chaindata } = this.metadata;
- if (!config?.redis?.host || !config?.redis?.port) {
- logger.error(
- `No redis config found. Microservice Jobs will not be started.`,
- scorekeeperLabel,
- );
- return [];
- }
- try {
- // Jobs get run in separate worker
- logger.info(`Starting bullmq Queues and Workers....`, scorekeeperLabel);
- // const releaseMonitorQueue =
- // await otvWorker.queues.createReleaseMonitorQueue(
- // config.redis.host,
- // config.redis.port,
- // );
- // const constraintsQueue = await otvWorker.queues.createConstraintsQueue(
- // config.redis.host,
- // config.redis.port,
- // );
- // const chaindataQueue = await otvWorker.queues.createChainDataQueue(
- // config.redis.host,
- // config.redis.port,
- // );
- // const blockQueue = await otvWorker.queues.createBlockQueue(
- // config.redis.host,
- // config.redis.port,
- // );
- //
- // const removeRepeatableJobs = true;
- // if (removeRepeatableJobs) {
- // logger.info(`remove jobs: ${removeRepeatableJobs}`, scorekeeperLabel);
- // // Remove any previous repeatable jobs
- // await otvWorker.queues.removeRepeatableJobsFromQueues([
- // releaseMonitorQueue,
- // constraintsQueue,
- // chaindataQueue,
- // blockQueue,
- // ]);
- // }
- //
- // const obliterateQueues = false;
- // if (obliterateQueues) {
- // await otvWorker.queues.obliterateQueues([
- // releaseMonitorQueue,
- // constraintsQueue,
- // chaindataQueue,
- // blockQueue,
- // ]);
- // }
- //
- // // Add repeatable jobs to the queues
- // // Queues need to have different repeat time intervals
- // await otvWorker.queues.addReleaseMonitorJob(releaseMonitorQueue, 60000);
- // await otvWorker.queues.addValidityJob(constraintsQueue, 1000001);
- // await otvWorker.queues.addScoreJob(constraintsQueue, 100002);
- // await otvWorker.queues.addActiveValidatorJob(chaindataQueue, 100003);
- // await otvWorker.queues.addEraPointsJob(chaindataQueue, 100006);
- // await otvWorker.queues.addEraStatsJob(chaindataQueue, 110008);
- // await otvWorker.queues.addInclusionJob(chaindataQueue, 100008);
- // await otvWorker.queues.addNominatorJob(chaindataQueue, 100009);
- // await otvWorker.queues.addSessionKeyJob(chaindataQueue, 100010);
- // await otvWorker.queues.addValidatorPrefJob(chaindataQueue, 100101);
- // await otvWorker.queues.addAllBlocks(blockQueue, chaindata);
- // TODO update this as queue job
- // await startLocationStatsJob(this.config, this.chaindata);
- return [];
- } catch (e) {
- logger.error(JSON.stringify(e), scorekeeperLabel);
- logger.error("Error starting microservice jobs", scorekeeperLabel);
- return [];
- }
- };
-}
diff --git a/packages/core/Dockerfile b/packages/core/Dockerfile
deleted file mode 100644
index 8fd6d5bed..000000000
--- a/packages/core/Dockerfile
+++ /dev/null
@@ -1,7 +0,0 @@
-FROM node:17-slim
-ARG MATRIX_TOKEN
-WORKDIR /code
-COPY . .
-RUN ["npm", "i"]
-RUN ["npm", "run", "build"]
-CMD ["npm", "run", "js:start"]
diff --git a/packages/core/Dockerfile-dev b/packages/core/Dockerfile-dev
deleted file mode 100644
index 6e4e0a919..000000000
--- a/packages/core/Dockerfile-dev
+++ /dev/null
@@ -1,7 +0,0 @@
-FROM node:18 AS builder
-COPY . /app
-WORKDIR /app
-RUN yarn set version 3.2.2 && \
- yarn install && \
- yarn workspace @1kv/core build
-CMD yarn start:dev:core
diff --git a/packages/core/config/kusama.microservice.dev.sample.json b/packages/core/config/kusama.microservice.dev.sample.json
deleted file mode 100644
index 7a8cc3128..000000000
--- a/packages/core/config/kusama.microservice.dev.sample.json
+++ /dev/null
@@ -1,137 +0,0 @@
-{
- "global": {
- "dryRun": false,
- "networkPrefix": 2,
- "apiEndpoints": [
- "wss://kusama-rpc-tn.dwellir.com",
- "wss://kusama-rpc.dwellir.com",
- "wss://kusama.public.curie.radiumblock.xyz/ws",
- "wss://rpc.ibp.network/kusama",
- "wss://rpc.dotters.network/kusama",
- "wss://ksm-rpc.stakeworld.io"
- ],
- "apiPeopleEndpoints": ["wss://kusama-people-rpc.polkadot.io"],
- "kusamaBootstrapEndpoint": "https://kusama.w3f.community",
- "polkadotBootstrapEndpoint": "https://polkadot.w3f.community",
- "candidatesUrl": "https://raw.githubusercontent.com/w3f/1k-validators-be/master/candidates/kusama.json"
- },
- "cron": {
- "monitorEnabled": true,
- "clearOfflineEnabled": true,
- "validityEnabled": true,
- "scoreEnabled": true,
- "executionEnabled": true,
- "scorekeeperEnabled": true,
- "rewardClaimingEnabled": true,
- "cancelEnabled": true,
- "staleEnabled": true,
- "eraPointsEnabled": true,
- "eraStatsEnabled": true,
- "activeValidatorEnabled": true,
- "inclusionEnabled": true,
- "sessionKeyEnabled": true,
- "unclaimedErasEnabled": true,
- "validatorPrefEnabled": true,
- "nominatorEnabled": true,
- "locationStatsEnabled": true,
- "blockEnabled": true
- },
- "constraints": {
- "skipConnectionTime": true,
- "skipIdentity": false,
- "skipUnclaimed": false,
- "clientUpgrade": {
- "skip": false
- },
- "minSelfStake": 10000000000000,
- "commission": 150000000,
- "unclaimedEraThreshold": 4,
- "sanctionedGeoArea": {
- "skip": false,
- "sanctionedCountries": ["XXX"],
- "sanctionedRegions": ["XXX"]
- }
- },
- "db": {
- "mongo": {
- "uri": "mongodb://mongo:27017"
- }
- },
- "matrix": {
- "enabled": false,
- "baseUrl": "https://matrix.org",
- "room": "!mdugGIKqSTweIOpTlA:web3.foundation",
- "userId": "@1kv-stats:matrix.org"
- },
- "proxy": {
- "timeDelayBlocks": "10850",
- "blacklistedAnnouncements": []
- },
- "score": {
- "inclusion": "200",
- "spanInclusion": "200",
- "discovered": "5",
- "nominated": "30",
- "rank": "5",
- "bonded": "50",
- "faults": "5",
- "offline": "2",
- "location": "40",
- "region": "10",
- "country": "10",
- "provider": "100",
- "nominations": "100",
- "rpc": "100",
- "client": "100",
- "useInclusion": true,
- "useSpanInclusion": true,
- "useDiscovered": true,
- "useNominated": true,
- "useRank": true,
- "useBonded": true,
- "useFaults": true,
- "useOffline": true,
- "useLocation": true,
- "useRegion": true,
- "useCountry": true,
- "useProvider": true,
- "useNominations": true,
- "useDelegations": true,
- "useOpenGov": true,
- "useOpenGovDelegation": true,
- "useRpc": true,
- "useClient": true
- },
- "scorekeeper": {
- "forceRound": false,
- "nominating": false
- },
- "redis": {
- "enable": true,
- "host": "redis",
- "port": 6379
- },
- "server": {
- "onlyHealth": true,
- "port": 3300
- },
- "telemetry": {
- "blacklistedProviders": [
- "Hetzner Online GmbH",
- "Contabo Inc.",
- "Contabo GmbH"
- ],
- "enable": false,
- "chains": [
- "0xb0a8d493285c2df73290dfb7e61f870f17b41801197a149ca93654499ea3dafe"
- ],
- "host": "wss://telemetry-backend.w3f.community/feed"
- },
- "logger": {
- "level": "info",
- "excludedLabels": [
- "Location",
- "ValidatorPrefJob"
- ]
- }
-}
diff --git a/packages/core/config/kusama.microservice.sample.json b/packages/core/config/kusama.microservice.sample.json
deleted file mode 100644
index 951b6fcaa..000000000
--- a/packages/core/config/kusama.microservice.sample.json
+++ /dev/null
@@ -1,95 +0,0 @@
-{
- "global": {
- "networkPrefix": 2,
- "apiEndpoints": [
- "wss://kusama-rpc-tn.dwellir.com",
- "wss://kusama-rpc.dwellir.com",
- "wss://kusama.public.curie.radiumblock.xyz/ws",
- "wss://rpc.ibp.network/kusama",
- "wss://rpc.dotters.network/kusama",
- "wss://ksm-rpc.stakeworld.io"
- ],
- "apiPeopleEndpoints": ["wss://kusama-people-rpc.polkadot.io"],
- "kusamaBootstrapEndpoint": "https://kusama.w3f.community",
- "polkadotBootstrapEndpoint": "https://polkadot.w3f.community",
- "candidatesUrl": "https://raw.githubusercontent.com/w3f/1k-validators-be/master/candidates/kusama.json"
- },
- "constraints": {
- "skipConnectionTime": true,
- "skipIdentity": false,
- "skipUnclaimed": false,
- "clientUpgrade": {
- "skip": false
- },
- "minSelfStake": 10000000000000,
- "commission": 150000000,
- "unclaimedEraThreshold": 4,
- "sanctionedGeoArea": {
- "skip": false,
- "sanctionedCountries": ["XXX"],
- "sanctionedRegions": ["XXX"]
- }
- },
- "db": {
- "mongo": {
- "uri": "mongodb://mongo:27017"
- }
- },
- "matrix": {
- "enabled": false,
- "baseUrl": "https://matrix.org",
- "room": "!mdugGIKqSTweIOpTlA:web3.foundation",
- "userId": "@1kv-stats:matrix.org"
- },
- "proxy": {
- "timeDelayBlocks": "10850",
- "blacklistedAnnouncements": []
- },
- "score": {
- "inclusion": "200",
- "spanInclusion": "200",
- "discovered": "5",
- "nominated": "30",
- "rank": "5",
- "bonded": "50",
- "faults": "5",
- "offline": "2",
- "location": "40",
- "region": "10",
- "country": "10",
- "provider": "100",
- "nominations": "100"
- },
- "scorekeeper": {
- "forceRound": false,
- "nominating": false
- },
- "redis": {
- "enable": true,
- "host": "redis",
- "port": 6379
- },
- "server": {
- "onlyHealth": true,
- "port": 3300
- },
- "telemetry": {
- "blacklistedProviders": [
- "Hetzner Online GmbH",
- "Contabo Inc.",
- "Contabo GmbH"
- ],
- "enable": false,
- "chains": [
- "0xb0a8d493285c2df73290dfb7e61f870f17b41801197a149ca93654499ea3dafe"
- ],
- "host": "wss://telemetry-backend.w3f.community/feed"
- },
- "logger": {
- "level": "info",
- "excludedLabels": [
- "Location",
- "ValidatorPrefJob"
- ]
- }
-}
diff --git a/packages/core/config/main.sample.json b/packages/core/config/main.sample.json
index e8a952a27..4f02f73ac 100644
--- a/packages/core/config/main.sample.json
+++ b/packages/core/config/main.sample.json
@@ -123,10 +123,6 @@
"enable": true,
"port": 3300
},
- "redis": {
- "host": "redis",
- "port": 6379
- },
"telemetry": {
"blacklistedProviders": [
"Hetzner Online GmbH",
diff --git a/packages/core/config/polkadot.microservice.dev.sample.json b/packages/core/config/polkadot.microservice.dev.sample.json
deleted file mode 100644
index 177a762f7..000000000
--- a/packages/core/config/polkadot.microservice.dev.sample.json
+++ /dev/null
@@ -1,112 +0,0 @@
-{
- "global": {
- "networkPrefix": 0,
- "apiEndpoints": [
- "wss://rpc.dotters.network/polkadot",
- "wss://dot-rpc.stakeworld.io",
- "wss://polkadot.public.curie.radiumblock.co/ws",
- "wss://rpc.ibp.network/polkadot",
- "wss://polkadot-rpc-tn.dwellir.com"
- ],
- "kusamaBootstrapEndpoint": "https://kusama.w3f.community",
- "polkadotBootstrapEndpoint": "https://polkadot.w3f.community",
- "candidatesUrl": "https://raw.githubusercontent.com/w3f/1k-validators-be/master/candidates/polkadot.json"
- },
- "constraints": {
- "skipConnectionTime": false,
- "skipIdentity": false,
- "skipUnclaimed": true,
- "clientUpgrade": {
- "skip": false
- },
- "minSelfStake": 50000000000000,
- "commission": 50000000,
- "unclaimedEraThreshold": 1,
- "sanctionedGeoArea": {
- "skip": false,
- "sanctionedCountries": ["XXX"],
- "sanctionedRegions": ["XXX"]
- }
- },
- "db": {
- "mongo": {
- "uri": "mongodb://mongo:27017"
- }
- },
- "matrix": {
- "enabled": false,
- "baseUrl": "https://matrix.org",
- "room": "!mdugGIKqSTweIOpTlA:web3.foundation",
- "userId": "@1kv-stats:matrix.org"
- },
- "proxy": {
- "timeDelayBlocks": "10850",
- "blacklistedAnnouncements": []
- },
- "score": {
- "inclusion": "200",
- "spanInclusion": "200",
- "discovered": "5",
- "nominated": "30",
- "rank": "5",
- "bonded": "50",
- "faults": "5",
- "offline": "2",
- "location": "40",
- "region": "10",
- "country": "10",
- "provider": "100",
- "nominations": "100",
- "rpc": "100",
- "client": "100",
- "useInclusion": true,
- "useSpanInclusion": true,
- "useDiscovered": true,
- "useNominated": true,
- "useRank": true,
- "useBonded": true,
- "useFaults": true,
- "useOffline": true,
- "useLocation": true,
- "useRegion": true,
- "useCountry": true,
- "useProvider": true,
- "useNominations": true,
- "useDelegations": true,
- "useOpenGov": true,
- "useOpenGovDelegation": true,
- "useRpc": true,
- "useClient": true
- },
- "scorekeeper": {
- "forceRound": false,
- "nominating": false
- },
- "redis": {
- "host": "redis",
- "port": 6379
- },
- "server": {
- "onlyHealth": true,
- "port": 3300
- },
- "telemetry": {
- "blacklistedProviders": [
- "Hetzner Online GmbH",
- "Contabo Inc.",
- "Contabo GmbH"
- ],
- "enable": false,
- "chains": [
- "0xb0a8d493285c2df73290dfb7e61f870f17b41801197a149ca93654499ea3dafe"
- ],
- "host": "wss://telemetry-backend.w3f.community/feed"
- },
- "logger": {
- "level": "info",
- "excludedLabels": [
- "Location",
- "ValidatorPrefJob"
- ]
- }
-}
diff --git a/packages/core/config/polkadot.microservice.sample.json b/packages/core/config/polkadot.microservice.sample.json
deleted file mode 100644
index 9ad6bf7d3..000000000
--- a/packages/core/config/polkadot.microservice.sample.json
+++ /dev/null
@@ -1,113 +0,0 @@
-{
- "global": {
- "networkPrefix": 0,
- "apiEndpoints": [
- "wss://rpc.dotters.network/polkadot",
- "wss://dot-rpc.stakeworld.io",
- "wss://polkadot.public.curie.radiumblock.co/ws",
- "wss://rpc.ibp.network/polkadot",
- "wss://polkadot-rpc-tn.dwellir.com"
- ],
- "kusamaBootstrapEndpoint": "https://kusama.w3f.community",
- "polkadotBootstrapEndpoint": "https://polkadot.w3f.community",
- "candidatesUrl": "https://raw.githubusercontent.com/w3f/1k-validators-be/master/candidates/polkadot.json"
- },
- "constraints": {
- "skipConnectionTime": false,
- "skipIdentity": false,
- "skipUnclaimed": true,
- "clientUpgrade": {
- "skip": false
- },
- "minSelfStake": 50000000000000,
- "commission": 50000000,
- "unclaimedEraThreshold": 1,
- "sanctionedGeoArea": {
- "skip": false,
- "sanctionedCountries": ["XXX"],
- "sanctionedRegions": ["XXX"]
- }
- },
- "db": {
- "mongo": {
- "uri": "mongodb://mongo:27017"
- }
- },
- "matrix": {
- "enabled": false,
- "baseUrl": "https://matrix.org",
- "room": "!mdugGIKqSTweIOpTlA:web3.foundation",
- "userId": "@1kv-stats:matrix.org"
- },
- "proxy": {
- "timeDelayBlocks": "10850",
- "blacklistedAnnouncements": []
- },
- "score": {
- "inclusion": "200",
- "spanInclusion": "200",
- "discovered": "5",
- "nominated": "30",
- "rank": "5",
- "bonded": "50",
- "faults": "5",
- "offline": "2",
- "location": "40",
- "region": "10",
- "country": "10",
- "provider": "100",
- "nominations": "100",
- "rpc": "100",
- "client": "100",
- "useInclusion": true,
- "useSpanInclusion": true,
- "useDiscovered": true,
- "useNominated": true,
- "useRank": true,
- "useBonded": true,
- "useFaults": true,
- "useOffline": true,
- "useLocation": true,
- "useRegion": true,
- "useCountry": true,
- "useProvider": true,
- "useNominations": true,
- "useDelegations": true,
- "useOpenGov": true,
- "useOpenGovDelegation": true,
- "useRpc": true,
- "useClient": true
- },
- "scorekeeper": {
- "forceRound": false,
- "nominating": false
- },
- "redis": {
- "enable": true,
- "host": "redis",
- "port": 6379
- },
- "server": {
- "onlyHealth": true,
- "port": 3300
- },
- "telemetry": {
- "blacklistedProviders": [
- "Hetzner Online GmbH",
- "Contabo Inc.",
- "Contabo GmbH"
- ],
- "enable": false,
- "chains": [
- "0xb0a8d493285c2df73290dfb7e61f870f17b41801197a149ca93654499ea3dafe"
- ],
- "host": "wss://telemetry-backend.w3f.community/feed"
- },
- "logger": {
- "level": "info",
- "excludedLabels": [
- "Location",
- "ValidatorPrefJob"
- ]
- }
-}
diff --git a/packages/core/esbuild.js b/packages/core/esbuild.js
deleted file mode 100644
index 1f0e2edf6..000000000
--- a/packages/core/esbuild.js
+++ /dev/null
@@ -1,87 +0,0 @@
-import esbuild from "esbuild";
-
-const externalPackages = [
- "@polkadot/api-augment",
- "velocityjs",
- "dustjs-linkedin",
- "atpl",
- "liquor",
- "twig",
- "eco",
- "jazz",
- "jqtpl",
- "hamljs",
- "hamlet",
- "whiskers",
- "haml-coffee",
- "hogan.js",
- "templayed",
- "underscore",
- "walrus",
- "mustache",
- "just",
- "ect",
- "mote",
- "toffee",
- "dot",
- "bracket-template",
- "ractive",
- "htmling",
- "babel-core",
- "plates",
- "vash",
- "slm",
- "marko",
- "teacup/lib/express",
- "coffee-script",
- "squirrelly",
- "twing",
- "matris-js-sdk",
- "@1kv/telemetry",
- "@1kv/gateway",
- "@1kv/common",
-];
-
-const isProduction = process.argv.includes("--prod");
-
-const buildOptions = {
- entryPoints: ["src/index.ts"],
- bundle: true,
- minify: isProduction,
- platform: "node",
- target: "node18",
- external: externalPackages,
- outdir: "build",
- // entryNames: "[dir]/[name].mjs",
- tsconfig: "tsconfig.json",
- splitting: true,
- format: "esm",
- // outExtension: { ".js": ".mjs" },
- sourcemap: !isProduction,
- logLevel: "info",
-};
-
-if (process.argv.includes("--watch")) {
- buildOptions.watch = {
- onRebuild(error, result) {
- if (error) console.error("watch build failed:", error);
- else
- console.log(
- "watch build succeeded at",
- new Date().toLocaleTimeString(),
- );
- },
- };
- console.log("watch mode enabled");
-}
-
-if (isProduction) {
- buildOptions.define = {
- "process.env.NODE_ENV": "'production'",
- };
-}
-
-esbuild.build(buildOptions).catch((error) => {
- console.error(error);
- process.exit(1);
-});
diff --git a/packages/core/package.json b/packages/core/package.json
index 7f3434789..179579053 100644
--- a/packages/core/package.json
+++ b/packages/core/package.json
@@ -7,23 +7,15 @@
"open:polkadot-apps": "open-cli https://polkadot.js.org/apps/?rpc=ws%3A%2F%2F127.0.0.1%3A9944#/staking",
"open:telemetry": "open-cli http://localhost:3000",
"build": "tsc --build tsconfig.json",
- "build:prod": "node esbuild.js --prod",
"docker": "docker-compose rm -f; docker-compose build --no-cache; docker-compose up -d",
"clean": "../../node_modules/.bin/rimraf ./build tsconfig.tsbuildinfo combined.log",
"clean:build": "yarn clean && yarn build",
- "create-config-kusama-current-dev": "cp ./config/kusama.current.dev.sample.json ./config/main.json && cp ./config/secret.sample.json ./config/secret.json",
- "create-config-polkadot-current-dev": "cp ./config/polkadot.current.dev.sample.json ./config/main.json && cp ./config/secret.sample.json ./config/secret.json",
- "create-config-kusama-microservice-dev": "cp ./config/kusama.microservice.dev.sample.json ./config/main.json && cp ./config/secret.sample.json ./config/secret.json",
- "create-config-polkadot-microservice-dev": "cp ./config/polkadot.microservice.dev.sample.json ./config/main.json && cp ./config/secret.sample.json ./config/secret.json",
"create-config-kusama-current": "cp ./config/kusama.current.sample.json ./config/main.json && cp ./config/secret.sample.json ./config/secret.json",
"create-config-polkadot-current": "cp ./config/polkadot.current.sample.json ./config/main.json && cp ./config/secret.sample.json ./config/secret.json",
- "create-config-kusama-microservice": "cp ./config/kusama.microservice.sample.json ./config/main.json && cp ./config/secret.sample.json ./config/secret.json",
- "create-config-polkadot-microservice": "cp ./config/polkadot.microservice.sample.json ./config/main.json && cp ./config/secret.sample.json ./config/secret.json",
"lint": "../../node_modules/.bin/eslint 'src/**/*.{js,ts,tsx}' --quiet",
"lint:fix": "../../node_modules/.bin/eslint 'src/**/*.{js,ts,tsx, json}' --quiet --fix",
- "start": "NODE_OPTIONS='--max-old-space-size=8096' ts-node src/index.ts start",
- "start:watch": "nodemon --watch build --exec node --max-old-space-size=8024 build/index.js",
- "start:dev": "node esbuild.js --watch & npx nodemon --watch build --exec node --max-old-space-size=8024 build/index.mjs",
+ "start:dev": "NODE_OPTIONS='--max-old-space-size=8096' ts-node src/index.ts start",
+ "watch": "npx nodemon --watch src -x \"yarn start:dev\"",
"js:start": "NODE_OPTIONS='--max-old-space-size=10096' node build/index.js start",
"test": "yarn test:unit && yarn test:int",
"test:unit": "jest --config=jest.unit.config.js --verbose",
diff --git a/packages/core/tsconfig.json b/packages/core/tsconfig.json
index a934b32cf..acb774fb1 100644
--- a/packages/core/tsconfig.json
+++ b/packages/core/tsconfig.json
@@ -10,8 +10,7 @@
"paths": {
"@1kv/common": ["../common/build"],
"@1kv/gateway": ["../gateway/build"],
- "@1kv/telemetry": ["../telemetry/build"],
- "@1kv/worker": ["../worker/build"]
+ "@1kv/telemetry": ["../telemetry/build"]
},
/* Basic Options */
"target": "es2015", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017','ES2018' or 'ESNEXT'. */
@@ -74,7 +73,6 @@
"references": [
{ "path": "../common" },
{ "path": "../gateway" },
- { "path": "../telemetry" },
- { "path": "../worker" }
+ { "path": "../telemetry" }
]
}
diff --git a/packages/gateway/Dockerfile b/packages/gateway/Dockerfile
deleted file mode 100644
index 8fd6d5bed..000000000
--- a/packages/gateway/Dockerfile
+++ /dev/null
@@ -1,7 +0,0 @@
-FROM node:17-slim
-ARG MATRIX_TOKEN
-WORKDIR /code
-COPY . .
-RUN ["npm", "i"]
-RUN ["npm", "run", "build"]
-CMD ["npm", "run", "js:start"]
diff --git a/packages/gateway/Dockerfile-dev b/packages/gateway/Dockerfile-dev
deleted file mode 100644
index 33b50a4a7..000000000
--- a/packages/gateway/Dockerfile-dev
+++ /dev/null
@@ -1,8 +0,0 @@
-FROM node:18 AS builder
-COPY . /app
-WORKDIR /app
-RUN yarn set version 3.2.2 && \
- yarn install && \
- yarn workspace @1kv/common build && \
- yarn workspace @1kv/gateway build
-CMD yarn start:dev:gateway
diff --git a/packages/gateway/config/sample.json b/packages/gateway/config/sample.json
index 01c25d60b..9be5e3971 100644
--- a/packages/gateway/config/sample.json
+++ b/packages/gateway/config/sample.json
@@ -4,11 +4,6 @@
"uri": "mongodb://mongo:27017"
}
},
- "redis": {
- "host": "redis",
- "port": 6379,
- "cache": 180000
- },
"server": {
"port": 3301
}
diff --git a/packages/gateway/esbuild.mjs b/packages/gateway/esbuild.mjs
deleted file mode 100644
index df171caab..000000000
--- a/packages/gateway/esbuild.mjs
+++ /dev/null
@@ -1,97 +0,0 @@
-import esbuild from "esbuild";
-import packageJson from '../../package.json' assert {type: 'json'};
-
-const deps = Object.keys(packageJson.dependencies || {});
-
-
-const externalPackages = [
- "@polkadot/api-augment",
- "velocityjs",
- "dustjs-linkedin",
- "atpl",
- "liquor",
- "twig",
- "eco",
- "jazz",
- "jqtpl",
- "hamljs",
- "hamlet",
- "whiskers",
- "haml-coffee",
- "hogan.js",
- "templayed",
- "underscore",
- "walrus",
- "mustache",
- "just",
- "ect",
- "mote",
- "toffee",
- "dot",
- "bracket-template",
- "ractive",
- "htmling",
- "babel-core",
- "plates",
- "vash",
- "slm",
- "marko",
- "teacup/lib/express",
- "coffee-script",
- "squirrelly",
- "twing",
- "matris-js-sdk",
- "@1kv/telemetry",
- "@1kv/gateway",
- "tty",
- "koa",
- "coingecko-api-v3",
- "node-mongodb-native",
- "mongoose",
- "events"
- "@bull-board"
- // "@1kv/common"
-];
-
-const isProduction = process.argv.includes("--prod");
-
-const buildOptions = {
- entryPoints: ["src/index.ts"],
- bundle: true,
- minify: isProduction,
- platform: "node",
- target: "node18",
- external: [...deps, ...externalPackages],
- outdir: "build",
- tsconfig: "tsconfig.json",
- splitting: false,
- format: "esm",
- chunkNames: "chunks/[name]-[hash]",
- sourcemap: !isProduction,
- logLevel: "info",
-};
-
-if (process.argv.includes("--watch")) {
- buildOptions.watch = {
- onRebuild(error, result) {
- if (error) console.error("watch build failed:", error);
- else
- console.log(
- "watch build succeeded at",
- new Date().toLocaleTimeString(),
- );
- },
- };
- console.log("watch mode enabled");
-}
-
-if (isProduction) {
- buildOptions.define = {
- "process.env.NODE_ENV": "'production'",
- };
-}
-
-esbuild.build(buildOptions).catch((error) => {
- console.error(error);
- process.exit(1);
-});
diff --git a/packages/gateway/package.json b/packages/gateway/package.json
index 0ad3e4e1b..88ca7d299 100644
--- a/packages/gateway/package.json
+++ b/packages/gateway/package.json
@@ -6,13 +6,8 @@
"types": "build/index.d.ts",
"scripts": {
"build": "tsc --build tsconfig.json",
- "build:prod": "node esbuild.mjs --prod",
"clean": "../../node_modules/.bin/rimraf ./build tsconfig.tsbuildinfo combined.log",
"clean:build": "yarn clean && yarn build",
- "create-config-kusama-microservice": "cp config/sample.json ./config/main.json",
- "create-config-kusama-microservice-dev": "cp config/sample.json ./config/main.json",
- "create-config-polkadot-microservice": "cp config/sample.json ./config/main.json",
- "create-config-polkadot-microservice-dev": "cp config/sample.json ./config/main.json",
"docker": "docker-compose rm -f; docker-compose build --no-cache; docker-compose up -d",
"lint": "../../node_modules/.bin/eslint 'src/**/*.{js,ts,tsx}' --quiet",
"lint:fix": "../../node_modules/.bin/eslint 'src/**/*.{js,ts,tsx}' --quiet --fix",
diff --git a/packages/gateway/src/constants.ts b/packages/gateway/src/constants.ts
new file mode 100644
index 000000000..d2d3fb711
--- /dev/null
+++ b/packages/gateway/src/constants.ts
@@ -0,0 +1 @@
+export const gatewayLabel = { label: "Gateway" };
diff --git a/packages/gateway/src/controllers/Candidate.ts b/packages/gateway/src/controllers/Candidate.ts
index 9e6fe75c8..962fb5686 100644
--- a/packages/gateway/src/controllers/Candidate.ts
+++ b/packages/gateway/src/controllers/Candidate.ts
@@ -1,7 +1,7 @@
import * as CandidateService from "../services/Candidate";
import { response } from "./index";
import { logger } from "@1kv/common";
-import { gatewayLabel } from "../run";
+import { gatewayLabel } from "../constants";
import { requestEmitter } from "../events/requestEmitter";
export default class CandidateController {
diff --git a/packages/gateway/src/controllers/EraPoints.ts b/packages/gateway/src/controllers/EraPoints.ts
index 3d57849ad..a775772b0 100644
--- a/packages/gateway/src/controllers/EraPoints.ts
+++ b/packages/gateway/src/controllers/EraPoints.ts
@@ -1,7 +1,7 @@
import { response } from "./index";
import * as EraPointsService from "../services/EraPoints";
import { logger } from "@1kv/common";
-import { gatewayLabel } from "../run";
+import { gatewayLabel } from "../constants";
import { requestEmitter } from "../events/requestEmitter";
export default class EraPointsController {
diff --git a/packages/gateway/src/controllers/Location.ts b/packages/gateway/src/controllers/Location.ts
index 4d7503bb2..ee00173be 100644
--- a/packages/gateway/src/controllers/Location.ts
+++ b/packages/gateway/src/controllers/Location.ts
@@ -1,7 +1,7 @@
import { response } from "./index";
import * as LocationService from "../services/LocationService";
import { logger } from "@1kv/common";
-import { gatewayLabel } from "../run";
+import { gatewayLabel } from "../constants";
import { requestEmitter } from "../events/requestEmitter";
export default class LocationController {
diff --git a/packages/gateway/src/controllers/Nomination.ts b/packages/gateway/src/controllers/Nomination.ts
index 341fbbb50..15b492b65 100644
--- a/packages/gateway/src/controllers/Nomination.ts
+++ b/packages/gateway/src/controllers/Nomination.ts
@@ -1,7 +1,7 @@
import { logger } from "@1kv/common";
import { response } from "./index";
import * as NominationService from "../services/Nomination";
-import { gatewayLabel } from "../run";
+import { gatewayLabel } from "../constants";
import { requestEmitter } from "../events/requestEmitter";
export default class NominationController {
diff --git a/packages/gateway/src/controllers/Nominator.ts b/packages/gateway/src/controllers/Nominator.ts
index 2ebab5ace..4b979d611 100644
--- a/packages/gateway/src/controllers/Nominator.ts
+++ b/packages/gateway/src/controllers/Nominator.ts
@@ -1,7 +1,7 @@
import { response } from "./index";
import * as NominatorService from "../services/Nominator";
import { logger } from "@1kv/common";
-import { gatewayLabel } from "../run";
+import { gatewayLabel } from "../constants";
import { requestEmitter } from "../events/requestEmitter";
export default class NominatorController {
diff --git a/packages/gateway/src/controllers/Score.ts b/packages/gateway/src/controllers/Score.ts
index 470d00eae..d7a924f62 100644
--- a/packages/gateway/src/controllers/Score.ts
+++ b/packages/gateway/src/controllers/Score.ts
@@ -1,7 +1,7 @@
import { response } from "./index";
import * as ScoreService from "../services/Score";
import { logger } from "@1kv/common";
-import { gatewayLabel } from "../run";
+import { gatewayLabel } from "../constants";
import { requestEmitter } from "../events/requestEmitter";
export default class ScoreController {
diff --git a/packages/gateway/src/controllers/Stats.ts b/packages/gateway/src/controllers/Stats.ts
index d97040fde..deb0a53a9 100644
--- a/packages/gateway/src/controllers/Stats.ts
+++ b/packages/gateway/src/controllers/Stats.ts
@@ -1,7 +1,7 @@
import { logger } from "@1kv/common";
import { response } from "./index";
import * as StatsService from "../services/Stats";
-import { gatewayLabel } from "../run";
+import { gatewayLabel } from "../constants";
export default class StatsController {
public static async getLocationStats(context: any): Promise {
diff --git a/packages/gateway/src/controllers/Validators.ts b/packages/gateway/src/controllers/Validators.ts
index 7323a09c9..15be90560 100644
--- a/packages/gateway/src/controllers/Validators.ts
+++ b/packages/gateway/src/controllers/Validators.ts
@@ -1,10 +1,8 @@
import { response } from "./index";
import * as ValidatorService from "../services/Validator";
import { logger } from "@1kv/common";
-import { gatewayLabel } from "../run";
import { requestEmitter } from "../events/requestEmitter";
-
-const label = { label: "Gateway" };
+import { gatewayLabel } from "../constants";
export default class ValidatorController {
public static async getLatestValidatorSet(context: any): Promise {
diff --git a/packages/gateway/src/routes/setupRoutes.ts b/packages/gateway/src/routes/setupRoutes.ts
index a9c10136f..9ed1cba3f 100644
--- a/packages/gateway/src/routes/setupRoutes.ts
+++ b/packages/gateway/src/routes/setupRoutes.ts
@@ -131,87 +131,6 @@ export const setupDocs = (app: Koa, config: Config.ConfigSchema): boolean => {
}
};
-// export const setupMicroserviceRoutes = async (
-// app: Koa,
-// config: Config.ConfigSchema,
-// queues: Queue[],
-// ): Promise<boolean> => {
-// try {
-// if (config?.redis?.host && config?.redis?.port) {
-// await addQueues(config, queues);
-// setupBullBoard(app, queues);
-// }
-// return true;
-// } catch (e) {
-// logger.error(`Error setting up microservice routes: ${e}`, {
-// label: "Gateway",
-// });
-// return false;
-// }
-// };
-
-// Add BullMQ Queues for Microservice Jobs
-// export const addQueues = async (
-// config: Config.ConfigSchema,
-// queues: Queue[],
-// ): Promise<boolean> => {
-// try {
-// const releaseMonitorQueue = new Queue("releaseMonitor", {
-// connection: {
-// host: config?.redis?.host,
-// port: config?.redis?.port,
-// },
-// });
-// const constraintsQueue = new Queue("constraints", {
-// connection: {
-// host: config?.redis?.host,
-// port: config?.redis?.port,
-// },
-// });
-// const chaindataQueue = new Queue("chaindata", {
-// connection: {
-// host: config?.redis?.host,
-// port: config?.redis?.port,
-// },
-// });
-// const blockQueue = new Queue("block", {
-// connection: {
-// host: config?.redis?.host,
-// port: config?.redis?.port,
-// },
-// });
-//
-// queues.push(
-// releaseMonitorQueue,
-// constraintsQueue,
-// chaindataQueue,
-// blockQueue,
-// );
-// return true;
-// } catch (e) {
-// logger.error(`Error adding queues: ${e}`, { label: "Gateway" });
-// return false;
-// }
-// };
-
-// export const setupBullBoard = (app: Koa, queues: Queue[]): boolean => {
-// try {
-// const serverAdapter = new KoaAdapter();
-// createBullBoard({
-// queues: queues.map((queue) => {
-// return new BullMQAdapter(queue);
-// }),
-// serverAdapter,
-// });
-// serverAdapter.setBasePath("/bull");
-// app.use(serverAdapter.registerPlugin());
-// return true;
-// } catch (e) {
-// logger.error(`Error setting up BullBoard: ${e}`, { label: "Gateway" });
-// return false;
-// }
-// };
-
export const setupCache = (app: Koa, configCache: number): boolean => {
try {
logger.info(`Cache set to ${configCache}`, { label: "Gateway" });
@@ -273,9 +192,6 @@ export const setupRoutes = async (
setupScorekeeperRoutes(router, app, scorekeeper);
setupDocs(app, config);
- // Setup microservice routes if Redis is configured in config
- // await setupMicroserviceRoutes(app, config, queues);
-
// Serve all other routes
app.use(router.routes());
}
diff --git a/packages/gateway/src/run.ts b/packages/gateway/src/run.ts
deleted file mode 100644
index cbc03aa3b..000000000
--- a/packages/gateway/src/run.ts
+++ /dev/null
@@ -1,41 +0,0 @@
-import Server from "./server";
-import { Command } from "commander";
-import { Config, Db, logger } from "@1kv/common";
-import path from "path";
-
-const version = "v2.6.87";
-
-const catchAndQuit = async (fn: any) => {
- try {
- await fn;
- } catch (e) {
- console.error(JSON.stringify(e));
- process.exit(1);
- }
-};
-
-export const gatewayLabel = { label: "Gateway" };
-
-const start = async (cmd: { config: string }) => {
- const config = Config.loadConfig(path.join(cmd.config, "main.json"));
-
- logger.info(`starting the backend services. ${version}`, gatewayLabel);
- const db = await Db.create(config.db.mongo.uri);
- const server = new Server(config);
- server.start();
-};
-
-const program = new Command();
-
-if (require.main === module) {
- program
- .option(
- "--config ",
- "The path to the config directory.",
- "config",
- )
- .action((cmd: { config: string }) => catchAndQuit(start(cmd)));
-
- program.version(version);
- program.parse(process.argv);
-}
diff --git a/packages/gateway/tsconfig.json b/packages/gateway/tsconfig.json
index 00fa5fc96..00a07d46c 100644
--- a/packages/gateway/tsconfig.json
+++ b/packages/gateway/tsconfig.json
@@ -3,7 +3,6 @@
"exclude": ["test", "node_modules"],
"include": ["src/**/*.ts", "src/**/*.yml"],
"files": [
- "src/run.ts",
"src/index.ts",
"src/server.ts",
],
diff --git a/packages/scorekeeper-status-ui/package.json b/packages/scorekeeper-status-ui/package.json
index 8355cfa08..3bba366a9 100644
--- a/packages/scorekeeper-status-ui/package.json
+++ b/packages/scorekeeper-status-ui/package.json
@@ -6,7 +6,6 @@
"scripts": {
"dev": "vite",
"build": "tsc && vite build",
- "build:prod": "vite build",
"preview": "vite preview",
"clean": "rm -rf dist"
},
diff --git a/packages/telemetry/Dockerfile b/packages/telemetry/Dockerfile
deleted file mode 100644
index 8fd6d5bed..000000000
--- a/packages/telemetry/Dockerfile
+++ /dev/null
@@ -1,7 +0,0 @@
-FROM node:17-slim
-ARG MATRIX_TOKEN
-WORKDIR /code
-COPY . .
-RUN ["npm", "i"]
-RUN ["npm", "run", "build"]
-CMD ["npm", "run", "js:start"]
diff --git a/packages/telemetry/Dockerfile-dev b/packages/telemetry/Dockerfile-dev
deleted file mode 100644
index 029641d54..000000000
--- a/packages/telemetry/Dockerfile-dev
+++ /dev/null
@@ -1,8 +0,0 @@
-FROM node:18 AS builder
-COPY . /app
-WORKDIR /app
-RUN yarn set version 3.2.2 && \
- yarn install && \
- yarn workspace @1kv/common build && \
- yarn workspace @1kv/telemetry build
-CMD yarn start:dev:telemetry
diff --git a/packages/telemetry/esbuild.mjs b/packages/telemetry/esbuild.mjs
deleted file mode 100644
index e610f3526..000000000
--- a/packages/telemetry/esbuild.mjs
+++ /dev/null
@@ -1,85 +0,0 @@
-import esbuild from "esbuild";
-
-const externalPackages = [
- "@polkadot/api-augment",
- "velocityjs",
- "dustjs-linkedin",
- "atpl",
- "liquor",
- "twig",
- "eco",
- "jazz",
- "jqtpl",
- "hamljs",
- "hamlet",
- "whiskers",
- "haml-coffee",
- "hogan.js",
- "templayed",
- "underscore",
- "walrus",
- "mustache",
- "just",
- "ect",
- "mote",
- "toffee",
- "dot",
- "bracket-template",
- "ractive",
- "htmling",
- "babel-core",
- "plates",
- "vash",
- "slm",
- "marko",
- "teacup/lib/express",
- "coffee-script",
- "squirrelly",
- "twing",
- "matris-js-sdk",
- "@1kv/telemetry",
- "@1kv/gateway",
-];
-
-const isProduction = process.argv.includes("--prod");
-
-const buildOptions = {
- entryPoints: ["src/index.ts"],
- bundle: true,
- minify: isProduction,
- platform: "node",
- target: "node18",
- external: externalPackages,
- outdir: "build",
- tsconfig: "tsconfig.json",
- splitting: true,
- format: "esm",
- chunkNames: "chunks/[name]-[hash]",
- sourcemap: !isProduction,
- logLevel: "error",
-};
-
-if (process.argv.includes("--watch")) {
- buildOptions.watch = {
- onRebuild(error, result) {
- if (error) console.error("watch build failed:", error);
- else
- console.log(
- "watch build succeeded at",
- new Date().toLocaleTimeString(),
- );
- },
- };
- console.log("watch mode enabled");
-}
-
-if (isProduction) {
- buildOptions.define = {
- "process.env.NODE_ENV": "'production'",
- };
-}
-
-esbuild.build(buildOptions).catch((error) => {
- console.error(error);
- process.exit(1);
-});
diff --git a/packages/telemetry/package.json b/packages/telemetry/package.json
index aa8dd4a59..2d6c199f6 100644
--- a/packages/telemetry/package.json
+++ b/packages/telemetry/package.json
@@ -6,13 +6,8 @@
"types": "build/index.d.ts",
"scripts": {
"build": "tsc --build tsconfig.json",
- "build:prod": "node esbuild.mjs --prod",
"clean": "../../node_modules/.bin/rimraf ./build tsconfig.tsbuildinfo combined.log",
"clean:build": "yarn clean && yarn build",
- "create-config-kusama-microservice": "cp config/kusama.sample.json ./config/main.json",
- "create-config-kusama-microservice-dev": "cp config/kusama.sample.json ./config/main.json",
- "create-config-polkadot-microservice": "cp config/polkadot.sample.json ./config/main.json",
- "create-config-polkadot-microservice-dev": "cp config/polkadot.sample.json ./config/main.json",
"docker": "docker-compose rm -f; docker-compose build --no-cache; docker-compose up -d",
"lint": "tsc --noEmit && eslint 'src/**/*.{js,ts,tsx}' --quiet",
"lint:fix": "tsc --noEmit && eslint 'src/**/*.{js,ts,tsx}' --quiet --fix",
diff --git a/packages/telemetry/src/run.ts b/packages/telemetry/src/run.ts
deleted file mode 100644
index a4f5def90..000000000
--- a/packages/telemetry/src/run.ts
+++ /dev/null
@@ -1,54 +0,0 @@
-import { Config, Db, logger } from "@1kv/common";
-import { Command } from "commander";
-import path from "path";
-import { Server } from "@1kv/gateway";
-import TelemetryClient from "./Telemetry/Telemetry";
-
-const version = "v2.6.87";
-
-export const telemetryLabel = { label: "Telemetry" };
-
-const catchAndQuit = async (fn: any) => {
- try {
- await fn;
- } catch (e) {
- console.error(e);
- process.exit(1);
- }
-};
-
-export const createServer = async (config) => {
- try {
- logger.info(`Creating Server`, telemetryLabel);
- const server = new Server(config);
- await server.start();
- } catch (e) {
- logger.error(JSON.stringify(e));
- process.exit(1);
- }
-};
-const start = async (cmd: { config: string }) => {
- const config = Config.loadConfig(path.join(cmd.config, "main.json"));
- await createServer(config);
-
- logger.info(`Starting the backend services: ${version}`, telemetryLabel);
-
- const db = await Db.create(config.db.mongo.uri);
- const telemetry = new TelemetryClient(config);
- await telemetry.start();
-};
-
-const program = new Command();
-
-if (require.main === module) {
- program
- .option(
- "--config ",
- "The path to the config directory.",
- "config",
- )
- .action((cmd: { config: string }) => catchAndQuit(start(cmd)));
-
- program.version(version);
- program.parse(process.argv);
-}
diff --git a/packages/telemetry/tsconfig.json b/packages/telemetry/tsconfig.json
index 028391696..f0798cf42 100644
--- a/packages/telemetry/tsconfig.json
+++ b/packages/telemetry/tsconfig.json
@@ -4,7 +4,6 @@
"include": ["src/**/*.ts"],
"files": [
- "src/run.ts",
"src/index.ts",
],
"compilerOptions": {
diff --git a/packages/worker/.dockerignore b/packages/worker/.dockerignore
deleted file mode 100644
index 3ced431fe..000000000
--- a/packages/worker/.dockerignore
+++ /dev/null
@@ -1,5 +0,0 @@
-*
-!package.json
-!tsconfig.json
-!src
-!config
diff --git a/packages/worker/.eslintrc.js b/packages/worker/.eslintrc.js
deleted file mode 100644
index fb9c98819..000000000
--- a/packages/worker/.eslintrc.js
+++ /dev/null
@@ -1,16 +0,0 @@
-module.exports = {
- parser: "@typescript-eslint/parser", // Specifies the ESLint parser
- extends: [
- "plugin:@typescript-eslint/recommended", // Uses the recommended rules from the @typescript-eslint/eslint-plugin
- "plugin:security/recommended",
- "prettier", // Uses eslint-config-prettier to disable ESLint rules from @typescript-eslint/eslint-plugin that would conflict with prettier
- "plugin:prettier/recommended" // Enables eslint-plugin-prettier and eslint-config-prettier. This will display prettier errors as ESLint errors. Make sure this is always the last configuration in the extends array.
- ],
- parserOptions: {
- ecmaVersion: 2018, // Allows for the parsing of modern ECMAScript features
- sourceType: "module" // Allows for the use of imports
- },
- rules: {
- "@typescript-eslint/ban-ts-comment": "off"
- }
-};
diff --git a/packages/worker/Dockerfile b/packages/worker/Dockerfile
deleted file mode 100644
index 8fd6d5bed..000000000
--- a/packages/worker/Dockerfile
+++ /dev/null
@@ -1,7 +0,0 @@
-FROM node:17-slim
-ARG MATRIX_TOKEN
-WORKDIR /code
-COPY . .
-RUN ["npm", "i"]
-RUN ["npm", "run", "build"]
-CMD ["npm", "run", "js:start"]
diff --git a/packages/worker/Dockerfile-dev b/packages/worker/Dockerfile-dev
deleted file mode 100644
index 33b50a4a7..000000000
--- a/packages/worker/Dockerfile-dev
+++ /dev/null
@@ -1,8 +0,0 @@
-FROM node:18 AS builder
-COPY . /app
-WORKDIR /app
-RUN yarn set version 3.2.2 && \
- yarn install && \
- yarn workspace @1kv/common build && \
- yarn workspace @1kv/gateway build
-CMD yarn start:dev:gateway
diff --git a/packages/worker/config/kusama.sample.json b/packages/worker/config/kusama.sample.json
deleted file mode 100644
index f48623a57..000000000
--- a/packages/worker/config/kusama.sample.json
+++ /dev/null
@@ -1,61 +0,0 @@
-{
- "global": {
- "apiEndpoints": [
- "wss://kusama-rpc.polkadot.io",
- "wss://kusama-rpc.dwellir.com",
- "wss://kusama.public.curie.radiumblock.xyz/ws"
- ]
- },
- "db": {
- "mongo": {
- "uri": "mongodb://mongo:27017"
- }
- },
- "server": {
- "onlyHealth": true,
- "port": 3303
- },
- "score": {
- "inclusion": "200",
- "spanInclusion": "200",
- "discovered": "5",
- "nominated": "30",
- "rank": "5",
- "bonded": "50",
- "faults": "5",
- "offline": "2",
- "location": "40",
- "region": "10",
- "country": "10",
- "provider": "100",
- "nominations": "100",
- "delegations": "60",
- "openGov": "100",
- "openGovDelegation": "100",
- "rpc": "100",
- "client": "100",
- "useInclusion": true,
- "useSpanInclusion": true,
- "useDiscovered": true,
- "useNominated": true,
- "useRank": true,
- "useBonded": true,
- "useFaults": true,
- "useOffline": true,
- "useLocation": true,
- "useRegion": true,
- "useCountry": true,
- "useProvider": true,
- "useNominations": true,
- "useDelegations": true,
- "useOpenGov": true,
- "useOpenGovDelegation": true,
- "useRpc": true,
- "useClient": true
- },
- "redis": {
- "enable": true,
- "host": "redis",
- "port": 6379
- }
-}
diff --git a/packages/worker/config/polkadot.sample.json b/packages/worker/config/polkadot.sample.json
deleted file mode 100644
index 862534925..000000000
--- a/packages/worker/config/polkadot.sample.json
+++ /dev/null
@@ -1,59 +0,0 @@
-{
- "global": {
- "apiEndpoints": [
- "wss://kusama-rpc.polkadot.io"
- ]
- },
- "db": {
- "mongo": {
- "uri": "mongodb://mongo:27017"
- }
- },
- "server": {
- "onlyHealth": true,
- "port": 3303
- },
- "score": {
- "inclusion": "200",
- "spanInclusion": "200",
- "discovered": "5",
- "nominated": "30",
- "rank": "5",
- "bonded": "50",
- "faults": "5",
- "offline": "2",
- "location": "40",
- "region": "10",
- "country": "10",
- "provider": "100",
- "nominations": "100",
- "delegations": "60",
- "openGov": "100",
- "openGovDelegation": "100",
- "rpc": "100",
- "client": "100",
- "useInclusion": true,
- "useSpanInclusion": true,
- "useDiscovered": true,
- "useNominated": true,
- "useRank": true,
- "useBonded": true,
- "useFaults": true,
- "useOffline": true,
- "useLocation": true,
- "useRegion": true,
- "useCountry": true,
- "useProvider": true,
- "useNominations": true,
- "useDelegations": true,
- "useOpenGov": true,
- "useOpenGovDelegation": true,
- "useRpc": true,
- "useClient": true
- },
- "redis": {
- "enable": true,
- "host": "redis",
- "port": 6379
- }
-}
diff --git a/packages/worker/esbuild.js b/packages/worker/esbuild.js
deleted file mode 100644
index 1b57f9983..000000000
--- a/packages/worker/esbuild.js
+++ /dev/null
@@ -1,85 +0,0 @@
-const esbuild = require("esbuild");
-
-const externalPackages = [
- "@polkadot/api-augment",
- "velocityjs",
- "dustjs-linkedin",
- "atpl",
- "liquor",
- "twig",
- "eco",
- "jazz",
- "jqtpl",
- "hamljs",
- "hamlet",
- "whiskers",
- "haml-coffee",
- "hogan.js",
- "templayed",
- "underscore",
- "walrus",
- "mustache",
- "just",
- "ect",
- "mote",
- "toffee",
- "dot",
- "bracket-template",
- "ractive",
- "htmling",
- "babel-core",
- "plates",
- "vash",
- "slm",
- "marko",
- "teacup/lib/express",
- "coffee-script",
- "squirrelly",
- "twing",
- "matris-js-sdk",
- "@1kv/telemetry",
- "@1kv/gateway",
-];
-
-const isProduction = process.argv.includes("--prod");
-
-const buildOptions = {
- entryPoints: ["src/index.ts"],
- bundle: true,
- minify: isProduction,
- platform: "node",
- target: "node18",
- external: externalPackages,
- outdir: "build",
- tsconfig: "tsconfig.json",
- splitting: true,
- format: "esm",
- chunkNames: "chunks/[name]-[hash]",
- sourcemap: !isProduction,
- logLevel: "info",
-};
-
-if (process.argv.includes("--watch")) {
- buildOptions.watch = {
- onRebuild(error, result) {
- if (error) console.error("watch build failed:", error);
- else
- console.log(
- "watch build succeeded at",
- new Date().toLocaleTimeString(),
- );
- },
- };
- console.log("watch mode enabled");
-}
-
-if (isProduction) {
- buildOptions.define = {
- "process.env.NODE_ENV": "'production'",
- };
-}
-
-esbuild.build(buildOptions).catch((error) => {
- console.error(error);
- process.exit(1);
-});
diff --git a/packages/worker/package.json b/packages/worker/package.json
deleted file mode 100644
index f4f9c808a..000000000
--- a/packages/worker/package.json
+++ /dev/null
@@ -1,31 +0,0 @@
-{
- "name": "@1kv/worker",
- "version": "3.3.0",
- "description": "Services for running the Thousand Validator Program.",
- "main": "build/index.js",
- "types": "build/index.d.ts",
- "scripts": {
- "build": "tsc --build tsconfig.json",
- "build:prod": "node esbuild.js --prod",
- "clean": "../../node_modules/.bin/rimraf ./build tsconfig.tsbuildinfo combined.log",
- "clean:build": "yarn clean && yarn build",
- "create-config-kusama-microservice": "cp config/kusama.sample.json ./config/main.json",
- "create-config-kusama-microservice-dev": "cp config/kusama.sample.json ./config/main.json",
- "create-config-polkadot-microservice": "cp config/polkadot.sample.json ./config/main.json",
- "create-config-polkadot-microservice-dev": "cp config/polkadot.sample.json ./config/main.json",
- "docker": "docker-compose rm -f; docker-compose build --no-cache; docker-compose up -d",
- "lint": "tsc --noEmit && eslint 'src/**/*.{js,ts,tsx}' --quiet",
- "lint:fix": "tsc --noEmit && eslint 'src/**/*.{js,ts,tsx}' --quiet --fix",
- "start": "NODE_OPTIONS='--max-old-space-size=14096' ts-node src/run.ts start",
- "start:watch": "NODE_OPTIONS='--max-old-space-size=16096' nodemon --watch build --exec node build/run.js",
- "start:dev": "concurrently \"tsc -w\" \"yarn start:watch\"",
- "js:start": "NODE_OPTIONS='--max-old-space-size=6096' node build/run.js start"
- },
- "author": "Web3 Foundation ",
- "license": "GPL-3.0",
- "config": {
- "mongodbMemoryServer": {
- "debug": "on"
- }
- }
-}
diff --git a/packages/worker/prettierrc.js b/packages/worker/prettierrc.js
deleted file mode 100644
index aed7d4a6d..000000000
--- a/packages/worker/prettierrc.js
+++ /dev/null
@@ -1,7 +0,0 @@
-module.exports = {
- semi: true,
- trailingComma: "all",
- singleQuote: true,
- printWidth: 120,
- tabWidth: 2,
-};
diff --git a/packages/worker/src/index.ts b/packages/worker/src/index.ts
deleted file mode 100644
index 488dd3f6f..000000000
--- a/packages/worker/src/index.ts
+++ /dev/null
@@ -1,5 +0,0 @@
-import * as worker from "./worker";
-import * as queues from "./queues";
-import * as workers from "./workers";
-
-export const otvWorker = { worker, queues, workers };
diff --git a/packages/worker/src/queues/BlockQueue.ts b/packages/worker/src/queues/BlockQueue.ts
deleted file mode 100644
index 37030e49c..000000000
--- a/packages/worker/src/queues/BlockQueue.ts
+++ /dev/null
@@ -1,74 +0,0 @@
-import { Queue } from "bullmq";
-import { ChainData, logger, queries } from "@1kv/common";
-
-const label = { label: "BlockQueue" };
-
-export const createBlockQueue = async (host, port) => {
- const queue = new Queue("block", {
- connection: {
- host: host,
- port: port,
- },
- });
- return queue;
-};
-
-// Add new blocks to the queue
-export const addAllBlocks = async (queue: Queue, chaindata: ChainData) => {
- // Get the latest block from the chain
- const latestBlock = await chaindata.getLatestBlock();
-
- // get the indices of the earliest and latest blocks in the database
- const blockIndex = await queries.getBlockIndex();
- const latest = blockIndex?.latest || latestBlock;
- const earliest = blockIndex?.earliest || latestBlock;
-
- // If there is no block index in the database, add the latest block to the queue and set the block index to the latest block
- if (!blockIndex) {
- await addBlockJob(queue, latestBlock);
- await queries.setBlockIndex(latestBlock, latestBlock);
- }
-
- const threshold = 500000;
-
- // If the current chain block is higher than what is in the db, add all the blocks from the current one until the most recent indexed one to the queue
- if (blockIndex?.latest && latestBlock > blockIndex.latest) {
- logger.info(
- `latest block: ${latestBlock} db block: ${blockIndex.latest}, adding ${
- latestBlock - blockIndex.latest
- } blocks to queue`,
- label,
- );
- for (let i = blockIndex.latest + 1; i < latestBlock; i++) {
- await addBlockJob(queue, i);
- }
- }
-
- if (earliest) {
- const targetEarliest = earliest - threshold > 0 ? earliest - threshold : 0;
- logger.info(
- `earliest ${earliest} target earliest: ${targetEarliest}, adding ${
- earliest - targetEarliest
- } to the queue`,
- label,
- );
- for (let i = earliest; i > targetEarliest; i--) {
- await addBlockJob(queue, i);
- }
- }
-};
-
-export const addBlockJob = async (queue: Queue, blockNumber: number) => {
- await queue.add(
- "block",
- { blockNumber: blockNumber },
- {
- jobId: `block${blockNumber}`,
- attempts: 10,
- backoff: {
- type: "exponential",
- delay: 1000,
- },
- },
- );
-};
diff --git a/packages/worker/src/queues/ChainDataQueue.ts b/packages/worker/src/queues/ChainDataQueue.ts
deleted file mode 100644
index 35162227d..000000000
--- a/packages/worker/src/queues/ChainDataQueue.ts
+++ /dev/null
@@ -1,169 +0,0 @@
-import { Queue } from "bullmq";
-import { Jobs, logger, queries } from "@1kv/common";
-
-const label = { label: "ChainDataQueue" };
-
-export const createChainDataQueue = async (host, port) => {
- const queue = await new Queue("chaindata", {
- connection: {
- host: host,
- port: port,
- },
- });
- return queue;
-};
-
-export const addActiveValidatorJob = async (queue: Queue, repeat: number) => {
- logger.info(`adding Active Validator Job to Queue.....`, label);
- await queue.add(
- "chaindata",
- { jobType: Jobs.ACTIVE_VALIDATOR_JOB },
- {
- repeat: {
- every: repeat,
- // limit: 1000,
- },
- attempts: 10,
- backoff: {
- type: "exponential",
- delay: 1000,
- },
- },
- );
-};
-
-export const addEraPointsJob = async (queue: Queue, repeat: number) => {
- logger.info(`adding Era Points Job to Queue.....`);
- await queue.add(
- "chaindata",
- { jobType: Jobs.ERA_POINTS_JOB },
- {
- repeat: {
- every: repeat,
- // limit: 1000,
- },
- attempts: 10,
- backoff: {
- type: "exponential",
- delay: 1000,
- },
- },
- );
-};
-
-export const addEraStatsJob = async (queue: Queue, repeat: number) => {
- logger.info(`adding Era Stats Job to Queue.....`);
- await queue.add(
- "chaindata",
- { jobType: Jobs.ERA_STATS_JOB },
- {
- repeat: {
- every: repeat,
- // limit: 1000,
- },
- attempts: 10,
- backoff: {
- type: "exponential",
- delay: 1000,
- },
- },
- );
-};
-
-export const addInclusionJob = async (queue: Queue, repeat: number) => {
- logger.info(`adding Inclusion Job to Queue.....`);
- await queue.add(
- "chaindata",
- { jobType: Jobs.INCLUSION_JOB },
- {
- repeat: {
- every: repeat,
- // limit: 1000,
- },
- attempts: 10,
- backoff: {
- type: "exponential",
- delay: 1000,
- },
- },
- );
-};
-
-export const addNominatorJob = async (queue: Queue, repeat: number) => {
- logger.info(`adding Nominator Job to Queue.....`);
- await queue.add(
- "chaindata",
- { jobType: Jobs.NOMINATOR_JOB },
- {
- repeat: {
- every: repeat,
- // limit: 1000,
- },
- attempts: 10,
- backoff: {
- type: "exponential",
- delay: 1000,
- },
- },
- );
-};
-
-export const addSessionKeyJob = async (queue: Queue, repeat: number) => {
- logger.info(`adding Session Key Job to Queue.....`);
- await queue.add(
- "chaindata",
- { jobType: Jobs.SESSION_KEY_JOB },
- {
- repeat: {
- every: repeat,
- // limit: 1000,
- },
- attempts: 10,
- backoff: {
- type: "exponential",
- delay: 1000,
- },
- },
- );
-};
-
-// Adds a single job to the queue that processes all validators
-export const addValidatorPrefJobAll = async (queue: Queue, repeat: number) => {
- await queue.add(
- "chaindata",
- { jobType: Jobs.VALIDATOR_PREF_JOB },
- {
- repeat: {
- every: repeat,
- // limit: 1000,
- },
- attempts: 10,
- backoff: {
- type: "exponential",
- delay: 1000,
- },
- },
- );
-};
-
-export const addValidatorPrefJob = async (queue: Queue, repeat: number) => {
- logger.info(`adding Validator Pref Job to Queue.....`);
- const candidates = await queries.allCandidates();
- for (const [index, candidate] of candidates.entries()) {
- await queue.add(
- "chaindata",
- { jobType: Jobs.VALIDATOR_PREF_JOB, candidateAddress: candidate.stash },
- {
- repeat: {
- every: repeat + index,
- // limit: 1000,
- },
- attempts: 10,
- backoff: {
- type: "exponential",
- delay: 1000,
- },
- },
- );
- }
-};
diff --git a/packages/worker/src/queues/ContraintsQueue.ts b/packages/worker/src/queues/ContraintsQueue.ts
deleted file mode 100644
index 1b9488f94..000000000
--- a/packages/worker/src/queues/ContraintsQueue.ts
+++ /dev/null
@@ -1,61 +0,0 @@
-import { Queue } from "bullmq";
-import { logger } from "@1kv/common";
-
-const constraintsLabel = { label: "ConstraintsQueue" };
-
-export const createConstraintsQueue = async (host, port) => {
- const queue = await new Queue("constraints", {
- connection: {
- host: host,
- port: port,
- },
- });
- return queue;
-};
-
-export const addValidityJob = async (queue: Queue, repeat: number) => {
- logger.info(`adding Validity Job to Queue.....`, constraintsLabel);
- // const candidates = await queries.allCandidates();
- // for (const [index, candidate] of candidates.entries()) {
- await queue.add(
- "constraints",
- {
- jobType: "validityJob",
- },
- {
- repeat: {
- every: repeat,
- // limit: 1000,
- },
- attempts: 10,
- backoff: {
- type: "exponential",
- delay: 1000,
- },
- },
- );
- // }
-};
-
-export const addScoreJob = async (queue: Queue, repeat: number) => {
- // const candidates = await queries.allCandidates();
- // logger.info(`adding ${candidates.length} to be scored...`);
- // for (const [index, candidate] of candidates.entries()) {
- await queue.add(
- "constraints",
- { jobType: "scoreJob" },
- {
- repeat: {
- every: repeat,
- // limit: 100,
- },
- attempts: 10,
- backoff: {
- type: "exponential",
- delay: 1000,
- },
- },
- );
- // }
- logger.info(`adding Score Job to Queue.....`, constraintsLabel);
-};
diff --git a/packages/worker/src/queues/ReleaseMonitorQueue.ts b/packages/worker/src/queues/ReleaseMonitorQueue.ts
deleted file mode 100644
index c6f775c72..000000000
--- a/packages/worker/src/queues/ReleaseMonitorQueue.ts
+++ /dev/null
@@ -1,29 +0,0 @@
-import { Queue } from "bullmq";
-
-export const createReleaseMonitorQueue = async (host, port) => {
- const queue = await new Queue("releaseMonitor", {
- connection: {
- host: host,
- port: port,
- },
- });
- return queue;
-};
-
-export const addReleaseMonitorJob = async (queue: Queue, repeat: number) => {
- await queue.add(
- "releaseMonitor",
- {},
- {
- repeat: {
- every: repeat,
- limit: 100,
- },
- attempts: 10,
- backoff: {
- type: "exponential",
- delay: 1000,
- },
- },
- );
-};
diff --git a/packages/worker/src/queues/index.ts b/packages/worker/src/queues/index.ts
deleted file mode 100644
index 4c437215c..000000000
--- a/packages/worker/src/queues/index.ts
+++ /dev/null
@@ -1,59 +0,0 @@
-import { logger } from "@1kv/common";
-
-export * from "./ReleaseMonitorQueue";
-export * from "./ContraintsQueue";
-export * from "./ChainDataQueue";
-export * from "./BlockQueue";
-import { Queue } from "bullmq";
-
-export const removeRepeatableJobsFromQueues = async (queues: Queue[]) => {
- logger.info("Removing repeatable jobs", { label: "Queue" });
- for (const queue of queues) {
- try {
- await removeRepeatableJobs(queue);
- } catch (e) {
- logger.error(JSON.stringify(e));
- }
- }
-};
-
-export const removeRepeatableJobs = async (queue: Queue) => {
- const repeatableJobs = await queue.getRepeatableJobs();
- repeatableJobs.forEach((job) => {
- try {
- queue.removeRepeatableByKey(job.key);
- } catch (e) {
- logger.error(JSON.stringify(e));
- }
- });
-};
-
-export const drainQueues = async (queues: Queue[]) => {
- logger.info("Draining queues", { label: "Queue" });
- for (const queue of queues) {
- try {
- await drainQueue(queue);
- } catch (e) {
- logger.error(JSON.stringify(e));
- }
- }
-};
-
-export const drainQueue = async (queue: Queue) => {
- await queue.drain();
-};
-
-export const obliterateQueues = async (queues: Queue[]) => {
- logger.info("Obliterating queues", { label: "Queue" });
- for (const queue of queues) {
- try {
- await obliterateQueue(queue);
- } catch (e) {
- logger.error(JSON.stringify(e));
- }
- }
-};
-
-export const obliterateQueue = async (queue: Queue) => {
- await queue.obliterate();
-};
diff --git a/packages/worker/src/run.ts b/packages/worker/src/run.ts
deleted file mode 100644
index 5e4576da5..000000000
--- a/packages/worker/src/run.ts
+++ /dev/null
@@ -1,62 +0,0 @@
-import Worker from "./worker";
-import { Command } from "commander";
-import { Config, Db, logger } from "@1kv/common";
-import path from "path";
-import { Server } from "@1kv/gateway";
-
-const version = process.env.npm_package_version;
-
-const catchAndQuit = async (fn: any) => {
- try {
- await fn;
- } catch (e) {
- logger.info("There was an error!");
- logger.error(JSON.stringify(e));
- process.exit(1);
- }
-};
-export const createServer = async (config) => {
- try {
- logger.info(`Creating Server`, { label: "Worker" });
- const server = new Server(config);
- await server.start();
- } catch (e) {
- logger.error(JSON.stringify(e));
- process.exit(1);
- }
-};
-const start = async (cmd: { config: string }) => {
- const config = Config.loadConfig(path.join(cmd.config, "main.json"));
- await createServer(config);
-
- logger.info(`Starting the backend services. ${version}`, {
- label: "Worker",
- });
- const db = await Db.create(config.db.mongo.uri);
- const worker = new Worker(config);
- await worker.startWorker();
-};
-
-const program = new Command();
-
-if (require.main === module) {
- program
- .option(
- "--config ",
- "The path to the config directory.",
- "config",
- )
- .action((cmd: { config: string }) => catchAndQuit(start(cmd)));
-
- program.version(version);
- program.parse(process.argv);
-}
-
-process
- .on("unhandledRejection", (reason, p) => {
- console.error(reason, "Unhandled Rejection at Promise", p);
- })
- .on("uncaughtException", (err) => {
- console.error(err, "Uncaught Exception thrown");
- process.exit(1);
- });
diff --git a/packages/worker/src/worker.ts b/packages/worker/src/worker.ts
deleted file mode 100644
index 4f7b56b1f..000000000
--- a/packages/worker/src/worker.ts
+++ /dev/null
@@ -1,65 +0,0 @@
-import { ApiHandler, Config, Constraints, logger } from "@1kv/common";
-
-export const workerLabel = { label: "Worker" };
-
-class Worker {
- private api: ApiHandler;
- private apiEndpoints: string[];
- private config: Config.ConfigSchema;
- private host: string;
- private port: number;
- private constraints: Constraints.OTV;
-
- constructor(config: Config.ConfigSchema) {
- this.config = config;
- this.apiEndpoints = this.config.global.apiEndpoints;
- this.host = this.config.redis.host;
- this.port = this.config.redis.port;
- logger.info(`Redis host: ${this.host} port: ${this.port}`, workerLabel);
- }
-
- async initializeAPI(): Promise {
- const endpoints = this.apiEndpoints.sort(() => Math.random() - 0.5);
- logger.info(`ApiHandler connecting to ${endpoints[0]}`, workerLabel);
- this.api = new ApiHandler(endpoints);
- await this.api.getApi();
- }
-
- async initializeConstraints(): Promise {
- //this.constraints = new Constraints.OTV(this.api, this.config);
- }
-
- async startWorker(): Promise {
- logger.info(`starting worker....`, workerLabel);
- // await this.initializeAPI();
- // await this.initializeConstraints();
- // logger.info(`Redis host: ${this.host} port: ${this.port}`, workerLabel);
- // const releaseMonitorWorker = await createReleaseMonitorWorker(
- // this.host,
- // this.port,
- // );
- // logger.info(
- // `Created release monitor worker: ${releaseMonitorWorker.id}`,
- // workerLabel,
- // );
- // const constraintsWorker = await createConstraintsWorker(
- // this.host,
- // this.port,
- // this.constraints,
- // );
- // logger.info(
- // `Created constraints worker: ${constraintsWorker.id}`,
- // workerLabel,
- // );
- // const chaindataWorker = await createChainDataWorker(
- // this.host,
- // this.port,
- // this.api,
- // );
- // logger.info(`Created chaindata worker: ${chaindataWorker.id}`, workerLabel);
- // const blockWorker = await createBlockWorker(this.host, this.port, this.api);
- // logger.info(`Created block worker: ${blockWorker.id}`, workerLabel);
- }
-}
-
-export default Worker;
diff --git a/packages/worker/src/workers/BlockWorker.ts b/packages/worker/src/workers/BlockWorker.ts
deleted file mode 100644
index bd61d3fa1..000000000
--- a/packages/worker/src/workers/BlockWorker.ts
+++ /dev/null
@@ -1,29 +0,0 @@
-import { ApiHandler, logger } from "@1kv/common";
-
-export const createBlockWorker = async (host, port, api: ApiHandler) => {
- logger.info(`Creating Block Worker...`, { label: "BlockWorker" });
- // const chaindata = new ChainData(api);
- // const worker = await new Worker(
- // "block",
- // (job) => () => {
- // console.log();
- // }, //Jobs.processBlockDataJob(job, chaindata),
- // {
- // connection: {
- // host: host,
- // port: port,
- // },
- // concurrency: 4,
- // lockDuration: 300000,
- // },
- // );
- // worker.on("completed", (job, result) => {
- // // job has completed
- // const blockNumber = job.data.blockNumber;
- // const executionTime = result;
- // // logger.info(`Indexed block #${blockNumber} (${executionTime}s)`, {
- // // label: "BlockWorker",
- // // });
- // });
- // return worker;
-};
diff --git a/packages/worker/src/workers/ChainDataWorker.ts b/packages/worker/src/workers/ChainDataWorker.ts
deleted file mode 100644
index 77577a737..000000000
--- a/packages/worker/src/workers/ChainDataWorker.ts
+++ /dev/null
@@ -1,19 +0,0 @@
-import { ApiHandler, ChainData, logger } from "@1kv/common";
-
-export const createChainDataWorker = async (host, port, api: ApiHandler) => {
- logger.info(`Creating constraints worker...`);
- //const chaindata = new ChainData(api);
- // const worker = await new Worker(
- // "chaindata",
- // (job) => console.log(), //Jobs.processChainDataJob(job, chaindata),
- // {
- // connection: {
- // host: host,
- // port: port,
- // },
- // concurrency: 6,
- // lockDuration: 3000000,
- // },
- // );
- // return worker;
-};
diff --git a/packages/worker/src/workers/ConstraintsWorker.ts b/packages/worker/src/workers/ConstraintsWorker.ts
deleted file mode 100644
index d9da81dcf..000000000
--- a/packages/worker/src/workers/ConstraintsWorker.ts
+++ /dev/null
@@ -1,22 +0,0 @@
-import { Constraints, logger } from "@1kv/common";
-
-export const createConstraintsWorker = async (
- host,
- port,
- constraints: Constraints.OTV,
-) => {
- logger.info(`Creating constraints worker...`);
- // const worker = await new Worker(
- // "constraints",
- // (job) => console.log(), //Jobs.processConstraintsJob(job, constraints),
- // {
- // connection: {
- // host: host,
- // port: port,
- // },
- // concurrency: 10,
- // lockDuration: 3000000,
- // },
- // );
- // return worker;
-};
diff --git a/packages/worker/src/workers/ReleaseMonitorWorker.ts b/packages/worker/src/workers/ReleaseMonitorWorker.ts
deleted file mode 100644
index eae5e3bf2..000000000
--- a/packages/worker/src/workers/ReleaseMonitorWorker.ts
+++ /dev/null
@@ -1,18 +0,0 @@
-import { logger } from "@1kv/common";
-
-export const createReleaseMonitorWorker = async (host, port) => {
- logger.info(`Creating releaseMonitor worker...`);
- // const worker = await new Worker(
- // "releaseMonitor",
- // (job) => console.log(), //Jobs.processReleaseMonitorJob(job),
- // {
- // connection: {
- // host: host,
- // port: port,
- // },
- // concurrency: 4,
- // lockDuration: 300000,
- // },
- // );
- // return worker;
-};
diff --git a/packages/worker/src/workers/index.ts b/packages/worker/src/workers/index.ts
deleted file mode 100644
index 2639012f2..000000000
--- a/packages/worker/src/workers/index.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-export * from "./ReleaseMonitorWorker";
-export * from "./ConstraintsWorker";
-export * from "./ChainDataWorker";
diff --git a/packages/worker/tsconfig.json b/packages/worker/tsconfig.json
deleted file mode 100644
index 972df6aee..000000000
--- a/packages/worker/tsconfig.json
+++ /dev/null
@@ -1,78 +0,0 @@
-{
- "extends": "../../tsconfig.json",
- "exclude": ["test", "node_modules"],
- "include": ["src/**/*.ts"],
-
- "files": [
- "src/run.ts",
- "src/index.ts",
- "src/worker.ts",
- ],
- "compilerOptions": {
- "paths": {
- "@1kv/common": ["../common/build"],
- "@1kv/gateway": ["../gateway/build"]
- },
- /* Basic Options */
- "target": "es2015", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017','ES2018' or 'ESNEXT'. */
- "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
- "lib": ["ES2015", "dom"],
- // "allowJs": true, /* Allow javascript files to be compiled. */
- // "checkJs": true, /* Report errors in .js files. */
- // "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
- // "declaration": true, /* Generates corresponding '.d.ts' file. */
- // "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */
- // "sourceMap": true, /* Generates corresponding '.map' file. */
- // "outFile": "./", /* Concatenate and emit output to single file. */
- "outDir": "build",
- "skipLibCheck": true, /* Redirect output structure to the directory. */
- "rootDir": "src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
- "composite": true, /* Enable project compilation */
- // "removeComments": true, /* Do not emit comments to output. */
- // "noEmit": true, /* Do not emit outputs. */
- // "importHelpers": true, /* Import emit helpers from 'tslib'. */
- // "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
- // "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
-
- /* Strict Type-Checking Options */
- "strict": false, /* Enable all strict type-checking options. */
- // "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
- // "strictNullChecks": true, /* Enable strict null checks. */
- // "strictFunctionTypes": true, /* Enable strict checking of function types. */
- // "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */
- // "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */
- // "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */
-
- /* Additional Checks */
- // "noUnusedLocals": true, /* Report errors on unused locals. */
- // "noUnusedParameters": true, /* Report errors on unused parameters. */
- // "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
- // "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
-
- /* Module Resolution Options */
- "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
- // "baseUrl": "./", /* Base directory to resolve non-absolute module names. */
- // "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
- // "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
- // "typeRoots": [], /* List of folders to include type definitions from. */
- // "types": [], /* Type declaration files to be included in compilation. */
- // "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
- "esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
- // "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */
-
- /* Source Map Options */
- // "sourceRoot": "./", /* Specify the location where debugger should locate TypeScript files instead of source locations. */
- // "mapRoot": "./", /* Specify the location where debugger should locate map files instead of generated locations. */
- // "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */
- // "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */
-
- /* Experimental Options */
- // "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */
- // "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */
- "resolveJsonModule": true
- },
- "references": [
- { "path": "../common" },
- { "path": "../gateway" }
- ]
-}
diff --git a/yarn.lock b/yarn.lock
index 26f2b0ccd..1cfb35ade 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -12,7 +12,6 @@ __metadata:
"@1kv/common": "workspace:*"
"@1kv/gateway": "workspace:*"
"@1kv/telemetry": "workspace:*"
- "@1kv/worker": "workspace:*"
"@ava/typescript": ^4.1.0
"@babel/preset-react": ^7.23.3
"@bull-board/api": ^5.15.1
@@ -149,12 +148,6 @@ __metadata:
languageName: unknown
linkType: soft
-"@1kv/worker@workspace:*, @1kv/worker@workspace:packages/worker":
- version: 0.0.0-use.local
- resolution: "@1kv/worker@workspace:packages/worker"
- languageName: unknown
- linkType: soft
-
"@aashutoshrathi/word-wrap@npm:^1.2.3":
version: 1.2.6
resolution: "@aashutoshrathi/word-wrap@npm:1.2.6"