Skip to content

Commit

Permalink
feat: add tests for cluster
Browse files Browse the repository at this point in the history
  • Loading branch information
luin committed Mar 14, 2022
1 parent 06782e6 commit 1eba58b
Show file tree
Hide file tree
Showing 6 changed files with 163 additions and 13 deletions.
20 changes: 8 additions & 12 deletions .github/workflows/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,26 +17,22 @@ on:

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
# This workflow contains a single job called "build"
build:
# The type of runner that the job will run on
test:
runs-on: ubuntu-latest

# See https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#using-environment-variables-in-a-matrix
strategy:
fail-fast: false
matrix:
include:
# Need a new enough git version to install the npm husky module.
- NODE_VERSION: 12-bullseye
- NODE_VERSION: 14-bullseye
- NODE_VERSION: 16-bullseye

# Steps represent a sequence of tasks that will be executed as part of the job
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v2

# Runs a single command using the runners shell
- name: Build and test in docker
- name: Build and test
run: bash test/docker/main.sh ${{ matrix.NODE_VERSION }}
test-cluster:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Build and test cluster
run: bash test-cluster/docker/main.sh
4 changes: 3 additions & 1 deletion lib/cluster/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ import {
weightSrvRecords,
} from "./util";
import Deque = require("denque");
import { addTransactionSupport, Transaction } from "../transaction";

const debug = Debug("cluster");

Expand Down Expand Up @@ -1042,6 +1043,7 @@ class Cluster extends Commander {
interface Cluster extends EventEmitter {}
applyMixin(Cluster, EventEmitter);

require("../transaction").addTransactionSupport(Cluster.prototype);
addTransactionSupport(Cluster.prototype);
interface Cluster extends Transaction {}

export default Cluster;
1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
"scripts": {
"test:tsd": "npm run build && tsd",
"test:js": "TS_NODE_TRANSPILE_ONLY=true TS_NODE_LOG_ERROR=true NODE_ENV=test mocha \"test/helpers/*.ts\" \"test/**/*.ts\"",
"test:js:cluster": "TS_NODE_TRANSPILE_ONLY=true TS_NODE_LOG_ERROR=true NODE_ENV=test mocha \"test-cluster/**/*.ts\"",
"test:docs": "(npm run docs && git diff-index --quiet HEAD docs) || (echo 'Run `npm run docs` and commit the updated docs' && exit 1)",
"test": "npm run test:js && npm run test:tsd",
"lint": "eslint --ext .js,.ts ./lib",
Expand Down
128 changes: 128 additions & 0 deletions test-cluster/basic.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,128 @@
import { expect } from "chai";
import Redis, { Cluster } from "../lib";

const masters = [30000, 30001, 30002];
const replicas = [30003, 30004, 30005];

// Cluster clients created during a test; disconnected in afterEach so that
// mocha can exit cleanly instead of hanging on leaked sockets.
const clusters: Cluster[] = [];

/**
 * Creates a Cluster client pointed at 127.0.0.1:port and registers it for
 * automatic cleanup in afterEach.
 */
function createCluster(
  port: number,
  options?: ConstructorParameters<typeof Cluster>[1]
): Cluster {
  const cluster = new Cluster([{ host: "127.0.0.1", port }], options);
  clusters.push(cluster);
  return cluster;
}

// Numeric comparator: the default Array#sort is lexicographic, which only
// happens to work here because all ports have the same number of digits.
const byNumber = (a: number, b: number) => a - b;

describe("cluster", () => {
  afterEach(async () => {
    // Tear down clients created by the test to avoid leaking connections.
    for (const cluster of clusters.splice(0)) {
      cluster.disconnect();
    }
    for (const port of masters) {
      const redis = new Redis(port);
      await redis.flushall();
      await redis.script("FLUSH");
      redis.disconnect();
    }
    // Wait for replication
    await new Promise((resolve) => setTimeout(resolve, 500));
  });

  it("discovers nodes from master", async () => {
    const cluster = createCluster(masters[0]);
    await cluster.set("foo", "bar");
    expect(await cluster.get("foo")).to.eql("bar");
  });

  it("discovers nodes from replica", async () => {
    const cluster = createCluster(replicas[0]);
    await cluster.set("foo", "bar");
    expect(await cluster.get("foo")).to.eql("bar");
  });

  describe("#nodes()", () => {
    it("returns master nodes", async () => {
      const cluster = createCluster(masters[0]);
      // Issue a command first so the client has discovered the topology.
      await cluster.info();
      const nodes = cluster.nodes("master");
      expect(nodes.map((node) => node.options.port).sort(byNumber)).to.eql(
        masters
      );
    });

    it("returns replica nodes", async () => {
      const cluster = createCluster(masters[0]);
      await cluster.info();
      const nodes = cluster.nodes("slave");
      expect(nodes.map((node) => node.options.port).sort(byNumber)).to.eql(
        replicas
      );
    });

    it("returns all nodes", async () => {
      const cluster = createCluster(masters[0]);
      await cluster.info();
      const nodes = cluster.nodes();
      expect(nodes.map((node) => node.options.port).sort(byNumber)).to.eql(
        masters.concat(replicas)
      );
    });
  });

  describe("scaleReads", () => {
    it("ensures non-readonly commands still working", async () => {
      const cluster = createCluster(masters[0], { scaleReads: "slave" });
      await cluster.set("foo", "bar");
      expect(await cluster.get("foo")).to.eql("bar");
    });
  });

  describe("pipeline", () => {
    it("ensures script ordering when not loaded", async () => {
      const cluster = createCluster(masters[0]);
      cluster.defineCommand("myget", {
        numberOfKeys: 1,
        lua: "return redis.call('GET', KEYS[1])",
      });

      // The script is not loaded yet: the pipeline must still execute the
      // commands in order (myget → set → myget).
      expect(
        await cluster
          .pipeline()
          // @ts-expect-error
          .myget("foo")
          .set("foo", "setAfterMyGET")
          .myget("foo")
          .exec()
      ).to.eql([
        [null, null],
        [null, "OK"],
        [null, "setAfterMyGET"],
      ]);
    });

    it("falls back to eval when the cache is flushed", async () => {
      const cluster = createCluster(masters[0]);
      cluster.defineCommand("myget", {
        numberOfKeys: 1,
        lua: "return redis.call('GET', KEYS[1])",
      });

      // Load the script into the server-side cache once…
      // @ts-expect-error
      await cluster.myget("foo");

      // …then flush the cache so EVALSHA will fail and EVAL is required.
      for (const node of cluster.nodes("master")) {
        await node.script("FLUSH");
      }

      expect(
        await cluster
          .pipeline()
          // @ts-expect-error
          .myget("foo")
          .set("foo", "setAfterMyGET")
          .myget("foo")
          .exec()
      ).to.eql([
        [null, "setAfterMyGET"],
        [null, "OK"],
        [null, "setAfterMyGET"],
      ]);
    });
  });

  describe("auto pipelining", () => {
    it("works", async () => {
      const cluster = createCluster(masters[0], {
        enableAutoPipelining: true,
      });

      // Intentionally not awaited: issuing SET and GET in the same tick lets
      // them share one auto-pipeline, which is what this test exercises.
      cluster.set("foo", "auto pipelining");
      expect(await cluster.get("foo")).to.eql("auto pipelining");
    });
  });
});
19 changes: 19 additions & 0 deletions test-cluster/docker/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
FROM grokzen/redis-cluster

# Refresh package metadata once (the base image's release info may have
# rotated, hence --allow-releaseinfo-change). The original ran `apt-get
# update` twice back to back; a single merged invocation is sufficient.
RUN apt-get --allow-releaseinfo-change update \
    && apt-get install -y redis-server curl

# SECURITY NOTE: the next two layers disable TLS certificate verification for
# apt and curl. This is only acceptable because the image is a throwaway local
# CI test fixture — do not reuse this pattern for anything else.
RUN touch /etc/apt/apt.conf.d/99verify-peer.conf \
    && echo >>/etc/apt/apt.conf.d/99verify-peer.conf "Acquire { https::Verify-Peer false }"
RUN echo insecure >> $HOME/.curlrc

# Install Node.js 14 from NodeSource, then drop the apt cache to keep the
# image small.
RUN curl --insecure -fsSL https://deb.nodesource.com/setup_14.x | bash - \
    && apt-get install -y nodejs \
    && apt-get clean

# WORKDIR creates /code if it does not exist; no separate mkdir needed.
WORKDIR /code

# COPY (not ADD) for plain local files — ADD's extra tar/URL semantics are
# not wanted here.
COPY package.json package-lock.json ./
# Install npm dependencies without converting the lockfile version in npm 7,
# and remove temporary files to save space when developing locally.
RUN npm install --no-save && npm cache clean --force
4 changes: 4 additions & 0 deletions test-cluster/docker/main.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
#!/bin/bash
# Spins up a 6-node Redis Cluster in Docker and runs the cluster test suite.
# Fail the script (and CI) as soon as any command fails.
set -e

# Start the cluster (3 masters on 30000-30002, 3 replicas on 30003-30005)
# in the background; the tests connect to it over the published ports.
docker run -e "INITIAL_PORT=30000" -e "IP=0.0.0.0" -p 30000-30005:30000-30005 grokzen/redis-cluster:latest &

npm install

# Give the cluster time to form and finish replication before testing.
sleep 15

# BUG FIX: the original line was
#   npm run test:js:cluster | npm run test:js:cluster
# which piped the first run's stdout into a second identical run as stdin and
# reported only the second command's exit status, masking failures.
npm run test:js:cluster

0 comments on commit 1eba58b

Please sign in to comment.