diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index b53360991..d04ee7bd5 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -4,7 +4,6 @@ name: CI
 env:
   JDK_JAVA_OPTIONS: -XX:+PrintCommandLineFlags
-  JVM_OPTS: -XX:+PrintCommandLineFlags
 'on':
   workflow_dispatch: {}
   release:
@@ -14,7 +13,6 @@ env:
     branches:
       - master
   pull_request: {}
-  create: {}
 jobs:
   build:
     name: Build
@@ -22,16 +20,16 @@ jobs:
     continue-on-error: true
     steps:
       - name: Git Checkout
-        uses: actions/checkout@v4.0.0
+        uses: actions/checkout@v4.1.0
        with:
          fetch-depth: '0'
      - name: Install libuv
        run: sudo apt-get update && sudo apt-get install -y libuv1-dev
      - name: Setup Scala
-        uses: actions/setup-java@v3.12.0
+        uses: actions/setup-java@v3.13.0
        with:
-          distribution: temurin
-          java-version: '8'
+          distribution: corretto
+          java-version: '17'
          check-latest: true
      - name: Cache Dependencies
        uses: coursier/cache-action@v6
@@ -47,16 +45,16 @@ jobs:
    continue-on-error: false
    steps:
      - name: Git Checkout
-        uses: actions/checkout@v4.0.0
+        uses: actions/checkout@v4.1.0
        with:
          fetch-depth: '0'
      - name: Install libuv
        run: sudo apt-get update && sudo apt-get install -y libuv1-dev
      - name: Setup Scala
-        uses: actions/setup-java@v3.12.0
+        uses: actions/setup-java@v3.13.0
        with:
-          distribution: temurin
-          java-version: '8'
+          distribution: corretto
+          java-version: '17'
          check-latest: true
      - name: Cache Dependencies
        uses: coursier/cache-action@v6
@@ -72,32 +70,26 @@ jobs:
      fail-fast: false
      matrix:
        java:
-          - '8'
          - '11'
          - '17'
+          - '21'
    steps:
      - name: Install libuv
        run: sudo apt-get update && sudo apt-get install -y libuv1-dev
      - name: Setup Scala
-        uses: actions/setup-java@v3.12.0
+        uses: actions/setup-java@v3.13.0
        with:
-          distribution: temurin
+          distribution: corretto
          java-version: ${{ matrix.java }}
          check-latest: true
      - name: Cache Dependencies
        uses: coursier/cache-action@v6
      - name: Git Checkout
-        uses: actions/checkout@v4.0.0
+        uses: actions/checkout@v4.1.0
        with:
          fetch-depth: '0'
      - name: Test
        run: sbt +test
-      - name: Run Redis
-        run: docker-compose -f docker/redis-compose.yml up -d
-      - name: Run Redis cluster
-        run: docker-compose -f docker/redis-cluster-compose.yml up -d
-      - name: Run integration tests
-        run: sbt ++${{ matrix.scala }} IntegrationTest/test
  update-readme:
    name: Update README
    runs-on: ubuntu-latest
@@ -105,16 +97,16 @@ jobs:
    if: ${{ github.event_name == 'push' }}
    steps:
      - name: Git Checkout
-        uses: actions/checkout@v4.0.0
+        uses: actions/checkout@v4.1.0
        with:
          fetch-depth: '0'
      - name: Install libuv
        run: sudo apt-get update && sudo apt-get install -y libuv1-dev
      - name: Setup Scala
-        uses: actions/setup-java@v3.12.0
+        uses: actions/setup-java@v3.13.0
        with:
-          distribution: temurin
-          java-version: '8'
+          distribution: corretto
+          java-version: '17'
          check-latest: true
      - name: Cache Dependencies
        uses: coursier/cache-action@v6
@@ -179,16 +171,16 @@ jobs:
    if: ${{ github.event_name != 'pull_request' }}
    steps:
      - name: Git Checkout
-        uses: actions/checkout@v4.0.0
+        uses: actions/checkout@v4.1.0
        with:
          fetch-depth: '0'
      - name: Install libuv
        run: sudo apt-get update && sudo apt-get install -y libuv1-dev
      - name: Setup Scala
-        uses: actions/setup-java@v3.12.0
+        uses: actions/setup-java@v3.13.0
        with:
-          distribution: temurin
-          java-version: '8'
+          distribution: corretto
+          java-version: '17'
          check-latest: true
      - name: Cache Dependencies
        uses: coursier/cache-action@v6
@@ -208,16 +200,16 @@ jobs:
    if: ${{ ((github.event_name == 'release') && (github.event.action == 'published')) || (github.event_name == 'workflow_dispatch') }}
    steps:
      - name: Git Checkout
-        uses: actions/checkout@v4.0.0
+        uses: actions/checkout@v4.1.0
        with:
          fetch-depth: '0'
      - name: Install libuv
        run: sudo apt-get update && sudo apt-get install -y libuv1-dev
      - name: Setup Scala
-        uses: actions/setup-java@v3.12.0
+        uses: actions/setup-java@v3.13.0
        with:
-          distribution: temurin
-          java-version: '8'
+          distribution: corretto
+          java-version: '17'
          check-latest: true
      - name: Cache Dependencies
        uses: coursier/cache-action@v6
@@ -239,7 +231,7 @@ jobs:
    if: ${{ (github.event_name == 'release') && (github.event.action == 'published') }}
    steps:
      - name: Git Checkout
-        uses: actions/checkout@v4.0.0
+        uses: actions/checkout@v4.1.0
        with:
          fetch-depth: '0'
      - name: notify the main repo about the new release of docs package
diff --git a/build.sbt b/build.sbt
index 59cbe66c1..9eaa8bee6 100644
--- a/build.sbt
+++ b/build.sbt
@@ -1,3 +1,4 @@
+import zio.sbt.githubactions.Job
 import zio.sbt.githubactions.Step.SingleStep

 enablePlugins(ZioSbtEcosystemPlugin, ZioSbtCiPlugin)
@@ -5,25 +6,7 @@ enablePlugins(ZioSbtEcosystemPlugin, ZioSbtCiPlugin)
 inThisBuild(
   List(
     name := "ZIO Redis",
-    zioVersion := "2.0.16",
-    scala212 := "2.12.18",
-    scala213 := "2.13.11",
-    scala3 := "3.3.1",
     ciEnabledBranches := List("master"),
-    ciExtraTestSteps := List(
-      SingleStep(
-        name = "Run Redis",
-        run = Some("docker-compose -f docker/redis-compose.yml up -d")
-      ),
-      SingleStep(
-        name = "Run Redis cluster",
-        run = Some("docker-compose -f docker/redis-cluster-compose.yml up -d")
-      ),
-      SingleStep(
-        name = "Run integration tests",
-        run = Some("sbt ++${{ matrix.scala }} IntegrationTest/test")
-      )
-    ),
     developers := List(
       Developer("jdegoes", "John De Goes", "john@degoes.net", url("https://degoes.net")),
       Developer("mijicd", "Dejan Mijic", "dmijic@acm.org", url("https://github.com/mijicd"))
@@ -40,31 +23,13 @@ lazy val root =
     crossScalaVersions := Nil,
     publish / skip := true
   )
-    .aggregate(redis, embedded, benchmarks, example, docs)
-
-lazy val redis =
-  project
-    .in(file("modules/redis"))
-    .settings(addOptionsOn("2.13")("-Xlint:-infer-any"))
-    .settings(stdSettings(name = Some("zio-redis"), packageName = Some("zio.redis")))
-    .settings(enableZIO(enableStreaming = true))
-    .settings(libraryDependencies ++= Dependencies.redis(zioVersion.value))
-    .settings(Defaults.itSettings)
-    .configs(IntegrationTest)
-
-lazy val embedded =
-  project
-    .in(file("modules/embedded"))
-    .settings(stdSettings(name = Some("zio-redis-embedded"), packageName = Some("zio.redis.embedded")))
-    .settings(enableZIO())
-    .settings(libraryDependencies ++= Dependencies.Embedded)
-    .dependsOn(redis)
+    .aggregate(benchmarks, client, docs, embedded, example, integrationTest)

 lazy val benchmarks =
   project
     .in(file("modules/benchmarks"))
     .enablePlugins(JmhPlugin)
-    .dependsOn(redis)
+    .dependsOn(client)
     .settings(stdSettings(name = Some("benchmarks"), packageName = Some("zio.redis.benchmarks")))
     .settings(
       crossScalaVersions -= scala3.value,
@@ -72,16 +37,13 @@ lazy val benchmarks =
       publish / skip := true
     )

-lazy val example =
+lazy val client =
   project
-    .in(file("modules/example"))
-    .dependsOn(redis)
-    .settings(stdSettings(name = Some("example"), packageName = Some("zio.redis.example")))
+    .in(file("modules/redis"))
+    .settings(addOptionsOn("2.13")("-Xlint:-infer-any"))
+    .settings(stdSettings(name = Some("zio-redis"), packageName = Some("zio.redis")))
     .settings(enableZIO(enableStreaming = true))
-    .settings(
-      publish / skip := true,
-      libraryDependencies ++= Dependencies.Example
-    )
+    .settings(libraryDependencies ++= Dependencies.redis(zioVersion.value))

 lazy val docs = project
   .in(file("zio-redis-docs"))
@@ -93,9 +55,40 @@ lazy val docs = project
   .settings(
     moduleName := "zio-redis-docs",
     projectName := (ThisBuild / name).value,
-    mainModuleName := (redis / moduleName).value,
+    mainModuleName := (client / moduleName).value,
     projectStage := ProjectStage.Development,
-    ScalaUnidoc / unidoc / unidocProjectFilter := inProjects(redis)
+    ScalaUnidoc / unidoc / unidocProjectFilter := inProjects(client)
   )
-  .dependsOn(redis, embedded)
+  .dependsOn(client, embedded)
   .enablePlugins(WebsitePlugin)
+
+lazy val embedded =
+  project
+    .in(file("modules/embedded"))
+    .settings(stdSettings(name = Some("zio-redis-embedded"), packageName = Some("zio.redis.embedded")))
+    .settings(enableZIO())
+    .settings(libraryDependencies ++= Dependencies.Embedded)
+    .dependsOn(client)
+
+lazy val example =
+  project
+    .in(file("modules/example"))
+    .dependsOn(client)
+    .settings(stdSettings(name = Some("example"), packageName = Some("zio.redis.example")))
+    .settings(enableZIO(enableStreaming = true))
+    .settings(
+      publish / skip := true,
+      libraryDependencies ++= Dependencies.Example
+    )
+
+lazy val integrationTest =
+  project
+    .in(file("modules/redis-it"))
+    .settings(stdSettings(name = Some("zio-redis-it")))
+    .settings(enableZIO(enableStreaming = true))
+    .settings(
+      libraryDependencies ++= Dependencies.redis(zioVersion.value),
+      publish / skip := true,
+      Test / fork := false
+    )
+    .dependsOn(client)
diff --git a/docker/redis-cluster-compose.yml b/docker/redis-cluster-compose.yml
deleted file mode 100644
index 8b3dd9312..000000000
--- a/docker/redis-cluster-compose.yml
+++ /dev/null
@@ -1,17 +0,0 @@
-version: '3.3'
-services:
-  redis-cluster:
-    image: redis:7.2-rc1-alpine
-    container_name: redis-cluster
-    volumes:
-      - ./redis-cluster.sh:/data/redis-cluster.sh
-    command: ./redis-cluster.sh
-    stdin_open: true
-    tty: true
-    ports:
-      - "5000:5000"
-      - "5001:5001"
-      - "5002:5002"
-      - "5003:5003"
-      - "5004:5004"
-      - "5005:5005"
\ No newline at end of file
diff --git a/docker/redis-cluster.sh b/docker/redis-cluster.sh
deleted file mode 100755
index 865b4bcc9..000000000
--- a/docker/redis-cluster.sh
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/bin/sh
-
-IP="127.0.0.1"
-PORTS="5000 5001 5002 5003 5004 5005"
-TIMEOUT=5000
-NODES=""
-
-mkdir cluster-test
-cd cluster-test || exit
-
-echo "Redis cluster is starting"
-for port in $PORTS
-do
-echo "Starting $port"
-redis-server --port $port --cluster-enabled yes --cluster-config-file nodes-${port}.conf --cluster-node-timeout $TIMEOUT --appendonly yes --appendfilename appendonly-${port}.aof --logfile ${port}.log --daemonize yes
-NODES="${NODES} ${IP}:${port}"
-done
-echo "All nodes for redis cluster have been started ${NODES}"
-
-redis-cli --cluster create ${NODES} --cluster-replicas 1 --cluster-yes
-echo "Redis cluster has been created"
-sleep 10000
diff --git a/docker/redis-compose.yml b/docker/redis-compose.yml
deleted file mode 100644
index bcec64510..000000000
--- a/docker/redis-compose.yml
+++ /dev/null
@@ -1,14 +0,0 @@
-version: "3.2"
-
-services:
-  redis1:
-    container_name: test_redis_1
-    image: redis:7.2-rc1-alpine
-    ports:
-      - "6379:6379"
-
-  redis2:
-    container_name: test_redis_2
-    image: redis:7.2-rc1-alpine
-    ports:
-      - "6380:6379"
diff --git a/modules/redis-it/src/test/resources/docker-compose.yml b/modules/redis-it/src/test/resources/docker-compose.yml
new file mode 100644
index 000000000..eec55f5cf
--- /dev/null
+++ b/modules/redis-it/src/test/resources/docker-compose.yml
@@ -0,0 +1,56 @@
+version: "3.3"
+
+services:
+  single-node-0:
+    image: bitnami/redis:7.2
+    environment:
+      - 'ALLOW_EMPTY_PASSWORD=yes'
+
+  single-node-1:
+    image: bitnami/redis:7.2
+    environment:
+      - 'ALLOW_EMPTY_PASSWORD=yes'
+
+  cluster-node-0:
+    image: bitnami/redis-cluster:7.2
+    environment:
+      - 'ALLOW_EMPTY_PASSWORD=yes'
+      - 'REDIS_NODES=cluster-node-0 cluster-node-1 cluster-node-2 cluster-node-3 cluster-node-4 cluster-node-5'
+
+  cluster-node-1:
+    image: bitnami/redis-cluster:7.2
+    environment:
+      - 'ALLOW_EMPTY_PASSWORD=yes'
+      - 'REDIS_NODES=cluster-node-0 cluster-node-1 cluster-node-2 cluster-node-3 cluster-node-4 cluster-node-5'
+
+  cluster-node-2:
+    image: bitnami/redis-cluster:7.2
+    environment:
+      - 'ALLOW_EMPTY_PASSWORD=yes'
+      - 'REDIS_NODES=cluster-node-0 cluster-node-1 cluster-node-2 cluster-node-3 cluster-node-4 cluster-node-5'
+
+  cluster-node-3:
+    image: bitnami/redis-cluster:7.2
+    environment:
+      - 'ALLOW_EMPTY_PASSWORD=yes'
+      - 'REDIS_NODES=cluster-node-0 cluster-node-1 cluster-node-2 cluster-node-3 cluster-node-4 cluster-node-5'
+
+  cluster-node-4:
+    image: bitnami/redis-cluster:7.2
+    environment:
+      - 'ALLOW_EMPTY_PASSWORD=yes'
+      - 'REDIS_NODES=cluster-node-0 cluster-node-1 cluster-node-2 cluster-node-3 cluster-node-4 cluster-node-5'
+
+  cluster-node-5:
+    image: bitnami/redis-cluster:7.2
+    depends_on:
+      - cluster-node-0
+      - cluster-node-1
+      - cluster-node-2
+      - cluster-node-3
+      - cluster-node-4
+    environment:
+      - 'ALLOW_EMPTY_PASSWORD=yes'
+      - 'REDIS_CLUSTER_REPLICAS=1'
+      - 'REDIS_NODES=cluster-node-0 cluster-node-1 cluster-node-2 cluster-node-3 cluster-node-4 cluster-node-5'
+      - 'REDIS_CLUSTER_CREATOR=yes'
diff --git a/modules/redis/src/it/scala/zio/redis/ApiSpec.scala b/modules/redis-it/src/test/scala/zio/redis/ApiSpec.scala
similarity index 54%
rename from modules/redis/src/it/scala/zio/redis/ApiSpec.scala
rename to modules/redis-it/src/test/scala/zio/redis/ApiSpec.scala
index 3a77297e2..b0c436989 100644
--- a/modules/redis/src/it/scala/zio/redis/ApiSpec.scala
+++ b/modules/redis-it/src/test/scala/zio/redis/ApiSpec.scala
@@ -1,5 +1,6 @@
 package zio.redis

+import com.dimafeng.testcontainers.DockerComposeContainer
 import zio._
 import zio.test.TestAspect._
 import zio.test._
@@ -20,42 +21,54 @@ object ApiSpec
     with PubSubSpec {

   def spec: Spec[TestEnvironment, Any] =
-    suite("Redis commands")(clusterSuite, singleNodeSuite) @@ sequential @@ withLiveEnvironment
+    suite("Redis commands")(ClusterSuite, SingleNodeSuite)
+      .provideShared(
+        compose(
+          service(BaseSpec.SingleNode0, ".*Ready to accept connections.*"),
+          service(BaseSpec.SingleNode1, ".*Ready to accept connections.*"),
+          service(BaseSpec.MasterNode, ".*Cluster correctly created.*")
+        )
+      ) @@ sequential @@ withLiveEnvironment

-  private val singleNodeSuite =
-    suite("Single node executor")(
+  private final val ClusterSuite =
+    suite("Cluster executor")(
       connectionSuite,
       keysSuite,
       listSuite,
+      stringsSuite,
+      hashSuite,
       setsSuite,
       sortedSetsSuite,
-      stringsSuite,
-      geoSuite,
       hyperLogLogSuite,
-      hashSuite,
+      geoSuite,
       streamsSuite,
       scriptingSpec,
-      pubSubSuite
-    ).provideShared(Redis.local, RedisSubscription.local, ZLayer.succeed(ProtobufCodecSupplier))
+      clusterSpec
+    ).provideSomeShared[DockerComposeContainer](
+      Redis.cluster,
+      masterNodeConfig,
+      ZLayer.succeed(ProtobufCodecSupplier)
+    ).filterNotTags(_.contains(BaseSpec.ClusterExecutorUnsupported))
+      .getOrElse(Spec.empty) @@ eventually

-  private val clusterSuite =
-    suite("Cluster executor")(
+  private final val SingleNodeSuite =
+    suite("Single node executor")(
       connectionSuite,
       keysSuite,
       listSuite,
-      stringsSuite,
-      hashSuite,
       setsSuite,
       sortedSetsSuite,
-      hyperLogLogSuite,
+      stringsSuite,
       geoSuite,
+      hyperLogLogSuite,
+      hashSuite,
       streamsSuite,
       scriptingSpec,
-      clusterSpec
-    ).provideShared(
-      Redis.cluster,
-      ZLayer.succeed(ProtobufCodecSupplier),
-      ZLayer.succeed(RedisClusterConfig(Chunk(RedisUri("localhost", 5000))))
-    ).filterNotTags(_.contains(BaseSpec.ClusterExecutorUnsupported))
-      .getOrElse(Spec.empty)
+      pubSubSuite
+    ).provideSomeShared[DockerComposeContainer](
+      Redis.singleNode,
+      RedisSubscription.singleNode,
+      singleNodeConfig(BaseSpec.SingleNode0),
+      ZLayer.succeed(ProtobufCodecSupplier)
+    )
 }
diff --git a/modules/redis-it/src/test/scala/zio/redis/BaseSpec.scala b/modules/redis-it/src/test/scala/zio/redis/BaseSpec.scala
new file mode 100644
index 000000000..0f70aa0fe
--- /dev/null
+++ b/modules/redis-it/src/test/scala/zio/redis/BaseSpec.scala
@@ -0,0 +1,76 @@
+package zio.redis
+
+import com.dimafeng.testcontainers.{DockerComposeContainer, ExposedService}
+import org.testcontainers.containers.wait.strategy.Wait
+import zio.schema.Schema
+import zio.schema.codec.{BinaryCodec, ProtobufCodec}
+import zio.test.TestAspect.{fibers, silentLogging, tag}
+import zio.test._
+import zio.testcontainers._
+import zio.{ULayer, _}
+
+import java.io.File
+import java.util.UUID
+
+trait BaseSpec extends ZIOSpecDefault {
+  implicit def summonCodec[A: Schema]: BinaryCodec[A] = ProtobufCodec.protobufCodec
+
+  override def aspects: Chunk[TestAspectAtLeastR[Live]] =
+    Chunk(fibers, silentLogging)
+
+  final def compose(services: ExposedService*): ULayer[DockerComposeContainer] =
+    ZLayer.fromTestContainer {
+      DockerComposeContainer(
+        new File(getClass.getResource("/docker-compose.yml").getFile),
+        services.toList
+      )
+    }
+
+  final def masterNodeConfig: URLayer[DockerComposeContainer, RedisClusterConfig] =
+    ZLayer {
+      for {
+        docker      <- ZIO.service[DockerComposeContainer]
+        hostAndPort <- docker.getHostAndPort(BaseSpec.MasterNode)(6379)
+        uri          = RedisUri(hostAndPort._1, hostAndPort._2)
+      } yield RedisClusterConfig(Chunk(uri))
+    }
+
+  final def service(name: String, waitMessage: String): ExposedService =
+    ExposedService(name, 6379, Wait.forLogMessage(waitMessage, 1))
+
+  final def singleNodeConfig(host: String): URLayer[DockerComposeContainer, RedisConfig] =
+    ZLayer {
+      for {
+        docker      <- ZIO.service[DockerComposeContainer]
+        hostAndPort <- docker.getHostAndPort(host)(6379)
+      } yield RedisConfig(hostAndPort._1, hostAndPort._2)
+    }
+
+  /* TODO
+   * We can try to support the most unsupported commands for cluster with:
+   *   - [DONE] default connection for commands without a key and for multiple key commands with
+   *     the limitation that all keys have to be in the same slot
+   *   - fork/join approach for commands that operate on keys with different slots
+   */
+  final val clusterExecutorUnsupported: TestAspectPoly =
+    tag(BaseSpec.ClusterExecutorUnsupported)
+
+  final val genStringRedisTypeOption: Gen[Any, Option[RedisType]] =
+    Gen.option(Gen.constSample(Sample.noShrink(RedisType.String)))
+
+  final val genCountOption: Gen[Any, Option[Count]] =
+    Gen.option(Gen.long(0, 100000).map(Count(_)))
+
+  final val genPatternOption: Gen[Any, Option[String]] =
+    Gen.option(Gen.constSample(Sample.noShrink("*")))
+
+  final val uuid: UIO[String] =
+    ZIO.succeed(UUID.randomUUID().toString)
+}
+
+object BaseSpec {
+  final val ClusterExecutorUnsupported = "cluster executor not supported"
+  final val MasterNode                 = "cluster-node-5"
+  final val SingleNode0                = "single-node-0"
+  final val SingleNode1                = "single-node-1"
+}
diff --git a/modules/redis-it/src/test/scala/zio/redis/ClusterSpec.scala b/modules/redis-it/src/test/scala/zio/redis/ClusterSpec.scala
new file mode 100644
index 000000000..3d139be8d
--- /dev/null
+++ b/modules/redis-it/src/test/scala/zio/redis/ClusterSpec.scala
@@ -0,0 +1,27 @@
+package zio.redis
+
+import com.dimafeng.testcontainers.DockerComposeContainer
+import zio._
+import zio.test._
+
+trait ClusterSpec extends BaseSpec {
+  def clusterSpec: Spec[DockerComposeContainer & Redis, RedisError] =
+    suite("cluster")(
+      suite("slots")(
+        test("get cluster slots") {
+          for {
+            res    <- ZIO.serviceWithZIO[Redis](_.slots)
+            docker <- ZIO.service[DockerComposeContainer]
+            port    = 6379
+            expected <-
+              ZIO
+                .foreach(0 to 5) { n =>
+                  ZIO.attempt(docker.getServiceHost(s"cluster-node-$n", port)).map(host => RedisUri(host, port))
+                }
+                .orDie
+            actual = res.map(_.master.address) ++ res.flatMap(_.slaves.map(_.address))
+          } yield assertTrue(actual.distinct.size == expected.size)
+        }
+      )
+    )
+}
diff --git a/modules/redis/src/it/scala/zio/redis/ConnectionSpec.scala b/modules/redis-it/src/test/scala/zio/redis/ConnectionSpec.scala
similarity index 100%
rename from modules/redis/src/it/scala/zio/redis/ConnectionSpec.scala
rename to modules/redis-it/src/test/scala/zio/redis/ConnectionSpec.scala
diff --git a/modules/redis/src/it/scala/zio/redis/GeoSpec.scala b/modules/redis-it/src/test/scala/zio/redis/GeoSpec.scala
similarity index 100%
rename from modules/redis/src/it/scala/zio/redis/GeoSpec.scala
rename to modules/redis-it/src/test/scala/zio/redis/GeoSpec.scala
diff --git a/modules/redis/src/it/scala/zio/redis/HashSpec.scala b/modules/redis-it/src/test/scala/zio/redis/HashSpec.scala
similarity index 100%
rename from modules/redis/src/it/scala/zio/redis/HashSpec.scala
rename to modules/redis-it/src/test/scala/zio/redis/HashSpec.scala
diff --git a/modules/redis/src/it/scala/zio/redis/HyperLogLogSpec.scala b/modules/redis-it/src/test/scala/zio/redis/HyperLogLogSpec.scala
similarity index 100%
rename from modules/redis/src/it/scala/zio/redis/HyperLogLogSpec.scala
rename to modules/redis-it/src/test/scala/zio/redis/HyperLogLogSpec.scala
diff --git a/modules/redis/src/it/scala/zio/redis/KeysSpec.scala b/modules/redis-it/src/test/scala/zio/redis/KeysSpec.scala
similarity index 92%
rename from modules/redis/src/it/scala/zio/redis/KeysSpec.scala
rename to modules/redis-it/src/test/scala/zio/redis/KeysSpec.scala
index 2a8269c75..b3d5dd125 100644
--- a/modules/redis/src/it/scala/zio/redis/KeysSpec.scala
+++ b/modules/redis-it/src/test/scala/zio/redis/KeysSpec.scala
@@ -1,5 +1,6 @@
 package zio.redis

+import com.dimafeng.testcontainers.DockerComposeContainer
 import zio._
 import zio.redis.RedisError.ProtocolError
 import zio.test.Assertion.{exists => _, _}
@@ -7,7 +8,7 @@ import zio.test.TestAspect.{restore => _, _}
 import zio.test._

 trait KeysSpec extends BaseSpec {
-  def keysSuite: Spec[Redis, RedisError] = {
+  def keysSuite: Spec[DockerComposeContainer & Redis, RedisError] = {
     suite("keys")(
       test("set followed by get") {
         for {
@@ -163,18 +164,19 @@ trait KeysSpec extends BaseSpec {
           key   <- uuid
           value <- uuid
           _     <- redis.set(key, value)
-          response <- redis.migrate(
-                        "redis2",
-                        6379,
-                        key,
-                        0L,
-                        KeysSpec.MigrateTimeout,
-                        copy = Option(Copy),
-                        replace = Option(Replace),
-                        keys = None
-                      )
+          response <- redis
+                        .migrate(
+                          BaseSpec.SingleNode1,
+                          6379,
+                          key,
+                          0L,
+                          KeysSpec.MigrateTimeout,
+                          copy = Option(Copy),
+                          replace = Option(Replace),
+                          keys = None
+                        )
           originGet <- redis.get(key).returning[String]
-          destGet   <- redis.get(key).returning[String].provideLayer(KeysSpec.SecondExecutor)
+          destGet   <- ZIO.serviceWithZIO[Redis](_.get(key).returning[String]).provideLayer(secondExecutor)
         } yield assert(response)(equalTo("OK")) &&
           assert(originGet)(isSome(equalTo(value))) &&
           assert(destGet)(isSome(equalTo(value)))
@@ -186,8 +188,8 @@ trait KeysSpec extends BaseSpec {
           redis <- ZIO.service[Redis]
           _     <- redis.set(key, value)
           response <- redis.migrate(
-                        "redis2",
-                        6379,
+                        BaseSpec.SingleNode1,
+                        6379L,
                         key,
                         0L,
                         KeysSpec.MigrateTimeout,
@@ -196,7 +198,7 @@ trait KeysSpec extends BaseSpec {
                         keys = None
                       )
           originGet <- redis.get(key).returning[String]
-          destGet   <- ZIO.serviceWithZIO[Redis](_.get(key).returning[String]).provideLayer(KeysSpec.SecondExecutor)
+          destGet   <- ZIO.serviceWithZIO[Redis](_.get(key).returning[String]).provideLayer(secondExecutor)
         } yield assert(response)(equalTo("OK")) &&
           assert(originGet)(isNone) &&
           assert(destGet)(isSome(equalTo(value)))
@@ -209,11 +211,19 @@ trait KeysSpec extends BaseSpec {
           _     <- redis.set(key, value)
           _     <- ZIO
                      .serviceWithZIO[Redis](_.set(key, value))
-                     .provideLayer(KeysSpec.SecondExecutor) // also add to second Redis
-          response <-
-            redis
-              .migrate("redis2", 6379, key, 0L, KeysSpec.MigrateTimeout, copy = None, replace = None, keys = None)
-              .either
+                     .provideLayer(secondExecutor) // also add to second Redis
+          response <- redis
+                        .migrate(
+                          BaseSpec.SingleNode1,
+                          6379,
+                          key,
+                          0L,
+                          KeysSpec.MigrateTimeout,
+                          copy = None,
+                          replace = None,
+                          keys = None
+                        )
+                        .either
         } yield assert(response)(isLeft(isSubtype[ProtocolError](anything)))
       }
     ) @@ clusterExecutorUnsupported,
@@ -490,17 +500,16 @@ trait KeysSpec extends BaseSpec {
       )
     )
   }
-}
-
-object KeysSpec {
-  final val MigrateTimeout: Duration = 5.seconds

-  final val SecondExecutor: Layer[RedisError.IOError, Redis] =
+  private val secondExecutor =
     ZLayer
-      .make[Redis](
-        ZLayer.succeed(RedisConfig("localhost", 6380)),
+      .makeSome[DockerComposeContainer, Redis](
+        singleNodeConfig(BaseSpec.SingleNode1),
         ZLayer.succeed[CodecSupplier](ProtobufCodecSupplier),
         Redis.singleNode
       )
-      .fresh
+}
+
+object KeysSpec {
+  final val MigrateTimeout = 5.seconds
 }
diff --git a/modules/redis/src/it/scala/zio/redis/ListSpec.scala b/modules/redis-it/src/test/scala/zio/redis/ListSpec.scala
similarity index 100%
rename from modules/redis/src/it/scala/zio/redis/ListSpec.scala
rename to modules/redis-it/src/test/scala/zio/redis/ListSpec.scala
diff --git a/modules/redis/src/it/scala/zio/redis/ProtobufCodecSupplier.scala b/modules/redis-it/src/test/scala/zio/redis/ProtobufCodecSupplier.scala
similarity index 100%
rename from modules/redis/src/it/scala/zio/redis/ProtobufCodecSupplier.scala
rename to modules/redis-it/src/test/scala/zio/redis/ProtobufCodecSupplier.scala
diff --git a/modules/redis/src/it/scala/zio/redis/PubSubSpec.scala b/modules/redis-it/src/test/scala/zio/redis/PubSubSpec.scala
similarity index 99%
rename from modules/redis/src/it/scala/zio/redis/PubSubSpec.scala
rename to modules/redis-it/src/test/scala/zio/redis/PubSubSpec.scala
index 411b0b925..2857537f0 100644
--- a/modules/redis/src/it/scala/zio/redis/PubSubSpec.scala
+++ b/modules/redis-it/src/test/scala/zio/redis/PubSubSpec.scala
@@ -237,7 +237,7 @@ trait PubSubSpec extends BaseSpec {
           )
         }
       )
-    )
+    ) @@ TestAspect.eventually

   private def generateRandomString(prefix: String = "") =
     ZIO.succeed(Random.alphanumeric.take(15).mkString).map(prefix + _.substring((prefix.length - 1) max 0))
diff --git a/modules/redis/src/it/scala/zio/redis/ScriptingSpec.scala b/modules/redis-it/src/test/scala/zio/redis/ScriptingSpec.scala
similarity index 100%
rename from modules/redis/src/it/scala/zio/redis/ScriptingSpec.scala
rename to modules/redis-it/src/test/scala/zio/redis/ScriptingSpec.scala
diff --git a/modules/redis/src/it/scala/zio/redis/SetsSpec.scala b/modules/redis-it/src/test/scala/zio/redis/SetsSpec.scala
similarity index 100%
rename from modules/redis/src/it/scala/zio/redis/SetsSpec.scala
rename to modules/redis-it/src/test/scala/zio/redis/SetsSpec.scala
diff --git a/modules/redis/src/it/scala/zio/redis/SortedSetsSpec.scala b/modules/redis-it/src/test/scala/zio/redis/SortedSetsSpec.scala
similarity index 100%
rename from modules/redis/src/it/scala/zio/redis/SortedSetsSpec.scala
rename to modules/redis-it/src/test/scala/zio/redis/SortedSetsSpec.scala
diff --git a/modules/redis/src/it/scala/zio/redis/StreamsSpec.scala b/modules/redis-it/src/test/scala/zio/redis/StreamsSpec.scala
similarity index 100%
rename from modules/redis/src/it/scala/zio/redis/StreamsSpec.scala
rename to modules/redis-it/src/test/scala/zio/redis/StreamsSpec.scala
diff --git a/modules/redis/src/it/scala/zio/redis/StringsSpec.scala b/modules/redis-it/src/test/scala/zio/redis/StringsSpec.scala
similarity index 100%
rename from modules/redis/src/it/scala/zio/redis/StringsSpec.scala
rename to modules/redis-it/src/test/scala/zio/redis/StringsSpec.scala
diff --git a/modules/redis/src/it/scala/zio/redis/internal/ClusterExecutorSpec.scala b/modules/redis-it/src/test/scala/zio/redis/internal/ClusterExecutorSpec.scala
similarity index 83%
rename from modules/redis/src/it/scala/zio/redis/internal/ClusterExecutorSpec.scala
rename to modules/redis-it/src/test/scala/zio/redis/internal/ClusterExecutorSpec.scala
index 2a3316dd8..40965b269 100644
--- a/modules/redis/src/it/scala/zio/redis/internal/ClusterExecutorSpec.scala
+++ b/modules/redis-it/src/test/scala/zio/redis/internal/ClusterExecutorSpec.scala
@@ -1,5 +1,4 @@
 package zio.redis.internal
-
 import zio._
 import zio.redis._
 import zio.redis.options.Cluster.{Slot, SlotsAmount}
@@ -7,7 +6,7 @@ import zio.test._

 object ClusterExecutorSpec extends BaseSpec {
   def spec: Spec[TestEnvironment, Any] =
-    suite("cluster executor")(
+    suite("Cluster executor")(
       test("check cluster responsiveness when ASK redirect happens") {
         for {
           redis <- ZIO.service[Redis]
@@ -19,17 +18,17 @@ object ClusterExecutorSpec extends BaseSpec {
           sourceMaster = sourcePart.master
           destPart     = initSlots((id + 1) % initSlots.size)
           destMaster   = destPart.master
-          destMasterConn = getRedisNodeLayer(destMaster.address)
+          destMasterConn = redisNodeLayer(destMaster.address)
           _ = ZIO.logDebug(s"$key _____ Importing $keySlot to ${destMaster.id} - ${destMaster.address}")
           _ <- ZIO.serviceWithZIO[Redis](_.setSlotImporting(keySlot, sourceMaster.id)).provideLayer(destMasterConn)
           _ = ZIO.logDebug(s"$key _____ Migrating $keySlot from ${sourceMaster.id}- ${sourceMaster.address}")
-          sourceMasterConn = getRedisNodeLayer(sourceMaster.address)
+          sourceMasterConn = redisNodeLayer(sourceMaster.address)
           _ <- ZIO.serviceWithZIO[Redis](_.setSlotMigrating(keySlot, destMaster.id)).provideLayer(sourceMasterConn)
           value2 <- redis.get(key).returning[String] // have to redirect without error ASK
           value3 <- redis.get(key).returning[String] // have to redirect without creating new connection
           _ <- ZIO.serviceWithZIO[Redis](_.setSlotStable(keySlot)).provideLayer(destMasterConn)
         } yield assertTrue(value1 == value2) && assertTrue(value2 == value3)
-      } @@ TestAspect.flaky,
+      },
       test("check client responsiveness when Moved redirect happened") {
         for {
           redis <- ZIO.service[Redis]
@@ -42,11 +41,11 @@ object ClusterExecutorSpec extends BaseSpec {
           sourceMaster = sourcePart.master
           destPart     = initSlots((id + 1) % initSlots.size)
           destMaster   = destPart.master
-          destMasterConn = getRedisNodeLayer(destMaster.address)
+          destMasterConn = redisNodeLayer(destMaster.address)
           _ <- ZIO.logDebug(s"$key _____ Importing $keySlot to ${destMaster.id}")
           _ <- ZIO.serviceWithZIO[Redis](_.setSlotImporting(keySlot, sourceMaster.id)).provideLayer(destMasterConn)
           _ <- ZIO.logDebug(s"$key _____ Migrating $keySlot from ${sourceMaster.id}")
-          sourceMasterConn = getRedisNodeLayer(sourceMaster.address)
+          sourceMasterConn = redisNodeLayer(sourceMaster.address)
           _ <- ZIO.serviceWithZIO[Redis](_.setSlotMigrating(keySlot, destMaster.id)).provideLayer(sourceMasterConn)
           _ <- ZIO
                  .serviceWithZIO[Redis](
@@ -59,23 +58,17 @@ object ClusterExecutorSpec extends BaseSpec {
           value3 <- redis.get(key).returning[String] // have to get value without refreshing connection
         } yield assertTrue(value1 == value2) && assertTrue(value2 == value3)
       }
-    ).provideLayerShared(ClusterLayer)
+    ).provideShared(
+      Redis.cluster,
+      compose(service(BaseSpec.MasterNode, ".*Cluster correctly created.*")),
+      masterNodeConfig,
+      ZLayer.succeed(ProtobufCodecSupplier)
+    ) @@ TestAspect.flaky

-  private final def getRedisNodeLayer(uri: RedisUri): Layer[Any, Redis] =
+  private def redisNodeLayer(uri: RedisUri): Layer[Any, Redis] =
     ZLayer.make[Redis](
       ZLayer.succeed(RedisConfig(uri.host, uri.port)),
       ZLayer.succeed(ProtobufCodecSupplier),
       Redis.singleNode
     )
-
-  private val ClusterLayer: Layer[Any, Redis] = {
-    val address1 = RedisUri("localhost", 5010)
-    val address2 = RedisUri("localhost", 5000)
-
-    ZLayer.make[Redis](
-      ZLayer.succeed(RedisClusterConfig(Chunk(address1, address2))),
-      ZLayer.succeed(ProtobufCodecSupplier),
-      Redis.cluster
-    )
-  }
 }
diff --git a/modules/redis/src/it/scala/zio/redis/BaseSpec.scala b/modules/redis/src/it/scala/zio/redis/BaseSpec.scala
deleted file mode 100644
index 7107fa215..000000000
--- a/modules/redis/src/it/scala/zio/redis/BaseSpec.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-package zio.redis
-
-import zio._
-import zio.schema.Schema
-import zio.schema.codec.{BinaryCodec, ProtobufCodec}
-import zio.test.TestAspect.{fibers, silentLogging, tag, timeout}
-import zio.test._
-
-import java.util.UUID
-
-trait BaseSpec extends ZIOSpecDefault {
-  implicit def summonCodec[A: Schema]: BinaryCodec[A] = ProtobufCodec.protobufCodec
-
-  override def aspects: Chunk[TestAspectAtLeastR[Live]] =
-    Chunk(fibers, silentLogging, timeout(10.seconds))
-
-  final val genStringRedisTypeOption: Gen[Any, Option[RedisType]] =
-    Gen.option(Gen.constSample(Sample.noShrink(RedisType.String)))
-
-  final val genCountOption: Gen[Any, Option[Count]] =
-    Gen.option(Gen.long(0, 100000).map(Count(_)))
-
-  final val genPatternOption: Gen[Any, Option[String]] =
-    Gen.option(Gen.constSample(Sample.noShrink("*")))
-
-  final val uuid: UIO[String] =
-    ZIO.succeed(UUID.randomUUID().toString)
-
-  /* TODO
-   * We can try to support the most unsupported commands for cluster with:
-   *   - [DONE] default connection for commands without a key and for multiple key commands with
-   *     the limitation that all keys have to be in the same slot
-   *   - fork/join approach for commands that operate on keys with different slots
-   */
-  final val clusterExecutorUnsupported: TestAspectPoly =
-    tag(BaseSpec.ClusterExecutorUnsupported)
-}
-
-object BaseSpec {
-  final val ClusterExecutorUnsupported = "cluster executor not supported"
-}
diff --git a/modules/redis/src/it/scala/zio/redis/ClusterSpec.scala b/modules/redis/src/it/scala/zio/redis/ClusterSpec.scala
deleted file mode 100644
index 95ad21c82..000000000
--- a/modules/redis/src/it/scala/zio/redis/ClusterSpec.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-package zio.redis
-
-import zio.ZIO
-import zio.test.Assertion._
-import zio.test._
-
-trait ClusterSpec extends BaseSpec {
-  def clusterSpec: Spec[Redis, RedisError] =
-    suite("cluster")(
-      suite("slots")(
-        test("get cluster slots") {
-          for {
-            res <- ZIO.serviceWithZIO[Redis](_.slots)
-          } yield {
-            val addresses    = (5000 to 5005).map(port => RedisUri("127.0.0.1", port))
-            val resAddresses = res.map(_.master.address) ++ res.flatMap(_.slaves.map(_.address))
-            assert(resAddresses.distinct)(hasSameElements(addresses))
-          }
-        }
-      )
-    )
-}
diff --git a/modules/redis/src/it/scala/zio/redis/internal/RedisConnectionSpec.scala b/modules/redis/src/it/scala/zio/redis/internal/RedisConnectionSpec.scala
deleted file mode 100644
index c1ce8ac6b..000000000
--- a/modules/redis/src/it/scala/zio/redis/internal/RedisConnectionSpec.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-package zio.redis.internal
-
-import zio.redis._
-import zio.test.Assertion.{equalTo, isSome}
-import zio.test.{Spec, assert}
-import zio.{Chunk, ZIO}
-
-import java.nio.charset.StandardCharsets
-
-object RedisConnectionSpec extends BaseSpec {
-  override def spec: Spec[Environment, Any] =
-    suite("Redis Connection Byte stream")(
-      test("can write and read") {
-        for {
-          stream <- ZIO.service[RedisConnection]
-          data    = Chunk.fromArray("*2\r\n$7\r\nCOMMAND\r\n$4\r\nINFO\r\n$3\r\nGET\r\n".getBytes(StandardCharsets.UTF_8))
-          _      <- stream.write(data)
-          res    <- stream.read.runHead
-        } yield assert(res)(isSome(equalTo('*'.toByte)))
-      }
-    ).provideLayer(RedisConnection.local)
-}
diff --git a/modules/redis/src/test/scala/zio/redis/BaseSpec.scala b/modules/redis/src/test/scala/zio/redis/BaseSpec.scala
index e85ed691f..a1414a076 100644
--- a/modules/redis/src/test/scala/zio/redis/BaseSpec.scala
+++ b/modules/redis/src/test/scala/zio/redis/BaseSpec.scala
@@ -1,14 +1,13 @@
 package zio.redis
-
 import zio._
 import zio.schema.Schema
 import zio.schema.codec.{BinaryCodec, ProtobufCodec}
-import zio.test.TestAspect.{fibers, silentLogging, timeout}
+import zio.test.TestAspect.{fibers, silentLogging}
 import zio.test._

 trait BaseSpec extends ZIOSpecDefault {
   implicit def summonCodec[A: Schema]: BinaryCodec[A] = ProtobufCodec.protobufCodec

   override def aspects: Chunk[TestAspectAtLeastR[Live]] =
-    Chunk(fibers, silentLogging, timeout(10.seconds))
+    Chunk(fibers, silentLogging)
 }
diff --git a/project/Dependencies.scala b/project/Dependencies.scala
index e1123dc78..4ee86729d 100644
--- a/project/Dependencies.scala
+++ b/project/Dependencies.scala
@@ -2,14 +2,15 @@ import sbt._

 object Dependencies {
   private object Versions {
-    val CatsEffect    = "3.5.1"
-    val EmbeddedRedis = "0.6"
-    val Redis4Cats    = "1.5.0"
-    val Sttp          = "3.9.0"
-    val ZHttp         = "2.0.0-RC11"
-    val ZioConfig     = "3.0.7"
-    val ZioJson       = "0.6.2"
-    val ZioSchema     = "0.4.14"
+    val CatsEffect        = "3.5.1"
+    val EmbeddedRedis     = "0.6"
+    val Redis4Cats        = "1.5.0"
+    val Sttp              = "3.9.0"
+    val ZHttp             = "2.0.0-RC11"
+    val ZioConfig        = "3.0.7"
+    val ZioJson          = "0.6.2"
+    val ZioSchema        = "0.4.14"
+    val ZioTestContainers = "0.4.1"
   }

   lazy val Benchmarks =
@@ -45,10 +46,11 @@ object Dependencies {

   def redis(zioVersion: String) =
     List(
-      "dev.zio" %% "zio-concurrent"      % zioVersion,
-      "dev.zio" %% "zio-schema"          % Versions.ZioSchema,
-      "dev.zio" %% "zio-schema-protobuf" % Versions.ZioSchema % "it,test",
-      "dev.zio" %% "zio-test"            % zioVersion         % IntegrationTest,
-      "dev.zio" %% "zio-test-sbt"        % zioVersion         % IntegrationTest
+      "dev.zio"                 %% "zio-concurrent"      % zioVersion,
+      "dev.zio"                 %% "zio-schema"          % Versions.ZioSchema,
+      "dev.zio"                 %% "zio-schema-protobuf" % Versions.ZioSchema         % Test,
+      "dev.zio"                 %% "zio-test"            % zioVersion                 % Test,
+      "dev.zio"                 %% "zio-test-sbt"        % zioVersion                 % Test,
+      "com.github.sideeffffect" %% "zio-testcontainers"  % Versions.ZioTestContainers % Test
     )
 }
diff --git a/project/build.properties b/project/build.properties
index 06969a377..303541e50 100644
--- a/project/build.properties
+++ b/project/build.properties
@@ -1 +1 @@
-sbt.version = 1.9.4
+sbt.version = 1.9.6
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 86577e950..653e9dcd2 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -1,4 +1,4 @@
-val ZioSbtVersion = "0.4.0-alpha.18"
+val ZioSbtVersion = "0.4.0-alpha.21"

 addSbtPlugin("com.thoughtworks.sbt-api-mappings" % "sbt-api-mappings" % "3.0.2")
 addSbtPlugin("dev.zio" % "zio-sbt-ci" % ZioSbtVersion)