From d76bb6b9869ed8f11d1710dde1113763330e07a3 Mon Sep 17 00:00:00 2001
From: Jules Ivanic
Date: Sun, 24 Sep 2023 17:19:33 +0400
Subject: [PATCH] Migrate to sbt-ci-release && Remove ScalaJS support && Delete Lagom related code && Rename `io.getquill.context.zio` package to `io.getquill.context.qzio` in the `jasync` modules (#2866)

* Migrate to sbt-ci-release
* Migrate to sbt-ci-release
* Fix sbt
* Don't wait for build
* Speed up release by splitting per Scala version
* Improve memory usage
* clean
* fix scala version
* clean
* fix CI
* Improve memory usage
* clean
* scalafmt
* try fix scaladoc
* try fix scaladoc
* try fix Scala 2.12 build
* Don't publish `quill-codegen-tests` module
* Revert "try fix Scala 2.12 build"

This reverts commit eb622c71dbe185d03fb8aba8e6d0d66b6440ad2a.

* Don't publish `quill-codegen-tests` module
* Don't publish Scala JS modules
* Fix Scala Steward configuration (#2868)
* Remove ScalaJS support (#2869)
* Remove ScalaJS support
* run publish
* fix compilation
* fix compilation
* fix CI
* Fix Scala 2.12 doc generation
* Clean CI
* clean
* clean
* scalafmt
* Fix CI
---
 .github/workflows/ci.yml | 151 +-----
 .gitignore | 1 +
 build.sbt | 309 +++--
 build/build.sh | 8 +-
 build/credentials.sbt.enc | Bin 160 -> 0 bytes
 build/deploy_key.pem.enc | 2 -
 build/pubring.gpg.enc | Bin 1792 -> 0 bytes
 build/release.sh | 136 -----
 build/secring.gpg.enc | Bin 3824 -> 0 bytes
 project/plugins.sbt | 16 +-
 .../getquill/CassandraLagomAsyncContext.scala | 81 ---
 .../CassandraLagomSessionContext.scala | 33 --
 .../CassandraLagomStreamContext.scala | 65 ---
 .../src/test/resources/application.conf | 18 -
 .../src/test/scala/io/getquill/Spec.scala | 38 --
 .../test/scala/io/getquill/TestEntities.scala | 57 --
 .../cassandra/CassandraTestEntities.scala | 16 -
 .../cassandra/EncodingSpecHelper.scala | 128 -----
 .../QueryResultTypeCassandraSpec.scala | 34 --
 .../cassandra/lagom/DecodeNullSpec.scala | 34 --
 .../cassandra/lagom/EncodingSpec.scala | 43 --
 .../QueryResultTypeCassandraAsyncSpec.scala | 73 ---
 .../context/cassandra/lagom/package.scala | 8 -
 .../cassandra/streaming/DecodeNullSpec.scala | 35 --
 .../cassandra/streaming/EncodingSpec.scala | 58 ---
 .../QueryResultTypeCassandraStreamSpec.scala | 35 --
 .../context/cassandra/streaming/package.scala | 8 -
 .../cassandra/utils/DummyService.scala | 10 -
 .../context/cassandra/utils/package.scala | 30 --
 .../other/ExampleAppImplicitEnv.scala | 3 +-
 .../getquill/codegen/util/SchemaMaker.scala | 2 +-
 .../io/getquill/codegen/dag/Ancestry.scala | 2 +-
 .../io/getquill/dsl/DynamicQueryDSL.scala | 491 ------------------
 .../scala/io/getquill/log/ContextLog.scala | 5 -
 .../io/getquill/dsl/DynamicQueryDSL.scala | 14 +-
 .../scala/io/getquill/log/ContextLog.scala | 0
 .../scala/io/getquill/MirrorContexts.scala | 2 +-
 .../context/mirror/MirrorIdiomSpec.scala | 2 +-
 .../io/getquill/util/LogToFileSpec.scala | 0
 .../PostgresJAsyncContextConfig.scala | 2 +-
 .../PostgresZioJAsyncContext.scala | 5 +-
 .../{zio => qzio}/jasync/ArrayDecoders.scala | 4 +-
 .../{zio => qzio}/jasync/ArrayEncoders.scala | 6 +-
 .../PostgresJAsyncContextConfigSpec.scala | 5 +-
 .../io/getquill/TypeParamExtensionTest.scala | 2 +-
 .../postgres/ArrayAsyncEncodingSpec.scala | 8 +-
 .../jasync/postgres/ArrayOpsAsyncSpec.scala | 2 +-
 .../postgres/CaseClassQueryAsyncSpec.scala | 2 +-
 .../DepartmentsPostgresAsyncSpec.scala | 2 +-
 .../jasync/postgres/OnConflictAsyncSpec.scala | 2 +-
 .../postgres/PeopleAsyncReturningSpec.scala | 2 +-
 .../postgres/PeoplePostgresAsyncSpec.scala | 2 +-
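
[Editorial aside] The package rename announced in the subject line moves the existing classes of the `quill-jasync-zio` and `quill-jasync-zio-postgres` modules from `io.getquill.context.zio` to `io.getquill.context.qzio`; the class names themselves are unchanged, so for downstream code the visible effect is an import update. A minimal sketch of that impact, assuming user code referenced these classes directly (class names are taken from the renamed files listed in this patch):

    // Before this patch the same classes lived under io.getquill.context.zio:
    //   import io.getquill.context.zio.{PostgresZioJAsyncContext, ZioJAsyncConnection}

    // After this patch only the package segment changes:
    import io.getquill.context.qzio.{PostgresZioJAsyncContext, ZioJAsyncConnection}
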
.../postgres/PostgresAsyncEncodingSpec.scala | 5 +- .../postgres/PostgresJAsyncContextSpec.scala | 4 +- .../postgres/ProductPostgresAsyncSpec.scala | 5 +- .../QueryResultTypePostgresAsyncSpec.scala | 2 +- .../jasync/postgres/TestContext.scala | 10 +- .../jasync/postgres/ZioSpec.scala | 4 +- .../jasync/postgres/package.scala | 2 +- .../context/{zio => qzio}/Decoders.scala | 2 +- .../context/{zio => qzio}/Encoders.scala | 2 +- .../{zio => qzio}/JAsyncContextConfig.scala | 2 +- .../context/{zio => qzio}/SqlTypes.scala | 2 +- .../{zio => qzio}/UUIDObjectEncoding.scala | 2 +- .../{zio => qzio}/UUIDStringEncoding.scala | 2 +- .../context/{zio => qzio}/ZIOMonad.scala | 2 +- .../{zio => qzio}/ZioJAsyncConnection.scala | 10 +- .../{zio => qzio}/ZioJAsyncContext.scala | 2 +- .../scala/io/getquill/ZioJdbcContexts.scala | 2 +- .../scala/io/getquill/context/ZioJdbc.scala | 5 +- .../context/qzio/ZioJdbcContext.scala | 5 +- .../examples/other/ZioAppImplicitEnv.scala | 1 + .../scala/io/getquill/QuillSparkContext.scala | 2 +- .../io/getquill/context/spark/Decoders.scala | 4 +- version.sbt | 1 - 75 files changed, 162 insertions(+), 1908 deletions(-) delete mode 100644 build/credentials.sbt.enc delete mode 100644 build/deploy_key.pem.enc delete mode 100644 build/pubring.gpg.enc delete mode 100755 build/release.sh delete mode 100644 build/secring.gpg.enc delete mode 100644 quill-cassandra-lagom/src/main/scala/io/getquill/CassandraLagomAsyncContext.scala delete mode 100644 quill-cassandra-lagom/src/main/scala/io/getquill/CassandraLagomSessionContext.scala delete mode 100644 quill-cassandra-lagom/src/main/scala/io/getquill/CassandraLagomStreamContext.scala delete mode 100644 quill-cassandra-lagom/src/test/resources/application.conf delete mode 100644 quill-cassandra-lagom/src/test/scala/io/getquill/Spec.scala delete mode 100644 quill-cassandra-lagom/src/test/scala/io/getquill/TestEntities.scala delete mode 100644 quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/CassandraTestEntities.scala delete mode 100644 quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/EncodingSpecHelper.scala delete mode 100644 quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/QueryResultTypeCassandraSpec.scala delete mode 100644 quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/lagom/DecodeNullSpec.scala delete mode 100644 quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/lagom/EncodingSpec.scala delete mode 100644 quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/lagom/QueryResultTypeCassandraAsyncSpec.scala delete mode 100644 quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/lagom/package.scala delete mode 100644 quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/streaming/DecodeNullSpec.scala delete mode 100644 quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/streaming/EncodingSpec.scala delete mode 100644 quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/streaming/QueryResultTypeCassandraStreamSpec.scala delete mode 100644 quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/streaming/package.scala delete mode 100644 quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/utils/DummyService.scala delete mode 100644 quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/utils/package.scala delete mode 100644 quill-core/js/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala delete mode 100644 
quill-core/js/src/main/scala/io/getquill/log/ContextLog.scala rename quill-core/{jvm => }/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala (99%) rename quill-core/{jvm => }/src/main/scala/io/getquill/log/ContextLog.scala (100%) rename quill-core/{jvm => }/src/test/scala/io/getquill/util/LogToFileSpec.scala (100%) rename quill-jasync-zio-postgres/src/main/scala/io/getquill/context/{zio => qzio}/PostgresJAsyncContextConfig.scala (92%) rename quill-jasync-zio-postgres/src/main/scala/io/getquill/context/{zio => qzio}/PostgresZioJAsyncContext.scala (90%) rename quill-jasync-zio-postgres/src/main/scala/io/getquill/context/{zio => qzio}/jasync/ArrayDecoders.scala (96%) rename quill-jasync-zio-postgres/src/main/scala/io/getquill/context/{zio => qzio}/jasync/ArrayEncoders.scala (93%) rename quill-jasync-zio-postgres/src/test/scala/io/getquill/context/{zio => qzio}/jasync/postgres/ArrayAsyncEncodingSpec.scala (98%) rename quill-jasync-zio-postgres/src/test/scala/io/getquill/context/{zio => qzio}/jasync/postgres/ArrayOpsAsyncSpec.scala (93%) rename quill-jasync-zio-postgres/src/test/scala/io/getquill/context/{zio => qzio}/jasync/postgres/CaseClassQueryAsyncSpec.scala (97%) rename quill-jasync-zio-postgres/src/test/scala/io/getquill/context/{zio => qzio}/jasync/postgres/DepartmentsPostgresAsyncSpec.scala (95%) rename quill-jasync-zio-postgres/src/test/scala/io/getquill/context/{zio => qzio}/jasync/postgres/OnConflictAsyncSpec.scala (95%) rename quill-jasync-zio-postgres/src/test/scala/io/getquill/context/{zio => qzio}/jasync/postgres/PeopleAsyncReturningSpec.scala (97%) rename quill-jasync-zio-postgres/src/test/scala/io/getquill/context/{zio => qzio}/jasync/postgres/PeoplePostgresAsyncSpec.scala (97%) rename quill-jasync-zio-postgres/src/test/scala/io/getquill/context/{zio => qzio}/jasync/postgres/PostgresAsyncEncodingSpec.scala (96%) rename quill-jasync-zio-postgres/src/test/scala/io/getquill/context/{zio => qzio}/jasync/postgres/PostgresJAsyncContextSpec.scala (95%) rename quill-jasync-zio-postgres/src/test/scala/io/getquill/context/{zio => qzio}/jasync/postgres/ProductPostgresAsyncSpec.scala (96%) rename quill-jasync-zio-postgres/src/test/scala/io/getquill/context/{zio => qzio}/jasync/postgres/QueryResultTypePostgresAsyncSpec.scala (98%) rename quill-jasync-zio-postgres/src/test/scala/io/getquill/context/{zio => qzio}/jasync/postgres/TestContext.scala (81%) rename quill-jasync-zio-postgres/src/test/scala/io/getquill/context/{zio => qzio}/jasync/postgres/ZioSpec.scala (93%) rename quill-jasync-zio-postgres/src/test/scala/io/getquill/context/{zio => qzio}/jasync/postgres/package.scala (63%) rename quill-jasync-zio/src/main/scala/io/getquill/context/{zio => qzio}/Decoders.scala (99%) rename quill-jasync-zio/src/main/scala/io/getquill/context/{zio => qzio}/Encoders.scala (98%) rename quill-jasync-zio/src/main/scala/io/getquill/context/{zio => qzio}/JAsyncContextConfig.scala (98%) rename quill-jasync-zio/src/main/scala/io/getquill/context/{zio => qzio}/SqlTypes.scala (90%) rename quill-jasync-zio/src/main/scala/io/getquill/context/{zio => qzio}/UUIDObjectEncoding.scala (91%) rename quill-jasync-zio/src/main/scala/io/getquill/context/{zio => qzio}/UUIDStringEncoding.scala (92%) rename quill-jasync-zio/src/main/scala/io/getquill/context/{zio => qzio}/ZIOMonad.scala (98%) rename quill-jasync-zio/src/main/scala/io/getquill/context/{zio => qzio}/ZioJAsyncConnection.scala (90%) rename quill-jasync-zio/src/main/scala/io/getquill/context/{zio => qzio}/ZioJAsyncContext.scala (99%) delete mode 100644 
version.sbt diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 127a66c896..5dc65007e0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -5,24 +5,14 @@ on: push: branches: - master - # Do not activate CI on tags for now. For releases, run the build the initial release build - # tags: - # - "**" + release: + types: [ published ] # Triggers the CI when a new Github Release is published + +env: + JAVA_OPTS: -Xms6G -Xmx6G -XX:+UseG1GC + JDK_JAVA_OPTIONS: -Xms6G -Xmx6G -XX:+UseG1GC # See https://stackoverflow.com/a/73708006 jobs: - secrets: - runs-on: ubuntu-latest - outputs: - secrets: ${{ steps.secrets.outputs.secrets }} - steps: - - name: secrets - id: secrets - env: - HAS_PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE != '' }} - HAS_ENCRYPTION_PASSWORD: ${{ secrets.ENCRYPTION_PASSWORD != '' }} - shell: bash - if: env.HAS_PGP_PASSPHRASE == 'true' && env.HAS_ENCRYPTION_PASSWORD == 'true' - run: echo ::set-output name=secrets::true build: runs-on: ubuntu-latest @@ -35,7 +25,7 @@ jobs: fail-fast: false matrix: scala: [2.12.x, 2.13.x, 3.3.x] - module: [base, db, js, async, codegen, bigdata] + module: [base, db, async, codegen, bigdata] include: - scala: 2.12.x scala_short: 212 @@ -50,8 +40,6 @@ jobs: # For now, only do the `base` build for Scala 3 - scala: 3.3.x module: db - - scala: 3.3.x - module: js - scala: 3.3.x module: async - scala: 3.3.x @@ -67,7 +55,7 @@ jobs: name: Build ${{matrix.scala_short_dot}} - ${{matrix.module}} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4.0.0 - name: Cache sbt uses: actions/cache@v3 with: @@ -77,7 +65,7 @@ jobs: key: ${{ runner.os }}-${{matrix.scala}}-${{matrix.module}}-${{ hashFiles('**/build.sbt') }} - name: Setup Java and Scala - uses: actions/setup-java@v3.10.0 + uses: actions/setup-java@v3.12.0 with: distribution: temurin java-version: '8' @@ -90,123 +78,28 @@ jobs: env: SCALA_VERSION: ${{ matrix.scala }} - release: - if: ${{github.event_name != 'pull_request' && needs.secrets.outputs.secrets }} - runs-on: ubuntu-latest - - needs: - - build - - secrets - - strategy: - fail-fast: false - matrix: - scala: [2.12.x, 2.13.x, 3.3.x] - module: [docs, base, db, js, async, codegen, bigdata] - include: - - scala: 2.12.x - scala_short: 212 - scala_short_dot: 2.12 - - scala: 2.13.x - scala_short: 213 - scala_short_dot: 2.13 - - scala: 3.3.x - scala_short: 33 - scala_short_dot: 3.3 - exclude: - - scala: 3.3.x - module: db - - scala: 3.3.x - module: js - - scala: 3.3.x - module: async - - scala: 3.3.x - module: codegen - - scala: 3.3.x - module: bigdata - - name: Release ${{matrix.scala_short_dot}} - ${{matrix.module}} - - steps: - - uses: actions/checkout@v3 - - name: Cache sbt - uses: actions/cache@v3 - with: - path: | - ~/.cache/coursier - ~/.sbt - key: ${{ runner.os }}-${{matrix.scala}}-${{matrix.module}}-${{ hashFiles('**/build.sbt') }} - - - name: Setup Java and Scala - uses: actions/setup-java@v3.10.0 - with: - distribution: temurin - java-version: '8' - check-latest: true - - - name: Release - run: | - echo "SCALA_VERSION='$SCALA_VERSION'" - echo "PULL_REQUEST='$PULL_REQUEST'" - echo "GITHUB_REF='$GITHUB_REF'" - export BRANCH=$(git for-each-ref ${{ github.ref }} --format='%(refname:short)') - echo "BRANCH='$BRANCH'" - ./build/release.sh ${{ matrix.scala_short }} ${{ matrix.module }} - env: - ENCRYPTION_PASSWORD: ${{ secrets.ENCRYPTION_PASSWORD }} - GITHUB_REF: ${{ github.ref }} - PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE }} - PULL_REQUEST: ${{ !!github.event.pull_request }} - SCALA_VERSION: ${{ 
matrix.scala }} - - publish: - if: ${{github.event_name != 'pull_request' && needs.secrets.outputs.secrets }} + release_scala: + if: ${{github.event_name != 'pull_request'}} runs-on: ubuntu-latest - - needs: - - release - - secrets - + needs: build strategy: fail-fast: false matrix: - scala: [2.13.x] - module: [publish] - include: - - scala: 2.13.x - scala_short: 213 - scala_short_dot: 2.13 - - name: Publish ${{matrix.scala_short_dot}} - ${{matrix.module}} - + scala: [ 2.12.x, 2.13.x, 3.3.x ] + env: + CI_RELEASE: publishSigned # By default, sbt-ci-release uses the `+publishSigned` which is publishing for all the Scala versions configured, which is not what we want + CI_SNAPSHOT_RELEASE: publish # By default, sbt-ci-release uses the `+publish` which is publishing for all the Scala versions configured, which is not what we want steps: - - uses: actions/checkout@v3 - - name: Cache sbt - uses: actions/cache@v3 - with: - path: | - ~/.cache/coursier - ~/.sbt - key: ${{ runner.os }}-${{matrix.scala}}-${{matrix.module}}-${{ hashFiles('**/build.sbt') }} - + - uses: actions/checkout@v4.0.0 - name: Setup Java and Scala - uses: actions/setup-java@v3.10.0 + uses: actions/setup-java@v3.12.0 with: distribution: temurin java-version: '8' check-latest: true - - - name: Release - run: | - echo "SCALA_VERSION='$SCALA_VERSION'" - echo "PULL_REQUEST='$PULL_REQUEST'" - echo "GITHUB_REF='$GITHUB_REF'" - export BRANCH=$(git for-each-ref ${{ github.ref }} --format='%(refname:short)') - echo "BRANCH='$BRANCH'" - ./build/release.sh ${{ matrix.scala_short }} ${{ matrix.module }} + - run: sbt ++${{ matrix.scala }} -Dquill.scala.version=${{ matrix.scala }} ci-release env: - ENCRYPTION_PASSWORD: ${{ secrets.ENCRYPTION_PASSWORD }} - GITHUB_REF: ${{ github.ref }} PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE }} - PULL_REQUEST: ${{ !!github.event.pull_request }} - SCALA_VERSION: ${{ matrix.scala }} + PGP_SECRET: ${{ secrets.PGP_SECRET }} + SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }} + SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }} diff --git a/.gitignore b/.gitignore index 9aed71bc5c..a2450b5df1 100644 --- a/.gitignore +++ b/.gitignore @@ -46,3 +46,4 @@ project/.bloop/ .DS_Store .vscode .history +metals.sbt diff --git a/build.sbt b/build.sbt index 84fb3886c5..8df35756ba 100644 --- a/build.sbt +++ b/build.sbt @@ -1,20 +1,21 @@ -import ReleaseTransformations._ -import sbtrelease.ReleasePlugin -import sbtcrossproject.CrossPlugin.autoImport.crossProject - import java.io.{File => JFile} +import com.jsuereth.sbtpgp.PgpKeys.publishSigned import scala.collection.immutable.ListSet +Global / onChangedBuildSource := ReloadOnSourceChanges + inThisBuild( List( organization := "io.getquill", homepage := Some(url("https://zio.dev/zio-quill")), + licenses := List(("Apache License 2.0", url("http://www.apache.org/licenses/LICENSE-2.0"))), + developers := List( + Developer("fwbrasil", "Flavio W. Brasil", "", url("https://github.com/fwbrasil")), + Developer("deusaquilus", "Alexander Ioffe", "", url("https://github.com/deusaquilus")) + ), scmInfo := Some( - ScmInfo( - homepage.value.get, - "scm:git:git@github.com:zio/zio-quill.git" - ) + ScmInfo(url("https://github.com/zio/zio-quill"), "git:git@github.com:zio/zio-quill.git") ), scalafmtCheck := true, scalafmtSbtCheck := true, @@ -22,38 +23,13 @@ inThisBuild( ) ) -// During release cycles, GPG will expect passphrase user-input EVEN when --passphrase is specified -// this should add --pinentry-loopback in order to disable that. 
See here for more info: -// https://github.com/sbt/sbt-pgp/issues/178 -Global / useGpgPinentry := true - -// Do not strip the qualifier, want to keep that. If I set version.sbt to 1.2.3.foo.1 that's exactly what I want the version to be -releaseVersion := { ver => ver } -releaseNextVersion := { ver => - val withoutLast = ver.reverse.dropWhile(_.isDigit).reverse - val last = ver.reverse.takeWhile(_.isDigit).reverse - println(s"Detected original version: ${ver}. Which is ${withoutLast} + ${last}") - // see if the last group of chars are numeric, if they are, just increment - val actualLast = scala.util.Try(last.toInt).map(i => (i + 1).toString).getOrElse(last) - val newVer = withoutLast + actualLast + "-SNAPSHOT" - println(s"Final computed version is: ${newVer}") - newVer -} - val CodegenTag = Tags.Tag("CodegenTag") (Global / concurrentRestrictions) += Tags.exclusive(CodegenTag) -(Global / concurrentRestrictions) += Tags.limit(ScalaJSTags.Link, 1) - -lazy val jsModules = Seq[sbt.ClasspathDep[sbt.ProjectReference]]( - `quill-engine-js`, - `quill-core-js`, - `quill-sql-js` -) lazy val baseModules = Seq[sbt.ClasspathDep[sbt.ProjectReference]]( - `quill-engine-jvm`, - `quill-core-jvm`, - `quill-sql-jvm`, + `quill-engine`, + `quill-core`, + `quill-sql`, `quill-monix`, `quill-zio`, `quill-util` @@ -94,10 +70,10 @@ lazy val bigdataModules = Seq[sbt.ClasspathDep[sbt.ProjectReference]]( ) lazy val allModules = - baseModules ++ jsModules ++ dbModules ++ jasyncModules ++ codegenModules ++ bigdataModules ++ docsModules + baseModules ++ dbModules ++ jasyncModules ++ codegenModules ++ bigdataModules ++ docsModules lazy val scala213Modules = - baseModules ++ jsModules ++ dbModules ++ codegenModules ++ Seq[sbt.ClasspathDep[sbt.ProjectReference]]( + baseModules ++ dbModules ++ codegenModules ++ Seq[sbt.ClasspathDep[sbt.ProjectReference]]( `quill-cassandra`, `quill-cassandra-alpakka`, `quill-cassandra-monix`, @@ -111,7 +87,7 @@ lazy val scala213Modules = `quill-spark` ) -lazy val scala3Modules = Seq[sbt.ClasspathDep[sbt.ProjectReference]](`quill-engine-jvm`, `quill-util`) +lazy val scala3Modules = Seq[sbt.ClasspathDep[sbt.ProjectReference]](`quill-engine`, `quill-util`) def isScala213 = { val scalaVersion = sys.props.get("quill.scala.version") @@ -145,9 +121,6 @@ lazy val filteredModules = { case "base" => println("SBT =:> Compiling Base Modules") baseModules - case "js" => - println("SBT =:> Compiling JavaScript Modules") - jsModules case "db" => println("SBT =:> Compiling Database Modules") dbModules @@ -159,7 +132,7 @@ lazy val filteredModules = { codegenModules case "nocodegen" => println("Compiling Not-Code Generator Modules") - baseModules ++ jsModules ++ dbModules ++ jasyncModules ++ bigdataModules + baseModules ++ dbModules ++ jasyncModules ++ bigdataModules case "bigdata" => println("SBT =:> Compiling Big Data Modules") bigdataModules @@ -204,10 +177,14 @@ lazy val filteredModules = { lazy val `quill` = (project in file(".")) .settings(commonSettings: _*) + .settings( + publishArtifact := false, + publish / skip := true, + publishLocal / skip := true, + publishSigned / skip := true, + crossScalaVersions := Nil // https://www.scala-sbt.org/1.x/docs/Cross-Build.html#Cross+building+a+project+statefully + ) .aggregate(filteredModules.map(_.project).toSeq: _*) - .dependsOn(filteredModules.toSeq: _*) - -`quill` / publishArtifact := false lazy val `quill-util` = (project in file("quill-util")) @@ -226,7 +203,7 @@ lazy val `quill-util` = ExclusionRule(organization = "com.lihaoyi", name = "fansi_2.13") ) else 
- Seq() + Seq.empty }): _* ) .cross(CrossVersion.for3Use2_13) @@ -240,75 +217,28 @@ lazy val `quill-util` = "com.lihaoyi" %% "sourcecode" % "0.3.0", "com.lihaoyi" %% "fansi" % "0.3.0" ) - else Seq() + else Seq.empty } ) .enablePlugins(MimaPlugin) -lazy val superPure = new sbtcrossproject.CrossType { - def projectDir(crossBase: File, projectType: String): File = - projectType match { - case "jvm" => crossBase / s"$projectType" - case "js" => crossBase / s"$projectType" - } - - def sharedSrcDir(projectBase: File, conf: String): Option[File] = - Some(projectBase.getParentFile / "src" / conf / "scala") - - override def projectDir(crossBase: File, projectType: sbtcrossproject.Platform): File = - projectType match { - case JVMPlatform => crossBase / "jvm" - case JSPlatform => crossBase / "js" - } -} - -lazy val ultraPure = new sbtcrossproject.CrossType { - def projectDir(crossBase: File, projectType: String): File = - projectType match { - case "jvm" => crossBase - case "js" => crossBase / s".$projectType" - } - - def sharedSrcDir(projectBase: File, conf: String): Option[File] = - Some(projectBase.getParentFile / "src" / conf / "scala") - - override def projectDir(crossBase: File, projectType: sbtcrossproject.Platform): File = - projectType match { - case JVMPlatform => crossBase - case JSPlatform => crossBase / ".js" - } -} - lazy val `quill-engine` = - crossProject(JVMPlatform, JSPlatform) - .crossType(ultraPure) + project .settings(commonSettings: _*) .settings( libraryDependencies ++= Seq( "com.typesafe" % "config" % "1.4.2", "com.typesafe.scala-logging" %% "scala-logging" % "3.9.5", - ("com.github.takayahilton" %%% "sql-formatter" % "1.2.1").cross(CrossVersion.for3Use2_13), + ("com.github.takayahilton" %% "sql-formatter" % "1.2.1").cross(CrossVersion.for3Use2_13), "io.suzaku" %% "boopickle" % "1.4.0", - "com.lihaoyi" %%% "pprint" % "0.8.1" + "com.lihaoyi" %% "pprint" % "0.8.1" ), coverageExcludedPackages := ";.*AstPrinter;.*Using;io.getquill.Model;io.getquill.ScalarTag;io.getquill.QuotationTag" ) - .jsSettings( - libraryDependencies ++= Seq( - "io.github.cquiroz" %%% "scala-java-time" % "2.5.0", - "org.scala-lang.modules" %%% "scala-collection-compat" % scalaCollectionCompatVersion, - "io.suzaku" %%% "boopickle" % "1.4.0" - ), - coverageExcludedPackages := ".*" - ) .enablePlugins(MimaPlugin) -lazy val `quill-engine-jvm` = `quill-engine`.jvm -lazy val `quill-engine-js` = `quill-engine`.js - lazy val `quill-core` = - crossProject(JVMPlatform, JSPlatform) - .crossType(superPure) + project .settings(commonSettings: _*) .settings( libraryDependencies ++= Seq( @@ -317,44 +247,25 @@ lazy val `quill-core` = "dev.zio" %% "zio" % Version.zio, "dev.zio" %% "zio-streams" % Version.zio, "com.typesafe.scala-logging" %% "scala-logging" % "3.9.5" - ) - ) - .jvmSettings( + ), Test / fork := true ) - .jsSettings( - unmanagedSources / excludeFilter := new SimpleFileFilter(file => file.getName == "DynamicQuerySpec.scala"), - coverageExcludedPackages := ".*" - ) .dependsOn(`quill-engine` % "compile->compile") .enablePlugins(MimaPlugin) -// dependsOn in these clauses technically not needed however, intellij does not work properly without them -lazy val `quill-core-jvm` = `quill-core`.jvm.dependsOn(`quill-engine-jvm` % "compile->compile") -lazy val `quill-core-js` = `quill-core`.js.dependsOn(`quill-engine-js` % "compile->compile") - lazy val `quill-sql` = - crossProject(JVMPlatform, JSPlatform) - .crossType(ultraPure) + project .settings(commonSettings: _*) - .jsSettings( - scalaJSLinkerConfig ~= { 
_.withModuleKind(ModuleKind.CommonJSModule) }, - coverageExcludedPackages := ".*", - libraryDependencies += "org.scala-js" %%% "scalajs-java-securerandom" % "1.0.0" - ) .dependsOn( `quill-engine` % "compile->compile", `quill-core` % "compile->compile;test->test" ) .enablePlugins(MimaPlugin) -lazy val `quill-sql-jvm` = `quill-sql`.jvm -lazy val `quill-sql-js` = `quill-sql`.js - lazy val `quill-codegen` = (project in file("quill-codegen")) .settings(commonSettings: _*) - .dependsOn(`quill-core-jvm` % "compile->compile;test->test") + .dependsOn(`quill-core` % "compile->compile;test->test") lazy val `quill-codegen-jdbc` = (project in file("quill-codegen-jdbc")) @@ -373,6 +284,7 @@ lazy val `quill-codegen-tests` = (project in file("quill-codegen-tests")) .settings(commonSettings: _*) .settings( + publish / skip := true, libraryDependencies += "org.scala-lang" % "scala-compiler" % scalaVersion.value % Test, Test / fork := true, (Test / sourceGenerators) += Def.task { @@ -409,20 +321,14 @@ val excludeTests = case regex => ExcludeTests.KeepSome(regex) } -val skipPush = - sys.props.getOrElse("skipPush", "false").toBoolean - val debugMacro = sys.props.getOrElse("debugMacro", "false").toBoolean -val skipTag = - sys.props.getOrElse("skipTag", "false").toBoolean - lazy val `quill-jdbc` = (project in file("quill-jdbc")) .settings(commonSettings: _*) .settings(jdbcTestingSettings: _*) - .dependsOn(`quill-sql-jvm` % "compile->compile;test->test") + .dependsOn(`quill-sql` % "compile->compile;test->test") .enablePlugins(MimaPlugin) ThisBuild / libraryDependencySchemes += "org.typelevel" %% "cats-effect" % "always" @@ -449,7 +355,7 @@ lazy val `quill-monix` = ("io.monix" %% "monix-reactive" % "3.0.0").cross(CrossVersion.for3Use2_13) ) ) - .dependsOn(`quill-core-jvm` % "compile->compile;test->test") + .dependsOn(`quill-core` % "compile->compile;test->test") .enablePlugins(MimaPlugin) lazy val `quill-jdbc-monix` = @@ -473,7 +379,7 @@ lazy val `quill-jdbc-monix` = } ) .dependsOn(`quill-monix` % "compile->compile;test->test") - .dependsOn(`quill-sql-jvm` % "compile->compile;test->test") + .dependsOn(`quill-sql` % "compile->compile;test->test") .dependsOn(`quill-jdbc` % "compile->compile;test->test") .enablePlugins(MimaPlugin) @@ -487,7 +393,7 @@ lazy val `quill-zio` = "dev.zio" %% "zio-streams" % Version.zio ) ) - .dependsOn(`quill-core-jvm` % "compile->compile;test->test") + .dependsOn(`quill-core` % "compile->compile;test->test") .enablePlugins(MimaPlugin) lazy val `quill-jdbc-zio` = @@ -520,7 +426,7 @@ lazy val `quill-jdbc-zio` = } ) .dependsOn(`quill-zio` % "compile->compile;test->test") - .dependsOn(`quill-sql-jvm` % "compile->compile;test->test") + .dependsOn(`quill-sql` % "compile->compile;test->test") .dependsOn(`quill-jdbc` % "compile->compile;test->test") .enablePlugins(MimaPlugin) @@ -532,7 +438,7 @@ lazy val `quill-spark` = libraryDependencies ++= Seq("org.apache.spark" %% "spark-sql" % "3.4.0"), excludeDependencies ++= Seq("ch.qos.logback" % "logback-classic") ) - .dependsOn(`quill-sql-jvm` % "compile->compile;test->test") + .dependsOn(`quill-sql` % "compile->compile;test->test") .enablePlugins(MimaPlugin) lazy val `quill-jasync` = @@ -545,7 +451,7 @@ lazy val `quill-jasync` = "org.scala-lang.modules" %% "scala-java8-compat" % "0.9.1" ) ) - .dependsOn(`quill-sql-jvm` % "compile->compile;test->test") + .dependsOn(`quill-sql` % "compile->compile;test->test") .enablePlugins(MimaPlugin) lazy val `quill-jasync-postgres` = @@ -585,7 +491,7 @@ lazy val `quill-jasync-zio` = ) ) .dependsOn(`quill-zio` % 
"compile->compile;test->test") - .dependsOn(`quill-sql-jvm` % "compile->compile;test->test") + .dependsOn(`quill-sql` % "compile->compile;test->test") .enablePlugins(MimaPlugin) lazy val `quill-jasync-zio-postgres` = @@ -613,7 +519,7 @@ lazy val `quill-cassandra` = }) ) ) - .dependsOn(`quill-core-jvm` % "compile->compile;test->test") + .dependsOn(`quill-core` % "compile->compile;test->test") .enablePlugins(MimaPlugin) lazy val `quill-cassandra-monix` = @@ -653,25 +559,6 @@ lazy val `quill-cassandra-alpakka` = .dependsOn(`quill-cassandra` % "compile->compile;test->test") .enablePlugins(MimaPlugin) -//lazy val `quill-cassandra-lagom` = -// (project in file("quill-cassandra-lagom")) -// .settings(commonSettings: _*) -// .settings( -// Test / fork := true, -// libraryDependencies ++= { -// val lagomVersion = if (scalaVersion.value.startsWith("2.13")) "1.6.5" else "1.5.5" -// val versionSpecificDependencies = if (scalaVersion.value.startsWith("2.13")) Seq("com.typesafe.play" %% "play-akka-http-server" % "2.8.8") else Seq.empty -// Seq( -// "com.lightbend.lagom" %% "lagom-scaladsl-persistence-cassandra" % lagomVersion % Provided, -// "com.lightbend.lagom" %% "lagom-scaladsl-testkit" % lagomVersion % Test, -// "com.datastax.cassandra" % "cassandra-driver-core" % "3.11.2", -// // lagom uses datastax 3.x driver - not compatible with 4.x in API level -// "io.getquill" %% "quill-cassandra" % "3.10.0" % "compile->compile" -// ) ++ versionSpecificDependencies -// } -// ) -// .enablePlugins(MimaPlugin) - lazy val `quill-orientdb` = (project in file("quill-orientdb")) .settings(commonSettings: _*) @@ -681,20 +568,9 @@ lazy val `quill-orientdb` = "com.orientechnologies" % "orientdb-graphdb" % "3.2.23" ) ) - .dependsOn(`quill-sql-jvm` % "compile->compile;test->test") + .dependsOn(`quill-sql` % "compile->compile;test->test") .enablePlugins(MimaPlugin) -commands += Command.command("checkUnformattedFiles") { st => - val vcs = Project.extract(st).get(releaseVcs).get - val modified = - vcs.cmd("ls-files", "--modified", "--exclude-standard").!!.trim.split('\n').filter(_.contains(".scala")) - if (modified.nonEmpty) - throw new IllegalStateException( - s"Please run `sbt scalafmtAll` and resubmit your pull request. 
Found unformatted files: ${modified.toList}" - ) - st -} - lazy val jdbcTestingLibraries = Seq( libraryDependencies ++= Seq( "com.zaxxer" % "HikariCP" % "4.0.3" exclude ("org.slf4j", "*"), @@ -777,12 +653,12 @@ lazy val loggingSettings = Seq( lazy val basicSettings = excludeFilterSettings ++ Seq( Test / testOptions += Tests.Argument("-oI"), - organization := "io.getquill", scalaVersion := scala_v_13, crossScalaVersions := Seq(scala_v_12, scala_v_13, scala_v_30), libraryDependencies ++= Seq( - "org.scalatest" %%% "scalatest" % "3.2.17" % Test, - "com.google.code.findbugs" % "jsr305" % "3.0.2" % Provided // just to avoid warnings during compilation + "org.scalatest" %% "scalatest" % "3.2.17" % Test, + "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, + "com.google.code.findbugs" % "jsr305" % "3.0.2" % Provided // just to avoid warnings during compilation ) ++ { if (debugMacro && isScala2) Seq( @@ -790,9 +666,7 @@ lazy val basicSettings = excludeFilterSettings ++ Seq( "org.scala-lang" % "scala-compiler" % scalaVersion.value, "org.scala-lang" % "scala-reflect" % scalaVersion.value ) - else Seq() - } ++ { - Seq("org.scala-lang.modules" %%% "scala-collection-compat" % scalaCollectionCompatVersion) + else Seq.empty }, Test / unmanagedClasspath ++= Seq( baseDirectory.value / "src" / "test" / "resources" @@ -824,7 +698,7 @@ lazy val basicSettings = excludeFilterSettings ++ Seq( "-Ywarn-unused:imports", "-Ycache-macro-class-loader:last-modified" ) - case _ => Seq() + case _ => Seq.empty } }, Global / concurrentRestrictions += Tags.limit(Tags.Test, 1), @@ -832,78 +706,8 @@ lazy val basicSettings = excludeFilterSettings ++ Seq( scoverage.ScoverageKeys.coverageFailOnMinimum := false ) -def doOnDefault(steps: ReleaseStep*): Seq[ReleaseStep] = - Seq[ReleaseStep](steps: _*) - -def doOnPush(steps: ReleaseStep*): Seq[ReleaseStep] = - if (skipPush) - Seq[ReleaseStep]() - else - Seq[ReleaseStep](steps: _*) - -lazy val commonNoLogSettings = ReleasePlugin.extraReleaseCommands ++ basicSettings ++ releaseSettings -lazy val commonSettings = ReleasePlugin.extraReleaseCommands ++ basicSettings ++ loggingSettings ++ releaseSettings - -lazy val releaseSettings = Seq( - resolvers ++= Seq( - Resolver.mavenLocal, - "Sonatype OSS Snapshots" at "https://oss.sonatype.org/content/repositories/snapshots", - "Sonatype OSS Releases" at "https://oss.sonatype.org/content/repositories/releases" - ), - releasePublishArtifactsAction := PgpKeys.publishSigned.value, - publishMavenStyle := true, - publishTo := { - val nexus = "https://oss.sonatype.org/" - if (isSnapshot.value) - Some("snapshots" at nexus + "content/repositories/snapshots") - else - Some("releases" at nexus + "service/local/staging/deploy/maven2") - }, - pgpSecretRing := file("local.secring.gpg"), - pgpPublicRing := file("local.pubring.gpg"), - releaseVersionBump := sbtrelease.Version.Bump.Nano, - releasePublishArtifactsAction := PgpKeys.publishSigned.value, - releaseProcess := { - CrossVersion.partialVersion(scalaVersion.value) match { - case Some((2, 12)) => - doOnDefault(checkSnapshotDependencies) ++ - doOnDefault(inquireVersions) ++ - doOnDefault(runClean) ++ - doOnPush(setReleaseVersion) ++ - doOnPush(commitReleaseVersion) ++ - doOnPush(tagRelease) ++ - doOnDefault(publishArtifacts) ++ - doOnPush(setNextVersion) ++ - doOnPush(commitNextVersion) ++ - // doOnPush(releaseStepCommand("sonatypeReleaseAll")) ++ - doOnPush(pushChanges) - case Some((2, 13)) => - doOnDefault(checkSnapshotDependencies) ++ - doOnDefault(inquireVersions) ++ 
- doOnDefault(runClean) ++ - doOnPush(setReleaseVersion) ++ - doOnDefault(publishArtifacts) - // doOnPush ("sonatypeReleaseAll") ++ - case Some((3, _)) => - doOnDefault(checkSnapshotDependencies) ++ - doOnDefault(inquireVersions) ++ - doOnDefault(runClean) ++ - doOnPush(setReleaseVersion) ++ - doOnDefault(publishArtifacts) - // doOnPush ("sonatypeReleaseAll") ++ - case _ => Seq[ReleaseStep]() - } - }, - homepage := Some(url("https://zio.dev/zio-quill/")), - licenses := List(("Apache License 2.0", url("http://www.apache.org/licenses/LICENSE-2.0"))), - developers := List( - Developer("fwbrasil", "Flavio W. Brasil", "", url("https://github.com/fwbrasil")), - Developer("deusaquilus", "Alexander Ioffe", "", url("https://github.com/deusaquilus")) - ), - scmInfo := Some( - ScmInfo(url("https://github.com/zio/zio-quill"), "git:git@github.com:zio/zio-quill.git") - ) -) +lazy val commonNoLogSettings = basicSettings +lazy val commonSettings = basicSettings ++ loggingSettings lazy val docs = project .in(file("zio-quill-docs")) @@ -916,10 +720,15 @@ lazy val docs = project scalacOptions += "-Xlog-implicits", libraryDependencies ++= Seq("dev.zio" %% "zio" % Version.zio), projectName := "ZIO Quill", - mainModuleName := (`quill-core-jvm` / moduleName).value, -// ScalaUnidoc / unidoc / unidocProjectFilter := inProjects( -// `quill-engine-jvm`, -// ), + mainModuleName := (`quill-core` / moduleName).value, + // With Scala 2.12, these projects doc isn't compiling. + ScalaUnidoc / unidoc / unidocProjectFilter := inAnyProject -- inProjects( + `quill-engine`, + `quill-core`, + `quill-cassandra-monix`, + `quill-orientdb`, + `quill-doobie` + ), projectStage := ProjectStage.ProductionReady, checkArtifactBuildProcessWorkflowStep := None, docsPublishBranch := "master", diff --git a/build/build.sh b/build/build.sh index 25409682a0..406cb24ddb 100755 --- a/build/build.sh +++ b/build/build.sh @@ -72,7 +72,7 @@ function wait_for_databases() { show_mem #sbt scalafmtAll - #sbt checkUnformattedFiles + #sbt scalafmtCheckAll # Start sbt compilation and database setup in parallel echo "build.sh =:> Base Compile in wait_for_databases" @@ -106,7 +106,7 @@ function wait_for_mysql_postgres() { show_mem #sbt scalafmtAll - #sbt checkUnformattedFiles + #sbt scalafmtCheckAll # Start sbt compilation and database setup in parallel echo "build.sh =:> Base Compile in wait_for_mysql_postgres" @@ -139,8 +139,8 @@ function wait_for_bigdata() { show_mem sbt scalafmtAll - sbt checkUnformattedFiles - sbt $SBT_ARGS quill-coreJVM/test:compile & COMPILE=$! + sbt scalafmtCheckAll + sbt $SBT_ARGS quill-core/Test/compile & COMPILE=$! ./build/setup_bigdata.sh & SETUP=$! 
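
[Editorial aside] The hand-rolled sbt-release `releaseSettings`/`releaseProcess` removed just above are replaced by sbt-ci-release, which the new `release_scala` workflow job drives once per Scala version (overriding `CI_RELEASE` to `publishSigned` and `CI_SNAPSHOT_RELEASE` to `publish` so each matrix entry publishes only its own Scala version). A minimal sketch of the build-side pieces this relies on, assembled from settings introduced elsewhere in this build.sbt diff; `example-internal-module` is a hypothetical name and the sketch is not a complete build definition:

    // Publishing metadata now lives in inThisBuild (sbt-ci-release picks it up):
    inThisBuild(
      List(
        organization := "io.getquill",
        homepage     := Some(url("https://zio.dev/zio-quill")),
        licenses     := List(("Apache License 2.0", url("http://www.apache.org/licenses/LICENSE-2.0"))),
        developers   := List(Developer("deusaquilus", "Alexander Ioffe", "", url("https://github.com/deusaquilus")))
      )
    )

    // Modules that must never reach Sonatype opt out explicitly, as the root
    // `quill` aggregator and `quill-codegen-tests` do in this patch:
    lazy val `example-internal-module` = project
      .settings(publish / skip := true)
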
wait $SETUP diff --git a/build/credentials.sbt.enc b/build/credentials.sbt.enc deleted file mode 100644 index f5ed7c7fb09829997193c8cf2f3494569fc8fa7c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 160 zcmV;R0AK%8VQh3|WM5yJ@}LR-Pd)-bx&03+1Dg?Y9n}iOFG1{JgMi7vSatc$a+_3B zBeMxp8%n%o#{sx9#+5}3@Vlc&d4H{7iZ(rXD+8Gx?~$o3#_SXyD)N68G`>c8quvO4 z%f?SrO@hb8P|^1$A5cY5N}uwdt<#Tgb`^(f#-yBwrac^IUJd~O7h_YiNe)tZw4!EP OozH^1Ca}x|FV7W)$(mdN0y%wk5N!T&N}@C1l*o!uTHwkt z3-^dQXR=bKzal1BxccFXc3m$o1Ll!V*qT|sV}3CNK@{?X9)`NPZN(H5TquI>mMXZv z949~7>%iDPQb>p$8ukUz?#YJ9)EsAKddyUi~9fr??9WJnL5q9F937-I=wEp_h8O5#f zbHybvx*Y;Mx{}mh`9>HaIdh7FJ~;`6V@yWmQzn_2L%G*uN&g9L=8Iz$flM)Vk zG>W0%dz6`aWz$8Ht$zILyb@mULYo~GPM~fbb%d1YkME#$4%aZDR-f-O=ln`7o9_46 zD%|^}y(s-dKR}zTAxFU^_{32+1pxz$5r;!6CTagpwD<}a?xC6IbF3(YG=)BeCbC8; zv`f~;*sQGfBcZyQ7OUaRj8{ghvEBctYguuXRlJ>hs3x#YjlLIAn0!vvQEb_#1R+zb zphbFBU7Z{aOK^ihvv7rZcOp?x;(uL_cf|LD>}!QeA_*}6PpVJlgOn|k8z!Yl^LrP% zJtP2pqu5Yozo^|@F7tU&a)hEDZRl>pL(r;2jof2HOhB%E8FP<3(c#6e%N3URrcd}P z4F3KS4Xz0h1cgct)|Pv|b2{X%JEGpSA_W5}oK!}P1U22ZCSqX6Lk>M}`no1vjH2y! zjM_Ja1@MUmwvZNyRMq`Ff?+_BhhwO^2tZm8lO@tZP!Bpco>`gYEiDtBV}@P%=Tq4H zC+=f5g!zRPVqKR#MV=R&WA&7kx{;5|L!y1psASY9sp?g)5L=M9#`3-eb^%le!lCix zVaBPk-m@}v$!)>Fm;u4Ov5AJc9}m-3^#;-cD6h~DZ?O#bXm_?)Q?>iEn^uy6uF6gJbDn)3?#^E`SaxS0{POVzUIaR zRe12WBOa(jvR-I=2;UaEm9s zJY&kB9^`s>sl!-(Zy7z@qfuHGBWSb62ipgoQ6K7q19UR;T+)iNMfz&ps1|YM<9w*L zNe4PMT1I4aB8zo|n-kXGN2BHxCmLzaNxgm|zCXAe);y&i9aHuX(s>|tk}Rv_af}hl z71oO$XPq*vlAeWI(I1tVp$#Ab>BS8-vWm#8vsqjulaa$qD$U82^THCyj#U9~Z>;F$ zw`XEM>Ok{-WZJ3P^i_05@V#SIo(QKF*F+8s%I~P!lG)VY=|YiRnigLRU3mqx>xLOU8HR9wF%p7(jrbLb-4y*EzDxV+I3!bf zg+}9Ykdih~A^F5)StER#%zN7ok&s0JV$|J7%?@CGMb2ddr6fby7N^5Iu^L9bkxUCD zUcP8^`r~QhKfu7=3U?(~y`z>_=BbB9*!q1pBl~Q^WB#UeI?P4#hD^$y=#jA+pt&6+ zcKogSDm2>fX0X|TVZFI`F%&V%%jFZLP50UI3JSz}v>L$mk)i`-czo`EJjP$T-MnjG zMSGX-z=QEhWqn6jZ&$C{tfEZhF_OF{^;^dYAS+%E08@Wfl&&O!9le$s}+sK9u4l%O=ISXVZOh_F^NjPNg?9U4=Z zI-47Y&sNP|QWHjBe_bQ}hcE2p5@Y^_{hcS^<>L5Hjq`7f9iDQ7^f6*-SBA3?v%mrP{;+D(BNL-3^_D|0ng` zT=|vf>``L}2G_jSSe(T`?$qW8$&c0)T}}- i^A@U`t`*iKs#3lDfWhP}W>vS`VE$2nc><1s`4Q7q`+mg$ diff --git a/build/release.sh b/build/release.sh deleted file mode 100755 index 9268effa3a..0000000000 --- a/build/release.sh +++ /dev/null @@ -1,136 +0,0 @@ -#!/usr/bin/env bash -set -e # Any subsequent(*) commands which fail will cause the shell script to exit immediately - -VERSION=$1 -ARTIFACT=$2 - -echo "Begin Release Script for BRANCH=$BRANCH VERSION=$VERSION ARTIFACT=$ARTIFACT" - -if [[ -z $ARTIFACT ]] -then - echo "No Artifact Specified" -fi - -export JAVA_OPTS="-Xms4g -Xmx4g -Xss10m" - -SBT_2_12="sbt ++2.12.17 -Dquill.macro.log=false -Dquill.scala.version=2.12.17" -SBT_2_13="sbt ++2.13.10 -Dquill.macro.log=false -Dquill.scala.version=2.13.10" -SBT_3_3="sbt ++3.3.0 -Dquill.macro.log=false -Dquill.scala.version=3.3.0" - -if [[ $VERSION -eq 212 ]] -then - SBT_VER=$SBT_2_12 -elif [[ $VERSION -eq 213 ]] -then - SBT_VER=$SBT_2_13 -elif [[ $VERSION -eq 33 ]] -then - SBT_VER=$SBT_3_3 -else - echo "No Valid SBT Version Entered" - exit 1 -fi - -echo "$SBT_VER" -if [[ $PULL_REQUEST == "false" ]] -then - echo "Export secring" - openssl aes-256-cbc -md sha256 -salt -pbkdf2 -pass pass:$ENCRYPTION_PASSWORD -in ./build/secring.gpg.enc -out local.secring.gpg -d - echo "Export pubring" - openssl aes-256-cbc -md sha256 -salt -pbkdf2 -pass pass:$ENCRYPTION_PASSWORD -in ./build/pubring.gpg.enc -out local.pubring.gpg -d - echo "Export creds" - openssl aes-256-cbc -md sha256 -salt -pbkdf2 -pass pass:$ENCRYPTION_PASSWORD -in 
./build/credentials.sbt.enc -out local.credentials.sbt -d - echo "Export key" - openssl aes-256-cbc -md sha256 -salt -pbkdf2 -pass pass:$ENCRYPTION_PASSWORD -in ./build/deploy_key.pem.enc -out local.deploy_key.pem -d - - ls -ltr - - gpg --version - - echo "Import pubring" - gpg --import --batch local.pubring.gpg - echo "Import secring" - gpg --import --batch local.secring.gpg - echo "List keys" - gpg --list-keys - - #echo "Set to trust" - #echo "Trust Keys" - - # Need to specify to trust GPG keys. Answer '5' (ultimate trust) to "Please decide how far you trust this user" and then 'y' to acknowledge that - for fpr in $(gpg --list-keys --with-colons | awk -F: '/fpr:/ {print $10}' | sort -u); do echo -e "5\ny\n" | gpg --command-fd 0 --status-fd 2 --batch --expert --edit-key $fpr trust; done - # Same for secret keys - for fpr in $(gpg --list-secret-keys --with-colons | awk -F: '/fpr:/ {print $10}' | sort -u); do echo -e "5\ny\n" | gpg --command-fd 0 --status-fd 2 --batch --expert --edit-key $fpr trust; done - - - ls -ltr - sleep 3 # Need to wait until credential files fully written or build fails sometimes - project_version="v$(cat version.sbt | awk -F'"' '{print $2}')" - echo "Detected project_version '$project_version' from SBT Files (on BRANCH '$BRANCH')" - - # When an artifact is actually published, a build will go out on the git commit: "Setting version to ". - # The job before that is the one that creates the vX.X.X tag e.g. v3.0.0. We build and release on that one - # as well as any branch name 're-release*' in case a build fails and we need to re-publish. - # (Also note, we could technically use $project_version instead of $(cat version.sbt) but I don't want to change that this time around.) - - if [[ ($BRANCH == "master" || $BRANCH == "re-release"*) && $(cat version.sbt) != *"SNAPSHOT"* ]] - then - echo "Release Build for $BRANCH - Artifact: '$ARTIFACT'" - eval "$(ssh-agent -s)" - chmod 600 local.deploy_key.pem - ssh-add local.deploy_key.pem - git config --global user.name "Quill CI" - git config --global user.email "quillci@getquill.io" - git remote set-url origin git@github.com:getquill/quill.git - - if [[ $ARTIFACT == "base" ]]; then $SBT_VER -Dmodules=base -DskipPush=true 'release with-defaults'; fi - if [[ $ARTIFACT == "db" ]]; then $SBT_VER -Dmodules=db -DskipPush=true 'release with-defaults'; fi - if [[ $ARTIFACT == "js" ]]; then $SBT_VER -Dmodules=js -DskipPush=true 'release with-defaults'; fi - if [[ $ARTIFACT == "async" ]]; then $SBT_VER -Dmodules=async -DskipPush=true 'release with-defaults'; fi - if [[ $ARTIFACT == "codegen" ]]; then $SBT_VER -Dmodules=codegen -DskipPush=true 'release with-defaults'; fi - if [[ $ARTIFACT == "bigdata" ]]; then $SBT_VER -Dmodules=bigdata -DskipPush=true 'release with-defaults'; fi - - # Commit next version and tag if we are on the master branch (i.e. not if we are on a re-release) - if [[ $BRANCH == "master" && $ARTIFACT == "publish" ]]; then - echo "Doing Master Publish for BRANCH=$BRANCH VERSION=$VERSION ARTIFACT=$ARTIFACT" - # Delete the website tag. If it does not currently exist then ignore it. 
- git push --delete origin website || true - $SBT_VER -Dmodules=none 'release with-defaults default-tag-exists-answer o'; - fi - - elif [[ $BRANCH == "master" && $(cat version.sbt) == *"SNAPSHOT"* ]] - then - echo "Master Non-Release Build for $BRANCH - Artifact: '$ARTIFACT'" - if [[ $ARTIFACT == "base" ]]; then $SBT_VER -Dmodules=base publish; fi - if [[ $ARTIFACT == "db" ]]; then $SBT_VER -Dmodules=db publish; fi - if [[ $ARTIFACT == "js" ]]; then $SBT_VER -Dmodules=js publish; fi - if [[ $ARTIFACT == "async" ]]; then $SBT_VER -Dmodules=async publish; fi - if [[ $ARTIFACT == "codegen" ]]; then $SBT_VER -Dmodules=codegen publish; fi - if [[ $ARTIFACT == "bigdata" ]]; then $SBT_VER -Dmodules=bigdata publish; fi - - # No-Op Publish - if [[ $ARTIFACT == "publish" ]]; then echo "No-Op Publish for Non Release Master Branch"; fi - - # If we are a branch build publish it. We are assuming this script does NOT become activated in pulls requests - # and that condition is done at a higher level then this script - elif [[ $BRANCH != "master" ]] - then - echo "Branch build for $BRANCH - Artifact: '$ARTIFACT'" - echo "ThisBuild / version := \"$BRANCH-SNAPSHOT\"" > version.sbt - if [[ $ARTIFACT == "base" ]]; then $SBT_VER -Dmodules=base publish; fi - if [[ $ARTIFACT == "db" ]]; then $SBT_VER -Dmodules=db publish; fi - if [[ $ARTIFACT == "js" ]]; then $SBT_VER -Dmodules=js publish; fi - if [[ $ARTIFACT == "async" ]]; then $SBT_VER -Dmodules=async publish; fi - if [[ $ARTIFACT == "codegen" ]]; then $SBT_VER -Dmodules=codegen publish; fi - if [[ $ARTIFACT == "bigdata" ]]; then $SBT_VER -Dmodules=bigdata publish; fi - if [[ $ARTIFACT == "docs" ]]; then $SBT_VER -Dmodules=docs publish; fi - - # No-Op Publish - if [[ $ARTIFACT == "publish" ]]; then echo "No-Op Publish for Non Release Snapshot Branch"; fi - else - VERSION_FILE=$(cat version.sbt) - echo "GitHub actions branch was: ${BRANCH} and version file is $VERSION_FILE. Not Sure what to do." - fi -else - echo "PULL_REQUEST is not 'false' ($PULL_REQUEST). Not doing a release." -fi diff --git a/build/secring.gpg.enc b/build/secring.gpg.enc deleted file mode 100644 index 803c57975cc09618e5ee1c815acd2b7d48dbf00d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3824 zcmVQ==;auDh4V#foD?;-8?~3c%WXcXH!W~@iZH9a?-S>d&uo>B=>~d3q(7IO zxTSB+QIUE^BDCwBKQ9gaO;}I#G4NWK-w@RJLEiL-&oEF%*yj^v#%ea^6m+VrpqawJ zm@2aZp+Qyu3;{wk-$h-x^4xh(3f*ALZrl}@e<5T}d{WHYX12)7$XNKd8oBAYFPZZi zo+t*KRT)Db5T2*1>3DASL5f)`T7;R5q#|=oI$RspvK`lZRz$(L-0wL4-$G-nzUpzO zCM$p+jL0HOZM$|JZd3TNr7L7<3dPtIm8e5v&&7+f{+E=ZKvan}={|3gXxSnkzQ8N7 z1F%I|9RYJpL=fDoX2-3VYF-s$Tb$Bb#NP$RF7S6}3-u%*01W(EBiLb+pxN0pb8`g< z_uZs8w|+d7JpX^sc0w1zuSpk*6%>xSQYh8h(VvAe76@ov)9)4wa!0|f-ay`nYJV_~ z=rnqVO&EW9{7QSSQeO;egh2wSzMk>;cahF5LE+G0Gr04SbMxlz;Hfzwv-$a`a zh}5)1hwY|>x*x~zteRz>ea#&SR+2HC0n)s09tcTJ$GkaCLBl-Y6~%;-AKQ`+wZ}vw zM|u5$^8VX%Z&a`{XXHkwM)-p^e9Z=`9wbzmnEhSyOHkFo z6~q>@GM2}O$vIxydu31}pZO9#8wZuUlgUof3QMayt48yX->^qBJl+zE5-|mFpCnQ4 ziO@(fOy>hq1it|c8zTjoSU)Dpwtk+J9NxT}Wg2kDQv5ge-fPt2E@6 zYGd~UE)TB!Hwwg54eJwfr1rVjv*g^@{AtDw7-Lj=io2ZW7<`b^n9HhJ?&ykv&8irQ zm?LnxSVFH6boJxcyH}^E30qRUTN5%pVX!~a*nE>Hz8v|#aD9l~bMn^g;FO|wO>J1- zZMfeJ_h>`@S3Tk{(ijX#Hj+rx-~)!NpvCO^s(QKx7KjrSZq|rX7VKs{yF|f~GpZ1v{_o#H5VuSEsj}M*2B< zkj?s3B@WFUvNLhq{b<2y&s73Sg!dwiW#;yX-zAS!5wh)G+t@9wOT2J)JuP1DYq_6} zXCdYc_Dh9+6_i%&oqAXSbnW^_v{F+Vn?l~dbnpHOkuD3gsD4K%&*8Ugh+yw@$SQl3 zGQq%4AxpcdG1+MSElJ^NnTH)#FJ491!zVoQ>%z@i>f*cK4o~u%1K|$_$mF zsitom)4ToCve+Cb3`Qqm7d`nv85p~%t|Ro}NZsG}20Tx? 
z9>(++*Bqo>got!;y%JzrBIwZrmM!8QxY-1xX7`be- zc)l$Kpm|6%jk(te@GVt)9Z!tcT<%di2<<08H z0!3NYH|m;qboX3s=LwP(4;eU|>MU)M$xC3*krf#`5YS51vqu^ z%602$&*PAY7#nSxy^e4Z%5vEI(hUh=6b6Rw7Q%#4slJ!a(@EU{FNibcu1|J>s>M{^ zfzvf!AvNhZE&gAVAU=>rqTBB3ID$7$!*~>5onk_aIoPMY|fv8R z-$TnX#;hlSv+QQ!j;WH9u9EXD^Gk)f*COEIg8^owdU6~_!U%euQyVk3vREa_y)GAM zZg2HOTx*?u2+&$q1vL0vUTczD+N3g&p07+8&Q8Pj{V~IfrpAO~1)GZFE>+o8Ivktb&@7P=z^ zppnts#eO@@Q*>>l~;spsftoUU1Q?${)X!H5bwtWM~adDtx^R&vJ*T$ zZ2MYw01c73a(IIVXiu6<*KP|!V`BuY0A%N^8<*pvnLxoF2>z&aAx0LP8A{LD@>tWi z)&@2AAH+jjDAC0Yq^B^7JGSwUrPymOFvfNU3aO0>A!r(%eTJG|A-?+Pkj_8#>Gn*= zWhm3yQ>&3+{f|zHzG%(KXuDA+DkzF>o1?DgB*>S|e)G^z+&I}1BYD4zQH@LCnVzcS zXTyg7-|VX(0p}cj!#jy~2NkP|I9`bv%?2WnXoZ%%kQI%bigH#p`52NVdGqJ)1qAI! z1)AGjVM~s^hBtr0E~sa=Syr+Bva1CrM0GY?CliYyS5LBf8AnrZZc6~UFzr%y0{%`5 zM$38LMqlk4ksE%5g55MA7dyz@)4`f#Gg05=KK#pWSXCaBn8?9)?Ni;6DMJ|LR~5hQ z5gp^2k2mUg7PpK*Y;eIJ6hF_*Dx%-#tjb^Jj-%U>n%oPRITNt2=sqBO2YY0*0~9qd zVTf6+SfX)u7^E#f;l#I3(%63+=w;fBTnV4D0{(Pf9Oz8+Hzi1{XC{NZL|mvlHM1IX zua8BFQbphE{EHP>U?s#bF_Z=RQ=_oFTZCaj{tuXJWb4LrD8L{A;GU!W)A{Lwc|5Y= zRRV!hHulsIG@O#ieWjCydPc#x=-^6gSr3_}aUOPSsZ~!C3y$FWKYf8L57xjaxwUd~ zKY$sEbh^F1ce7P1ANuj|Mk=(BjMQz=O!HLLIPB&>VnK$lzJFX65-j=O}cyV5zW1#UJKMER53+t>V0= z$mKf;N=HP9LYXh|Z9IxI@O2(QnD8?mba|}^t0S0&(cKj9$k9LEI2Ix=#?u{ghkE~b mZXpuh)W`EtAE~4@Mecsa|7LOAQGO~eZ)4qd4|ZDOLRrx8PlV0@ diff --git a/project/plugins.sbt b/project/plugins.sbt index 465d32e325..0f8118549a 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -4,13 +4,9 @@ resolvers += "Typesafe repository" at "https://repo.typesafe.com/typesafe/releas addDependencyTreePlugin -addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.2") -addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.9") -addSbtPlugin("com.github.sbt" % "sbt-release" % "1.1.0") -addSbtPlugin("com.github.sbt" % "sbt-pgp" % "2.2.1") -addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.21") -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.3") -addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.13.2") -addSbtPlugin("org.portable-scala" % "sbt-scalajs-crossproject" % "1.3.2") -addSbtPlugin("com.etsy" % "sbt-compile-quick-plugin" % "1.4.0") -addSbtPlugin("dev.zio" % "zio-sbt-website" % "0.3.10") +addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.2") +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.9") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.3") +addSbtPlugin("com.etsy" % "sbt-compile-quick-plugin" % "1.4.0") +addSbtPlugin("dev.zio" % "zio-sbt-website" % "0.3.10") +addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.12") diff --git a/quill-cassandra-lagom/src/main/scala/io/getquill/CassandraLagomAsyncContext.scala b/quill-cassandra-lagom/src/main/scala/io/getquill/CassandraLagomAsyncContext.scala deleted file mode 100644 index 5b54bb580f..0000000000 --- a/quill-cassandra-lagom/src/main/scala/io/getquill/CassandraLagomAsyncContext.scala +++ /dev/null @@ -1,81 +0,0 @@ -package io.getquill - -import akka.Done -import com.datastax.driver.core.BoundStatement -import com.lightbend.lagom.scaladsl.persistence.cassandra.CassandraSession -import io.getquill.context.ExecutionInfo -import io.getquill.util.ContextLogger - -import scala.concurrent.{ExecutionContext, Future} - -class CassandraLagomAsyncContext[+N <: NamingStrategy]( - naming: N, - session: CassandraSession -) extends CassandraLagomSessionContext[N](naming, session) { - - override type Result[T] = Future[T] - override type RunQuerySingleResult[T] = Option[T] - override type RunQueryResult[T] = Seq[T] - override type RunActionResult = Done - override type 
RunBatchActionResult = Done - override type Session = CassandraLagomSession - - private val logger = ContextLogger(this.getClass) - - def prepareAction[T](cql: String, prepare: Prepare = identityPrepare)(info: ExecutionInfo, dc: DatasourceContext)( - implicit executionContext: ExecutionContext - ): CassandraLagomSession => Future[BoundStatement] = (session: Session) => { - val prepareResult = session.cs.prepare(cql).map(bs => prepare(bs.bind(), session)) - val preparedRow = prepareResult.map { case (params, bs) => - logger.logQuery(cql, params) - bs - } - preparedRow - } - - def prepareBatchAction[T](groups: List[BatchGroup])(info: ExecutionInfo, dc: DatasourceContext)(implicit - executionContext: ExecutionContext - ): CassandraLagomSession => Future[List[BoundStatement]] = (session: Session) => { - val batches = groups.flatMap { case BatchGroup(cql, prepares) => - prepares.map(cql -> _) - } - Future.traverse(batches) { case (cql, prepare) => - val prepareCql = prepareAction(cql, prepare)(info, dc) - prepareCql(session) - } - } - - def executeQuery[T](cql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)( - info: ExecutionInfo, - dc: DatasourceContext - )(implicit executionContext: ExecutionContext): Result[RunQueryResult[T]] = { - val statement = prepareAsyncAndGetStatement(cql, prepare, wrappedSession, logger) - statement.flatMap(st => session.selectAll(st)).map(_.map(row => extractor(row, wrappedSession))) - } - - def executeQuerySingle[T]( - cql: String, - prepare: Prepare = identityPrepare, - extractor: Extractor[T] = identityExtractor - )(info: ExecutionInfo, dc: DatasourceContext)(implicit - executionContext: ExecutionContext - ): Result[RunQuerySingleResult[T]] = - executeQuery(cql, prepare, extractor)(info, dc).map(_.headOption) - - def executeAction(cql: String, prepare: Prepare = identityPrepare)(info: ExecutionInfo, dc: DatasourceContext)( - implicit executionContext: ExecutionContext - ): Result[RunActionResult] = { - val statement = prepareAsyncAndGetStatement(cql, prepare, wrappedSession, logger) - statement.flatMap(st => session.executeWrite(st)) - } - - def executeBatchAction(groups: List[BatchGroup])(info: ExecutionInfo, dc: DatasourceContext)(implicit - executionContext: ExecutionContext - ): Result[RunBatchActionResult] = - Future.sequence { - groups.flatMap { case BatchGroup(cql, prepares) => - prepares.map(executeAction(cql, _)(info, dc)) - } - }.map(_ => Done) - -} diff --git a/quill-cassandra-lagom/src/main/scala/io/getquill/CassandraLagomSessionContext.scala b/quill-cassandra-lagom/src/main/scala/io/getquill/CassandraLagomSessionContext.scala deleted file mode 100644 index 98d87db937..0000000000 --- a/quill-cassandra-lagom/src/main/scala/io/getquill/CassandraLagomSessionContext.scala +++ /dev/null @@ -1,33 +0,0 @@ -package io.getquill - -import akka.Done -import com.datastax.driver.core.BoundStatement -import com.lightbend.lagom.scaladsl.persistence.cassandra.CassandraSession -import io.getquill.context.UdtValueLookup -import io.getquill.context.cassandra.CassandraSessionlessContext - -import scala.concurrent.{ExecutionContext, Future} - -case class CassandraLagomSession(cs: CassandraSession) extends UdtValueLookup - -abstract class CassandraLagomSessionContext[+N <: NamingStrategy]( - val naming: N, - val session: CassandraSession -) extends CassandraSessionlessContext[N] { - - override type RunActionResult = Done - override type RunBatchActionResult = Done - override type Session = CassandraLagomSession - - val 
wrappedSession = CassandraLagomSession(session) - - override def prepareAsync(cql: String)(implicit executionContext: ExecutionContext): Future[BoundStatement] = - session.prepare(cql).map(_.bind()) - - override def close() = { - import scala.concurrent.ExecutionContext.Implicits.global - session.underlying().map(_.close()) - () - } - -} diff --git a/quill-cassandra-lagom/src/main/scala/io/getquill/CassandraLagomStreamContext.scala b/quill-cassandra-lagom/src/main/scala/io/getquill/CassandraLagomStreamContext.scala deleted file mode 100644 index 42532e0efb..0000000000 --- a/quill-cassandra-lagom/src/main/scala/io/getquill/CassandraLagomStreamContext.scala +++ /dev/null @@ -1,65 +0,0 @@ -package io.getquill - -import akka.stream.scaladsl.Source -import akka.{Done, NotUsed} -import com.lightbend.lagom.scaladsl.persistence.cassandra.CassandraSession -import io.getquill.context.ExecutionInfo -import io.getquill.util.ContextLogger - -import scala.concurrent.ExecutionContext - -class CassandraLagomStreamContext[+N <: NamingStrategy]( - naming: N, - session: CassandraSession -) extends CassandraLagomSessionContext[N](naming, session) { - - override type Result[T] = Source[T, NotUsed] - override type RunQuerySingleResult[T] = T - override type RunQueryResult[T] = T - override type RunActionResult = Done - override type RunBatchActionResult = Done - - private val logger = ContextLogger(this.getClass) - - def executeQuery[T]( - cql: String, - prepare: Prepare = identityPrepare, - extractor: Extractor[T] = identityExtractor - )(info: ExecutionInfo, dc: DatasourceContext)(implicit - executionContext: ExecutionContext - ): Result[RunQueryResult[T]] = { - val statement = prepareAsyncAndGetStatement(cql, prepare, wrappedSession, logger) - val resultSource = statement.map(st => session.select(st).map(row => extractor(row, wrappedSession))) - Source - .fromFutureSource(resultSource) - .mapMaterializedValue(_ => NotUsed) - } - - def executeQuerySingle[T]( - cql: String, - prepare: Prepare = identityPrepare, - extractor: Extractor[T] = identityExtractor - )(info: ExecutionInfo, dc: DatasourceContext)(implicit - executionContext: ExecutionContext - ): Result[RunQuerySingleResult[T]] = - executeQuery(cql, prepare, extractor)(info, dc).take(1) - - def executeAction(cql: String, prepare: Prepare = identityPrepare)(info: ExecutionInfo, dc: DatasourceContext)( - implicit executionContext: ExecutionContext - ): Result[RunActionResult] = { - val statement = prepareAsyncAndGetStatement(cql, prepare, CassandraLagomSession(session), logger) - Source.fromFuture(statement).mapAsync(1) { st => - session.executeWrite(st) - } - } - - def executeBatchAction(groups: List[BatchGroup])(info: ExecutionInfo, dc: DatasourceContext)(implicit - executionContext: ExecutionContext - ): Result[RunBatchActionResult] = { - val sourceList = groups.flatMap { case BatchGroup(cql, prepares) => - prepares.map(executeAction(cql, _)(info, dc)) - } - Source(sourceList).flatMapConcat(identity) - } - -} diff --git a/quill-cassandra-lagom/src/test/resources/application.conf b/quill-cassandra-lagom/src/test/resources/application.conf deleted file mode 100644 index 9a9a316b99..0000000000 --- a/quill-cassandra-lagom/src/test/resources/application.conf +++ /dev/null @@ -1,18 +0,0 @@ -testStreamDB.keyspace=quill_test -testStreamDB.preparedStatementCacheSize=1000 -testStreamDB.session.contactPoint=127.0.0.1 -testStreamDB.session.contactPoint=${?CASSANDRA_HOST} -testStreamDB.session.port=9042 -testStreamDB.session.port=${?CASSANDRA_PORT} 
-testStreamDB.session.queryOptions.fetchSize=1 -testStreamDB.session.queryOptions.consistencyLevel=LOCAL_QUORUM - -cassandra-journal.keyspace = ${testStreamDB.keyspace} -cassandra-snapshot-store.keyspace = ${testStreamDB.keyspace} -lagom.persistence.read-side.cassandra.keyspace = ${testStreamDB.keyspace} - -lagom.services { - cas_native = "tcp://"${testStreamDB.session.contactPoint}":"${testStreamDB.session.port} -} - -akka.discovery.method = "akka-dns" diff --git a/quill-cassandra-lagom/src/test/scala/io/getquill/Spec.scala b/quill-cassandra-lagom/src/test/scala/io/getquill/Spec.scala deleted file mode 100644 index 6199dee202..0000000000 --- a/quill-cassandra-lagom/src/test/scala/io/getquill/Spec.scala +++ /dev/null @@ -1,38 +0,0 @@ -package io.getquill - -import io.getquill.ast.{Ident, StatelessTransformer} -import io.getquill.norm.capture.TemporaryIdent -import io.getquill.quat.Quat -import org.scalatest.BeforeAndAfterAll -import org.scalatest.freespec.AnyFreeSpec -import org.scalatest.matchers.must.Matchers - -import scala.concurrent.duration.Duration -import scala.concurrent.{Await, Future} - -abstract class Spec extends AnyFreeSpec with Matchers with BeforeAndAfterAll { - val QV = Quat.Value - val QEP = Quat.Product.empty - def QP(fields: String*) = Quat.LeafProduct(fields: _*) - - // Used by various tests to replace temporary idents created by AttachToEntity with 'x' - val replaceTempIdent = new StatelessTransformer { - override def applyIdent(id: Ident): Ident = - id match { - case TemporaryIdent(tid) => - Ident("x", id.quat) - case _ => - id - } - } - - implicit class QuatOps(quat: Quat) { - def productOrFail() = - quat match { - case p: Quat.Product => p - case _ => throw new IllegalArgumentException(s"The quat ${quat} is expected to be a product but is not") - } - } - - def await[T](f: Future[T]): T = Await.result(f, Duration.Inf) -} diff --git a/quill-cassandra-lagom/src/test/scala/io/getquill/TestEntities.scala b/quill-cassandra-lagom/src/test/scala/io/getquill/TestEntities.scala deleted file mode 100644 index 20e9cdcaeb..0000000000 --- a/quill-cassandra-lagom/src/test/scala/io/getquill/TestEntities.scala +++ /dev/null @@ -1,57 +0,0 @@ -package io.getquill - -import io.getquill.context.Context -import io.getquill.quat.Quat - -trait TestEntities { - this: Context[_, _] => - - case class TestEntity(s: String, i: Int, l: Long, o: Option[Int], b: Boolean) - case class Emb(s: String, i: Int) - case class TestEntityEmb(emb: Emb, l: Long, o: Option[Int]) - case class TestEntity2(s: String, i: Int, l: Long, o: Option[Int]) - case class TestEntity3(s: String, i: Int, l: Long, o: Option[Int]) - case class TestEntity4(i: Long) - case class TestEntity5(i: Long, s: String) - case class EmbSingle(i: Long) - case class TestEntity4Emb(emb: EmbSingle) - case class TestEntityRegular(s: String, i: Long) - - private val QV = Quat.Value - private val QBV = Quat.BooleanValue - - val TestEntityQuat = Quat.Product("s" -> QV, "i" -> QV, "l" -> QV, "o" -> QV, "b" -> QBV) - val TestEntityEmbQuat = Quat.Product("emb" -> Quat.Product("s" -> QV, "i" -> QV), "l" -> QV, "o" -> QV) - val TestEntity2Quat = Quat.Product("s" -> QV, "i" -> QV, "l" -> QV, "o" -> QV) - val TestEntity3Quat = Quat.Product("s" -> QV, "i" -> QV, "l" -> QV, "o" -> QV) - val TestEntity4Quat = Quat.Product("i" -> QV) - val TestEntity5Quat = Quat.Product("i" -> QV, "s" -> QV) - val TestEntity4EmbQuat = Quat.Product("emb" -> Quat.Product("i" -> QV)) - - val qr1 = quote { - query[TestEntity] - } - val qr1Emb = quote { - 
querySchema[TestEntityEmb]("TestEntity") - } - val qr2 = quote { - query[TestEntity2] - } - val qr3 = quote { - query[TestEntity3] - } - val qr4 = quote { - query[TestEntity4] - } - val qr5 = quote { - query[TestEntity5] - } - val qr4Emb = quote { - querySchema[TestEntity4Emb]("TestEntity4") - } - val qrRegular = quote { - for { - a <- query[TestEntity] - } yield TestEntityRegular(a.s, a.l) - } -} diff --git a/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/CassandraTestEntities.scala b/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/CassandraTestEntities.scala deleted file mode 100644 index 7e282aad91..0000000000 --- a/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/CassandraTestEntities.scala +++ /dev/null @@ -1,16 +0,0 @@ -package io.getquill.context.cassandra - -import io.getquill.TestEntities - -trait CassandraTestEntities extends TestEntities { - this: CassandraContext[_] => - - case class MapFrozen(id: Map[Int, Boolean]) - val mapFroz = quote(query[MapFrozen]) - - case class SetFrozen(id: Set[Int]) - val setFroz = quote(query[SetFrozen]) - - case class ListFrozen(id: List[Int]) - val listFroz = quote(query[ListFrozen]) -} diff --git a/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/EncodingSpecHelper.scala b/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/EncodingSpecHelper.scala deleted file mode 100644 index 514230d50c..0000000000 --- a/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/EncodingSpecHelper.scala +++ /dev/null @@ -1,128 +0,0 @@ -package io.getquill.context.cassandra - -import java.util.{Date, UUID} - -import com.datastax.driver.core.LocalDate -import io.getquill.Spec - -abstract class EncodingSpecHelper extends Spec { - protected def verify(result: List[EncodingTestEntity]): Unit = - result.zip(insertValues) match { - case List((e1, a1), (e2, a2)) => - verify(e1, a1) - verify(e2, a2) - } - - protected def verify(e: EncodingTestEntity, a: EncodingTestEntity): Unit = { - e.id mustEqual a.id - - e.v1 mustEqual a.v1 - e.v2 mustEqual a.v2 - e.v3 mustEqual a.v3 - e.v4 mustEqual a.v4 - e.v5 mustEqual a.v5 - e.v6 mustEqual a.v6 - e.v7 mustEqual a.v7 - e.v8.toList mustEqual a.v8.toList - e.v9 mustEqual a.v9 - e.v10 mustEqual a.v10 - e.v11 mustEqual a.v11 - e.o1 mustEqual a.o1 - e.o2 mustEqual a.o2 - e.o3 mustEqual a.o3 - e.o4 mustEqual a.o4 - e.o5 mustEqual a.o5 - e.o6 mustEqual a.o6 - e.o7 mustEqual a.o7 - e.o8.map(_.toList) mustEqual a.o8.map(_.toList) - e.o9 mustEqual a.o9 - e.o10 mustEqual a.o10 - - () - } - - case class EncodingTestEntity( - id: Int, - v1: String, - v2: BigDecimal, - v3: Boolean, - v4: Int, - v5: Long, - v6: Float, - v7: Double, - v8: Array[Byte], - v9: LocalDate, - v10: UUID, - v11: Date, - v12: Byte, - v13: Short, - o1: Option[String], - o2: Option[BigDecimal], - o3: Option[Boolean], - o4: Option[Int], - o5: Option[Long], - o6: Option[Float], - o7: Option[Double], - o8: Option[Array[Byte]], - o9: Option[Date], - o10: Option[LocalDate] - ) - - protected val fixUUID: UUID = UUID.fromString("606c79e8-a331-4810-8bd7-0668ff7a23ef") - - val insertValues = - List( - EncodingTestEntity( - id = 1, - v1 = "s", - v2 = BigDecimal(1.1), - v3 = true, - v4 = 33, - v5 = 431L, - v6 = 34.4f, - v7 = 42d, - v8 = Array(1.toByte, 2.toByte), - v9 = LocalDate.fromYearMonthDay(2014, 11, 11), - v10 = fixUUID, - v11 = new Date(31202000), - v12 = (Byte.MaxValue - 10).toByte, - v13 = (Short.MaxValue - 10).toShort, - o1 = Some("s"), - o2 = Some(BigDecimal(1.1)), - o3 = 
Some(true), - o4 = Some(33), - o5 = Some(431L), - o6 = Some(34.4f), - o7 = Some(42d), - o8 = Some(Array(1.toByte, 2.toByte)), - o9 = Some(new Date(31200000)), - o10 = Some(LocalDate.fromYearMonthDay(2014, 11, 11)) - ), - EncodingTestEntity( - id = 2, - v1 = "", - v2 = BigDecimal(0), - v3 = false, - v4 = 0, - v5 = 0L, - v6 = 0f, - v7 = 0d, - v8 = Array(), - v9 = LocalDate.fromMillisSinceEpoch(0), - v10 = fixUUID, - v11 = new Date(0), - v12 = 0, - v13 = 0, - o1 = None, - o2 = None, - o3 = None, - o4 = None, - o5 = None, - o6 = None, - o7 = None, - o8 = None, - o9 = None, - o10 = None - ) - ) -} diff --git a/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/QueryResultTypeCassandraSpec.scala b/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/QueryResultTypeCassandraSpec.scala deleted file mode 100644 index 404c865a1c..0000000000 --- a/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/QueryResultTypeCassandraSpec.scala +++ /dev/null @@ -1,34 +0,0 @@ -package io.getquill.context.cassandra - -import io.getquill.Spec -import io.getquill.context.cassandra.encoding.Encoders -import io.getquill.context.cassandra.encoding.Decoders -import io.getquill.Ord - -trait QueryResultTypeCassandraSpec extends Spec { - - val context: CassandraContext[_] with Encoders with Decoders - import context._ - - case class OrderTestEntity(id: Int, i: Int) - - val entries = List( - OrderTestEntity(1, 1), - OrderTestEntity(2, 2), - OrderTestEntity(3, 3) - ) - - val insert = quote((e: OrderTestEntity) => query[OrderTestEntity].insert(e)) - val deleteAll = quote(query[OrderTestEntity].delete) - val selectAll = quote(query[OrderTestEntity]) - val map = quote(query[OrderTestEntity].map(_.id)) - val filter = quote(query[OrderTestEntity].filter(_.id == 1)) - val withFilter = quote(query[OrderTestEntity].withFilter(_.id == 1)) - val sortBy = quote(query[OrderTestEntity].filter(_.id == 1).sortBy(_.i)(Ord.asc)) - val take = quote(query[OrderTestEntity].take(10)) - val entitySize = quote(query[OrderTestEntity].size) - val parametrizedSize = quote { (id: Int) => - query[OrderTestEntity].filter(_.id == id).size - } - val distinct = quote(query[OrderTestEntity].map(_.id).distinct) -} diff --git a/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/lagom/DecodeNullSpec.scala b/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/lagom/DecodeNullSpec.scala deleted file mode 100644 index ae2be36366..0000000000 --- a/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/lagom/DecodeNullSpec.scala +++ /dev/null @@ -1,34 +0,0 @@ -package io.getquill.context.cassandra.lagom - -import io.getquill._ - -class DecodeNullSpec extends Spec { - - "no default values when reading null" - { - "stream" in { - import io.getquill.context.cassandra.utils.executionContext - import testLagomAsyncDB._ - val writeEntities = quote(querySchema[DecodeNullTestWriteEntity]("DecodeNullTestEntity")) - - val result = - for { - _ <- testLagomAsyncDB.run(writeEntities.delete) - _ <- testLagomAsyncDB.run(writeEntities.insert(lift(insertValue))) - result <- testLagomAsyncDB.run(query[DecodeNullTestEntity]) - } yield { - result - } - intercept[IllegalStateException] { - await { - result - } - } - } - } - - case class DecodeNullTestEntity(id: Int, value: Int) - - case class DecodeNullTestWriteEntity(id: Int, value: Option[Int]) - - val insertValue = DecodeNullTestWriteEntity(0, None) -} diff --git 
a/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/lagom/EncodingSpec.scala b/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/lagom/EncodingSpec.scala deleted file mode 100644 index fb2206223b..0000000000 --- a/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/lagom/EncodingSpec.scala +++ /dev/null @@ -1,43 +0,0 @@ -package io.getquill.context.cassandra.lagom - -import io.getquill.context.cassandra.EncodingSpecHelper -import io.getquill.Query - -class EncodingSpec extends EncodingSpecHelper { - "encodes and decodes types" - { - "stream" in { - import io.getquill.context.cassandra.utils.executionContext - import testLagomAsyncDB._ - val result = - for { - _ <- testLagomAsyncDB.run(query[EncodingTestEntity].delete) - _ <- testLagomAsyncDB.run(liftQuery(insertValues).foreach(e => query[EncodingTestEntity].insert(e))) - result <- testLagomAsyncDB.run(query[EncodingTestEntity]) - } yield { - result - } - val f = result.map(_.toList) - verify(await(f)) - } - } - - "encodes collections" - { - "stream" in { - import io.getquill.context.cassandra.utils.executionContext - import testLagomAsyncDB._ - val q = quote { (list: Query[Int]) => - query[EncodingTestEntity].filter(t => list.contains(t.id)) - } - val result = - for { - _ <- testLagomAsyncDB.run(query[EncodingTestEntity].delete) - _ <- testLagomAsyncDB.run(liftQuery(insertValues).foreach(e => query[EncodingTestEntity].insert(e))) - result <- testLagomAsyncDB.run(q(liftQuery(insertValues.map(_.id)))) - } yield { - result - } - val f = result.map(_.toList) - verify(await(f)) - } - } -} diff --git a/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/lagom/QueryResultTypeCassandraAsyncSpec.scala b/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/lagom/QueryResultTypeCassandraAsyncSpec.scala deleted file mode 100644 index 2ac847b3f0..0000000000 --- a/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/lagom/QueryResultTypeCassandraAsyncSpec.scala +++ /dev/null @@ -1,73 +0,0 @@ -package io.getquill.context.cassandra.lagom - -import com.lightbend.lagom.scaladsl.persistence.cassandra.CassandraSession -import io.getquill.context.cassandra.QueryResultTypeCassandraSpec - -import scala.concurrent.Future - -class QueryResultTypeCassandraAsyncSpec extends QueryResultTypeCassandraSpec { - - import io.getquill.context.cassandra.utils.executionContext - - val context = testLagomAsyncDB - - import context._ - - def result[T](function: CassandraSession => Future[T]): T = - await(function(context.session)) - - def result[T](future: Future[T]): T = - await(future) - - override def beforeAll = { - result(context.run(deleteAll)) - result(context.run(liftQuery(entries).foreach(e => insert(e)))) - () - } - - "bind" - { - "action" - { - "noArgs" in { - val bs = result(context.prepare(insert(OrderTestEntity(1, 2)))(context.wrappedSession)) - bs.preparedStatement().getVariables.size() mustEqual 0 - } - - "withArgs" in { - val bs = result(context.prepare(insert(lift(OrderTestEntity(1, 2))))(context.wrappedSession)) - bs.preparedStatement().getVariables.size() mustEqual 2 - bs.getInt("id") mustEqual 1 - bs.getInt("i") mustEqual 2 - } - } - - "query" - { - "noArgs" in { - val bs = result(context.prepare(deleteAll)(context.wrappedSession)) - bs.preparedStatement().getVariables.size() mustEqual 0 - } - - "withArgs" in { - val batches = result( - context.prepare(liftQuery(List(OrderTestEntity(1, 2))).foreach(e => insert(e)))(context.wrappedSession) - ) - batches.foreach { bs 
=> - bs.preparedStatement().getVariables.size() mustEqual 2 - bs.getInt("id") mustEqual 1 - } - } - } - } - - "query" in { - result(context.run(selectAll)) mustEqual entries - } - - "querySingle" - { - "size" in { - result(context.run(entitySize)) mustEqual Option(3) - } - "parametrized size" in { - result(context.run(parametrizedSize(lift(10000)))) mustEqual Option(0) - } - } -} diff --git a/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/lagom/package.scala b/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/lagom/package.scala deleted file mode 100644 index af388ca7f2..0000000000 --- a/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/lagom/package.scala +++ /dev/null @@ -1,8 +0,0 @@ -package io.getquill.context.cassandra - -import io.getquill.{CassandraLagomAsyncContext, Literal} -import utils._ - -package object lagom { - lazy val testLagomAsyncDB = new CassandraLagomAsyncContext(Literal, cassandraSession) with CassandraTestEntities -} diff --git a/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/streaming/DecodeNullSpec.scala b/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/streaming/DecodeNullSpec.scala deleted file mode 100644 index a0b31c2164..0000000000 --- a/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/streaming/DecodeNullSpec.scala +++ /dev/null @@ -1,35 +0,0 @@ -package io.getquill.context.cassandra.streaming - -import io.getquill._ -import io.getquill.context.cassandra.utils.executionContext -import io.getquill.context.cassandra.utils.materializer - -class DecodeNullSpec extends Spec { - - "no default values when reading null" - { - "stream" in { - import testStreamDB._ - val writeEntities = quote(querySchema[DecodeNullTestWriteEntity]("DecodeNullTestEntity")) - - val result = - for { - _ <- testStreamDB.run(writeEntities.delete).runForeach(_ => ()) - _ <- testStreamDB.run(writeEntities.insert(lift(insertValue))).runForeach(_ => ()) - result <- testStreamDB.run(query[DecodeNullTestEntity]).runFold(List.empty[DecodeNullTestEntity])(_ :+ _) - } yield { - result - } - intercept[IllegalStateException] { - await { - result.map(_.head) - } - } - } - } - - case class DecodeNullTestEntity(id: Int, value: Int) - - case class DecodeNullTestWriteEntity(id: Int, value: Option[Int]) - - val insertValue = DecodeNullTestWriteEntity(0, None) -} diff --git a/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/streaming/EncodingSpec.scala b/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/streaming/EncodingSpec.scala deleted file mode 100644 index c5b0395046..0000000000 --- a/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/streaming/EncodingSpec.scala +++ /dev/null @@ -1,58 +0,0 @@ -package io.getquill.context.cassandra.streaming - -import akka.{Done, NotUsed} -import akka.stream.scaladsl.Source -import io.getquill.context.cassandra.{EncodingSpecHelper, utils} -import io.getquill.Query - -import scala.concurrent.Future - -class EncodingSpec extends EncodingSpecHelper { - - import utils.executionContext - import utils.materializer - - def actionResult(stream: Source[Done, NotUsed]): Future[Done] = - stream.runForeach(_ => ()) - - def queryResult[T](stream: Source[T, NotUsed]): Future[List[T]] = - stream.runFold(List.empty[T])(_ :+ _) - - "encodes and decodes types" - { - "stream" in { - import testStreamDB._ - val result = - for { - _ <- actionResult(testStreamDB.run(query[EncodingTestEntity].delete)) - _ <- actionResult( - 
testStreamDB.run( - liftQuery(insertValues) - .foreach(e => query[EncodingTestEntity].insert(e)) - ) - ) - result <- queryResult(testStreamDB.run(query[EncodingTestEntity])) - } yield { - result - } - verify(await(result)) - } - } - - "encodes collections" - { - "stream" in { - import testStreamDB._ - val q = quote { (list: Query[Int]) => - query[EncodingTestEntity].filter(t => list.contains(t.id)) - } - val result = - for { - _ <- actionResult(testStreamDB.run(query[EncodingTestEntity].delete)) - _ <- actionResult(testStreamDB.run(liftQuery(insertValues).foreach(e => query[EncodingTestEntity].insert(e)))) - result <- queryResult(testStreamDB.run(q(liftQuery(insertValues.map(_.id))))) - } yield { - result - } - verify(await(result)) - } - } -} diff --git a/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/streaming/QueryResultTypeCassandraStreamSpec.scala b/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/streaming/QueryResultTypeCassandraStreamSpec.scala deleted file mode 100644 index 88100acca9..0000000000 --- a/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/streaming/QueryResultTypeCassandraStreamSpec.scala +++ /dev/null @@ -1,35 +0,0 @@ -package io.getquill.context.cassandra.streaming - -import akka.NotUsed -import akka.stream.scaladsl.Source -import io.getquill.context.cassandra.QueryResultTypeCassandraSpec - -class QueryResultTypeCassandraStreamSpec extends QueryResultTypeCassandraSpec { - - import io.getquill.context.cassandra.utils._ - - val context = testStreamDB - import context._ - - def result[T](stream: Source[T, NotUsed]): List[T] = - await(stream.runFold(List.empty[T])(_ :+ _)) - - override def beforeAll = { - result(context.run(deleteAll)) - result(context.run(liftQuery(entries).foreach(e => insert(e)))) - () - } - - "query" in { - result(context.run(selectAll)) mustEqual entries - } - - "querySingle" - { - "size" in { - result(context.run(entitySize)) mustEqual List(3) - } - "parametrized size" in { - result(context.run(parametrizedSize(lift(10000)))) mustEqual List(0) - } - } -} diff --git a/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/streaming/package.scala b/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/streaming/package.scala deleted file mode 100644 index c474002820..0000000000 --- a/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/streaming/package.scala +++ /dev/null @@ -1,8 +0,0 @@ -package io.getquill.context.cassandra - -import io.getquill.{CassandraLagomStreamContext, Literal} -import utils._ - -package object streaming { - lazy val testStreamDB = new CassandraLagomStreamContext(Literal, cassandraSession) with CassandraTestEntities -} diff --git a/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/utils/DummyService.scala b/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/utils/DummyService.scala deleted file mode 100644 index 205007039a..0000000000 --- a/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/utils/DummyService.scala +++ /dev/null @@ -1,10 +0,0 @@ -package io.getquill.context.cassandra.utils - -import com.lightbend.lagom.scaladsl.api.{Descriptor, Service} - -class DummyService extends Service { - override def descriptor: Descriptor = { - import Service._ - named("dummy") - } -} diff --git a/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/utils/package.scala b/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/utils/package.scala deleted file mode 
100644 index 45de0d09cc..0000000000 --- a/quill-cassandra-lagom/src/test/scala/io/getquill/context/cassandra/utils/package.scala +++ /dev/null @@ -1,30 +0,0 @@ -package io.getquill.context.cassandra - -import akka.stream.Materializer -import com.lightbend.lagom.scaladsl.client.ConfigurationServiceLocatorComponents -import com.lightbend.lagom.scaladsl.persistence.cassandra.{CassandraPersistenceComponents, CassandraSession} -import com.lightbend.lagom.scaladsl.playjson.{EmptyJsonSerializerRegistry, JsonSerializerRegistry} -import com.lightbend.lagom.scaladsl.server.{LagomApplication, LagomServer} -import com.lightbend.lagom.scaladsl.testkit.ServiceTest -import play.api.libs.ws.ahc.AhcWSComponents - -import scala.concurrent.ExecutionContext - -package object utils { - - val server = ServiceTest.startServer(ServiceTest.defaultSetup.withCassandra(false).withCluster(true)) { ctx => - new LagomApplication(ctx) - with AhcWSComponents - with CassandraPersistenceComponents - with ConfigurationServiceLocatorComponents { - - override def lagomServer: LagomServer = serverFor[DummyService](new DummyService) - - override def jsonSerializerRegistry: JsonSerializerRegistry = EmptyJsonSerializerRegistry - } - } - - val cassandraSession: CassandraSession = server.application.cassandraSession - implicit val executionContext: ExecutionContext = server.application.executionContext - implicit val materializer: Materializer = server.application.materializer -} diff --git a/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/other/ExampleAppImplicitEnv.scala b/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/other/ExampleAppImplicitEnv.scala index 37dfdfee27..a11b4b3199 100644 --- a/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/other/ExampleAppImplicitEnv.scala +++ b/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/other/ExampleAppImplicitEnv.scala @@ -1,6 +1,7 @@ package io.getquill.context.cassandra.zio.examples.other -import io.getquill.{CassandraZioContext, _} +import io.getquill._ +import io.getquill.context.qzio.ImplicitSyntax.Implicit import zio.{ZIO, ZIOAppDefault} import zio.Console.printLine import io.getquill.context.qzio.ImplicitSyntax._ diff --git a/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/util/SchemaMaker.scala b/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/util/SchemaMaker.scala index 05efca8d55..4c5c830f5e 100644 --- a/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/util/SchemaMaker.scala +++ b/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/util/SchemaMaker.scala @@ -13,7 +13,7 @@ abstract class CodegenSpec extends AnyFreeSpec with SchemaMaker { type Prefix <: ConfigPrefix val prefix: Prefix - implicit def regToOption[T](t: T) = Some(t) + implicit def regToOption[T](t: T): Option[T] = Some(t) } object SchemaMaker extends SchemaMaker diff --git a/quill-codegen/src/main/scala/io/getquill/codegen/dag/Ancestry.scala b/quill-codegen/src/main/scala/io/getquill/codegen/dag/Ancestry.scala index 9ac4a07eef..29359c1800 100644 --- a/quill-codegen/src/main/scala/io/getquill/codegen/dag/Ancestry.scala +++ b/quill-codegen/src/main/scala/io/getquill/codegen/dag/Ancestry.scala @@ -17,7 +17,7 @@ object DefaultNodeCatalog extends NodeCatalog { private val logger = Logger(LoggerFactory.getLogger(this.getClass)) - implicit def nodeToOpt(dagNode: DagNode) = Some(dagNode) + implicit def nodeToOpt(dagNode: DagNode): Option[DagNode] = Some(dagNode) object StringNode extends 
DagNode(classTag[String], None) diff --git a/quill-core/js/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala b/quill-core/js/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala deleted file mode 100644 index 966668b1f5..0000000000 --- a/quill-core/js/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala +++ /dev/null @@ -1,491 +0,0 @@ -package io.getquill.dsl - -import io.getquill.ast.Renameable.Fixed - -import scala.language.implicitConversions -import scala.language.experimental.macros -import io.getquill.ast.{External, _} -import io.getquill.quat._ - -import scala.reflect.macros.whitebox.{Context => MacroContext} -import io.getquill.util.Messages._ - -import scala.util.DynamicVariable -import scala.reflect.ClassTag -import io.getquill.{ActionReturning, Delete, EntityQuery, Insert, Ord, Query, Quoted, Update, Action => DslAction} - -import scala.annotation.tailrec - -class DynamicQueryDslMacro(val c: MacroContext) { - import c.universe._ - - def dynamicUnquote(d: Tree): Tree = - q"${c.prefix}.unquote($d.q)" - - def insertValue(value: Tree): Tree = - q""" - DynamicInsert(${c.prefix}.q.insertValue(lift($value))) - """ - - def updateValue(value: Tree): Tree = - q""" - DynamicUpdate(${c.prefix}.q.updateValue(lift($value))) - """ -} - -trait DynamicQueryDsl { - dsl: CoreDsl => - - implicit class ToDynamicQuery[T](q: Quoted[Query[T]]) { - def dynamic: DynamicQuery[T] = DynamicQuery(q) - } - - implicit class ToDynamicEntityQuery[T](q: Quoted[EntityQuery[T]]) { - def dynamic: DynamicEntityQuery[T] = DynamicEntityQuery(q) - } - - implicit class ToDynamicAction[T](q: Quoted[DslAction[T]]) { - def dynamic: DynamicAction[DslAction[T]] = DynamicAction(q) - } - - implicit class ToDynamicInsert[T](q: Quoted[Insert[T]]) { - def dynamic: DynamicInsert[T] = DynamicInsert(q) - } - - implicit class ToDynamicUpdate[T](q: Quoted[Update[T]]) { - def dynamic: DynamicUpdate[T] = DynamicUpdate(q) - } - - implicit class ToDynamicActionReturning[T, U]( - q: Quoted[ActionReturning[T, U]] - ) { - def dynamic: DynamicActionReturning[T, U] = DynamicActionReturning(q) - } - - implicit def dynamicUnquote[T](d: DynamicQuery[T]): Query[T] = macro DynamicQueryDslMacro.dynamicUnquote - - implicit def toQuoted[T](q: DynamicQuery[T]): Quoted[Query[T]] = q.q - implicit def toQuoted[T](q: DynamicEntityQuery[T]): Quoted[EntityQuery[T]] = - q.q - implicit def toQuoted[T <: DslAction[_]](q: DynamicAction[T]): Quoted[T] = q.q - - def dynamicQuery[T](implicit t: ClassTag[T]): DynamicEntityQuery[T] = - DynamicEntityQuery( - splice[EntityQuery[T]]( - Entity(t.runtimeClass.getSimpleName, Nil, RuntimeEntityQuat[T].probit) - ) - ) - - case class DynamicAlias[T](property: Quoted[T] => Quoted[Any], name: String) - - def alias[T]( - property: Quoted[T] => Quoted[Any], - name: String - ): DynamicAlias[T] = DynamicAlias(property, name) - - sealed trait DynamicSet[T, U] - - case class DynamicSetValue[T, U]( - property: Quoted[T] => Quoted[U], - value: Quoted[U] - ) extends DynamicSet[T, U] - case class DynamicSetEmpty[T, U]() extends DynamicSet[T, U] - - def set[T, U]( - property: Quoted[T] => Quoted[U], - value: Quoted[U] - ): DynamicSet[T, U] = - DynamicSetValue(property, value) - - def setValue[T, U]( - property: Quoted[T] => Quoted[U], - value: U - )(implicit enc: Encoder[U]): DynamicSet[T, U] = - set[T, U](property, spliceLift(value)) - - def setOpt[T, U](property: Quoted[T] => Quoted[U], value: Option[U])(implicit - enc: Encoder[U] - ): DynamicSet[T, U] = - value match { - case Some(v) => setValue(property, v) - case None => 
DynamicSetEmpty() - } - - def set[T, U](property: String, value: Quoted[U]): DynamicSet[T, U] = - set((f: Quoted[T]) => splice(Property(f.ast, property)), value) - - def setValue[T, U]( - property: String, - value: U - )(implicit enc: Encoder[U]): DynamicSet[T, U] = - set(property, spliceLift(value)) - - def dynamicQuerySchema[T]( - entity: String, - columns: DynamicAlias[T]* - )(implicit ct: ClassTag[T]): DynamicEntityQuery[T] = { - val aliases = - columns.map { alias => - @tailrec def path(ast: Ast, acc: List[String] = Nil): List[String] = - ast match { - case Property(a, name) => - path(a, name :: acc) - case _ => - acc - } - - PropertyAlias( - path(alias.property(splice[T](Ident("v", RuntimeEntityQuat[T]))).ast), - alias.name - ) - } - DynamicEntityQuery( - splice[EntityQuery[T]](Entity.Opinionated(entity, aliases.toList, RuntimeEntityQuat[T].probit, Fixed)) - ) - } - - private[this] val nextIdentId = new DynamicVariable(0) - - private[this] def withFreshIdent[R](f: Ident => R)(quat: Quat): R = { - val idx = nextIdentId.value - nextIdentId.withValue(idx + 1) { - f(Ident(s"v$idx", quat)) - } - } - - private def dyn[T](ast: Ast): DynamicQuery[T] = - DynamicQuery[T](splice[Query[T]](ast)) - - private def splice[T](a: Ast) = - new Quoted[T] { - override def ast = a - } - - protected def spliceLift[O](o: O)(implicit enc: Encoder[O]) = - splice[O](ScalarValueLift("o", External.Source.Parser, o, enc, Quat.Value)) - - object DynamicQuery { - def apply[T](p: Quoted[Query[T]]) = - new DynamicQuery[T] { - override def q = p - } - } - - sealed trait DynamicQuery[+T] { - - protected[getquill] def q: Quoted[Query[T]] - - protected[this] def transform[U, V, R]( - f: Quoted[U] => Quoted[V], - t: (Ast, Ident, Ast) => Ast, - r: Ast => R = dyn _ - ) = - withFreshIdent { v => - r(t(q.ast, v, f(splice(v)).ast)) - }(Quat.Generic) - - protected[this] def transformOpt[O, R, D <: DynamicQuery[T]]( - opt: Option[O], - f: (Quoted[T], Quoted[O]) => Quoted[R], - t: (Quoted[T] => Quoted[R]) => D, - thiz: D - )(implicit enc: Encoder[O]) = - opt match { - case Some(o) => - t(v => f(v, spliceLift(o))) - case None => - thiz - } - - def map[R](f: Quoted[T] => Quoted[R]): DynamicQuery[R] = - transform(f, Map) - - def flatMap[R](f: Quoted[T] => Quoted[Query[R]]): DynamicQuery[R] = - transform(f, FlatMap) - - def filter(f: Quoted[T] => Quoted[Boolean]): DynamicQuery[T] = - transform(f, Filter) - - def withFilter(f: Quoted[T] => Quoted[Boolean]): DynamicQuery[T] = - filter(f) - - def filterOpt[O](opt: Option[O])( - f: (Quoted[T], Quoted[O]) => Quoted[Boolean] - )(implicit enc: Encoder[O]): DynamicQuery[T] = - transformOpt(opt, f, filter, this) - - def filterIf( - cond: Boolean - )(f: Quoted[T] => Quoted[Boolean]): DynamicQuery[T] = - if (cond) filter(f) - else this - - def concatMap[R, U]( - f: Quoted[T] => Quoted[U] - )(implicit ev: U => Iterable[R]): DynamicQuery[R] = - transform(f, ConcatMap) - - def sortBy[R]( - f: Quoted[T] => Quoted[R] - )(implicit ord: Ord[R]): DynamicQuery[T] = - transform(f, SortBy(_, _, _, ord.ord)) - - def take(n: Quoted[Int]): DynamicQuery[T] = - dyn(Take(q.ast, n.ast)) - - def take(n: Int): DynamicQuery[T] = - take(spliceLift(n)) - - def takeOpt(opt: Option[Int]): DynamicQuery[T] = - opt match { - case Some(o) => take(o) - case None => this - } - - def drop(n: Quoted[Int]): DynamicQuery[T] = - dyn(Drop(q.ast, n.ast)) - - def drop(n: Int): DynamicQuery[T] = - drop(spliceLift(n)) - - def dropOpt(opt: Option[Int]): DynamicQuery[T] = - opt match { - case Some(o) => drop(o) - case None => this - } - 
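
(Illustrative sketch, not part of the patch: the dynamic-query combinators deleted from this ScalaJS-only copy remain available from the shared quill-core sources that this patch moves into place further down. Assuming a mirror context and a made-up Person entity, a call site using the filterOpt/takeOpt combinators defined in this file looks roughly like this.)

// Hypothetical usage sketch of the dynamic DSL defined in this file; `ctx`,
// `Person`, and the parameter values are illustrative assumptions, not code
// from this repository.
import io.getquill._

object DynamicQuerySketch {
  val ctx = new SqlMirrorContext(PostgresDialect, Literal)
  import ctx._

  case class Person(name: String, age: Int)

  // The filter and the limit are applied only when the options are defined,
  // via the `filterOpt` / `takeOpt` combinators shown in this file.
  def people(minAge: Option[Int], limit: Option[Int]) =
    ctx.run(
      dynamicQuery[Person]
        .filterOpt(minAge)((p, min) => quote(p.age >= min))
        .takeOpt(limit)
    )
}
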
- def ++[U >: T](q2: Quoted[Query[U]]): DynamicQuery[U] = - dyn(UnionAll(q.ast, q2.ast)) - - def unionAll[U >: T](q2: Quoted[Query[U]]): DynamicQuery[U] = - dyn(UnionAll(q.ast, q2.ast)) - - def union[U >: T](q2: Quoted[Query[U]]): DynamicQuery[U] = - dyn(Union(q.ast, q2.ast)) - - def groupBy[R](f: Quoted[T] => Quoted[R]): DynamicQuery[(R, Query[T])] = - transform(f, GroupBy) - - private def aggregate(op: AggregationOperator) = - splice(Aggregation(op, q.ast)) - - def min[U >: T]: Quoted[Option[T]] = - aggregate(AggregationOperator.min) - - def max[U >: T]: Quoted[Option[T]] = - aggregate(AggregationOperator.max) - - def avg[U >: T](implicit n: Numeric[U]): Quoted[Option[T]] = - aggregate(AggregationOperator.avg) - - def sum[U >: T](implicit n: Numeric[U]): Quoted[Option[T]] = - aggregate(AggregationOperator.sum) - - def size: Quoted[Long] = - aggregate(AggregationOperator.size) - - def join[A >: T, B](q2: Quoted[Query[B]]): DynamicJoinQuery[A, B, (A, B)] = - DynamicJoinQuery(InnerJoin, q, q2) - - def leftJoin[A >: T, B]( - q2: Quoted[Query[B]] - ): DynamicJoinQuery[A, B, (A, Option[B])] = - DynamicJoinQuery(LeftJoin, q, q2) - - def rightJoin[A >: T, B]( - q2: Quoted[Query[B]] - ): DynamicJoinQuery[A, B, (Option[A], B)] = - DynamicJoinQuery(RightJoin, q, q2) - - def fullJoin[A >: T, B]( - q2: Quoted[Query[B]] - ): DynamicJoinQuery[A, B, (Option[A], Option[B])] = - DynamicJoinQuery(FullJoin, q, q2) - - private[this] def flatJoin[R]( - tpe: JoinType, - on: Quoted[T] => Quoted[Boolean] - ): DynamicQuery[R] = - withFreshIdent { v => - dyn(FlatJoin(tpe, q.ast, v, on(splice(v)).ast)) - }(Quat.Generic) - - def join[A >: T](on: Quoted[A] => Quoted[Boolean]): DynamicQuery[A] = - flatJoin(InnerJoin, on) - - def leftJoin[A >: T]( - on: Quoted[A] => Quoted[Boolean] - ): DynamicQuery[Option[A]] = - flatJoin(LeftJoin, on) - - def rightJoin[A >: T]( - on: Quoted[A] => Quoted[Boolean] - ): DynamicQuery[Option[A]] = - flatJoin(RightJoin, on) - - def nonEmpty: Quoted[Boolean] = - splice(UnaryOperation(SetOperator.nonEmpty, q.ast)) - - def isEmpty: Quoted[Boolean] = - splice(UnaryOperation(SetOperator.isEmpty, q.ast)) - - def contains[B >: T](value: B)(implicit enc: Encoder[B]): Quoted[Boolean] = - contains(spliceLift(value)) - - def contains[B >: T](value: Quoted[B]): Quoted[Boolean] = - splice(BinaryOperation(q.ast, SetOperator.contains, value.ast)) - - def distinct: DynamicQuery[T] = - dyn(Distinct(q.ast)) - - def distinctOn[R](f: Quoted[T] => Quoted[R]): DynamicQuery[R] = - transform(f, DistinctOn) - - def nested: DynamicQuery[T] = - dyn(Nested(q.ast)) - - override def toString = q.toString - } - - case class DynamicJoinQuery[A, B, R]( - tpe: JoinType, - q1: Quoted[Query[A]], - q2: Quoted[Query[B]] - ) { - def on(f: (Quoted[A], Quoted[B]) => Quoted[Boolean]): DynamicQuery[R] = - withFreshIdent { iA => - withFreshIdent { iB => - dyn(Join(tpe, q1.ast, q2.ast, iA, iB, f(splice(iA), splice(iB)).ast)) - }(q2.ast.quat) // TODO Verify Quat Later - }(q1.ast.quat) // TODO Verify Quat Later - } - - case class DynamicEntityQuery[T](q: Quoted[EntityQuery[T]]) extends DynamicQuery[T] { - - private[this] def dyn[R](ast: Ast) = - DynamicEntityQuery(splice[EntityQuery[R]](ast)) - - override def filter( - f: Quoted[T] => Quoted[Boolean] - ): DynamicEntityQuery[T] = - transform(f, Filter, dyn) - - override def withFilter( - f: Quoted[T] => Quoted[Boolean] - ): DynamicEntityQuery[T] = - filter(f) - - override def filterOpt[O](opt: Option[O])( - f: (Quoted[T], Quoted[O]) => Quoted[Boolean] - )(implicit enc: Encoder[O]): 
DynamicEntityQuery[T] = - transformOpt(opt, f, filter, this) - - override def map[R](f: Quoted[T] => Quoted[R]): DynamicEntityQuery[R] = - transform(f, Map, dyn) - - def insertValue(value: T): DynamicInsert[T] = macro DynamicQueryDslMacro.insertValue - - type DynamicAssignment[U] = ((Quoted[T] => Quoted[U]), U) - - private[this] def assignments[S]( - l: List[DynamicSet[S, _]] - ): List[Assignment] = - l.collect { case s: DynamicSetValue[_, _] => - val v = Ident("v", Quat.Generic) - Assignment(v, s.property(splice(v)).ast, s.value.ast) - } - - def insert(l: DynamicSet[T, _]*): DynamicInsert[T] = - DynamicInsert( - splice(Insert(DynamicEntityQuery.this.q.ast, assignments(l.toList))) - ) - - def updateValue(value: T): DynamicUpdate[T] = macro DynamicQueryDslMacro.updateValue - - def update(sets: DynamicSet[T, _]*): DynamicUpdate[T] = - DynamicUpdate( - splice[Update[T]]( - Update(DynamicEntityQuery.this.q.ast, assignments(sets.toList)) - ) - ) - - def delete: DynamicDelete[T] = - DynamicDelete(splice[Delete[T]](Delete(DynamicEntityQuery.this.q.ast))) - } - - object DynamicAction { - def apply[A <: DslAction[_]](p: Quoted[A]) = - new DynamicAction[A] { - override val q = p - } - } - - sealed trait DynamicAction[A <: DslAction[_]] { - protected[getquill] def q: Quoted[A] - - override def toString = q.toString - } - - object DynamicInsert { - def apply[E](p: Quoted[Insert[E]]) = - new DynamicInsert[E] { - override val q = p - } - } - - trait DynamicInsert[E] extends DynamicAction[Insert[E]] { - - private[this] def dyn[R](ast: Ast) = - DynamicInsert[R](splice(ast)) - - def returning[R](f: Quoted[E] => Quoted[R]): DynamicActionReturning[E, R] = - withFreshIdent { v => - DynamicActionReturning[E, R](splice(Returning(q.ast, v, f(splice(v)).ast))) - }(Quat.Generic) - - def returningGenerated[R]( - f: Quoted[E] => Quoted[R] - ): DynamicActionReturning[E, R] = - withFreshIdent { v => - DynamicActionReturning[E, R]( - splice(ReturningGenerated(q.ast, v, f(splice(v)).ast)) - ) - }(Quat.Generic) - - def onConflictIgnore: DynamicInsert[E] = - dyn( - OnConflict( - DynamicInsert.this.q.ast, - OnConflict.NoTarget, - OnConflict.Ignore - ) - ) - - def onConflictIgnore( - targets: (Quoted[E] => Quoted[Any])* - ): DynamicInsert[E] = { - val v = splice[E](Ident("v", Quat.Generic)) - val properties = - targets.toList.map { f => - f(v).ast match { - case p: Property => p - case p => - fail(s"Invalid ignore column: $p") - } - } - dyn( - OnConflict( - DynamicInsert.this.q.ast, - OnConflict.Properties(properties), - OnConflict.Ignore - ) - ) - } - } - - case class DynamicActionReturning[E, Output]( - q: Quoted[ActionReturning[E, Output]] - ) extends DynamicAction[ActionReturning[E, Output]] - case class DynamicUpdate[E](q: Quoted[Update[E]]) extends DynamicAction[Update[E]] - case class DynamicDelete[E](q: Quoted[Delete[E]]) extends DynamicAction[Delete[E]] -} diff --git a/quill-core/js/src/main/scala/io/getquill/log/ContextLog.scala b/quill-core/js/src/main/scala/io/getquill/log/ContextLog.scala deleted file mode 100644 index cd3f953bb0..0000000000 --- a/quill-core/js/src/main/scala/io/getquill/log/ContextLog.scala +++ /dev/null @@ -1,5 +0,0 @@ -package io.getquill.log - -object ContextLog { - def apply(str: String) = println(str) -} diff --git a/quill-core/jvm/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala b/quill-core/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala similarity index 99% rename from quill-core/jvm/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala rename to 
quill-core/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala index c9ecdcabf9..4e1806613e 100644 --- a/quill-core/jvm/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala +++ b/quill-core/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala @@ -1,21 +1,17 @@ package io.getquill.dsl import io.getquill.ast.Renameable.Fixed - -import scala.language.implicitConversions -import scala.language.experimental.macros import io.getquill.ast._ import io.getquill.quat._ - -import scala.reflect.macros.whitebox.{Context => MacroContext} import io.getquill.util.Messages._ - -import scala.util.DynamicVariable -import scala.reflect.ClassTag -import scala.reflect.runtime.{universe => u} import io.getquill.{ActionReturning, Delete, EntityQuery, Insert, Ord, Query, Quoted, Update, Action => DslAction} import scala.annotation.tailrec +import scala.language.experimental.macros +import scala.language.implicitConversions +import scala.reflect.macros.whitebox.{Context => MacroContext} +import scala.reflect.runtime.{universe => u} +import scala.util.DynamicVariable class DynamicQueryDslMacro(val c: MacroContext) { import c.universe._ diff --git a/quill-core/jvm/src/main/scala/io/getquill/log/ContextLog.scala b/quill-core/src/main/scala/io/getquill/log/ContextLog.scala similarity index 100% rename from quill-core/jvm/src/main/scala/io/getquill/log/ContextLog.scala rename to quill-core/src/main/scala/io/getquill/log/ContextLog.scala diff --git a/quill-core/src/test/scala/io/getquill/MirrorContexts.scala b/quill-core/src/test/scala/io/getquill/MirrorContexts.scala index e13c61a118..e5847795a4 100644 --- a/quill-core/src/test/scala/io/getquill/MirrorContexts.scala +++ b/quill-core/src/test/scala/io/getquill/MirrorContexts.scala @@ -9,7 +9,7 @@ object MirrorContexts { object testAsyncContext extends AsyncMirrorContext(MirrorIdiom, Literal) with TestEntities { // hack to avoid Await.result since scala.js doesn't support it - implicit val immediateEC = new ExecutionContext { + implicit val immediateEC: ExecutionContext = new ExecutionContext { def execute(runnable: Runnable) = runnable.run() def reportFailure(cause: Throwable) = () } diff --git a/quill-core/src/test/scala/io/getquill/context/mirror/MirrorIdiomSpec.scala b/quill-core/src/test/scala/io/getquill/context/mirror/MirrorIdiomSpec.scala index 500484c5ab..4f4656fa39 100644 --- a/quill-core/src/test/scala/io/getquill/context/mirror/MirrorIdiomSpec.scala +++ b/quill-core/src/test/scala/io/getquill/context/mirror/MirrorIdiomSpec.scala @@ -15,7 +15,7 @@ class MirrorIdiomSpec extends Spec { import MirrorIdiom._ - implicit val naming = Literal + implicit val naming: Literal = Literal "shows schema query" - { "entity" in { diff --git a/quill-core/jvm/src/test/scala/io/getquill/util/LogToFileSpec.scala b/quill-core/src/test/scala/io/getquill/util/LogToFileSpec.scala similarity index 100% rename from quill-core/jvm/src/test/scala/io/getquill/util/LogToFileSpec.scala rename to quill-core/src/test/scala/io/getquill/util/LogToFileSpec.scala diff --git a/quill-jasync-zio-postgres/src/main/scala/io/getquill/context/zio/PostgresJAsyncContextConfig.scala b/quill-jasync-zio-postgres/src/main/scala/io/getquill/context/qzio/PostgresJAsyncContextConfig.scala similarity index 92% rename from quill-jasync-zio-postgres/src/main/scala/io/getquill/context/zio/PostgresJAsyncContextConfig.scala rename to quill-jasync-zio-postgres/src/main/scala/io/getquill/context/qzio/PostgresJAsyncContextConfig.scala index 964fe7bf97..6e88f2b6b9 100644 --- 
a/quill-jasync-zio-postgres/src/main/scala/io/getquill/context/zio/PostgresJAsyncContextConfig.scala +++ b/quill-jasync-zio-postgres/src/main/scala/io/getquill/context/qzio/PostgresJAsyncContextConfig.scala @@ -1,4 +1,4 @@ -package io.getquill.context.zio +package io.getquill.context.qzio import com.github.jasync.sql.db.postgresql.PostgreSQLConnection import com.github.jasync.sql.db.postgresql.pool.PostgreSQLConnectionFactory diff --git a/quill-jasync-zio-postgres/src/main/scala/io/getquill/context/zio/PostgresZioJAsyncContext.scala b/quill-jasync-zio-postgres/src/main/scala/io/getquill/context/qzio/PostgresZioJAsyncContext.scala similarity index 90% rename from quill-jasync-zio-postgres/src/main/scala/io/getquill/context/zio/PostgresZioJAsyncContext.scala rename to quill-jasync-zio-postgres/src/main/scala/io/getquill/context/qzio/PostgresZioJAsyncContext.scala index 7e9c4c36b5..bca502f162 100644 --- a/quill-jasync-zio-postgres/src/main/scala/io/getquill/context/zio/PostgresZioJAsyncContext.scala +++ b/quill-jasync-zio-postgres/src/main/scala/io/getquill/context/qzio/PostgresZioJAsyncContext.scala @@ -1,10 +1,9 @@ -package io.getquill.context.zio +package io.getquill.context.qzio import com.github.jasync.sql.db.postgresql.PostgreSQLConnection import com.github.jasync.sql.db.{QueryResult => DBQueryResult} import io.getquill.ReturnAction.{ReturnColumns, ReturnNothing, ReturnRecord} -import io.getquill.context.zio.jasync.{ArrayDecoders, ArrayEncoders} -import io.getquill.util.Messages.fail +import io.getquill.context.qzio.jasync.{ArrayDecoders, ArrayEncoders} import io.getquill.{NamingStrategy, PostgresDialect, ReturnAction} import scala.jdk.CollectionConverters._ diff --git a/quill-jasync-zio-postgres/src/main/scala/io/getquill/context/zio/jasync/ArrayDecoders.scala b/quill-jasync-zio-postgres/src/main/scala/io/getquill/context/qzio/jasync/ArrayDecoders.scala similarity index 96% rename from quill-jasync-zio-postgres/src/main/scala/io/getquill/context/zio/jasync/ArrayDecoders.scala rename to quill-jasync-zio-postgres/src/main/scala/io/getquill/context/qzio/jasync/ArrayDecoders.scala index 037471bca5..f8c043a0f8 100644 --- a/quill-jasync-zio-postgres/src/main/scala/io/getquill/context/zio/jasync/ArrayDecoders.scala +++ b/quill-jasync-zio-postgres/src/main/scala/io/getquill/context/qzio/jasync/ArrayDecoders.scala @@ -1,7 +1,7 @@ -package io.getquill.context.zio.jasync +package io.getquill.context.qzio.jasync +import io.getquill.context.qzio.{PostgresZioJAsyncContext, SqlTypes} import io.getquill.context.sql.encoding.ArrayEncoding -import io.getquill.context.zio.{PostgresZioJAsyncContext, SqlTypes} import io.getquill.util.Messages.fail import java.time.{LocalDate, LocalDateTime, ZoneId} diff --git a/quill-jasync-zio-postgres/src/main/scala/io/getquill/context/zio/jasync/ArrayEncoders.scala b/quill-jasync-zio-postgres/src/main/scala/io/getquill/context/qzio/jasync/ArrayEncoders.scala similarity index 93% rename from quill-jasync-zio-postgres/src/main/scala/io/getquill/context/zio/jasync/ArrayEncoders.scala rename to quill-jasync-zio-postgres/src/main/scala/io/getquill/context/qzio/jasync/ArrayEncoders.scala index 207809594a..7fea21e84a 100644 --- a/quill-jasync-zio-postgres/src/main/scala/io/getquill/context/zio/jasync/ArrayEncoders.scala +++ b/quill-jasync-zio-postgres/src/main/scala/io/getquill/context/qzio/jasync/ArrayEncoders.scala @@ -1,10 +1,10 @@ -package io.getquill.context.zio.jasync +package io.getquill.context.qzio.jasync + +import io.getquill.context.qzio.{PostgresZioJAsyncContext, 
SqlTypes} -import java.sql.Timestamp import java.time.{LocalDate, LocalDateTime, OffsetDateTime} import java.util.Date import io.getquill.context.sql.encoding.ArrayEncoding -import io.getquill.context.zio.{PostgresZioJAsyncContext, SqlTypes} trait ArrayEncoders extends ArrayEncoding { self: PostgresZioJAsyncContext[_] => diff --git a/quill-jasync-zio-postgres/src/test/scala/io/getquill/PostgresJAsyncContextConfigSpec.scala b/quill-jasync-zio-postgres/src/test/scala/io/getquill/PostgresJAsyncContextConfigSpec.scala index 64f6337dfa..6590727867 100644 --- a/quill-jasync-zio-postgres/src/test/scala/io/getquill/PostgresJAsyncContextConfigSpec.scala +++ b/quill-jasync-zio-postgres/src/test/scala/io/getquill/PostgresJAsyncContextConfigSpec.scala @@ -1,11 +1,12 @@ package io.getquill -import java.io.File import com.github.jasync.sql.db.SSLConfiguration import com.github.jasync.sql.db.SSLConfiguration.Mode import com.typesafe.config.{ConfigFactory, ConfigValueFactory} import io.getquill.base.Spec -import io.getquill.context.zio.PostgresJAsyncContextConfig +import io.getquill.context.qzio.PostgresJAsyncContextConfig + +import java.io.File class PostgresJAsyncContextConfigSpec extends Spec { "parses ssl config" in { diff --git a/quill-jasync-zio-postgres/src/test/scala/io/getquill/TypeParamExtensionTest.scala b/quill-jasync-zio-postgres/src/test/scala/io/getquill/TypeParamExtensionTest.scala index 44c511fe68..d80d437a65 100644 --- a/quill-jasync-zio-postgres/src/test/scala/io/getquill/TypeParamExtensionTest.scala +++ b/quill-jasync-zio-postgres/src/test/scala/io/getquill/TypeParamExtensionTest.scala @@ -1,7 +1,7 @@ package io.getquill import io.getquill.context.Context -import io.getquill.context.zio.PostgresZioJAsyncContext +import io.getquill.context.qzio.PostgresZioJAsyncContext // Testing we are passing type params explicitly into AsyncContext, otherwise // this file will fail to compile diff --git a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/ArrayAsyncEncodingSpec.scala b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/ArrayAsyncEncodingSpec.scala similarity index 98% rename from quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/ArrayAsyncEncodingSpec.scala rename to quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/ArrayAsyncEncodingSpec.scala index a92d373eae..8d14a01c8c 100644 --- a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/ArrayAsyncEncodingSpec.scala +++ b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/ArrayAsyncEncodingSpec.scala @@ -1,10 +1,10 @@ -package io.getquill.context.zio.jasync.postgres +package io.getquill.context.qzio.jasync.postgres -import java.time.{LocalDate, LocalDateTime} -import java.util.{Date, UUID} import io.getquill.context.sql.EncodingTestType import io.getquill.context.sql.encoding.ArrayEncodingBaseSpec -import zio.FiberFailure + +import java.time.{LocalDate, LocalDateTime} +import java.util.{Date, UUID} class ArrayAsyncEncodingSpec extends ArrayEncodingBaseSpec with ZioSpec { import context._ diff --git a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/ArrayOpsAsyncSpec.scala b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/ArrayOpsAsyncSpec.scala similarity index 93% rename from quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/ArrayOpsAsyncSpec.scala rename to 
quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/ArrayOpsAsyncSpec.scala index 0d7329a47c..5a139e7c21 100644 --- a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/ArrayOpsAsyncSpec.scala +++ b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/ArrayOpsAsyncSpec.scala @@ -1,4 +1,4 @@ -package io.getquill.context.zio.jasync.postgres +package io.getquill.context.qzio.jasync.postgres import io.getquill.context.sql.base.ArrayOpsSpec diff --git a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/CaseClassQueryAsyncSpec.scala b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/CaseClassQueryAsyncSpec.scala similarity index 97% rename from quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/CaseClassQueryAsyncSpec.scala rename to quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/CaseClassQueryAsyncSpec.scala index a9379341f0..4d332cdc2f 100644 --- a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/CaseClassQueryAsyncSpec.scala +++ b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/CaseClassQueryAsyncSpec.scala @@ -1,4 +1,4 @@ -package io.getquill.context.zio.jasync.postgres +package io.getquill.context.qzio.jasync.postgres import io.getquill.context.sql.base.CaseClassQuerySpec import org.scalatest.matchers.should.Matchers._ diff --git a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/DepartmentsPostgresAsyncSpec.scala b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/DepartmentsPostgresAsyncSpec.scala similarity index 95% rename from quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/DepartmentsPostgresAsyncSpec.scala rename to quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/DepartmentsPostgresAsyncSpec.scala index 63d06faf4b..c84a23438a 100644 --- a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/DepartmentsPostgresAsyncSpec.scala +++ b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/DepartmentsPostgresAsyncSpec.scala @@ -1,4 +1,4 @@ -package io.getquill.context.zio.jasync.postgres +package io.getquill.context.qzio.jasync.postgres import io.getquill.context.sql.base.DepartmentsSpec diff --git a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/OnConflictAsyncSpec.scala b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/OnConflictAsyncSpec.scala similarity index 95% rename from quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/OnConflictAsyncSpec.scala rename to quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/OnConflictAsyncSpec.scala index af0a2f5e20..fb97bec942 100644 --- a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/OnConflictAsyncSpec.scala +++ b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/OnConflictAsyncSpec.scala @@ -1,4 +1,4 @@ -package io.getquill.context.zio.jasync.postgres +package io.getquill.context.qzio.jasync.postgres import io.getquill.context.sql.base.OnConflictSpec diff --git a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/PeopleAsyncReturningSpec.scala 
b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/PeopleAsyncReturningSpec.scala
similarity index 97%
rename from quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/PeopleAsyncReturningSpec.scala
rename to quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/PeopleAsyncReturningSpec.scala
index 98e77e3e48..4d6a424f8a 100644
--- a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/PeopleAsyncReturningSpec.scala
+++ b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/PeopleAsyncReturningSpec.scala
@@ -1,4 +1,4 @@
-package io.getquill.context.zio.jasync.postgres
+package io.getquill.context.qzio.jasync.postgres

 import io.getquill.context.sql.base.PeopleReturningSpec

diff --git a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/PeoplePostgresAsyncSpec.scala b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/PeoplePostgresAsyncSpec.scala
similarity index 97%
rename from quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/PeoplePostgresAsyncSpec.scala
rename to quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/PeoplePostgresAsyncSpec.scala
index 2d17ae6e52..38a2f013b4 100644
--- a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/PeoplePostgresAsyncSpec.scala
+++ b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/PeoplePostgresAsyncSpec.scala
@@ -1,4 +1,4 @@
-package io.getquill.context.zio.jasync.postgres
+package io.getquill.context.qzio.jasync.postgres

 import io.getquill.context.sql.base.PeopleSpec

diff --git a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/PostgresAsyncEncodingSpec.scala b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/PostgresAsyncEncodingSpec.scala
similarity index 96%
rename from quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/PostgresAsyncEncodingSpec.scala
rename to quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/PostgresAsyncEncodingSpec.scala
index 32012102b0..09ec7e74cb 100644
--- a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/PostgresAsyncEncodingSpec.scala
+++ b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/PostgresAsyncEncodingSpec.scala
@@ -1,12 +1,11 @@
-package io.getquill.context.zio.jasync.postgres
+package io.getquill.context.qzio.jasync.postgres

-import java.time.{LocalDate, LocalDateTime, ZonedDateTime}
+import java.time.{LocalDate, LocalDateTime}

 import io.getquill.context.sql.EncodingSpec

 import java.util.Date
 import java.util.UUID
 import io.getquill.Query
-import zio.FiberFailure

 class PostgresAsyncEncodingSpec extends EncodingSpec with ZioSpec {
diff --git a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/PostgresJAsyncContextSpec.scala b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/PostgresJAsyncContextSpec.scala
similarity index 95%
rename from quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/PostgresJAsyncContextSpec.scala
rename to quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/PostgresJAsyncContextSpec.scala
index 93a6994019..6fd2ed5b08 100644
--- a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/PostgresJAsyncContextSpec.scala
+++ b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/PostgresJAsyncContextSpec.scala
@@ -1,9 +1,9 @@
-package io.getquill.context.zio.jasync.postgres
+package io.getquill.context.qzio.jasync.postgres

 import com.github.jasync.sql.db.{QueryResult, ResultSetKt}
 import io.getquill.ReturnAction.ReturnColumns
 import io.getquill.base.Spec
-import io.getquill.context.zio.PostgresZioJAsyncContext
+import io.getquill.context.qzio.PostgresZioJAsyncContext
 import io.getquill.{Literal, ReturnAction}

diff --git a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/ProductPostgresAsyncSpec.scala b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/ProductPostgresAsyncSpec.scala
similarity index 96%
rename from quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/ProductPostgresAsyncSpec.scala
rename to quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/ProductPostgresAsyncSpec.scala
index ba2aa5bde6..84320c16a5 100644
--- a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/ProductPostgresAsyncSpec.scala
+++ b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/ProductPostgresAsyncSpec.scala
@@ -1,7 +1,6 @@
-package io.getquill.context.zio.jasync.postgres
+package io.getquill.context.qzio.jasync.postgres

-import io.getquill.context.sql.ProductSpec
-import io.getquill.context.sql.Id
+import io.getquill.context.sql.{Id, ProductSpec}
 import zio.ZIO

 class ProductPostgresAsyncSpec extends ProductSpec with ZioSpec {
diff --git a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/QueryResultTypePostgresAsyncSpec.scala b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/QueryResultTypePostgresAsyncSpec.scala
similarity index 98%
rename from quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/QueryResultTypePostgresAsyncSpec.scala
rename to quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/QueryResultTypePostgresAsyncSpec.scala
index 85e504fdfb..805cf2ffe5 100644
--- a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/QueryResultTypePostgresAsyncSpec.scala
+++ b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/QueryResultTypePostgresAsyncSpec.scala
@@ -1,4 +1,4 @@
-package io.getquill.context.zio.jasync.postgres
+package io.getquill.context.qzio.jasync.postgres

 import io.getquill.context.sql.base.QueryResultTypeSpec

diff --git a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/TestContext.scala b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/TestContext.scala
similarity index 81%
rename from quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/TestContext.scala
rename to quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/TestContext.scala
index 35d3b409c7..51a4af396d 100644
--- a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/TestContext.scala
+++ b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/TestContext.scala
@@ -1,15 +1,15 @@
-package io.getquill.context.zio.jasync.postgres
+package io.getquill.context.qzio.jasync.postgres

 import com.github.jasync.sql.db.postgresql.PostgreSQLConnection
-import io.getquill.context.sql.{TestDecoders, TestEncoders}
-import io.getquill.context.zio.{
+import io.getquill.context.qzio.{
   JAsyncContextConfig,
-  PostgresZioJAsyncContext,
   PostgresJAsyncContextConfig,
+  PostgresZioJAsyncContext,
   ZioJAsyncConnection
 }
+import io.getquill.context.sql.{TestDecoders, TestEncoders}
 import io.getquill.util.LoadConfig
-import io.getquill.{Literal, PostgresDialect, TestEntities}
+import io.getquill.{Literal, TestEntities}
 import zio._

 class TestContext extends PostgresZioJAsyncContext(Literal) with TestEntities with TestEncoders with TestDecoders {
diff --git a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/ZioSpec.scala b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/ZioSpec.scala
similarity index 93%
rename from quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/ZioSpec.scala
rename to quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/ZioSpec.scala
index ef50101395..668335c3f4 100644
--- a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/ZioSpec.scala
+++ b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/ZioSpec.scala
@@ -1,7 +1,7 @@
-package io.getquill.context.zio.jasync.postgres
+package io.getquill.context.qzio.jasync.postgres

 import io.getquill.base.Spec
-import io.getquill.context.zio.ZioJAsyncConnection
+import io.getquill.context.qzio.ZioJAsyncConnection
 import org.scalatest.BeforeAndAfterAll
 import zio.stream.{ZSink, ZStream}
 import zio.{Runtime, Unsafe, ZIO}
diff --git a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/package.scala b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/package.scala
similarity index 63%
rename from quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/package.scala
rename to quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/package.scala
index 32a1e19b03..b3e77cf9f3 100644
--- a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/package.scala
+++ b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/qzio/jasync/postgres/package.scala
@@ -1,4 +1,4 @@
-package io.getquill.context.zio.jasync
+package io.getquill.context.qzio.jasync

 package object postgres {
   object testContext extends TestContext
diff --git a/quill-jasync-zio/src/main/scala/io/getquill/context/zio/Decoders.scala b/quill-jasync-zio/src/main/scala/io/getquill/context/qzio/Decoders.scala
similarity index 99%
rename from quill-jasync-zio/src/main/scala/io/getquill/context/zio/Decoders.scala
rename to quill-jasync-zio/src/main/scala/io/getquill/context/qzio/Decoders.scala
index b449513142..bc5654c0ad 100644
--- a/quill-jasync-zio/src/main/scala/io/getquill/context/zio/Decoders.scala
+++ b/quill-jasync-zio/src/main/scala/io/getquill/context/qzio/Decoders.scala
@@ -1,4 +1,4 @@
-package io.getquill.context.zio
+package io.getquill.context.qzio

 import com.github.jasync.sql.db.RowData
 import io.getquill.context.Context
diff --git a/quill-jasync-zio/src/main/scala/io/getquill/context/zio/Encoders.scala b/quill-jasync-zio/src/main/scala/io/getquill/context/qzio/Encoders.scala
similarity index 98%
rename from quill-jasync-zio/src/main/scala/io/getquill/context/zio/Encoders.scala
rename to quill-jasync-zio/src/main/scala/io/getquill/context/qzio/Encoders.scala
index d88585dc1d..52daa2c718 100644
--- a/quill-jasync-zio/src/main/scala/io/getquill/context/zio/Encoders.scala
+++ b/quill-jasync-zio/src/main/scala/io/getquill/context/qzio/Encoders.scala
@@ -1,4 +1,4 @@
-package io.getquill.context.zio
+package io.getquill.context.qzio

 import com.github.jasync.sql.db.RowData
 import io.getquill.context.Context
diff --git a/quill-jasync-zio/src/main/scala/io/getquill/context/zio/JAsyncContextConfig.scala b/quill-jasync-zio/src/main/scala/io/getquill/context/qzio/JAsyncContextConfig.scala
similarity index 98%
rename from quill-jasync-zio/src/main/scala/io/getquill/context/zio/JAsyncContextConfig.scala
rename to quill-jasync-zio/src/main/scala/io/getquill/context/qzio/JAsyncContextConfig.scala
index 615f6f2834..dbf7d86a47 100644
--- a/quill-jasync-zio/src/main/scala/io/getquill/context/zio/JAsyncContextConfig.scala
+++ b/quill-jasync-zio/src/main/scala/io/getquill/context/qzio/JAsyncContextConfig.scala
@@ -1,4 +1,4 @@
-package io.getquill.context.zio
+package io.getquill.context.qzio

 import com.github.jasync.sql.db._
 import com.github.jasync.sql.db.pool.ObjectFactory
diff --git a/quill-jasync-zio/src/main/scala/io/getquill/context/zio/SqlTypes.scala b/quill-jasync-zio/src/main/scala/io/getquill/context/qzio/SqlTypes.scala
similarity index 90%
rename from quill-jasync-zio/src/main/scala/io/getquill/context/zio/SqlTypes.scala
rename to quill-jasync-zio/src/main/scala/io/getquill/context/qzio/SqlTypes.scala
index 3a663a9339..a8751dc973 100644
--- a/quill-jasync-zio/src/main/scala/io/getquill/context/zio/SqlTypes.scala
+++ b/quill-jasync-zio/src/main/scala/io/getquill/context/qzio/SqlTypes.scala
@@ -1,4 +1,4 @@
-package io.getquill.context.zio
+package io.getquill.context.qzio

 object SqlTypes extends Enumeration {
   type SqlTypes = Value
diff --git a/quill-jasync-zio/src/main/scala/io/getquill/context/zio/UUIDObjectEncoding.scala b/quill-jasync-zio/src/main/scala/io/getquill/context/qzio/UUIDObjectEncoding.scala
similarity index 91%
rename from quill-jasync-zio/src/main/scala/io/getquill/context/zio/UUIDObjectEncoding.scala
rename to quill-jasync-zio/src/main/scala/io/getquill/context/qzio/UUIDObjectEncoding.scala
index 8b508c2d49..dc1b89c95f 100644
--- a/quill-jasync-zio/src/main/scala/io/getquill/context/zio/UUIDObjectEncoding.scala
+++ b/quill-jasync-zio/src/main/scala/io/getquill/context/qzio/UUIDObjectEncoding.scala
@@ -1,4 +1,4 @@
-package io.getquill.context.zio
+package io.getquill.context.qzio

 import java.util.UUID

diff --git a/quill-jasync-zio/src/main/scala/io/getquill/context/zio/UUIDStringEncoding.scala b/quill-jasync-zio/src/main/scala/io/getquill/context/qzio/UUIDStringEncoding.scala
similarity index 92%
rename from quill-jasync-zio/src/main/scala/io/getquill/context/zio/UUIDStringEncoding.scala
rename to quill-jasync-zio/src/main/scala/io/getquill/context/qzio/UUIDStringEncoding.scala
index 1b0c645a59..c92bb102c0 100644
--- a/quill-jasync-zio/src/main/scala/io/getquill/context/zio/UUIDStringEncoding.scala
+++ b/quill-jasync-zio/src/main/scala/io/getquill/context/qzio/UUIDStringEncoding.scala
@@ -1,4 +1,4 @@
-package io.getquill.context.zio
+package io.getquill.context.qzio

 import java.util.UUID

diff --git a/quill-jasync-zio/src/main/scala/io/getquill/context/zio/ZIOMonad.scala b/quill-jasync-zio/src/main/scala/io/getquill/context/qzio/ZIOMonad.scala
similarity index 98%
rename from quill-jasync-zio/src/main/scala/io/getquill/context/zio/ZIOMonad.scala
rename to quill-jasync-zio/src/main/scala/io/getquill/context/qzio/ZIOMonad.scala
index 6e4340f43a..4f8b2f80cf 100644
--- a/quill-jasync-zio/src/main/scala/io/getquill/context/zio/ZIOMonad.scala
+++ b/quill-jasync-zio/src/main/scala/io/getquill/context/qzio/ZIOMonad.scala
@@ -1,4 +1,4 @@
-package io.getquill.context.zio
+package io.getquill.context.qzio

 import io.getquill.context.Context
 import io.getquill.monad.{IOMonad, IOMonadMacro}
diff --git a/quill-jasync-zio/src/main/scala/io/getquill/context/zio/ZioJAsyncConnection.scala b/quill-jasync-zio/src/main/scala/io/getquill/context/qzio/ZioJAsyncConnection.scala
similarity index 90%
rename from quill-jasync-zio/src/main/scala/io/getquill/context/zio/ZioJAsyncConnection.scala
rename to quill-jasync-zio/src/main/scala/io/getquill/context/qzio/ZioJAsyncConnection.scala
index 603cd50689..5eaac3591a 100644
--- a/quill-jasync-zio/src/main/scala/io/getquill/context/zio/ZioJAsyncConnection.scala
+++ b/quill-jasync-zio/src/main/scala/io/getquill/context/qzio/ZioJAsyncConnection.scala
@@ -1,7 +1,7 @@
-package io.getquill.context.zio
+package io.getquill.context.qzio

-import com.github.jasync.sql.db.{ConcreteConnection, QueryResult}
 import com.github.jasync.sql.db.pool.{ConnectionPool => KConnectionPool}
+import com.github.jasync.sql.db.{ConcreteConnection, QueryResult}
 import zio.{RIO, Scope, Tag, Task, ZIO, ZLayer}

 import scala.jdk.CollectionConverters._
@@ -9,7 +9,7 @@ import scala.jdk.CollectionConverters._
 trait ZioJAsyncConnection {
   protected def takeConnection: ZIO[Scope, Throwable, ConcreteConnection]

-  private[zio] final def transaction[R <: ZioJAsyncConnection, A](action: RIO[R, A]): ZIO[R, Throwable, A] =
+  private[qzio] final def transaction[R <: ZioJAsyncConnection, A](action: RIO[R, A]): ZIO[R, Throwable, A] =
     // Taken from ConcreteConnectionBase.kt to avoid usage of pool.inTransaction
     ZIO.scoped[R] {
       takeConnection.flatMap(conn =>
@@ -21,12 +21,12 @@ trait ZioJAsyncConnection {
       )
     }

-  private[zio] final def sendQuery(query: String): Task[QueryResult] =
+  private[qzio] final def sendQuery(query: String): Task[QueryResult] =
     ZIO.scoped {
       takeConnection.flatMap(conn => ZIO.fromCompletableFuture(conn.sendQuery(query)))
     }

-  private[zio] final def sendPreparedStatement(sql: String, params: Seq[Any]): Task[QueryResult] =
+  private[qzio] final def sendPreparedStatement(sql: String, params: Seq[Any]): Task[QueryResult] =
     ZIO.scoped {
       takeConnection.flatMap(conn =>
         ZIO.fromCompletableFuture(
diff --git a/quill-jasync-zio/src/main/scala/io/getquill/context/zio/ZioJAsyncContext.scala b/quill-jasync-zio/src/main/scala/io/getquill/context/qzio/ZioJAsyncContext.scala
similarity index 99%
rename from quill-jasync-zio/src/main/scala/io/getquill/context/zio/ZioJAsyncContext.scala
rename to quill-jasync-zio/src/main/scala/io/getquill/context/qzio/ZioJAsyncContext.scala
index 64c85370b9..129f974f79 100644
--- a/quill-jasync-zio/src/main/scala/io/getquill/context/zio/ZioJAsyncContext.scala
+++ b/quill-jasync-zio/src/main/scala/io/getquill/context/qzio/ZioJAsyncContext.scala
@@ -1,4 +1,4 @@
-package io.getquill.context.zio
+package io.getquill.context.qzio

 import com.github.jasync.sql.db.{ConcreteConnection, QueryResult, RowData}
 import io.getquill.context.sql.SqlContext
diff --git a/quill-jdbc-zio/src/main/scala/io/getquill/ZioJdbcContexts.scala b/quill-jdbc-zio/src/main/scala/io/getquill/ZioJdbcContexts.scala
index f4c7c88a95..13fefdef31 100644
--- a/quill-jdbc-zio/src/main/scala/io/getquill/ZioJdbcContexts.scala
+++ b/quill-jdbc-zio/src/main/scala/io/getquill/ZioJdbcContexts.scala
@@ -11,8 +11,8 @@ import io.getquill.context.jdbc.{
   SqliteJdbcTypes
 }
 import io.getquill.context.sql.idiom.SqlIdiom
-import io.getquill.context.qzio.{ZioJdbcContext, ZioJdbcUnderlyingContext}
 import io.getquill.context.json.PostgresJsonExtensions
+import io.getquill.context.qzio.{ZioJdbcContext, ZioJdbcUnderlyingContext}
 import io.getquill.util.LoadConfig

 import javax.sql.DataSource
diff --git a/quill-jdbc-zio/src/main/scala/io/getquill/context/ZioJdbc.scala b/quill-jdbc-zio/src/main/scala/io/getquill/context/ZioJdbc.scala
index 871e6ecbc5..657114b28d 100644
--- a/quill-jdbc-zio/src/main/scala/io/getquill/context/ZioJdbc.scala
+++ b/quill-jdbc-zio/src/main/scala/io/getquill/context/ZioJdbc.scala
@@ -6,6 +6,7 @@ import io.getquill.util.{ContextLogger, LoadConfig}
 import io.getquill.jdbczio.Quill
 import zio.{Scope, ZEnvironment, ZIO, ZLayer}
 import zio.stream.ZStream
+import io.getquill.context.qzio.ImplicitSyntax.Implicit
 import izumi.reflect.Tag

 import java.io.Closeable
@@ -78,7 +79,6 @@ object ZioJdbc {
   }

   implicit class QuillZioDataSourceExt[T](qzio: ZIO[DataSource, Throwable, T]) {
-    import io.getquill.context.qzio.ImplicitSyntax._
     def implicitDS(implicit implicitEnv: Implicit[DataSource]): ZIO[Any, SQLException, T] =
       (for {
         q <- qzio.provideEnvironment(ZEnvironment(implicitEnv.env))
@@ -86,7 +86,6 @@ object ZioJdbc {
   }

   implicit class QuillZioSomeDataSourceExt[T, R](qzio: ZIO[DataSource with R, Throwable, T])(implicit tag: Tag[R]) {
-    import io.getquill.context.qzio.ImplicitSyntax._
     def implicitSomeDS(implicit implicitEnv: Implicit[DataSource]): ZIO[R, SQLException, T] =
       (for {
         r <- ZIO.environment[R]
@@ -98,8 +97,6 @@ object ZioJdbc {

   implicit class QuillZioExtPlain[T](qzio: ZIO[Connection, Throwable, T]) {

-    import io.getquill.context.qzio.ImplicitSyntax._
-
     def onDataSource: ZIO[DataSource, SQLException, T] =
       (for {
         q <- qzio.provideSomeLayer(Quill.Connection.acquireScoped)
diff --git a/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioJdbcContext.scala b/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioJdbcContext.scala
index 52c545b9d7..a084a44aa9 100644
--- a/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioJdbcContext.scala
+++ b/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioJdbcContext.scala
@@ -5,7 +5,6 @@ import io.getquill.context.jdbc.JdbcContextTypes
 import io.getquill.context.sql.idiom.SqlIdiom
 import io.getquill.context._
 import io.getquill.jdbczio.Quill
-import io.getquill.context.json.PostgresJsonExtensions
 import io.getquill.{NamingStrategy, ReturnAction}
 import zio.Exit.{Failure, Success}
 import zio.stream.ZStream
@@ -76,7 +75,9 @@ abstract class ZioJdbcContext[+Dialect <: SqlIdiom, +Naming <: NamingStrategy]

   val currentConnection: FiberRef[Option[Connection]] =
     Unsafe.unsafe { implicit u =>
-      Runtime.default.unsafe.run(zio.Scope.global.extend(FiberRef.make(Option.empty[java.sql.Connection]))).getOrThrow()
+      Runtime.default.unsafe
+        .run(zio.Scope.global.extend(FiberRef.make(Option.empty[java.sql.Connection])))
+        .getOrThrow()
     }

   lazy val underlying: ZioJdbcUnderlyingContext[Dialect, Naming] = connDelegate
diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ZioAppImplicitEnv.scala b/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ZioAppImplicitEnv.scala
index fdcd0e5a3f..58e56da29c 100644
--- a/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ZioAppImplicitEnv.scala
+++ b/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ZioAppImplicitEnv.scala
@@ -5,6 +5,7 @@ import io.getquill.context.qzio.ImplicitSyntax._
 import io.getquill.util.LoadConfig
 import zio.Console.printLine
 import zio.ZIOAppDefault
+
 import javax.sql.DataSource

 object ZioAppImplicitEnv extends ZIOAppDefault {
diff --git a/quill-spark/src/main/scala/io/getquill/QuillSparkContext.scala b/quill-spark/src/main/scala/io/getquill/QuillSparkContext.scala
index 71c3e1c022..396d7df08d 100644
--- a/quill-spark/src/main/scala/io/getquill/QuillSparkContext.scala
+++ b/quill-spark/src/main/scala/io/getquill/QuillSparkContext.scala
@@ -47,7 +47,7 @@ trait QuillSparkContext extends Context[SparkDialect, Literal] with Encoders wit
   val idiom = SparkDialect
   val naming = Literal

-  private implicit def datasetEncoder[T] =
+  private implicit def datasetEncoder[T]: (Index, Dataset[T], List[Binding], ResultRow) => List[Binding] =
     (idx: Int, ds: Dataset[T], row: List[Binding], session: Session) => row :+ DatasetBinding(ds)

   def liftQuery[T](ds: Dataset[T]) =
diff --git a/quill-spark/src/main/scala/io/getquill/context/spark/Decoders.scala b/quill-spark/src/main/scala/io/getquill/context/spark/Decoders.scala
index 095236a015..9217ea26ec 100644
--- a/quill-spark/src/main/scala/io/getquill/context/spark/Decoders.scala
+++ b/quill-spark/src/main/scala/io/getquill/context/spark/Decoders.scala
@@ -9,8 +9,8 @@ trait Decoders {
   type Decoder[T] = BaseDecoder[T]
   type ResultRow = Unit

-  implicit def dummyDecoder[T] =
-    (idx: Int, row: ResultRow, session: Session) => Messages.fail("quill decoders are not used for spark")
+  implicit def dummyDecoder[T]: (Index, ResultRow, ResultRow) => Nothing =
+    (_: Int, _: ResultRow, _: Session) => Messages.fail("quill decoders are not used for spark")

   implicit def mappedDecoder[I, O](implicit mapped: MappedEncoding[I, O], decoder: Decoder[I]): Decoder[O] =
     dummyDecoder[O]
diff --git a/version.sbt b/version.sbt
deleted file mode 100644
index 5529f058b7..0000000000
--- a/version.sbt
+++ /dev/null
@@ -1 +0,0 @@
-ThisBuild / version := "4.6.1"
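Note for downstream users (not part of the patch itself): the jasync modules only move their classes from the `io.getquill.context.zio` package to `io.getquill.context.qzio`; class names and constructors are unchanged, so application code normally needs nothing beyond an import update. A minimal sketch, with the hypothetical object name `MyContext` standing in for user code:

  // Before this change the import was:
  //   import io.getquill.context.zio.PostgresZioJAsyncContext
  // After it, only the package segment changes from `zio` to `qzio`.
  import io.getquill.Literal
  import io.getquill.context.qzio.PostgresZioJAsyncContext

  // Hypothetical application context, analogous to the TestContext touched above;
  // the constructor still takes a naming strategy such as Literal.
  object MyContext extends PostgresZioJAsyncContext(Literal)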