From cc6de551676e8397e54c849438d3d65663918685 Mon Sep 17 00:00:00 2001 From: jules Ivanic Date: Sat, 3 Jun 2023 12:07:19 +0400 Subject: [PATCH] Drop `ndbc` support --- build.sbt | 50 +---- docs/changelog.md | 1 + .../getquill/PostgresMonixNdbcContext.scala | 22 --- .../context/monix/MonixNdbcContext.scala | 187 ------------------ .../src/test/resources/application.conf | 6 - .../test/scala/io/getquill/MonixSpec.scala | 18 -- .../monix/MonixNdbcContextEffectSpec.scala | 63 ------ .../postgres/MonixNdbcContextSpec.scala | 136 ------------- .../postgres/PeopleMonixNdbcSpec.scala | 85 -------- .../getquill/postgres/ProductNdbcSpec.scala | 84 -------- .../scala/io/getquill/postgres/package.scala | 13 -- quill-ndbc-postgres/project/build.properties | 1 - .../io/getquill/PostgresNdbcContext.scala | 17 -- .../context/ndbc/PostgresDecoders.scala | 98 --------- .../context/ndbc/PostgresEncoders.scala | 93 --------- .../ndbc/PostgresNdbcContextBase.scala | 27 --- .../src/test/resources/application.conf | 6 - .../src/test/resources/logback.xml | 16 -- .../ArrayNdbcPostgresEncodingSpec.scala | 29 --- .../CaseClassQueryNdbcPostgresSpec.scala | 49 ----- .../DepartmentsNdbcPostgresSpec.scala | 34 ---- .../postgres/NdbcPostgresEncodingSpec.scala | 107 ---------- .../postgres/OptionalNestedNdbcSpec.scala | 95 --------- .../postgres/PeopleNdbcPostgresSpec.scala | 62 ------ .../postgres/PeopleNdbcReturningSpec.scala | 73 ------- .../postgres/PostgresNdbcContextSpec.scala | 85 -------- .../postgres/ProductNdbcPostgresSpec.scala | 94 --------- .../QueryResultTypeNdbcPostgresSpec.scala | 107 ---------- .../context/ndbc/postgres/TestContext.scala | 16 -- .../context/ndbc/postgres/package.scala | 5 - .../getquill/context/ndbc/NdbcContext.scala | 115 ----------- .../context/ndbc/NdbcContextBase.scala | 170 ---------------- .../context/ndbc/NdbcContextConfig.scala | 25 --- .../getquill/ndbc/TraneFutureConverters.scala | 33 ---- .../context/ndbc/NdbcContextEffectSpec.scala | 63 ------ 35 files changed, 4 insertions(+), 2081 deletions(-) delete mode 100644 quill-ndbc-monix/src/main/scala/io/getquill/PostgresMonixNdbcContext.scala delete mode 100644 quill-ndbc-monix/src/main/scala/io/getquill/context/monix/MonixNdbcContext.scala delete mode 100644 quill-ndbc-monix/src/test/resources/application.conf delete mode 100644 quill-ndbc-monix/src/test/scala/io/getquill/MonixSpec.scala delete mode 100644 quill-ndbc-monix/src/test/scala/io/getquill/context/monix/MonixNdbcContextEffectSpec.scala delete mode 100644 quill-ndbc-monix/src/test/scala/io/getquill/postgres/MonixNdbcContextSpec.scala delete mode 100644 quill-ndbc-monix/src/test/scala/io/getquill/postgres/PeopleMonixNdbcSpec.scala delete mode 100644 quill-ndbc-monix/src/test/scala/io/getquill/postgres/ProductNdbcSpec.scala delete mode 100644 quill-ndbc-monix/src/test/scala/io/getquill/postgres/package.scala delete mode 100644 quill-ndbc-postgres/project/build.properties delete mode 100644 quill-ndbc-postgres/src/main/scala/io/getquill/PostgresNdbcContext.scala delete mode 100644 quill-ndbc-postgres/src/main/scala/io/getquill/context/ndbc/PostgresDecoders.scala delete mode 100644 quill-ndbc-postgres/src/main/scala/io/getquill/context/ndbc/PostgresEncoders.scala delete mode 100644 quill-ndbc-postgres/src/main/scala/io/getquill/context/ndbc/PostgresNdbcContextBase.scala delete mode 100644 quill-ndbc-postgres/src/test/resources/application.conf delete mode 100644 quill-ndbc-postgres/src/test/resources/logback.xml delete mode 100644 
quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/ArrayNdbcPostgresEncodingSpec.scala delete mode 100644 quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/CaseClassQueryNdbcPostgresSpec.scala delete mode 100644 quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/DepartmentsNdbcPostgresSpec.scala delete mode 100644 quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/NdbcPostgresEncodingSpec.scala delete mode 100644 quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/OptionalNestedNdbcSpec.scala delete mode 100644 quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/PeopleNdbcPostgresSpec.scala delete mode 100644 quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/PeopleNdbcReturningSpec.scala delete mode 100644 quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/PostgresNdbcContextSpec.scala delete mode 100644 quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/ProductNdbcPostgresSpec.scala delete mode 100644 quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/QueryResultTypeNdbcPostgresSpec.scala delete mode 100644 quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/TestContext.scala delete mode 100644 quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/package.scala delete mode 100644 quill-ndbc/src/main/scala/io/getquill/context/ndbc/NdbcContext.scala delete mode 100644 quill-ndbc/src/main/scala/io/getquill/context/ndbc/NdbcContextBase.scala delete mode 100644 quill-ndbc/src/main/scala/io/getquill/context/ndbc/NdbcContextConfig.scala delete mode 100644 quill-ndbc/src/main/scala/io/getquill/ndbc/TraneFutureConverters.scala delete mode 100644 quill-ndbc/src/test/scala/io/getquill/context/ndbc/NdbcContextEffectSpec.scala diff --git a/build.sbt b/build.sbt index 38f041d05d..65b1de9029 100644 --- a/build.sbt +++ b/build.sbt @@ -78,12 +78,6 @@ lazy val jasyncModules = Seq[sbt.ClasspathDep[sbt.ProjectReference]]( `quill-jasync-zio-postgres` ) -lazy val asyncModules = Seq[sbt.ClasspathDep[sbt.ProjectReference]]( - `quill-ndbc`, - `quill-ndbc-postgres`, - `quill-ndbc-monix` -) ++ jasyncModules - lazy val codegenModules = Seq[sbt.ClasspathDep[sbt.ProjectReference]]( `quill-codegen`, `quill-codegen-jdbc`, @@ -100,7 +94,7 @@ lazy val bigdataModules = Seq[sbt.ClasspathDep[sbt.ProjectReference]]( ) lazy val allModules = - baseModules ++ jsModules ++ dbModules ++ asyncModules ++ codegenModules ++ bigdataModules ++ docsModules + baseModules ++ jsModules ++ dbModules ++ jasyncModules ++ codegenModules ++ bigdataModules ++ docsModules lazy val scala213Modules = baseModules ++ jsModules ++ dbModules ++ codegenModules ++ Seq[sbt.ClasspathDep[sbt.ProjectReference]]( @@ -159,13 +153,13 @@ lazy val filteredModules = { dbModules case "async" => println("SBT =:> Compiling Async Database Modules") - asyncModules + jasyncModules case "codegen" => println("SBT =:> Compiling Code Generator Modules") codegenModules case "nocodegen" => println("Compiling Not-Code Generator Modules") - baseModules ++ jsModules ++ dbModules ++ asyncModules ++ bigdataModules + baseModules ++ jsModules ++ dbModules ++ jasyncModules ++ bigdataModules case "bigdata" => println("SBT =:> Compiling Big Data Modules") bigdataModules @@ -523,18 +517,6 @@ lazy val `quill-jdbc-zio` = .dependsOn(`quill-jdbc` % "compile->compile;test->test") .enablePlugins(MimaPlugin) -lazy val `quill-ndbc-monix` = - (project in file("quill-ndbc-monix")) - 
.settings(commonSettings: _*) - .settings( - Test / fork := true - ) - .dependsOn(`quill-monix` % "compile->compile;test->test") - .dependsOn(`quill-sql-jvm` % "compile->compile;test->test") - .dependsOn(`quill-ndbc` % "compile->compile;test->test") - .dependsOn(`quill-ndbc-postgres` % "compile->compile;test->test") - .enablePlugins(MimaPlugin) - lazy val `quill-spark` = (project in file("quill-spark")) .settings(commonNoLogSettings: _*) @@ -611,32 +593,6 @@ lazy val `quill-jasync-zio-postgres` = .dependsOn(`quill-jasync-zio` % "compile->compile;test->test") .enablePlugins(MimaPlugin) -lazy val `quill-ndbc` = - (project in file("quill-ndbc")) - .settings(commonSettings: _*) - .settings( - Test / fork := true, - libraryDependencies ++= Seq( - "io.trane" % "future-scala" % "0.3.2", - "io.trane" % "ndbc-core" % "0.1.3" - ) - ) - .dependsOn(`quill-sql-jvm` % "compile->compile;test->test") - .enablePlugins(MimaPlugin) - -lazy val `quill-ndbc-postgres` = - (project in file("quill-ndbc-postgres")) - .settings(commonSettings: _*) - .settings( - Test / fork := true, - libraryDependencies ++= Seq( - "io.trane" % "future-scala" % "0.3.2", - "io.trane" % "ndbc-postgres-netty4" % "0.1.3" - ) - ) - .dependsOn(`quill-ndbc` % "compile->compile;test->test") - .enablePlugins(MimaPlugin) - lazy val `quill-cassandra` = (project in file("quill-cassandra")) .settings(commonSettings: _*) diff --git a/docs/changelog.md b/docs/changelog.md index ab4c827ee4..1b0b76d7f5 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -7,6 +7,7 @@ title: "Quill Changelog" - [Update Scala 2.12 && Drop `quill-finagle-mysql` and `quill-finagle-postgres` modules](https://github.com/zio/zio-quill/pull/2756) - [Update Scala3 version to `3.3.0`](https://github.com/zio/zio-quill/pull/2759) +- [Drop `ndbc` support](https://github.com/zio/zio-quill/pull/2760) # 4.6.1 diff --git a/quill-ndbc-monix/src/main/scala/io/getquill/PostgresMonixNdbcContext.scala b/quill-ndbc-monix/src/main/scala/io/getquill/PostgresMonixNdbcContext.scala deleted file mode 100644 index 5fd9296e8e..0000000000 --- a/quill-ndbc-monix/src/main/scala/io/getquill/PostgresMonixNdbcContext.scala +++ /dev/null @@ -1,22 +0,0 @@ -package io.getquill - -import com.typesafe.config.Config -import io.getquill.context.monix.MonixNdbcContext -import io.getquill.context.monix.MonixNdbcContext.Runner -import io.getquill.context.ndbc.{NdbcContextConfig, PostgresNdbcContextBase} -import io.getquill.util.LoadConfig -import io.trane.ndbc.{DataSource, PostgresDataSource, PostgresPreparedStatement, PostgresRow} - -class PostgresMonixNdbcContext[+N <: NamingStrategy]( - val naming: N, - val dataSource: DataSource[PostgresPreparedStatement, PostgresRow], - runner: Runner -) extends MonixNdbcContext[PostgresDialect, N, PostgresPreparedStatement, PostgresRow](dataSource, runner) - with PostgresNdbcContextBase[N] { - - def this(naming: N, config: NdbcContextConfig, runner: Runner) = - this(naming, PostgresDataSource.create(config.dataSource), runner) - def this(naming: N, config: Config, runner: Runner) = this(naming, NdbcContextConfig(config), runner) - def this(naming: N, configPrefix: String, runner: Runner) = this(naming, LoadConfig(configPrefix), runner) - def this(naming: N, configPrefix: String) = this(naming, LoadConfig(configPrefix), Runner.default) -} diff --git a/quill-ndbc-monix/src/main/scala/io/getquill/context/monix/MonixNdbcContext.scala b/quill-ndbc-monix/src/main/scala/io/getquill/context/monix/MonixNdbcContext.scala deleted file mode 100644 index 9e2e55c9e3..0000000000 
--- a/quill-ndbc-monix/src/main/scala/io/getquill/context/monix/MonixNdbcContext.scala +++ /dev/null @@ -1,187 +0,0 @@ -package io.getquill.context.monix - -import java.sql.{Array => _} -import io.getquill.context.{ExecutionInfo, ContextVerbStream} -import io.getquill.context.monix.MonixNdbcContext.Runner -import io.getquill.context.ndbc.NdbcContextBase -import io.getquill.context.sql.idiom.SqlIdiom -import io.getquill.ndbc.TraneFutureConverters -import io.getquill.ndbc.TraneFutureConverters._ -import io.getquill.util.ContextLogger -import io.getquill.{NamingStrategy, ReturnAction} -import io.trane.future.scala.Future -import io.trane.ndbc.{DataSource, PreparedStatement, Row} -import monix.eval.Task -import monix.execution.Scheduler -import monix.reactive.Observable - -import scala.concurrent.Promise -import scala.concurrent.duration.Duration - -object MonixNdbcContext { - trait Runner { - - /** - * The schedule method can be used to change the scheduler that this task - * should be run on - */ - def schedule[T](t: Task[T]): Task[T] - - /** - * The schedule method can be used to change the scheduler that this - * observable should be observed on - */ - def schedule[T](o: Observable[T]): Observable[T] - } - - object Runner { - def default = new Runner { - override def schedule[T](t: Task[T]): Task[T] = t - override def schedule[T](o: Observable[T]): Observable[T] = o - } - - def using(scheduler: Scheduler) = new Runner { - override def schedule[T](t: Task[T]): Task[T] = t.executeOn(scheduler, forceAsync = true) - - override def schedule[T](o: Observable[T]): Observable[T] = o.executeOn(scheduler, forceAsync = true) - } - } - - object ContextEffect extends NdbcContextBase.ContextEffect[Task, Scheduler] { - override def wrapAsync[T](f: (Complete[T]) => Unit): Task[T] = Task.deferFuture { - val p = Promise[T]() - f { complete => - p.complete(complete) - () - } - p.future - } - - override def toFuture[T](eff: Task[T], ec: Scheduler): Future[T] = - TraneFutureConverters.scalaToTraneScala(eff.runToFuture(ec))(ec) - - override def fromDeferredFuture[T](f: Scheduler => Future[T]): Task[T] = Task.deferFutureAction(f(_)) - - override def flatMap[A, B](a: Task[A])(f: A => Task[B]): Task[B] = a.flatMap(f) - - override def runBlocking[T](eff: Task[T], timeout: Duration): T = { - import monix.execution.Scheduler.Implicits.global - eff.runSyncUnsafe(timeout) - } - - override def wrap[T](t: => T): Task[T] = Task.apply(t) - - override def push[A, B](fa: Task[A])(f: A => B): Task[B] = fa.map(f) - - override def seq[T](f: List[Task[T]]): Task[List[T]] = Task.sequence(f) - } -} - -/** - * Quill context that wraps all NDBC calls in `monix.eval.Task`. 
- */ -abstract class MonixNdbcContext[+Dialect <: SqlIdiom, +Naming <: NamingStrategy, P <: PreparedStatement, R <: Row]( - dataSource: DataSource[P, R], - runner: Runner -) extends MonixContext[Dialect, Naming] - with NdbcContextBase[Dialect, Naming, P, R] - with ContextVerbStream[Dialect, Naming] - with MonixTranslateContext { - - import runner._ - - override private[getquill] val logger = ContextLogger(classOf[MonixNdbcContext[_, _, _, _]]) - - override type RunActionResult = Long - override type RunActionReturningResult[T] = T - override type RunBatchActionResult = List[Long] - override type RunBatchActionReturningResult[T] = List[T] - - override implicit protected val resultEffect: NdbcContextBase.ContextEffect[Task, Scheduler] = - MonixNdbcContext.ContextEffect - - def close(): Unit = { - dataSource.close() - () - } - - // Need explicit return-type annotations due to scala/bug#8356. Otherwise macro system will not understand Result[Long]=Task[Long] etc... - override def executeAction(sql: String, prepare: Prepare = identityPrepare)( - info: ExecutionInfo, - dc: Runner - ): Task[Long] = - super.executeAction(sql, prepare)(info, dc) - - override def executeQuery[T]( - sql: String, - prepare: Prepare = identityPrepare, - extractor: Extractor[T] = identityExtractor - )(info: ExecutionInfo, dc: Runner): Task[List[T]] = - super.executeQuery(sql, prepare, extractor)(info, dc) - - override def executeQuerySingle[T]( - sql: String, - prepare: Prepare = identityPrepare, - extractor: Extractor[T] = identityExtractor - )(info: ExecutionInfo, dc: Runner): Task[T] = - super.executeQuerySingle(sql, prepare, extractor)(info, dc) - - override def executeActionReturning[O]( - sql: String, - prepare: Prepare = identityPrepare, - extractor: Extractor[O], - returningBehavior: ReturnAction - )(info: ExecutionInfo, dc: Runner): Task[O] = - super.executeActionReturning(sql, prepare, extractor, returningBehavior)(info, dc) - - override def executeActionReturningMany[O]( - sql: String, - prepare: Prepare = identityPrepare, - extractor: Extractor[O], - returningBehavior: ReturnAction - )(info: ExecutionInfo, dc: Runner): Task[List[O]] = - super.executeActionReturningMany(sql, prepare, extractor, returningBehavior)(info, dc) - - override def executeBatchAction(groups: List[BatchGroup])(info: ExecutionInfo, dc: Runner): Task[List[Long]] = - super.executeBatchAction(groups)(info, dc) - - override def executeBatchActionReturning[T](groups: List[BatchGroupReturning], extractor: Extractor[T])( - info: ExecutionInfo, - dc: Runner - ): Task[List[T]] = - super.executeBatchActionReturning(groups, extractor)(info, dc) - - override def transaction[T](f: => Task[T]): Task[T] = super.transaction(f) - - override protected def withDataSource[T](f: DataSource[P, R] => Task[T]): Task[T] = - schedule(f(dataSource)) - - protected def withDataSourceObservable[T](f: DataSource[P, R] => Observable[T]): Observable[T] = - schedule(f(dataSource)) - - // TODO: What about fetchSize? Not really applicable here - def streamQuery[T]( - fetchSize: Option[Index], - sql: String, - prepare: Prepare = identityPrepare, - extractor: Extractor[T] = identityExtractor - )(info: ExecutionInfo, dc: Runner): Observable[T] = - Observable.eval { - // TODO: Do we need to set ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY? 
- val stmt = createPreparedStatement(sql) - val (params, ps) = prepare(stmt, ()) - logger.logQuery(sql, params) - ps - } - .flatMap(ps => - withDataSourceObservable { ds => - Observable.fromReactivePublisher(ds.stream(ps)) - } - ) - .map(row => extractor(row, ())) - - override private[getquill] def prepareParams(statement: String, prepare: Prepare): Task[Seq[String]] = - withDataSource { _ => - resultEffect.wrap(prepare(createPreparedStatement(statement), ())._1.reverse.map(prepareParam)) - } -} diff --git a/quill-ndbc-monix/src/test/resources/application.conf b/quill-ndbc-monix/src/test/resources/application.conf deleted file mode 100644 index f56c384e41..0000000000 --- a/quill-ndbc-monix/src/test/resources/application.conf +++ /dev/null @@ -1,6 +0,0 @@ -testPostgresDB.ndbc.dataSourceSupplierClass=io.trane.ndbc.postgres.netty4.DataSourceSupplier -testPostgresDB.ndbc.host=${?POSTGRES_HOST} -testPostgresDB.ndbc.port=${?POSTGRES_PORT} -testPostgresDB.ndbc.user=postgres -testPostgresDB.ndbc.password=postgres -testPostgresDB.ndbc.database=quill_test diff --git a/quill-ndbc-monix/src/test/scala/io/getquill/MonixSpec.scala b/quill-ndbc-monix/src/test/scala/io/getquill/MonixSpec.scala deleted file mode 100644 index a9d36a09b7..0000000000 --- a/quill-ndbc-monix/src/test/scala/io/getquill/MonixSpec.scala +++ /dev/null @@ -1,18 +0,0 @@ -package io.getquill - -import io.getquill.base.Spec -import io.getquill.context.monix.MonixNdbcContext -import monix.execution.Scheduler -import monix.reactive.Observable - -trait MonixSpec extends Spec { - implicit val scheduler = Scheduler.global - - val context: MonixNdbcContext[_, _, _, _] with TestEntities - - def accumulate[T](o: Observable[T]) = - o.foldLeft(List[T]()) { case (l, elem) => elem +: l }.firstL - - def collect[T](o: Observable[T]) = - accumulate(o).runSyncUnsafe() -} diff --git a/quill-ndbc-monix/src/test/scala/io/getquill/context/monix/MonixNdbcContextEffectSpec.scala b/quill-ndbc-monix/src/test/scala/io/getquill/context/monix/MonixNdbcContextEffectSpec.scala deleted file mode 100644 index e865520943..0000000000 --- a/quill-ndbc-monix/src/test/scala/io/getquill/context/monix/MonixNdbcContextEffectSpec.scala +++ /dev/null @@ -1,63 +0,0 @@ -package io.getquill.context.monix - -import io.getquill.base.Spec -import scala.concurrent.duration.Duration -import io.trane.future.scala.{Await, Future} -import io.getquill.ndbc.TraneFutureConverters._ -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global - -import scala.util.Try - -class MonixNdbcContextEffectSpec extends Spec { - import MonixNdbcContext.ContextEffect._ - - def await[T](t: Task[T]): T = { - val f: Future[T] = t.runToFuture - Await.result[T](f, Duration.Inf) - } - - "evaluates simple values" in { - val task = wrap("He-man") - await(task) mustEqual "He-man" - } - - "evaluates asynchronous values" in { - await(wrapAsync[String] { doComplete => - Future(Thread.sleep(100)).onComplete { _ => - doComplete(Try("hello")) - () - } - }) mustEqual "hello" - } - - "encapsulates exception throw" in { - val task = wrap(throw new RuntimeException("Surprise!")).failed - await(task).getMessage mustEqual "Surprise!" 
- } - - "pushes an effect correctly" in { - await(push(Task(1))(_ + 1)) mustEqual 2 - } - - "executes effects in sequence" in { - await(wrap(2).flatMap(prev => wrap(prev + 3))) mustEqual 5 - } - - "converts a sequence correctly" in { - await(seq(List(Task(1), Task(2), Task(3)))) mustEqual List(1, 2, 3) - } - - "converts to Scala Future correctly" in { - Await.result(toFuture(Task("hello"), monix.execution.Scheduler.Implicits.global), Duration.Inf) mustEqual "hello" - } - - "creates Future from deferred Future" in { - val f = fromDeferredFuture((_) => Future.successful("hello")) - await(f) mustEqual "hello" - } - - "runs blockingly" in { - runBlocking(Task { Thread.sleep(100); "foo" }, Duration.Inf) mustEqual "foo" - } -} diff --git a/quill-ndbc-monix/src/test/scala/io/getquill/postgres/MonixNdbcContextSpec.scala b/quill-ndbc-monix/src/test/scala/io/getquill/postgres/MonixNdbcContextSpec.scala deleted file mode 100644 index b30da88cfd..0000000000 --- a/quill-ndbc-monix/src/test/scala/io/getquill/postgres/MonixNdbcContextSpec.scala +++ /dev/null @@ -1,136 +0,0 @@ -package io.getquill.postgres - -import io.getquill.MonixSpec -import monix.eval.Task - -import scala.concurrent.duration._ -import scala.language.postfixOps - -class MonixNdbcContextSpec extends MonixSpec { - - val context = testContext - import testContext._ - - "without transaction" - { - "does simple insert" in { - (for { - _ <- testContext.run(qr1.delete) - _ <- testContext.run(qr1.insert(_.i -> 33)) - r <- testContext.run(qr1) - } yield r).runSyncUnsafe(10 seconds).map(_.i) mustEqual List(33) - } - - "streams empty result set" in { - (for { - _ <- testContext.run(qr1.delete) - s <- accumulate(testContext.stream(qr1)) - } yield s).runSyncUnsafe(10 seconds).map(_.i) mustEqual List() - } - - "streams single result" in { - (for { - _ <- testContext.run(qr1.delete) - _ <- testContext.run(qr1.insert(_.i -> 666)) - s <- accumulate(testContext.stream(qr1)) - } yield s).runSyncUnsafe(10 seconds).map(_.i) mustEqual List(666) - } - - "streams multiple results" in { - (for { - _ <- testContext.run(qr1.delete) - _ <- testContext.run(liftQuery(List(1, 2, 3, 4)).foreach(n => qr1.insert(_.i -> n))) - s <- accumulate(testContext.stream(qr1)) - } yield s).runSyncUnsafe(10 seconds).map(_.i) mustEqual List(4, 3, 2, 1) - // Caution: NDBC streams result in reverse order compared to JDBC. But users can not expect a specific order anyway. - } - } - - "with transaction" - { - "does simple insert" in { - (for { - _ <- testContext.run(qr1.delete) - _ <- testContext.transaction { - testContext.run(qr1.insert(_.i -> 33)) - } - r <- testContext.run(qr1) - } yield r).runSyncUnsafe(10 seconds).map(_.i) mustEqual List(33) - } - - /* Ignore because apparently NDBC streaming doesn't work with transactions, as these tests - show. https://github.com/traneio/ndbc/blob/1f37baf4815a90299842afeb3c710a80b86ef9d6/ndbc-core/src/main/java/io/trane/ndbc/datasource/PooledDataSource.java#L49 - */ - "streams empty result set" ignore { - (for { - _ <- testContext.run(qr1.delete) - s <- testContext.transaction(accumulate(testContext.stream(qr1))) - } yield s).runSyncUnsafe(10 seconds).map(_.i) mustEqual List() - } - - /* Ignore because apparently NDBC streaming doesn't work with transactions, as these tests - show. 
https://github.com/traneio/ndbc/blob/1f37baf4815a90299842afeb3c710a80b86ef9d6/ndbc-core/src/main/java/io/trane/ndbc/datasource/PooledDataSource.java#L49 - */ - "streams single result" ignore { - (for { - _ <- testContext.run(qr1.delete) - s <- testContext.transaction(for { - _ <- testContext.run(qr1.insert(_.i -> 33)) - s <- accumulate(testContext.stream(qr1)) - } yield s) - } yield s).runSyncUnsafe(10 seconds).map(_.i) mustEqual List(33) - } - - /* Ignore because apparently NDBC streaming doesn't work with transactions, as these tests - show. https://github.com/traneio/ndbc/blob/1f37baf4815a90299842afeb3c710a80b86ef9d6/ndbc-core/src/main/java/io/trane/ndbc/datasource/PooledDataSource.java#L49 - */ - "streams multiple results" ignore { - (for { - _ <- testContext.run(qr1.delete) - s <- testContext.transaction(for { - _ <- testContext.run(liftQuery(List(1, 2, 3, 4)).foreach(n => qr1.insert(_.i -> n))) - s <- accumulate(testContext.stream(qr1)) - } yield s) - } yield s).runSyncUnsafe(10 seconds).map(_.i) mustEqual List(4, 3, 2, 1) - // Caution: NDBC streams result in reverse order compared to JDBC. But users can not expect a specific order anyway. - } - - "reverts when failed" in { - (for { - _ <- testContext.run(qr1.delete) - e <- testContext.transaction { - Task.sequence( - Seq( - testContext.run(qr1.insert(_.i -> 18)), - Task.eval { - throw new IllegalStateException - } - ) - ) - }.onErrorHandleWith { case e: Exception => - Task(e.getClass.getSimpleName) - } - r <- testContext.run(qr1) - } yield (e, r.isEmpty)).runSyncUnsafe(10 seconds) mustEqual (("IllegalStateException", true)) - } - - "supports nested transactions" in { - (for { - _ <- testContext.run(qr1.delete) - _ <- testContext.transaction { - testContext.transaction { - testContext.run(qr1.insert(_.i -> 33)) - } - } - r <- testContext.run(qr1) - } yield r).runSyncUnsafe(10 seconds).map(_.i) mustEqual List(33) - } - - "prepare" in { - testContext - .prepareParams( - "select * from Person where name=? 
and age > ?", - (ps, session) => (List("Sarah", 127), ps) - ) - .runSyncUnsafe() mustEqual List("127", "'Sarah'") - } - } -} diff --git a/quill-ndbc-monix/src/test/scala/io/getquill/postgres/PeopleMonixNdbcSpec.scala b/quill-ndbc-monix/src/test/scala/io/getquill/postgres/PeopleMonixNdbcSpec.scala deleted file mode 100644 index 8fe30d91c8..0000000000 --- a/quill-ndbc-monix/src/test/scala/io/getquill/postgres/PeopleMonixNdbcSpec.scala +++ /dev/null @@ -1,85 +0,0 @@ -package io.getquill.postgres - -import io.getquill.context.sql.base.PeopleSpec -import monix.execution.Scheduler -import monix.reactive.Observable -import org.scalatest.matchers.should.Matchers._ - -class PeopleMonixNdbcSpec extends PeopleSpec { - - implicit val scheduler = Scheduler.global - - val context = testContext - import context._ - - def collect[T](o: Observable[T]) = - o.foldLeft(List[T]()) { case (l, elem) => elem +: l } - .firstL - .runSyncUnsafe() - - override def beforeAll = - testContext.transaction { - for { - _ <- testContext.run(query[Couple].delete) - _ <- testContext.run(query[Person].delete) - _ <- testContext.run(liftQuery(peopleEntries).foreach(p => peopleInsert(p))) - _ <- testContext.run(liftQuery(couplesEntries).foreach(p => couplesInsert(p))) - } yield () - }.runSyncUnsafe() - - val `Ex 11 query` = quote(query[Person]) - val `Ex 11 expected` = peopleEntries - - "Example 1 - differences" in { - testContext.run(`Ex 1 differences`).runSyncUnsafe() should contain theSameElementsAs `Ex 1 expected result` - } - - "Example 2 - range simple" in { - testContext - .run(`Ex 2 rangeSimple`(lift(`Ex 2 param 1`), lift(`Ex 2 param 2`))) - .runSyncUnsafe() should contain theSameElementsAs `Ex 2 expected result` - } - - "Example 3 - satisfies" in { - testContext.run(`Ex 3 satisfies`).runSyncUnsafe() should contain theSameElementsAs `Ex 3 expected result` - } - - "Example 4 - satisfies" in { - testContext.run(`Ex 4 satisfies`).runSyncUnsafe() should contain theSameElementsAs `Ex 4 expected result` - } - - "Example 5 - compose" in { - testContext - .run(`Ex 5 compose`(lift(`Ex 5 param 1`), lift(`Ex 5 param 2`))) - .runSyncUnsafe() mustEqual `Ex 5 expected result` - } - - "Example 6 - predicate 0" in { - testContext.run(satisfies(eval(`Ex 6 predicate`))).runSyncUnsafe() mustEqual `Ex 6 expected result` - } - - "Example 7 - predicate 1" in { - testContext.run(satisfies(eval(`Ex 7 predicate`))).runSyncUnsafe() mustEqual `Ex 7 expected result` - } - - "Example 8 - contains empty" in { - testContext.run(`Ex 8 and 9 contains`(liftQuery(`Ex 8 param`))).runSyncUnsafe() mustEqual `Ex 8 expected result` - } - - "Example 9 - contains non empty" in { - testContext.run(`Ex 8 and 9 contains`(liftQuery(`Ex 9 param`))).runSyncUnsafe() mustEqual `Ex 9 expected result` - } - - "Example 10 - pagination" in { - testContext.run(`Ex 10 page 1 query`).runSyncUnsafe() mustEqual `Ex 10 page 1 expected` - testContext.run(`Ex 10 page 2 query`).runSyncUnsafe() mustEqual `Ex 10 page 2 expected` - } - - "Example 11 - streaming" in { - collect(testContext.stream(`Ex 11 query`)) should contain theSameElementsAs `Ex 11 expected` - } - - "Example 12 - probe" in { - probe("select 1").toOption mustBe defined - } -} diff --git a/quill-ndbc-monix/src/test/scala/io/getquill/postgres/ProductNdbcSpec.scala b/quill-ndbc-monix/src/test/scala/io/getquill/postgres/ProductNdbcSpec.scala deleted file mode 100644 index d625e1fccc..0000000000 --- a/quill-ndbc-monix/src/test/scala/io/getquill/postgres/ProductNdbcSpec.scala +++ /dev/null @@ -1,84 +0,0 @@ -package 
io.getquill.postgres - -import io.getquill.context.sql.ProductSpec -import monix.execution.Scheduler - -class ProductNdbcSpec extends ProductSpec { - - val context = testContext - import testContext._ - - implicit val scheduler = Scheduler.global - - override def beforeAll = { - testContext.run(quote(query[Product].delete)).runSyncUnsafe() - () - } - - "Product" - { - "Insert multiple products" in { - val (inserted, product) = - (for { - i <- testContext.run(liftQuery(productEntries).foreach(e => productInsert(e))) - ps <- testContext.run(productById(lift(i(2)))) - } yield (i, ps.head)).runSyncUnsafe() - - product.description mustEqual productEntries(2).description - product.id mustEqual inserted(2) - } - - "Single insert product" in { - val (inserted, product) = - (for { - i <- testContext.run(productSingleInsert) - ps <- testContext.run(productById(lift(i))) - } yield (i, ps.head)).runSyncUnsafe() - product.description mustEqual "Window" - product.id mustEqual inserted - } - - "Single insert with inlined free variable" in { - val prd = Product(0L, "test1", 1L) - val (inserted, returnedProduct) = - (for { - i <- testContext.run { - product.insert(_.sku -> lift(prd.sku), _.description -> lift(prd.description)).returning(_.id) - } - rps <- testContext.run(productById(lift(i))) - } yield (i, rps.head)).runSyncUnsafe() - - returnedProduct.description mustEqual "test1" - returnedProduct.sku mustEqual 1L - returnedProduct.id mustEqual inserted - } - - "Single insert with free variable and explicit quotation" in { - val prd = Product(0L, "test2", 2L) - val q1 = quote { - product.insert(_.sku -> lift(prd.sku), _.description -> lift(prd.description)).returning(_.id) - } - val (inserted, returnedProduct) = - (for { - i <- testContext.run(q1) - rps <- testContext.run(productById(lift(i))) - } yield (i, rps.head)).runSyncUnsafe() - - returnedProduct.description mustEqual "test2" - returnedProduct.sku mustEqual 2L - returnedProduct.id mustEqual inserted - } - - "Single product insert with a method quotation" in { - val prd = Product(0L, "test3", 3L) - val (inserted, returnedProduct) = - (for { - i <- testContext.run(productInsert(lift(prd))) - rps <- testContext.run(productById(lift(i))) - } yield (i, rps.head)).runSyncUnsafe() - - returnedProduct.description mustEqual "test3" - returnedProduct.sku mustEqual 3L - returnedProduct.id mustEqual inserted - } - } -} diff --git a/quill-ndbc-monix/src/test/scala/io/getquill/postgres/package.scala b/quill-ndbc-monix/src/test/scala/io/getquill/postgres/package.scala deleted file mode 100644 index cbce2f7fa8..0000000000 --- a/quill-ndbc-monix/src/test/scala/io/getquill/postgres/package.scala +++ /dev/null @@ -1,13 +0,0 @@ -package io.getquill - -import io.getquill.context.sql.{TestDecoders, TestEncoders} -import monix.execution.Scheduler - -package object postgres { - private implicit val scheduler = Scheduler.global - object testContext - extends PostgresMonixNdbcContext(Literal, "testPostgresDB") - with TestEntities - with TestEncoders - with TestDecoders -} diff --git a/quill-ndbc-postgres/project/build.properties b/quill-ndbc-postgres/project/build.properties deleted file mode 100644 index c0bab04941..0000000000 --- a/quill-ndbc-postgres/project/build.properties +++ /dev/null @@ -1 +0,0 @@ -sbt.version=1.2.8 diff --git a/quill-ndbc-postgres/src/main/scala/io/getquill/PostgresNdbcContext.scala b/quill-ndbc-postgres/src/main/scala/io/getquill/PostgresNdbcContext.scala deleted file mode 100644 index 6d1b866bdf..0000000000 --- 
a/quill-ndbc-postgres/src/main/scala/io/getquill/PostgresNdbcContext.scala +++ /dev/null @@ -1,17 +0,0 @@ -package io.getquill - -import com.typesafe.config.Config -import io.getquill.context.ndbc._ -import io.getquill.util.LoadConfig -import io.trane.ndbc._ - -class PostgresNdbcContext[+N <: NamingStrategy]( - naming: N, - dataSource: DataSource[PostgresPreparedStatement, PostgresRow] -) extends NdbcContext[PostgresDialect, N, PostgresPreparedStatement, PostgresRow](PostgresDialect, naming, dataSource) - with PostgresNdbcContextBase[N] { - - def this(naming: N, config: NdbcContextConfig) = this(naming, PostgresDataSource.create(config.dataSource)) - def this(naming: N, config: Config) = this(naming, NdbcContextConfig(config)) - def this(naming: N, configPrefix: String) = this(naming, LoadConfig(configPrefix)) -} diff --git a/quill-ndbc-postgres/src/main/scala/io/getquill/context/ndbc/PostgresDecoders.scala b/quill-ndbc-postgres/src/main/scala/io/getquill/context/ndbc/PostgresDecoders.scala deleted file mode 100644 index 188c002d11..0000000000 --- a/quill-ndbc-postgres/src/main/scala/io/getquill/context/ndbc/PostgresDecoders.scala +++ /dev/null @@ -1,98 +0,0 @@ -package io.getquill.context.ndbc - -import io.getquill.context.sql.encoding.ArrayEncoding -import io.trane.ndbc.PostgresRow -import io.trane.ndbc.value.Value - -import java.time._ -import java.util.{Date, UUID} -import scala.collection.compat._ -import scala.language.implicitConversions -import scala.math.BigDecimal.javaBigDecimal2bigDecimal - -class Default[+T](val default: T) - -object Default { - implicit def defaultNull[T <: AnyRef]: Default[T] = new Default[T](null.asInstanceOf[T]) - implicit def defaultNumeric[T <: Numeric[_]](n: T) = new Default[T](0.asInstanceOf[T]) - implicit object DefaultBoolean extends Default[Boolean](false) - - def value[A](implicit value: Default[A]): A = value.default -} - -trait PostgresDecoders { - this: NdbcContextBase[_, _, _, PostgresRow] with ArrayEncoding => - - type Decoder[T] = BaseDecoder[T] - - protected val zoneOffset: ZoneOffset - - def decoder[T, U](f: PostgresRow => Int => T)(implicit map: T => U): Decoder[U] = - (index, row, session) => - row.column(index) match { - case Value.NULL => Default.value[U] - case _ => map(f(row)(index)) - } - - def arrayDecoder[T, U, Col <: Seq[U]]( - f: PostgresRow => Int => Array[T] - )(implicit map: T => U, bf: CBF[U, Col]): Decoder[Col] = - (index, row, session) => { - f(row)(index) - .foldLeft(bf.newBuilder) { case (b, v) => - b += map(v) - } - .result() - } - - implicit def mappedDecoder[I, O](implicit mapped: MappedEncoding[I, O], d: Decoder[I]): Decoder[O] = - mappedBaseDecoder(mapped, d) - - implicit def optionDecoder[T](implicit d: Decoder[T]): Decoder[Option[T]] = - (idx, row, session) => - row.column(idx) match { - case Value.NULL => None - case value => Option(d(idx, row, session)) - } - - private implicit def toDate(v: LocalDateTime): Date = Date.from(v.toInstant(zoneOffset)) - - implicit val uuidDecoder: Decoder[UUID] = decoder(_.getUUID) - implicit val stringDecoder: Decoder[String] = decoder(_.getString) - implicit val bigDecimalDecoder: Decoder[BigDecimal] = decoder(_.getBigDecimal) - implicit val booleanDecoder: Decoder[Boolean] = decoder(_.getBoolean) - implicit val byteDecoder: Decoder[Byte] = decoder(_.getByte) - implicit val shortDecoder: Decoder[Short] = decoder(_.getShort) - implicit val intDecoder: Decoder[Int] = decoder(_.getInteger) - implicit val longDecoder: Decoder[Long] = decoder(_.getLong) - implicit val floatDecoder: 
Decoder[Float] = decoder(_.getFloat) - implicit val doubleDecoder: Decoder[Double] = decoder(_.getDouble) - implicit val byteArrayDecoder: Decoder[Array[Byte]] = decoder(_.getByteArray) - implicit val dateDecoder: Decoder[Date] = decoder(_.getLocalDateTime) - implicit val localDateDecoder: Decoder[LocalDate] = decoder(_.getLocalDate) - implicit val localDateTimeDecoder: Decoder[LocalDateTime] = decoder(_.getLocalDateTime) - implicit val offsetTimeDecoder: Decoder[OffsetTime] = decoder(_.getOffsetTime) - - implicit def arrayStringDecoder[Col <: Seq[String]](implicit bf: CBF[String, Col]): Decoder[Col] = - arrayDecoder[String, String, Col](_.getStringArray) - implicit def arrayBigDecimalDecoder[Col <: Seq[BigDecimal]](implicit bf: CBF[BigDecimal, Col]): Decoder[Col] = - arrayDecoder[java.math.BigDecimal, BigDecimal, Col](_.getBigDecimalArray) - implicit def arrayBooleanDecoder[Col <: Seq[Boolean]](implicit bf: CBF[Boolean, Col]): Decoder[Col] = - arrayDecoder[java.lang.Boolean, Boolean, Col](_.getBooleanArray) - implicit def arrayByteDecoder[Col <: Seq[Byte]](implicit bf: CBF[Byte, Col]): Decoder[Col] = - arrayDecoder[Byte, Byte, Col](_.getByteArray) - implicit def arrayShortDecoder[Col <: Seq[Short]](implicit bf: CBF[Short, Col]): Decoder[Col] = - arrayDecoder[java.lang.Short, Short, Col](_.getShortArray) - implicit def arrayIntDecoder[Col <: Seq[Int]](implicit bf: CBF[Int, Col]): Decoder[Col] = - arrayDecoder[java.lang.Integer, Int, Col](_.getIntegerArray) - implicit def arrayLongDecoder[Col <: Seq[Long]](implicit bf: CBF[Long, Col]): Decoder[Col] = - arrayDecoder[java.lang.Long, Long, Col](_.getLongArray) - implicit def arrayFloatDecoder[Col <: Seq[Float]](implicit bf: CBF[Float, Col]): Decoder[Col] = - arrayDecoder[java.lang.Float, Float, Col](_.getFloatArray) - implicit def arrayDoubleDecoder[Col <: Seq[Double]](implicit bf: CBF[Double, Col]): Decoder[Col] = - arrayDecoder[java.lang.Double, Double, Col](_.getDoubleArray) - implicit def arrayDateDecoder[Col <: Seq[Date]](implicit bf: CBF[Date, Col]): Decoder[Col] = - arrayDecoder[LocalDateTime, Date, Col](_.getLocalDateTimeArray) - implicit def arrayLocalDateDecoder[Col <: Seq[LocalDate]](implicit bf: CBF[LocalDate, Col]): Decoder[Col] = - arrayDecoder[LocalDate, LocalDate, Col](_.getLocalDateArray) -} diff --git a/quill-ndbc-postgres/src/main/scala/io/getquill/context/ndbc/PostgresEncoders.scala b/quill-ndbc-postgres/src/main/scala/io/getquill/context/ndbc/PostgresEncoders.scala deleted file mode 100644 index 3011215c2d..0000000000 --- a/quill-ndbc-postgres/src/main/scala/io/getquill/context/ndbc/PostgresEncoders.scala +++ /dev/null @@ -1,93 +0,0 @@ -package io.getquill.context.ndbc - -import java.time.{LocalDate, LocalDateTime, ZoneOffset} -import java.util.{Date, UUID} - -import io.getquill.dsl.CoreDsl -import io.trane.ndbc.PostgresPreparedStatement - -import scala.language.implicitConversions -import scala.reflect.ClassTag - -trait LowPriorityPostgresImplicits { - this: CoreDsl => - - implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], e: BaseEncoder[O]): BaseEncoder[I] = - mappedBaseEncoder(mapped, e) -} - -trait PostgresEncoders extends LowPriorityPostgresImplicits with io.getquill.dsl.LowPriorityImplicits { - this: NdbcContextBase[_, _, PostgresPreparedStatement, _] => - - type Encoder[T] = BaseEncoder[T] - - protected val zoneOffset: ZoneOffset - - def encoder[T, U]( - f: PostgresPreparedStatement => (Int, U) => PostgresPreparedStatement - )(implicit ev: T => U): Encoder[T] = - (idx, v, ps, session) => - if (v 
== null) ps.setNull(idx) - else f(ps)(idx, v) - - def arrayEncoder[T, U: ClassTag, Col <: Seq[T]]( - f: PostgresPreparedStatement => (Int, Array[U]) => PostgresPreparedStatement - )(ev: T => U): Encoder[Col] = - (idx, v, ps, session) => - if (v == null) ps.setNull(idx) - else f(ps)(idx, v.map(ev).toArray[U]) - - implicit override def anyValMappedEncoder[I <: AnyVal, O](implicit - mapped: MappedEncoding[I, O], - encoder: Encoder[O] - ): Encoder[I] = mappedEncoder - - implicit def optionEncoder[T](implicit e: Encoder[T]): Encoder[Option[T]] = - (idx, v, ps, session) => - if (v == null) ps.setNull(idx) - else - v match { - case None => ps.setNull(idx) - case Some(v) => e(idx, v, ps, session) - } - - implicit def toLocalDateTime(d: Date) = LocalDateTime.ofInstant(d.toInstant(), zoneOffset) - - implicit val uuidEncoder: Encoder[UUID] = encoder(_.setUUID) - implicit val stringEncoder: Encoder[String] = encoder(_.setString) - implicit val bigDecimalEncoder: Encoder[BigDecimal] = encoder(_.setBigDecimal)(_.bigDecimal) - implicit val booleanEncoder: Encoder[Boolean] = encoder(_.setBoolean) - implicit val byteEncoder: Encoder[Byte] = encoder(_.setByte) - implicit val shortEncoder: Encoder[Short] = encoder(_.setShort) - implicit val intEncoder: Encoder[Int] = encoder(_.setInteger) - implicit val longEncoder: Encoder[Long] = encoder(_.setLong) - implicit val floatEncoder: Encoder[Float] = encoder(_.setFloat) - implicit val doubleEncoder: Encoder[Double] = encoder(_.setDouble) - implicit val byteArrayEncoder: Encoder[Array[Byte]] = encoder(_.setByteArray) - implicit val dateEncoder: Encoder[Date] = encoder(_.setLocalDateTime) - implicit val localDateEncoder: Encoder[LocalDate] = encoder(_.setLocalDate) - implicit val localDateTimeEncoder: Encoder[LocalDateTime] = encoder(_.setLocalDateTime) - - implicit def arrayStringEncoder[Col <: Seq[String]]: Encoder[Col] = - arrayEncoder[String, String, Col](_.setStringArray)(identity) - implicit def arrayBigDecimalEncoder[Col <: Seq[BigDecimal]]: Encoder[Col] = - arrayEncoder[BigDecimal, java.math.BigDecimal, Col](_.setBigDecimalArray)(_.bigDecimal) - implicit def arrayBooleanEncoder[Col <: Seq[Boolean]]: Encoder[Col] = - arrayEncoder[Boolean, java.lang.Boolean, Col](_.setBooleanArray)(_.booleanValue) - implicit def arrayByteEncoder[Col <: Seq[Byte]]: Encoder[Col] = - arrayEncoder[Byte, java.lang.Short, Col](_.setShortArray)(identity) - implicit def arrayShortEncoder[Col <: Seq[Short]]: Encoder[Col] = - arrayEncoder[Short, java.lang.Short, Col](_.setShortArray)(_.shortValue) - implicit def arrayIntEncoder[Col <: Seq[Int]]: Encoder[Col] = - arrayEncoder[Int, java.lang.Integer, Col](_.setIntegerArray)(_.intValue) - implicit def arrayLongEncoder[Col <: Seq[Long]]: Encoder[Col] = - arrayEncoder[Long, java.lang.Long, Col](_.setLongArray)(_.longValue) - implicit def arrayFloatEncoder[Col <: Seq[Float]]: Encoder[Col] = - arrayEncoder[Float, java.lang.Float, Col](_.setFloatArray)(_.floatValue) - implicit def arrayDoubleEncoder[Col <: Seq[Double]]: Encoder[Col] = - arrayEncoder[Double, java.lang.Double, Col](_.setDoubleArray)(_.doubleValue) - implicit def arrayDateEncoder[Col <: Seq[Date]]: Encoder[Col] = - arrayEncoder[Date, LocalDateTime, Col](_.setLocalDateTimeArray)(identity) - implicit def arrayLocalDateEncoder[Col <: Seq[LocalDate]]: Encoder[Col] = - arrayEncoder[LocalDate, LocalDate, Col](_.setLocalDateArray)(identity) -} diff --git a/quill-ndbc-postgres/src/main/scala/io/getquill/context/ndbc/PostgresNdbcContextBase.scala 
b/quill-ndbc-postgres/src/main/scala/io/getquill/context/ndbc/PostgresNdbcContextBase.scala deleted file mode 100644 index 9658af37ff..0000000000 --- a/quill-ndbc-postgres/src/main/scala/io/getquill/context/ndbc/PostgresNdbcContextBase.scala +++ /dev/null @@ -1,27 +0,0 @@ -package io.getquill.context.ndbc - -import java.time.ZoneOffset - -import io.getquill.context.sql.encoding.ArrayEncoding - -import io.getquill.{NamingStrategy, PostgresDialect} - -import io.trane.ndbc.{PostgresPreparedStatement, PostgresRow} - -trait PostgresNdbcContextBase[+N <: NamingStrategy] - extends NdbcContextBase[PostgresDialect, N, PostgresPreparedStatement, PostgresRow] - with ArrayEncoding - with PostgresEncoders - with PostgresDecoders { - - override type NullChecker = PostgresNdbcNullChecker - class PostgresNdbcNullChecker extends BaseNullChecker { - override def apply(index: Index, row: ResultRow): Boolean = - row.column(index).isNull - } - implicit val nullChecker: NullChecker = new PostgresNdbcNullChecker() - - override val idiom = PostgresDialect - - override protected def createPreparedStatement(sql: String) = PostgresPreparedStatement.create(sql) - - override protected val zoneOffset: ZoneOffset = ZoneOffset.UTC -} diff --git a/quill-ndbc-postgres/src/test/resources/application.conf b/quill-ndbc-postgres/src/test/resources/application.conf deleted file mode 100644 index f56c384e41..0000000000 --- a/quill-ndbc-postgres/src/test/resources/application.conf +++ /dev/null @@ -1,6 +0,0 @@ -testPostgresDB.ndbc.dataSourceSupplierClass=io.trane.ndbc.postgres.netty4.DataSourceSupplier -testPostgresDB.ndbc.host=${?POSTGRES_HOST} -testPostgresDB.ndbc.port=${?POSTGRES_PORT} -testPostgresDB.ndbc.user=postgres -testPostgresDB.ndbc.password=postgres -testPostgresDB.ndbc.database=quill_test diff --git a/quill-ndbc-postgres/src/test/resources/logback.xml b/quill-ndbc-postgres/src/test/resources/logback.xml deleted file mode 100644 index f7387fb8bc..0000000000 --- a/quill-ndbc-postgres/src/test/resources/logback.xml +++ /dev/null @@ -1,16 +0,0 @@
-<configuration>
-    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
-        <encoder>
-            <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n%ex</pattern>
-        </encoder>
-    </appender>
-
-    <logger name="io.getquill" level="WARN"/>
-    <logger name="io.trane" level="WARN"/>
-    <logger name="io.netty" level="WARN"/>
-
-    <root level="WARN">
-        <appender-ref ref="STDOUT"/>
-    </root>
-
-</configuration>
diff --git a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/ArrayNdbcPostgresEncodingSpec.scala b/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/ArrayNdbcPostgresEncodingSpec.scala deleted file mode 100644 index 6890cdecb8..0000000000 --- a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/ArrayNdbcPostgresEncodingSpec.scala +++ /dev/null @@ -1,29 +0,0 @@ -package io.getquill.context.ndbc.postgres - -import io.getquill.context.sql.encoding.ArrayEncodingBaseSpec - -class ArrayNdbcPostgresEncodingSpec extends ArrayEncodingBaseSpec { - - val ctx = testContext - import ctx._ - - val q = quote(query[ArraysTestEntity]) - - "Support all sql base types and `Traversable` implementers" in { - get(ctx.run(q.insertValue(lift(e)))) - val actual = get(ctx.run(q)).head - actual mustEqual e - baseEntityDeepCheck(actual, e) - } - - "Support Traversable encoding basing on MappedEncoding" in { - val wrapQ = quote(querySchema[WrapEntity]("ArraysTestEntity")) - get(ctx.run(wrapQ.insertValue(lift(wrapE)))) - get(ctx.run(wrapQ)).head mustBe wrapE - } - - override protected def beforeEach(): Unit = { - get(ctx.run(q.delete)) - () - } -} diff --git a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/CaseClassQueryNdbcPostgresSpec.scala
b/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/CaseClassQueryNdbcPostgresSpec.scala deleted file mode 100644 index 3fd97c7180..0000000000 --- a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/CaseClassQueryNdbcPostgresSpec.scala +++ /dev/null @@ -1,49 +0,0 @@ -package io.getquill.context.ndbc.postgres - -import io.getquill.context.sql.base.CaseClassQuerySpec - -class CaseClassQueryNdbcPostgresSpec extends CaseClassQuerySpec { - - val context = testContext - import context._ - - override def beforeAll = - get { - context.transaction { - for { - _ <- context.run(query[Contact].delete) - _ <- context.run(query[Address].delete) - _ <- context.run(liftQuery(peopleEntries).foreach(e => peopleInsert(e))) - _ <- context.run(liftQuery(addressEntries).foreach(e => addressInsert(e))) - } yield {} - } - } - - "Example 1 - Single Case Class Mapping" in { - get( - context.run(`Ex 1 CaseClass Record Output`) - ) must contain theSameElementsAs (`Ex 1 CaseClass Record Output expected result`) - } - - "Example 1A - Single Case Class Mapping" in { - get( - context.run(`Ex 1A CaseClass Record Output`) - ) must contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` - } - - "Example 1B - Single Case Class Mapping" in { - get( - context.run(`Ex 1B CaseClass Record Output`) - ) must contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` - } - - "Example 2 - Single Record Mapped Join" in { - get(context.run(`Ex 2 Single-Record Join`)) must contain theSameElementsAs `Ex 2 Single-Record Join expected result` - } - - "Example 3 - Inline Record as Filter" in { - get( - context.run(`Ex 3 Inline Record Usage`) - ) must contain theSameElementsAs `Ex 3 Inline Record Usage expected result` - } -} diff --git a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/DepartmentsNdbcPostgresSpec.scala b/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/DepartmentsNdbcPostgresSpec.scala deleted file mode 100644 index d8393d5c39..0000000000 --- a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/DepartmentsNdbcPostgresSpec.scala +++ /dev/null @@ -1,34 +0,0 @@ -package io.getquill.context.ndbc.postgres - -import io.getquill.context.sql.base.DepartmentsSpec - -class DepartmentsNdbcPostgresSpec extends DepartmentsSpec { - - val context = testContext - import context._ - - override def beforeAll = { - get { - context.transaction { - for { - a <- context.run(query[Department].delete) - b <- context.run(query[Employee].delete) - _ <- context.run(query[Task].delete) - - _ <- context.run(liftQuery(departmentEntries).foreach(e => departmentInsert(e))) - _ <- context.run(liftQuery(employeeEntries).foreach(e => employeeInsert(e))) - _ <- context.run(liftQuery(taskEntries).foreach(e => taskInsert(e))) - } yield {} - } - } - () - } - - "Example 8 - nested naive" in { - get(context.run(`Example 8 expertise naive`(lift(`Example 8 param`)))) mustEqual `Example 8 expected result` - } - - "Example 9 - nested db" in { - get(context.run(`Example 9 expertise`(lift(`Example 9 param`)))) mustEqual `Example 9 expected result` - } -} diff --git a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/NdbcPostgresEncodingSpec.scala b/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/NdbcPostgresEncodingSpec.scala deleted file mode 100644 index bbd154f5f2..0000000000 --- a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/NdbcPostgresEncodingSpec.scala +++ /dev/null @@ 
-1,107 +0,0 @@ -package io.getquill.context.ndbc.postgres - -import java.time.{LocalDate, LocalDateTime} -import java.util.{Date, UUID} - -import io.getquill.context.sql.EncodingSpec -import io.getquill.Query - -class NdbcPostgresEncodingSpec extends EncodingSpec { - - val context = testContext - import context._ - - "encodes and decodes types" in { - val r = - for { - _ <- context.run(delete) - _ <- context.run(liftQuery(insertValues).foreach(e => insert(e))) - result <- context.run(query[EncodingTestEntity]) - } yield result - - verify(get(r)) - } - - "encodes and decodes uuids" in { - case class EncodingUUIDTestEntity(v1: UUID) - val testUUID = UUID.fromString("e5240c08-6ee7-474a-b5e4-91f79c48338f") - - // delete old values - val q0 = quote(query[EncodingUUIDTestEntity].delete) - val rez0 = get(context.run(q0)) - - // insert new uuid - val rez1 = get(context.run(query[EncodingUUIDTestEntity].insertValue(lift(EncodingUUIDTestEntity(testUUID))))) - - // verify you can get the uuid back from the db - val q2 = quote(query[EncodingUUIDTestEntity].map(p => p.v1)) - - val rez2 = get(testContext.run(q2)) - - rez2 mustEqual List(testUUID) - } - - "fails if the column has the wrong type" - { - "numeric" in { - get(context.run(liftQuery(insertValues).foreach(e => insert(e)))) - case class EncodingTestEntity(v1: Int) - val e = intercept[UnsupportedOperationException] { - get(context.run(query[EncodingTestEntity])) - } - } - "non-numeric" in { - get(context.run(liftQuery(insertValues).foreach(e => insert(e)))) - case class EncodingTestEntity(v1: Date) - val e = intercept[UnsupportedOperationException] { - get(context.run(query[EncodingTestEntity])) - } - } - } - - "encodes sets" in { - val q = quote { (set: Query[Int]) => - query[EncodingTestEntity].filter(t => set.contains(t.v6)) - } - val fut = - for { - _ <- context.run(query[EncodingTestEntity].delete) - _ <- context.run(liftQuery(insertValues).foreach(e => query[EncodingTestEntity].insertValue(e))) - r <- context.run(q(liftQuery(insertValues.map(_.v6)))) - } yield { - r - } - verify(get(fut)) - } - - "returning UUID" in { - val success = for { - uuid <- get(context.run(insertBarCode(lift(barCodeEntry)))) - barCode <- get(context.run(findBarCodeByUuid(uuid))).headOption - } yield { - verifyBarcode(barCode) - } - success must not be empty - } - - "encodes localdate type" in { - case class DateEncodingTestEntity(v1: LocalDate, v2: LocalDate) - val entity = DateEncodingTestEntity(LocalDate.now, LocalDate.now) - val r = for { - _ <- context.run(query[DateEncodingTestEntity].delete) - _ <- context.run(query[DateEncodingTestEntity].insertValue(lift(entity))) - result <- context.run(query[DateEncodingTestEntity]) - } yield result - get(r) must contain(entity) - } - - "encodes localdatetime type" in { - case class DateEncodingTestEntity(v1: LocalDateTime, v2: LocalDateTime) - val entity = DateEncodingTestEntity(LocalDateTime.now, LocalDateTime.now) - val r = for { - _ <- context.run(query[DateEncodingTestEntity].delete) - _ <- context.run(query[DateEncodingTestEntity].insertValue(lift(entity))) - result <- context.run(query[DateEncodingTestEntity]) - } yield result - get(r) - } -} diff --git a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/OptionalNestedNdbcSpec.scala b/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/OptionalNestedNdbcSpec.scala deleted file mode 100644 index b5b8146c9c..0000000000 --- a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/OptionalNestedNdbcSpec.scala +++ 
/dev/null @@ -1,95 +0,0 @@ -package io.getquill.context.ndbc.postgres - -import io.getquill.context.encoding.OptionalNestedSpec - -class OptionalProductEncodingJasyncSpec extends OptionalNestedSpec { - - val context: testContext.type = testContext - import testContext._ - - override protected def beforeEach() = { - import Setup._ - get(testContext.run(query[Contact].delete)) - () - } - - "1.Optional Inner Product" - { - import `1.Optional Inner Product`._ - "1.Ex1 - Not null inner product" in { - get(context.run(`1.Ex1 - Not null inner product insert`)) - get(context.run(data)) mustEqual List(`1.Ex1 - Not null inner product result`) - } - "1.Ex1 Auto - Not null inner product" in { - val result = `1.Ex1 - Not null inner product result` - get(context.run(data.insertValue(lift(result)))) - get(context.run(data)) mustEqual List(result) - } - - "1.Ex2 - null inner product" in { - get(context.run(`1.Ex2 - null inner product insert`)) - // NDBC behaves exactly the same as JDBC making getInt on a null column 0 - get(context.run(data)) mustEqual List(`1.Ex2 - null inner product result`) - } - "1.Ex2 Auto - null inner product" in { - val result = `1.Ex2 - null inner product result` - get(context.run(data.insertValue(lift(result)))) - get(context.run(data)) mustEqual List(result) - } - } - - "2.Optional Inner Product" - { - import `2.Optional Inner Product with Optional Leaf`._ - "2.Ex1 - Not null inner product" in { - get(context.run(`2.Ex1 - not-null insert`)) - get(context.run(data)) mustEqual List(`2.Ex1 - not-null result`) - } - "2.Ex1 Auto - Not null inner product" in { - val result = `2.Ex1 - not-null result` - get(context.run(data.insertValue(lift(result)))) - get(context.run(data)) mustEqual List(result) - } - - "2.Ex2 - Not null inner product" in { - get(context.run(`2.Ex2 - Null inner product insert`)) - get(context.run(data)) mustEqual List(`2.Ex2 - Null inner product result`) - } - "2.Ex2 Auto - Not null inner product" in { - val result = `2.Ex2 - Null inner product result` - get(context.run(data.insertValue(lift(result)))) - get(context.run(data)) mustEqual List(result) - } - - "2.Ex3 - Null inner leaf" in { - get(context.run(`2.Ex3 - Null inner leaf insert`)) - get(context.run(data)) mustEqual List(`2.Ex3 - Null inner leaf result`) - } - "2.Ex3 Auto - Null inner leaf" in { - val result = `2.Ex3 - Null inner leaf result` - get(context.run(data.insertValue(lift(result)))) - get(context.run(data)) mustEqual List(result) - } - } - - "3.Optional Nested Inner Product" - { - import `3.Optional Nested Inner Product`._ - "3.Ex1 - Null inner product insert" in { - get(context.run(`3.Ex1 - Null inner product insert`)) - get(context.run(data)) mustEqual List(`3.Ex1 - Null inner product result`) - } - "3.Ex1 Auto - Null inner product insert" in { - val result = `3.Ex1 - Null inner product result` - get(context.run(data.insertValue(lift(result)))) - get(context.run(data)) mustEqual List(result) - } - - "3.Ex2 - Null inner leaf" in { - get(context.run(`3.Ex2 - Null inner leaf insert`)) - get(context.run(data)) mustEqual List(`3.Ex2 - Null inner leaf result`) - } - "3.Ex2 Auto - Null inner leaf" in { - val result = `3.Ex2 - Null inner leaf result` - get(context.run(data.insertValue(lift(result)))) - get(context.run(data)) mustEqual List(result) - } - } -} diff --git a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/PeopleNdbcPostgresSpec.scala b/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/PeopleNdbcPostgresSpec.scala deleted file mode 100644 index 
4dbcad646d..0000000000
--- a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/PeopleNdbcPostgresSpec.scala
+++ /dev/null
@@ -1,62 +0,0 @@
-package io.getquill.context.ndbc.postgres
-
-import io.getquill.context.sql.base.PeopleSpec
-
-class PeopleNdbcPostgresSpec extends PeopleSpec {
-
-  val context = testContext
-  import context._
-
-  override def beforeAll =
-    get {
-      context.transaction {
-        for {
-          _ <- context.run(query[Couple].delete)
-          _ <- context.run(query[Person].delete)
-          _ <- context.run(liftQuery(peopleEntries).foreach(e => peopleInsert(e)))
-          _ <- context.run(liftQuery(couplesEntries).foreach(e => couplesInsert(e)))
-        } yield {}
-      }
-    }
-
-  "Example 1 - differences" in {
-    get(context.run(`Ex 1 differences`)) mustEqual `Ex 1 expected result`
-  }
-
-  "Example 2 - range simple" in {
-    get(context.run(`Ex 2 rangeSimple`(lift(`Ex 2 param 1`), lift(`Ex 2 param 2`)))) mustEqual `Ex 2 expected result`
-  }
-
-  "Example 3 - satisfies" in {
-    get(context.run(`Ex 3 satisfies`)) mustEqual `Ex 3 expected result`
-  }
-
-  "Example 4 - satisfies" in {
-    get(context.run(`Ex 4 satisfies`)) mustEqual `Ex 4 expected result`
-  }
-
-  "Example 5 - compose" in {
-    get(context.run(`Ex 5 compose`(lift(`Ex 5 param 1`), lift(`Ex 5 param 2`)))) mustEqual `Ex 5 expected result`
-  }
-
-  "Example 6 - predicate 0" in {
-    get(context.run(satisfies(eval(`Ex 6 predicate`)))) mustEqual `Ex 6 expected result`
-  }
-
-  "Example 7 - predicate 1" in {
-    get(context.run(satisfies(eval(`Ex 7 predicate`)))) mustEqual `Ex 7 expected result`
-  }
-
-  "Example 8 - contains empty" in {
-    get(context.run(`Ex 8 and 9 contains`(liftQuery(`Ex 8 param`)))) mustEqual `Ex 8 expected result`
-  }
-
-  "Example 9 - contains non empty" in {
-    get(context.run(`Ex 8 and 9 contains`(liftQuery(`Ex 9 param`)))) mustEqual `Ex 9 expected result`
-  }
-
-  "Example 10 - pagination" in {
-    get(context.run(`Ex 10 page 1 query`)) mustEqual `Ex 10 page 1 expected`
-    get(context.run(`Ex 10 page 2 query`)) mustEqual `Ex 10 page 2 expected`
-  }
-}
diff --git a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/PeopleNdbcReturningSpec.scala b/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/PeopleNdbcReturningSpec.scala
deleted file mode 100644
index 238f7b872b..0000000000
--- a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/PeopleNdbcReturningSpec.scala
+++ /dev/null
@@ -1,73 +0,0 @@
-package io.getquill.context.ndbc.postgres
-
-import io.getquill.context.sql.base.PeopleReturningSpec
-
-class PeopleNdbcReturningSpec extends PeopleReturningSpec {
-
-  val context = testContext
-  import context.{get => runSyncUnsafe, _}
-
-  override def beforeEach(): Unit = {
-    runSyncUnsafe {
-      testContext.transaction {
-        for {
-          _ <- testContext.run(query[Contact].delete)
-          _ <- testContext.run(query[Product].delete)
-          _ <- testContext.run(liftQuery(people).foreach(p => peopleInsert(p)))
-        } yield ()
-      }
-    }
-    super.beforeEach()
-  }
-
-  "Ex 0 insert.returning(_.generatedColumn) mod" in {
-    import `Ex 0 insert.returning(_.generatedColumn) mod`._
-    runSyncUnsafe(for {
-      id <- testContext.run(op)
-      output <- testContext.run(get)
-    } yield (output.toSet mustEqual result(id).toSet))
-  }
-
-  "Ex 0.5 insert.returning(wholeRecord) mod" in {
-    import `Ex 0.5 insert.returning(wholeRecord) mod`._
-    runSyncUnsafe(for {
-      product <- testContext.run(op)
-      output <- testContext.run(get)
-    } yield (output mustEqual result(product)))
-  }
-
-  "Ex 1 insert.returningMany(_.generatedColumn) mod" in {
-    import `Ex 1 insert.returningMany(_.generatedColumn) mod`._
-    runSyncUnsafe(for {
-      id <- testContext.run(op)
-      output <- testContext.run(get)
-    } yield (output mustEqual result(id.head)))
-  }
-
-  "Ex 2 update.returningMany(_.singleColumn) mod" in {
-    import `Ex 2 update.returningMany(_.singleColumn) mod`._
-    runSyncUnsafe(for {
-      opResult <- testContext.run(op)
-      _ = opResult.toSet mustEqual expect.toSet
-      output <- testContext.run(get)
-    } yield (output.toSet mustEqual result.toSet))
-  }
-
-  "Ex 3 delete.returningMany(wholeRecord)" in {
-    import `Ex 3 delete.returningMany(wholeRecord)`._
-    runSyncUnsafe(for {
-      opResult <- testContext.run(op)
-      _ = opResult.toSet mustEqual expect.toSet
-      output <- testContext.run(get)
-    } yield (output.toSet mustEqual result.toSet))
-  }
-
-  "Ex 4 update.returningMany(query)" in {
-    import `Ex 4 update.returningMany(query)`._
-    runSyncUnsafe(for {
-      opResult <- testContext.run(op)
-      _ = opResult.toSet mustEqual expect.toSet
-      output <- testContext.run(get)
-    } yield (output.toSet mustEqual result.toSet))
-  }
-}
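Note: both people specs above use the same seeding discipline: every delete and insert is sequenced inside a single transaction, and the spec blocks on the resulting future before any example runs. A minimal sketch of that shape with plain scala.concurrent futures (deleteAll and insertAll are hypothetical stand-ins for the context.run calls, not Quill API):

    import scala.concurrent.{Await, ExecutionContext, Future}
    import scala.concurrent.duration.Duration
    import ExecutionContext.Implicits.global

    // Hypothetical effects standing in for the context.run(...) calls.
    def deleteAll(table: String): Future[Long] = Future(0L)
    def insertAll(table: String, rows: List[String]): Future[List[Long]] =
      Future(rows.map(_ => 1L))

    // Deletes must finish before the inserts start, so each step is
    // sequenced through flatMap rather than launched concurrently.
    def seed(): Future[Unit] =
      for {
        _ <- deleteAll("Couple")
        _ <- deleteAll("Person")
        _ <- insertAll("Person", List("Alex", "Bert"))
      } yield ()

    // The specs block on the seed, mirroring the get/Await.result pattern above.
    def beforeAll(): Unit = Await.result(seed(), Duration.Inf)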
diff --git a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/PostgresNdbcContextSpec.scala b/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/PostgresNdbcContextSpec.scala
deleted file mode 100644
index 9efa759de5..0000000000
--- a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/PostgresNdbcContextSpec.scala
+++ /dev/null
@@ -1,85 +0,0 @@
-package io.getquill.context.ndbc.postgres
-
-import io.getquill.base.Spec
-import io.trane.future.scala.Future
-
-class PostgresNdbcContextSpec extends Spec {
-
-  val ctx = testContext
-  import ctx._
-
-  "run non-batched action" in {
-    get(ctx.run(qr1.delete))
-    val insert = quote { (i: Int) =>
-      qr1.insert(_.i -> i)
-    }
-    get(ctx.run(insert(lift(1)))) mustEqual 1
-  }
-
-  "insert with returning" - {
-    "single column table" in {
-      val inserted: Long = get(ctx.run {
-        qr4.insertValue(lift(TestEntity4(0))).returningGenerated(_.i)
-      })
-      get(ctx.run(qr4.filter(_.i == lift(inserted)))).head.i mustBe inserted
-    }
-
-    "multiple columns" in {
-      get(ctx.run(qr1.delete))
-      val inserted = get(ctx.run {
-        qr1.insertValue(lift(TestEntity("foo", 1, 18L, Some(123), true))).returning(r => (r.i, r.s, r.o))
-      })
-      (1, "foo", Some(123)) mustBe inserted
-    }
-  }
-
-  "transaction support" - {
-    "success" in {
-      get(for {
-        _ <- ctx.run(qr1.delete)
-        _ <- ctx.transaction {
-          ctx.run(qr1.insert(_.i -> 33))
-        }
-        r <- ctx.run(qr1)
-      } yield r).map(_.i) mustEqual List(33)
-    }
-
-    "failure" in {
-      get(for {
-        _ <- ctx.run(qr1.delete)
-        e <- ctx.transaction {
-          Future.sequence(
-            Seq(
-              ctx.run(qr1.insert(_.i -> 19)),
-              Future(throw new IllegalStateException)
-            )
-          )
-        }.recoverWith { case e: Exception =>
-          Future(e.getClass.getSimpleName)
-        }
-        r <- ctx.run(qr1)
-      } yield (e, r.isEmpty)) mustEqual (("IllegalStateException", true))
-    }
-
-    "nested" in {
-      get(for {
-        _ <- ctx.run(qr1.delete)
-        _ <- ctx.transaction {
-          ctx.transaction {
-            ctx.run(qr1.insert(_.i -> 33))
-          }
-        }
-        r <- ctx.run(qr1)
-      } yield r).map(_.i) mustEqual List(33)
-    }
-
-    "prepare" in {
-      get(
-        ctx.prepareParams(
-          "select * from Person where name=? and age > ?",
-          (pr, session) => (List("David Bowie", 69), pr)
-        )
-      ) mustEqual List("69", "'David Bowie'")
-    }
-  }
-}
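Note: the "failure" case above checks two things at once: a failed future inside transaction must roll the insert back (hence r.isEmpty), and the caller sees the error through ordinary future recovery rather than a thrown exception. The recovery half of that contract can be shown with plain scala.concurrent futures; a rough sketch, with failingTransaction as a hypothetical stand-in for the transactional block:

    import scala.concurrent.{Await, ExecutionContext, Future}
    import scala.concurrent.duration.Duration
    import ExecutionContext.Implicits.global

    // Stand-in for a transactional block that fails part-way through.
    def failingTransaction(): Future[Unit] =
      Future
        .sequence(Seq(Future(()), Future(throw new IllegalStateException)))
        .map(_ => ())

    // Like the spec, recover to the exception's simple name instead of failing.
    val observed: Future[String] =
      failingTransaction()
        .map(_ => "no error")
        .recoverWith { case e: Exception => Future(e.getClass.getSimpleName) }

    // Prints "IllegalStateException".
    println(Await.result(observed, Duration.Inf))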
diff --git a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/ProductNdbcPostgresSpec.scala b/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/ProductNdbcPostgresSpec.scala
deleted file mode 100644
index 1a2abbd90e..0000000000
--- a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/ProductNdbcPostgresSpec.scala
+++ /dev/null
@@ -1,94 +0,0 @@
-package io.getquill.context.ndbc.postgres
-
-import io.getquill.context.sql.{Id, ProductSpec}
-import io.trane.future.scala.Future
-
-class ProductNdbcPostgresSpec extends ProductSpec {
-
-  val context = testContext
-  import context._
-
-  override def beforeAll = {
-    get(context.run(quote(query[Product].delete)))
-    ()
-  }
-
-  "Product" - {
-    "Insert multiple products" in {
-      val inserted = get(Future.sequence(productEntries.map(product => context.run(productInsert(lift(product))))))
-      val product = get(context.run(productById(lift(inserted(2))))).head
-      product.description mustEqual productEntries(2).description
-      product.id mustEqual inserted(2)
-    }
-
-    "Single insert product" in {
-      val inserted = get(context.run(productSingleInsert))
-      val product = get(context.run(productById(lift(inserted)))).head
-      product.description mustEqual "Window"
-      product.id mustEqual inserted
-    }
-
-    "Single insert with inlined free variable" in {
-      val prd = Product(0L, "test1", 1L)
-      val inserted = get {
-        context.run {
-          product.insert(_.sku -> lift(prd.sku), _.description -> lift(prd.description)).returning(_.id)
-        }
-      }
-      val returnedProduct = get(context.run(productById(lift(inserted)))).head
-      returnedProduct.description mustEqual "test1"
-      returnedProduct.sku mustEqual 1L
-      returnedProduct.id mustEqual inserted
-    }
-
-    "Single insert with free variable and explicit quotation" in {
-      val prd = Product(0L, "test2", 2L)
-      val q1 = quote {
-        product.insert(_.sku -> lift(prd.sku), _.description -> lift(prd.description)).returning(_.id)
-      }
-      val inserted = get(context.run(q1))
-      val returnedProduct = get(context.run(productById(lift(inserted)))).head
-      returnedProduct.description mustEqual "test2"
-      returnedProduct.sku mustEqual 2L
-      returnedProduct.id mustEqual inserted
-    }
-
-    "Single product insert with a method quotation" in {
-      val prd = Product(0L, "test3", 3L)
-      val inserted = get(context.run(productInsert(lift(prd))))
-      val returnedProduct = get(context.run(productById(lift(inserted)))).head
-      returnedProduct.description mustEqual "test3"
-      returnedProduct.sku mustEqual 3L
-      returnedProduct.id mustEqual inserted
-    }
-
-    "Single insert with value class" in {
-      case class Product(id: Id, description: String, sku: Long)
-      val prd = Product(Id(0L), "test2", 2L)
-      val q1 = quote {
-        query[Product].insert(_.sku -> lift(prd.sku), _.description -> lift(prd.description)).returning(_.id)
-      }
-      get(context.run(q1)) mustBe a[Id]
-    }
-
-    "supports casts from string to number" - {
-      "toInt" in {
-        case class Product(id: Long, description: String, sku: Int)
-        val queried = get {
-          context.run {
-            query[Product].filter(_.sku == lift("1004").toInt)
-          }
-        }.head
-        queried.sku mustEqual 1004L
-      }
-      "toLong" in {
-        val queried = get {
-          context.run {
-            query[Product].filter(_.sku == lift("1004").toLong)
-          }
-        }.head
-        queried.sku mustEqual 1004L
-      }
-    }
-  }
-}
diff --git a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/QueryResultTypeNdbcPostgresSpec.scala b/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/QueryResultTypeNdbcPostgresSpec.scala
deleted file mode 100644
index c33582c4de..0000000000
--- a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/QueryResultTypeNdbcPostgresSpec.scala
+++ /dev/null
@@ -1,107 +0,0 @@
-package io.getquill.context.ndbc.postgres
-
-import io.getquill.context.sql.base.QueryResultTypeSpec
-import java.util.concurrent.ConcurrentLinkedQueue
-import scala.BigDecimal
-import scala.jdk.CollectionConverters._
-import scala.math.BigDecimal.int2bigDecimal
-import io.getquill.util.PrintMac
-
-class QueryResultTypeNdbcPostgresSpec extends QueryResultTypeSpec {
-
-  val context = testContext //
-  import context._
-
-  val insertedProducts = new ConcurrentLinkedQueue[Product]
-
-  override def beforeAll = {
-    get(context.run(deleteAll))
-    val ids = get(context.run(liftQuery(productEntries).foreach(e => productInsert(e))))
-    val inserted = (ids zip productEntries).map { case (id, prod) =>
-      prod.copy(id = id)
-    }
-    insertedProducts.addAll(inserted.asJava)
-    ()
-  }
-
-  def products = insertedProducts.asScala.toList
-
-  "return list" - {
-    "select" in {
-      get(context.run(selectAll)) must contain theSameElementsAs (products)
-    }
-    "map" in {
-      get(context.run(map)) must contain theSameElementsAs (products.map(_.id))
-    }
-    "filter" in {
-      get(context.run(filter)) must contain theSameElementsAs (products)
-    }
-    "withFilter" in {
-      get(context.run(withFilter)) must contain theSameElementsAs (products)
-    }
-    "sortBy" in {
-      get(context.run(sortBy)) must contain theSameElementsInOrderAs (products)
-    }
-    "take" in {
-      get(context.run(take)) must contain theSameElementsAs (products)
-    }
-    "drop" in {
-      get(context.run(drop)) must contain theSameElementsAs (products.drop(1))
-    }
-    "++" in {
-      get(context.run(`++`)) must contain theSameElementsAs (products ++ products)
-    }
-    "unionAll" in {
-      get(context.run(unionAll)) must contain theSameElementsAs (products ++ products)
-    }
-    "union" in {
-      get(context.run(union)) must contain theSameElementsAs (products)
-    }
-    "join" in {
-      get(context.run(join)) must contain theSameElementsAs (products zip products)
-    }
-    "distinct" in {
-      get(context.run(distinct)) must contain theSameElementsAs (products.map(_.id).distinct)
    -    }
-  }
-
-  "return single result" - {
-    "min" - {
-      "some" in {
-        get(context.run(minExists)) mustEqual Some(products.map(_.sku).min)
-      }
-      "none" in {
-        get(context.run(minNonExists)) mustBe None
-      }
-    }
-    "max" - {
-      "some" in {
-        get(context.run(maxExists)) mustBe Some(products.map(_.sku).max)
-      }
-      "none" in {
-        get(context.run(maxNonExists)) mustBe None
-      }
-    }
-    "avg" - {
-      "some" in {
-        get(context.run(avgExists)) mustBe Some(BigDecimal(products.map(_.sku).sum) / products.size)
-      }
-      "none" in {
-        get(context.run(avgNonExists)) mustBe None
-      }
-    }
-    "size" in {
-      get(context.run(productSize)) mustEqual products.size
-    }
-    "parametrized size" in {
-      get(context.run(parametrizedSize(lift(10000)))) mustEqual 0
-    }
-    "nonEmpty" in {
-      PrintMac(context.run(nonEmpty))
-      get(context.run(nonEmpty)) mustEqual true
-    }
-    "isEmpty" in {
-      get(context.run(isEmpty)) mustEqual false
-    }
-  }
-}
diff --git a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/TestContext.scala b/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/TestContext.scala
deleted file mode 100644
index f58cb53f9f..0000000000
--- a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/TestContext.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-package io.getquill.context.ndbc.postgres
-
-import scala.concurrent.duration.Duration
-
-import io.getquill.{Literal, PostgresNdbcContext, TestEntities}
-import io.getquill.context.sql.{TestDecoders, TestEncoders}
-import io.trane.future.scala.{Await, Future}
-
-class TestContext
-  extends PostgresNdbcContext(Literal, "testPostgresDB")
-  with TestEntities
-  with TestEncoders
-  with TestDecoders {
-
-  def get[T](f: Future[T]): T = Await.result(f, Duration.Inf)
-}
diff --git a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/package.scala b/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/package.scala
deleted file mode 100644
index 5d4f61d416..0000000000
--- a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/package.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-package io.getquill.context.ndbc
-
-package object postgres {
-  object testContext extends TestContext
-}
diff --git a/quill-ndbc/src/main/scala/io/getquill/context/ndbc/NdbcContext.scala b/quill-ndbc/src/main/scala/io/getquill/context/ndbc/NdbcContext.scala
deleted file mode 100644
index 4aa8eb00cf..0000000000
--- a/quill-ndbc/src/main/scala/io/getquill/context/ndbc/NdbcContext.scala
+++ /dev/null
@@ -1,115 +0,0 @@
-package io.getquill.context.ndbc
-
-import io.getquill.context.{ExecutionInfo, ContextTranslateProto}
-import io.getquill.context.sql.idiom.SqlIdiom
-import io.getquill.{NamingStrategy, ReturnAction}
-import io.trane.future.scala.{Await, Future, Promise}
-import io.trane.ndbc.{DataSource, PreparedStatement, Row}
-
-import scala.concurrent.duration.Duration
-
-abstract class NdbcContext[I <: SqlIdiom, +N <: NamingStrategy, P <: PreparedStatement, R <: Row](
-  override val idiom: I,
-  override val naming: N,
-  val dataSource: DataSource[P, R]
-) extends NdbcContextBase[I, N, P, R]
-  with ContextTranslateProto {
-
-  override type Result[T] = Future[T]
-  override type RunQueryResult[T] = List[T]
-  override type RunQuerySingleResult[T] = T
-  override type RunActionResult = Long
-  override type RunActionReturningResult[T] = T
-  override type RunBatchActionResult = List[Long]
-  override type RunBatchActionReturningResult[T] = List[T]
-
-  override implicit protected val resultEffect: NdbcContextBase.ContextEffect[Future, Unit] = NdbcContext.ContextEffect
-
-  override type TranslateResult[T] = Future[T]
-
-  def wrap[T](t: => T): Future[T] = resultEffect.wrap(t)
-  def push[A, B](result: Future[A])(f: A => B): Future[B] = resultEffect.push(result)(f)
-  def seq[A](list: List[Future[A]]): Future[List[A]] = resultEffect.seq(list)
-
-  // Need explicit return-type annotations due to scala/bug#8356. Otherwise macro system will not understand Result[Long]=Long etc...
-  override def executeAction(sql: String, prepare: Prepare = identityPrepare)(
-    info: ExecutionInfo,
-    dc: Runner
-  ): Future[Long] =
-    super.executeAction(sql, prepare)(info, dc)
-
-  override def executeQuery[T](
-    sql: String,
-    prepare: Prepare = identityPrepare,
-    extractor: Extractor[T] = identityExtractor
-  )(info: ExecutionInfo, dc: Runner): Future[List[T]] =
-    super.executeQuery(sql, prepare, extractor)(info, dc)
-
-  override def executeQuerySingle[T](
-    sql: String,
-    prepare: Prepare = identityPrepare,
-    extractor: Extractor[T] = identityExtractor
-  )(info: ExecutionInfo, dc: Runner): Future[T] =
-    super.executeQuerySingle(sql, prepare, extractor)(info, dc)
-
-  override def executeActionReturning[O](
-    sql: String,
-    prepare: Prepare = identityPrepare,
-    extractor: Extractor[O],
-    returningBehavior: ReturnAction
-  )(info: ExecutionInfo, dc: Runner): Future[O] =
-    super.executeActionReturning(sql, prepare, extractor, returningBehavior)(info, dc)
-
-  override def executeBatchAction(groups: List[BatchGroup])(info: ExecutionInfo, dc: Runner): Future[List[Long]] =
-    super.executeBatchAction(groups)(info, dc)
-
-  override def executeBatchActionReturning[T](groups: List[BatchGroupReturning], extractor: Extractor[T])(
-    info: ExecutionInfo,
-    dc: Runner
-  ): Future[List[T]] =
-    super.executeBatchActionReturning(groups, extractor)(info, dc)
-
-  override def transaction[T](f: => Future[T]): Future[T] = super.transaction(f)
-
-  /* TODO: I'm assuming that we don't need to bracket and close the dataSource like with JDBC
-     because previously it wasn't done here either */
-  override def withDataSource[T](f: DataSource[P, R] => Future[T]): Future[T] = f(dataSource)
-
-  def close(): Unit = {
-    dataSource.close()
-    ()
-  }
-
-  override private[getquill] def prepareParams(statement: String, prepare: Prepare): Future[Seq[String]] =
-    withDataSource { _ =>
-      resultEffect.wrap(prepare(createPreparedStatement(statement), ())._1.reverse.map(prepareParam))
-    }
-}
-
-object NdbcContext {
-  object ContextEffect extends NdbcContextBase.ContextEffect[Future, Unit] {
-    override def wrap[T](t: => T): Future[T] = Future(t)
-
-    // noinspection DuplicatedCode
-    override def wrapAsync[T](f: (Complete[T]) => Unit): Future[T] = {
-      val p = Promise[T]()
-      f { complete =>
-        p.complete(complete)
-        ()
-      }
-      p.future
-    }
-
-    override def toFuture[T](eff: Future[T], ec: Unit): Future[T] = eff
-
-    override def fromDeferredFuture[T](f: Unit => Future[T]): Future[T] = f(())
-
-    override def push[A, B](a: Future[A])(f: A => B): Future[B] = a.map(f)
-
-    override def flatMap[A, B](a: Future[A])(f: A => Future[B]): Future[B] = a.flatMap(f)
-
-    override def seq[T](list: List[Future[T]]): Future[List[T]] = Future.sequence(list)
-
-    override def runBlocking[T](eff: Future[T], timeout: Duration): T = Await.result(eff, timeout)
-  }
-}
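Note: NdbcContextBase.ContextEffect (deleted below) is the seam that kept the base context effect-agnostic: the shared SQL execution logic only ever sequences through these abstract operations, and each concrete context supplies an instance for its own effect type, as object NdbcContext.ContextEffect above does for Trane's Future. A stripped-down sketch of the idea using scala.concurrent.Future (a simplified illustration of the pattern, not the deleted trait itself):

    import scala.concurrent.{ExecutionContext, Future}
    import ExecutionContext.Implicits.global

    // A minimal effect abstraction: just enough to write effect-polymorphic logic.
    trait Effect[F[_]] {
      def wrap[T](t: => T): F[T]
      def push[A, B](fa: F[A])(f: A => B): F[B]
      def flatMap[A, B](fa: F[A])(f: A => F[B]): F[B]
      def seq[A](list: List[F[A]]): F[List[A]]
    }

    // One instance per runtime; the deleted code provided one for Trane futures.
    implicit object FutureEffect extends Effect[Future] {
      def wrap[T](t: => T): Future[T] = Future(t)
      def push[A, B](fa: Future[A])(f: A => B): Future[B] = fa.map(f)
      def flatMap[A, B](fa: Future[A])(f: A => Future[B]): Future[B] = fa.flatMap(f)
      def seq[A](list: List[Future[A]]): Future[List[A]] = Future.sequence(list)
    }

    // Shared logic is written once against F and works for any Effect instance.
    def runAndCount[F[_]](actions: List[F[Long]])(implicit E: Effect[F]): F[Long] =
      E.push(E.seq(actions))(_.sum)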
diff --git a/quill-ndbc/src/main/scala/io/getquill/context/ndbc/NdbcContextBase.scala b/quill-ndbc/src/main/scala/io/getquill/context/ndbc/NdbcContextBase.scala
deleted file mode 100644
index 0769874a52..0000000000
--- a/quill-ndbc/src/main/scala/io/getquill/context/ndbc/NdbcContextBase.scala
+++ /dev/null
@@ -1,170 +0,0 @@
-package io.getquill.context.ndbc
-
-import java.util
-import java.util.concurrent.Executors
-import java.util.function.Supplier
-import io.getquill._
-import io.getquill.context.ExecutionInfo
-import io.getquill.context.sql.SqlContext
-import io.getquill.context.sql.idiom.SqlIdiom
-import io.getquill.ndbc.TraneFutureConverters._
-import io.getquill.util.ContextLogger
-import io.trane.future.FuturePool
-import io.trane.future.scala.{Future, toScalaFuture}
-import io.trane.ndbc.{DataSource, PreparedStatement, Row}
-
-import scala.annotation.tailrec
-import scala.collection.mutable.ArrayBuffer
-import scala.concurrent.duration.Duration
-import scala.language.{higherKinds, implicitConversions}
-import scala.util.Try
-
-object NdbcContextBase {
-  trait ContextEffect[F[_], FutureExecutionContext_] {
-    final type Complete[T] = (Try[T] => Unit)
-
-    def wrap[T](t: => T): F[T]
-    def seq[A](f: List[F[A]]): F[List[A]]
-    def push[A, B](result: F[A])(f: A => B): F[B]
-
-    final type FutureExecutionContext = FutureExecutionContext_
-    type Runner = Unit
-
-    def wrapAsync[T](f: Complete[T] => Unit): F[T]
-
-    def wrapFromFuture[T](fut: Future[T]): F[T] = wrapAsync(fut.onComplete)
-
-    def toFuture[T](eff: F[T], ec: this.FutureExecutionContext): Future[T]
-
-    def fromDeferredFuture[T](f: (this.FutureExecutionContext) => Future[T]): F[T]
-
-    def flatMap[A, B](a: F[A])(f: A => F[B]): F[B]
-    def traverse[A, B](list: List[A])(f: A => F[B]) = seq(list.map(f))
-
-    def runBlocking[T](eff: F[T], timeout: Duration): T
-  }
-}
-
-trait NdbcContextBase[+Idiom <: SqlIdiom, +Naming <: NamingStrategy, P <: PreparedStatement, R <: Row]
-  extends SqlContext[Idiom, Naming] {
-
-  private[getquill] val logger = ContextLogger(classOf[NdbcContext[_, _, _, _]])
-
-  final override type PrepareRow = P
-  final override type ResultRow = R
-  override type Session = Unit
-  type Runner = Unit
-
-  protected implicit val resultEffect: NdbcContextBase.ContextEffect[Result, _]
-  import resultEffect._
-
-  protected def withDataSource[T](f: DataSource[P, R] => Result[T]): Result[T]
-
-  final protected def withDataSourceFromFuture[T](f: DataSource[P, R] => Future[T]): Result[T] =
-    withDataSource(ds => resultEffect.wrapFromFuture(f(ds)))
-
-  protected def createPreparedStatement(sql: String): P
-
-  protected def expandAction(sql: String, returningAction: ReturnAction) = sql
-
-  def executeQuery[T](
-    sql: String,
-    prepare: Prepare = identityPrepare,
-    extractor: (R, Session) => T = (r: R, s: Session) => r
-  )(info: ExecutionInfo, dc: Runner): Result[List[T]] =
-    withDataSourceFromFuture { ds =>
-      val (params, ps) = prepare(createPreparedStatement(sql), ())
-      logger.logQuery(sql, params)
-
-      ds.query(ps).toScala.map { rs =>
-        extractResult(rs.iterator, (r: R) => extractor(r, ()))
-      }
-    }
-
-  def executeQuerySingle[T](
-    sql: String,
-    prepare: Prepare = identityPrepare,
-    extractor: (R, Session) => T = (r: R, s: Session) => r
-  )(info: ExecutionInfo, dc: Runner): Result[T] =
-    push(executeQuery(sql, prepare, extractor)(info, dc))(handleSingleResult(sql, _))
-
-  def executeAction(sql: String, prepare: Prepare = identityPrepare)(info: ExecutionInfo, dc: Runner): Result[Long] =
-    withDataSourceFromFuture { ds =>
-      val (params, ps) = prepare(createPreparedStatement(sql), ())
-      logger.logQuery(sql, params)
-      ds.execute(ps).toScala.map(_.longValue)
-    }
-
-  def executeActionReturning[O](
-    sql: String,
-    prepare: Prepare = identityPrepare,
-    extractor: (R, Session) => O,
-    returningAction: ReturnAction
-  )(info: ExecutionInfo, dc: Runner): Result[O] =
-    push(executeActionReturningMany[O](sql, prepare, extractor, returningAction)(info, dc))(handleSingleResult(sql, _))
-
-  def executeActionReturningMany[O](
-    sql: String,
-    prepare: Prepare = identityPrepare,
-    extractor: (R, Session) => O,
-    returningAction: ReturnAction
-  )(info: ExecutionInfo, dc: Runner): Result[List[O]] = {
-    val expanded = expandAction(sql, returningAction)
-    executeQuery(expanded, prepare, extractor)(info, dc)
-  }
-
-  def executeBatchAction(groups: List[BatchGroup])(info: ExecutionInfo, dc: Runner): Result[List[Long]] =
-    push(
-      traverse(groups) { case BatchGroup(sql, prepares) =>
-        prepares.foldLeft(wrap(ArrayBuffer.empty[Long])) { (acc, prepare) =>
-          flatMap(acc) { array =>
-            push(executeAction(sql, prepare)(info, dc))(array :+ _)
-          }
-        }
-      }
-    )(_.flatten)
-
-  // TODO: Should this be blocking? Previously it was just a Future wrapped in a Try, which makes no sense
-  def probe(sql: String): Try[_] =
-    Try(runBlocking(withDataSourceFromFuture(_.query(sql).toScala), Duration.Inf))
-
-  def executeBatchActionReturning[T](
-    groups: List[BatchGroupReturning],
-    extractor: (R, Session) => T
-  )(info: ExecutionInfo, dc: Runner): Result[List[T]] =
-    push(
-      traverse(groups) { case BatchGroupReturning(sql, column, prepare) =>
-        prepare.foldLeft(wrap(ArrayBuffer.empty[T])) { (acc, prepare) =>
-          flatMap(acc) { array =>
-            push(executeActionReturning(sql, prepare, extractor, column)(info, dc))(array :+ _)
-          }
-        }
-      }
-    )(_.flatten)
-
-  @tailrec
-  private def extractResult[T](rs: util.Iterator[R], extractor: R => T, acc: List[T] = Nil): List[T] =
-    if (rs.hasNext)
-      extractResult(rs, extractor, extractor(rs.next()) :: acc)
-    else
-      acc.reverse
-
-  def transaction[T](f: => Result[T]): Result[T] = withDataSource { ds =>
-    /* TODO: I'm assuming that we don't need to turn autocommit off/on for streaming because I can't
-       find any way to do so with the NDBC DataSource and it seems to handle streaming on its own */
-
-    implicit def javaSupplier[S](s: => S): Supplier[S] = new Supplier[S] {
-      override def get = s
-    }
-
-    val javaFuturePool = FuturePool.apply(Executors.newCachedThreadPool())
-
-    resultEffect.fromDeferredFuture(implicit scheduler =>
-      javaFuturePool.isolate(
-        ds.transactional {
-          resultEffect.toFuture(f, scheduler).toJava
-        }
-      )
-    )
-  }
-}
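Note: in executeBatchAction and executeBatchActionReturning above, the statements of a group are deliberately not run concurrently: the foldLeft threads the accumulator through flatMap, so each prepared statement starts only after the previous one has completed. The same shape with plain scala.concurrent futures (a sketch; runOne is a hypothetical stand-in for executeAction):

    import scala.concurrent.{ExecutionContext, Future}
    import ExecutionContext.Implicits.global

    // Hypothetical stand-in for executing one prepared statement.
    def runOne(param: Int): Future[Long] = Future(param.toLong)

    // Fold the inputs into a single future, starting each action only after
    // the previous one completes and appending its result to the accumulator.
    def runSequentially(params: List[Int]): Future[List[Long]] =
      params.foldLeft(Future.successful(List.empty[Long])) { (acc, param) =>
        acc.flatMap(results => runOne(param).map(results :+ _))
      }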
diff --git a/quill-ndbc/src/main/scala/io/getquill/context/ndbc/NdbcContextConfig.scala b/quill-ndbc/src/main/scala/io/getquill/context/ndbc/NdbcContextConfig.scala
deleted file mode 100644
index ed5b7aa5ff..0000000000
--- a/quill-ndbc/src/main/scala/io/getquill/context/ndbc/NdbcContextConfig.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-package io.getquill.context.ndbc
-
-import java.util.Properties
-import scala.util.control.NonFatal
-import com.typesafe.config.Config
-import io.trane.ndbc.DataSource
-
-case class NdbcContextConfig(config: Config) {
-
-  private def configProperties = {
-    import scala.jdk.CollectionConverters._
-    val p = new Properties
-    for (entry <- config.entrySet.asScala)
-      p.setProperty(entry.getKey, entry.getValue.unwrapped.toString)
-    p
-  }
-
-  def dataSource =
-    try
-      DataSource.fromProperties("ndbc", configProperties)
-    catch {
-      case NonFatal(ex) =>
-        throw new IllegalStateException(s"Failed to load data source for config: '$config'", ex)
-    }
-}
diff --git a/quill-ndbc/src/main/scala/io/getquill/ndbc/TraneFutureConverters.scala b/quill-ndbc/src/main/scala/io/getquill/ndbc/TraneFutureConverters.scala
deleted file mode 100644
index 3f49f0e761..0000000000
--- a/quill-ndbc/src/main/scala/io/getquill/ndbc/TraneFutureConverters.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-package io.getquill.ndbc
-
-import io.trane.future.scala.{toJavaFuture, toScalaFuture, Future => TFutureS, Promise => TPromiseS}
-import io.trane.future.{Future => TFutureJ}
-
-import scala.concurrent.{ExecutionContext, Future, Promise}
-import scala.language.implicitConversions
-
-object TraneFutureConverters {
-  implicit def traneScalaToScala[T](tFuture: TFutureS[T]): Future[T] = {
-    val promise = Promise[T]()
-    tFuture.onComplete(promise.complete)
-    promise.future
-  }
-
-  implicit def traneJavaToScala[T](jFuture: TFutureJ[T]): Future[T] =
-    traneScalaToScala(jFuture.toScala)
-
-  implicit def scalaToTraneScala[T](future: Future[T])(implicit ec: ExecutionContext): TFutureS[T] = {
-    val promise = TPromiseS[T]()
-    future.onComplete(promise.complete)
-    promise.future
-  }
-
-  implicit def scalaToTraneJava[T](future: Future[T])(implicit ec: ExecutionContext): TFutureJ[T] =
-    scalaToTraneScala(future).toJava
-
-  implicit def traneScalaToTraneJava[T](future: TFutureS[T]): TFutureJ[T] =
-    future.toJava
-
-  implicit def traneJavaToTraneScala[T](future: TFutureJ[T]): TFutureS[T] =
-    future.toScala
-}
diff --git a/quill-ndbc/src/test/scala/io/getquill/context/ndbc/NdbcContextEffectSpec.scala b/quill-ndbc/src/test/scala/io/getquill/context/ndbc/NdbcContextEffectSpec.scala
deleted file mode 100644
index 362fdd6498..0000000000
--- a/quill-ndbc/src/test/scala/io/getquill/context/ndbc/NdbcContextEffectSpec.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-package io.getquill.context.ndbc
-
-import io.getquill.base.Spec
-import scala.concurrent.duration.Duration
-import io.trane.future.scala.{Await, Future, Promise}
-
-import scala.util.Try
-
-class NdbcContextEffectSpec extends Spec {
-
-  def get[T](f: Future[T]): T = Await.result(f, Duration.Inf)
-
-  import NdbcContext.ContextEffect._
-
-  "evaluates simple values" in {
-    val future = wrap("He-man")
-    get(future) mustEqual "He-man"
-  }
-
-  "evaluates asynchronous values" in {
-    get(wrapAsync[String] { doComplete =>
-      Future(Thread.sleep(100)).onComplete { _ =>
-        doComplete(Try("hello"))
-        ()
-      }
-    }) mustEqual "hello"
-  }
-
-  "encapsulates exception throw" in {
-    val future = wrap(throw new RuntimeException("Surprise!")).failed
-    get(future).getMessage mustEqual "Surprise!"
-  }
-
-  "pushes an effect correctly" in {
-    get(push(Future(1))(_ + 1)) mustEqual 2
-  }
-
-  "executes effects in sequence" in {
-    get(wrap(2).flatMap(prev => wrap(prev + 3))) mustEqual 5
-  }
-
-  "converts a sequence correctly" in {
-    get(seq(List(Future(1), Future(2), Future(3)))) mustEqual List(1, 2, 3)
-  }
-
-  "converts to Scala Future correctly" in {
-    val p = Promise[Unit]()
-    val f = toFuture(p.future, ())
-
-    f.isCompleted mustEqual false
-    p.complete(Try(()))
-    f.isCompleted mustEqual true
-  }
-
-  "creates Future from deferred Future" in {
-    val f = fromDeferredFuture((_) => Future.successful("hello"))
-    get(f) mustEqual "hello"
-  }
-
-  "runs blockingly" in {
-    runBlocking(Future { Thread.sleep(100); "foo" }, Duration.Inf) mustEqual "foo"
-  }
-}
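Note: TraneFutureConverters above bridges future types in the standard way: allocate a Promise on the target side and complete it from the source future's completion callback; NdbcContextEffectSpec exercises the same pattern through wrapAsync and toFuture. The technique is library-agnostic, as this self-contained sketch against java.util.concurrent.CompletableFuture shows:

    import java.util.concurrent.CompletableFuture
    import scala.concurrent.{Future, Promise}

    // Complete a Scala Promise from the Java future's completion callback.
    def toScalaFuture[T](cf: CompletableFuture[T]): Future[T] = {
      val promise = Promise[T]()
      cf.whenComplete { (value: T, error: Throwable) =>
        if (error != null) promise.failure(error)
        else promise.success(value)
      }
      promise.future
    }

    // Usage: an already-completed CompletableFuture becomes a completed Future.
    val done: Future[String] = toScalaFuture(CompletableFuture.completedFuture("hello"))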