diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md index c60f4bf54b..d2b7b57043 100644 --- a/.github/ISSUE_TEMPLATE.md +++ b/.github/ISSUE_TEMPLATE.md @@ -10,7 +10,7 @@ This template isn't a strict requirement to open issues, but please try to provi ### Steps to reproduce the behavior -If the issue can be reproduced using a [mirror context](http://zio.dev/zio-quill/contexts#mirror-context), please provide a scastie snippet that reproduces it. See https://scastie.scala-lang.org/fwbrasil/Z2CeR2qHQJK6EyQWUBhANA as an example. Remember to select the correct Quill version in the left menu. +If the issue can be reproduced using a [mirror context](https://zio.dev/zio-quill/contexts#mirror-context), please provide a scastie snippet that reproduces it. See https://scastie.scala-lang.org/fwbrasil/Z2CeR2qHQJK6EyQWUBhANA as an example. Remember to select the correct Quill version in the left menu. ### Workaround diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index e7eac2127b..31e4c7aada 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -68,7 +68,7 @@ members of the project's leadership. ## Attribution This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, -available at [http://contributor-covenant.org/version/1/4][version] +available at [https://contributor-covenant.org/version/1/4][version] -[homepage]: http://contributor-covenant.org -[version]: http://contributor-covenant.org/version/1/4/ +[homepage]: https://contributor-covenant.org +[version]: https://contributor-covenant.org/version/1/4/ diff --git a/README.md b/README.md index 58cad77d56..ba84a3c0e9 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ # ZIO Quill -Quill provides a Quoted Domain Specific Language ([QDSL](http://homepages.inf.ed.ac.uk/wadler/papers/qdsl/qdsl.pdf)) to express queries in Scala and execute them in a target language. 
+Quill provides a Quoted Domain Specific Language ([QDSL](https://homepages.inf.ed.ac.uk/wadler/papers/qdsl/qdsl.pdf)) to express queries in Scala and execute them in a target language. [![Production Ready](https://img.shields.io/badge/Project%20Stage-Production%20Ready-brightgreen.svg)](https://github.com/zio/zio/wiki/Project-Stages) ![CI Badge](https://github.com/zio/zio-quill/workflows/CI/badge.svg) [![Sonatype Releases](https://img.shields.io/nexus/r/https/oss.sonatype.org/io.getquill/quill-core_2.12.svg?label=Sonatype%20Release)](https://oss.sonatype.org/content/repositories/releases/io/getquill/quill-core_2.12/) [![Sonatype Snapshots](https://img.shields.io/nexus/s/https/oss.sonatype.org/io.getquill/quill-core_2.12.svg?label=Sonatype%20Snapshot)](https://oss.sonatype.org/content/repositories/snapshots/io/getquill/quill-core_2.12/) [![javadoc](https://javadoc.io/badge2/io.getquill/zio-quill-docs_2.12/javadoc.svg)](https://javadoc.io/doc/io.getquill/zio-quill-docs_2.12) [![ZIO Quill](https://img.shields.io/github/stars/zio/zio-quill?style=social)](https://github.com/zio/zio-quill) @@ -71,11 +71,11 @@ You can notify all current maintainers using the handle `@getquill/maintainers`. ## Acknowledgement -The project was created having Philip Wadler's talk ["A practical theory of language-integrated query"](http://www.infoq.com/presentations/theory-language-integrated-query) as its initial inspiration. The development was heavily influenced by the following papers: +The project was created having Philip Wadler's talk ["A practical theory of language-integrated query"](https://www.infoq.com/presentations/theory-language-integrated-query) as its initial inspiration. 
The development was heavily influenced by the following papers: -* [A Practical Theory of Language-Integrated Query](http://homepages.inf.ed.ac.uk/slindley/papers/practical-theory-of-linq.pdf) -* [Everything old is new again: Quoted Domain Specific Languages](http://homepages.inf.ed.ac.uk/wadler/papers/qdsl/qdsl.pdf) -* [The Flatter, the Better](http://db.inf.uni-tuebingen.de/staticfiles/publications/the-flatter-the-better.pdf) +* [A Practical Theory of Language-Integrated Query](https://homepages.inf.ed.ac.uk/slindley/papers/practical-theory-of-linq.pdf) +* [Everything old is new again: Quoted Domain Specific Languages](https://homepages.inf.ed.ac.uk/wadler/papers/qdsl/qdsl.pdf) +* [The Flatter, the Better](https://db.inf.uni-tuebingen.de/staticfiles/publications/the-flatter-the-better.pdf) ## License diff --git a/build.sbt b/build.sbt index a1a39d6ff5..907324ffab 100644 --- a/build.sbt +++ b/build.sbt @@ -931,7 +931,7 @@ lazy val releaseSettings = Seq( homepage := Some(url("https://zio.dev/zio-quill/")), licenses := List(("Apache License 2.0", url("http://www.apache.org/licenses/LICENSE-2.0"))), developers := List( - Developer("fwbrasil", "Flavio W. Brasil", "", url("http://github.com/fwbrasil")), + Developer("fwbrasil", "Flavio W. Brasil", "", url("https://github.com/fwbrasil")), Developer("deusaquilus", "Alexander Ioffe", "", url("https://github.com/deusaquilus")) ), scmInfo := Some( @@ -963,11 +963,11 @@ lazy val docs = project |

|""".stripMargin, readmeAcknowledgement := - """|The project was created having Philip Wadler's talk ["A practical theory of language-integrated query"](http://www.infoq.com/presentations/theory-language-integrated-query) as its initial inspiration. The development was heavily influenced by the following papers: + """|The project was created having Philip Wadler's talk ["A practical theory of language-integrated query"](https://www.infoq.com/presentations/theory-language-integrated-query) as its initial inspiration. The development was heavily influenced by the following papers: | - |* [A Practical Theory of Language-Integrated Query](http://homepages.inf.ed.ac.uk/slindley/papers/practical-theory-of-linq.pdf) - |* [Everything old is new again: Quoted Domain Specific Languages](http://homepages.inf.ed.ac.uk/wadler/papers/qdsl/qdsl.pdf) - |* [The Flatter, the Better](http://db.inf.uni-tuebingen.de/staticfiles/publications/the-flatter-the-better.pdf)""".stripMargin, + |* [A Practical Theory of Language-Integrated Query](https://homepages.inf.ed.ac.uk/slindley/papers/practical-theory-of-linq.pdf) + |* [Everything old is new again: Quoted Domain Specific Languages](https://homepages.inf.ed.ac.uk/wadler/papers/qdsl/qdsl.pdf) + |* [The Flatter, the Better](https://db.inf.uni-tuebingen.de/staticfiles/publications/the-flatter-the-better.pdf)""".stripMargin, readmeMaintainers := """|- @deusaquilus (lead maintainer) |- @fwbrasil (creator) diff --git a/build/m1/README.MD b/build/m1/README.MD index 5c6c631eda..e30f4064c8 100644 --- a/build/m1/README.MD +++ b/build/m1/README.MD @@ -3,7 +3,7 @@ In order to get this project to build on a Mac with an M1 or later chip, you wil 1. Ensure your Docker Desktop has enough resources. We recommend you set it for at least 4 CPUs and 8.25GB of RAM. Anything less will likely result in mysterious hangs/crashes and much wailing and gnashing of teeth during the build process. ![docker-resources.png](docker-resources.png) -2. 
Enable the Experimental 'Big Sur Virtualization'. Doing so will reduce your build time by nearly 1 hour! On a Macbook Air with 16gb of ram and the 1st gen M1 chip - the build (w/o codegen) should take around 25 minutes give or take with this setting enabled. +2. Enable the Experimental 'Big Sur Virtualization'. Doing so will reduce your build time by nearly 1 hour! On a MacBook Air with 16gb of ram and the 1st gen M1 chip - the build (w/o codegen) should take around 25 minutes give or take with this setting enabled. ![img.png](experimental.png) 3. Use the `docker-compose-m1.yml` file instead of the default `docker-compose.yml` - this adjusts the platform where necessary to match up with the ARM based M1 chip. @@ -28,7 +28,7 @@ Be nice to your M1 system - stop your services - don't down them. Running `dock If you plan to have some rapid build/test cycles - run: `docker-compose -f docker-compose-m1.yml stop` to stop your services. It will preserve the volumes that were created when you ran setup. It'll save you a good amount of time. ## Build With a Specific Scala Version -By default the build executes with Scala 2.13. Not horrible - but if you want to take advantage of the improved compiler of a more recent 2.x verison of Scala you can specify that! +By default the build executes with Scala 2.13. Not horrible - but if you want to take advantage of the improved compiler of a more recent 2.x version of Scala you can specify that! You can simply set the `quill.scala.version` when you start your build: `docker-compose -f docker-compose-m1.yml run sbt sbt -Dquill.scala.version=2.13.6 -Dmodules=db test` diff --git a/build/release.sh b/build/release.sh index d0ac13bf1c..4c5d469832 100755 --- a/build/release.sh +++ b/build/release.sh @@ -127,7 +127,7 @@ then if [[ $ARTIFACT == "publish" ]]; then echo "No-Op Publish for Non Release Snapshot Branch"; fi else VERSION_FILE=$(cat version.sbt) - echo "Github actions branch was: ${BRANCH} and version file is $VERSION_FILE. 
Not Sure what to do." + echo "GitHub actions branch was: ${BRANCH} and version file is $VERSION_FILE. Not Sure what to do." fi else echo "PULL_REQUEST is not 'false' ($PULL_REQUEST). Not doing a release." diff --git a/build/setup_db_scripts.sh b/build/setup_db_scripts.sh index 777db72979..f451d8da08 100755 --- a/build/setup_db_scripts.sh +++ b/build/setup_db_scripts.sh @@ -133,7 +133,7 @@ function setup_sqlserver() { /opt/mssql-tools/bin/sqlcmd -S $1 -U SA -P "QuillRocks!" -d quill_test -i $2 } -# Do a simple necat poll to make sure the oracle database is ready. +# Do a simple netcat poll to make sure the oracle database is ready. # All internal database creation and schema setup scripts are handled # by the container and docker-compose steps. diff --git a/docs/CONTRIBUTING.md b/docs/CONTRIBUTING.md index f1a6611d33..f4e868956a 100644 --- a/docs/CONTRIBUTING.md +++ b/docs/CONTRIBUTING.md @@ -167,18 +167,18 @@ Finally, you can use `sbt` locally. ### All In One ### -To restart the database services, rebuild them, and start with locally explosed ports run: +To restart the database services, rebuild them, and start with locally exposed ports run: docker-compose down && docker-compose build && docker-compose run --rm --service-ports setup Note: Make sure you have exposed all the ports as mentioned above. -## Debugging using Intellij +## Debugging using IntelliJ -[Intellij](https://www.jetbrains.com/idea/) has a comprehensive debugger that also works with macros which is very -helpful when working on Quill. There are two ways to debug Quill macros using Intellij. The first way is to launch SBT in -debug mode and use Intellij to remote debug it. The second way is to launch a debug session -from Intellij from the "Run/Debug Configurations" menu. +[IntelliJ](https://www.jetbrains.com/idea/) has a comprehensive debugger that also works with macros which is very +helpful when working on Quill. There are two ways to debug Quill macros using IntelliJ. 
The first way is to launch SBT in +debug mode and use IntelliJ to remote debug it. The second way is to launch a debug session +from IntelliJ from the "Run/Debug Configurations" menu. ### Debug Macros by Remote Debugging SBT @@ -190,10 +190,10 @@ After this you need to launch sbt with `sbt -jvm-debug 5005`. Note that since th recommended to launch sbt with additional memory, i.e. `sbt -jvm-debug 5005 -mem 4096` otherwise sbt may complain about having memory issues. -Then in Intellij you need to +Then in IntelliJ you need to [add a remote configuration](https://www.jetbrains.com/help/idea/run-debug-configuration-remote-debug.html). The default parameters will work fine (note that we started sbt with the debug port `5005` which is also the default debug port -in Intellij). After you have added the configuration you should be able to start it to start debugging! Feel to free +in IntelliJ). After you have added the configuration you should be able to start it to start debugging! Feel free to add breakpoints to step through the code. Note that its possible to debug macros (you can even @@ -204,12 +204,12 @@ invocations are cached on a file basis. You can easily do this just by adding ne ### Debug Macros by Launching a Session Firstly, you will need to build Quill with some additional dependencies that include the file `scala.tools.nsc.Main`. -You can do this adding the argument `-DdebugMacro=true` to the sbt launcher. You can do this in the Intellij SBT +You can do this adding the argument `-DdebugMacro=true` to the sbt launcher. You can do this in the IntelliJ SBT menu: -![Intellij-SBT-Settings.png](etc/Intellij-SBT-Settings.png) +![IntelliJ-SBT-Settings.png](etc/IntelliJ-SBT-Settings.png) -In Intellij, go to `Run -> Edit Configurations...` click on the Plus (i.e. `+`) button (or `Add New Configuration`) +In IntelliJ, go to `Run -> Edit Configurations...` click on the Plus (i.e. `+`) button (or `Add New Configuration`) and select `Application`. 
Then enter the following settings: ``` @@ -222,12 +222,12 @@ Build, no error check (make sure to set this since you will frequently want to d ``` It should look like this: -![Intellij-Run-Debug-Config.png](etc/Intellij-Run-Debug-Config.png) +![IntelliJ-Run-Debug-Config.png](etc/IntelliJ-Run-Debug-Config.png) > NOTE In this example, our entry-point into Quill-macro-debugging is `MySqlTest.scala`. -> In our Intellij application configuration this file name is being explicitly specified.
+> In our IntelliJ application configuration this file name is being explicitly specified.
> If you wish to easily be able to macro-debug multiple entry-point files, an alternative method would be to -> use some Intellij variables to automatically pass whatever file is currently selected. You can do this by using +> use some IntelliJ variables to automatically pass whatever file is currently selected. You can do this by using > the configuration: > ``` > -cp $FileFQPackage$$FileName$ $FilePath$ @@ -254,17 +254,17 @@ object MySqlTest { ``` Set a breakpoint anywhere in the Quill codebase and run this configuration from the top-right menu shortcut: -![Intellij-Debug-App-Launcher](etc/Intellij-Debug-App-Launcher.png) +![IntelliJ-Debug-App-Launcher](etc/IntelliJ-Debug-App-Launcher.png) ## Additional Debug Arguments Some additional arguments you can add to your compiler's VM args provide insight into Quill's compilation: ``` --DdebugMacro=true // Enables libraries needed to debug via an Intellij Application session (default=false) +-DdebugMacro=true // Enables libraries needed to debug via an IntelliJ Application session (default=false) -DexcludeTests=false // Excludes testing code from being build. Useful during development times that require rapid iteration -Dquill.macro.log.pretty=true // Pretty print the SQL Queries that Quill produces (default=false) --Dquill.macro.log=true // Enable/Disable priting of the SQL Queries Quill generates during compile-time (default=true) +-Dquill.macro.log=true // Enable/Disable printing of the SQL Queries Quill generates during compile-time (default=true) -Dquill.trace.enabled=true // Global switch that Enables/Disables printing of Quill ASTs during compilation (default=false) -Dquill.trace.color=true // Print Quill ASTs in color (default=false) -Dquill.trace.opinion=false // Print the parts of Quill ASTs not directly used in the main transformation phases (called Opinions). 
(default=false) @@ -272,8 +272,8 @@ Some additional arguments you can add to your compiler's VM args provide insight -Dquill.trace.types=sql,standard,alias,norm // What parts of the Quill transformations to print during compilation? ``` -In Intellij, add them in the SBT settings if your are compiling using SBT: -![Intellj-SBT-Settings-Additional.png](etc/Intellj-SBT-Settings-Additional.png) +In IntelliJ, add them in the SBT settings if you are compiling using SBT: +![IntelliJ-SBT-Settings-Additional.png](etc/IntelliJ-SBT-Settings-Additional.png) ## 'Trick' Debugging via the Dynamic Query API diff --git a/docs/additional-resources.md b/docs/additional-resources.md index c2d2bddb65..198294726a 100644 --- a/docs/additional-resources.md +++ b/docs/additional-resources.md @@ -38,13 +38,13 @@ Please refer to [CASSANDRA.md](https://github.com/getquill/quill/blob/master/CAS ### Blog posts -- **[Intro]** Haoyi's Programming Blog - [Working with Databases using Scala and Quill](http://www.lihaoyi.com/post/WorkingwithDatabasesusingScalaandQuill.html) +- **[Intro]** Haoyi's Programming Blog - [Working with Databases using Scala and Quill](https://www.lihaoyi.com/post/WorkingwithDatabasesusingScalaandQuill.html) - Juliano Alves's Blog - [Streaming all the way with ZIO, Doobie, Quill, http4s and fs2](https://juliano-alves.com/2020/06/15/streaming-all-the-way-zio-doobie-quill-http4s-fs2/) - Juliano Alves's Blog - [Quill: Translating Boolean Literals](https://juliano-alves.com/2020/09/14/quill-translating-boolean-literals/) - Juliano Alves's Blog - [Quill NDBC Postgres: A New Async Module](https://juliano-alves.com/2019/11/29/quill-ndbc-postgres-a-new-async-module/) - Juliano Alves's Blog - [Contributing to Quill, a Pairing Session](https://juliano-alves.com/2019/11/18/contributing-to-quill-a-pairing-session/) - Medium @ Fwbrasil - [quill-spark: A type-safe Scala API for Spark SQL](https://medium.com/@fwbrasil/quill-spark-a-type-safe-scala-api-for-spark-sql-2672e8582b0d) -- 
Scalac.io blog - [Compile-time Queries with Quill](http://blog.scalac.io/2016/07/21/compile-time-queries-with-quill.html) +- Scalac.io blog - [Compile-time Queries with Quill](https://web.archive.org/web/20170512003505/https://blog.scalac.io/2016/07/21/compile-time-queries-with-quill.html) ## Code of Conduct diff --git a/docs/changelog.md b/docs/changelog.md index 2f8f820b4b..94b4a4dd51 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -279,7 +279,7 @@ The type `Runner` needs to be used by ProtoQuill to define quill-context-specifi #### Migration Notes: All ZIO JDBC context `run` methods have now switched from have switched their dependency (i.e. `R`) from `Has[Connection]` to -`Has[DataSource]`. This should clear up many innocent errors that have happened because how this `Has[Connecction]` is supposed +`Has[DataSource]`. This should clear up many innocent errors that have happened because how this `Has[Connection]` is supposed to be provided was unclear. As I have come to understand, nearly all DAO service patterns involve grabbing a connection from a pooled DataSource, doing one single crud operation, and then returning the connection back to the pool. The new JDBC ZIO context memorialize this pattern. @@ -384,7 +384,7 @@ to use them as well. # 3.9.0 -- [Pass Session to all Encoders/Decoders allowing UDT Encoding without local session varaible in contexts e.g. ZIO and others](https://github.com/getquill/quill/pull/2219) +- [Pass Session to all Encoders/Decoders allowing UDT Encoding without local session variable in contexts e.g. 
ZIO and others](https://github.com/getquill/quill/pull/2219) - [Fixing on-conflict case with querySchema/schemaMeta renamed columns](https://github.com/getquill/quill/pull/2218) #### Migration Notes: @@ -542,7 +542,7 @@ Similarly for quill-cassandra-zio # 3.6.1 - [Memoize Passed-By-Name Quats of Asts Ident, Entity, and Others](https://github.com/getquill/quill/pull/2084) -- [Minior Quat Fixes and More Tests](https://github.com/getquill/quill/pull/2057) +- [Minor Quat Fixes and More Tests](https://github.com/getquill/quill/pull/2057) Migration Notes: @@ -575,7 +575,7 @@ Migration Notes: // SELECT ... FROM Person p WHERE isJoe(p.name) // Becomes> SELECT ... FROM Person p WHERE 1 = isJoe(p.name) ``` - This is because the aforementioned databases not not directly support boolean literals (i.e. true/false) or expressions + This is because the aforementioned databases do not directly support boolean literals (i.e. true/false) or expressions that yield them. In some cases however, it is desirable for the above behavior not to happen and for the whole infix statement to be treated @@ -615,7 +615,7 @@ Migration Notes: // SELECT ... FROM Person p WHERE isJoe(p.name) // Becomes> SELECT ... FROM Person p WHERE 1 = isJoe(p.name) ``` - This is because the aforementioned databases not not directly support boolean literals (i.e. true/false) or expressions + This is because the aforementioned databases do not directly support boolean literals (i.e. true/false) or expressions that yield them. In some cases however, it is desirable for the above behavior not to happen and for the whole infix statement to be treated @@ -631,7 +631,7 @@ Migration Notes: // We Need This> SELECT ... FROM Person p WHERE p.age > 21 ``` If the condition represents a pure function, be sure to use `sql"...".pure.asCondition`. - - This realease is not binary compatible with any Quill version before 3.5.3. + - This release is not binary compatible with any Quill version before 3.5.3. 
- Any code generated by the Quill Code Generator with `quote { ... }` blocks will have to be regenerated with this Quill version if generated before 3.5.3. - In most SQL dialects (i.e. everything except Postgres) boolean literals and expressions yielding them are @@ -671,7 +671,7 @@ Migration Notes: Migration Notes: - - This realease is not binary compatible with any Quill version before 3.5.3. + - This release is not binary compatible with any Quill version before 3.5.3. - Any code generated by the Quill Code Generator with `quote { ... }` blocks will have to be regenerated with this Quill version if generated before 3.5.3. - In most SQL dialects (i.e. everything except Postgres) boolean literals and expressions yielding them are @@ -757,7 +757,7 @@ Migration Notes: - Much of the content in `QueryDsl` has been moved to the top-level for better portability with the upcoming Dotty implementation. This means that things like `Query` are no longer part of `Context` but now are directly in the `io.getquill` package. If you are importing `io.getquill._` your code should be unaffected. - - Custom decoders written for Finalge Postgres no longer require a `ClassTag`. + - Custom decoders written for Finagle Postgres no longer require a `ClassTag`. # 3.5.1 @@ -794,7 +794,7 @@ implementation. This means that things like `Query` are no longer part of `Conte Documentation Updates: - [Update database drivers versions to latest in docs](https://github.com/getquill/quill/pull/1617) -- [Document remote debugging in Intellij](https://github.com/getquill/quill/pull/1615) +- [Document remote debugging in IntelliJ](https://github.com/getquill/quill/pull/1615) Migration Notes: - Monix 3.0.0 is not binary compatible with 3.0.0-RC3 which was a dependency of Quill 3.4.7. @@ -885,7 +885,7 @@ idiomatic way i.e. `None == None := false`. 
See the 'equals' section of the docu # 3.2.0 - [Allow == for Option[T] and/or T columns](https://github.com/getquill/quill/pull/1437) -- [Introducing Code Genereator](https://github.com/getquill/quill/pull/1396) +- [Introducing Code Generator](https://github.com/getquill/quill/pull/1396) - [Fix variable shadowing issue in action metas](https://github.com/getquill/quill/pull/1412) - [Change effect to protected](https://github.com/getquill/quill/pull/1413) - [Update spark-sql to 2.4.1](https://github.com/getquill/quill/pull/1398) @@ -924,7 +924,7 @@ idiomatic way i.e. `None == None := false`. See the 'equals' section of the docu - [#1204 add explicit `AS` for aliases (except table context)](https://github.com/getquill/quill/pull/1252) - [sqlite dialect - translate boolean literals into 1/0](https://github.com/getquill/quill/pull/1248) - [sqlite dialect - ignore null ordering](https://github.com/getquill/quill/pull/1247) -- [fail is property is not a case acessor](https://github.com/getquill/quill/pull/1246) +- [fail is property is not a case accessor](https://github.com/getquill/quill/pull/1246) - [verify table references](https://github.com/getquill/quill/pull/1244) - [fix property renaming for nested queries within infixes](https://github.com/getquill/quill/pull/1243) - [expand map.distinct](https://github.com/getquill/quill/pull/1242) @@ -1157,7 +1157,7 @@ historical moments (`java.sql.Timestamp` extents `java.util.Date`). 
# 1.0.1 - [include SQL type info in Encoder/Decoder](https://github.com/getquill/quill/pull/588) -- [make encoder helpers and wrapper type public for quill-finangle-postgres](https://github.com/getquill/quill/pull/608) +- [make encoder helpers and wrapper type public for quill-finagle-postgres](https://github.com/getquill/quill/pull/608) - [fix property renaming normalization order](https://github.com/getquill/quill/pull/609) - [workaround compiler bug involving reflective calls](https://github.com/getquill/quill/pull/612) - [fix flat joins support](https://github.com/getquill/quill/pull/613) @@ -1223,7 +1223,7 @@ historical moments (`java.sql.Timestamp` extents `java.util.Date`). ### Migration notes * The fallback mechanism that looks for implicit encoders defined in the context instance has been removed. This means that if you don't `import context._`, you have to change the specific imports to include the encoders in use. -* `context.run` now receives only one parameter. The second parameter that used to receive runtime values now doesn't exist any more. Use [`lift` or `liftQuery`](https://github.com/getquill/quill/#bindings) instead. +* `context.run` now receives only one parameter. The second parameter that used to receive runtime values now doesn't exist anymore. Use [`lift` or `liftQuery`](https://github.com/getquill/quill/#bindings) instead. * Use [`liftQuery` + `foreach`](https://github.com/getquill/quill/#bindings) to perform batch actions and define contains/in queries. * `insert` now always receives a parameter, that [can be a case class](https://github.com/getquill/quill/#actions). - Non-lifted collections aren't supported anymore. Example: `query[Person].filter(t => List(10, 20).contains(p.age))`. Use `liftQuery` instead. @@ -1249,7 +1249,7 @@ historical moments (`java.sql.Timestamp` extents `java.util.Date`). ### Migration notes -This version [introduces `Context`](https://github.com/getquill/quill/pull/417) as a relacement for `Source`. 
This change makes the quotation creation dependent on the context to open the path for a few refactorings and improvements we're planning to work on before the `1.0-RC1` release. +This version [introduces `Context`](https://github.com/getquill/quill/pull/417) as a replacement for `Source`. This change makes the quotation creation dependent on the context to open the path for a few refactorings and improvements we're planning to work on before the `1.0-RC1` release. Migration steps: @@ -1282,7 +1282,7 @@ Migration steps: * [support contains for Traversable](https://github.com/getquill/quill/pull/290) * [`equals` support](https://github.com/getquill/quill/pull/328) * [Always return List for any type of query](https://github.com/getquill/quill/pull/324) -* [quilll-sql: support value queries](https://github.com/getquill/quill/pull/354) +* [quill-sql: support value queries](https://github.com/getquill/quill/pull/354) * [quill-sql: `in`/`contains` - support empty sets](https://github.com/getquill/quill/pull/329) * [Support `Ord` quotation](https://github.com/getquill/quill/pull/301) * [`blockParser` off-by-one error](https://github.com/getquill/quill/pull/292) diff --git a/docs/code-generation.md b/docs/code-generation.md index d3450bd9b0..f3000d3918 100644 --- a/docs/code-generation.md +++ b/docs/code-generation.md @@ -125,7 +125,7 @@ gen.writeFiles("src/main/scala/com/my/project") You can parse column and table names using either the `SnakeCaseNames` or the and the `LiteralNames` parser which are used with the respective Quill Naming Strategies. They cannot be customized further with this code generator. -The following case case classes will be generated +The following case classes will be generated ````scala // src/main/scala/com/my/project/public/Person.scala package com.my.project.public @@ -148,7 +148,7 @@ in order to generate your schemas with `querySchema` objects. The `ComposeableTraitsJdbcCodegen` enables more customized code generation. 
It allows you to determine the tables to generate entity classes for, -their naming stragety, the types for columns in Scala, +their naming strategy, the types for columns in Scala, and generates the necessary `querySchema` object in order to map the fields. Additionally, it generates a database-independent query schema trait which can be composed with a `Context` object of your choice. @@ -310,7 +310,7 @@ Here is an example of how that is done: val gen = new ComposeableTraitsJdbcCodegen(twoSchemaConfig, "com.my.project") { override def namingStrategy: EntityNamingStrategy = CustomStrategy() override val namespacer: Namespacer = - ts => if (ts.tableSchem.toLowerCase == "alpha" || ts.tableSchem.toLowerCase == "bravo") "common" else ts.tableSchem.toLowerCase + ts => if (ts.tableSchema.toLowerCase == "alpha" || ts.tableSchema.toLowerCase == "bravo") "common" else ts.tableSchema.toLowerCase // Be sure to set the querySchemaNaming correctly so that the different // querySchemas generated won't all be called '.query' in the common object (which would diff --git a/docs/contexts.md b/docs/contexts.md index e437d33ea0..f79a9d5fcd 100644 --- a/docs/contexts.md +++ b/docs/contexts.md @@ -279,7 +279,7 @@ The transformations are applied from left to right. ### Configuration -The string passed to the context is used as the key in order to obtain configurations using the [typesafe config](http://github.com/typesafehub/config) library. +The string passed to the context is used as the key in order to obtain configurations using the [typesafe config](https://github.com/typesafehub/config) library. Additionally, the contexts provide multiple constructors. 
For instance, with `JdbcContext` it's possible to specify a `DataSource` directly, without using the configuration: @@ -1777,7 +1777,7 @@ libraryDependencies ++= Seq( lazy val ctx = new OrientDBSyncContext(SnakeCase, "ctx") ``` -The configurations are set using [`OPartitionedDatabasePool`](http://orientdb.com/javadoc/latest/com/orientechnologies/orient/core/db/OPartitionedDatabasePool.html) which creates a pool of DB connections from which an instance of connection can be acquired. It is possible to set DB credentials using the parameter called `username` and `password`. +The configurations are set using [`OPartitionedDatabasePool`](https://orientdb.com/javadoc/latest/com/orientechnologies/orient/core/db/OPartitionedDatabasePool.html) which creates a pool of DB connections from which an instance of connection can be acquired. It is possible to set DB credentials using the parameter called `username` and `password`. #### application.properties ``` diff --git a/docs/etc/Intellij-Debug-App-Launcher.png b/docs/etc/IntelliJ-Debug-App-Launcher.png similarity index 100% rename from docs/etc/Intellij-Debug-App-Launcher.png rename to docs/etc/IntelliJ-Debug-App-Launcher.png diff --git a/docs/etc/Intellij-Run-Debug-Config.png b/docs/etc/IntelliJ-Run-Debug-Config.png similarity index 100% rename from docs/etc/Intellij-Run-Debug-Config.png rename to docs/etc/IntelliJ-Run-Debug-Config.png diff --git a/docs/etc/Intellj-SBT-Settings-Additional.png b/docs/etc/IntelliJ-SBT-Settings-Additional.png similarity index 100% rename from docs/etc/Intellj-SBT-Settings-Additional.png rename to docs/etc/IntelliJ-SBT-Settings-Additional.png diff --git a/docs/etc/Intellij-SBT-Settings.png b/docs/etc/IntelliJ-SBT-Settings.png similarity index 100% rename from docs/etc/Intellij-SBT-Settings.png rename to docs/etc/IntelliJ-SBT-Settings.png diff --git a/docs/extending-quill.md b/docs/extending-quill.md index 9a49fd2752..ca4cd5612b 100644 --- a/docs/extending-quill.md +++ b/docs/extending-quill.md 
@@ -57,7 +57,7 @@ run(q) // ) AS e WHERE e.value <= 100 ``` -If you are sure that the the content of your infix is a pure function, you canse use the `pure` method +If you are sure that the content of your infix is a pure function, you can use the `pure` method in order to indicate to Quill that the infix clause can be copied in the query. This gives Quill much more leeway to flatten your query, possibly improving performance. @@ -80,7 +80,7 @@ run(q) #### Summary Use `sql"...".asCondition` to express an infix that represents a conditional expression. -#### Explination +#### Explanation When synthesizing queries for databases which do not have proper boolean-type support (e.g. SQL Server, Oracle etc...) boolean infix clauses inside projections must become values. diff --git a/docs/getting-started.md b/docs/getting-started.md index 2168eea4da..9de5053589 100644 --- a/docs/getting-started.md +++ b/docs/getting-started.md @@ -7,7 +7,7 @@ sidebar_label: "Getting Started" > ### [Scastie](https://scastie.scala-lang.org/) is a great tool to try out Quill without having to prepare a local environment. It works with [mirror contexts](contexts.md#mirror-context), see [this](https://scastie.scala-lang.org/QwOewNEiR3mFlKIM7v900A) snippet as an example. Quill has integrations with many libraries. If you are using a regular RDBMS e.g. PostgreSQL -and want to use Quill to query it with an asychronous, non-blocking, reactive application, the easiest way to get +and want to use Quill to query it with an asynchronous, non-blocking, reactive application, the easiest way to get started is by using an awesome library called ZIO. A simple ZIO + Quill application looks like this: @@ -48,7 +48,7 @@ libraryDependencies ++= Seq( ``` You can find this code (with some more examples) complete with a docker-provided Postgres database [here](https://github.com/deusaquilus/zio-quill-gettingstarted). 
-A veriety of other examples using Quill with ZIO are available in the [examples](https://github.com/zio/zio-quill/tree/master/quill-jdbc-zio/src/test/scala/io/getquill/examples) folder. +A variety of other examples using Quill with ZIO are available in the [examples](https://github.com/zio/zio-quill/tree/master/quill-jdbc-zio/src/test/scala/io/getquill/examples) folder. ## Choosing a Module diff --git a/docs/index.md b/docs/index.md index c45807b0bf..be572c0479 100644 --- a/docs/index.md +++ b/docs/index.md @@ -4,7 +4,7 @@ title: "Introduction to ZIO Quill" sidebar_label: "ZIO Quill" --- -Quill provides a Quoted Domain Specific Language ([QDSL](http://homepages.inf.ed.ac.uk/wadler/papers/qdsl/qdsl.pdf)) to express queries in Scala and execute them in a target language. +Quill provides a Quoted Domain Specific Language ([QDSL](https://homepages.inf.ed.ac.uk/wadler/papers/qdsl/qdsl.pdf)) to express queries in Scala and execute them in a target language. @PROJECT_BADGES@ diff --git a/docs/logging.md b/docs/logging.md index 09614bcec5..3bdd612119 100644 --- a/docs/logging.md +++ b/docs/logging.md @@ -9,9 +9,9 @@ To write compile-time queries to a log, use the `-Dquill.log.file=queries.sql` a the file to be written (e.g. `queries.sql`). The path is based on the build root (i.e. the current-working-directory of the Java build). When using SBT, this parameter can be set either in your SBT_OPTS, the project-specific .sbtopts file or directly passed to the SBT command. -In Intellij this can be set under settings -> sbt -> VM Parameters. +In IntelliJ this can be set under settings -> sbt -> VM Parameters. -(Also make sure that `use for: "Builds"` is selected otherwise Intellij will not use SBT for the build in the first place.) +(Also make sure that `use for: "Builds"` is selected otherwise IntelliJ will not use SBT for the build in the first place.) 
![Screenshot from 2022-04-14 23-28-47](https://user-images.githubusercontent.com/1369480/163513653-b5266cd6-1bff-4792-b0d2-936d24b7e0f1.png) diff --git a/docs/quill-vs-cassandra.md b/docs/quill-vs-cassandra.md index 0d017b6708..369acc19c1 100644 --- a/docs/quill-vs-cassandra.md +++ b/docs/quill-vs-cassandra.md @@ -3,7 +3,7 @@ id: quill-vs-cassandra title: "Quill vs. Cassandra" --- -This document compares Quill to the [Datastax Java](https://github.com/datastax/java-driver) driver and the [Phantom](http://websudos.github.io/phantom/) library. This is an incomplete comparison, additions and corrections are welcome. +This document compares Quill to the [Datastax Java](https://github.com/datastax/java-driver) driver and the [Phantom](https://web.archive.org/web/20141229171448/http://websudos.github.io:80/phantom) library. This is an incomplete comparison, additions and corrections are welcome. All examples have been properly tested, and they should work out of the box. diff --git a/docs/quill-vs-slick.md b/docs/quill-vs-slick.md index cd80faa42a..78a32ffdf1 100644 --- a/docs/quill-vs-slick.md +++ b/docs/quill-vs-slick.md @@ -3,19 +3,19 @@ id: quill-vs-slick title: "Quill vs. Slick" --- -This document compares Quill to the [Typesafe Slick](http://slick.typesafe.com) library. This is an incomplete comparison, additions and corrections are welcome. +This document compares Quill to the [Typesafe Slick](https://scala-slick.org) library. This is an incomplete comparison, additions and corrections are welcome. ## Abstraction level ## Quill and Slick have similar abstraction levels. They represent database rows as flat immutable structures (case classes without nested data) and provide a type-safe composable query DSL. -Slick's documentation refers to this abstraction level as a [new paradigm called functional-relational mapping (FRM)](https://github.com/slick/slick/blob/3b3bd36c93c6d9c63b0471ff4d8409f913954b2b/slick/src/sphinx/introduction.rst#functional-relational-mapping). 
In fact, the approach is not new and was introduced in the late '90s by ["Kleisli􏰂, a Functional Query System"](https://www.comp.nus.edu.sg/~wongls/psZ/wls-jfp98-3.ps). It was also used by the [Links programming language](http://groups.inf.ed.ac.uk/links/papers/links-fmco06.pdf), and later on was popularized by [Microsoft LINQ](https://msdn.microsoft.com/en-us/library/bb425822.aspx) in a less functional manner. +Slick's documentation refers to this abstraction level as a [new paradigm called functional-relational mapping (FRM)](https://github.com/slick/slick/blob/3b3bd36c93c6d9c63b0471ff4d8409f913954b2b/slick/src/sphinx/introduction.rst#functional-relational-mapping). In fact, the approach is not new and was introduced in the late '90s by ["Kleisli􏰂, a Functional Query System"](https://www.comp.nus.edu.sg/~wongls/psZ/wls-jfp98-3.ps). It was also used by the [Links programming language](https://web.archive.org/web/20120127183323/https://groups.inf.ed.ac.uk/links/papers/links-fmco06.pdf), and later on was popularized by [Microsoft LINQ](https://msdn.microsoft.com/en-us/library/bb425822.aspx) in a less functional manner. Quill is referred as a Language Integrated Query library to match the available publications on the subject. The paper ["Language-integrated query using comprehension syntax: state of the art, open problems, and work in progress"](http://research.microsoft.com/en-us/events/dcp2014/cheney.pdf) has an overview with some of the available implementations of language integrated queries. ## QDSL versus EDSL ## -Quill's DSL is a macro-based quotation mechanism, allowing usage of Scala types and operators directly. Please refer to the paper ["Everything old is new again: Quoted Domain Specific Languages"](http://homepages.inf.ed.ac.uk/wadler/papers/qdsl/qdsl.pdf) for more details. On the other hand, Slick provides a DSL that requires lifting of types and operations to the DSL counterparts at runtime. 
Example: +Quill's DSL is a macro-based quotation mechanism, allowing usage of Scala types and operators directly. Please refer to the paper ["Everything old is new again: Quoted Domain Specific Languages"](https://homepages.inf.ed.ac.uk/wadler/papers/qdsl/qdsl.pdf) for more details. On the other hand, Slick provides a DSL that requires lifting of types and operations to the DSL counterparts at runtime. Example: **quill** ```scala @@ -88,7 +88,7 @@ val q = } ``` -Slick requires explicit type definition to map the database model to lifted values, which can be automatically generated and maintained by the [`slick-codegen`](http://slick.typesafe.com/doc/3.1.0/code-generation.html) tool. The query definition also requires special equality operators and usage of `Rep` for composable queries. +Slick requires explicit type definition to map the database model to lifted values, which can be automatically generated and maintained by the [`slick-codegen`](https://scala-slick.org/doc/3.1.0/code-generation.html) tool. The query definition also requires special equality operators and usage of `Rep` for composable queries. ## Compile-time versus Runtime ## @@ -106,7 +106,7 @@ It is common to have to write plain SQL statements when a feature is not support ## Normalization ## -Quill's normalization engine is based on the rules introduced by the paper ["A practical theory of language-integrated query"](http://www.infoq.com/presentations/theory-language-integrated-query). They ensure that, given some fulfilled requirements, the normalization will always succeed. Quill verifies these requirements at compile-time. +Quill's normalization engine is based on the rules introduced by the paper ["A practical theory of language-integrated query"](https://www.infoq.com/presentations/theory-language-integrated-query). They ensure that, given some fulfilled requirements, the normalization will always succeed. Quill verifies these requirements at compile-time. 
Unfortunately, the paper doesn't cover all SQL features supported by Quill. Some additional transformations were added to the normalization engine for this reason. diff --git a/docs/writing-queries.md b/docs/writing-queries.md index aff21c33fb..7a3b9e13e6 100644 --- a/docs/writing-queries.md +++ b/docs/writing-queries.md @@ -1478,7 +1478,7 @@ This feature is disabled by default. To enable it, mix the `QueryProbing` trait object myContext extends YourContextType with QueryProbing ``` -The context must be created in a separate compilation unit in order to be loaded at compile time. Please use [this guide](http://www.scala-sbt.org/0.13/docs/Macro-Projects.html) that explains how to create a separate compilation unit for macros, that also serves to the purpose of defining a query-probing-capable context. `context` could be used instead of `macros` as the name of the separate compilation unit. +The context must be created in a separate compilation unit in order to be loaded at compile time. Please use [this guide](https://www.scala-sbt.org/0.13/docs/Macro-Projects.html) that explains how to create a separate compilation unit for macros, that also serves to the purpose of defining a query-probing-capable context. `context` could be used instead of `macros` as the name of the separate compilation unit. The configurations correspondent to the config key must be available at compile time. You can achieve it by adding this line to your project settings: @@ -1530,7 +1530,7 @@ ctx.run(a) //: List[Long] size = 2. Contains 1 @ positions, where row was insert Just as in regular queries use the extended insert/update syntaxes to achieve finer-grained control of the data being created/modified modified. For example, if the ID is a generated value you can skip ID insertion like this: -(This can also be accomplied with an insert-meta). +(This can also be accomplished with an insert-meta). 
```scala // case class Person(id: Int, name: String, age: Int) val a = quote { @@ -1725,7 +1725,7 @@ val a = quote { ## Batch Optimization When doing batch INSERT queries (as well as UPDATE, and DELETE), Quill mostly delegates the functionality to standard JDBC batching. -This functionality works roughtly in the following way. +This functionality works roughly in the following way. ```scala val ps: PreparedStatement = connection.prepareStatement("INSERT ... VALUES ...") // 1. Iterate over the rows @@ -1801,7 +1801,7 @@ The server supports a maximum of 2100 parameters. Reduce the number of parameter This means that in SQL Server, for a batch-size of 100, you can only insert into a table of up to 21 columns. In the future, we hope to alleviate this issue by directly substituting variables into `?` variables before the query is executed -however such functionality could potentially come at the risk of SQL-injection vunerabilities. +however such functionality could potentially come at the risk of SQL-injection vulnerabilities. ## Printing Queries @@ -1971,7 +1971,7 @@ val a: IO[ctx.RunQueryResult[Person], Effect.Write with Effect.Read] = } ``` -This mechanism is useful to limit the kind of operations that can be performed. See this [blog post](http://danielwestheide.com/blog/2015/06/28/put-your-writes-where-your-master-is-compile-time-restriction-of-slick-effect-types.html) as an example. +This mechanism is useful to limit the kind of operations that can be performed. See this [blog post](https://danielwestheide.com/blog/2015/06/28/put-your-writes-where-your-master-is-compile-time-restriction-of-slick-effect-types.html) as an example. 
## Implicit query @@ -2362,5 +2362,5 @@ Normalization caching was introduced to improve the situation, which will speedu To disable dynamic normalization caching, pass following property to sbt during compile time ``` -sbt -Dquill.query.cacheDaynamic=false +sbt -Dquill.query.cacheDynamic=false ``` diff --git a/quill-cassandra-alpakka/src/test/scala/io/getquill/context/cassandra/alpakka/CaseClassQueryCassandraSpec.scala b/quill-cassandra-alpakka/src/test/scala/io/getquill/context/cassandra/alpakka/CaseClassQueryCassandraSpec.scala index a9749403c2..31a4be24ef 100644 --- a/quill-cassandra-alpakka/src/test/scala/io/getquill/context/cassandra/alpakka/CaseClassQueryCassandraSpec.scala +++ b/quill-cassandra-alpakka/src/test/scala/io/getquill/context/cassandra/alpakka/CaseClassQueryCassandraSpec.scala @@ -45,7 +45,7 @@ class CaseClassQueryCassandraSpec extends CassandraAlpakkaSpec { query[Contact].filter(p => p.id == filtrationObject.idFilter) } - val `Ex 3 Inline Record Usage exepected result` = List( + val `Ex 3 Inline Record Usage expected result` = List( new Contact(1, "Alex", "Jones", 60, 2, "foo") ) @@ -74,7 +74,7 @@ class CaseClassQueryCassandraSpec extends CassandraAlpakkaSpec { "Example 2 - Inline Record as Filter" in { await { - testDB.run(`Ex 3 Inline Record Usage`).map(res => res mustEqual `Ex 3 Inline Record Usage exepected result`) + testDB.run(`Ex 3 Inline Record Usage`).map(res => res mustEqual `Ex 3 Inline Record Usage expected result`) } } } diff --git a/quill-cassandra-alpakka/src/test/scala/io/getquill/context/cassandra/alpakka/EncodingSpec.scala b/quill-cassandra-alpakka/src/test/scala/io/getquill/context/cassandra/alpakka/EncodingSpec.scala index 54e012375f..19f56415fd 100644 --- a/quill-cassandra-alpakka/src/test/scala/io/getquill/context/cassandra/alpakka/EncodingSpec.scala +++ b/quill-cassandra-alpakka/src/test/scala/io/getquill/context/cassandra/alpakka/EncodingSpec.scala @@ -65,15 +65,15 @@ class EncodingSpec extends EncodingSpecHelper with 
CassandraAlpakkaSpec { val ctx = testDB import ctx._ - val epoh = System.currentTimeMillis() - val epohDay = epoh / 86400000L - val instant = Instant.ofEpochMilli(epoh) + val epoch = System.currentTimeMillis() + val epochDay = epoch / 86400000L + val instant = Instant.ofEpochMilli(epoch) val zonedDateTime = ZonedDateTime.ofInstant(instant, ZoneId.systemDefault) val jq = quote(querySchema[Java8Types]("EncodingTestEntity")) - val j = Java8Types(LocalDate.ofEpochDay(epohDay), instant, Some(zonedDateTime)) + val j = Java8Types(LocalDate.ofEpochDay(epochDay), instant, Some(zonedDateTime)) val cq = quote(querySchema[CasTypes]("EncodingTestEntity")) - val c = CasTypes(LocalDate.ofEpochDay(epohDay), Instant.ofEpochMilli(epoh), Some(zonedDateTime)) + val c = CasTypes(LocalDate.ofEpochDay(epochDay), Instant.ofEpochMilli(epoch), Some(zonedDateTime)) await { for { diff --git a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/UdtEncodingMacro.scala b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/UdtEncodingMacro.scala index abb875f8db..ce005618ee 100644 --- a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/UdtEncodingMacro.scala +++ b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/UdtEncodingMacro.scala @@ -137,7 +137,7 @@ class UdtEncodingMacro(val c: MacroContext) { private def encodeUdt[T](udtType: Type) = { // The `session` variable represents CassandraSession which will either be `this` (if it is CassandraClusterSessionContext) - // or it will be `CassanraZioSession` otherwise. Either way, it should have the `udtValueOf` method. + // or it will be `CassandraZioSession` otherwise. Either way, it should have the `udtValueOf` method. // It is passed in via the context.encoder (i.e. 
$prefix.encoder) variable val trees = ListBuffer[Tree](q"val udt = session.udtValueOf(meta.name, meta.keyspace)") val (typeDefs, params) = udtFields(udtType).map { case (name, field, tpe, mapper, absType, absTypeDef, tag) => diff --git a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CaseClassQueryCassandraSpec.scala b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CaseClassQueryCassandraSpec.scala index deb81781e4..a2d4b984e9 100644 --- a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CaseClassQueryCassandraSpec.scala +++ b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CaseClassQueryCassandraSpec.scala @@ -47,7 +47,7 @@ class CaseClassQueryCassandraSpec extends Spec { query[Contact].filter(p => p.id == filtrationObject.idFilter) } - val `Ex 3 Inline Record Usage exepected result` = List( + val `Ex 3 Inline Record Usage expected result` = List( new Contact(1, "Alex", "Jones", 60, 2, "foo") ) @@ -63,6 +63,6 @@ class CaseClassQueryCassandraSpec extends Spec { } "Example 2 - Inline Record as Filter" in { - testSyncDB.run(`Ex 3 Inline Record Usage`) mustEqual `Ex 3 Inline Record Usage exepected result` + testSyncDB.run(`Ex 3 Inline Record Usage`) mustEqual `Ex 3 Inline Record Usage expected result` } } diff --git a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/EncodingSpec.scala b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/EncodingSpec.scala index 33aea5a47f..a80db1de43 100644 --- a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/EncodingSpec.scala +++ b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/EncodingSpec.scala @@ -86,15 +86,15 @@ class EncodingSpec extends EncodingSpecHelper { val ctx = testSyncDB import ctx._ - val epoh = System.currentTimeMillis() - val epohDay = epoh / 86400000L - val instant = Instant.ofEpochMilli(epoh) + val epoch = System.currentTimeMillis() + val epochDay = epoch / 86400000L + val instant = Instant.ofEpochMilli(epoch) val 
zonedDateTime = ZonedDateTime.ofInstant(instant, ZoneId.systemDefault) val jq = quote(querySchema[Java8Types]("EncodingTestEntity")) - val j = Java8Types(LocalDate.ofEpochDay(epohDay), instant, Some(zonedDateTime)) + val j = Java8Types(LocalDate.ofEpochDay(epochDay), instant, Some(zonedDateTime)) val cq = quote(querySchema[CasTypes]("EncodingTestEntity")) - val c = CasTypes(LocalDate.ofEpochDay(epohDay), Instant.ofEpochMilli(epoh), Some(zonedDateTime)) + val c = CasTypes(LocalDate.ofEpochDay(epochDay), Instant.ofEpochMilli(epoch), Some(zonedDateTime)) ctx.run(jq.delete) ctx.run(jq.insertValue(lift(j))) diff --git a/quill-codegen-jdbc/src/main/scala/io/getquill/codegen/jdbc/ComposeableTraitsJdbcCodegen.scala b/quill-codegen-jdbc/src/main/scala/io/getquill/codegen/jdbc/ComposeableTraitsJdbcCodegen.scala index e07f2a5a53..32ea211c37 100644 --- a/quill-codegen-jdbc/src/main/scala/io/getquill/codegen/jdbc/ComposeableTraitsJdbcCodegen.scala +++ b/quill-codegen-jdbc/src/main/scala/io/getquill/codegen/jdbc/ComposeableTraitsJdbcCodegen.scala @@ -43,8 +43,8 @@ import javax.sql.DataSource * Common object. * *
 class MyStereotypingGen(...) extends ComposeableTraitsGen(...) {
- * override def namespacer: Namespacer = ts=> if(ts.tableSchem == "alpha" ||
- * ts.tableSchem == "bravo") "common" else ts.tableSchem
+ * override def namespacer: Namespacer = ts=> if(ts.tableSchema == "alpha" ||
+ * ts.tableSchema == "bravo") "common" else ts.tableSchema
  *
  * override def memberNamer: MemberNamer = ts => ts.tableName.snakeToLowerCamel
  * } 
@@ -105,7 +105,7 @@ import javax.sql.DataSource * // Since PersonDao is inside MyCustomContext.alpha and MyCustomContext.bravo * as opposed to MyCustomContext // there will be no collision. object * MyCustomContext extends SqlMirrorContext[H2Dialect, Literal](H2Dialect, - * Literal) with AlphaExtnsions[H2Dialect, Literal] with + * Literal) with AlphaExtensions[H2Dialect, Literal] with * BravoExtensions[H2Dialect, Literal] */ diff --git a/quill-codegen-jdbc/src/main/scala/io/getquill/codegen/jdbc/gen/DefaultJdbcSchemaReader.scala b/quill-codegen-jdbc/src/main/scala/io/getquill/codegen/jdbc/gen/DefaultJdbcSchemaReader.scala index ac58cfb5a0..800707aa5e 100644 --- a/quill-codegen-jdbc/src/main/scala/io/getquill/codegen/jdbc/gen/DefaultJdbcSchemaReader.scala +++ b/quill-codegen-jdbc/src/main/scala/io/getquill/codegen/jdbc/gen/DefaultJdbcSchemaReader.scala @@ -78,13 +78,13 @@ class DefaultJdbcSchemaReader( override def apply(connectionMaker: JdbcConnectionMaker): Seq[RawSchema[JdbcTableMeta, JdbcColumnMeta]] = { val tableMap = extractTables(connectionMaker) - .map(t => ((t.tableCat, t.tableSchem, t.tableName), t)) + .map(t => ((t.tableCat, t.tableSchema, t.tableName), t)) .toMap val columns = extractColumns(connectionMaker) val tableColumns = columns - .groupBy(c => (c.tableCat, c.tableSchem, c.tableName)) + .groupBy(c => (c.tableCat, c.tableSchema, c.tableName)) .map { case (tup, cols) => tableMap.get(tup).map(RawSchema(_, cols)) } .collect { case Some(tbl) => tbl } diff --git a/quill-codegen-jdbc/src/main/scala/io/getquill/codegen/jdbc/gen/JdbcGenerator.scala b/quill-codegen-jdbc/src/main/scala/io/getquill/codegen/jdbc/gen/JdbcGenerator.scala index d2b5f2594d..0d217de47e 100644 --- a/quill-codegen-jdbc/src/main/scala/io/getquill/codegen/jdbc/gen/JdbcGenerator.scala +++ b/quill-codegen-jdbc/src/main/scala/io/getquill/codegen/jdbc/gen/JdbcGenerator.scala @@ -28,11 +28,11 @@ trait JdbcGenerator extends Generator { this: JdbcCodeGeneratorComponents with J override def 
filter(tc: RawSchema[JdbcTableMeta, JdbcColumnMeta]): Boolean = databaseType match { case MySql => !tc.table.tableCat.existsInSetNocase(defaultExcludedSchemas.toList: _*) - case _ => !tc.table.tableSchem.existsInSetNocase(defaultExcludedSchemas.toList: _*) + case _ => !tc.table.tableSchema.existsInSetNocase(defaultExcludedSchemas.toList: _*) } override def namespacer: Namespacer[TableMeta] = databaseType match { case MySql | SqlServer => tm => tm.tableCat.map(_.snakeToLowerCamel).getOrElse(defaultNamespace) - case _ => tm => tm.tableSchem.orElse(tm.tableCat).map(_.snakeToLowerCamel).getOrElse(defaultNamespace) + case _ => tm => tm.tableSchema.orElse(tm.tableCat).map(_.snakeToLowerCamel).getOrElse(defaultNamespace) } } diff --git a/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/SimpleCodegenSpec.scala b/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/SimpleCodegenSpec.scala index b0451c8a4b..c1b3c73a42 100644 --- a/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/SimpleCodegenSpec.scala +++ b/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/SimpleCodegenSpec.scala @@ -41,9 +41,9 @@ class SimpleCodegenSpec extends AnyFreeSpec with Matchers { querySchemas.zip(body).foreach { case (schema, methodTree) => { methodTree match { - case q"$mods def $tname(...$paramss): $tpt = $expr" => { + case q"$mods def $tname(...$params): $tpt = $expr" => { assert(tname.toString.unquote == schema.defName, s"Def method ${tname} should be ${schema.defName}") - assert(paramss.length == 0, s"Def method ${tname} should not have any params for $tname") + assert(params.length == 0, s"Def method ${tname} should not have any params for $tname") val quotedExpr = expr match { case q"quote { $qs_args }" => { @@ -116,9 +116,9 @@ class SimpleCodegenSpec extends AnyFreeSpec with Matchers { val tb = runtimeMirror(this.getClass.getClassLoader).mkToolBox() val cc = tb.parse(generatedCode) cc match { - case q"case class $tpname(...$paramss) extends { ..$earlydefns } with ..$parents" 
=> { + case q"case class $tpname(...$params) extends { ..$earlydefns } with ..$parents" => { tpname.toString() should equal(className) - val constructorList = paramss + val constructorList = params if (constructorList.length != 1) fail(s"Class $tpname has more then one constructor list") val paramList: Seq[_] = constructorList.toList(0).toList if (paramList.length != fields.length) diff --git a/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/StructuralTests.scala b/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/StructuralTests.scala index 50b66a0c76..2f7381498c 100644 --- a/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/StructuralTests.scala +++ b/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/StructuralTests.scala @@ -68,7 +68,7 @@ class StructuralTests extends SimpleCodegenSpec with WithStandardCodegen { } } - "custom naming strateogy" - { + "custom naming strategy" - { val personData = fdgConv("id" -> "Int", "firstname" -> "Option[String]", "lastname" -> "Option[String]", "age" -> "Int")( @@ -165,7 +165,7 @@ class StructuralTests extends SimpleCodegenSpec with WithStandardCodegen { "with snake schema" - { - "prefix collision - different columns without datatype perculation" - { + "prefix collision - different columns without datatype percolation" - { val personData = fdgConv("id" -> "Int", "firstName" -> "Option[String]", "lastName" -> "Option[String]", "age" -> "Int")( @@ -203,7 +203,7 @@ class StructuralTests extends SimpleCodegenSpec with WithStandardCodegen { ) } - "prefix collision - different columns with datatype perculation" in { + "prefix collision - different columns with datatype percolation" in { val gens = standardCodegen( `schema_snakecase_twotable_differentcolumns`, entityNamingStrategy = SnakeCaseCustomTable(_.tableName.toLowerCase.replaceFirst("(alpha_)|(bravo_)", "")) @@ -232,7 +232,7 @@ class StructuralTests extends SimpleCodegenSpec with WithStandardCodegen { } } - "prefix collision - different columns with datatype 
perculation" - { + "prefix collision - different columns with datatype percolation" - { val personData = fdgConv( "id" -> "Int", @@ -276,7 +276,7 @@ class StructuralTests extends SimpleCodegenSpec with WithStandardCodegen { } } - "namespace collision - different columns with datatype perculation" - { + "namespace collision - different columns with datatype percolation" - { val personData = fdgConv( "id" -> "Int", @@ -296,8 +296,8 @@ class StructuralTests extends SimpleCodegenSpec with WithStandardCodegen { entityNamingStrategy = SnakeCaseCustomTable(_.tableName.toLowerCase.replaceFirst("(alpha_)|(bravo_)", "").capitalize), entityNamespacer = - _.tableSchem.map(_.toLowerCase.replaceAll("(alpha)|(bravo)", "public")).getOrElse(this.defaultNamespace), - entityMemberNamer = ts => s"${ts.tableSchem.get}_${ts.tableName}".toLowerCase.snakeToLowerCamel + _.tableSchema.map(_.toLowerCase.replaceAll("(alpha)|(bravo)", "public")).getOrElse(this.defaultNamespace), + entityMemberNamer = ts => s"${ts.tableSchema.get}_${ts.tableName}".toLowerCase.snakeToLowerCamel ).makeGenerators.toList.sortBy(_.caseClassesCode) gens.foreach(gen => LOG.info(gen.code)) diff --git a/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/WithStandardCodegen.scala b/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/WithStandardCodegen.scala index d94b84fa6d..4d48c224c8 100644 --- a/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/WithStandardCodegen.scala +++ b/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/WithStandardCodegen.scala @@ -17,7 +17,7 @@ trait WithStandardCodegen { schemaConfig: SchemaConfig, tableFilter: RawSchema[JdbcTableMeta, JdbcColumnMeta] => Boolean = _ => true, entityNamingStrategy: NameParser = LiteralNames, - entityNamespacer: Namespacer[JdbcTableMeta] = ts => ts.tableSchem.getOrElse(defaultNamespace), + entityNamespacer: Namespacer[JdbcTableMeta] = ts => ts.tableSchema.getOrElse(defaultNamespace), entityMemberNamer: JdbcQuerySchemaNaming = ts => 
ts.tableName.snakeToLowerCamel ) = new JdbcGeneratorBase(() => { diff --git a/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/integration/CodegenTestCases.scala b/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/integration/CodegenTestCases.scala index ccd62e82b6..41df2b9528 100644 --- a/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/integration/CodegenTestCases.scala +++ b/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/integration/CodegenTestCases.scala @@ -251,7 +251,7 @@ object CodegenTestCases { case H2 => tc.table.tableCat.exists(_.toLowerCase startsWith "codegen_test") case Postgres => - tc.table.tableSchem.existsInSetNocase("public", "alpha", "bravo") + tc.table.tableSchema.existsInSetNocase("public", "alpha", "bravo") case SqlServer => tc.table.tableCat.existsInSetNocase("codegen_test", "alpha", "bravo") } @@ -260,7 +260,7 @@ object CodegenTestCases { dbPrefix match { // SQLite does not support user-defined schemas. It has the ability to use multiple files // but does not show what table belongs to what file in any JDBC call. This makes multi-schema - // stereotyping untenable so the respective tests are not not included. + // stereotyping untenable so the respective tests are not included. 
case TestSqliteDB => List( `1-simple-snake`, diff --git a/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/integration/DbHelper.scala b/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/integration/DbHelper.scala index 7863b1ff27..61df0a8362 100644 --- a/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/integration/DbHelper.scala +++ b/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/integration/DbHelper.scala @@ -104,7 +104,7 @@ object DbHelper { (select table_catalog as _1, table_schema as _2, table_name as _3, table_type as _4 from bravo.information_schema.tables) """.as[Query[(String, String, String, String)]] ) - tables.map { case (cat, schem, name, tpe) => JdbcTableMeta(Option(cat), Option(schem), name, Option(tpe)) } + tables.map { case (cat, schema, name, tpe) => JdbcTableMeta(Option(cat), Option(schema), name, Option(tpe)) } } case _ => @@ -113,8 +113,8 @@ object DbHelper { val getSchema: JdbcTableMeta => Option[String] = databaseType match { case MySql => tm => tm.tableCat - case SqlServer => tm => tm.tableCat.flatMap(tc => tm.tableSchem.flatMap(ts => Some(s"${tc}.${ts}"))) - case _ => tm => tm.tableSchem + case SqlServer => tm => tm.tableCat.flatMap(tc => tm.tableSchema.flatMap(ts => Some(s"${tc}.${ts}"))) + case _ => tm => tm.tableSchema } val tables = allTables.filter { tm => @@ -122,18 +122,18 @@ object DbHelper { case MySql => tm.tableCat.existsInSetNocase("codegen_test", "alpha", "bravo") case SqlServer => - tm.tableCat.existsInSetNocase("codegen_test", "alpha", "bravo") && tm.tableSchem.exists( + tm.tableCat.existsInSetNocase("codegen_test", "alpha", "bravo") && tm.tableSchema.exists( _.toLowerCase == "dbo" ) case Oracle => - tm.tableSchem.existsInSetNocase("codegen_test", "alpha", "bravo") + tm.tableSchema.existsInSetNocase("codegen_test", "alpha", "bravo") case Sqlite => // SQLite does not have individual schemas at all. 
true case Postgres => - tm.tableSchem.existsInSetNocase("public", "alpha", "bravo") + tm.tableSchema.existsInSetNocase("public", "alpha", "bravo") case H2 => tm.tableCat.exists(_.toLowerCase == "codegen_test.h2") && - tm.tableSchem.exists(_.toLowerCase != "information_schema") + tm.tableSchema.exists(_.toLowerCase != "information_schema") } } diff --git a/quill-codegen/src/main/scala/io/getquill/codegen/dag/Ancestry.scala b/quill-codegen/src/main/scala/io/getquill/codegen/dag/Ancestry.scala index 119737a073..9ac4a07eef 100644 --- a/quill-codegen/src/main/scala/io/getquill/codegen/dag/Ancestry.scala +++ b/quill-codegen/src/main/scala/io/getquill/codegen/dag/Ancestry.scala @@ -51,7 +51,7 @@ object DefaultNodeCatalog extends NodeCatalog { override def lookup(cls: ClassTag[_]): DagNode = nodeCatalogNodes .find(_.cls == cls) .getOrElse({ - logger.warn(s"Could not find type hiearchy node for: ${cls} Must assume it's a string") + logger.warn(s"Could not find type hierarchy node for: ${cls} Must assume it's a string") StringNode }) } diff --git a/quill-codegen/src/main/scala/io/getquill/codegen/gen/CodeGeneratorComponents.scala b/quill-codegen/src/main/scala/io/getquill/codegen/gen/CodeGeneratorComponents.scala index 9c91786b3d..fbca33fdfc 100644 --- a/quill-codegen/src/main/scala/io/getquill/codegen/gen/CodeGeneratorComponents.scala +++ b/quill-codegen/src/main/scala/io/getquill/codegen/gen/CodeGeneratorComponents.scala @@ -50,7 +50,7 @@ trait CodeGeneratorComponents extends HasBasicMeta with QuerySchemaNaming { *
{@code case class Person(firstName:String, lastName:String, age:Int)
    *
    * object Person { // Taking ts.tableName.snakeToLowerCamel will ensure each
-   * one has a different name. Otherise // all of them will be 'query' which
+   * one has a different name. Otherwise // all of them will be 'query' which
    * will result in a compile error. def alphaPerson =
    * querySchema[Person]("ALPHA.PERSON", ...) def bravoPerson =
    * querySchema[Person]("BRAVO.PERSON", ...) } }
diff --git a/quill-codegen/src/main/scala/io/getquill/codegen/gen/Generator.scala b/quill-codegen/src/main/scala/io/getquill/codegen/gen/Generator.scala index 2ffd934d5d..fb8a1e3294 100644 --- a/quill-codegen/src/main/scala/io/getquill/codegen/gen/Generator.scala +++ b/quill-codegen/src/main/scala/io/getquill/codegen/gen/Generator.scala @@ -23,7 +23,7 @@ trait Generator { /** * Should we prefix object/package produced by this generator? Set this as the - * the value of that. Otherwise set this to be the empty string. + * value of that. Otherwise set this to be the empty string. */ def packagePrefix: String def connectionMakers: Seq[ConnectionMaker] @@ -52,8 +52,8 @@ trait Generator { } def makeGenerators = new MultiGeneratorFactory(generatorMaker).apply - def writeAllFiles(localtion: String): Future[Seq[Path]] = - Future.sequence(writeFiles(localtion)) + def writeAllFiles(location: String): Future[Seq[Path]] = + Future.sequence(writeFiles(location)) def writeFiles(location: String): Seq[Future[Path]] = { // can't put Seq[Gen] into here because doing Seq[Gen] <: SingleUnitCodegen makes it covariant @@ -187,7 +187,7 @@ trait Generator { override def code: String = surroundByObject(body) override def objectName: Option[String] = Some(escape(tableColumns.table.name)) - // TODO Have this come directly from the Generator's context (but make sure to override it in the structural tests so it doesn't distrub them) + // TODO Have this come directly from the Generator's context (but make sure to override it in the structural tests so it doesn't disturb them) def imports = querySchemaImports // generate variables for every schema e.g. 
@@ -225,7 +225,7 @@ trait Generator { """.stripMargin.trimFront override def tableName: String = schema.tableName - override def schemaName: Option[String] = schema.tableSchem + override def schemaName: Option[String] = schema.tableSchema def QuerySchemaMapping = new QuerySchemaMappingGen(_) class QuerySchemaMappingGen(val column: ColumnFusion[ColumnMeta]) diff --git a/quill-codegen/src/main/scala/io/getquill/codegen/model/PackagingStrategy.scala b/quill-codegen/src/main/scala/io/getquill/codegen/model/PackagingStrategy.scala index a64116c5dd..4a7c3f8a6a 100644 --- a/quill-codegen/src/main/scala/io/getquill/codegen/model/PackagingStrategy.scala +++ b/quill-codegen/src/main/scala/io/getquill/codegen/model/PackagingStrategy.scala @@ -21,7 +21,7 @@ object PackagingStrategy { * Use this strategy when you want a separate source code file (or string) * for every single table. Typically you'll want to use this when table * schemas are very large and you want to minimize the footprint of your - * imports (i.e. since each file is a seperate table you can be sure to just + * imports (i.e. since each file is a separate table you can be sure to just * imports the exact tables needed for every source file). 
*/ def TablePerFile(packagePrefix: String = "") = diff --git a/quill-codegen/src/main/scala/io/getquill/codegen/model/SchemaModel.scala b/quill-codegen/src/main/scala/io/getquill/codegen/model/SchemaModel.scala index cd1934abc4..5ad0910c2c 100644 --- a/quill-codegen/src/main/scala/io/getquill/codegen/model/SchemaModel.scala +++ b/quill-codegen/src/main/scala/io/getquill/codegen/model/SchemaModel.scala @@ -5,7 +5,7 @@ import java.sql.ResultSet case class RawSchema[T, C](table: T, columns: Seq[C]) trait BasicTableMeta { - def tableSchem: Option[String] + def tableSchema: Option[String] def tableName: String } @@ -15,7 +15,7 @@ trait BasicColumnMeta { case class JdbcTableMeta( tableCat: Option[String], - tableSchem: Option[String], + tableSchema: Option[String], tableName: String, tableType: Option[String] ) extends BasicTableMeta @@ -23,7 +23,7 @@ case class JdbcTableMeta( object JdbcTableMeta { def fromResultSet(rs: ResultSet) = JdbcTableMeta( tableCat = Option(rs.getString("TABLE_CAT")), - tableSchem = Option(rs.getString("TABLE_SCHEM")), + tableSchema = Option(rs.getString("TABLE_SCHEM")), tableName = rs.getString("TABLE_NAME"), tableType = Option(rs.getString("TABLE_TYPE")) ) @@ -31,7 +31,7 @@ object JdbcTableMeta { case class JdbcColumnMeta( tableCat: Option[String], - tableSchem: Option[String], + tableSchema: Option[String], tableName: String, columnName: String, dataType: Int, @@ -44,7 +44,7 @@ object JdbcColumnMeta { def fromResultSet(rs: ResultSet) = JdbcColumnMeta( tableCat = Option(rs.getString("TABLE_CAT")), - tableSchem = Option(rs.getString("TABLE_SCHEM")), + tableSchema = Option(rs.getString("TABLE_SCHEM")), tableName = rs.getString("TABLE_NAME"), columnName = rs.getString("COLUMN_NAME"), dataType = rs.getInt("DATA_TYPE"), diff --git a/quill-codegen/src/main/scala/io/getquill/codegen/model/StererotypedModel.scala b/quill-codegen/src/main/scala/io/getquill/codegen/model/StereotypedModel.scala similarity index 100% rename from 
quill-codegen/src/main/scala/io/getquill/codegen/model/StererotypedModel.scala rename to quill-codegen/src/main/scala/io/getquill/codegen/model/StereotypedModel.scala diff --git a/quill-core/js/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala b/quill-core/js/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala index b333584585..966668b1f5 100644 --- a/quill-core/js/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala +++ b/quill-core/js/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala @@ -387,7 +387,7 @@ trait DynamicQueryDsl { type DynamicAssignment[U] = ((Quoted[T] => Quoted[U]), U) - private[this] def assignemnts[S]( + private[this] def assignments[S]( l: List[DynamicSet[S, _]] ): List[Assignment] = l.collect { case s: DynamicSetValue[_, _] => @@ -397,7 +397,7 @@ trait DynamicQueryDsl { def insert(l: DynamicSet[T, _]*): DynamicInsert[T] = DynamicInsert( - splice(Insert(DynamicEntityQuery.this.q.ast, assignemnts(l.toList))) + splice(Insert(DynamicEntityQuery.this.q.ast, assignments(l.toList))) ) def updateValue(value: T): DynamicUpdate[T] = macro DynamicQueryDslMacro.updateValue @@ -405,7 +405,7 @@ trait DynamicQueryDsl { def update(sets: DynamicSet[T, _]*): DynamicUpdate[T] = DynamicUpdate( splice[Update[T]]( - Update(DynamicEntityQuery.this.q.ast, assignemnts(sets.toList)) + Update(DynamicEntityQuery.this.q.ast, assignments(sets.toList)) ) ) diff --git a/quill-core/jvm/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala b/quill-core/jvm/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala index 4042b3638f..c9ecdcabf9 100644 --- a/quill-core/jvm/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala +++ b/quill-core/jvm/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala @@ -410,7 +410,7 @@ trait DynamicQueryDsl { type DynamicAssignment[U] = ((Quoted[T] => Quoted[U]), U) - private[this] def assignemnts[S]( + private[this] def assignments[S]( l: List[DynamicSet[S, _]] ): List[Assignment] = l.collect { case s: DynamicSetValue[_, _] => @@ -420,7 +420,7 @@ 
trait DynamicQueryDsl { def insert(l: DynamicSet[T, _]*): DynamicInsert[T] = DynamicInsert( - splice(Insert(DynamicEntityQuery.this.q.ast, assignemnts(l.toList))) + splice(Insert(DynamicEntityQuery.this.q.ast, assignments(l.toList))) ) def updateValue(value: T): DynamicUpdate[T] = macro DynamicQueryDslMacro.updateValue @@ -428,7 +428,7 @@ trait DynamicQueryDsl { def update(sets: DynamicSet[T, _]*): DynamicUpdate[T] = DynamicUpdate( splice[Update[T]]( - Update(DynamicEntityQuery.this.q.ast, assignemnts(sets.toList)) + Update(DynamicEntityQuery.this.q.ast, assignments(sets.toList)) ) ) diff --git a/quill-core/src/main/scala/io/getquill/MirrorContext.scala b/quill-core/src/main/scala/io/getquill/MirrorContext.scala index 4c7f458618..291a94ad2f 100644 --- a/quill-core/src/main/scala/io/getquill/MirrorContext.scala +++ b/quill-core/src/main/scala/io/getquill/MirrorContext.scala @@ -21,9 +21,9 @@ case class BatchActionReturningMirrorGeneric[T, PrepareRow, Extractor[_]]( * This is supposed to emulate how Row retrieval works in JDBC Int JDBC, * ResultSet won't ever actually have Option values inside, so the actual * option-decoder needs to understand that fact e.g. - * `Deocder[Option[Int]](java.sql.ResultSet(foo:1, etc)).getInt(1)`* and wrap it + * `Decoder[Option[Int]](java.sql.ResultSet(foo:1, etc)).getInt(1)`* and wrap it * into a Optional value for the equivalent row implementation: - * `Deocder[Option[Int]](Row(foo:1, etc)).apply(1)`. (*note that + * `Decoder[Option[Int]](Row(foo:1, etc)).apply(1)`. (*note that * java.sql.ResultSet actually doesn't have this syntax because it isn't a * product). 
Similarly, when doing `ResultSet(foo:null /*Expecting an int*/, * etc).getInt(1)` the result will be 0 as opposed to throwing a NPE as would be diff --git a/quill-core/src/main/scala/io/getquill/Quoted.scala b/quill-core/src/main/scala/io/getquill/Quoted.scala index f426e7ed7f..0207f5c5cb 100644 --- a/quill-core/src/main/scala/io/getquill/Quoted.scala +++ b/quill-core/src/main/scala/io/getquill/Quoted.scala @@ -4,9 +4,9 @@ import io.getquill.ast.Ast /** * Defines the primary interface by which information in Quill is composed. This - * includes not only queries but all code fragements. A quotation can be a - * simple value: {{ val pi = quote(3.14159) }} And be used within another - * quotation: {{ case class Circle(radius: Float) + * includes not only queries but all code fragments. A quotation can be a simple + * value: {{ val pi = quote(3.14159) }} And be used within another quotation: {{ + * case class Circle(radius: Float) * * val areas = quote { query[Circle].map(c => pi * c.radius * c.radius) } }} * Quotations can also contain high-order functions and inline values: {{ val diff --git a/quill-core/src/main/scala/io/getquill/context/ActionMacro.scala b/quill-core/src/main/scala/io/getquill/context/ActionMacro.scala index 8996a80014..8085933581 100644 --- a/quill-core/src/main/scala/io/getquill/context/ActionMacro.scala +++ b/quill-core/src/main/scala/io/getquill/context/ActionMacro.scala @@ -127,7 +127,7 @@ class ActionMacro(val c: MacroContext) extends ContextMacro with ReifyLiftings { val idiomContext = $idiomContext /* for liftQuery(people:List[Person]) `batch` is `people` */ /* TODO Need secondary check to see if context is actually capable of batch-values insert */ - /* If there is a INSERT ... VALUES clause this will be cnoded as ValuesClauseToken(lifts) which we need to duplicate */ + /* If there is a INSERT ... 
VALUES clause this will be encoded as ValuesClauseToken(lifts) which we need to duplicate */ /* batches: List[List[Person]] */ val batches = if ($canDoBatch && $numRows != 1) { @@ -307,7 +307,7 @@ class ActionMacro(val c: MacroContext) extends ContextMacro with ReifyLiftings { super.apply(e) } - // Only extrace lifts that come from values-clauses: + // Only extract lifts that come from values-clauses: // liftQuery(people).foreach(ps => query[Person].filter(_.name == lift("not this")).insertValue(_.name -> , ...)) override def apply(e: Ast): (Ast, StatefulTransformer[List[(ScalarTag, ScalarLift)]]) = e match { diff --git a/quill-core/src/main/scala/io/getquill/context/ContextMacro.scala b/quill-core/src/main/scala/io/getquill/context/ContextMacro.scala index 809193eaa3..a1163a5b00 100644 --- a/quill-core/src/main/scala/io/getquill/context/ContextMacro.scala +++ b/quill-core/src/main/scala/io/getquill/context/ContextMacro.scala @@ -114,7 +114,7 @@ trait ContextMacro extends Quotation { val liftQuat: Liftable[Quat] = liftUnlift.quatLiftable val transpileConfig = summonTranspileConfig() val transpileConfigExpr = ConfigLiftables.transpileConfigLiftable(transpileConfig) - // Compile-time AST might have Dynamic parts, we need those resoved (i.e. at runtime to be able to get the query type) + // Compile-time AST might have Dynamic parts, we need those resolved (i.e. 
at runtime to be able to get the query type) val queryTypeExpr = q"_root_.io.getquill.IdiomContext.QueryType.discoverFromAst($ast, $batchAlias)" c.info("Dynamic query") val translateMethod = if (io.getquill.util.Messages.cacheDynamicQueries) { diff --git a/quill-core/src/main/scala/io/getquill/context/Expand.scala b/quill-core/src/main/scala/io/getquill/context/Expand.scala index 5eee575d9a..41013154ba 100644 --- a/quill-core/src/main/scala/io/getquill/context/Expand.scala +++ b/quill-core/src/main/scala/io/getquill/context/Expand.scala @@ -9,7 +9,7 @@ import io.getquill.quat.Quat object CanDoBatchedInsert { def apply(ast: Ast, idiom: Idiom, statement: Token, isReturning: Boolean, idiomContext: IdiomContext): Boolean = { // find any actions that could have a VALUES clause. Right now just ast.Insert, - // in the future might be Update and Dlete + // in the future might be Update and Delete val actions = CollectAst.byType[Action](ast) // only one action allowed per-query in general if (actions.length != 1) @@ -141,7 +141,7 @@ case class ExpandWithInjectables[T, C <: Context[_, _]]( naming: NamingStrategy, executionType: ExecutionType, subBatch: List[T], - inejctables: List[(String, T => ScalarLift)] + injectables: List[(String, T => ScalarLift)] ) { val (string, externals) = @@ -151,7 +151,7 @@ case class ExpandWithInjectables[T, C <: Context[_, _]]( statement, forProbing = false, subBatch, - inejctables + injectables ) val liftings = externals.collect { case lift: ScalarLift => diff --git a/quill-core/src/main/scala/io/getquill/context/ProbeStatement.scala b/quill-core/src/main/scala/io/getquill/context/ProbeStatement.scala index 619262259c..0824f31dff 100644 --- a/quill-core/src/main/scala/io/getquill/context/ProbeStatement.scala +++ b/quill-core/src/main/scala/io/getquill/context/ProbeStatement.scala @@ -29,7 +29,7 @@ object ProbeStatement { c.error( s"Can't load the context of type '$tpe' for a compile-time query probing. 
" + s"Make sure that context creation happens in a separate compilation unit. " + - s"For more information please refer to the documentation http://getquill.io/#quotation-query-probing. " + + s"For more information please refer to the documentation https://getquill.io/#quotation-query-probing. " + s"Reason: '$ex'" ) None diff --git a/quill-core/src/main/scala/io/getquill/dsl/MetaDslMacro.scala b/quill-core/src/main/scala/io/getquill/dsl/MetaDslMacro.scala index 00f6d0cb43..5b8e5463fe 100644 --- a/quill-core/src/main/scala/io/getquill/dsl/MetaDslMacro.scala +++ b/quill-core/src/main/scala/io/getquill/dsl/MetaDslMacro.scala @@ -118,7 +118,7 @@ class MetaDslMacro(val c: MacroContext) extends ValueComputation { // E.g. for Person("Joe", 123) the List(q"!nullChecker(0,row)", q"!nullChecker(1,row)") columns // that eventually turn into List(!NullChecker("Joe"), !NullChecker(123)) columns. // Once you are in a product that has a product inside e.g. Person(name: Name("Joe", "Bloggs"), age: 123) - // they will be the concatonations of the Or-clauses e.g. + // they will be the concatenations of the Or-clauses e.g. // List( (NullChecker("Joe") || NullChecker("Bloggs")), NullChecker(123)) // This is what needs to be the null-checker of the outer entity i.e. // if ((NullChecker("Joe") || NullChecker("Bloggs")) || NullChecker(123)) Some(new Name(...)) else None @@ -146,7 +146,7 @@ class MetaDslMacro(val c: MacroContext) extends ValueComputation { // Some((Person(name:String, age:Int), Address(street:Option[String])) // from the row: // Row(null, null, null) which becomes Option((Person(null,0), Address(None))) - // and say we are are processing the 'Address' part which can't be null. We still want to + // and say we are processing the 'Address' part which can't be null. We still want to // return the internal columns of Address since the outer Option can be None. // Address. 
allColumnsNotNull diff --git a/quill-core/src/main/scala/io/getquill/quat/QuatMaking.scala b/quill-core/src/main/scala/io/getquill/quat/QuatMaking.scala index 7dd1ff78d1..7b5adcd0bf 100644 --- a/quill-core/src/main/scala/io/getquill/quat/QuatMaking.scala +++ b/quill-core/src/main/scala/io/getquill/quat/QuatMaking.scala @@ -333,7 +333,7 @@ trait QuatMakingBase extends MacroUtilUniverse { Quat.Null // For other types of case classes (and if there does not exist an encoder for it) - // the exception to that is a cassandra UDT that we treat like an encodeable entity even if it has a parsed type + // the exception to that is a cassandra UDT that we treat like an encodable entity even if it has a parsed type case CaseClassBaseType(name, fields) if !existsEncoderFor(tpe) || tpe <:< typeOf[Udt] => Quat.Product( name.split('.').last, diff --git a/quill-core/src/main/scala/io/getquill/quotation/Parsing.scala b/quill-core/src/main/scala/io/getquill/quotation/Parsing.scala index 5197e7a6ce..e5a0823c86 100644 --- a/quill-core/src/main/scala/io/getquill/quotation/Parsing.scala +++ b/quill-core/src/main/scala/io/getquill/quotation/Parsing.scala @@ -26,7 +26,7 @@ trait Parsing extends ValueComputation with QuatMaking with MacroUtilBase { import c.universe.{Ident => _, Constant => _, Function => _, If => _, Block => _, _} // Variables that need to be sanitized out in various places due to internal conflicts with the way - // macros hard handeled in MetaDsl + // macros hard handled in MetaDsl private[getquill] val dangerousVariables: Set[IdentName] = Set(IdentName("v")) case class Parser[T](p: PartialFunction[Tree, T])(implicit ct: ClassTag[T]) { @@ -476,7 +476,7 @@ trait Parsing extends ValueComputation with QuatMaking with MacroUtilBase { private def ident(x: TermName, quat: Quat): Ident = identClean(Ident(x.decodedName.toString, quat)) /** - * In order to guarentee consistent behavior across multiple databases, we + * In order to guarantee consistent behavior across multiple 
databases, we * have begun to explicitly to null-check nullable columns that are wrapped * inside of `Option[T]` whenever a `Option.map`, `Option.flatMap`, * `Option.forall`, and `Option.exists` are used. However, we would like users @@ -1025,7 +1025,7 @@ trait Parsing extends ValueComputation with QuatMaking with MacroUtilBase { } // (Query[Person]) example - query[Person] or query[Person].filter(p => p.name == "Jack") - // (Action[Person] example - (Query[Perosn]).insert(_.name -> "Joe", _.age -> 123) + // (Action[Person] example - (Query[Person]).insert(_.name -> "Joe", _.age -> 123) val actionParser: Parser[Ast] = Parser[Ast] { // (Query[Person]).update(_.name -> "Joe", _.age > 123) case q"$query.$method(..$assignments)" if (method.decodedName.toString == "update") => @@ -1040,12 +1040,12 @@ trait Parsing extends ValueComputation with QuatMaking with MacroUtilBase { case q"$query.delete" => Delete(astParser(query)) - // Theory: ( (Query[Perosn]).update(....) ).returning[T] + // Theory: ( (Query[Person]).update(....) ).returning[T] // Example: ( query[Person].filter(p => p.name == "Joe").update(....) 
).returning[Something] case q"$action.returning[$r]" => c.fail(s"A 'returning' clause must have arguments.") - // ( (Query[Perosn]).insert(_.name -> "Joe", _.age -> 123) ).returning(p => (p.id, p.age)) + // ( (Query[Person]).insert(_.name -> "Joe", _.age -> 123) ).returning(p => (p.id, p.age)) case q"$action.returning[$r](($alias) => $body)" => val ident = identParser(alias) val bodyAst = reprocessReturnClause(ident, astParser(body), action) @@ -1053,7 +1053,7 @@ trait Parsing extends ValueComputation with QuatMaking with MacroUtilBase { idiomReturnCapability.foreach(_.verifyAst(bodyAst)) // Verify that the AST in the returning-body is valid Returning(astParser(action), ident, bodyAst) - // ( (Query[Perosn]).insert(_.name -> "Joe", _.age -> 123) ).returningMany(p => (p.id, p.age)) + // ( (Query[Person]).insert(_.name -> "Joe", _.age -> 123) ).returningMany(p => (p.id, p.age)) case q"$action.returningMany[$r](($alias) => $body)" => val ident = identParser(alias) val bodyAst = reprocessReturnClause(ident, astParser(body), action) @@ -1061,7 +1061,7 @@ trait Parsing extends ValueComputation with QuatMaking with MacroUtilBase { idiomReturnCapability.foreach(_.verifyAst(bodyAst)) // Verify that the AST in the returning-body is valid Returning(astParser(action), ident, bodyAst) - // ( (Query[Perosn]).insert(_.name -> "Joe", _.age -> 123) ).returningGenerated(p => (p.id, p.otherGeneratedProp)) + // ( (Query[Person]).insert(_.name -> "Joe", _.age -> 123) ).returningGenerated(p => (p.id, p.otherGeneratedProp)) case q"$action.returningGenerated[$r](($alias) => $body)" => val ident = identParser(alias) val bodyAst = reprocessReturnClause(ident, astParser(body), action) @@ -1185,7 +1185,7 @@ trait Parsing extends ValueComputation with QuatMaking with MacroUtilBase { /** * Type-check two trees, if one of them has optionals, go into the optionals * to find the root types in each of them. Then compare the types that are - * inside. If they are not compareable, abort the build. 
Otherwise return type + * inside. If they are not comparable, abort the build. Otherwise return type * of which side (or both) has the optional. In order to do the actual * comparison, the 'weak conformance' operator is used and a subclass is * allowed on either side of the `==`. Weak conformance is necessary so that diff --git a/quill-core/src/main/scala/io/getquill/quotation/Rebind.scala b/quill-core/src/main/scala/io/getquill/quotation/Rebind.scala index 3213d30b75..2ba4d123d5 100644 --- a/quill-core/src/main/scala/io/getquill/quotation/Rebind.scala +++ b/quill-core/src/main/scala/io/getquill/quotation/Rebind.scala @@ -20,7 +20,7 @@ object Rebind { } def toIdent(s: Symbol) = - // Casing there is needed because scala doesn't undestand c.universe.Type =:= infer.c.universe.Type + // Casing there is needed because scala doesn't understand c.universe.Type =:= infer.c.universe.Type // alternatively, we could wrap this entire clause (starting with 'apply') in a class and extend inferQuat Ident( s.name.decodedName.toString, diff --git a/quill-core/src/test/scala/io/getquill/OpsSpec.scala b/quill-core/src/test/scala/io/getquill/OpsSpec.scala index 2ba4490839..69934dc5b2 100644 --- a/quill-core/src/test/scala/io/getquill/OpsSpec.scala +++ b/quill-core/src/test/scala/io/getquill/OpsSpec.scala @@ -63,7 +63,7 @@ class OpsSpec extends Spec { } } - "unquotes duble quotations" in { + "unquotes double quotations" in { val q: Quoted[EntityQuery[TestEntity]] = quote { quote(query[TestEntity]) } diff --git a/quill-core/src/test/scala/io/getquill/context/ActionMacroSpec.scala b/quill-core/src/test/scala/io/getquill/context/ActionMacroSpec.scala index 03fa4665d7..6a75ea3093 100644 --- a/quill-core/src/test/scala/io/getquill/context/ActionMacroSpec.scala +++ b/quill-core/src/test/scala/io/getquill/context/ActionMacroSpec.scala @@ -34,7 +34,7 @@ class ActionMacroSpec extends Spec { r.string mustEqual """querySchema("TestEntity").insert(v => v.s -> ?, v => v.i -> ?, v => v.l -> ?, v => 
v.o -> ?, v => v.b -> ?)""" r.prepareRow mustEqual Row("s", 1, 2L, None, true) } - "nexted case class lifting" in { + "nested case class lifting" in { val q = quote { t: TestEntity => qr1.insertValue(t) } diff --git a/quill-core/src/test/scala/io/getquill/context/EncodeBindVariablesSpec.scala b/quill-core/src/test/scala/io/getquill/context/EncodeBindVariablesSpec.scala index 0e52e0580d..25effb5683 100644 --- a/quill-core/src/test/scala/io/getquill/context/EncodeBindVariablesSpec.scala +++ b/quill-core/src/test/scala/io/getquill/context/EncodeBindVariablesSpec.scala @@ -26,7 +26,7 @@ class EncodeBindVariablesSpec extends Spec { } } - "fails if there isn't an encoder for the binded value" in { + "fails if there isn't an encoder for the bound value" in { val q = quote { (i: Thread) => qr1.map(t => i) } diff --git a/quill-core/src/test/scala/io/getquill/context/mirror/MirrorIdiomSpec.scala b/quill-core/src/test/scala/io/getquill/context/mirror/MirrorIdiomSpec.scala index e6dac65478..df821600b3 100644 --- a/quill-core/src/test/scala/io/getquill/context/mirror/MirrorIdiomSpec.scala +++ b/quill-core/src/test/scala/io/getquill/context/mirror/MirrorIdiomSpec.scala @@ -248,7 +248,7 @@ class MirrorIdiomSpec extends Spec { stmt"""(s) => !(s == "s")""" } } - "prostfix" - { + "postfix" - { "isEmpty" in { val q = quote { (xs: Query[_]) => xs.isEmpty diff --git a/quill-core/src/test/scala/io/getquill/norm/ApplyMapSpec.scala b/quill-core/src/test/scala/io/getquill/norm/ApplyMapSpec.scala index 62ba1fb28e..b764cef49e 100644 --- a/quill-core/src/test/scala/io/getquill/norm/ApplyMapSpec.scala +++ b/quill-core/src/test/scala/io/getquill/norm/ApplyMapSpec.scala @@ -14,7 +14,7 @@ class ApplyMapSpec extends Spec { val ApplyMap = new ApplyMap(TraceConfig.Empty) - "avoids applying the intermmediate map after a groupBy" - { + "avoids applying the intermediate map after a groupBy" - { "flatMap" in { val q = quote { qr1.groupBy(t => t.s).map(y => y._1).flatMap(s => qr2.filter(z => z.s == s)) 
diff --git a/quill-core/src/test/scala/io/getquill/norm/BetaReductionSpec.scala b/quill-core/src/test/scala/io/getquill/norm/BetaReductionSpec.scala index 72f3db5459..07756973e6 100644 --- a/quill-core/src/test/scala/io/getquill/norm/BetaReductionSpec.scala +++ b/quill-core/src/test/scala/io/getquill/norm/BetaReductionSpec.scala @@ -8,7 +8,7 @@ import io.getquill.quat.Quat class BetaReductionSpec extends Spec { - "simplifies the ast by applying functons" - { + "simplifies the ast by applying functions" - { "tuple field" in { val ast: Ast = Property(Tuple(List(Ident("a"))), "_1") BetaReduction(ast) mustEqual Ident("a") diff --git a/quill-core/src/test/scala/io/getquill/norm/NormalizeNestedStructuresSpec.scala b/quill-core/src/test/scala/io/getquill/norm/NormalizeNestedStructuresSpec.scala index be5fe602b7..e32e69adbb 100644 --- a/quill-core/src/test/scala/io/getquill/norm/NormalizeNestedStructuresSpec.scala +++ b/quill-core/src/test/scala/io/getquill/norm/NormalizeNestedStructuresSpec.scala @@ -9,7 +9,7 @@ import io.getquill.MirrorContexts.testContext.unquote class NormalizeNestedStructuresSpec extends Spec { - val unormalized = quote { + val unnormalized = quote { qr1.map(x => x.i).take(1).size } @@ -58,7 +58,7 @@ class NormalizeNestedStructuresSpec extends Spec { } "sortBy" in { val q = quote { - qr1.sortBy(t => unormalized) + qr1.sortBy(t => unnormalized) } val n = quote { qr1.sortBy(t => normalized) @@ -67,7 +67,7 @@ class NormalizeNestedStructuresSpec extends Spec { } "groupBy" in { val q = quote { - qr1.groupBy(t => unormalized) + qr1.groupBy(t => unnormalized) } val n = quote { qr1.groupBy(t => normalized) @@ -76,7 +76,7 @@ class NormalizeNestedStructuresSpec extends Spec { } "aggregation" in { val q = quote { - qr1.map(t => unormalized).max + qr1.map(t => unnormalized).max } val n = quote { qr1.map(t => normalized).max @@ -85,7 +85,7 @@ class NormalizeNestedStructuresSpec extends Spec { } "take" in { val q = quote { - qr1.sortBy(t => unormalized).take(1) + 
qr1.sortBy(t => unnormalized).take(1) } val n = quote { qr1.sortBy(t => normalized).take(1) @@ -94,7 +94,7 @@ class NormalizeNestedStructuresSpec extends Spec { } "drop" in { val q = quote { - qr1.sortBy(t => unormalized).drop(1) + qr1.sortBy(t => unnormalized).drop(1) } val n = quote { qr1.sortBy(t => normalized).drop(1) @@ -103,7 +103,7 @@ class NormalizeNestedStructuresSpec extends Spec { } "union" in { val q = quote { - qr1.filter(t => unormalized == 1L).union(qr1) + qr1.filter(t => unnormalized == 1L).union(qr1) } val n = quote { qr1.filter(t => normalized == 1L).union(qr1) @@ -112,7 +112,7 @@ class NormalizeNestedStructuresSpec extends Spec { } "unionAll" in { val q = quote { - qr1.filter(t => unormalized == 1L).unionAll(qr1) + qr1.filter(t => unnormalized == 1L).unionAll(qr1) } val n = quote { qr1.filter(t => normalized == 1L).unionAll(qr1) @@ -122,7 +122,7 @@ class NormalizeNestedStructuresSpec extends Spec { "outer join" - { "left" in { val q = quote { - qr1.filter(t => unormalized == 1L).rightJoin(qr1).on((a, b) => a.s == b.s) + qr1.filter(t => unnormalized == 1L).rightJoin(qr1).on((a, b) => a.s == b.s) } val n = quote { qr1.filter(t => normalized == 1L).rightJoin(qr1).on((a, b) => a.s == b.s) @@ -131,7 +131,7 @@ class NormalizeNestedStructuresSpec extends Spec { } "right" in { val q = quote { - qr1.rightJoin(qr1.filter(t => unormalized == 1L)).on((a, b) => a.s == b.s) + qr1.rightJoin(qr1.filter(t => unnormalized == 1L)).on((a, b) => a.s == b.s) } val n = quote { qr1.rightJoin(qr1.filter(t => normalized == 1L)).on((a, b) => a.s == b.s) @@ -140,7 +140,7 @@ class NormalizeNestedStructuresSpec extends Spec { } "on" in { val q = quote { - qr1.rightJoin(qr1).on((a, b) => unormalized == 1L) + qr1.rightJoin(qr1).on((a, b) => unnormalized == 1L) } val n = quote { qr1.rightJoin(qr1).on((a, b) => normalized == 1L) @@ -150,7 +150,7 @@ class NormalizeNestedStructuresSpec extends Spec { } "distinct" in { val q = quote { - qr1.filter(t => unormalized == 1L).distinct + 
qr1.filter(t => unnormalized == 1L).distinct } val n = quote { qr1.filter(t => normalized == 1L).distinct diff --git a/quill-core/src/test/scala/io/getquill/norm/StabilizeLiftsSpec.scala b/quill-core/src/test/scala/io/getquill/norm/StabilizeLiftsSpec.scala new file mode 100644 index 0000000000..72a0538298 --- /dev/null +++ b/quill-core/src/test/scala/io/getquill/norm/StabilizeLiftsSpec.scala @@ -0,0 +1,77 @@ +package io.getquill.norm + +import io.getquill.ast._ +import io.getquill.base.Spec +import io.getquill.quat._ +import io.getquill.MirrorContexts.testContext._ +import scala.collection.immutable.{Map => IMap} + +class StabilizeLiftsSpec extends Spec { + + case class Foo(id: Long) + + "stabilize lifts" - { + "ScalarValueLift" in { + val scalarValue = 1 + val ast = quote(lift(scalarValue)).ast + val astQuat = quatOf[Int] + val (stabilized, state) = StabilizeLifts.stabilize(ast) + stabilized must matchPattern { + case ScalarValueLift("scalarValue", External.Source.Parser, StabilizeLifts.Token(0), _, `astQuat`) => + } + state.replaceTable mustEqual (IMap(StabilizeLifts.Token(0) -> scalarValue)) + StabilizeLifts.revert(stabilized, state) mustEqual (ast) + } + "ScalarQueryLift" in { + val scalarQuery = Seq(1, 2, 3) + val ast = quote(liftQuery(scalarQuery)).ast + val astQuat = ast.quat + val (stabilized, state) = StabilizeLifts.stabilize(ast) + stabilized must matchPattern { case ScalarQueryLift("scalarQuery", StabilizeLifts.Token(0), _, `astQuat`) => + } + StabilizeLifts.revert(stabilized, state) mustEqual (ast) + } + "CaseClassValueLift" in { + + val caseClass = Foo(0L) + val ast = quote(lift(caseClass)).ast + val astQuat = quatOf[Foo] + val (stabilized, state) = StabilizeLifts.stabilize(ast) + stabilized must matchPattern { + case CaseClassValueLift("caseClass", "caseClass", StabilizeLifts.Token(0), `astQuat`) => + } + state.replaceTable mustEqual (IMap(StabilizeLifts.Token(0) -> caseClass)) + StabilizeLifts.revert(stabilized, state) mustEqual (ast) + } + + 
"CaseClassQueryLift" in { + val caseClasses = Seq(Foo(0L), Foo(1L)) + val ast = quote(liftQuery(caseClasses)).ast + val astQuat = ast.quat + val (stabilized, state) = StabilizeLifts.stabilize(ast) + stabilized must matchPattern { case CaseClassQueryLift("caseClasses", StabilizeLifts.Token(0), `astQuat`) => + } + state.replaceTable mustEqual (IMap(StabilizeLifts.Token(0) -> caseClasses)) + StabilizeLifts.revert(stabilized, state) mustEqual (ast) + } + + "multiple lifts" in { + val a = "s" + val b = 2 + val ast = quote(lift(a) + lift(b)).ast + val quatA = Quat.Value + val quatB = Quat.Value + val (stabilized, state) = StabilizeLifts.stabilize(ast) + stabilized must matchPattern { + case BinaryOperation( + ScalarValueLift("a", External.Source.Parser, StabilizeLifts.Token(0), _, `quatA`), + StringOperator.`+`, + ScalarValueLift("b", External.Source.Parser, StabilizeLifts.Token(1), _, `quatB`) + ) => + } + val expectedTable = IMap(StabilizeLifts.Token(0) -> a, StabilizeLifts.Token(1) -> b) + state.replaceTable must contain theSameElementsAs (expectedTable) + StabilizeLifts.revert(stabilized, state) mustEqual (ast) + } + } +} diff --git a/quill-core/src/test/scala/io/getquill/norm/StablizeLiftsSpec.scala b/quill-core/src/test/scala/io/getquill/norm/StablizeLiftsSpec.scala deleted file mode 100644 index 7fd03b5a35..0000000000 --- a/quill-core/src/test/scala/io/getquill/norm/StablizeLiftsSpec.scala +++ /dev/null @@ -1,77 +0,0 @@ -package io.getquill.norm - -import io.getquill.ast._ -import io.getquill.base.Spec -import io.getquill.quat._ -import io.getquill.MirrorContexts.testContext._ -import scala.collection.immutable.{Map => IMap} - -class StablizeLiftsSpec extends Spec { - - case class Foo(id: Long) - - "stablize lifts" - { - "ScalarValueLift" in { - val scalarValue = 1 - val ast = quote(lift(scalarValue)).ast - val astQuat = quatOf[Int] - val (stablized, state) = StablizeLifts.stablize(ast) - stablized must matchPattern { - case ScalarValueLift("scalarValue", 
External.Source.Parser, StablizeLifts.Token(0), _, `astQuat`) => - } - state.replaceTable mustEqual (IMap(StablizeLifts.Token(0) -> scalarValue)) - StablizeLifts.revert(stablized, state) mustEqual (ast) - } - "ScalarQueryLift" in { - val scalarQuery = Seq(1, 2, 3) - val ast = quote(liftQuery(scalarQuery)).ast - val astQuat = ast.quat - val (stablized, state) = StablizeLifts.stablize(ast) - stablized must matchPattern { case ScalarQueryLift("scalarQuery", StablizeLifts.Token(0), _, `astQuat`) => - } - StablizeLifts.revert(stablized, state) mustEqual (ast) - } - "CaseClassValueLift" in { - - val caseClass = Foo(0L) - val ast = quote(lift(caseClass)).ast - val astQuat = quatOf[Foo] - val (stablized, state) = StablizeLifts.stablize(ast) - stablized must matchPattern { - case CaseClassValueLift("caseClass", "caseClass", StablizeLifts.Token(0), `astQuat`) => - } - state.replaceTable mustEqual (IMap(StablizeLifts.Token(0) -> caseClass)) - StablizeLifts.revert(stablized, state) mustEqual (ast) - } - - "CaseClassQueryLift" in { - val caseClasses = Seq(Foo(0L), Foo(1L)) - val ast = quote(liftQuery(caseClasses)).ast - val astQuat = ast.quat - val (stablized, state) = StablizeLifts.stablize(ast) - stablized must matchPattern { case CaseClassQueryLift("caseClasses", StablizeLifts.Token(0), `astQuat`) => - } - state.replaceTable mustEqual (IMap(StablizeLifts.Token(0) -> caseClasses)) - StablizeLifts.revert(stablized, state) mustEqual (ast) - } - - "multiple lifts" in { - val a = "s" - val b = 2 - val ast = quote(lift(a) + lift(b)).ast - val quatA = Quat.Value - val quatB = Quat.Value - val (stablized, state) = StablizeLifts.stablize(ast) - stablized must matchPattern { - case BinaryOperation( - ScalarValueLift("a", External.Source.Parser, StablizeLifts.Token(0), _, `quatA`), - StringOperator.`+`, - ScalarValueLift("b", External.Source.Parser, StablizeLifts.Token(1), _, `quatB`) - ) => - } - val expectedTable = IMap(StablizeLifts.Token(0) -> a, StablizeLifts.Token(1) -> b) - 
state.replaceTable must contain theSameElementsAs (expectedTable) - StablizeLifts.revert(stablized, state) mustEqual (ast) - } - } -} diff --git a/quill-core/src/test/scala/io/getquill/quat/QuatSpec.scala b/quill-core/src/test/scala/io/getquill/quat/QuatSpec.scala index 0de1ef3dd5..8a9be1f1d6 100644 --- a/quill-core/src/test/scala/io/getquill/quat/QuatSpec.scala +++ b/quill-core/src/test/scala/io/getquill/quat/QuatSpec.scala @@ -79,7 +79,7 @@ class QuatSpec extends Spec { } "not propagating from transparent infixes where it is dynamic: query-ops function" in { - // I.e. can't propagate from a dynamic query since don't know the inside of the quat varaible + // I.e. can't propagate from a dynamic query since don't know the inside of the quat variable def appendFooFun[Q <: Query[_]]: Quoted[Q => Q] = quote((q: Q) => sql"$q APPEND FOO".pure.as[Q]) val q = quote(appendFooFun(query[MyPerson])) q.ast.quat mustEqual Quat.Unknown diff --git a/quill-core/src/test/scala/io/getquill/quotation/QuotationSpec.scala b/quill-core/src/test/scala/io/getquill/quotation/QuotationSpec.scala index b8c449a09a..91d01be020 100644 --- a/quill-core/src/test/scala/io/getquill/quotation/QuotationSpec.scala +++ b/quill-core/src/test/scala/io/getquill/quotation/QuotationSpec.scala @@ -838,7 +838,7 @@ class QuotationSpec extends Spec { } """ mustNot compile } - "comparing types with suclassing" - { + "comparing types with subclassing" - { case class Foo(id: Int) trait Foot case class Bar(id: Int) @@ -1087,7 +1087,7 @@ class QuotationSpec extends Spec { } """ mustNot compile } - "comparing types with suclassing" - { + "comparing types with subclassing" - { case class Foo(id: Int) trait Foot case class Bar(id: Int) @@ -1746,7 +1746,7 @@ class QuotationSpec extends Spec { val q: Quoted[Int] = quote(i + 1) quote(unquote(q)).ast mustEqual BinaryOperation(Constant.auto(1), NumericOperator.`+`, Constant.auto(1)) } - "abritrary tree" in { + "arbitrary tree" in { object test { def a = quote("a") } @@ 
-1838,7 +1838,7 @@ class QuotationSpec extends Spec { l.value mustEqual t.a l.encoder mustEqual stringEncoder } - "abritrary" in { + "arbitrary" in { class A { def x = 1 } val q = quote(lift(new A().x)) q.liftings.`new A().x`.value mustEqual new A().x diff --git a/quill-core/src/test/scala/io/getquill/util/InterpolatorSpec.scala b/quill-core/src/test/scala/io/getquill/util/InterpolatorSpec.scala index 50748be490..bfcdf422a4 100644 --- a/quill-core/src/test/scala/io/getquill/util/InterpolatorSpec.scala +++ b/quill-core/src/test/scala/io/getquill/util/InterpolatorSpec.scala @@ -21,7 +21,7 @@ class InterpolatorSpec extends Spec { trace"small object: $small and $small".generateString() mustEqual (("small object: Small(123) and Small(123) ", 0)) } - "traces multiple small objects multline text" in { + "traces multiple small objects multiline text" in { trace"""small object: $small and foo and bar $small""".generateString() mustEqual ( ( diff --git a/quill-engine/src/main/scala/io/getquill/Model.scala b/quill-engine/src/main/scala/io/getquill/Model.scala index ce3f4dbd20..74455be279 100644 --- a/quill-engine/src/main/scala/io/getquill/Model.scala +++ b/quill-engine/src/main/scala/io/getquill/Model.scala @@ -132,8 +132,7 @@ sealed trait Insert[E] extends QAC[E, Nothing] with Action[E] { * insert.onConflictUpdate(_.id)((t, e) => t.col -> (e.col + t.col)) * }}} * If insert statement violates conflict target then the column `col` of row - * will be updated with sum of existing value and and proposed `col` in - * insert. + * will be updated with sum of existing value and proposed `col` in insert. 
*/ @compileTimeOnly(NonQuotedException.message) def onConflictUpdate(target: E => Any, targets: (E => Any)*)( diff --git a/quill-engine/src/main/scala/io/getquill/PostgresDialect.scala b/quill-engine/src/main/scala/io/getquill/PostgresDialect.scala index 9e7c4c2806..71fac8ea39 100644 --- a/quill-engine/src/main/scala/io/getquill/PostgresDialect.scala +++ b/quill-engine/src/main/scala/io/getquill/PostgresDialect.scala @@ -200,7 +200,7 @@ trait PostgresDialect ReplaceLiftings.of(clause)(batchAlias, List()) // Choose table alias based on how assignments clauses were realized. Batch-Alias should mean the same thing as when NormalizeFilteredActionAliases was run in Idiom should the - // value should be the same thing as the cluases that were realiased. + // value should be the same thing as the clauses that were realiased. if (valuesLifts.nonEmpty) { val tableAlias = NormalizeFilteredActionAliases.chooseAlias(table.name, Some(batchAlias)) val colsId = batchAlias diff --git a/quill-engine/src/main/scala/io/getquill/ast/Ast.scala b/quill-engine/src/main/scala/io/getquill/ast/Ast.scala index 18bf429466..9bc7f00eb9 100644 --- a/quill-engine/src/main/scala/io/getquill/ast/Ast.scala +++ b/quill-engine/src/main/scala/io/getquill/ast/Ast.scala @@ -173,7 +173,7 @@ case class ConcatMap(query: Ast, alias: Ident, body: Ast) extends Query { def bestQuat: Quat = body.bestQuat } -case class SortBy(query: Ast, alias: Ident, criterias: Ast, ordering: Ast) extends Query { +case class SortBy(query: Ast, alias: Ident, criteria: Ast, ordering: Ast) extends Query { def quat = query.quat def bestQuat: Quat = query.bestQuat } @@ -332,7 +332,7 @@ final class Ident private (val name: String)(theQuat: => Quat)(val visibility: V override def withQuat(quat: => Quat): Ident = Ident.Opinionated(this.name, quat, this.visibility) - // need to define a copy which will propogate current value of visibility into the copy + // need to define a copy which will propagate current value of visibility into the 
copy def copy(name: String = this.name, quat: => Quat = this.quat): Ident = Ident.Opinionated(name, quat, this.visibility) } @@ -379,7 +379,7 @@ final class ExternalIdent private (val name: String)(theQuat: => Quat)(val renam override def hashCode = id.hashCode() - // need to define a copy which will propogate current value of visibility into the copy + // need to define a copy which will propagate current value of visibility into the copy def copy(name: String = this.name, quat: => Quat = this.quat): ExternalIdent = ExternalIdent.Opinionated(name, quat, this.renameable) } @@ -696,7 +696,7 @@ case class Val(name: Ident, body: Ast) extends Ast { sealed trait Action extends Ast -// Note, technically return type of Actions for most Actions is a Int value but Quat here is used for Retruning Quat types +// Note, technically return type of Actions for most Actions is an Int value but Quat here is used for Returning Quat types case class Update(query: Ast, assignments: List[Assignment]) extends Action { def quat = query.quat; def bestQuat = query.bestQuat } diff --git a/quill-engine/src/main/scala/io/getquill/ast/CollectAst.scala b/quill-engine/src/main/scala/io/getquill/ast/CollectAst.scala index 080cffc04e..3f217d9844 100644 --- a/quill-engine/src/main/scala/io/getquill/ast/CollectAst.scala +++ b/quill-engine/src/main/scala/io/getquill/ast/CollectAst.scala @@ -5,7 +5,7 @@ import scala.reflect.ClassTag /** * The collection is treated as immutable internally but an ArrayBuffer is more - * effecient then Collection.list at appending which is mostly what the + * efficient than Collection.list at appending which is mostly what the * collection does */ class CollectAst[T](p: PartialFunction[Ast, T], val state: Queue[T]) extends StatefulTransformer[Queue[T]] { diff --git a/quill-engine/src/main/scala/io/getquill/context/cassandra/CqlQuery.scala b/quill-engine/src/main/scala/io/getquill/context/cassandra/CqlQuery.scala index 0dad9b3085..3dbf85009a 100644 ---
a/quill-engine/src/main/scala/io/getquill/context/cassandra/CqlQuery.scala +++ b/quill-engine/src/main/scala/io/getquill/context/cassandra/CqlQuery.scala @@ -48,7 +48,7 @@ object CqlQuery { private def apply(q: Query, limit: Option[Ast], select: List[Ast], distinct: Boolean): CqlQuery = q match { case SortBy(q: Query, x, p, o) => - apply(q, orderByCriterias(p, o), limit, select, distinct) + apply(q, orderByCriteria(p, o), limit, select, distinct) case other => apply(q, List(), limit, select, distinct) } @@ -101,11 +101,11 @@ object CqlQuery { case other => fail(s"Cql supports only properties as select elements. Found: $other") } - private def orderByCriterias(ast: Ast, ordering: Ast): List[OrderByCriteria] = + private def orderByCriteria(ast: Ast, ordering: Ast): List[OrderByCriteria] = (ast, ordering) match { - case (Tuple(properties), ord: PropertyOrdering) => properties.flatMap(orderByCriterias(_, ord)) + case (Tuple(properties), ord: PropertyOrdering) => properties.flatMap(orderByCriteria(_, ord)) case (Tuple(properties), TupleOrdering(ord)) => - properties.zip(ord).flatMap { case (a, o) => orderByCriterias(a, o) } + properties.zip(ord).flatMap { case (a, o) => orderByCriteria(a, o) } case (a: Property, o: PropertyOrdering) => List(OrderByCriteria(a, o)) case other => fail(s"Invalid order by criteria $ast") } diff --git a/quill-engine/src/main/scala/io/getquill/idiom/StatementInterpolator.scala b/quill-engine/src/main/scala/io/getquill/idiom/StatementInterpolator.scala index 0f9d0898fb..9bf9204a9b 100644 --- a/quill-engine/src/main/scala/io/getquill/idiom/StatementInterpolator.scala +++ b/quill-engine/src/main/scala/io/getquill/idiom/StatementInterpolator.scala @@ -120,9 +120,9 @@ object StatementInterpolator { private def flatten(tokens: List[Token]): List[Token] = { - def unestStatements(tokens: List[Token]): List[Token] = + def unnestStatements(tokens: List[Token]): List[Token] = tokens.flatMap { - case Statement(innerTokens) => unestStatements(innerTokens) 
+ case Statement(innerTokens) => unnestStatements(innerTokens) case token => token :: Nil } @@ -146,7 +146,7 @@ object StatementInterpolator { resultBuilder.result() } - (unestStatements _) + (unnestStatements _) .andThen(mergeStringTokens _) .apply(tokens) } diff --git a/quill-engine/src/main/scala/io/getquill/norm/ApplyMap.scala b/quill-engine/src/main/scala/io/getquill/norm/ApplyMap.scala index 2483be36dc..b0f76c7fa0 100644 --- a/quill-engine/src/main/scala/io/getquill/norm/ApplyMap.scala +++ b/quill-engine/src/main/scala/io/getquill/norm/ApplyMap.scala @@ -9,8 +9,8 @@ import io.getquill.sql.Common.ContainsImpurities /** * Notes for the conceptual examples below. Gin and Tonic were used as * prototypical examples of things that "are joined". In the table form, they - * are alude to the following tonics is Query[Tonic], tonic is Tonic gins is - * Query[Gin], is Gin waters is Query[Water], water is Water + * are aliased to the following: tonics is Query[Tonic], tonic is Tonic, gins is + * Query[Gin], gin is Gin, waters is Query[Water], water is Water * * ginifySpirit is some f:Spirit => Gin tonicfyWater is some f:Tonic => Water * bottleGin is some f:Gin => Bottle Additionally Map(a,b,c).quat is the same as * @@ -21,7 +21,7 @@ class ApplyMap(traceConfig: TraceConfig) { val interp = new Interpolator(TraceType.ApplyMap, traceConfig, 3) import interp._ - // Note, since the purpose of this beta reduction is to check isomophism types should not actually be + // Note, since the purpose of this beta reduction is to check isomorphism types should not actually be // checked here since they may be wrong (i.e. if there is no actual isomorphism).
private def isomorphic(e: Ast, c: Ast, alias: Ident) = BetaReduction(e, TypeBehavior.ReplaceWithReduction, alias -> c) == c @@ -157,7 +157,7 @@ class ApplyMap(traceConfig: TraceConfig) { // === Conceptual Example (same as for groupBy.map) === // Instead of transforming spirit into gin and the bottling the gin, bottle the // spirit first, then have the spirit transform into gin inside of the bottles. - // (The only differnce between this and groupByMap is that we have two kinds of bottles: A and B) + // (The only difference between this and groupByMap is that we have two kinds of bottles: A and B) // // spirits.map(spirit => ginifySpirit).groupByMap(gin => bottleGinA)(gin => bottleGinB) => // spirits.groupByMap(spirit => bottleGinA[gin := ginifySpirit])(spirit => bottleGinB[gin := ginifySpirit]) @@ -190,7 +190,7 @@ class ApplyMap(traceConfig: TraceConfig) { // Instead of combining gin and tonic, pour spirit and water into a cup and transform both // the spirit into gin, and the water into tonic inside of the cup. // - // spirits.map(spirit => ginifySpririt).join(waters.map(water => tonicfyWater)).on((gin, tonic) => on) + // spirits.map(spirit => ginifySpirit).join(waters.map(water => tonicfyWater)).on((gin, tonic) => on) // spirits.join(waters).on((spirit, water) => on[gin := ginifySpirit, tonic := tonicfyWater]).map(t:Tuple[(Gin, Tonic)] => (ginifySpirit[spirit := t._1], tonicfyWater[water := t._2])) // a.map(b => c).*join(d.map(e => f)).on((iA, iB) => on) @@ -225,7 +225,7 @@ class ApplyMap(traceConfig: TraceConfig) { // inside of the tup into tonic. 
// // spirits.map(spirit => ginifySpirit).join(tonics).on((gin, tonic) => on) - // spirits.join(tonics).on((spirit, tonic) => on[gin := ginifySpirit]).map(t:Tuple[(Spririt, Tonic)] => (ginifySpirit[spirit := t._1], t._2)) :Tuple[(Gin, Tonic)] + // spirits.join(tonics).on((spirit, tonic) => on[gin := ginifySpirit]).map(t:Tuple[(Spirit, Tonic)] => (ginifySpirit[spirit := t._1], t._2)) :Tuple[(Gin, Tonic)] // a.map(b => c).*join(d).on((iA, iB) => on) // a.*join(d).on((b, iB) => on[iA := c]).map(t => (c[b := t._1], t._2)) diff --git a/quill-engine/src/main/scala/io/getquill/norm/AttachToEntity.scala b/quill-engine/src/main/scala/io/getquill/norm/AttachToEntity.scala index 57fc69ac18..054ba489a7 100644 --- a/quill-engine/src/main/scala/io/getquill/norm/AttachToEntity.scala +++ b/quill-engine/src/main/scala/io/getquill/norm/AttachToEntity.scala @@ -34,7 +34,7 @@ import io.getquill.ast._ * FlatMap(A, a, Filter(Entity(C), {dangerous_tmp}, If(a == x, foo, bar)) * }}} * - * If `{dangerious_tmp}` is the Ident 'a' then the following happens: (I have + * If `{dangerous_tmp}` is the Ident 'a' then the following happens: (I have * added curly braces {} around this Ident just to distinguish it) * {{{ * FlatMap(A, a, Filter(Entity(C), {a}, If(b == x, foo, bar)) diff --git a/quill-engine/src/main/scala/io/getquill/norm/BetaReduction.scala b/quill-engine/src/main/scala/io/getquill/norm/BetaReduction.scala index a23d505f29..c4dedabd13 100644 --- a/quill-engine/src/main/scala/io/getquill/norm/BetaReduction.scala +++ b/quill-engine/src/main/scala/io/getquill/norm/BetaReduction.scala @@ -12,7 +12,7 @@ import scala.collection.immutable.{Map => IMap} * CC(foo:V) and T is CC(foo:V, bar:V) (NOTE: see the notes on Quat * Shorthand Syntax in Quats.scala if unfamiliar with the syntax above) However * if T is not a subtype of X, then we need to throw an error. 
The exception to - * this is in the case where we are substutiting a real type for a Quat.Null or + * this is in the case where we are substituting a real type for a Quat.Null or * Quat.Generic (roughly speaking, a 'Bottom Type'). In that case, just do the * substitution. This general behavior we call `SubstituteSubtypes`, it is also * considered the default. @@ -23,7 +23,7 @@ import scala.collection.immutable.{Map => IMap} * plugging in a Generic type that is being specialized (e.g. X is Quat.Generic) * or reducing some type CC(foo:V) to the corresponding renamed type * CC(foo:V)[foo->renameFoo]. This general behavior we call - * `ReplaceWithReduction` i.e. Quat types are replaced with whatever varaibles + * `ReplaceWithReduction` i.e. Quat types are replaced with whatever variables * are being beta-reduced irregardless of subtyping. */ sealed trait TypeBehavior diff --git a/quill-engine/src/main/scala/io/getquill/norm/ExpandReturning.scala b/quill-engine/src/main/scala/io/getquill/norm/ExpandReturning.scala index 1557ff1ccd..4097c7a8f5 100644 --- a/quill-engine/src/main/scala/io/getquill/norm/ExpandReturning.scala +++ b/quill-engine/src/main/scala/io/getquill/norm/ExpandReturning.scala @@ -9,7 +9,7 @@ import io.getquill.{NamingStrategy, ReturnAction, IdiomContext} /** * Take the `.returning` part in a query that contains it and return the array - * of columns representing of the returning seccovtion with any other operations + * of columns representing of the returning section with any other operations * etc... that they might contain. 
*/ object ExpandReturning { diff --git a/quill-engine/src/main/scala/io/getquill/norm/NormalizeCaching.scala b/quill-engine/src/main/scala/io/getquill/norm/NormalizeCaching.scala index d59acc0cd4..d7f568db6f 100644 --- a/quill-engine/src/main/scala/io/getquill/norm/NormalizeCaching.scala +++ b/quill-engine/src/main/scala/io/getquill/norm/NormalizeCaching.scala @@ -7,16 +7,16 @@ object NormalizeCaching { private val cache = new ConcurrentHashMap[Ast, Ast] def apply(f: Ast => Ast): Ast => Ast = { ori => - val (stablized, state) = StablizeLifts.stablize(ori) - val cachedR = cache.get(stablized) + val (stabilized, state) = StabilizeLifts.stabilize(ori) + val cachedR = cache.get(stabilized) val normalized = if (cachedR != null) { cachedR } else { - val r = f(stablized) - cache.put(stablized, r) + val r = f(stabilized) + cache.put(stabilized, r) r } - StablizeLifts.revert(normalized, state) + StabilizeLifts.revert(normalized, state) } } diff --git a/quill-engine/src/main/scala/io/getquill/norm/RenameProperties.scala b/quill-engine/src/main/scala/io/getquill/norm/RenameProperties.scala index 650a091274..1aa5908d70 100644 --- a/quill-engine/src/main/scala/io/getquill/norm/RenameProperties.scala +++ b/quill-engine/src/main/scala/io/getquill/norm/RenameProperties.scala @@ -7,7 +7,7 @@ import io.getquill.util.Messages.{TraceType, title} /** * Rename properties now relies on the Quats themselves to propagate field - * renames. The previous itreations of this phase relied on schema propagation + * renames. The previous iterations of this phase relied on schema propagation * via stateful transforms holding field-renames which were then compared to * Property AST elements. 
This was a painstakingly complex and highly * error-prone especially when embedded objects were used requiring computation @@ -21,12 +21,12 @@ import io.getquill.util.Messages.{TraceType, title} * This has the simple requirement that renames must be propagated fully before * they are actually committed so that the knowledge of what needs to be renamed * into what can be distributed easily throughout the AST.
  • Once these - * future-renames are staged to Quats throught the AST, a simple stateless + * future-renames are staged to Quats through the AST, a simple stateless * reduction will then apply the renames to the Property AST elements around the * Ident's (and potentially Lifts etc...) with the renamed Quats. * * The entire process above can be done with a series of stateless - * transformations with straighforward operations since the majority of the + * transformations with straightforward operations since the majority of the * logic actually lives within the Quats themselves. */ class RenameProperties(traceConfig: TraceConfig) { @@ -73,7 +73,7 @@ object CompleteRenames extends StatelessTransformer { } } -/** Take renames propogated to the quats and apply them to properties */ +/** Take renames propagated to the quats and apply them to properties */ class ApplyRenamesToProps(traceConfig: TraceConfig) extends StatelessTransformer { val interp = new Interpolator(TraceType.RenameProperties, traceConfig, 1) @@ -161,7 +161,7 @@ object SeedRenames extends StatelessTransformer { } // Represents a nested property path to an identity i.e. Property(Property(... 
Ident(), ...)) -object PropertyMatroshka { +object PropertyMatryoshka { def traverse(initial: Property): Option[(Ast, List[String], List[Renameable])] = initial match { diff --git a/quill-engine/src/main/scala/io/getquill/norm/RepropagateQuats.scala b/quill-engine/src/main/scala/io/getquill/norm/RepropagateQuats.scala index fcc6350804..93bacd7d9a 100644 --- a/quill-engine/src/main/scala/io/getquill/norm/RepropagateQuats.scala +++ b/quill-engine/src/main/scala/io/getquill/norm/RepropagateQuats.scala @@ -184,7 +184,7 @@ class RepropagateQuats(traceConfig: TraceConfig) extends StatelessTransformer { case OnConflict.Properties(props) => val propsR = props.map { // Recreate the assignment with new idents but only if we need to repropagate - case prop @ PropertyMatroshka(ident: Ident, _, _) => + case prop @ PropertyMatryoshka(ident: Ident, _, _) => trace"Repropagate OnConflict.Properties Quat ${oca.quat.suppress(msg)} from $oca into:" andReturn BetaReduction(prop, RWR, ident -> ident.retypeQuatFrom(oca.quat)).asInstanceOf[Property] case other => @@ -214,7 +214,7 @@ class RepropagateQuats(traceConfig: TraceConfig) extends StatelessTransformer { OnConflict.Update(assignmentsR) case _ => act } - trace"Completing OnConflict Repropogation: " andReturn + trace"Completing OnConflict Repropagation: " andReturn OnConflict(actionR, targetR, actR) case other => super.apply(other) diff --git a/quill-engine/src/main/scala/io/getquill/norm/SheathLeafClauses.scala b/quill-engine/src/main/scala/io/getquill/norm/SheathLeafClauses.scala index a58dbbe7ee..89aaff0516 100644 --- a/quill-engine/src/main/scala/io/getquill/norm/SheathLeafClauses.scala +++ b/quill-engine/src/main/scala/io/getquill/norm/SheathLeafClauses.scala @@ -181,13 +181,13 @@ case class SheathLeafClauses(state: Option[String], traceConfig: TraceConfig) */ case GroupByMap(query, eg, by, e, LeafQuat(body)) => val innerState = query match { - // If it's an infix inside e.g. 
Grp(i:Infix,..)(e,by) the higher-level apply should have changed it approporately + // If it's an infix inside e.g. Grp(i:Infix,..)(e,by) the higher-level apply should have changed it appropriately // by adding an extra Map step inside which has a CaseClass that holds a new attribute that we will pass around // e.g. from GrpTo(leaf,e,e)(e,Agg(e)) should have changed to GrpTo(M(leaf,e,CC(i->e)),e,e.i)(e,Agg(M(e->e.i))) case infix: io.getquill.ast.Infix => val newId = Ident("i", infix.quat) Some((Map(infix, newId, CaseClass.Single("i" -> newId)), Some("i"))) - // If it's a query inside e.g. Grp(qry:Query,..)(e,by) the higher-level apply should have changed it approporately + // If it's a query inside e.g. Grp(qry:Query,..)(e,by) the higher-level apply should have changed it appropriately // e.g. from GrpTo(ent,e,e.v)(e,Agg(e)) should have changed to GrpTo(ent,e,CC(v->e.v))(e,Agg(M(e->e.v)) case _: Query => val (q, s) = apply(query) @@ -223,13 +223,13 @@ case class SheathLeafClauses(state: Option[String], traceConfig: TraceConfig) // Typically the body of a groupBy.map is an aggregation. case Map(grpBy @ GroupBy(LeafQuat(query), eg, LeafQuat(by)), e, LeafQuat(body)) => val innerState = query match { - // If it's an infix inside e.g. Map(Grp(i:Infix),e,by) the higher-level apply should have changed it approporately + // If it's an infix inside e.g. Map(Grp(i:Infix),e,by) the higher-level apply should have changed it appropriately // by adding an extra Map step inside which has a CaseClass that holds a new attribute that we will pass around // e.g. from Map(Grp(leaf,e,e),e,Agg(e)) should have changed to Map(Grp(M(leaf,e,CC(i->e)),e,e.i),e,Agg(M(e->e.i))) case infix: io.getquill.ast.Infix => val newId = Ident("i", infix.quat) Some((Map(infix, newId, CaseClass.Single("i" -> newId)), Some("i"))) - // If it's a query inside e.g. Map(Grp(qry:Query),e,by) the higher-level apply should have changed it approporately + // If it's a query inside e.g. 
Map(Grp(qry:Query),e,by) the higher-level apply should have changed it appropriately // e.g. from Map(Grp(M(ent,e,e.v),e,e),e,Agg(e)) should have changed to Map(Grp(M(ent,e,CC(v->e.v)),e,e.v),e,Agg(M(e->e.v))) case _: Query => val (q, s) = apply(query) diff --git a/quill-engine/src/main/scala/io/getquill/norm/StablizeLifts.scala b/quill-engine/src/main/scala/io/getquill/norm/StabilizeLifts.scala similarity index 66% rename from quill-engine/src/main/scala/io/getquill/norm/StablizeLifts.scala rename to quill-engine/src/main/scala/io/getquill/norm/StabilizeLifts.scala index 65f792d435..84a73345ec 100644 --- a/quill-engine/src/main/scala/io/getquill/norm/StablizeLifts.scala +++ b/quill-engine/src/main/scala/io/getquill/norm/StabilizeLifts.scala @@ -3,10 +3,10 @@ package io.getquill.norm import io.getquill.ast._ import scala.collection.immutable.{Map => IMap} -private[getquill] object StablizeLifts { +private[getquill] object StabilizeLifts { - def stablize(ast: Ast): (Ast, State) = { - val (a, t) = StubLiftValus(State(IMap.empty, Token(0))).apply(ast) + def stabilize(ast: Ast): (Ast, State) = { + val (a, t) = StubLiftValues(State(IMap.empty, Token(0))).apply(ast) (a, t.state) } @@ -49,32 +49,32 @@ private[getquill] object StablizeLifts { } } - case class StubLiftValus(state: State) extends StatefulTransformer[State] { + case class StubLiftValues(state: State) extends StatefulTransformer[State] { override def apply(e: Ast): (Ast, StatefulTransformer[State]) = e match { case l: Lift => val (ast, ss) = applyLift(l) - (ast, StubLiftValus(ss)) + (ast, StubLiftValues(ss)) case others => super.apply(others) } private def applyLift(ast: Lift): (Ast, State) = ast match { case l: ScalarValueLift => - val stub = state.nextToken - val stablized = l.copy(value = stub) - stablized -> state.addReplace(stub, l.value) + val stub = state.nextToken + val stabilized = l.copy(value = stub) + stabilized -> state.addReplace(stub, l.value) case l: ScalarQueryLift => - val stub = 
state.nextToken - val stablized = l.copy(value = stub) - stablized -> state.addReplace(stub, l.value) + val stub = state.nextToken + val stabilized = l.copy(value = stub) + stabilized -> state.addReplace(stub, l.value) case l: CaseClassValueLift => - val stub = state.nextToken - val stablized = l.copy(value = stub) - stablized -> state.addReplace(stub, l.value) + val stub = state.nextToken + val stabilized = l.copy(value = stub) + stabilized -> state.addReplace(stub, l.value) case l: CaseClassQueryLift => - val stub = state.nextToken - val stablized = l.copy(value = stub) - stablized -> state.addReplace(stub, l.value) + val stub = state.nextToken + val stabilized = l.copy(value = stub) + stabilized -> state.addReplace(stub, l.value) } } } diff --git a/quill-engine/src/main/scala/io/getquill/norm/SymbolicReduction.scala b/quill-engine/src/main/scala/io/getquill/norm/SymbolicReduction.scala index 018cad4156..29c14768d5 100644 --- a/quill-engine/src/main/scala/io/getquill/norm/SymbolicReduction.scala +++ b/quill-engine/src/main/scala/io/getquill/norm/SymbolicReduction.scala @@ -7,7 +7,7 @@ import io.getquill.util.TraceConfig * This stage represents Normalization Stage1: Symbolic Reduction in Philip * Wadler's Paper "A Practical Theory of Language Integrated Query", given in * Figure 11. - * http://homepages.inf.ed.ac.uk/slindley/papers/practical-theory-of-linq.pdf + * https://homepages.inf.ed.ac.uk/slindley/papers/practical-theory-of-linq.pdf * * It represents foundational normalizations done to sequences that represents * queries. 
In Wadler's paper, he characterizes them as `for x in P ...`` @@ -44,7 +44,7 @@ class SymbolicReduction(traceConfig: TraceConfig) { val er = AttachToEntity(Filter(_, _, cr))(e) Some(FlatMap(a, d, er)) - // This transformation does not have an analogue in Wadler's paper, it represents the fundemental nature of the Monadic 'bind' function + // This transformation does not have an analogue in Wadler's paper, it represents the fundamental nature of the Monadic 'bind' function // that A.flatMap(a => B).flatMap(b => C) is isomorphic to A.flatMap(a => B.flatMap(b => C)). // // a.flatMap(b => c).flatMap(d => e) => diff --git a/quill-engine/src/main/scala/io/getquill/norm/capture/AvoidAliasConflict.scala b/quill-engine/src/main/scala/io/getquill/norm/capture/AvoidAliasConflict.scala index 202cf087c1..4b1f3e53e2 100644 --- a/quill-engine/src/main/scala/io/getquill/norm/capture/AvoidAliasConflict.scala +++ b/quill-engine/src/main/scala/io/getquill/norm/capture/AvoidAliasConflict.scala @@ -48,7 +48,7 @@ import scala.collection.immutable.Set * however remains the same. To make sure that aliases do not conflict. * * One important side-function of this transformation is to transform temporary - * variables (e.g. as created by the `AttachToEntity` phase) into permanant ones + * variables (e.g. as created by the `AttachToEntity` phase) into permanent ones * of the form x[0-9]+. Since `AvoidAliasConflict` typically runs not on the * entire Ast but the sub-parts of it used by normalizations, making temporary * aliases permanent cannot be done in these sub-parts because the 'state' of @@ -83,7 +83,7 @@ private[getquill] case class AvoidAliasConflict(state: Set[IdentName], detemp: B } } - // Cannot realize direct super-cluase of a join because of how ExpandJoin does $a$b. + // Cannot realize direct super-clause of a join because of how ExpandJoin does $a$b. // This is tested in JoinComplexSpec which verifies that ExpandJoin behaves correctly. 
 object CanRealias { def unapply(q: Ast): Boolean = @@ -323,9 +323,9 @@ private[getquill] object AvoidAliasConflict { } /** - * Make sure query parameters do not collide with paramters of a AST function. - * Do this by walkning through the function's subtree and transforming and - * queries encountered. + * Make sure query parameters do not collide with parameters of an AST + * function. Do this by walking through the function's subtree and + * transforming any queries encountered. */ def sanitizeVariables(f: Function, dangerousVariables: Set[IdentName], traceConfig: TraceConfig): Function = AvoidAliasConflict(dangerousVariables, false, traceConfig).applyFunction(f) diff --git a/quill-engine/src/main/scala/io/getquill/norm/capture/DemarcateExternalAliases.scala b/quill-engine/src/main/scala/io/getquill/norm/capture/DemarcateExternalAliases.scala index 70fae4ab58..5c96d3b7d9 100644 --- a/quill-engine/src/main/scala/io/getquill/norm/capture/DemarcateExternalAliases.scala +++ b/quill-engine/src/main/scala/io/getquill/norm/capture/DemarcateExternalAliases.scala @@ -13,7 +13,7 @@ import io.getquill.ast._ * clause. These two filters will be combined into one at which point the * meaning of `r.i` in the 2nd filter will be confused for the first filter's * alias (i.e. the `r` in `filter(r => ...)`. Therefore, we need to change this - * vunerable `r.i` in the second filter clause to an `ExternalIdent` before any + * vulnerable `r.i` in the second filter clause to an `ExternalIdent` before any * of the simplifications are done. 
 * * Note that we only want to do this for Queries inside of a `Returning` clause diff --git a/quill-engine/src/main/scala/io/getquill/quat/Quat.scala b/quill-engine/src/main/scala/io/getquill/quat/Quat.scala index c0f3fb2e9e..c16f46b861 100644 --- a/quill-engine/src/main/scala/io/getquill/quat/Quat.scala +++ b/quill-engine/src/main/scala/io/getquill/quat/Quat.scala @@ -153,7 +153,7 @@ sealed trait Quat { } case (Quat.Generic, fieldName) => io.getquill.util.Messages.trace( - s"The field '${fieldName}' was looked up from from a Generic Quat. Assuming it will also be Quat.Generic", + s"The field '${fieldName}' was looked up from a Generic Quat. Assuming it will also be Quat.Generic", traceType = TraceType.Warning ) Quat.Unknown diff --git a/quill-engine/src/main/scala/io/getquill/quotation/FreeVariables.scala b/quill-engine/src/main/scala/io/getquill/quotation/FreeVariables.scala index 8d6cb8c874..4d640d6de2 100644 --- a/quill-engine/src/main/scala/io/getquill/quotation/FreeVariables.scala +++ b/quill-engine/src/main/scala/io/getquill/quotation/FreeVariables.scala @@ -89,7 +89,7 @@ case class FreeVariables(state: State) extends StatefulTransformer[State] { case q @ GroupBy(a, b, c) => (q, free(a, b, c)) case q @ GroupByMap(a, b, c, d, e) => // First search for free variables in the groupBy's `by` clause, then search for them in the `to` clause - // if any were found int he `by` clause, propogate them forward to the to-clause + // if any were found in the `by` clause, propagate them forward to the to-clause val s1 = free(a, b, c) val s2 = new FreeVariables(s1.state).free(a, d, e) (q, s2) diff --git a/quill-engine/src/main/scala/io/getquill/sql/SqlQuery.scala b/quill-engine/src/main/scala/io/getquill/sql/SqlQuery.scala index 3241ee39ff..df0ce67d9a 100644 --- a/quill-engine/src/main/scala/io/getquill/sql/SqlQuery.scala +++ b/quill-engine/src/main/scala/io/getquill/sql/SqlQuery.scala @@ -259,7 +259,7 @@ class SqlQueryApply(traceConfig: TraceConfig) { // Note: In the
future, in the GroupByMap case need to verify that columns used in aggregations are actually contained in the grouping // we can use the list that comes out of flatGroupByAsts for reference for fields being grouped-by - // (e.g. need to keep in mind embedded objects could be in them... so we might have to traverse mutliple levels + // (e.g. need to keep in mind embedded objects could be in them... so we might have to traverse multiple levels // of properties in order to verify) // Given a clause that looks like: @@ -380,25 +380,25 @@ class SqlQueryApply(traceConfig: TraceConfig) { )(quat) case SortBy(q, Ident(alias, _), p, o) => - val b = base(q, alias, false) - val criterias = orderByCriterias(p, o, b.from) + val b = base(q, alias, false) + val criteria = orderByCriteria(p, o, b.from) // If the sortBy body uses the filter alias, make sure it matches one of the aliases in the fromContexts if ( b.orderBy.isEmpty && (!CollectAst.byType[Ident](p).map(_.name).contains(alias) || collectAliases(b.from) .contains(alias)) ) trace"Flattening| SortBy(Ident) [Simple]" andReturn - b.copy(orderBy = criterias)(quat) + b.copy(orderBy = criteria)(quat) else trace"Flattening| SortBy(Ident) [Complex]" andReturn FlattenSqlQuery( from = QueryContext(apply(q), alias) :: Nil, - orderBy = criterias, + orderBy = criteria, select = select(alias, quat) )(quat) // TODO Finish describing - // Happens when you either have an aggrgation in the middle of a query + // Happens when you either have an aggregation in the middle of a query // ... // Or as the result of a map case Aggregation(op, q: Query) => @@ -468,7 +468,7 @@ class SqlQueryApply(traceConfig: TraceConfig) { // selects from an alias of an outer clause. 
For example, query[Person].map(p => Name(p.firstName, p.lastName)).distinctOn(_.name) // (Let's say Person(firstName, lastName, age), Name(first, last)) will turn into // SELECT DISTINCT ON (p.name), p.firstName AS first, p.lastName AS last, p.age FROM Person - // This doesn't work beause `name` in `p.name` doesn't exist yet. Therefore we have to nest this in a subquery: + // This doesn't work because `name` in `p.name` doesn't exist yet. Therefore we have to nest this in a subquery: // SELECT DISTINCT ON (p.name) FROM (SELECT p.firstName AS first, p.lastName AS last, p.age FROM Person p) AS p // The only exception to this is if we are directly selecting from an entity: // query[Person].distinctOn(_.firstName) which should be fine: SELECT (x.firstName), x.firstName, x.lastName, a.age FROM Person x @@ -508,14 +508,14 @@ class SqlQueryApply(traceConfig: TraceConfig) { case other => QueryContext(apply(other), alias) } - private def orderByCriterias(ast: Ast, ordering: Ast, from: List[FromContext]): List[OrderByCriteria] = + private def orderByCriteria(ast: Ast, ordering: Ast, from: List[FromContext]): List[OrderByCriteria] = (ast, ordering) match { - case (Tuple(properties), ord: PropertyOrdering) => properties.flatMap(orderByCriterias(_, ord, from)) + case (Tuple(properties), ord: PropertyOrdering) => properties.flatMap(orderByCriteria(_, ord, from)) case (Tuple(properties), TupleOrdering(ord)) => - properties.zip(ord).flatMap { case (a, o) => orderByCriterias(a, o, from) } + properties.zip(ord).flatMap { case (a, o) => orderByCriteria(a, o, from) } // if its a quat product, use ExpandSelection to break it down into its component fields and apply the ordering to all of them case (id @ Ident(_, _: Quat.Product), ord) => - new ExpandSelection(from).ofSubselect(List(SelectValue(ast))).map(_.ast).flatMap(orderByCriterias(_, ord, from)) + new ExpandSelection(from).ofSubselect(List(SelectValue(ast))).map(_.ast).flatMap(orderByCriteria(_, ord, from)) case (a, o: 
PropertyOrdering) => List(OrderByCriteria(a, o)) case other => fail(s"Invalid order by criteria $ast") } diff --git a/quill-engine/src/main/scala/io/getquill/sql/idiom/SqlIdiom.scala b/quill-engine/src/main/scala/io/getquill/sql/idiom/SqlIdiom.scala index f205e6b72f..2fea936a4b 100644 --- a/quill-engine/src/main/scala/io/getquill/sql/idiom/SqlIdiom.scala +++ b/quill-engine/src/main/scala/io/getquill/sql/idiom/SqlIdiom.scala @@ -77,7 +77,7 @@ trait SqlIdiom extends Idiom { } implicit val transpileContextImplicit: IdiomContext = idiomContext - implicit val tokernizer: Tokenizer[Ast] = defaultTokenizer + implicit val tokenizer: Tokenizer[Ast] = defaultTokenizer val interp = new Interpolator(TraceType.SqlNormalizations, idiomContext.traceConfig, 1) import interp._ @@ -398,9 +398,9 @@ trait SqlIdiom extends Idiom { } protected def tokenOrderBy( - criterias: List[OrderByCriteria] + criteria: List[OrderByCriteria] )(implicit astTokenizer: Tokenizer[Ast], strategy: NamingStrategy) = - stmt"ORDER BY ${criterias.token}" + stmt"ORDER BY ${criteria.token}" implicit def sourceTokenizer(implicit astTokenizer: Tokenizer[Ast], diff --git a/quill-engine/src/main/scala/io/getquill/sql/idiom/VerifySqlQuery.scala b/quill-engine/src/main/scala/io/getquill/sql/idiom/VerifySqlQuery.scala index 43fa8ddee5..3c33064475 100644 --- a/quill-engine/src/main/scala/io/getquill/sql/idiom/VerifySqlQuery.scala +++ b/quill-engine/src/main/scala/io/getquill/sql/idiom/VerifySqlQuery.scala @@ -72,7 +72,7 @@ object VerifySqlQuery { case free => Some( Error(free.map(f => Ident(f.name, Quat.Value)), ast) - ) // Quat is not actually needed here here just for the sake of the Error Ident + ) // Quat is not actually needed here just for the sake of the Error Ident } } @@ -141,9 +141,9 @@ object VerifySqlQuery { throw new IllegalArgumentException("Cannot use Option.tableExists on a table or embedded case class") case cond: If if cond.`then`.isInstanceOf[Quat.Product] => - throw throw new 
IllegalArgumentException("Cannot use table or embedded case class as a result of a condition") + throw new IllegalArgumentException("Cannot use table or embedded case class as a result of a condition") case cond: If if cond.`else`.isInstanceOf[Quat.Product] => - throw throw new IllegalArgumentException("Cannot use table or embedded case class as a result of a condition") + throw new IllegalArgumentException("Cannot use table or embedded case class as a result of a condition") case cond: If => checkIllegalIdents(cond.condition) case other => None diff --git a/quill-engine/src/main/scala/io/getquill/sql/norm/ExpandNestedQueries.scala b/quill-engine/src/main/scala/io/getquill/sql/norm/ExpandNestedQueries.scala index 3a26215d92..1d8a607627 100644 --- a/quill-engine/src/main/scala/io/getquill/sql/norm/ExpandNestedQueries.scala +++ b/quill-engine/src/main/scala/io/getquill/sql/norm/ExpandNestedQueries.scala @@ -4,7 +4,7 @@ import io.getquill.ast._ import io.getquill.context.sql._ import io.getquill.sql.norm.{InContext, QueryLevel, SelectPropertyProtractor, StatelessQueryTransformer} import io.getquill.ast.PropertyOrCore -import io.getquill.norm.PropertyMatroshka +import io.getquill.norm.PropertyMatryoshka import io.getquill.quat.Quat class ExpandSelection(from: List[FromContext]) { @@ -45,7 +45,7 @@ class ExpandSelection(from: List[FromContext]) { val exp = SelectPropertyProtractor(from)(ast, alternateQuat) exp.map { case (p: Property, Nil) => - // If the quat-path is nothing and there is some pre-existing alias (e.g. if we came from a case-class or quat) + // If the quat-path is nothing and there is some preexisting alias (e.g. if we came from a case-class or quat) // the use that. Otherwise the selection is of an individual element so use the element name (before the rename) // as the alias. 
alias match { @@ -103,7 +103,7 @@ object ExpandNestedQueries extends StatelessQueryTransformer { def apply(p: Ast): Ast = p match { - case p @ PropertyMatroshka(inner, path, renameables) => + case p @ PropertyMatryoshka(inner, path, renameables) => val isSubselect = inContext.isSubselect(p) val propsAlreadyFixed = renameables.forall(_ == Renameable.Fixed) val isPropertyRenamed = p.prevName.isDefined diff --git a/quill-engine/src/main/scala/io/getquill/sql/norm/RemoveExtraAlias.scala b/quill-engine/src/main/scala/io/getquill/sql/norm/RemoveExtraAlias.scala index 5ae1ade49b..95d4f4cfd0 100644 --- a/quill-engine/src/main/scala/io/getquill/sql/norm/RemoveExtraAlias.scala +++ b/quill-engine/src/main/scala/io/getquill/sql/norm/RemoveExtraAlias.scala @@ -6,9 +6,9 @@ import io.getquill.context.sql.{FlattenSqlQuery, SelectValue} /** * Remove aliases at the top level of the AST since they are not needed (quill - * uses select row indexes to figure out what data corresponds to what - * encodeable object) as well as entities whose aliases are the same as their - * selection e.g. "select x.foo as foo" since this just adds syntactic noise. + * uses select row indexes to figure out what data corresponds to what encodable + * object) as well as entities whose aliases are the same as their selection + * e.g. "select x.foo as foo" since this just adds syntactic noise. */ case class RemoveExtraAlias(strategy: NamingStrategy) extends StatelessQueryTransformer { // Remove aliases that are the same as as the select values. 
Since a strategy may change the name, diff --git a/quill-engine/src/main/scala/io/getquill/sql/norm/RemoveUnusedSelects.scala b/quill-engine/src/main/scala/io/getquill/sql/norm/RemoveUnusedSelects.scala index a05175f263..7343895975 100644 --- a/quill-engine/src/main/scala/io/getquill/sql/norm/RemoveUnusedSelects.scala +++ b/quill-engine/src/main/scala/io/getquill/sql/norm/RemoveUnusedSelects.scala @@ -15,7 +15,7 @@ import io.getquill.context.sql.{ TableContext, UnaryOperationSqlQuery } -import io.getquill.norm.PropertyMatroshka +import io.getquill.norm.PropertyMatryoshka import io.getquill.quat.Quat import scala.collection.mutable @@ -53,7 +53,7 @@ object RemoveUnusedSelects { // Since we first need to replace select values from super queries onto sub queries, // take the newly filtered selects instead of the ones in the query which are pre-filtered // ... unless we are on the top level query. Since in the top level query 'references' - // will always be empty we need to copy through the entire select caluse + // will always be empty we need to copy through the entire select clause val asts = gatherAsts(q, if (doSelectFiltration) newSelect else q.select) // recurse into the from clause with ExpandContext @@ -65,7 +65,7 @@ object RemoveUnusedSelects { q.copy(from = fromContexts, select = newSelect)(q.quat) } else { // If we are on the top level, the list of aliases being used by clauses outer to 'us' - // don't exist since we are the outermost level of the sql. Therefore no filteration + // don't exist since we are the outermost level of the sql. Therefore no filtration // should happen in that case. 
q.copy(from = fromContexts)(q.quat) } @@ -77,7 +77,7 @@ object RemoveUnusedSelects { } private def filterUnused(select: List[SelectValue], references: Set[Property]): List[SelectValue] = { - val usedAliases = references.map { case PropertyMatroshka(_, list, _) => + val usedAliases = references.map { case PropertyMatryoshka(_, list, _) => list.mkString }.toSet select.filter(sv => diff --git a/quill-engine/src/main/scala/io/getquill/sql/norm/SelectPropertyProtractor.scala b/quill-engine/src/main/scala/io/getquill/sql/norm/SelectPropertyProtractor.scala index 0c43024c2e..7439d9d4dd 100644 --- a/quill-engine/src/main/scala/io/getquill/sql/norm/SelectPropertyProtractor.scala +++ b/quill-engine/src/main/scala/io/getquill/sql/norm/SelectPropertyProtractor.scala @@ -3,7 +3,7 @@ package io.getquill.sql.norm import io.getquill.ast.{Ast, Core, Ident, Property, Renameable} import io.getquill.ast.Visibility.{Hidden, Visible} import io.getquill.context.sql.{FlatJoinContext, FromContext, InfixContext, JoinContext, QueryContext, TableContext} -import io.getquill.norm.PropertyMatroshka +import io.getquill.norm.PropertyMatryoshka import io.getquill.quat.Quat import io.getquill.sql.norm.InContext.{InContextType, InInfixContext, InQueryContext, InTableContext} @@ -52,9 +52,9 @@ case class InContext(from: List[FromContext]) { def contextReferenceType(ast: Ast) = { val references = collectTableAliases(from) ast match { - case Ident(v, _) => references.get(v) - case PropertyMatroshka(Ident(v, _), _, _) => references.get(v) - case _ => None + case Ident(v, _) => references.get(v) + case PropertyMatryoshka(Ident(v, _), _, _) => references.get(v) + case _ => None } } @@ -124,7 +124,7 @@ case class SelectPropertyProtractor(from: List[FromContext]) { } // Assuming a property contains only an Ident, Infix or Constant at this point // and all situations where there is a case-class, tuple, etc... 
inside have already been beta-reduced - case prop @ PropertyMatroshka(id @ Core(), _, _) => + case prop @ PropertyMatryoshka(id @ Core(), _, _) => val isEntity = inContext.isEntityReference(id) val effectiveQuat = nonAbstractQuat(prop.quat, alternateQuat) @@ -184,7 +184,7 @@ case class ProtractQuat(refersToEntity: Boolean) { * * This needs to be projected into: * SELECT g.mammid, g.mammood FROM -- (2) so their selection of sub-properties from here is correct - * SELECT gim.mid AS mammid, gim.mood as mammood FROM g -- (1) for mamid and mammood need full quat path here... + * SELECT gim.mid AS mammid, gim.mood as mammood FROM g -- (1) for mammid and mammood need full quat path here... * * (See examples of this in ExpandNestedQueries multiple embedding levels series of tests. Also note that since sub-selection * is typically done from tuples, paths typically start with _1,_2 etc...) diff --git a/quill-engine/src/main/scala/io/getquill/sql/norm/nested/FindUnexpressedInfixes.scala b/quill-engine/src/main/scala/io/getquill/sql/norm/nested/FindUnexpressedInfixes.scala index d3317a5ca2..f4ebb9002f 100644 --- a/quill-engine/src/main/scala/io/getquill/sql/norm/nested/FindUnexpressedInfixes.scala +++ b/quill-engine/src/main/scala/io/getquill/sql/norm/nested/FindUnexpressedInfixes.scala @@ -7,7 +7,7 @@ import io.getquill.ast._ import io.getquill.context.sql.SelectValue /** - * The challenge with appeneding infixes (that have not been used but are still + * The challenge with appending infixes (that have not been used but are still * needed) back into the query, is that they could be inside of * tuples/case-classes that have already been selected, or inside of sibling * elements which have been selected. 
Take for instance a query that looks like diff --git a/quill-engine/src/main/scala/io/getquill/util/Interpolator.scala b/quill-engine/src/main/scala/io/getquill/util/Interpolator.scala index 96b6adaac7..b7111adb4e 100644 --- a/quill-engine/src/main/scala/io/getquill/util/Interpolator.scala +++ b/quill-engine/src/main/scala/io/getquill/util/Interpolator.scala @@ -178,7 +178,7 @@ class Interpolator( case Some((output, indent)) => // Even though we usually want to evaluate the command after the initial log was done // (so that future logs are nested under this one after the intro text but not - // before the return) but we cann't do that in this case because the switch indicating + // before the return) but we can't do that in this case because the switch indicating // whether to output anything or not is dependant on the return value. val result = command diff --git a/quill-engine/src/main/scala/io/getquill/util/Messages.scala b/quill-engine/src/main/scala/io/getquill/util/Messages.scala index 253f83b1f5..b4fae12020 100644 --- a/quill-engine/src/main/scala/io/getquill/util/Messages.scala +++ b/quill-engine/src/main/scala/io/getquill/util/Messages.scala @@ -46,8 +46,8 @@ object Messages { def traceQuats = cache("quill.trace.quat", QuatTrace(variable("quill.trace.quat", "quill_trace_quat", QuatTrace.None.value))) def cacheDynamicQueries = cache( - "quill.query.cacheDaynamic", - variable("quill.query.cacheDaynamic", "query_query_cacheDaynamic", "true").toBoolean + "quill.query.cacheDynamic", + variable("quill.query.cacheDynamic", "query_query_cacheDynamic", "true").toBoolean ) def querySubexpand = cache("quill.query.subexpand", variable("quill.query.subexpand", "query_query_subexpand", "true").toBoolean) diff --git a/quill-jasync-mysql/src/test/scala/io/getquill/context/jasync/mysql/CaseClassQueryJAsyncSpec.scala b/quill-jasync-mysql/src/test/scala/io/getquill/context/jasync/mysql/CaseClassQueryJAsyncSpec.scala index bf8ab23a04..cd73620d9a 100644 --- 
a/quill-jasync-mysql/src/test/scala/io/getquill/context/jasync/mysql/CaseClassQueryJAsyncSpec.scala +++ b/quill-jasync-mysql/src/test/scala/io/getquill/context/jasync/mysql/CaseClassQueryJAsyncSpec.scala @@ -46,6 +46,6 @@ class CaseClassQueryJAsyncSpec extends CaseClassQuerySpec { "Example 3 - Inline Record as Filter" in { await( testContext.run(`Ex 3 Inline Record Usage`) - ) should contain theSameElementsAs `Ex 3 Inline Record Usage exepected result` + ) should contain theSameElementsAs `Ex 3 Inline Record Usage expected result` } } diff --git a/quill-jasync-postgres/src/test/scala/io/getquill/context/jasync/postgres/ArrayAsyncEncodingSpec.scala b/quill-jasync-postgres/src/test/scala/io/getquill/context/jasync/postgres/ArrayAsyncEncodingSpec.scala index 177f2a3c9f..6ef5d0ced5 100644 --- a/quill-jasync-postgres/src/test/scala/io/getquill/context/jasync/postgres/ArrayAsyncEncodingSpec.scala +++ b/quill-jasync-postgres/src/test/scala/io/getquill/context/jasync/postgres/ArrayAsyncEncodingSpec.scala @@ -47,7 +47,7 @@ class ArrayAsyncEncodingSpec extends ArrayEncodingBaseSpec { } // Need to have an actual value in the table in order for the decoder to go off. Previously, - // there was guarenteed to be information there due to ordering of build artifacts but not anymore. + // there was guaranteed to be information there due to ordering of build artifacts but not anymore. 
"fail if found not an array" in { case class RealEncodingTestEntity( v1: String, diff --git a/quill-jasync-postgres/src/test/scala/io/getquill/context/jasync/postgres/CaseClassQueryAsyncSpec.scala b/quill-jasync-postgres/src/test/scala/io/getquill/context/jasync/postgres/CaseClassQueryAsyncSpec.scala index 0c08f9f118..068b58cd78 100644 --- a/quill-jasync-postgres/src/test/scala/io/getquill/context/jasync/postgres/CaseClassQueryAsyncSpec.scala +++ b/quill-jasync-postgres/src/test/scala/io/getquill/context/jasync/postgres/CaseClassQueryAsyncSpec.scala @@ -46,6 +46,6 @@ class CaseClassQueryAsyncSpec extends CaseClassQuerySpec { "Example 3 - Inline Record as Filter" in { await( testContext.run(`Ex 3 Inline Record Usage`) - ) should contain theSameElementsAs `Ex 3 Inline Record Usage exepected result` + ) should contain theSameElementsAs `Ex 3 Inline Record Usage expected result` } } diff --git a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/ArrayAsyncEncodingSpec.scala b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/ArrayAsyncEncodingSpec.scala index 161b3a882a..a92d373eae 100644 --- a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/ArrayAsyncEncodingSpec.scala +++ b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/ArrayAsyncEncodingSpec.scala @@ -44,7 +44,7 @@ class ArrayAsyncEncodingSpec extends ArrayEncodingBaseSpec with ZioSpec { } // Need to have an actual value in the table in order for the decoder to go off. Previously, - // there was guarenteed to be information there due to ordering of build artifacts but not anymore. + // there was guaranteed to be information there due to ordering of build artifacts but not anymore. 
"fail if found not an array" in { case class RealEncodingTestEntity( v1: String, diff --git a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/CaseClassQueryAsyncSpec.scala b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/CaseClassQueryAsyncSpec.scala index 0e6d245513..a9379341f0 100644 --- a/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/CaseClassQueryAsyncSpec.scala +++ b/quill-jasync-zio-postgres/src/test/scala/io/getquill/context/zio/jasync/postgres/CaseClassQueryAsyncSpec.scala @@ -44,6 +44,6 @@ class CaseClassQueryAsyncSpec extends CaseClassQuerySpec with ZioSpec { "Example 3 - Inline Record as Filter" in { runSyncUnsafe( context.run(`Ex 3 Inline Record Usage`) - ) should contain theSameElementsAs `Ex 3 Inline Record Usage exepected result` + ) should contain theSameElementsAs `Ex 3 Inline Record Usage expected result` } } diff --git a/quill-jdbc-monix/src/test/scala/io/getquill/mock/MockTests.scala b/quill-jdbc-monix/src/test/scala/io/getquill/mock/MockTests.scala index 09ba56715a..2d01912cd7 100644 --- a/quill-jdbc-monix/src/test/scala/io/getquill/mock/MockTests.scala +++ b/quill-jdbc-monix/src/test/scala/io/getquill/mock/MockTests.scala @@ -136,7 +136,7 @@ class MockTests extends Spec with AsyncMockitoSugar { val ctx = new PostgresMonixJdbcContext(Literal, ds, EffectWrapper.using(scheduler)) import ctx._ - // In this case, instead of catching the error inside the observable, let it propogate to the top + // In this case, instead of catching the error inside the observable, let it propagate to the top // and make sure that the connection is closed anyhow val results = Try { diff --git a/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioJdbcContext.scala b/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioJdbcContext.scala index 4527c475f3..9c12030e5c 100644 --- a/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioJdbcContext.scala +++ 
b/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioJdbcContext.scala @@ -185,7 +185,7 @@ abstract class ZioJdbcContext[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] * }}} * * The order of operations run in the case that a new connection needs to be - * aquired are as follows:
     getDS from env, acquire-connection,
    +   * acquired are as follows: 
     getDS from env, acquire-connection,
        * set-no-autocommit(connection), put-into-fiberref(connection), op - the
        * corresponding execute_ method which will execute and pull connection from
        * the fiberref, remove-from-fiberref(connection),
    @@ -209,7 +209,7 @@ abstract class ZioJdbcContext[+Dialect <: SqlIdiom, +Naming <: NamingStrategy]
                      attemptBlocking(connection.setAutoCommit(prevAutoCommit)).orDie
                    }
               _ <- ZIO.acquireRelease(currentConnection.set(Some(connection))) { _ =>
    -                 // Note. We are failing the fiber if auto-commit reset fails. For some circumstances this may be too aggresive.
    +                 // Note. We are failing the fiber if auto-commit reset fails. For some circumstances this may be too aggressive.
                      // If the connection pool e.g. Hikari resets this property for a recycled connection anyway doing it here
                      // might not be necessary
                      currentConnection.set(None)
    diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/mock/ZioMockSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/mock/ZioMockSpec.scala
    index 57ea6ce4b0..d29e28b6a4 100644
    --- a/quill-jdbc-zio/src/test/scala/io/getquill/mock/ZioMockSpec.scala
    +++ b/quill-jdbc-zio/src/test/scala/io/getquill/mock/ZioMockSpec.scala
    @@ -175,7 +175,7 @@ class ZioMockSpec extends AnyFreeSpec with MockitoSugar { // with AsyncMockitoSu
         val ctx = new PostgresZioJdbcContext(Literal)
         import ctx._
     
    -    // In this case, instead of catching the error inside the observable, let it propogate to the top
    +    // In this case, instead of catching the error inside the observable, let it propagate to the top
         // and make sure that the connection is closed anyhow
         val resultMsg = Unsafe.unsafe { implicit u =>
           zio.Runtime.default.unsafe.run {
    diff --git a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/Encoders.scala b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/Encoders.scala
    index 702d046ef9..bebf77651d 100644
    --- a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/Encoders.scala
    +++ b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/Encoders.scala
    @@ -84,13 +84,13 @@ trait BasicTimeEncoders { self: Encoders =>
       implicit val instantEncoder: Encoder[Instant] =
         encoder(Types.TIMESTAMP_WITH_TIMEZONE, (index, value, row) => row.setTimestamp(index, Timestamp.from(value)))
     
    -  implicit val offseTimeEncoder: Encoder[OffsetTime] =
    +  implicit val offsetTimeEncoder: Encoder[OffsetTime] =
         encoder(
           Types.TIME,
           (index, value, row) =>
             row.setTime(index, java.sql.Time.valueOf(value.withOffsetSameInstant(ZoneOffset.UTC).toLocalTime))
         )
    -  implicit val offseDateTimeEncoder: Encoder[OffsetDateTime] =
    +  implicit val offsetDateTimeEncoder: Encoder[OffsetDateTime] =
         encoder(
           Types.TIMESTAMP_WITH_TIMEZONE,
           (index, value, row) => row.setTimestamp(index, java.sql.Timestamp.from(value.toInstant))
    @@ -127,8 +127,8 @@ trait ObjectGenericTimeEncoders { self: Encoders =>
       implicit val instantEncoder: Encoder[Instant] =
         encoder(jdbcTypeOfInstant, (index, value, row) => row.setObject(index, jdbcEncodeInstant(value), jdbcTypeOfInstant))
     
    -  implicit val offseTimeEncoder: Encoder[OffsetTime] =
    +  implicit val offsetTimeEncoder: Encoder[OffsetTime] =
         encoder(jdbcTypeOfOffsetTime, (index, value, row) => row.setObject(index, value, jdbcTypeOfOffsetTime))
    -  implicit val offseDateTimeEncoder: Encoder[OffsetDateTime] =
    +  implicit val offsetDateTimeEncoder: Encoder[OffsetDateTime] =
         encoder(jdbcTypeOfOffsetDateTime, (index, value, row) => row.setObject(index, value, jdbcTypeOfOffsetDateTime))
     }
    diff --git a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextTypes.scala b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextTypes.scala
    index 13f3a75cb9..964ed19a6b 100644
    --- a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextTypes.scala
    +++ b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextTypes.scala
    @@ -34,7 +34,7 @@ trait JdbcContextTypes[+Dialect <: SqlIdiom, +Naming <: NamingStrategy]
        * Parses instances of java.sql.Types to string form so it can be used in
        * creation of sql arrays. Some databases does not support each of generic
        * types, hence it's welcome to override this method and provide alternatives
    -   * to non-existent types.
    +   * to nonexistent types.
        *
        * @param intType
        *   one of java.sql.Types
    diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/CaseClassQueryJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/CaseClassQueryJdbcSpec.scala
    index 3587e3ac13..21dbaa30c2 100644
    --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/CaseClassQueryJdbcSpec.scala
    +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/CaseClassQueryJdbcSpec.scala
    @@ -43,7 +43,7 @@ class CaseClassQueryJdbcSpec extends CaseClassQuerySpec {
       "Example 3 - Inline Record as Filter" in {
         testContext.run(
           `Ex 3 Inline Record Usage`
    -    ) should contain theSameElementsAs `Ex 3 Inline Record Usage exepected result`
    +    ) should contain theSameElementsAs `Ex 3 Inline Record Usage expected result`
       }
     
       "Example 4 - Ex 4 Mapped Union of Nicknames" in {
    diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/CaseClassQueryJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/CaseClassQueryJdbcSpec.scala
    index fcdcb7bac0..6cfd66e1b5 100644
    --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/CaseClassQueryJdbcSpec.scala
    +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/CaseClassQueryJdbcSpec.scala
    @@ -43,7 +43,7 @@ class CaseClassQueryJdbcSpec extends CaseClassQuerySpec {
       "Example 3 - Inline Record as Filter" in {
         testContext.run(
           `Ex 3 Inline Record Usage`
    -    ) should contain theSameElementsAs `Ex 3 Inline Record Usage exepected result`
    +    ) should contain theSameElementsAs `Ex 3 Inline Record Usage expected result`
       }
     
       "Example 4 - Ex 4 Mapped Union of Nicknames" in {
    diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/CaseClassQueryJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/CaseClassQueryJdbcSpec.scala
    index 6647e3c239..1072282c54 100644
    --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/CaseClassQueryJdbcSpec.scala
    +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/CaseClassQueryJdbcSpec.scala
    @@ -45,7 +45,7 @@ class CaseClassQueryJdbcSpec extends CaseClassQuerySpec {
       "Example 3 - Inline Record as Filter" in {
         testContext.run(
           `Ex 3 Inline Record Usage`
    -    ) should contain theSameElementsAs `Ex 3 Inline Record Usage exepected result`
    +    ) should contain theSameElementsAs `Ex 3 Inline Record Usage expected result`
       }
     
       "Example 4 - Ex 4 Mapped Union of Nicknames" in {
    diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/CaseClassQueryJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/CaseClassQueryJdbcSpec.scala
    index 3be1207245..1374eca6f6 100644
    --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/CaseClassQueryJdbcSpec.scala
    +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/CaseClassQueryJdbcSpec.scala
    @@ -43,7 +43,7 @@ class CaseClassQueryJdbcSpec extends CaseClassQuerySpec {
       "Example 3 - Inline Record as Filter" in {
         testContext.run(
           `Ex 3 Inline Record Usage`
    -    ) should contain theSameElementsAs `Ex 3 Inline Record Usage exepected result`
    +    ) should contain theSameElementsAs `Ex 3 Inline Record Usage expected result`
       }
     
       "Example 4 - Ex 4 Mapped Union of Nicknames" in {
    diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/CaseClassQueryJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/CaseClassQueryJdbcSpec.scala
    index 25f3eb7d17..1f0183ed62 100644
    --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/CaseClassQueryJdbcSpec.scala
    +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/CaseClassQueryJdbcSpec.scala
    @@ -43,6 +43,6 @@ class CaseClassQueryJdbcSpec extends CaseClassQuerySpec {
       "Example 3 - Inline Record as Filter" in {
         testContext.run(
           `Ex 3 Inline Record Usage`
    -    ) should contain theSameElementsAs `Ex 3 Inline Record Usage exepected result`
    +    ) should contain theSameElementsAs `Ex 3 Inline Record Usage expected result`
       }
     }
    diff --git a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/CaseClassQueryNdbcPostgresSpec.scala b/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/CaseClassQueryNdbcPostgresSpec.scala
    index d7637ed4f4..3fd97c7180 100644
    --- a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/CaseClassQueryNdbcPostgresSpec.scala
    +++ b/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/CaseClassQueryNdbcPostgresSpec.scala
    @@ -44,6 +44,6 @@ class CaseClassQueryNdbcPostgresSpec extends CaseClassQuerySpec {
       "Example 3 - Inline Record as Filter" in {
         get(
           context.run(`Ex 3 Inline Record Usage`)
    -    ) must contain theSameElementsAs `Ex 3 Inline Record Usage exepected result`
    +    ) must contain theSameElementsAs `Ex 3 Inline Record Usage expected result`
       }
     }
    diff --git a/quill-orientdb/src/main/scala/io/getquill/context/orientdb/OrientDBIdiom.scala b/quill-orientdb/src/main/scala/io/getquill/context/orientdb/OrientDBIdiom.scala
    index 46e24ebf5c..80fc0eb9fa 100644
    --- a/quill-orientdb/src/main/scala/io/getquill/context/orientdb/OrientDBIdiom.scala
    +++ b/quill-orientdb/src/main/scala/io/getquill/context/orientdb/OrientDBIdiom.scala
    @@ -38,7 +38,7 @@ trait OrientDBIdiom extends Idiom {
       private def doTranslate(ast: Ast, cached: Boolean, executionType: ExecutionType, idiomContext: IdiomContext)(implicit
         naming: NamingStrategy
       ): (Ast, Statement, ExecutionType) = {
    -    implicit val implcitIdiomContext: IdiomContext = idiomContext
    +    implicit val implicitIdiomContext: IdiomContext = idiomContext
         val normalizedAst =
           if (cached)
             NormalizeCaching { ast: Ast => SqlNormalize(ast, idiomContext.config) }(ast)
    @@ -209,9 +209,9 @@ trait OrientDBIdiom extends Idiom {
       }
     
       protected def tokenOrderBy(
    -    criterias: List[OrderByCriteria]
    +    criteria: List[OrderByCriteria]
       )(implicit strategy: NamingStrategy, idiomContext: IdiomContext) =
    -    stmt"ORDER BY ${criterias.token}"
    +    stmt"ORDER BY ${criteria.token}"
     
       implicit def sourceTokenizer(implicit strategy: NamingStrategy, idiomContext: IdiomContext): Tokenizer[FromContext] =
         Tokenizer[FromContext] {
    diff --git a/quill-orientdb/src/test/scala/io/getquill/context/orientdb/CaseClassQueryOrientSpec.scala b/quill-orientdb/src/test/scala/io/getquill/context/orientdb/CaseClassQueryOrientSpec.scala
    index e118edfee7..1d3d6b9795 100644
    --- a/quill-orientdb/src/test/scala/io/getquill/context/orientdb/CaseClassQueryOrientSpec.scala
    +++ b/quill-orientdb/src/test/scala/io/getquill/context/orientdb/CaseClassQueryOrientSpec.scala
    @@ -48,7 +48,7 @@ class CaseClassQueryOrientSpec extends Spec {
         query[Contact].filter(p => p.id == filtrationObject.idFilter)
       }
     
    -  val `Ex 3 Inline Record Usage exepected result` = List(
    +  val `Ex 3 Inline Record Usage expected result` = List(
         new Contact(1, "Alex", "Jones", 60, 2, "foo")
       )
     
    @@ -68,6 +68,6 @@ class CaseClassQueryOrientSpec extends Spec {
       "Example 2 - Inline Record as Filter" in {
         testSyncDB.run(
           `Ex 3 Inline Record Usage`
    -    ) must contain theSameElementsAs `Ex 3 Inline Record Usage exepected result`
    +    ) must contain theSameElementsAs `Ex 3 Inline Record Usage expected result`
       }
     }
    diff --git a/quill-spark/src/main/scala/io/getquill/QuillSparkContext.scala b/quill-spark/src/main/scala/io/getquill/QuillSparkContext.scala
    index e6420592bb..71c3e1c022 100644
    --- a/quill-spark/src/main/scala/io/getquill/QuillSparkContext.scala
    +++ b/quill-spark/src/main/scala/io/getquill/QuillSparkContext.scala
    @@ -55,7 +55,7 @@ trait QuillSparkContext extends Context[SparkDialect, Literal] with Encoders wit
           sql"${lift(ds)}".pure.as[Query[T]]
         }
     
    -  // Helper class for the perculateNullArrays method
    +  // Helper class for the percolateNullArrays method
       case class StructElement(column: Column, structField: StructField) {
         def children: Array[StructElement] = structField.dataType match {
           case StructType(fields) => fields.map(f => StructElement(column.getField(f.name), f))
    @@ -88,7 +88,7 @@ trait QuillSparkContext extends Context[SparkDialect, Literal] with Encoders wit
           node.structField.dataType match {
             case st: StructType =>
               // Recursively convert all parent array columns to single null values if all their children are null
    -          val preculatedColumn = struct(node.children.map(percolateNullArraysRecursive(_)).toIndexedSeq: _*)
    +          val percolatedColumn = struct(node.children.map(percolateNullArraysRecursive(_)).toIndexedSeq: _*)
               // Then express that column back out the schema
     
               val mapped =
    @@ -99,7 +99,7 @@ trait QuillSparkContext extends Context[SparkDialect, Literal] with Encoders wit
                   ).otherwise(c)
                 }
     
    -          mapped(preculatedColumn).as(node.structField.name)
    +          mapped(percolatedColumn).as(node.structField.name)
             case _ =>
               node.column.as(node.structField.name)
           }
    diff --git a/quill-spark/src/main/scala/io/getquill/context/spark/SimpleNestedExpansion.scala b/quill-spark/src/main/scala/io/getquill/context/spark/SimpleNestedExpansion.scala
    index fa48e1a1bb..42372f90b3 100644
    --- a/quill-spark/src/main/scala/io/getquill/context/spark/SimpleNestedExpansion.scala
    +++ b/quill-spark/src/main/scala/io/getquill/context/spark/SimpleNestedExpansion.scala
    @@ -59,10 +59,10 @@ object TopLevelExpansion {
      * This unapplier object is used both here and in the SpartDialect select
      * tokenization.
      *
    - *   - unless the Ident has a Concrete Quat.Proudct with a single value, but
    + *   - unless the Ident has a Concrete Quat.Product with a single value, but
      *     that has already been expanded into it's composite elements in
    - *     TopLevelExpanion.apply and the Ident shuold no longer exist in the select
    - *     values.
    + *     TopLevelExpansion.apply and the Ident should no longer exist in the
    + *     select values.
      *
      * Technically, all we we need to do here is to check that the ast element is
      * not an ident, however due to previous issues encountered with surprising
    diff --git a/quill-spark/src/main/scala/io/getquill/context/spark/SparkDialect.scala b/quill-spark/src/main/scala/io/getquill/context/spark/SparkDialect.scala
    index bf8ce825c6..6b75db2a50 100644
    --- a/quill-spark/src/main/scala/io/getquill/context/spark/SparkDialect.scala
    +++ b/quill-spark/src/main/scala/io/getquill/context/spark/SparkDialect.scala
    @@ -54,7 +54,7 @@ trait SparkIdiom extends SqlIdiom with CannotReturn { self =>
         val normalizedAst = EscapeQuestionMarks(SqlNormalize(ast, idiomContext.config))
     
         implicit val implicitIdiomContext: IdiomContext = idiomContext
    -    implicit val tokernizer                         = defaultTokenizer
    +    implicit val tokenizer                          = defaultTokenizer
     
         val token =
           normalizedAst match {
    @@ -125,7 +125,7 @@ trait SparkIdiom extends SqlIdiom with CannotReturn { self =>
             // it is an ident but somehow it's type is not known
             case List(SelectValue(Ident(a, Quat.Placeholder(_)), _, _)) =>
               stmt"${a.token}.*"
    -        // It is an ident but actually it repsents a single sql-level value
    +        // It is an ident but actually it represents a single sql-level value
             case List(SelectValue(Ident(a, _: Quat.Primitive), _, _)) =>
               stmt"${a.token}.*"
             // If the selection is a single value e.g. SelectValue(prop.value), SelectValue(Constant) return it right here as a SingleValuePrimitive
    diff --git a/quill-spark/src/test/scala/io/getquill/context/spark/QuestionMarkSpec.scala b/quill-spark/src/test/scala/io/getquill/context/spark/QuestionMarkSpec.scala
    index 647339a9b8..baf1b6918e 100644
    --- a/quill-spark/src/test/scala/io/getquill/context/spark/QuestionMarkSpec.scala
    +++ b/quill-spark/src/test/scala/io/getquill/context/spark/QuestionMarkSpec.scala
    @@ -26,7 +26,7 @@ class QuestionMarkSpec extends Spec {
         testContext.run(q).collect() should contain theSameElementsAs Seq(peopleList(0))
       }
     
    -  "simple variable usage must work in the middle of a stirng" in {
    +  "simple variable usage must work in the middle of a string" in {
         val newContact      = Contact("Moe", "Rabbenu", 123, 2, "Something ? Something ? Else")
         val extraPeopleList = peopleList :+ newContact
     
    diff --git a/quill-spark/src/test/scala/io/getquill/context/spark/examples/GithubExample.scala b/quill-spark/src/test/scala/io/getquill/context/spark/examples/GitHubExample.scala
    similarity index 98%
    rename from quill-spark/src/test/scala/io/getquill/context/spark/examples/GithubExample.scala
    rename to quill-spark/src/test/scala/io/getquill/context/spark/examples/GitHubExample.scala
    index 677b5f795a..0073e708a7 100644
    --- a/quill-spark/src/test/scala/io/getquill/context/spark/examples/GithubExample.scala
    +++ b/quill-spark/src/test/scala/io/getquill/context/spark/examples/GitHubExample.scala
    @@ -35,7 +35,7 @@ case class Activity(
       org: User
     )
     
    -object GithubExample extends App {
    +object GitHubExample extends App {
     
       val files =
         for {
    diff --git a/quill-spark/src/test/scala/io/getquill/context/spark/examples/TopHashtagsExample.scala b/quill-spark/src/test/scala/io/getquill/context/spark/examples/TopHashtagsExample.scala
    index bd96678882..92f605b18f 100644
    --- a/quill-spark/src/test/scala/io/getquill/context/spark/examples/TopHashtagsExample.scala
    +++ b/quill-spark/src/test/scala/io/getquill/context/spark/examples/TopHashtagsExample.scala
    @@ -56,7 +56,7 @@ object TopHashtagsExample extends App {
             .map(_.toLowerCase)          // normalize hashtags     (Dataset)
             .groupBy($"value")           // group by each hashtag  (Dataframe)
             .agg(fcount("*") as "count") // aggregate the count    (Dataframe)
    -        .orderBy($"count" desc)      // order                  (Datafeame)
    +        .orderBy($"count" desc)      // order                  (Dataframe)
             .limit(n)                    // limit to top results   (Dataframe)
             .as[(String, BigInt)]        // set the type again     (Dataset)
       }
    @@ -64,7 +64,7 @@ object TopHashtagsExample extends App {
       object quill {
         def topHashtags(tweets: Dataset[Tweet], n: Int): Dataset[(String, Long)] =
           run {                             // produce a dataset from the Quill query
    -        liftQuery(tweets)               // trasform the dataset into a Quill query
    +        liftQuery(tweets)               // transform the dataset into a Quill query
               .concatMap(_.text.split(" ")) // split into words and unnest results
               .filter(_.startsWith("#"))    // filter hashtag words
               .map(_.toLowerCase)           // normalize hashtags
    diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/AggregationSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/AggregationSpec.scala
    index 702f84f315..fedca52c96 100644
    --- a/quill-sql/src/test/scala/io/getquill/context/sql/AggregationSpec.scala
    +++ b/quill-sql/src/test/scala/io/getquill/context/sql/AggregationSpec.scala
    @@ -27,7 +27,7 @@ class AggregationSpec extends Spec {
       // SELECT p.age FROM (SELECT x.age + 1 FROM Person x) AS p WHERE p.age = 123
       //   => SELECT p.age + 1 FROM (SELECT x.age FROM Person x) AS p WHERE (p.age + 1) = 123
       // Instead it should remain as the former query
    -  "simple operation should not propogate from nested" in {
    +  "simple operation should not propagate from nested" in {
         ctx.run {
           query[Person].map(p => p.age + 1).nested.filter(p => p == 123)
         }.string mustEqual "SELECT p.x FROM (SELECT p.age + 1 AS x FROM Person p) AS p WHERE p.x = 123"
    @@ -44,7 +44,7 @@ class AggregationSpec extends Spec {
           "sum" in { ctx.run(query[Person].map(p => sum(p.age))).string mustEqual "SELECT SUM(p.age) FROM Person p" }
         }
     
    -    "work correctly with a filter cause that is BEFORE the aggreation" in {
     +    "work correctly with a filter clause that is BEFORE the aggregation" in {
           val q = quote {
             query[Person].filter(p => p.name == "Joe").map(p => (p.id, max(p.name)))
           }
    @@ -160,7 +160,7 @@ class AggregationSpec extends Spec {
             "SELECT p.x FROM (SELECT MAX(p.age) AS x FROM Person p GROUP BY p.age) AS p WHERE p.x > 1000"
         }
     
    -    // Disable thte apply-map phase to make sure these work in cases where this reduction is not possible (e.g. where they use infix etc...).
    +    // Disable the apply-map phase to make sure these work in cases where this reduction is not possible (e.g. where they use infix etc...).
         // Infix has a special case already so want to not use that specifically.
         "work with a map(to-leaf).groupByMap.filter - no ApplyMap" in {
           implicit val d = new DisablePhase { override type Phase = OptionalPhase.ApplyMap :: HNil }
    diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/GroupBySpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/GroupBySpec.scala
    index 64bca45f95..170ed53018 100644
    --- a/quill-sql/src/test/scala/io/getquill/context/sql/GroupBySpec.scala
    +++ b/quill-sql/src/test/scala/io/getquill/context/sql/GroupBySpec.scala
    @@ -21,7 +21,7 @@ class GroupBySpec extends Spec {
               .join(query[Country])
               .on { case (city, country) => city.countryId == country.id }
               .groupBy { case (city, country) => country }
    -          .map { case (country, citysInCountry) => (country.name, citysInCountry.map(cICn => cICn._1)) }
    +          .map { case (country, citiesInCountry) => (country.name, citiesInCountry.map(cICn => cICn._1)) }
               .map { case (country, citiesInCountry) => (country, citiesInCountry.size) }
           )
           testContext.run(q).string mustEqual
    @@ -35,7 +35,7 @@ class GroupBySpec extends Spec {
               .join(query[Country])
               .on { case (city, country) => city.countryId == country.id }
               .groupBy { case (city, country) => country }
    -          .map { case (country, citysInCountry) => (country.name, citysInCountry.map(cICn => cICn._1)) }
    +          .map { case (country, citiesInCountry) => (country.name, citiesInCountry.map(cICn => cICn._1)) }
               .map { case (country, citiesInCountry) => (country, citiesInCountry.size) }
           )
           testContext.run(q).string mustEqual
    @@ -81,8 +81,8 @@ class GroupBySpec extends Spec {
               .join(query[Country])
               .on { case (city, country) => city.countryCode == country.countryCode }
               .groupBy { case (city, country) => country }
    -          .map { case (country, citysInCountry) =>
    -            ((country.countryCode, country.language), citysInCountry.map(cICn => cICn._1))
    +          .map { case (country, citiesInCountry) =>
    +            ((country.countryCode, country.language), citiesInCountry.map(cICn => cICn._1))
               }
               .map { case (country, cityCountries) => (country, cityCountries.size) }
           )
    @@ -143,8 +143,8 @@ class GroupBySpec extends Spec {
               .join(query[Country])
               .on { case (city, country) => city.countryCode == country.countryCode }
               .groupBy { case (city, country) => country }
    -          .map { case (country, citysInCountry) =>
    -            ((country.countryCode, country.language), citysInCountry.map(cICn => cICn._1))
    +          .map { case (country, citiesInCountry) =>
    +            ((country.countryCode, country.language), citiesInCountry.map(cICn => cICn._1))
               }
               .map { case (country, cityCountries) => (country, cityCountries.size) }
           )
    @@ -239,7 +239,7 @@ class GroupBySpec extends Spec {
             "SELECT p.* FROM (SELECT MAX(p.age) FROM Person p GROUP BY p.age) AS p WHERE p > 1000"
         }
     
    -    // Disable thte apply-map phase to make sure these work in cases where this reduction is not possible (e.g. where they use infix etc...).
    +    // Disable the apply-map phase to make sure these work in cases where this reduction is not possible (e.g. where they use infix etc...).
         // Infix has a special case already so want to not use that specifically.
         "work with a map(to-leaf).groupByMap.map.filter - no ApplyMap" in {
           implicit val d = new DisablePhase { override type Phase = OptionalPhase.ApplyMap :: HNil }
    diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/NestedDistinctSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/NestedDistinctSpec.scala
    index 1e81e77fd7..b0e8a8cc8a 100644
    --- a/quill-sql/src/test/scala/io/getquill/context/sql/NestedDistinctSpec.scala
    +++ b/quill-sql/src/test/scala/io/getquill/context/sql/NestedDistinctSpec.scala
    @@ -123,12 +123,12 @@ class NestedDistinctSpec extends Spec {
           case class SimpleEnt(a: Int, b: String)
           case class SimpleEnt2(aa: Int, bb: String)
     
    -      val qschem = quote {
    +      val qschema = quote {
             querySchema[SimpleEnt]("CustomEnt", _.a -> "field_a")
           }
     
           val q = quote {
    -        qschem
    +        qschema
               .map(e => SimpleEnt(e.a + 1, e.b))
               .distinct
               .map(e => SimpleEnt2(e.a + 2, e.b))
    @@ -141,12 +141,12 @@ class NestedDistinctSpec extends Spec {
           case class SimpleEnt(a: Int, b: String)
           case class SimpleEnt2(aa: Int, bb: String)
     
    -      val qschem = quote {
    +      val qschema = quote {
             querySchema[SimpleEnt]("CustomEnt", _.a -> "field_a")
           }
     
           val q = quote {
    -        qschem
    +        qschema
               .map(e => (e.a + 1, sql"foo(${e.b})".as[String]))
               .nested
               .map(e => (e._1 + 2, sql"bar(${e._2})".as[String]))
    @@ -160,12 +160,12 @@ class NestedDistinctSpec extends Spec {
           case class SimpleEnt(a: Int, b: String)
           case class SimpleEnt2(aa: Int, bb: String)
     
    -      val qschem = quote {
    +      val qschema = quote {
             querySchema[SimpleEnt]("CustomEnt", _.a -> "field_a")
           }
     
           val q = quote {
    -        qschem
    +        qschema
               .map(e => (e.a + 1, sql"foo(${e.b})".as[String]))
               .map(e => (e._1 + 2, sql"bar(${e._2})".as[String]))
           }
    @@ -213,49 +213,49 @@ class NestedDistinctSpec extends Spec {
             ctx.run(q).string mustEqual "SELECT e._1, e._2id AS id, e._2theName AS theName FROM (SELECT p.idP AS _1, p.id AS _2id, p.theName AS _2theName FROM Parent p) AS e"
           }
     
    -      "can be propogated across query with naming intact and then used further" in {
    +      "can be propagated across query with naming intact and then used further" in {
             val q = quote {
               query[Parent].map(p => p.emb).distinct.map(e => (e.name, e.id)).distinct.map(tup => (tup._1, tup._2)).distinct
             }
             ctx.run(q).string mustEqual "SELECT DISTINCT p._1theName AS _1, p._1id AS _2 FROM (SELECT DISTINCT p.id AS _1id, p.theName AS _1theName FROM Parent p) AS p"
           }
     
    -      "can be propogated across query with naming intact and then used further - nested" in {
    +      "can be propagated across query with naming intact and then used further - nested" in {
             val q = quote {
               query[Parent].map(p => p.emb).nested.map(e => (e.name, e.id)).nested.map(tup => (tup._1, tup._2)).nested
             }
             ctx.run(q).string mustEqual "SELECT x._1, x._2 FROM (SELECT tup._1, tup._2 FROM (SELECT e.theName AS _1, e.id AS _2 FROM (SELECT p.id, p.theName FROM Parent p) AS e) AS tup) AS x"
           }
     
    -      "can be propogated across query with naming intact - returned as single property" in {
    +      "can be propagated across query with naming intact - returned as single property" in {
             val q = quote {
               query[Parent].map(p => p.emb).distinct.map(e => (e.name))
             }
             ctx.run(q).string mustEqual "SELECT p._1theName AS theName FROM (SELECT DISTINCT p.id AS _1id, p.theName AS _1theName FROM Parent p) AS p"
           }
     
    -      "can be propogated across query with naming intact - and the immediately returned" in {
    +      "can be propagated across query with naming intact - and the immediately returned" in {
             val q = quote {
               query[Parent].map(p => p.emb).nested.map(e => e)
             }
             ctx.run(q).string mustEqual "SELECT x.id, x.theName FROM (SELECT p.id, p.theName FROM Parent p) AS x"
           }
     
    -      "can be propogated across distinct with naming intact - and the immediately returned" in {
    +      "can be propagated across distinct with naming intact - and the immediately returned" in {
             val q = quote {
               query[Parent].map(p => p.emb).distinct.map(e => e)
             }
             ctx.run(q).string mustEqual "SELECT DISTINCT p.id, p.theName FROM Parent p"
           }
     
    -      "can be propogated across query with naming intact and then re-wrapped in case class" in {
    +      "can be propagated across query with naming intact and then re-wrapped in case class" in {
             val q = quote {
               query[Parent].map(p => p.emb).distinct.map(e => Parent(1, e))
             }
             ctx.run(q).string mustEqual "SELECT 1 AS idP, p._1id AS id, p._1theName AS theName FROM (SELECT DISTINCT p.id AS _1id, p.theName AS _1theName FROM Parent p) AS p"
           }
     
    -      "can be propogated across query with naming intact and then re-wrapped in tuple" in {
    +      "can be propagated across query with naming intact and then re-wrapped in tuple" in {
             val q = quote {
               query[Parent].map(p => p.emb).nested.map(e => Parent(1, e))
             }
    diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/SqlQueryMacroSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/SqlQueryMacroSpec.scala
    index bfcfd77ce3..c42cb3df1b 100644
    --- a/quill-sql/src/test/scala/io/getquill/context/sql/SqlQueryMacroSpec.scala
    +++ b/quill-sql/src/test/scala/io/getquill/context/sql/SqlQueryMacroSpec.scala
    @@ -37,7 +37,7 @@ class SqlQueryMacroSpec extends Spec {
             mirror.string mustEqual "SELECT x.s, x.i, x.l, x.o FROM TestEntity t, TestEntity2 x"
           }
         }
    -    "with bindigns" - {
    +    "with bindings" - {
           "one" in {
             val q = quote {
               qr1.filter(t => t.s != lift("s"))
    diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/SqlQuerySpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/SqlQuerySpec.scala
    index 651ae66a65..98b7f9f01d 100644
    --- a/quill-sql/src/test/scala/io/getquill/context/sql/SqlQuerySpec.scala
    +++ b/quill-sql/src/test/scala/io/getquill/context/sql/SqlQuerySpec.scala
    @@ -588,7 +588,7 @@ class SqlQuerySpec extends Spec {
               "SELECT 1 FROM (SELECT DISTINCT t.i AS _1 FROM TestEntity t) AS t" // hel
           }
     
    -      "with map uppsercase" in {
    +      "with map uppercase" in {
             import testContextUpper._
             val q = quote {
               qr1.map(t => t.i).distinct.map(t => 1)
    diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/base/CaseClassQuerySpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/base/CaseClassQuerySpec.scala
    index 8812950d9a..08f2a8e863 100644
    --- a/quill-sql/src/test/scala/io/getquill/context/sql/base/CaseClassQuerySpec.scala
    +++ b/quill-sql/src/test/scala/io/getquill/context/sql/base/CaseClassQuerySpec.scala
    @@ -77,7 +77,7 @@ trait CaseClassQuerySpec extends Spec {
         query[Contact].filter(p => p.firstName == person.firstName && person.lastName == person.lastName)
       }
     
    -  val `Ex 3 Inline Record Usage exepected result` = List(
    +  val `Ex 3 Inline Record Usage expected result` = List(
         new Contact("Alex", "Jones", 60, 2, "foo")
       )
     
    diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/idiom/OffsetWithoutLimitWorkaroundSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/idiom/OffsetWithoutLimitWorkaroundSpec.scala
    index 2ff120528d..5f985452d5 100644
    --- a/quill-sql/src/test/scala/io/getquill/context/sql/idiom/OffsetWithoutLimitWorkaroundSpec.scala
    +++ b/quill-sql/src/test/scala/io/getquill/context/sql/idiom/OffsetWithoutLimitWorkaroundSpec.scala
    @@ -18,7 +18,7 @@ class OffsetWithoutLimitWorkaroundSpec extends Spec {
       }
       import ctx._
     
    -  "creates a synthectic limit" in {
    +  "creates a synthetic limit" in {
         val q = quote {
           qr1.drop(1)
         }
    diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/idiom/SqlIdiomNamingSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/idiom/SqlIdiomNamingSpec.scala
    index 9cba415350..9da01af6e3 100644
    --- a/quill-sql/src/test/scala/io/getquill/context/sql/idiom/SqlIdiomNamingSpec.scala
    +++ b/quill-sql/src/test/scala/io/getquill/context/sql/idiom/SqlIdiomNamingSpec.scala
    @@ -35,7 +35,7 @@ class SqlIdiomNamingSpec extends Spec {
           db.run(query[SomeEntity]).string mustEqual
             "SELECT x.some_column AS someColumn FROM some_entity x"
         }
    -    "mutiple transformations" in {
    +    "multiple transformations" in {
           val db = new SqlMirrorContext(MirrorSqlDialect, NamingStrategy(SnakeCase, UpperCase, Escape))
           import db._
           db.run(query[SomeEntity]).string mustEqual
    diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/norm/RenamePropertiesOverrideSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/norm/RenamePropertiesOverrideSpec.scala
    index cd8d4815ee..a8208310dd 100644
    --- a/quill-sql/src/test/scala/io/getquill/context/sql/norm/RenamePropertiesOverrideSpec.scala
    +++ b/quill-sql/src/test/scala/io/getquill/context/sql/norm/RenamePropertiesOverrideSpec.scala
    @@ -249,7 +249,7 @@ class RenamePropertiesOverrideSpec extends Spec {
         }
     
         "join" - {
    -      "both sidess" in {
    +      "both sides" in {
             val q = quote {
               e.leftJoin(e).on((a, b) => a.s == b.s).map(t => (t._1.s, t._2.map(_.s)))
             }
    @@ -346,7 +346,7 @@ class RenamePropertiesOverrideSpec extends Spec {
         }
       }
     
    -  "respects the schema definition for embeddeds" - {
    +  "respects the schema definition for embedded" - {
         "query" - {
           "without schema" in {
             case class B(c: Int)
    @@ -364,7 +364,7 @@ class RenamePropertiesOverrideSpec extends Spec {
               "SELECT x.bC FROM A x"
           }
         }
    -    "query for Option embeddeds" - {
    +    "query for Option embedded" - {
           "without schema" in {
             case class B(c1: Int, c2: Int)
             case class A(b: Option[B])
    diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/norm/RenamePropertiesSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/norm/RenamePropertiesSpec.scala
    index ccba215a46..3bc8f448aa 100644
    --- a/quill-sql/src/test/scala/io/getquill/context/sql/norm/RenamePropertiesSpec.scala
    +++ b/quill-sql/src/test/scala/io/getquill/context/sql/norm/RenamePropertiesSpec.scala
    @@ -392,7 +392,7 @@ class RenamePropertiesSpec extends Spec {
         }
       }
     
    -  "respects the schema definition for embeddeds" - {
    +  "respects the schema definition for embedded" - {
         "query" - {
           "without schema" in {
             case class B(c: Int)
    @@ -410,7 +410,7 @@ class RenamePropertiesSpec extends Spec {
               "SELECT x.bC FROM A x"
           }
         }
    -    "query for Option embeddeds" - {
    +    "query for Option embedded" - {
           "without schema" in {
             case class B(c1: Int, c2: Int)
             case class A(b: Option[B])
    diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/norm/SheathLeafClausesSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/norm/SheathLeafClausesSpec.scala
    index 4420f9ccb9..7a15aa0f5a 100644
    --- a/quill-sql/src/test/scala/io/getquill/context/sql/norm/SheathLeafClausesSpec.scala
    +++ b/quill-sql/src/test/scala/io/getquill/context/sql/norm/SheathLeafClausesSpec.scala
    @@ -114,7 +114,7 @@ class SheathLeafClausesSpec extends Spec {
                 .join(query[Person].concatMap(t => t.firstName.split(" ")))
                 .on { case (a, b) => a == b }
             )
    -        // TODO star idenfiers should not have aliases
    +        // TODO star identifiers should not have aliases
             ctx.run(q).string mustEqual "SELECT x01.*, x11.* FROM (SELECT UNNEST(SPLIT(p.first_name, ' ')) AS x FROM person p) AS x01 INNER JOIN (SELECT UNNEST(SPLIT(t.first_name, ' ')) AS x FROM person t) AS x11 ON x01.x = x11.x"
           }
     
    diff --git a/quill-sql/src/test/scala/io/getquill/quat/QuatRunSpec.scala b/quill-sql/src/test/scala/io/getquill/quat/QuatRunSpec.scala
    index 6a9b6b5a0c..8b1648ee6e 100644
    --- a/quill-sql/src/test/scala/io/getquill/quat/QuatRunSpec.scala
    +++ b/quill-sql/src/test/scala/io/getquill/quat/QuatRunSpec.scala
    @@ -34,7 +34,7 @@ class QuatRunSpec extends Spec {
           result.string mustEqual "SELECT x.name, x.age FROM MyPerson x APPEND FOO"
         }
     
    -    "should support query-ops function - multile var" in {
    +    "should support query-ops function - multiple var" in {
           def appendFooFun[Q <: Query[_]] = quote((q: Q, i: Int) => sql"$q APPEND $i FOO".transparent.pure.as[Q])
           val q                           = quote(appendFooFun(query[MyPerson], 123))
           q.ast.quat mustEqual Quat.Generic // Is it unknown, how should the reducing work from an infix with multiple vars?
    diff --git a/quill-sql/src/test/sql/postgres-doobie-schema.sql b/quill-sql/src/test/sql/postgres-doobie-schema.sql
    index 3573fe9fba..a19e666077 100644
    --- a/quill-sql/src/test/sql/postgres-doobie-schema.sql
    +++ b/quill-sql/src/test/sql/postgres-doobie-schema.sql
    @@ -1,6 +1,6 @@
     --
     -- The sample data used in the world database is Copyright Statistics
    --- Finland, http://www.stat.fi/worldinfigures.
    +-- Finland, https://www.stat.fi/worldinfigures.
     --
     
     CREATE TABLE IF NOT EXISTS city (