diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md index c60f4bf54b..d2b7b57043 100644 --- a/.github/ISSUE_TEMPLATE.md +++ b/.github/ISSUE_TEMPLATE.md @@ -10,7 +10,7 @@ This template isn't a strict requirement to open issues, but please try to provi ### Steps to reproduce the behavior -If the issue can be reproduced using a [mirror context](http://zio.dev/zio-quill/contexts#mirror-context), please provide a scastie snippet that reproduces it. See https://scastie.scala-lang.org/fwbrasil/Z2CeR2qHQJK6EyQWUBhANA as an example. Remember to select the correct Quill version in the left menu. +If the issue can be reproduced using a [mirror context](https://zio.dev/zio-quill/contexts#mirror-context), please provide a scastie snippet that reproduces it. See https://scastie.scala-lang.org/fwbrasil/Z2CeR2qHQJK6EyQWUBhANA as an example. Remember to select the correct Quill version in the left menu. ### Workaround diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index e7eac2127b..31e4c7aada 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -68,7 +68,7 @@ members of the project's leadership. ## Attribution This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, -available at [http://contributor-covenant.org/version/1/4][version] +available at [https://contributor-covenant.org/version/1/4][version] -[homepage]: http://contributor-covenant.org -[version]: http://contributor-covenant.org/version/1/4/ +[homepage]: https://contributor-covenant.org +[version]: https://contributor-covenant.org/version/1/4/ diff --git a/README.md b/README.md index 58cad77d56..ba84a3c0e9 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ # ZIO Quill -Quill provides a Quoted Domain Specific Language ([QDSL](http://homepages.inf.ed.ac.uk/wadler/papers/qdsl/qdsl.pdf)) to express queries in Scala and execute them in a target language. 
+Quill provides a Quoted Domain Specific Language ([QDSL](https://homepages.inf.ed.ac.uk/wadler/papers/qdsl/qdsl.pdf)) to express queries in Scala and execute them in a target language. [![Production Ready](https://img.shields.io/badge/Project%20Stage-Production%20Ready-brightgreen.svg)](https://github.com/zio/zio/wiki/Project-Stages) ![CI Badge](https://github.com/zio/zio-quill/workflows/CI/badge.svg) [![Sonatype Releases](https://img.shields.io/nexus/r/https/oss.sonatype.org/io.getquill/quill-core_2.12.svg?label=Sonatype%20Release)](https://oss.sonatype.org/content/repositories/releases/io/getquill/quill-core_2.12/) [![Sonatype Snapshots](https://img.shields.io/nexus/s/https/oss.sonatype.org/io.getquill/quill-core_2.12.svg?label=Sonatype%20Snapshot)](https://oss.sonatype.org/content/repositories/snapshots/io/getquill/quill-core_2.12/) [![javadoc](https://javadoc.io/badge2/io.getquill/zio-quill-docs_2.12/javadoc.svg)](https://javadoc.io/doc/io.getquill/zio-quill-docs_2.12) [![ZIO Quill](https://img.shields.io/github/stars/zio/zio-quill?style=social)](https://github.com/zio/zio-quill) @@ -71,11 +71,11 @@ You can notify all current maintainers using the handle `@getquill/maintainers`. ## Acknowledgement -The project was created having Philip Wadler's talk ["A practical theory of language-integrated query"](http://www.infoq.com/presentations/theory-language-integrated-query) as its initial inspiration. The development was heavily influenced by the following papers: +The project was created having Philip Wadler's talk ["A practical theory of language-integrated query"](https://www.infoq.com/presentations/theory-language-integrated-query) as its initial inspiration. 
The development was heavily influenced by the following papers: -* [A Practical Theory of Language-Integrated Query](http://homepages.inf.ed.ac.uk/slindley/papers/practical-theory-of-linq.pdf) -* [Everything old is new again: Quoted Domain Specific Languages](http://homepages.inf.ed.ac.uk/wadler/papers/qdsl/qdsl.pdf) -* [The Flatter, the Better](http://db.inf.uni-tuebingen.de/staticfiles/publications/the-flatter-the-better.pdf) +* [A Practical Theory of Language-Integrated Query](https://homepages.inf.ed.ac.uk/slindley/papers/practical-theory-of-linq.pdf) +* [Everything old is new again: Quoted Domain Specific Languages](https://homepages.inf.ed.ac.uk/wadler/papers/qdsl/qdsl.pdf) +* [The Flatter, the Better](https://db.inf.uni-tuebingen.de/staticfiles/publications/the-flatter-the-better.pdf) ## License diff --git a/build.sbt b/build.sbt index a1a39d6ff5..907324ffab 100644 --- a/build.sbt +++ b/build.sbt @@ -931,7 +931,7 @@ lazy val releaseSettings = Seq( homepage := Some(url("https://zio.dev/zio-quill/")), licenses := List(("Apache License 2.0", url("http://www.apache.org/licenses/LICENSE-2.0"))), developers := List( - Developer("fwbrasil", "Flavio W. Brasil", "", url("http://github.com/fwbrasil")), + Developer("fwbrasil", "Flavio W. Brasil", "", url("https://github.com/fwbrasil")), Developer("deusaquilus", "Alexander Ioffe", "", url("https://github.com/deusaquilus")) ), scmInfo := Some( @@ -963,11 +963,11 @@ lazy val docs = project |
|""".stripMargin, readmeAcknowledgement := - """|The project was created having Philip Wadler's talk ["A practical theory of language-integrated query"](http://www.infoq.com/presentations/theory-language-integrated-query) as its initial inspiration. The development was heavily influenced by the following papers: + """|The project was created having Philip Wadler's talk ["A practical theory of language-integrated query"](https://www.infoq.com/presentations/theory-language-integrated-query) as its initial inspiration. The development was heavily influenced by the following papers: | - |* [A Practical Theory of Language-Integrated Query](http://homepages.inf.ed.ac.uk/slindley/papers/practical-theory-of-linq.pdf) - |* [Everything old is new again: Quoted Domain Specific Languages](http://homepages.inf.ed.ac.uk/wadler/papers/qdsl/qdsl.pdf) - |* [The Flatter, the Better](http://db.inf.uni-tuebingen.de/staticfiles/publications/the-flatter-the-better.pdf)""".stripMargin, + |* [A Practical Theory of Language-Integrated Query](https://homepages.inf.ed.ac.uk/slindley/papers/practical-theory-of-linq.pdf) + |* [Everything old is new again: Quoted Domain Specific Languages](https://homepages.inf.ed.ac.uk/wadler/papers/qdsl/qdsl.pdf) + |* [The Flatter, the Better](https://db.inf.uni-tuebingen.de/staticfiles/publications/the-flatter-the-better.pdf)""".stripMargin, readmeMaintainers := """|- @deusaquilus (lead maintainer) |- @fwbrasil (creator) diff --git a/build/m1/README.MD b/build/m1/README.MD index 5c6c631eda..e30f4064c8 100644 --- a/build/m1/README.MD +++ b/build/m1/README.MD @@ -3,7 +3,7 @@ In order to get this project to build on a Mac with an M1 or later chip, you wil 1. Ensure your Docker Desktop has enough resources. We recommend you set it for at least 4 CPUs and 8.25GB of RAM. Anything less will likely result in mysterious hangs/crashes and much wailing and gnashing of teeth during the build process. ![docker-resources.png](docker-resources.png) -2. 
Enable the Experimental 'Big Sur Virtualization'. Doing so will reduce your build time by nearly 1 hour! On a Macbook Air with 16gb of ram and the 1st gen M1 chip - the build (w/o codegen) should take around 25 minutes give or take with this setting enabled. +2. Enable the Experimental 'Big Sur Virtualization'. Doing so will reduce your build time by nearly 1 hour! On a MacBook Air with 16gb of ram and the 1st gen M1 chip - the build (w/o codegen) should take around 25 minutes give or take with this setting enabled. ![img.png](experimental.png) 3. Use the `docker-compose-m1.yml` file instead of the default `docker-compose.yml` - this adjusts the platform where necessary to match up with the ARM based M1 chip. @@ -28,7 +28,7 @@ Be nice to your M1 system - stop your services - don't down them. Running `dock If you plan to have some rapid build/test cycles - run: `docker-compose -f docker-compose-m1.yml stop` to stop your services. It will preserve the volumes that were created when you ran setup. It'll save you a good amount of time. ## Build With a Specific Scala Version -By default the build executes with Scala 2.13. Not horrible - but if you want to take advantage of the improved compiler of a more recent 2.x verison of Scala you can specify that! +By default the build executes with Scala 2.13. Not horrible - but if you want to take advantage of the improved compiler of a more recent 2.x version of Scala you can specify that! You can simply set the `quill.scala.version` when you start your build: `docker-compose -f docker-compose-m1.yml run sbt sbt -Dquill.scala.version=2.13.6 -Dmodules=db test` diff --git a/build/release.sh b/build/release.sh index d0ac13bf1c..4c5d469832 100755 --- a/build/release.sh +++ b/build/release.sh @@ -127,7 +127,7 @@ then if [[ $ARTIFACT == "publish" ]]; then echo "No-Op Publish for Non Release Snapshot Branch"; fi else VERSION_FILE=$(cat version.sbt) - echo "Github actions branch was: ${BRANCH} and version file is $VERSION_FILE. 
Not Sure what to do." + echo "GitHub actions branch was: ${BRANCH} and version file is $VERSION_FILE. Not Sure what to do." fi else echo "PULL_REQUEST is not 'false' ($PULL_REQUEST). Not doing a release." diff --git a/build/setup_db_scripts.sh b/build/setup_db_scripts.sh index 777db72979..f451d8da08 100755 --- a/build/setup_db_scripts.sh +++ b/build/setup_db_scripts.sh @@ -133,7 +133,7 @@ function setup_sqlserver() { /opt/mssql-tools/bin/sqlcmd -S $1 -U SA -P "QuillRocks!" -d quill_test -i $2 } -# Do a simple necat poll to make sure the oracle database is ready. +# Do a simple netcat poll to make sure the oracle database is ready. # All internal database creation and schema setup scripts are handled # by the container and docker-compose steps. diff --git a/docs/CONTRIBUTING.md b/docs/CONTRIBUTING.md index f1a6611d33..f4e868956a 100644 --- a/docs/CONTRIBUTING.md +++ b/docs/CONTRIBUTING.md @@ -167,18 +167,18 @@ Finally, you can use `sbt` locally. ### All In One ### -To restart the database services, rebuild them, and start with locally explosed ports run: +To restart the database services, rebuild them, and start with locally exposed ports run: docker-compose down && docker-compose build && docker-compose run --rm --service-ports setup Note: Make sure you have exposed all the ports as mentioned above. -## Debugging using Intellij +## Debugging using IntelliJ -[Intellij](https://www.jetbrains.com/idea/) has a comprehensive debugger that also works with macros which is very -helpful when working on Quill. There are two ways to debug Quill macros using Intellij. The first way is to launch SBT in -debug mode and use Intellij to remote debug it. The second way is to launch a debug session -from Intellij from the "Run/Debug Configurations" menu. +[IntelliJ](https://www.jetbrains.com/idea/) has a comprehensive debugger that also works with macros which is very +helpful when working on Quill. There are two ways to debug Quill macros using IntelliJ. 
The first way is to launch SBT in +debug mode and use IntelliJ to remote debug it. The second way is to launch a debug session +from IntelliJ from the "Run/Debug Configurations" menu. ### Debug Macros by Remote Debugging SBT @@ -190,10 +190,10 @@ After this you need to launch sbt with `sbt -jvm-debug 5005`. Note that since th recommended to launch sbt with additional memory, i.e. `sbt -jvm-debug 5005 -mem 4096` otherwise sbt may complain about having memory issues. -Then in Intellij you need to +Then in IntelliJ you need to [add a remote configuration](https://www.jetbrains.com/help/idea/run-debug-configuration-remote-debug.html). The default parameters will work fine (note that we started sbt with the debug port `5005` which is also the default debug port -in Intellij). After you have added the configuration you should be able to start it to start debugging! Feel to free +in IntelliJ). After you have added the configuration you should be able to start it to start debugging! Feel to free to add breakpoints to step through the code. Note that its possible to debug macros (you can even @@ -204,12 +204,12 @@ invocations are cached on a file basis. You can easily do this just by adding ne ### Debug Macros by Launching a Session Firstly, you will need to build Quill with some additional dependencies that include the file `scala.tools.nsc.Main`. -You can do this adding the argument `-DdebugMacro=true` to the sbt launcher. You can do this in the Intellij SBT +You can do this adding the argument `-DdebugMacro=true` to the sbt launcher. You can do this in the IntelliJ SBT menu: -![Intellij-SBT-Settings.png](etc/Intellij-SBT-Settings.png) +![IntelliJ-SBT-Settings.png](etc/IntelliJ-SBT-Settings.png) -In Intellij, go to `Run -> Edit Configurations...` click on the Plus (i.e. `+`) button (or `Add New Configuration`) +In IntelliJ, go to `Run -> Edit Configurations...` click on the Plus (i.e. `+`) button (or `Add New Configuration`) and select `Application`. 
Then enter the following settings: ``` @@ -222,12 +222,12 @@ Build, no error check (make sure to set this since you will frequently want to d ``` It should look like this: -![Intellij-Run-Debug-Config.png](etc/Intellij-Run-Debug-Config.png) +![IntelliJ-Run-Debug-Config.png](etc/IntelliJ-Run-Debug-Config.png) > NOTE In this example, our entry-point into Quill-macro-debugging is `MySqlTest.scala`. -> In our Intellij application configuration this file name is being explicitly specified.Common
object.
*
* class MyStereotypingGen(...) extends ComposeableTraitsGen(...) { - * override def namespacer: Namespacer = ts=> if(ts.tableSchem == "alpha" || - * ts.tableSchem == "bravo") "common" else ts.tableSchem + * override def namespacer: Namespacer = ts=> if(ts.tableSchema == "alpha" || + * ts.tableSchema == "bravo") "common" else ts.tableSchema * * override def memberNamer: MemberNamer = ts => ts.tableName.snakeToLowerCamel * }@@ -105,7 +105,7 @@ import javax.sql.DataSource * // Since PersonDao is inside MyCustomContext.alpha and MyCustomContext.bravo * as opposed to MyCustomContext // there will be no collision. object * MyCustomContext extends SqlMirrorContext[H2Dialect, Literal](H2Dialect, - * Literal) with AlphaExtnsions[H2Dialect, Literal] with + * Literal) with AlphaExtensions[H2Dialect, Literal] with * BravoExtensions[H2Dialect, Literal] */ diff --git a/quill-codegen-jdbc/src/main/scala/io/getquill/codegen/jdbc/gen/DefaultJdbcSchemaReader.scala b/quill-codegen-jdbc/src/main/scala/io/getquill/codegen/jdbc/gen/DefaultJdbcSchemaReader.scala index ac58cfb5a0..800707aa5e 100644 --- a/quill-codegen-jdbc/src/main/scala/io/getquill/codegen/jdbc/gen/DefaultJdbcSchemaReader.scala +++ b/quill-codegen-jdbc/src/main/scala/io/getquill/codegen/jdbc/gen/DefaultJdbcSchemaReader.scala @@ -78,13 +78,13 @@ class DefaultJdbcSchemaReader( override def apply(connectionMaker: JdbcConnectionMaker): Seq[RawSchema[JdbcTableMeta, JdbcColumnMeta]] = { val tableMap = extractTables(connectionMaker) - .map(t => ((t.tableCat, t.tableSchem, t.tableName), t)) + .map(t => ((t.tableCat, t.tableSchema, t.tableName), t)) .toMap val columns = extractColumns(connectionMaker) val tableColumns = columns - .groupBy(c => (c.tableCat, c.tableSchem, c.tableName)) + .groupBy(c => (c.tableCat, c.tableSchema, c.tableName)) .map { case (tup, cols) => tableMap.get(tup).map(RawSchema(_, cols)) } .collect { case Some(tbl) => tbl } diff --git 
a/quill-codegen-jdbc/src/main/scala/io/getquill/codegen/jdbc/gen/JdbcGenerator.scala b/quill-codegen-jdbc/src/main/scala/io/getquill/codegen/jdbc/gen/JdbcGenerator.scala index d2b5f2594d..0d217de47e 100644 --- a/quill-codegen-jdbc/src/main/scala/io/getquill/codegen/jdbc/gen/JdbcGenerator.scala +++ b/quill-codegen-jdbc/src/main/scala/io/getquill/codegen/jdbc/gen/JdbcGenerator.scala @@ -28,11 +28,11 @@ trait JdbcGenerator extends Generator { this: JdbcCodeGeneratorComponents with J override def filter(tc: RawSchema[JdbcTableMeta, JdbcColumnMeta]): Boolean = databaseType match { case MySql => !tc.table.tableCat.existsInSetNocase(defaultExcludedSchemas.toList: _*) - case _ => !tc.table.tableSchem.existsInSetNocase(defaultExcludedSchemas.toList: _*) + case _ => !tc.table.tableSchema.existsInSetNocase(defaultExcludedSchemas.toList: _*) } override def namespacer: Namespacer[TableMeta] = databaseType match { case MySql | SqlServer => tm => tm.tableCat.map(_.snakeToLowerCamel).getOrElse(defaultNamespace) - case _ => tm => tm.tableSchem.orElse(tm.tableCat).map(_.snakeToLowerCamel).getOrElse(defaultNamespace) + case _ => tm => tm.tableSchema.orElse(tm.tableCat).map(_.snakeToLowerCamel).getOrElse(defaultNamespace) } } diff --git a/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/SimpleCodegenSpec.scala b/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/SimpleCodegenSpec.scala index b0451c8a4b..c1b3c73a42 100644 --- a/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/SimpleCodegenSpec.scala +++ b/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/SimpleCodegenSpec.scala @@ -41,9 +41,9 @@ class SimpleCodegenSpec extends AnyFreeSpec with Matchers { querySchemas.zip(body).foreach { case (schema, methodTree) => { methodTree match { - case q"$mods def $tname(...$paramss): $tpt = $expr" => { + case q"$mods def $tname(...$params): $tpt = $expr" => { assert(tname.toString.unquote == schema.defName, s"Def method ${tname} should be ${schema.defName}") - 
assert(paramss.length == 0, s"Def method ${tname} should not have any params for $tname") + assert(params.length == 0, s"Def method ${tname} should not have any params for $tname") val quotedExpr = expr match { case q"quote { $qs_args }" => { @@ -116,9 +116,9 @@ class SimpleCodegenSpec extends AnyFreeSpec with Matchers { val tb = runtimeMirror(this.getClass.getClassLoader).mkToolBox() val cc = tb.parse(generatedCode) cc match { - case q"case class $tpname(...$paramss) extends { ..$earlydefns } with ..$parents" => { + case q"case class $tpname(...$params) extends { ..$earlydefns } with ..$parents" => { tpname.toString() should equal(className) - val constructorList = paramss + val constructorList = params if (constructorList.length != 1) fail(s"Class $tpname has more then one constructor list") val paramList: Seq[_] = constructorList.toList(0).toList if (paramList.length != fields.length) diff --git a/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/StructuralTests.scala b/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/StructuralTests.scala index 50b66a0c76..2f7381498c 100644 --- a/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/StructuralTests.scala +++ b/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/StructuralTests.scala @@ -68,7 +68,7 @@ class StructuralTests extends SimpleCodegenSpec with WithStandardCodegen { } } - "custom naming strateogy" - { + "custom naming strategy" - { val personData = fdgConv("id" -> "Int", "firstname" -> "Option[String]", "lastname" -> "Option[String]", "age" -> "Int")( @@ -165,7 +165,7 @@ class StructuralTests extends SimpleCodegenSpec with WithStandardCodegen { "with snake schema" - { - "prefix collision - different columns without datatype perculation" - { + "prefix collision - different columns without datatype percolation" - { val personData = fdgConv("id" -> "Int", "firstName" -> "Option[String]", "lastName" -> "Option[String]", "age" -> "Int")( @@ -203,7 +203,7 @@ class StructuralTests extends 
SimpleCodegenSpec with WithStandardCodegen { ) } - "prefix collision - different columns with datatype perculation" in { + "prefix collision - different columns with datatype percolation" in { val gens = standardCodegen( `schema_snakecase_twotable_differentcolumns`, entityNamingStrategy = SnakeCaseCustomTable(_.tableName.toLowerCase.replaceFirst("(alpha_)|(bravo_)", "")) @@ -232,7 +232,7 @@ class StructuralTests extends SimpleCodegenSpec with WithStandardCodegen { } } - "prefix collision - different columns with datatype perculation" - { + "prefix collision - different columns with datatype percolation" - { val personData = fdgConv( "id" -> "Int", @@ -276,7 +276,7 @@ class StructuralTests extends SimpleCodegenSpec with WithStandardCodegen { } } - "namespace collision - different columns with datatype perculation" - { + "namespace collision - different columns with datatype percolation" - { val personData = fdgConv( "id" -> "Int", @@ -296,8 +296,8 @@ class StructuralTests extends SimpleCodegenSpec with WithStandardCodegen { entityNamingStrategy = SnakeCaseCustomTable(_.tableName.toLowerCase.replaceFirst("(alpha_)|(bravo_)", "").capitalize), entityNamespacer = - _.tableSchem.map(_.toLowerCase.replaceAll("(alpha)|(bravo)", "public")).getOrElse(this.defaultNamespace), - entityMemberNamer = ts => s"${ts.tableSchem.get}_${ts.tableName}".toLowerCase.snakeToLowerCamel + _.tableSchema.map(_.toLowerCase.replaceAll("(alpha)|(bravo)", "public")).getOrElse(this.defaultNamespace), + entityMemberNamer = ts => s"${ts.tableSchema.get}_${ts.tableName}".toLowerCase.snakeToLowerCamel ).makeGenerators.toList.sortBy(_.caseClassesCode) gens.foreach(gen => LOG.info(gen.code)) diff --git a/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/WithStandardCodegen.scala b/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/WithStandardCodegen.scala index d94b84fa6d..4d48c224c8 100644 --- a/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/WithStandardCodegen.scala +++ 
b/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/WithStandardCodegen.scala @@ -17,7 +17,7 @@ trait WithStandardCodegen { schemaConfig: SchemaConfig, tableFilter: RawSchema[JdbcTableMeta, JdbcColumnMeta] => Boolean = _ => true, entityNamingStrategy: NameParser = LiteralNames, - entityNamespacer: Namespacer[JdbcTableMeta] = ts => ts.tableSchem.getOrElse(defaultNamespace), + entityNamespacer: Namespacer[JdbcTableMeta] = ts => ts.tableSchema.getOrElse(defaultNamespace), entityMemberNamer: JdbcQuerySchemaNaming = ts => ts.tableName.snakeToLowerCamel ) = new JdbcGeneratorBase(() => { diff --git a/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/integration/CodegenTestCases.scala b/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/integration/CodegenTestCases.scala index ccd62e82b6..41df2b9528 100644 --- a/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/integration/CodegenTestCases.scala +++ b/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/integration/CodegenTestCases.scala @@ -251,7 +251,7 @@ object CodegenTestCases { case H2 => tc.table.tableCat.exists(_.toLowerCase startsWith "codegen_test") case Postgres => - tc.table.tableSchem.existsInSetNocase("public", "alpha", "bravo") + tc.table.tableSchema.existsInSetNocase("public", "alpha", "bravo") case SqlServer => tc.table.tableCat.existsInSetNocase("codegen_test", "alpha", "bravo") } @@ -260,7 +260,7 @@ object CodegenTestCases { dbPrefix match { // SQLite does not support user-defined schemas. It has the ability to use multiple files // but does not show what table belongs to what file in any JDBC call. This makes multi-schema - // stereotyping untenable so the respective tests are not not included. + // stereotyping untenable so the respective tests are not included. 
case TestSqliteDB => List( `1-simple-snake`, diff --git a/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/integration/DbHelper.scala b/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/integration/DbHelper.scala index 7863b1ff27..61df0a8362 100644 --- a/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/integration/DbHelper.scala +++ b/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/integration/DbHelper.scala @@ -104,7 +104,7 @@ object DbHelper { (select table_catalog as _1, table_schema as _2, table_name as _3, table_type as _4 from bravo.information_schema.tables) """.as[Query[(String, String, String, String)]] ) - tables.map { case (cat, schem, name, tpe) => JdbcTableMeta(Option(cat), Option(schem), name, Option(tpe)) } + tables.map { case (cat, schema, name, tpe) => JdbcTableMeta(Option(cat), Option(schema), name, Option(tpe)) } } case _ => @@ -113,8 +113,8 @@ object DbHelper { val getSchema: JdbcTableMeta => Option[String] = databaseType match { case MySql => tm => tm.tableCat - case SqlServer => tm => tm.tableCat.flatMap(tc => tm.tableSchem.flatMap(ts => Some(s"${tc}.${ts}"))) - case _ => tm => tm.tableSchem + case SqlServer => tm => tm.tableCat.flatMap(tc => tm.tableSchema.flatMap(ts => Some(s"${tc}.${ts}"))) + case _ => tm => tm.tableSchema } val tables = allTables.filter { tm => @@ -122,18 +122,18 @@ object DbHelper { case MySql => tm.tableCat.existsInSetNocase("codegen_test", "alpha", "bravo") case SqlServer => - tm.tableCat.existsInSetNocase("codegen_test", "alpha", "bravo") && tm.tableSchem.exists( + tm.tableCat.existsInSetNocase("codegen_test", "alpha", "bravo") && tm.tableSchema.exists( _.toLowerCase == "dbo" ) case Oracle => - tm.tableSchem.existsInSetNocase("codegen_test", "alpha", "bravo") + tm.tableSchema.existsInSetNocase("codegen_test", "alpha", "bravo") case Sqlite => // SQLite does not have individual schemas at all. 
true case Postgres => - tm.tableSchem.existsInSetNocase("public", "alpha", "bravo") + tm.tableSchema.existsInSetNocase("public", "alpha", "bravo") case H2 => tm.tableCat.exists(_.toLowerCase == "codegen_test.h2") && - tm.tableSchem.exists(_.toLowerCase != "information_schema") + tm.tableSchema.exists(_.toLowerCase != "information_schema") } } diff --git a/quill-codegen/src/main/scala/io/getquill/codegen/dag/Ancestry.scala b/quill-codegen/src/main/scala/io/getquill/codegen/dag/Ancestry.scala index 119737a073..9ac4a07eef 100644 --- a/quill-codegen/src/main/scala/io/getquill/codegen/dag/Ancestry.scala +++ b/quill-codegen/src/main/scala/io/getquill/codegen/dag/Ancestry.scala @@ -51,7 +51,7 @@ object DefaultNodeCatalog extends NodeCatalog { override def lookup(cls: ClassTag[_]): DagNode = nodeCatalogNodes .find(_.cls == cls) .getOrElse({ - logger.warn(s"Could not find type hiearchy node for: ${cls} Must assume it's a string") + logger.warn(s"Could not find type hierarchy node for: ${cls} Must assume it's a string") StringNode }) } diff --git a/quill-codegen/src/main/scala/io/getquill/codegen/gen/CodeGeneratorComponents.scala b/quill-codegen/src/main/scala/io/getquill/codegen/gen/CodeGeneratorComponents.scala index 9c91786b3d..fbca33fdfc 100644 --- a/quill-codegen/src/main/scala/io/getquill/codegen/gen/CodeGeneratorComponents.scala +++ b/quill-codegen/src/main/scala/io/getquill/codegen/gen/CodeGeneratorComponents.scala @@ -50,7 +50,7 @@ trait CodeGeneratorComponents extends HasBasicMeta with QuerySchemaNaming { *
{@code case class Person(firstName:String, lastName:String, age:Int) * * object Person { // Taking ts.tableName.snakeToLowerCamel will ensure each - * one has a different name. Otherise // all of them will be 'query' which + * one has a different name. Otherwise // all of them will be 'query' which * will result in a compile error. def alphaPerson = * querySchema[Person]("ALPHA.PERSON", ...) def bravoPerson = * querySchema[Person]("BRAVO.PERSON", ...) } }diff --git a/quill-codegen/src/main/scala/io/getquill/codegen/gen/Generator.scala b/quill-codegen/src/main/scala/io/getquill/codegen/gen/Generator.scala index 2ffd934d5d..fb8a1e3294 100644 --- a/quill-codegen/src/main/scala/io/getquill/codegen/gen/Generator.scala +++ b/quill-codegen/src/main/scala/io/getquill/codegen/gen/Generator.scala @@ -23,7 +23,7 @@ trait Generator { /** * Should we prefix object/package produced by this generator? Set this as the - * the value of that. Otherwise set this to be the empty string. + * value of that. Otherwise set this to be the empty string. 
*/ def packagePrefix: String def connectionMakers: Seq[ConnectionMaker] @@ -52,8 +52,8 @@ trait Generator { } def makeGenerators = new MultiGeneratorFactory(generatorMaker).apply - def writeAllFiles(localtion: String): Future[Seq[Path]] = - Future.sequence(writeFiles(localtion)) + def writeAllFiles(location: String): Future[Seq[Path]] = + Future.sequence(writeFiles(location)) def writeFiles(location: String): Seq[Future[Path]] = { // can't put Seq[Gen] into here because doing Seq[Gen] <: SingleUnitCodegen makes it covariant @@ -187,7 +187,7 @@ trait Generator { override def code: String = surroundByObject(body) override def objectName: Option[String] = Some(escape(tableColumns.table.name)) - // TODO Have this come directly from the Generator's context (but make sure to override it in the structural tests so it doesn't distrub them) + // TODO Have this come directly from the Generator's context (but make sure to override it in the structural tests so it doesn't disturb them) def imports = querySchemaImports // generate variables for every schema e.g. @@ -225,7 +225,7 @@ trait Generator { """.stripMargin.trimFront override def tableName: String = schema.tableName - override def schemaName: Option[String] = schema.tableSchem + override def schemaName: Option[String] = schema.tableSchema def QuerySchemaMapping = new QuerySchemaMappingGen(_) class QuerySchemaMappingGen(val column: ColumnFusion[ColumnMeta]) diff --git a/quill-codegen/src/main/scala/io/getquill/codegen/model/PackagingStrategy.scala b/quill-codegen/src/main/scala/io/getquill/codegen/model/PackagingStrategy.scala index a64116c5dd..4a7c3f8a6a 100644 --- a/quill-codegen/src/main/scala/io/getquill/codegen/model/PackagingStrategy.scala +++ b/quill-codegen/src/main/scala/io/getquill/codegen/model/PackagingStrategy.scala @@ -21,7 +21,7 @@ object PackagingStrategy { * Use this strategy when you want a separate source code file (or string) * for every single table. 
Typically you'll want to use this when table * schemas are very large and you want to minimize the footprint of your - * imports (i.e. since each file is a seperate table you can be sure to just + * imports (i.e. since each file is a separate table you can be sure to just * imports the exact tables needed for every source file). */ def TablePerFile(packagePrefix: String = "") = diff --git a/quill-codegen/src/main/scala/io/getquill/codegen/model/SchemaModel.scala b/quill-codegen/src/main/scala/io/getquill/codegen/model/SchemaModel.scala index cd1934abc4..5ad0910c2c 100644 --- a/quill-codegen/src/main/scala/io/getquill/codegen/model/SchemaModel.scala +++ b/quill-codegen/src/main/scala/io/getquill/codegen/model/SchemaModel.scala @@ -5,7 +5,7 @@ import java.sql.ResultSet case class RawSchema[T, C](table: T, columns: Seq[C]) trait BasicTableMeta { - def tableSchem: Option[String] + def tableSchema: Option[String] def tableName: String } @@ -15,7 +15,7 @@ trait BasicColumnMeta { case class JdbcTableMeta( tableCat: Option[String], - tableSchem: Option[String], + tableSchema: Option[String], tableName: String, tableType: Option[String] ) extends BasicTableMeta @@ -23,7 +23,7 @@ case class JdbcTableMeta( object JdbcTableMeta { def fromResultSet(rs: ResultSet) = JdbcTableMeta( tableCat = Option(rs.getString("TABLE_CAT")), - tableSchem = Option(rs.getString("TABLE_SCHEM")), + tableSchema = Option(rs.getString("TABLE_SCHEM")), tableName = rs.getString("TABLE_NAME"), tableType = Option(rs.getString("TABLE_TYPE")) ) @@ -31,7 +31,7 @@ object JdbcTableMeta { case class JdbcColumnMeta( tableCat: Option[String], - tableSchem: Option[String], + tableSchema: Option[String], tableName: String, columnName: String, dataType: Int, @@ -44,7 +44,7 @@ object JdbcColumnMeta { def fromResultSet(rs: ResultSet) = JdbcColumnMeta( tableCat = Option(rs.getString("TABLE_CAT")), - tableSchem = Option(rs.getString("TABLE_SCHEM")), + tableSchema = Option(rs.getString("TABLE_SCHEM")), tableName = 
rs.getString("TABLE_NAME"), columnName = rs.getString("COLUMN_NAME"), dataType = rs.getInt("DATA_TYPE"), diff --git a/quill-codegen/src/main/scala/io/getquill/codegen/model/StererotypedModel.scala b/quill-codegen/src/main/scala/io/getquill/codegen/model/StereotypedModel.scala similarity index 100% rename from quill-codegen/src/main/scala/io/getquill/codegen/model/StererotypedModel.scala rename to quill-codegen/src/main/scala/io/getquill/codegen/model/StereotypedModel.scala diff --git a/quill-core/js/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala b/quill-core/js/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala index b333584585..966668b1f5 100644 --- a/quill-core/js/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala +++ b/quill-core/js/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala @@ -387,7 +387,7 @@ trait DynamicQueryDsl { type DynamicAssignment[U] = ((Quoted[T] => Quoted[U]), U) - private[this] def assignemnts[S]( + private[this] def assignments[S]( l: List[DynamicSet[S, _]] ): List[Assignment] = l.collect { case s: DynamicSetValue[_, _] => @@ -397,7 +397,7 @@ trait DynamicQueryDsl { def insert(l: DynamicSet[T, _]*): DynamicInsert[T] = DynamicInsert( - splice(Insert(DynamicEntityQuery.this.q.ast, assignemnts(l.toList))) + splice(Insert(DynamicEntityQuery.this.q.ast, assignments(l.toList))) ) def updateValue(value: T): DynamicUpdate[T] = macro DynamicQueryDslMacro.updateValue @@ -405,7 +405,7 @@ trait DynamicQueryDsl { def update(sets: DynamicSet[T, _]*): DynamicUpdate[T] = DynamicUpdate( splice[Update[T]]( - Update(DynamicEntityQuery.this.q.ast, assignemnts(sets.toList)) + Update(DynamicEntityQuery.this.q.ast, assignments(sets.toList)) ) ) diff --git a/quill-core/jvm/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala b/quill-core/jvm/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala index 4042b3638f..c9ecdcabf9 100644 --- a/quill-core/jvm/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala +++ 
b/quill-core/jvm/src/main/scala/io/getquill/dsl/DynamicQueryDSL.scala @@ -410,7 +410,7 @@ trait DynamicQueryDsl { type DynamicAssignment[U] = ((Quoted[T] => Quoted[U]), U) - private[this] def assignemnts[S]( + private[this] def assignments[S]( l: List[DynamicSet[S, _]] ): List[Assignment] = l.collect { case s: DynamicSetValue[_, _] => @@ -420,7 +420,7 @@ trait DynamicQueryDsl { def insert(l: DynamicSet[T, _]*): DynamicInsert[T] = DynamicInsert( - splice(Insert(DynamicEntityQuery.this.q.ast, assignemnts(l.toList))) + splice(Insert(DynamicEntityQuery.this.q.ast, assignments(l.toList))) ) def updateValue(value: T): DynamicUpdate[T] = macro DynamicQueryDslMacro.updateValue @@ -428,7 +428,7 @@ trait DynamicQueryDsl { def update(sets: DynamicSet[T, _]*): DynamicUpdate[T] = DynamicUpdate( splice[Update[T]]( - Update(DynamicEntityQuery.this.q.ast, assignemnts(sets.toList)) + Update(DynamicEntityQuery.this.q.ast, assignments(sets.toList)) ) ) diff --git a/quill-core/src/main/scala/io/getquill/MirrorContext.scala b/quill-core/src/main/scala/io/getquill/MirrorContext.scala index 4c7f458618..291a94ad2f 100644 --- a/quill-core/src/main/scala/io/getquill/MirrorContext.scala +++ b/quill-core/src/main/scala/io/getquill/MirrorContext.scala @@ -21,9 +21,9 @@ case class BatchActionReturningMirrorGeneric[T, PrepareRow, Extractor[_]]( * This is supposed to emulate how Row retrieval works in JDBC Int JDBC, * ResultSet won't ever actually have Option values inside, so the actual * option-decoder needs to understand that fact e.g. - * `Deocder[Option[Int]](java.sql.ResultSet(foo:1, etc)).getInt(1)`* and wrap it + * `Decoder[Option[Int]](java.sql.ResultSet(foo:1, etc)).getInt(1)`* and wrap it * into a Optional value for the equivalent row implementation: - * `Deocder[Option[Int]](Row(foo:1, etc)).apply(1)`. (*note that + * `Decoder[Option[Int]](Row(foo:1, etc)).apply(1)`. (*note that * java.sql.ResultSet actually doesn't have this syntax because it isn't a * product). 
Similarly, when doing `ResultSet(foo:null /*Expecting an int*/, * etc).getInt(1)` the result will be 0 as opposed to throwing a NPE as would be diff --git a/quill-core/src/main/scala/io/getquill/Quoted.scala b/quill-core/src/main/scala/io/getquill/Quoted.scala index f426e7ed7f..0207f5c5cb 100644 --- a/quill-core/src/main/scala/io/getquill/Quoted.scala +++ b/quill-core/src/main/scala/io/getquill/Quoted.scala @@ -4,9 +4,9 @@ import io.getquill.ast.Ast /** * Defines the primary interface by which information in Quill is composed. This - * includes not only queries but all code fragements. A quotation can be a - * simple value: {{ val pi = quote(3.14159) }} And be used within another - * quotation: {{ case class Circle(radius: Float) + * includes not only queries but all code fragments. A quotation can be a simple + * value: {{ val pi = quote(3.14159) }} And be used within another quotation: {{ + * case class Circle(radius: Float) * * val areas = quote { query[Circle].map(c => pi * c.radius * c.radius) } }} * Quotations can also contain high-order functions and inline values: {{ val diff --git a/quill-core/src/main/scala/io/getquill/context/ActionMacro.scala b/quill-core/src/main/scala/io/getquill/context/ActionMacro.scala index 8996a80014..8085933581 100644 --- a/quill-core/src/main/scala/io/getquill/context/ActionMacro.scala +++ b/quill-core/src/main/scala/io/getquill/context/ActionMacro.scala @@ -127,7 +127,7 @@ class ActionMacro(val c: MacroContext) extends ContextMacro with ReifyLiftings { val idiomContext = $idiomContext /* for liftQuery(people:List[Person]) `batch` is `people` */ /* TODO Need secondary check to see if context is actually capable of batch-values insert */ - /* If there is a INSERT ... VALUES clause this will be cnoded as ValuesClauseToken(lifts) which we need to duplicate */ + /* If there is a INSERT ... 
VALUES clause this will be encoded as ValuesClauseToken(lifts) which we need to duplicate */ /* batches: List[List[Person]] */ val batches = if ($canDoBatch && $numRows != 1) { @@ -307,7 +307,7 @@ class ActionMacro(val c: MacroContext) extends ContextMacro with ReifyLiftings { super.apply(e) } - // Only extrace lifts that come from values-clauses: + // Only extract lifts that come from values-clauses: // liftQuery(people).foreach(ps => query[Person].filter(_.name == lift("not this")).insertValue(_.name ->
getDS from env, acquire-connection, + * acquired are as follows:getDS from env, acquire-connection, * set-no-autocommit(connection), put-into-fiberref(connection), op - the * corresponding execute_ method which will execute and pull connection from * the fiberref, remove-from-fiberref(connection), @@ -209,7 +209,7 @@ abstract class ZioJdbcContext[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] attemptBlocking(connection.setAutoCommit(prevAutoCommit)).orDie } _ <- ZIO.acquireRelease(currentConnection.set(Some(connection))) { _ => - // Note. We are failing the fiber if auto-commit reset fails. For some circumstances this may be too aggresive. + // Note. We are failing the fiber if auto-commit reset fails. For some circumstances this may be too aggressive. // If the connection pool e.g. Hikari resets this property for a recycled connection anyway doing it here // might not be necessary currentConnection.set(None) diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/mock/ZioMockSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/mock/ZioMockSpec.scala index 57ea6ce4b0..d29e28b6a4 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/mock/ZioMockSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/mock/ZioMockSpec.scala @@ -175,7 +175,7 @@ class ZioMockSpec extends AnyFreeSpec with MockitoSugar { // with AsyncMockitoSu val ctx = new PostgresZioJdbcContext(Literal) import ctx._ - // In this case, instead of catching the error inside the observable, let it propogate to the top + // In this case, instead of catching the error inside the observable, let it propagate to the top // and make sure that the connection is closed anyhow val resultMsg = Unsafe.unsafe { implicit u => zio.Runtime.default.unsafe.run { diff --git a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/Encoders.scala b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/Encoders.scala index 702d046ef9..bebf77651d 100644 --- a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/Encoders.scala +++ 
b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/Encoders.scala @@ -84,13 +84,13 @@ trait BasicTimeEncoders { self: Encoders => implicit val instantEncoder: Encoder[Instant] = encoder(Types.TIMESTAMP_WITH_TIMEZONE, (index, value, row) => row.setTimestamp(index, Timestamp.from(value))) - implicit val offseTimeEncoder: Encoder[OffsetTime] = + implicit val offsetTimeEncoder: Encoder[OffsetTime] = encoder( Types.TIME, (index, value, row) => row.setTime(index, java.sql.Time.valueOf(value.withOffsetSameInstant(ZoneOffset.UTC).toLocalTime)) ) - implicit val offseDateTimeEncoder: Encoder[OffsetDateTime] = + implicit val offsetDateTimeEncoder: Encoder[OffsetDateTime] = encoder( Types.TIMESTAMP_WITH_TIMEZONE, (index, value, row) => row.setTimestamp(index, java.sql.Timestamp.from(value.toInstant)) @@ -127,8 +127,8 @@ trait ObjectGenericTimeEncoders { self: Encoders => implicit val instantEncoder: Encoder[Instant] = encoder(jdbcTypeOfInstant, (index, value, row) => row.setObject(index, jdbcEncodeInstant(value), jdbcTypeOfInstant)) - implicit val offseTimeEncoder: Encoder[OffsetTime] = + implicit val offsetTimeEncoder: Encoder[OffsetTime] = encoder(jdbcTypeOfOffsetTime, (index, value, row) => row.setObject(index, value, jdbcTypeOfOffsetTime)) - implicit val offseDateTimeEncoder: Encoder[OffsetDateTime] = + implicit val offsetDateTimeEncoder: Encoder[OffsetDateTime] = encoder(jdbcTypeOfOffsetDateTime, (index, value, row) => row.setObject(index, value, jdbcTypeOfOffsetDateTime)) } diff --git a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextTypes.scala b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextTypes.scala index 13f3a75cb9..964ed19a6b 100644 --- a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextTypes.scala +++ b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextTypes.scala @@ -34,7 +34,7 @@ trait JdbcContextTypes[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] * Parses instances of java.sql.Types to string form so 
it can be used in * creation of sql arrays. Some databases does not support each of generic * types, hence it's welcome to override this method and provide alternatives - * to non-existent types. + * to nonexistent types. * * @param intType * one of java.sql.Types diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/CaseClassQueryJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/CaseClassQueryJdbcSpec.scala index 3587e3ac13..21dbaa30c2 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/CaseClassQueryJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/CaseClassQueryJdbcSpec.scala @@ -43,7 +43,7 @@ class CaseClassQueryJdbcSpec extends CaseClassQuerySpec { "Example 3 - Inline Record as Filter" in { testContext.run( `Ex 3 Inline Record Usage` - ) should contain theSameElementsAs `Ex 3 Inline Record Usage exepected result` + ) should contain theSameElementsAs `Ex 3 Inline Record Usage expected result` } "Example 4 - Ex 4 Mapped Union of Nicknames" in { diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/CaseClassQueryJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/CaseClassQueryJdbcSpec.scala index fcdcb7bac0..6cfd66e1b5 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/CaseClassQueryJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/CaseClassQueryJdbcSpec.scala @@ -43,7 +43,7 @@ class CaseClassQueryJdbcSpec extends CaseClassQuerySpec { "Example 3 - Inline Record as Filter" in { testContext.run( `Ex 3 Inline Record Usage` - ) should contain theSameElementsAs `Ex 3 Inline Record Usage exepected result` + ) should contain theSameElementsAs `Ex 3 Inline Record Usage expected result` } "Example 4 - Ex 4 Mapped Union of Nicknames" in { diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/CaseClassQueryJdbcSpec.scala 
b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/CaseClassQueryJdbcSpec.scala index 6647e3c239..1072282c54 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/CaseClassQueryJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/CaseClassQueryJdbcSpec.scala @@ -45,7 +45,7 @@ class CaseClassQueryJdbcSpec extends CaseClassQuerySpec { "Example 3 - Inline Record as Filter" in { testContext.run( `Ex 3 Inline Record Usage` - ) should contain theSameElementsAs `Ex 3 Inline Record Usage exepected result` + ) should contain theSameElementsAs `Ex 3 Inline Record Usage expected result` } "Example 4 - Ex 4 Mapped Union of Nicknames" in { diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/CaseClassQueryJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/CaseClassQueryJdbcSpec.scala index 3be1207245..1374eca6f6 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/CaseClassQueryJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/CaseClassQueryJdbcSpec.scala @@ -43,7 +43,7 @@ class CaseClassQueryJdbcSpec extends CaseClassQuerySpec { "Example 3 - Inline Record as Filter" in { testContext.run( `Ex 3 Inline Record Usage` - ) should contain theSameElementsAs `Ex 3 Inline Record Usage exepected result` + ) should contain theSameElementsAs `Ex 3 Inline Record Usage expected result` } "Example 4 - Ex 4 Mapped Union of Nicknames" in { diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/CaseClassQueryJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/CaseClassQueryJdbcSpec.scala index 25f3eb7d17..1f0183ed62 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/CaseClassQueryJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/CaseClassQueryJdbcSpec.scala @@ -43,6 +43,6 @@ class CaseClassQueryJdbcSpec extends CaseClassQuerySpec { "Example 3 - Inline Record as 
Filter" in { testContext.run( `Ex 3 Inline Record Usage` - ) should contain theSameElementsAs `Ex 3 Inline Record Usage exepected result` + ) should contain theSameElementsAs `Ex 3 Inline Record Usage expected result` } } diff --git a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/CaseClassQueryNdbcPostgresSpec.scala b/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/CaseClassQueryNdbcPostgresSpec.scala index d7637ed4f4..3fd97c7180 100644 --- a/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/CaseClassQueryNdbcPostgresSpec.scala +++ b/quill-ndbc-postgres/src/test/scala/io/getquill/context/ndbc/postgres/CaseClassQueryNdbcPostgresSpec.scala @@ -44,6 +44,6 @@ class CaseClassQueryNdbcPostgresSpec extends CaseClassQuerySpec { "Example 3 - Inline Record as Filter" in { get( context.run(`Ex 3 Inline Record Usage`) - ) must contain theSameElementsAs `Ex 3 Inline Record Usage exepected result` + ) must contain theSameElementsAs `Ex 3 Inline Record Usage expected result` } } diff --git a/quill-orientdb/src/main/scala/io/getquill/context/orientdb/OrientDBIdiom.scala b/quill-orientdb/src/main/scala/io/getquill/context/orientdb/OrientDBIdiom.scala index 46e24ebf5c..80fc0eb9fa 100644 --- a/quill-orientdb/src/main/scala/io/getquill/context/orientdb/OrientDBIdiom.scala +++ b/quill-orientdb/src/main/scala/io/getquill/context/orientdb/OrientDBIdiom.scala @@ -38,7 +38,7 @@ trait OrientDBIdiom extends Idiom { private def doTranslate(ast: Ast, cached: Boolean, executionType: ExecutionType, idiomContext: IdiomContext)(implicit naming: NamingStrategy ): (Ast, Statement, ExecutionType) = { - implicit val implcitIdiomContext: IdiomContext = idiomContext + implicit val implicitIdiomContext: IdiomContext = idiomContext val normalizedAst = if (cached) NormalizeCaching { ast: Ast => SqlNormalize(ast, idiomContext.config) }(ast) @@ -209,9 +209,9 @@ trait OrientDBIdiom extends Idiom { } protected def tokenOrderBy( - criterias: 
List[OrderByCriteria] + criteria: List[OrderByCriteria] )(implicit strategy: NamingStrategy, idiomContext: IdiomContext) = - stmt"ORDER BY ${criterias.token}" + stmt"ORDER BY ${criteria.token}" implicit def sourceTokenizer(implicit strategy: NamingStrategy, idiomContext: IdiomContext): Tokenizer[FromContext] = Tokenizer[FromContext] { diff --git a/quill-orientdb/src/test/scala/io/getquill/context/orientdb/CaseClassQueryOrientSpec.scala b/quill-orientdb/src/test/scala/io/getquill/context/orientdb/CaseClassQueryOrientSpec.scala index e118edfee7..1d3d6b9795 100644 --- a/quill-orientdb/src/test/scala/io/getquill/context/orientdb/CaseClassQueryOrientSpec.scala +++ b/quill-orientdb/src/test/scala/io/getquill/context/orientdb/CaseClassQueryOrientSpec.scala @@ -48,7 +48,7 @@ class CaseClassQueryOrientSpec extends Spec { query[Contact].filter(p => p.id == filtrationObject.idFilter) } - val `Ex 3 Inline Record Usage exepected result` = List( + val `Ex 3 Inline Record Usage expected result` = List( new Contact(1, "Alex", "Jones", 60, 2, "foo") ) @@ -68,6 +68,6 @@ class CaseClassQueryOrientSpec extends Spec { "Example 2 - Inline Record as Filter" in { testSyncDB.run( `Ex 3 Inline Record Usage` - ) must contain theSameElementsAs `Ex 3 Inline Record Usage exepected result` + ) must contain theSameElementsAs `Ex 3 Inline Record Usage expected result` } } diff --git a/quill-spark/src/main/scala/io/getquill/QuillSparkContext.scala b/quill-spark/src/main/scala/io/getquill/QuillSparkContext.scala index e6420592bb..71c3e1c022 100644 --- a/quill-spark/src/main/scala/io/getquill/QuillSparkContext.scala +++ b/quill-spark/src/main/scala/io/getquill/QuillSparkContext.scala @@ -55,7 +55,7 @@ trait QuillSparkContext extends Context[SparkDialect, Literal] with Encoders wit sql"${lift(ds)}".pure.as[Query[T]] } - // Helper class for the perculateNullArrays method + // Helper class for the percolateNullArrays method case class StructElement(column: Column, structField: StructField) { def 
children: Array[StructElement] = structField.dataType match { case StructType(fields) => fields.map(f => StructElement(column.getField(f.name), f)) @@ -88,7 +88,7 @@ trait QuillSparkContext extends Context[SparkDialect, Literal] with Encoders wit node.structField.dataType match { case st: StructType => // Recursively convert all parent array columns to single null values if all their children are null - val preculatedColumn = struct(node.children.map(percolateNullArraysRecursive(_)).toIndexedSeq: _*) + val percolatedColumn = struct(node.children.map(percolateNullArraysRecursive(_)).toIndexedSeq: _*) // Then express that column back out the schema val mapped = @@ -99,7 +99,7 @@ trait QuillSparkContext extends Context[SparkDialect, Literal] with Encoders wit ).otherwise(c) } - mapped(preculatedColumn).as(node.structField.name) + mapped(percolatedColumn).as(node.structField.name) case _ => node.column.as(node.structField.name) } diff --git a/quill-spark/src/main/scala/io/getquill/context/spark/SimpleNestedExpansion.scala b/quill-spark/src/main/scala/io/getquill/context/spark/SimpleNestedExpansion.scala index fa48e1a1bb..42372f90b3 100644 --- a/quill-spark/src/main/scala/io/getquill/context/spark/SimpleNestedExpansion.scala +++ b/quill-spark/src/main/scala/io/getquill/context/spark/SimpleNestedExpansion.scala @@ -59,10 +59,10 @@ object TopLevelExpansion { * This unapplier object is used both here and in the SpartDialect select * tokenization. * - * - unless the Ident has a Concrete Quat.Proudct with a single value, but + * - unless the Ident has a Concrete Quat.Product with a single value, but * that has already been expanded into it's composite elements in - * TopLevelExpanion.apply and the Ident shuold no longer exist in the select - * values. + * TopLevelExpansion.apply and the Ident should no longer exist in the + * select values. 
* * Technically, all we we need to do here is to check that the ast element is * not an ident, however due to previous issues encountered with surprising diff --git a/quill-spark/src/main/scala/io/getquill/context/spark/SparkDialect.scala b/quill-spark/src/main/scala/io/getquill/context/spark/SparkDialect.scala index bf8ce825c6..6b75db2a50 100644 --- a/quill-spark/src/main/scala/io/getquill/context/spark/SparkDialect.scala +++ b/quill-spark/src/main/scala/io/getquill/context/spark/SparkDialect.scala @@ -54,7 +54,7 @@ trait SparkIdiom extends SqlIdiom with CannotReturn { self => val normalizedAst = EscapeQuestionMarks(SqlNormalize(ast, idiomContext.config)) implicit val implicitIdiomContext: IdiomContext = idiomContext - implicit val tokernizer = defaultTokenizer + implicit val tokenizer = defaultTokenizer val token = normalizedAst match { @@ -125,7 +125,7 @@ trait SparkIdiom extends SqlIdiom with CannotReturn { self => // it is an ident but somehow it's type is not known case List(SelectValue(Ident(a, Quat.Placeholder(_)), _, _)) => stmt"${a.token}.*" - // It is an ident but actually it repsents a single sql-level value + // It is an ident but actually it represents a single sql-level value case List(SelectValue(Ident(a, _: Quat.Primitive), _, _)) => stmt"${a.token}.*" // If the selection is a single value e.g. 
SelectValue(prop.value), SelectValue(Constant) return it right here as a SingleValuePrimitive diff --git a/quill-spark/src/test/scala/io/getquill/context/spark/QuestionMarkSpec.scala b/quill-spark/src/test/scala/io/getquill/context/spark/QuestionMarkSpec.scala index 647339a9b8..baf1b6918e 100644 --- a/quill-spark/src/test/scala/io/getquill/context/spark/QuestionMarkSpec.scala +++ b/quill-spark/src/test/scala/io/getquill/context/spark/QuestionMarkSpec.scala @@ -26,7 +26,7 @@ class QuestionMarkSpec extends Spec { testContext.run(q).collect() should contain theSameElementsAs Seq(peopleList(0)) } - "simple variable usage must work in the middle of a stirng" in { + "simple variable usage must work in the middle of a string" in { val newContact = Contact("Moe", "Rabbenu", 123, 2, "Something ? Something ? Else") val extraPeopleList = peopleList :+ newContact diff --git a/quill-spark/src/test/scala/io/getquill/context/spark/examples/GithubExample.scala b/quill-spark/src/test/scala/io/getquill/context/spark/examples/GitHubExample.scala similarity index 98% rename from quill-spark/src/test/scala/io/getquill/context/spark/examples/GithubExample.scala rename to quill-spark/src/test/scala/io/getquill/context/spark/examples/GitHubExample.scala index 677b5f795a..0073e708a7 100644 --- a/quill-spark/src/test/scala/io/getquill/context/spark/examples/GithubExample.scala +++ b/quill-spark/src/test/scala/io/getquill/context/spark/examples/GitHubExample.scala @@ -35,7 +35,7 @@ case class Activity( org: User ) -object GithubExample extends App { +object GitHubExample extends App { val files = for { diff --git a/quill-spark/src/test/scala/io/getquill/context/spark/examples/TopHashtagsExample.scala b/quill-spark/src/test/scala/io/getquill/context/spark/examples/TopHashtagsExample.scala index bd96678882..92f605b18f 100644 --- a/quill-spark/src/test/scala/io/getquill/context/spark/examples/TopHashtagsExample.scala +++ 
b/quill-spark/src/test/scala/io/getquill/context/spark/examples/TopHashtagsExample.scala @@ -56,7 +56,7 @@ object TopHashtagsExample extends App { .map(_.toLowerCase) // normalize hashtags (Dataset) .groupBy($"value") // group by each hashtag (Dataframe) .agg(fcount("*") as "count") // aggregate the count (Dataframe) - .orderBy($"count" desc) // order (Datafeame) + .orderBy($"count" desc) // order (Dataframe) .limit(n) // limit to top results (Dataframe) .as[(String, BigInt)] // set the type again (Dataset) } @@ -64,7 +64,7 @@ object TopHashtagsExample extends App { object quill { def topHashtags(tweets: Dataset[Tweet], n: Int): Dataset[(String, Long)] = run { // produce a dataset from the Quill query - liftQuery(tweets) // trasform the dataset into a Quill query + liftQuery(tweets) // transform the dataset into a Quill query .concatMap(_.text.split(" ")) // split into words and unnest results .filter(_.startsWith("#")) // filter hashtag words .map(_.toLowerCase) // normalize hashtags diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/AggregationSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/AggregationSpec.scala index 702f84f315..fedca52c96 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/AggregationSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/AggregationSpec.scala @@ -27,7 +27,7 @@ class AggregationSpec extends Spec { // SELECT p.age FROM (SELECT x.age + 1 FROM Person x) AS p WHERE p.age = 123 // => SELECT p.age + 1 FROM (SELECT x.age FROM Person x) AS p WHERE (p.age + 1) = 123 // Instead it should remain as the former query - "simple operation should not propogate from nested" in { + "simple operation should not propagate from nested" in { ctx.run { query[Person].map(p => p.age + 1).nested.filter(p => p == 123) }.string mustEqual "SELECT p.x FROM (SELECT p.age + 1 AS x FROM Person p) AS p WHERE p.x = 123" @@ -44,7 +44,7 @@ class AggregationSpec extends Spec { "sum" in { ctx.run(query[Person].map(p => 
sum(p.age))).string mustEqual "SELECT SUM(p.age) FROM Person p" } } - "work correctly with a filter cause that is BEFORE the aggreation" in { + "work correctly with a filter clause that is BEFORE the aggregation" in { val q = quote { query[Person].filter(p => p.name == "Joe").map(p => (p.id, max(p.name))) } @@ -160,7 +160,7 @@ class AggregationSpec extends Spec { "SELECT p.x FROM (SELECT MAX(p.age) AS x FROM Person p GROUP BY p.age) AS p WHERE p.x > 1000" } - // Disable thte apply-map phase to make sure these work in cases where this reduction is not possible (e.g. where they use infix etc...). + // Disable the apply-map phase to make sure these work in cases where this reduction is not possible (e.g. where they use infix etc...). // Infix has a special case already so want to not use that specifically. "work with a map(to-leaf).groupByMap.filter - no ApplyMap" in { implicit val d = new DisablePhase { override type Phase = OptionalPhase.ApplyMap :: HNil } diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/GroupBySpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/GroupBySpec.scala index 64bca45f95..170ed53018 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/GroupBySpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/GroupBySpec.scala @@ -21,7 +21,7 @@ class GroupBySpec extends Spec { .join(query[Country]) .on { case (city, country) => city.countryId == country.id } .groupBy { case (city, country) => country } - .map { case (country, citysInCountry) => (country.name, citysInCountry.map(cICn => cICn._1)) } + .map { case (country, cityInCountry) => (country.name, cityInCountry.map(cICn => cICn._1)) } .map { case (country, citiesInCountry) => (country, citiesInCountry.size) } ) testContext.run(q).string mustEqual @@ -35,7 +35,7 @@ class GroupBySpec extends Spec { .join(query[Country]) .on { case (city, country) => city.countryId == country.id } .groupBy { case (city, country) => country } - .map { case (country, 
citysInCountry) => (country.name, citysInCountry.map(cICn => cICn._1)) } + .map { case (country, cityInCountry) => (country.name, cityInCountry.map(cICn => cICn._1)) } .map { case (country, citiesInCountry) => (country, citiesInCountry.size) } ) testContext.run(q).string mustEqual @@ -81,8 +81,8 @@ class GroupBySpec extends Spec { .join(query[Country]) .on { case (city, country) => city.countryCode == country.countryCode } .groupBy { case (city, country) => country } - .map { case (country, citysInCountry) => - ((country.countryCode, country.language), citysInCountry.map(cICn => cICn._1)) + .map { case (country, cityInCountry) => + ((country.countryCode, country.language), cityInCountry.map(cICn => cICn._1)) } .map { case (country, cityCountries) => (country, cityCountries.size) } ) @@ -143,8 +143,8 @@ class GroupBySpec extends Spec { .join(query[Country]) .on { case (city, country) => city.countryCode == country.countryCode } .groupBy { case (city, country) => country } - .map { case (country, citysInCountry) => - ((country.countryCode, country.language), citysInCountry.map(cICn => cICn._1)) + .map { case (country, cityInCountry) => + ((country.countryCode, country.language), cityInCountry.map(cICn => cICn._1)) } .map { case (country, cityCountries) => (country, cityCountries.size) } ) @@ -239,7 +239,7 @@ class GroupBySpec extends Spec { "SELECT p.* FROM (SELECT MAX(p.age) FROM Person p GROUP BY p.age) AS p WHERE p > 1000" } - // Disable thte apply-map phase to make sure these work in cases where this reduction is not possible (e.g. where they use infix etc...). + // Disable the apply-map phase to make sure these work in cases where this reduction is not possible (e.g. where they use infix etc...). // Infix has a special case already so want to not use that specifically. 
"work with a map(to-leaf).groupByMap.map.filter - no ApplyMap" in { implicit val d = new DisablePhase { override type Phase = OptionalPhase.ApplyMap :: HNil } diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/NestedDistinctSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/NestedDistinctSpec.scala index 1e81e77fd7..b0e8a8cc8a 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/NestedDistinctSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/NestedDistinctSpec.scala @@ -123,12 +123,12 @@ class NestedDistinctSpec extends Spec { case class SimpleEnt(a: Int, b: String) case class SimpleEnt2(aa: Int, bb: String) - val qschem = quote { + val qschema = quote { querySchema[SimpleEnt]("CustomEnt", _.a -> "field_a") } val q = quote { - qschem + qschema .map(e => SimpleEnt(e.a + 1, e.b)) .distinct .map(e => SimpleEnt2(e.a + 2, e.b)) @@ -141,12 +141,12 @@ class NestedDistinctSpec extends Spec { case class SimpleEnt(a: Int, b: String) case class SimpleEnt2(aa: Int, bb: String) - val qschem = quote { + val qschema = quote { querySchema[SimpleEnt]("CustomEnt", _.a -> "field_a") } val q = quote { - qschem + qschema .map(e => (e.a + 1, sql"foo(${e.b})".as[String])) .nested .map(e => (e._1 + 2, sql"bar(${e._2})".as[String])) @@ -160,12 +160,12 @@ class NestedDistinctSpec extends Spec { case class SimpleEnt(a: Int, b: String) case class SimpleEnt2(aa: Int, bb: String) - val qschem = quote { + val qschema = quote { querySchema[SimpleEnt]("CustomEnt", _.a -> "field_a") } val q = quote { - qschem + qschema .map(e => (e.a + 1, sql"foo(${e.b})".as[String])) .map(e => (e._1 + 2, sql"bar(${e._2})".as[String])) } @@ -213,49 +213,49 @@ class NestedDistinctSpec extends Spec { ctx.run(q).string mustEqual "SELECT e._1, e._2id AS id, e._2theName AS theName FROM (SELECT p.idP AS _1, p.id AS _2id, p.theName AS _2theName FROM Parent p) AS e" } - "can be propogated across query with naming intact and then used further" in { + "can be propagated across 
query with naming intact and then used further" in { val q = quote { query[Parent].map(p => p.emb).distinct.map(e => (e.name, e.id)).distinct.map(tup => (tup._1, tup._2)).distinct } ctx.run(q).string mustEqual "SELECT DISTINCT p._1theName AS _1, p._1id AS _2 FROM (SELECT DISTINCT p.id AS _1id, p.theName AS _1theName FROM Parent p) AS p" } - "can be propogated across query with naming intact and then used further - nested" in { + "can be propagated across query with naming intact and then used further - nested" in { val q = quote { query[Parent].map(p => p.emb).nested.map(e => (e.name, e.id)).nested.map(tup => (tup._1, tup._2)).nested } ctx.run(q).string mustEqual "SELECT x._1, x._2 FROM (SELECT tup._1, tup._2 FROM (SELECT e.theName AS _1, e.id AS _2 FROM (SELECT p.id, p.theName FROM Parent p) AS e) AS tup) AS x" } - "can be propogated across query with naming intact - returned as single property" in { + "can be propagated across query with naming intact - returned as single property" in { val q = quote { query[Parent].map(p => p.emb).distinct.map(e => (e.name)) } ctx.run(q).string mustEqual "SELECT p._1theName AS theName FROM (SELECT DISTINCT p.id AS _1id, p.theName AS _1theName FROM Parent p) AS p" } - "can be propogated across query with naming intact - and the immediately returned" in { + "can be propagated across query with naming intact - and the immediately returned" in { val q = quote { query[Parent].map(p => p.emb).nested.map(e => e) } ctx.run(q).string mustEqual "SELECT x.id, x.theName FROM (SELECT p.id, p.theName FROM Parent p) AS x" } - "can be propogated across distinct with naming intact - and the immediately returned" in { + "can be propagated across distinct with naming intact - and the immediately returned" in { val q = quote { query[Parent].map(p => p.emb).distinct.map(e => e) } ctx.run(q).string mustEqual "SELECT DISTINCT p.id, p.theName FROM Parent p" } - "can be propogated across query with naming intact and then re-wrapped in case class" in { + 
"can be propagated across query with naming intact and then re-wrapped in case class" in { val q = quote { query[Parent].map(p => p.emb).distinct.map(e => Parent(1, e)) } ctx.run(q).string mustEqual "SELECT 1 AS idP, p._1id AS id, p._1theName AS theName FROM (SELECT DISTINCT p.id AS _1id, p.theName AS _1theName FROM Parent p) AS p" } - "can be propogated across query with naming intact and then re-wrapped in tuple" in { + "can be propagated across query with naming intact and then re-wrapped in tuple" in { val q = quote { query[Parent].map(p => p.emb).nested.map(e => Parent(1, e)) } diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/SqlQueryMacroSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/SqlQueryMacroSpec.scala index bfcfd77ce3..c42cb3df1b 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/SqlQueryMacroSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/SqlQueryMacroSpec.scala @@ -37,7 +37,7 @@ class SqlQueryMacroSpec extends Spec { mirror.string mustEqual "SELECT x.s, x.i, x.l, x.o FROM TestEntity t, TestEntity2 x" } } - "with bindigns" - { + "with bindings" - { "one" in { val q = quote { qr1.filter(t => t.s != lift("s")) diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/SqlQuerySpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/SqlQuerySpec.scala index 651ae66a65..98b7f9f01d 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/SqlQuerySpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/SqlQuerySpec.scala @@ -588,7 +588,7 @@ class SqlQuerySpec extends Spec { "SELECT 1 FROM (SELECT DISTINCT t.i AS _1 FROM TestEntity t) AS t" // hel } - "with map uppsercase" in { + "with map uppercase" in { import testContextUpper._ val q = quote { qr1.map(t => t.i).distinct.map(t => 1) diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/base/CaseClassQuerySpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/base/CaseClassQuerySpec.scala index 
8812950d9a..08f2a8e863 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/base/CaseClassQuerySpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/base/CaseClassQuerySpec.scala @@ -77,7 +77,7 @@ trait CaseClassQuerySpec extends Spec { query[Contact].filter(p => p.firstName == person.firstName && person.lastName == person.lastName) } - val `Ex 3 Inline Record Usage exepected result` = List( + val `Ex 3 Inline Record Usage expected result` = List( new Contact("Alex", "Jones", 60, 2, "foo") ) diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/idiom/OffsetWithoutLimitWorkaroundSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/idiom/OffsetWithoutLimitWorkaroundSpec.scala index 2ff120528d..5f985452d5 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/idiom/OffsetWithoutLimitWorkaroundSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/idiom/OffsetWithoutLimitWorkaroundSpec.scala @@ -18,7 +18,7 @@ class OffsetWithoutLimitWorkaroundSpec extends Spec { } import ctx._ - "creates a synthectic limit" in { + "creates a synthetic limit" in { val q = quote { qr1.drop(1) } diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/idiom/SqlIdiomNamingSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/idiom/SqlIdiomNamingSpec.scala index 9cba415350..9da01af6e3 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/idiom/SqlIdiomNamingSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/idiom/SqlIdiomNamingSpec.scala @@ -35,7 +35,7 @@ class SqlIdiomNamingSpec extends Spec { db.run(query[SomeEntity]).string mustEqual "SELECT x.some_column AS someColumn FROM some_entity x" } - "mutiple transformations" in { + "multiple transformations" in { val db = new SqlMirrorContext(MirrorSqlDialect, NamingStrategy(SnakeCase, UpperCase, Escape)) import db._ db.run(query[SomeEntity]).string mustEqual diff --git 
a/quill-sql/src/test/scala/io/getquill/context/sql/norm/RenamePropertiesOverrideSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/norm/RenamePropertiesOverrideSpec.scala index cd8d4815ee..a8208310dd 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/norm/RenamePropertiesOverrideSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/norm/RenamePropertiesOverrideSpec.scala @@ -249,7 +249,7 @@ class RenamePropertiesOverrideSpec extends Spec { } "join" - { - "both sidess" in { + "both sides" in { val q = quote { e.leftJoin(e).on((a, b) => a.s == b.s).map(t => (t._1.s, t._2.map(_.s))) } @@ -346,7 +346,7 @@ class RenamePropertiesOverrideSpec extends Spec { } } - "respects the schema definition for embeddeds" - { + "respects the schema definition for embedded" - { "query" - { "without schema" in { case class B(c: Int) @@ -364,7 +364,7 @@ class RenamePropertiesOverrideSpec extends Spec { "SELECT x.bC FROM A x" } } - "query for Option embeddeds" - { + "query for Option embedded" - { "without schema" in { case class B(c1: Int, c2: Int) case class A(b: Option[B]) diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/norm/RenamePropertiesSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/norm/RenamePropertiesSpec.scala index ccba215a46..3bc8f448aa 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/norm/RenamePropertiesSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/norm/RenamePropertiesSpec.scala @@ -392,7 +392,7 @@ class RenamePropertiesSpec extends Spec { } } - "respects the schema definition for embeddeds" - { + "respects the schema definition for embedded" - { "query" - { "without schema" in { case class B(c: Int) @@ -410,7 +410,7 @@ class RenamePropertiesSpec extends Spec { "SELECT x.bC FROM A x" } } - "query for Option embeddeds" - { + "query for Option embedded" - { "without schema" in { case class B(c1: Int, c2: Int) case class A(b: Option[B]) diff --git 
a/quill-sql/src/test/scala/io/getquill/context/sql/norm/SheathLeafClausesSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/norm/SheathLeafClausesSpec.scala index 4420f9ccb9..7a15aa0f5a 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/norm/SheathLeafClausesSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/norm/SheathLeafClausesSpec.scala @@ -114,7 +114,7 @@ class SheathLeafClausesSpec extends Spec { .join(query[Person].concatMap(t => t.firstName.split(" "))) .on { case (a, b) => a == b } ) - // TODO star idenfiers should not have aliases + // TODO star identifiers should not have aliases ctx.run(q).string mustEqual "SELECT x01.*, x11.* FROM (SELECT UNNEST(SPLIT(p.first_name, ' ')) AS x FROM person p) AS x01 INNER JOIN (SELECT UNNEST(SPLIT(t.first_name, ' ')) AS x FROM person t) AS x11 ON x01.x = x11.x" } diff --git a/quill-sql/src/test/scala/io/getquill/quat/QuatRunSpec.scala b/quill-sql/src/test/scala/io/getquill/quat/QuatRunSpec.scala index 6a9b6b5a0c..8b1648ee6e 100644 --- a/quill-sql/src/test/scala/io/getquill/quat/QuatRunSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/quat/QuatRunSpec.scala @@ -34,7 +34,7 @@ class QuatRunSpec extends Spec { result.string mustEqual "SELECT x.name, x.age FROM MyPerson x APPEND FOO" } - "should support query-ops function - multile var" in { + "should support query-ops function - multiple var" in { def appendFooFun[Q <: Query[_]] = quote((q: Q, i: Int) => sql"$q APPEND $i FOO".transparent.pure.as[Q]) val q = quote(appendFooFun(query[MyPerson], 123)) q.ast.quat mustEqual Quat.Generic // Is it unknown, how should the reducing work from an infix with multiple vars? 
diff --git a/quill-sql/src/test/sql/postgres-doobie-schema.sql b/quill-sql/src/test/sql/postgres-doobie-schema.sql index 3573fe9fba..a19e666077 100644 --- a/quill-sql/src/test/sql/postgres-doobie-schema.sql +++ b/quill-sql/src/test/sql/postgres-doobie-schema.sql @@ -1,6 +1,6 @@ -- -- The sample data used in the world database is Copyright Statistics --- Finland, http://www.stat.fi/worldinfigures. +-- Finland, https://www.stat.fi/worldinfigures. -- CREATE TABLE IF NOT EXISTS city (