Skip to content

Commit

Permalink
Fix Spark cases
Browse the repository at this point in the history
  • Loading branch information
deusaquilus committed Dec 6, 2024
1 parent 71c2020 commit 35e1f49
Show file tree
Hide file tree
Showing 3 changed files with 14 additions and 5 deletions.
4 changes: 2 additions & 2 deletions quill-engine/src/main/scala/io/getquill/sql/SqlQuery.scala
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,7 @@ object CaseClassMake {
}
}

class SqlQueryApply(traceConfig: TraceConfig) {
class SqlQueryApply(traceConfig: TraceConfig, allowTopLevelInfix: Boolean = true) {

val interp: Interpolator = new Interpolator(TraceType.SqlQueryConstruct, traceConfig, 1)
import interp._
Expand Down Expand Up @@ -155,7 +155,7 @@ class SqlQueryApply(traceConfig: TraceConfig) {
flatten(infix, "x")
}
case infix: Infix =>
if (isTopLevel)
if (allowTopLevelInfix && isTopLevel)
TopInfixQuery(infix)
else
trace"Construct SqlQuery from: Infix" andReturn {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,10 @@ trait SparkIdiom extends SqlIdiom with CannotReturn { self =>

def liftingPlaceholder(index: Int): String = "?"

// Do not allow top-level infix queries in Spark by default, because run(liftQuery(myDataset)) would then fail.
// Power users may want to override this in rare cases.
def allowTopLevelInfix = false

override def prepareForProbing(string: String) = string

override implicit def externalIdentTokenizer(implicit
Expand All @@ -39,7 +43,7 @@ trait SparkIdiom extends SqlIdiom with CannotReturn { self =>
val token =
normalizedAst match {
case q: Query =>
val sql = new SqlQueryApply(idiomContext.config.traceConfig)(q)
val sql = new SqlQueryApply(idiomContext.config.traceConfig, allowTopLevelInfix)(q)
trace("sql")(sql)
val expanded = SimpleNestedExpansion(sql)
trace("expanded sql")(expanded)
Expand Down Expand Up @@ -123,14 +127,17 @@ trait SparkIdiom extends SqlIdiom with CannotReturn { self =>
strategy: NamingStrategy,
idiomContext: IdiomContext
): Tokenizer[SqlQuery] = Tokenizer[SqlQuery] {
case q: TopInfixQuery =>
q.ast.token
case q: FlattenSqlQuery =>
new SparkFlattenSqlQueryTokenizerHelper(q).apply
case SetOperationSqlQuery(a, op, b) =>
stmt"(${a.token}) ${op.token} (${b.token})"
case UnaryOperationSqlQuery(op, q) =>
stmt"SELECT ${op.token} (${q.token})"
// Technically, top-level infix queries are not allowed in Spark, because
// run(liftQuery(myDataset)) would then fail; however, in case the user
// overrides allowTopLevelInfix, provide the translation anyway.
case q: TopInfixQuery =>
q.ast.token
}

override implicit def propertyTokenizer(implicit
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
package io.getquill.context.spark

import io.getquill.base.Spec
import io.getquill.{Test => _, _}

import scala.util.Success

class QuillSparkContextSpec extends Spec {
Expand Down

0 comments on commit 35e1f49

Please sign in to comment.