Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Feature/delimiter #163

Merged
merged 12 commits into from
May 30, 2024
Prev Previous commit
Next Next commit
Renamed delimiters
  • Loading branch information
Leonard Wolters committed May 29, 2024
commit 1b953c2a2517a684aa5b84b812f99a78d7a99446
Original file line number Diff line number Diff line change
@@ -9,9 +9,9 @@ trait AggregationFunctionTokenizer { this: ClickhouseTokenizerModule =>
case nested: CombinedAggregatedFunction[_, _] =>
val tokenizedCombinators = collectCombinators(nested).map(tokenizeCombinator)
val combinators = tokenizedCombinators.map(_._1).mkString("")
-        val combinatorsValues = tokenizedCombinators.flatMap(_._2).mkString(ctx.vDelim)
+        val combinatorsValues = tokenizedCombinators.flatMap(_._2).mkString(ctx.valueDelimiter)
val (function, values) = tokenizeInnerAggregatedFunction(extractTarget(nested))
-        val separator = if (values.isEmpty || combinatorsValues.isEmpty) "" else ctx.fDelim
+        val separator = if (values.isEmpty || combinatorsValues.isEmpty) "" else ctx.functionDelimiter
s"$function$combinators($values$separator$combinatorsValues)"
case timeSeries: TimeSeries => tokenizeTimeSeries(timeSeries)
case aggregated: AggregateFunction[_] =>
@@ -45,25 +45,28 @@ trait AggregationFunctionTokenizer { this: ClickhouseTokenizerModule =>
case LastValue(column) => ("last_value", tokenizeColumn(column))
case Median(column, level, modifier) =>
val (modifierName, modifierValue) = tokenizeLevelModifier(modifier)
-        (s"median$modifierName", s"$level)(${tokenizeColumn(column)}${modifierValue.map(ctx.vDelim + _).getOrElse("")}")
+        (
+          s"median$modifierName",
+          s"$level)(${tokenizeColumn(column)}${modifierValue.map(ctx.valueDelimiter + _).getOrElse("")}"
+        )
case Min(tableColumn) => ("min", tokenizeColumn(tableColumn))
case Max(tableColumn) => ("max", tokenizeColumn(tableColumn))
case Quantile(column, level, modifier) =>
val (modifierName, modifierValue) = tokenizeLevelModifier(modifier)
(
s"quantile$modifierName",
-          s"$level)(${tokenizeColumn(column)}${modifierValue.map(ctx.vDelim + _).getOrElse("")})"
+          s"$level)(${tokenizeColumn(column)}${modifierValue.map(ctx.valueDelimiter + _).getOrElse("")})"
)
case Quantiles(column, levels, modifier) =>
val (modifierName, modifierValue) = tokenizeLevelModifier(modifier)
(
s"quantiles$modifierName",
-          s"${levels.mkString(ctx.vDelim)})(${tokenizeColumn(column)}${modifierValue.map(ctx.vDelim + _).getOrElse("")}"
+          s"${levels.mkString(ctx.valueDelimiter)})(${tokenizeColumn(column)}${modifierValue.map(ctx.valueDelimiter + _).getOrElse("")}"
)
case Sum(column, modifier) => (s"sum${tokenizeSumModifier(modifier)}", tokenizeColumn(column))
case SumMap(key, value) => (s"sumMap", tokenizeColumns(Seq(key, value)))
case Uniq(columns, modifier) =>
-        (s"uniq${tokenizeUniqModifier(modifier)}", columns.map(tokenizeColumn).mkString(ctx.vDelim))
+        (s"uniq${tokenizeUniqModifier(modifier)}", columns.map(tokenizeColumn).mkString(ctx.valueDelimiter))
case f: AggregateFunction[_] =>
throw new IllegalArgumentException(s"Cannot use $f aggregated function with combinator")
}
Original file line number Diff line number Diff line change
@@ -15,8 +15,8 @@ case class TokenizeContext(
var joinNr: Int = 0,
var tableAliases: Map[Table, String] = Map.empty,
var useTableAlias: Boolean = false,
-  fDelim: String = ", ", // function delimiter
-  vDelim: String = ", " // values delimiter
+  functionDelimiter: String = ", ",
+  valueDelimiter: String = ", "
) {

def incrementJoinNumber(): Unit = joinNr += 1
@@ -80,7 +80,7 @@ trait ClickhouseTokenizerModule
}

protected def tokenizeSeqCol(columns: Column*)(implicit ctx: TokenizeContext): String =
-    columns.map(tokenizeColumn).mkString(ctx.vDelim)
+    columns.map(tokenizeColumn).mkString(ctx.valueDelimiter)

override def toSql(query: InternalQuery, formatting: Option[String] = Some("JSON"))(implicit
ctx: TokenizeContext
@@ -151,7 +151,7 @@ trait ClickhouseTokenizerModule
case alias: AliasedColumn[_] =>
val originalColumnToken = tokenizeColumn(alias.original)
if (originalColumnToken.isEmpty) alias.quoted else s"$originalColumnToken AS ${alias.quoted}"
-      case tuple: TupleColumn[_] => s"(${tuple.elements.map(tokenizeColumn).mkString(ctx.vDelim)})"
+      case tuple: TupleColumn[_] => s"(${tuple.elements.map(tokenizeColumn).mkString(ctx.valueDelimiter)})"
case col: ExpressionColumn[_] => tokenizeExpressionColumn(col)
case col: Column => col.quoted
}
Original file line number Diff line number Diff line change
@@ -24,9 +24,9 @@ trait StringSearchFunctionTokenizer {
}

val maybeReplaceParam = col match {
-      case r: StringSearchReplaceFunc => ctx.fDelim + tokenizeColumn(r.replace.column)
+      case r: StringSearchReplaceFunc => ctx.functionDelimiter + tokenizeColumn(r.replace.column)
case _ => ""
}
-    s"$command(${tokenizeColumn(col.col1.column)}${ctx.fDelim}${tokenizeColumn(col.col2.column)}$maybeReplaceParam)"
+    s"$command(${tokenizeColumn(col.col1.column)}${ctx.functionDelimiter}${tokenizeColumn(col.col2.column)}$maybeReplaceParam)"
}
}
Loading