Commit 701ced7: trivial fix

LantaoJin committed Jan 6, 2020
1 parent 85a2386 commit 701ced7
Showing 4 changed files with 11 additions and 5 deletions.
@@ -48,6 +48,13 @@ case class DeltaLogFileIndex(format: FileFormat, files: Array[FileStatus]) extends FileIndex {
override val sizeInBytes: Long = files.map(_.getLen).sum

override def partitionSchema: StructType = new StructType()

+ override def listFiles(
+     partitionFilters: Seq[Expression],
+     dynamicPartitionFilters: Seq[Expression],
+     dataFilters: Seq[Expression]): Seq[PartitionDirectory] = {
+   listFiles(partitionFilters, dataFilters)
+ }
}

object DeltaLogFileIndex {
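
Note: the new three-argument listFiles overload presumably lines up with an extended FileIndex contract in this fork's Spark build, which also passes dynamic partition filters at planning time. Since the Delta log files carry no partition columns, the override can drop the extra filters and delegate to the existing two-argument listing. A minimal sketch of that delegation pattern, using a hypothetical DynamicFilterFileIndex trait as a stand-in for the fork's interface:

import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.execution.datasources.PartitionDirectory

// Hypothetical stand-in for the fork's extended FileIndex contract.
trait DynamicFilterFileIndex {
  // The two-argument listing that open-source Spark's FileIndex defines.
  def listFiles(
      partitionFilters: Seq[Expression],
      dataFilters: Seq[Expression]): Seq[PartitionDirectory]

  // Assumed fork-specific overload that also receives dynamic partition filters.
  // An unpartitioned index, such as one over the _delta_log directory, can
  // safely ignore them and reuse the standard listing.
  def listFiles(
      partitionFilters: Seq[Expression],
      dynamicPartitionFilters: Seq[Expression],
      dataFilters: Seq[Expression]): Seq[PartitionDirectory] =
    listFiles(partitionFilters, dataFilters)
}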
@@ -173,7 +173,7 @@ case class UpdateWithJoinCommand(
// Calculate frequency of matches per source row
val matchedRowCounts = collectTouchedFiles.groupBy(ROW_ID_COL).agg(sum("one").as("count"))
if (matchedRowCounts.filter("count > 1").count() != 0) {
- throw DeltaErrors.multipleSourceRowMatchingTargetRowInMergeException
+ throw DeltaErrors.multipleSourceRowMatchingTargetRowInMergeException(spark)
}

// Get the AddFiles using the touched file names.
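
Note: the only change in this hunk is that DeltaErrors.multipleSourceRowMatchingTargetRowInMergeException is now called with the active SparkSession, which suggests the fork turned the error helper from a plain value into a method so the message (or a remediation hint) can depend on session configuration. A rough sketch of such a helper, offered as an assumption about the fork's DeltaErrors rather than its actual code:

import org.apache.spark.sql.SparkSession

// Hypothetical shape only; the real message lives in the fork's DeltaErrors.
object DeltaErrorsSketch {
  def multipleSourceRowMatchingTargetRowInMergeException(
      spark: SparkSession): Throwable =
    // The session parameter could feed configuration-dependent hints into the message.
    new UnsupportedOperationException(
      "Cannot perform the update because multiple source rows matched and " +
        "attempted to modify the same target row in the Delta table.")
}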
@@ -22,15 +22,14 @@ import org.apache.spark.sql.delta.metering.DeltaLogging
import org.apache.spark.sql.delta.storage.LogStore
import org.apache.spark.sql.delta.util.DeltaFileOperations
import org.apache.hadoop.fs.Path

import org.apache.spark.SparkEnv
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute
import org.apache.spark.sql.catalyst.catalog.ExternalCatalogUtils
import org.apache.spark.sql.catalyst.expressions.{Attribute, Cast, Concat, Expression, Literal, ScalaUDF}
import org.apache.spark.sql.execution.datasources.InMemoryFileIndex
import org.apache.spark.sql.functions.col
- import org.apache.spark.sql.types.StringType
+ import org.apache.spark.sql.types.{BooleanType, StringType}
import org.apache.spark.util.SerializableConfiguration

/**
@@ -336,7 +335,7 @@ trait GenerateSymlinkManifestImpl extends PostCommitHook with DeltaLogging with
ExternalCatalogUtils.getPartitionPathString _,
StringType,
Seq(Literal(colName), Cast(col, StringType, Option(timeZoneId))),
- Seq(true, true))
+ Seq(BooleanType, BooleanType))
if (i == 0) Seq(partitionName) else Seq(Literal(Path.SEPARATOR), partitionName)
}
)
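
Note: the two edits in this file go together: BooleanType joins the types import, and the fourth positional argument to ScalaUDF changes from a Seq of booleans to a Seq of DataType values, matching whatever constructor this fork's Spark build exposes for that internal expression (its parameter list differs across Spark releases). Because of exactly that instability, a version-stable way to build the same colName=value partition path segment is the public udf API; the sketch below illustrates that alternative under stated assumptions and is not what the commit itself does:

import org.apache.spark.sql.Column
import org.apache.spark.sql.catalyst.catalog.ExternalCatalogUtils
import org.apache.spark.sql.functions.{col, lit, udf}

object PartitionPathSketch {
  // Builds "colName=escapedValue" through the public udf API instead of
  // constructing the internal ScalaUDF expression directly.
  private val partitionPathUdf =
    udf((name: String, value: String) =>
      ExternalCatalogUtils.getPartitionPathString(name, value))

  def partitionSegment(colName: String): Column =
    partitionPathUdf(lit(colName), col(colName).cast("string"))
}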
version.sbt (2 changes: 1 addition & 1 deletion)
@@ -1 +1 @@
- version in ThisBuild := "0.5.1.carmel0.1-SNAPSHOT"
+ version in ThisBuild := "0.5.0.carmel0.1-SNAPSHOT"
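
For reference, version in ThisBuild := ... is the older sbt 0.13-style scoping syntax; if the build runs on sbt 1.x, the same setting is usually written with slash syntax:

// Equivalent setting in sbt 1.x slash syntax
ThisBuild / version := "0.5.0.carmel0.1-SNAPSHOT"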
