Skip to content

Commit

Permalink
Addresses comments
Browse files (browse the repository at this point in the history)
  • Loading branch information
liancheng committed Feb 28, 2015
1 parent cee55cf commit 538f506
Show file tree
Hide file tree
Showing 2 changed files with 7 additions and 3 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -126,9 +126,13 @@ private[sql] case class ParquetTableScan(
conf)

if (requestedPartitionOrdinals.nonEmpty) {
// This check if based on CatalystConverter.createRootConverter.
// This check is based on CatalystConverter.createRootConverter.
val primitiveRow = output.forall(a => ParquetTypesConverter.isPrimitiveType(a.dataType))

// Uses temporary variable to avoid the whole `ParquetTableScan` object being captured into
// the `mapPartitionsWithInputSplit` closure below.
val outputSize = output.size

baseRDD.mapPartitionsWithInputSplit { case (split, iter) =>
val partValue = "([^=]+)=([^=]+)".r
val partValues =
Expand Down Expand Up @@ -165,7 +169,7 @@ private[sql] case class ParquetTableScan(
}
} else {
// Create a mutable row since we need to fill in values from partition columns.
val mutableRow = new GenericMutableRow(output.size)
val mutableRow = new GenericMutableRow(outputSize)
new Iterator[Row] {
def hasNext = iter.hasNext
def next() = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -476,7 +476,7 @@ private[sql] case class ParquetRelation2(
// When the data does not include the key and the key is requested then we must fill it in
// based on information from the input split.
if (!partitionKeysIncludedInDataSchema && partitionKeyLocations.nonEmpty) {
// This check if based on CatalystConverter.createRootConverter.
// This check is based on CatalystConverter.createRootConverter.
val primitiveRow =
requestedSchema.forall(a => ParquetTypesConverter.isPrimitiveType(a.dataType))

Expand Down

0 comments on commit 538f506

Please sign in to comment.