diff --git a/core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala b/core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala
index 499dd97c0656a..8bc4e205bc3c6 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala
@@ -59,7 +59,7 @@ private[spark] abstract class BlockObjectWriter(val blockId: BlockId) extends Ou
   def write(key: Any, value: Any)
 
   /**
-   * Notify the writer that a record worth of bytes has been written with writeBytes.
+   * Notify the writer that a record worth of bytes has been written with OutputStream#write.
    */
   def recordWritten()
 
@@ -215,12 +215,7 @@ private[spark] class DiskBlockObjectWriter(
 
     objOut.writeKey(key)
     objOut.writeValue(value)
-    numRecordsWritten += 1
-    writeMetrics.incShuffleRecordsWritten(1)
-
-    if (numRecordsWritten % 32 == 0) {
-      updateBytesWritten()
-    }
+    recordWritten()
   }
 
   override def write(b: Int): Unit = throw new UnsupportedOperationException()
diff --git a/core/src/main/scala/org/apache/spark/util/collection/PartitionedSerializedPairBuffer.scala b/core/src/main/scala/org/apache/spark/util/collection/PartitionedSerializedPairBuffer.scala
index b5ca0c62a04f2..ac9ea6393628f 100644
--- a/core/src/main/scala/org/apache/spark/util/collection/PartitionedSerializedPairBuffer.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/PartitionedSerializedPairBuffer.scala
@@ -71,10 +71,10 @@ private[spark] class PartitionedSerializedPairBuffer[K, V](
     if (keyStart < 0) {
      throw new Exception(s"Can't grow buffer beyond ${1 << 31} bytes")
     }
-    kvSerializationStream.writeObject[Any](key)
+    kvSerializationStream.writeKey[Any](key)
     kvSerializationStream.flush()
     val valueStart = kvBuffer.size
-    kvSerializationStream.writeObject[Any](value)
+    kvSerializationStream.writeValue[Any](value)
     kvSerializationStream.flush()
     val valueEnd = kvBuffer.size
 
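For context, here is a minimal sketch of the bookkeeping that the first hunk consolidates into `recordWritten()`, reconstructed from the inline code the diff removes. `SketchWriter` and `SketchWriteMetrics` are simplified stand-ins for Spark's `DiskBlockObjectWriter` and `ShuffleWriteMetrics`, not the actual internals:

```scala
// Simplified stand-in for Spark's ShuffleWriteMetrics.
class SketchWriteMetrics {
  var recordsWritten: Long = 0L
  def incShuffleRecordsWritten(n: Long): Unit = recordsWritten += n
}

// Simplified stand-in for DiskBlockObjectWriter, showing only the
// per-record bookkeeping that recordWritten() centralizes.
class SketchWriter(writeMetrics: SketchWriteMetrics) {
  private var numRecordsWritten = 0

  // Same shape as the inline code the diff removes: count every record,
  // but refresh the bytes-written metric only once every 32 records,
  // since polling the underlying stream position is comparatively costly.
  def recordWritten(): Unit = {
    numRecordsWritten += 1
    writeMetrics.incShuffleRecordsWritten(1)

    if (numRecordsWritten % 32 == 0) {
      updateBytesWritten()
    }
  }

  // Stand-in for DiskBlockObjectWriter#updateBytesWritten.
  private def updateBytesWritten(): Unit = { /* poll stream position here */ }
}
```

With the logic in one place, `write(key, value)` and callers that push raw bytes through `OutputStream#write` can trigger identical metrics updates, which is also why the `recordWritten()` doc comment now references `OutputStream#write` instead of the stale `writeBytes`.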