From e29c721132fcee79d65d4b6e30dd4ee46a814ef7 Mon Sep 17 00:00:00 2001
From: Reynold Xin
Date: Tue, 2 Sep 2014 22:59:46 -0700
Subject: [PATCH] Updated comment for ShuffleBlockFetcherIterator.

---
 .../spark/storage/ShuffleBlockFetcherIterator.scala | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/storage/ShuffleBlockFetcherIterator.scala b/core/src/main/scala/org/apache/spark/storage/ShuffleBlockFetcherIterator.scala
index f0abcfdd6090f..439bbe06f1314 100644
--- a/core/src/main/scala/org/apache/spark/storage/ShuffleBlockFetcherIterator.scala
+++ b/core/src/main/scala/org/apache/spark/storage/ShuffleBlockFetcherIterator.scala
@@ -39,10 +39,13 @@ import org.apache.spark.util.Utils
  * The implementation throttles the remote fetches to they don't exceed maxBytesInFlight to avoid
  * using too much memory.
  *
- * @param context
- * @param blockManager
- * @param blocksByAddress
- * @param serializer
+ * @param context [[TaskContext]], used for metrics update
+ * @param blockTransferService [[BlockTransferService]] for fetching remote blocks
+ * @param blockManager [[BlockManager]] for reading local blocks
+ * @param blocksByAddress list of blocks to fetch grouped by the [[BlockManagerId]].
+ *                        For each block we also require the size (in bytes as a long field) in
+ *                        order to throttle the memory usage.
+ * @param serializer serializer used to deserialize the data.
  * @param maxBytesInFlight max size (in bytes) of remote blocks to fetch at any given point.
  */
 private[spark]