Formatting and coloring of badges
pwendell committed Apr 5, 2014
1 parent cd7a465 commit 1ed27d2
Showing 28 changed files with 83 additions and 121 deletions.
core/src/main/scala/org/apache/spark/Aggregator.scala (2 changes: 1 addition & 1 deletion)
@@ -20,7 +20,7 @@ package org.apache.spark
 import org.apache.spark.util.collection.{AppendOnlyMap, ExternalAppendOnlyMap}
 
 /**
- * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
+ * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
  * A set of functions used to aggregate data.
  *
  * @param createCombiner function to create the initial value of the aggregation.
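Note: every hunk in this commit follows the same one-line pattern. The shared badge-red CSS class is dropped in favor of an inline style, the "UNSTABLE" suffix is removed, and the badge floats to the right of the generated Scaladoc instead of sitting above the description (#44751E green for DEVELOPER API, #257080 teal for EXPERIMENTAL API). Before and after, as it appears in a doc comment:

// Before: shared class, badge rendered in line above the description
/**
 * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
 * A set of functions used to aggregate data.
 */

// After: inline style, badge floated to the right edge of the rendered docs
/**
 * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
 * A set of functions used to aggregate data.
 */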
core/src/main/scala/org/apache/spark/Dependency.scala (15 changes: 5 additions & 10 deletions)
@@ -21,16 +21,14 @@ import org.apache.spark.rdd.RDD
 import org.apache.spark.serializer.Serializer
 
 /**
- * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
- *
+ * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
  * Base class for dependencies.
  */
 abstract class Dependency[T](val rdd: RDD[T]) extends Serializable
 
 
 /**
- * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
- *
+ * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
  * Base class for dependencies where each partition of the parent RDD is used by at most one
  * partition of the child RDD. Narrow dependencies allow for pipelined execution.
  */
@@ -45,8 +43,7 @@ abstract class NarrowDependency[T](rdd: RDD[T]) extends Dependency(rdd) {
 
 
 /**
- * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
- *
+ * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
  * Represents a dependency on the output of a shuffle stage.
  * @param rdd the parent RDD
  * @param partitioner partitioner used to partition the shuffle output
@@ -65,8 +62,7 @@ class ShuffleDependency[K, V](
 
 
 /**
- * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
- *
+ * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
  * Represents a one-to-one dependency between partitions of the parent and child RDDs.
  */
 class OneToOneDependency[T](rdd: RDD[T]) extends NarrowDependency[T](rdd) {
@@ -75,8 +71,7 @@ class OneToOneDependency[T](rdd: RDD[T]) extends NarrowDependency[T](rdd) {
 
 
 /**
- * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
- *
+ * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
  * Represents a one-to-one dependency between ranges of partitions in the parent and child RDDs.
  * @param rdd the parent RDD
  * @param inStart the start of the range in the parent RDD
core/src/main/scala/org/apache/spark/FutureAction.scala (9 changes: 3 additions & 6 deletions)
@@ -25,8 +25,7 @@ import org.apache.spark.rdd.RDD
 import org.apache.spark.scheduler.{JobFailed, JobSucceeded, JobWaiter}
 
 /**
- * <span class="badge badge-red">EXPERIMENTAL API</span>
- *
+ * <span class="badge" style="float: right; background-color: #257080;">EXPERIMENTAL API</span>
  * A future for the result of an action to support cancellation. This is an extension of the
  * Scala Future interface to support cancellation.
  */
@@ -86,8 +85,7 @@ trait FutureAction[T] extends Future[T] {
 
 
 /**
- * <span class="badge badge-red">EXPERIMENTAL API</span>
- *
+ * <span class="badge" style="float: right; background-color: #257080;">EXPERIMENTAL API</span>
  * A [[FutureAction]] holding the result of an action that triggers a single job. Examples include
  * count, collect, reduce.
  */
@@ -152,8 +150,7 @@ class SimpleFutureAction[T] private[spark](jobWaiter: JobWaiter[_], resultFunc:
 
 
 /**
- * <span class="badge badge-red">EXPERIMENTAL API</span>
- *
+ * <span class="badge" style="float: right; background-color: #257080;">EXPERIMENTAL API</span>
  * A [[FutureAction]] for actions that could trigger multiple Spark jobs. Examples include take,
  * takeSample. Cancellation works by setting the cancelled flag to true and interrupting the
  * action thread if it is being blocked by a job.
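Note: a minimal cancellation sketch, not part of this commit. It assumes an existing RDD `rdd` and the implicit conversion to AsyncRDDActions (badged later in this diff); takeAsync is one of the multi-job actions backed by ComplexFutureAction:

import org.apache.spark.SparkContext._   // implicit conversion to the async actions

val pending = rdd.takeAsync(100)         // returns a FutureAction without blocking
// ...later, if the result is no longer needed:
pending.cancel()                         // sets the cancelled flag, interrupts a blocked job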
core/src/main/scala/org/apache/spark/SparkContext.scala (6 changes: 3 additions & 3 deletions)
@@ -666,7 +666,7 @@
   }
 
   /**
-   * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
+   * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
    *
    * Register a listener to receive up-calls from events that happen during execution.
    */
@@ -979,7 +979,7 @@
   }
 
   /**
-   * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
+   * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
    *
    * Run a job that can return approximate results.
    */
@@ -998,7 +998,7 @@
   }
 
   /**
-   * <span class="badge badge-red">EXPERIMENTAL API</span>
+   * <span class="badge" style="float: right; background-color: #257080;">EXPERIMENTAL API</span>
    *
    * Submit a job for execution and return a FutureJob holding the result.
    */
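Note: a sketch of the listener hook badged above; the event class is an assumption based on the scheduler package of this era, and `sc` is an existing SparkContext:

import org.apache.spark.scheduler.{SparkListener, SparkListenerJobEnd}

sc.addSparkListener(new SparkListener {
  // Receive an up-call whenever a job finishes.
  override def onJobEnd(jobEnd: SparkListenerJobEnd) {
    println("Job ended: " + jobEnd)
  }
})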
core/src/main/scala/org/apache/spark/TaskContext.scala (3 changes: 1 addition & 2 deletions)
@@ -22,8 +22,7 @@ import scala.collection.mutable.ArrayBuffer
 import org.apache.spark.executor.TaskMetrics
 
 /**
- * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
- *
+ * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
  * Contextual information about a task which can be read or mutated during execution.
  */
 class TaskContext(
core/src/main/scala/org/apache/spark/broadcast/BroadcastFactory.scala
@@ -21,8 +21,7 @@ import org.apache.spark.SecurityManager
 import org.apache.spark.SparkConf
 
 /**
- * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
- *
+ * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
  * An interface for all the broadcast implementations in Spark (to allow
  * multiple broadcast implementations). SparkContext uses a user-specified
  * BroadcastFactory implementation to instantiate a particular broadcast for the
core/src/main/scala/org/apache/spark/executor/TaskMetrics.scala
@@ -20,8 +20,7 @@ package org.apache.spark.executor
 import org.apache.spark.storage.{BlockId, BlockStatus}
 
 /**
- * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
- *
+ * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
  * Metrics tracked during the execution of a task.
  */
 class TaskMetrics extends Serializable {
@@ -88,8 +87,7 @@ object TaskMetrics {
 
 
 /**
- * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
- *
+ * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
  * Metrics pertaining to shuffle data read in a given task.
  */
 class ShuffleReadMetrics extends Serializable {
@@ -127,8 +125,7 @@ class ShuffleReadMetrics extends Serializable {
 }
 
 /**
- * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
- *
+ * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
  * Metrics pertaining to shuffle data written in a given task.
  */
 class ShuffleWriteMetrics extends Serializable {
core/src/main/scala/org/apache/spark/io/CompressionCodec.scala
@@ -25,8 +25,7 @@ import org.xerial.snappy.{SnappyInputStream, SnappyOutputStream}
 import org.apache.spark.SparkConf
 
 /**
- * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
- *
+ * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
  * CompressionCodec allows the customization of choosing different compression implementations
  * to be used in block storage.
  *
@@ -58,8 +57,7 @@ private[spark] object CompressionCodec {
 
 
 /**
- * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
- *
+ * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
  * LZF implementation of [[org.apache.spark.io.CompressionCodec]].
  *
  * Note: The wire protocol for this codec is not guaranteed to be compatible across versions
@@ -77,8 +75,7 @@ class LZFCompressionCodec(conf: SparkConf) extends CompressionCodec {
 
 
 /**
- * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
- *
+ * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
  * Snappy implementation of [[org.apache.spark.io.CompressionCodec]].
  * Block size can be configured by spark.io.compression.snappy.block.size.
  *
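Note: a hypothetical custom codec, to illustrate the extension point being badged. GzipCompressionCodec is not part of Spark; the two-method contract and the SparkConf constructor argument follow the built-in LZF and Snappy implementations above:

import java.io.{InputStream, OutputStream}
import java.util.zip.{GZIPInputStream, GZIPOutputStream}

import org.apache.spark.SparkConf
import org.apache.spark.io.CompressionCodec

class GzipCompressionCodec(conf: SparkConf) extends CompressionCodec {
  override def compressedOutputStream(s: OutputStream): OutputStream = new GZIPOutputStream(s)
  override def compressedInputStream(s: InputStream): InputStream = new GZIPInputStream(s)
}

// Selected the same way as the built-ins:
// conf.set("spark.io.compression.codec", classOf[GzipCompressionCodec].getName)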
core/src/main/scala/org/apache/spark/package.scala (16 changes: 8 additions & 8 deletions)
@@ -33,15 +33,15 @@ package org.apache
  * Java programmers should reference the [[spark.api.java]] package
  * for Spark programming APIs in Java.
  *
- * Classes and methods marked with <span class="badge badge-red">EXPERIMENTAL API</span> are
- * user-facing features which have not been officially adopted by the Spark project. These are
- * subject to change or removal in minor releases.
- *
- * Classes and methods marked with <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
- * are intended for advanced users want to extend Spark through lower level interfaces. These are
- * subject to changes or removal in minor releases.
+ * Classes and methods marked with <span class="badge" style="background-color: #257080">
+ * EXPERIMENTAL API</span> are user-facing features which have not been officially adopted by the
+ * Spark project. These are subject to change or removal in minor releases.
+ *
+ * Classes and methods marked with <span class="badge" style="background-color: #44751E">
+ * DEVELOPER API</span> are intended for advanced users want to extend Spark through lower
+ * level interfaces. These are subject to changes or removal in minor releases.
  */
 
 package object spark {
   // For package docs only
-}
+}
core/src/main/scala/org/apache/spark/partial/BoundedDouble.scala
@@ -18,8 +18,7 @@
 package org.apache.spark.partial
 
 /**
- * <span class="badge badge-red">EXPERIMENTAL API</span>
- *
+ * <span class="badge" style="float: right; background-color: #257080;">EXPERIMENTAL API</span>
  * A Double value with error bars and associated confidence.
  */
 class BoundedDouble(val mean: Double, val confidence: Double, val low: Double, val high: Double) {
core/src/main/scala/org/apache/spark/partial/PartialResult.scala
@@ -18,7 +18,7 @@
 package org.apache.spark.partial
 
 /**
- * <span class="badge badge-red">EXPERIMENTAL API</span>
+ * <span class="badge" style="float: right; background-color: #257080;">EXPERIMENTAL API</span>
  */
 class PartialResult[R](initialVal: R, isFinal: Boolean) {
   private var finalValue: Option[R] = if (isFinal) Some(initialVal) else None
core/src/main/scala/org/apache/spark/rdd/AsyncRDDActions.scala
@@ -26,8 +26,7 @@ import scala.reflect.ClassTag
 import org.apache.spark.{ComplexFutureAction, FutureAction, Logging}
 
 /**
- * <span class="badge badge-red">EXPERIMENTAL API</span>
- *
+ * <span class="badge" style="float: right; background-color: #257080;">EXPERIMENTAL API</span>
  * A set of asynchronous RDD actions available through an implicit conversion.
  * Import `org.apache.spark.SparkContext._` at the top of your program to use these functions.
  */
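Note: a usage sketch for the implicit conversion mentioned in the doc comment, assuming an existing RDD `rdd` and the global execution context for the callback:

import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success}

import org.apache.spark.SparkContext._   // enables countAsync, foreachAsync, takeAsync, ...

val f = rdd.countAsync()                 // starts the job, returns immediately
f.onComplete {
  case Success(n) => println("count = " + n)
  case Failure(e) => e.printStackTrace()
}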
core/src/main/scala/org/apache/spark/rdd/CoGroupedRDD.scala (3 changes: 1 addition & 2 deletions)
@@ -51,8 +51,7 @@ private[spark] class CoGroupPartition(idx: Int, val deps: Array[CoGroupSplitDep]
 }
 
 /**
- * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
- *
+ * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
  * A RDD that cogroups its parents. For each key k in parent RDDs, the resulting RDD contains a
  * tuple with the list of values for that key.
  *
core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala (3 changes: 1 addition & 2 deletions)
@@ -70,8 +70,7 @@ private[spark] class HadoopPartition(rddId: Int, idx: Int, @transient s: InputSp
 }
 
 /**
- * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
- *
+ * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
  * An RDD that provides core functionality for reading data stored in Hadoop (e.g., files in HDFS,
  * sources in HBase, or S3), using the older MapReduce API (`org.apache.hadoop.mapred`).
  *
core/src/main/scala/org/apache/spark/rdd/NewHadoopRDD.scala (3 changes: 1 addition & 2 deletions)
@@ -36,8 +36,7 @@ class NewHadoopPartition(rddId: Int, val index: Int, @transient rawSplit: InputS
 }
 
 /**
- * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
- *
+ * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
  * An RDD that provides core functionality for reading data stored in Hadoop (e.g., files in HDFS,
  * sources in HBase, or S3), using the new MapReduce API (`org.apache.hadoop.mapreduce`).
  *
core/src/main/scala/org/apache/spark/rdd/PartitionPruningRDD.scala
@@ -46,8 +46,7 @@ private[spark] class PruneDependency[T](rdd: RDD[T], @transient partitionFilterF
 
 
 /**
- * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
- *
+ * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
  * A RDD used to prune RDD partitions/partitions so we can avoid launching tasks on
  * all partitions. An example use case: If we know the RDD is partitioned by range,
  * and the execution DAG has a filter on the key, we can avoid launching tasks
@@ -67,7 +66,7 @@ class PartitionPruningRDD[T: ClassTag](
 
 
 /**
- * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
+ * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
  */
 object PartitionPruningRDD {
 
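Note: the companion object's create method is the usual entry point. A sketch assuming an RDD `rdd` whose first 10 partitions are known, for example from a range partitioner, to contain all matching keys:

import org.apache.spark.rdd.PartitionPruningRDD

// Launch tasks only on the partitions that can contain the filtered keys.
val pruned = PartitionPruningRDD.create(rdd, partitionIndex => partitionIndex < 10)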
core/src/main/scala/org/apache/spark/rdd/RDD.scala (20 changes: 13 additions & 7 deletions)
@@ -86,22 +86,29 @@ abstract class RDD[T: ClassTag](
   // Methods that should be implemented by subclasses of RDD
   // =======================================================================
 
-  /** Implemented by subclasses to compute a given partition. */
+  /**
+   * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
+   * Implemented by subclasses to compute a given partition.
+   */
   def compute(split: Partition, context: TaskContext): Iterator[T]
 
   /**
+   * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
    * Implemented by subclasses to return the set of partitions in this RDD. This method will only
    * be called once, so it is safe to implement a time-consuming computation in it.
    */
   protected def getPartitions: Array[Partition]
 
   /**
+   * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
    * Implemented by subclasses to return how this RDD depends on parent RDDs. This method will only
    * be called once, so it is safe to implement a time-consuming computation in it.
    */
   protected def getDependencies: Seq[Dependency[_]] = deps
 
-  /** Optionally overridden by subclasses to specify placement preferences. */
+  /**
+   * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
+   * Optionally overridden by subclasses to specify placement preferences. */
   protected def getPreferredLocations(split: Partition): Seq[String] = Nil
 
   /** Optionally overridden by subclasses to specify how they are partitioned. */
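Note: the methods badged above form the subclassing surface. A minimal hypothetical subclass, not from this commit, that implements the two abstract members: an RDD of the integers 0 until n split into numSlices partitions.

import org.apache.spark.{Partition, SparkContext, TaskContext}
import org.apache.spark.rdd.RDD

class RangeLikeRDD(sc: SparkContext, n: Int, numSlices: Int) extends RDD[Int](sc, Nil) {

  private class SlicePartition(override val index: Int) extends Partition

  // Produce the elements of one partition on demand.
  override def compute(split: Partition, context: TaskContext): Iterator[Int] = {
    val start = split.index * n / numSlices
    val end = (split.index + 1) * n / numSlices
    (start until end).iterator
  }

  // Called once by the scheduler to enumerate the partitions.
  override protected def getPartitions: Array[Partition] =
    (0 until numSlices).map(i => new SlicePartition(i): Partition).toArray
}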
@@ -513,8 +520,7 @@
   }
 
   /**
-   * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
-   *
+   * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
    * Return a new RDD by applying a function to each partition of this RDD. This is a variant of
    * mapPartitions that also passes the TaskContext into the closure.
    */
@@ -777,7 +783,7 @@
   def count(): Long = sc.runJob(this, Utils.getIteratorSize _).sum
 
   /**
-   * <span class="badge badge-red">EXPERIMENTAL API</span>
+   * <span class="badge" style="float: right; background-color: #257080;">EXPERIMENTAL API</span>
    *
    * Approximate version of count() that returns a potentially incomplete result
    * within a timeout, even if not all tasks have finished.
@@ -825,7 +831,7 @@
   }
 
   /**
-   * <span class="badge badge-red">EXPERIMENTAL API</span>
+   * <span class="badge" style="float: right; background-color: #257080;">EXPERIMENTAL API</span>
    *
    * Approximate version of countByValue().
    */
@@ -849,7 +855,7 @@
   }
 
   /**
-   * <span class="badge badge-red">EXPERIMENTAL API</span>
+   * <span class="badge" style="float: right; background-color: #257080;">EXPERIMENTAL API</span>
    *
    * Return approximate number of distinct elements in the RDD.
    *
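Note: a usage sketch for the experimental approximate actions, assuming an existing RDD `rdd`; countApprox returns a PartialResult[BoundedDouble], both badged earlier in this diff:

// Accept whatever estimate is available after 10 seconds.
val partial = rdd.countApprox(timeout = 10000, confidence = 0.95)
val estimate = partial.initialValue      // a BoundedDouble
println("count ~ " + estimate.mean + " in [" + estimate.low + ", " + estimate.high + "]")

// Estimated distinct-element count, trading accuracy for space.
val distinct = rdd.countApproxDistinct(relativeSD = 0.05)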
core/src/main/scala/org/apache/spark/rdd/ShuffledRDD.scala (3 changes: 1 addition & 2 deletions)
@@ -28,8 +28,7 @@ private[spark] class ShuffledRDDPartition(val idx: Int) extends Partition {
 }
 
 /**
- * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
- *
+ * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
  * The resulting RDD from a shuffle (e.g. repartitioning of data).
  * @param prev the parent RDD.
  * @param part the partitioner used to partition the RDD
core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala
@@ -28,8 +28,7 @@ import org.apache.spark._
 import org.apache.spark.executor.TaskMetrics
 
 /**
- * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
- *
+ * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
  * A logger class to record runtime information for jobs in Spark. This class outputs one log file
 * for each Spark job, containing tasks start/stop and shuffle information. JobLogger is a subclass
 * of SparkListener, use addSparkListener to add JobLogger to a SparkContext after the SparkContext
[Diff truncated: the remaining changed files were not loaded]
