Skip to content

Commit 1ed27d2

Browse files
committed
Formatting and coloring of badges
1 parent cd7a465 commit 1ed27d2

28 files changed

+83
-121
lines changed

core/src/main/scala/org/apache/spark/Aggregator.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ package org.apache.spark
2020
import org.apache.spark.util.collection.{AppendOnlyMap, ExternalAppendOnlyMap}
2121

2222
/**
23-
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
23+
* <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
2424
* A set of functions used to aggregate data.
2525
*
2626
* @param createCombiner function to create the initial value of the aggregation.

core/src/main/scala/org/apache/spark/Dependency.scala

Lines changed: 5 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -21,16 +21,14 @@ import org.apache.spark.rdd.RDD
2121
import org.apache.spark.serializer.Serializer
2222

2323
/**
24-
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
25-
*
24+
* <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
2625
* Base class for dependencies.
2726
*/
2827
abstract class Dependency[T](val rdd: RDD[T]) extends Serializable
2928

3029

3130
/**
32-
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
33-
*
31+
* <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
3432
* Base class for dependencies where each partition of the parent RDD is used by at most one
3533
* partition of the child RDD. Narrow dependencies allow for pipelined execution.
3634
*/
@@ -45,8 +43,7 @@ abstract class NarrowDependency[T](rdd: RDD[T]) extends Dependency(rdd) {
4543

4644

4745
/**
48-
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
49-
*
46+
* <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
5047
* Represents a dependency on the output of a shuffle stage.
5148
* @param rdd the parent RDD
5249
* @param partitioner partitioner used to partition the shuffle output
@@ -65,8 +62,7 @@ class ShuffleDependency[K, V](
6562

6663

6764
/**
68-
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
69-
*
65+
* <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
7066
* Represents a one-to-one dependency between partitions of the parent and child RDDs.
7167
*/
7268
class OneToOneDependency[T](rdd: RDD[T]) extends NarrowDependency[T](rdd) {
@@ -75,8 +71,7 @@ class OneToOneDependency[T](rdd: RDD[T]) extends NarrowDependency[T](rdd) {
7571

7672

7773
/**
78-
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
79-
*
74+
* <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
8075
* Represents a one-to-one dependency between ranges of partitions in the parent and child RDDs.
8176
* @param rdd the parent RDD
8277
* @param inStart the start of the range in the parent RDD

core/src/main/scala/org/apache/spark/FutureAction.scala

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -25,8 +25,7 @@ import org.apache.spark.rdd.RDD
2525
import org.apache.spark.scheduler.{JobFailed, JobSucceeded, JobWaiter}
2626

2727
/**
28-
* <span class="badge badge-red">EXPERIMENTAL API</span>
29-
*
28+
* <span class="badge" style="float: right; background-color: #257080;">EXPERIMENTAL API</span>
3029
* A future for the result of an action to support cancellation. This is an extension of the
3130
* Scala Future interface to support cancellation.
3231
*/
@@ -86,8 +85,7 @@ trait FutureAction[T] extends Future[T] {
8685

8786

8887
/**
89-
* <span class="badge badge-red">EXPERIMENTAL API</span>
90-
*
88+
* <span class="badge" style="float: right; background-color: #257080;">EXPERIMENTAL API</span>
9189
* A [[FutureAction]] holding the result of an action that triggers a single job. Examples include
9290
* count, collect, reduce.
9391
*/
@@ -152,8 +150,7 @@ class SimpleFutureAction[T] private[spark](jobWaiter: JobWaiter[_], resultFunc:
152150

153151

154152
/**
155-
* <span class="badge badge-red">EXPERIMENTAL API</span>
156-
*
153+
* <span class="badge" style="float: right; background-color: #257080;">EXPERIMENTAL API</span>
157154
* A [[FutureAction]] for actions that could trigger multiple Spark jobs. Examples include take,
158155
* takeSample. Cancellation works by setting the cancelled flag to true and interrupting the
159156
* action thread if it is being blocked by a job.

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -666,7 +666,7 @@ class SparkContext(
666666
}
667667

668668
/**
669-
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
669+
* <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
670670
*
671671
* Register a listener to receive up-calls from events that happen during execution.
672672
*/
@@ -979,7 +979,7 @@ class SparkContext(
979979
}
980980

981981
/**
982-
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
982+
* <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
983983
*
984984
* Run a job that can return approximate results.
985985
*/
@@ -998,7 +998,7 @@ class SparkContext(
998998
}
999999

10001000
/**
1001-
* <span class="badge badge-red">EXPERIMENTAL API</span>
1001+
* <span class="badge" style="float: right; background-color: #257080;">EXPERIMENTAL API</span>
10021002
*
10031003
* Submit a job for execution and return a FutureJob holding the result.
10041004
*/

core/src/main/scala/org/apache/spark/TaskContext.scala

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,8 +22,7 @@ import scala.collection.mutable.ArrayBuffer
2222
import org.apache.spark.executor.TaskMetrics
2323

2424
/**
25-
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
26-
*
25+
* <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
2726
* Contextual information about a task which can be read or mutated during execution.
2827
*/
2928
class TaskContext(

core/src/main/scala/org/apache/spark/broadcast/BroadcastFactory.scala

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,8 +21,7 @@ import org.apache.spark.SecurityManager
2121
import org.apache.spark.SparkConf
2222

2323
/**
24-
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
25-
*
24+
* <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
2625
* An interface for all the broadcast implementations in Spark (to allow
2726
* multiple broadcast implementations). SparkContext uses a user-specified
2827
* BroadcastFactory implementation to instantiate a particular broadcast for the

core/src/main/scala/org/apache/spark/executor/TaskMetrics.scala

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -20,8 +20,7 @@ package org.apache.spark.executor
2020
import org.apache.spark.storage.{BlockId, BlockStatus}
2121

2222
/**
23-
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
24-
*
23+
* <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
2524
* Metrics tracked during the execution of a task.
2625
*/
2726
class TaskMetrics extends Serializable {
@@ -88,8 +87,7 @@ object TaskMetrics {
8887

8988

9089
/**
91-
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
92-
*
90+
* <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
9391
* Metrics pertaining to shuffle data read in a given task.
9492
*/
9593
class ShuffleReadMetrics extends Serializable {
@@ -127,8 +125,7 @@ class ShuffleReadMetrics extends Serializable {
127125
}
128126

129127
/**
130-
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
131-
*
128+
* <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
132129
* Metrics pertaining to shuffle data written in a given task.
133130
*/
134131
class ShuffleWriteMetrics extends Serializable {

core/src/main/scala/org/apache/spark/io/CompressionCodec.scala

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -25,8 +25,7 @@ import org.xerial.snappy.{SnappyInputStream, SnappyOutputStream}
2525
import org.apache.spark.SparkConf
2626

2727
/**
28-
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
29-
*
28+
* <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
3029
* CompressionCodec allows the customization of choosing different compression implementations
3130
* to be used in block storage.
3231
*
@@ -58,8 +57,7 @@ private[spark] object CompressionCodec {
5857

5958

6059
/**
61-
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
62-
*
60+
* <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
6361
* LZF implementation of [[org.apache.spark.io.CompressionCodec]].
6462
*
6563
* Note: The wire protocol for this codec is not guaranteed to be compatible across versions
@@ -77,8 +75,7 @@ class LZFCompressionCodec(conf: SparkConf) extends CompressionCodec {
7775

7876

7977
/**
80-
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
81-
*
78+
* <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
8279
* Snappy implementation of [[org.apache.spark.io.CompressionCodec]].
8380
* Block size can be configured by spark.io.compression.snappy.block.size.
8481
*

core/src/main/scala/org/apache/spark/package.scala

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -33,15 +33,15 @@ package org.apache
3333
* Java programmers should reference the [[spark.api.java]] package
3434
* for Spark programming APIs in Java.
3535
*
36-
* Classes and methods marked with <span class="badge badge-red">EXPERIMENTAL API</span> are
37-
* user-facing features which have not been officially adopted by the Spark project. These are
38-
* subject to change or removal in minor releases.
39-
*
40-
* Classes and methods marked with <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
41-
* are intended for advanced users want to extend Spark through lower level interfaces. These are
42-
* subject to changes or removal in minor releases.
36+
* Classes and methods marked with <span class="badge" style="background-color: #257080">
37+
* EXPERIMENTAL API</span> are user-facing features which have not been officially adopted by the
38+
* Spark project. These are subject to change or removal in minor releases.
4339
*
40+
* Classes and methods marked with <span class="badge" style="background-color: #44751E">
41+
* DEVELOPER API</span> are intended for advanced users who want to extend Spark through lower
42+
* level interfaces. These are subject to change or removal in minor releases.
4443
*/
44+
4545
package object spark {
4646
// For package docs only
47-
}
47+
}

core/src/main/scala/org/apache/spark/partial/BoundedDouble.scala

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,8 +18,7 @@
1818
package org.apache.spark.partial
1919

2020
/**
21-
* <span class="badge badge-red">EXPERIMENTAL API</span>
22-
*
21+
* <span class="badge" style="float: right; background-color: #257080;">EXPERIMENTAL API</span>
2322
* A Double value with error bars and associated confidence.
2423
*/
2524
class BoundedDouble(val mean: Double, val confidence: Double, val low: Double, val high: Double) {

0 commit comments

Comments
 (0)