Skip to content

Commit 8773d0d

Browse files
committed
add DeveloperApi annotation
1 parent da31733 commit 8773d0d

19 files changed

+64
-16
lines changed

mllib/src/main/scala/org/apache/spark/mllib/api/python/PythonMLLibAPI.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@ package org.apache.spark.mllib.api.python
1919

2020
import java.nio.{ByteBuffer, ByteOrder}
2121

22+
import org.apache.spark.annotation.DeveloperApi
2223
import org.apache.spark.api.java.JavaRDD
2324
import org.apache.spark.mllib.classification._
2425
import org.apache.spark.mllib.clustering._
@@ -32,6 +33,7 @@ import org.apache.spark.rdd.RDD
3233
*
3334
* The Java stubs necessary for the Python mllib bindings.
3435
*/
36+
@DeveloperApi
3537
class PythonMLLibAPI extends Serializable {
3638
private def deserializeDoubleVector(bytes: Array[Byte]): Array[Double] = {
3739
val packetLength = bytes.length

mllib/src/main/scala/org/apache/spark/mllib/optimization/Gradient.scala

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,13 +19,15 @@ package org.apache.spark.mllib.optimization
1919

2020
import breeze.linalg.{axpy => brzAxpy}
2121

22+
import org.apache.spark.annotation.DeveloperApi
2223
import org.apache.spark.mllib.linalg.{Vectors, Vector}
2324

2425
/**
2526
* :: DeveloperApi ::
2627
*
2728
* Class used to compute the gradient for a loss function, given a single data point.
2829
*/
30+
@DeveloperApi
2931
abstract class Gradient extends Serializable {
3032
/**
3133
* Compute the gradient and loss given the features of a single data point.
@@ -58,6 +60,7 @@ abstract class Gradient extends Serializable {
5860
* Compute gradient and loss for a logistic loss function, as used in binary classification.
5961
* See also the documentation for the precise formulation.
6062
*/
63+
@DeveloperApi
6164
class LogisticGradient extends Gradient {
6265
override def compute(data: Vector, label: Double, weights: Vector): (Vector, Double) = {
6366
val brzData = data.toBreeze
@@ -103,6 +106,7 @@ class LogisticGradient extends Gradient {
103106
* L = 1/n ||A weights-y||^2
104107
* See also the documentation for the precise formulation.
105108
*/
109+
@DeveloperApi
106110
class LeastSquaresGradient extends Gradient {
107111
override def compute(data: Vector, label: Double, weights: Vector): (Vector, Double) = {
108112
val brzData = data.toBreeze
@@ -136,6 +140,7 @@ class LeastSquaresGradient extends Gradient {
136140
* See also the documentation for the precise formulation.
137141
* NOTE: This assumes that the labels are {0,1}
138142
*/
143+
@DeveloperApi
139144
class HingeGradient extends Gradient {
140145
override def compute(data: Vector, label: Double, weights: Vector): (Vector, Double) = {
141146
val brzData = data.toBreeze

mllib/src/main/scala/org/apache/spark/mllib/optimization/GradientDescent.scala

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@ import scala.collection.mutable.ArrayBuffer
2121

2222
import breeze.linalg.{DenseVector => BDV}
2323

24+
import org.apache.spark.annotation.DeveloperApi
2425
import org.apache.spark.Logging
2526
import org.apache.spark.rdd.RDD
2627
import org.apache.spark.mllib.linalg.{Vectors, Vector}
@@ -32,6 +33,7 @@ import org.apache.spark.mllib.linalg.{Vectors, Vector}
3233
* @param gradient Gradient function to be used.
3334
* @param updater Updater to be used to update weights after every iteration.
3435
*/
36+
@DeveloperApi
3537
class GradientDescent(private var gradient: Gradient, private var updater: Updater)
3638
extends Optimizer with Logging
3739
{
@@ -114,6 +116,7 @@ class GradientDescent(private var gradient: Gradient, private var updater: Updat
114116
*
115117
* Top-level method to run gradient descent.
116118
*/
119+
@DeveloperApi
117120
object GradientDescent extends Logging {
118121
/**
119122
* Run stochastic gradient descent (SGD) in parallel using mini batches.

mllib/src/main/scala/org/apache/spark/mllib/optimization/Optimizer.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,13 +19,15 @@ package org.apache.spark.mllib.optimization
1919

2020
import org.apache.spark.rdd.RDD
2121

22+
import org.apache.spark.annotation.DeveloperApi
2223
import org.apache.spark.mllib.linalg.Vector
2324

2425
/**
2526
* :: DeveloperApi ::
2627
*
2728
* Trait for optimization problem solvers.
2829
*/
30+
@DeveloperApi
2931
trait Optimizer extends Serializable {
3032

3133
/**

mllib/src/main/scala/org/apache/spark/mllib/optimization/Updater.scala

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@ import scala.math._
2121

2222
import breeze.linalg.{norm => brzNorm, axpy => brzAxpy, Vector => BV}
2323

24+
import org.apache.spark.annotation.DeveloperApi
2425
import org.apache.spark.mllib.linalg.{Vectors, Vector}
2526

2627
/**
@@ -37,6 +38,7 @@ import org.apache.spark.mllib.linalg.{Vectors, Vector}
3738
* The updater is responsible to also perform the update coming from the
3839
* regularization term R(w) (if any regularization is used).
3940
*/
41+
@DeveloperApi
4042
abstract class Updater extends Serializable {
4143
/**
4244
* Compute an updated value for weights given the gradient, stepSize, iteration number and
@@ -66,6 +68,7 @@ abstract class Updater extends Serializable {
6668
* A simple updater for gradient descent *without* any regularization.
6769
* Uses a step-size decreasing with the square root of the number of iterations.
6870
*/
71+
@DeveloperApi
6972
class SimpleUpdater extends Updater {
7073
override def compute(
7174
weightsOld: Vector,
@@ -101,6 +104,7 @@ class SimpleUpdater extends Updater {
101104
*
102105
* Equivalently, set weight component to signum(w) * max(0.0, abs(w) - shrinkageVal)
103106
*/
107+
@DeveloperApi
104108
class L1Updater extends Updater {
105109
override def compute(
106110
weightsOld: Vector,
@@ -132,6 +136,7 @@ class L1Updater extends Updater {
132136
* R(w) = 1/2 ||w||^2
133137
* Uses a step-size decreasing with the square root of the number of iterations.
134138
*/
139+
@DeveloperApi
135140
class SquaredL2Updater extends Updater {
136141
override def compute(
137142
weightsOld: Vector,

mllib/src/main/scala/org/apache/spark/mllib/recommendation/MatrixFactorizationModel.scala

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -17,13 +17,14 @@
1717

1818
package org.apache.spark.mllib.recommendation
1919

20+
import org.jblas._
21+
22+
import org.apache.spark.annotation.DeveloperApi
23+
import org.apache.spark.api.java.JavaRDD
2024
import org.apache.spark.rdd.RDD
2125
import org.apache.spark.SparkContext._
2226
import org.apache.spark.mllib.api.python.PythonMLLibAPI
2327

24-
import org.jblas._
25-
import org.apache.spark.api.java.JavaRDD
26-
2728

2829
/**
2930
* Model representing the result of matrix factorization.

mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Entropy.scala

Lines changed: 14 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,8 @@
1717

1818
package org.apache.spark.mllib.tree.impurity
1919

20+
import org.apache.spark.annotation.DeveloperApi
21+
2022
/**
2123
* :: Experimental ::
2224
*
@@ -25,7 +27,7 @@ package org.apache.spark.mllib.tree.impurity
2527
*/
2628
object Entropy extends Impurity {
2729

28-
private[tree] def log2(x: Double) = scala.math.log(x) / scala.math.log(2)
30+
private[tree] def log2(x: Double) = scala.math.log(x) / scala.math.log(2)
2931

3032
/**
3133
* :: DeveloperApi ::
@@ -35,16 +37,17 @@ object Entropy extends Impurity {
3537
* @param c1 count of instances with label 1
3638
* @return entropy value
3739
*/
38-
override def calculate(c0: Double, c1: Double): Double = {
39-
if (c0 == 0 || c1 == 0) {
40-
0
41-
} else {
42-
val total = c0 + c1
43-
val f0 = c0 / total
44-
val f1 = c1 / total
45-
-(f0 * log2(f0)) - (f1 * log2(f1))
46-
}
47-
}
40+
@DeveloperApi
41+
override def calculate(c0: Double, c1: Double): Double = {
42+
if (c0 == 0 || c1 == 0) {
43+
0
44+
} else {
45+
val total = c0 + c1
46+
val f0 = c0 / total
47+
val f1 = c1 / total
48+
-(f0 * log2(f0)) - (f1 * log2(f1))
49+
}
50+
}
4851

4952
override def calculate(count: Double, sum: Double, sumSquares: Double): Double =
5053
throw new UnsupportedOperationException("Entropy.calculate")

mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Gini.scala

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,8 @@
1717

1818
package org.apache.spark.mllib.tree.impurity
1919

20+
import org.apache.spark.annotation.DeveloperApi
21+
2022
/**
2123
* :: Experimental ::
2224
*
@@ -34,6 +36,7 @@ object Gini extends Impurity {
3436
* @param c1 count of instances with label 1
3537
* @return Gini coefficient value
3638
*/
39+
@DeveloperApi
3740
override def calculate(c0: Double, c1: Double): Double = {
3841
if (c0 == 0 || c1 == 0) {
3942
0

mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Impurity.scala

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,8 @@
1717

1818
package org.apache.spark.mllib.tree.impurity
1919

20+
import org.apache.spark.annotation.DeveloperApi
21+
2022
/**
2123
* :: Experimental ::
2224
*
@@ -32,6 +34,7 @@ trait Impurity extends Serializable {
3234
* @param c1 count of instances with label 1
3335
* @return information value
3436
*/
37+
@DeveloperApi
3538
def calculate(c0 : Double, c1 : Double): Double
3639

3740
/**
@@ -43,6 +46,6 @@ trait Impurity extends Serializable {
4346
* @param sumSquares summation of squares of the labels
4447
* @return information value
4548
*/
49+
@DeveloperApi
4650
def calculate(count: Double, sum: Double, sumSquares: Double): Double
47-
4851
}

mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Variance.scala

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,8 @@
1717

1818
package org.apache.spark.mllib.tree.impurity
1919

20+
import org.apache.spark.annotation.DeveloperApi
21+
2022
/**
2123
* :: Experimental ::
2224
*
@@ -34,6 +36,7 @@ object Variance extends Impurity {
3436
* @param sum sum of labels
3537
* @param sumSquares summation of squares of the labels
3638
*/
39+
@DeveloperApi
3740
override def calculate(count: Double, sum: Double, sumSquares: Double): Double = {
3841
val squaredLoss = sumSquares - (sum * sum) / count
3942
squaredLoss / count

0 commit comments

Comments (0)