
Commit 321d775

mengxr authored and DB Tsai committed
[SPARK-10236] [MLLIB] update since versions in mllib.feature
Same as #8421 but for `mllib.feature`. cc dbtsai

Author: Xiangrui Meng <meng@databricks.com>

Closes #8449 from mengxr/SPARK-10236.feature and squashes the following commits:

0e8d658 [Xiangrui Meng] remove unnecessary comment
ad70b03 [Xiangrui Meng] update since versions in mllib.feature
1 parent 4657fa1 commit 321d775
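
For context on the pattern every diff below repeats: MLlib's `@Since` annotation records the release in which a public API element first became available, and it can be placed on a class, on its primary constructor (between the class name and the parameter list), and on individual constructor parameters. The following is a minimal sketch of that annotated shape, using a made-up `ExampleScalerModel` with hypothetical `scale` and `offset` parameters; it is an illustration only and is not part of this commit or of the Spark source.

import org.apache.spark.annotation.{Experimental, Since}
import org.apache.spark.mllib.feature.VectorTransformer
import org.apache.spark.mllib.linalg.{Vector, Vectors}

// Hypothetical transformer, for illustration of @Since placement only.
@Since("1.1.0")                            // release in which the class appeared
@Experimental
class ExampleScalerModel @Since("1.3.0") ( // release of the current primary constructor
    @Since("1.3.0") val scale: Vector,     // parameter added in 1.3.0
    @Since("1.1.0") val offset: Vector)    // parameter that has existed since 1.1.0
  extends VectorTransformer {

  @Since("1.3.0")
  override def transform(vector: Vector): Vector = {
    // Element-wise scale-and-shift, just to give the sketch a working body.
    Vectors.dense(Array.tabulate(vector.size)(i => vector(i) * scale(i) + offset(i)))
  }
}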

8 files changed: +21 −16 lines


mllib/src/main/scala/org/apache/spark/mllib/clustering/PowerIterationClustering.scala

Lines changed: 0 additions & 2 deletions
@@ -71,8 +71,6 @@ object PowerIterationClusteringModel extends Loader[PowerIterationClusteringMode
   private[clustering]
   val thisClassName = "org.apache.spark.mllib.clustering.PowerIterationClusteringModel"
 
-  /**
-   */
   @Since("1.4.0")
   def save(sc: SparkContext, model: PowerIterationClusteringModel, path: String): Unit = {
     val sqlContext = new SQLContext(sc)

mllib/src/main/scala/org/apache/spark/mllib/feature/ChiSqSelector.scala

Lines changed: 2 additions & 2 deletions
@@ -33,7 +33,7 @@ import org.apache.spark.rdd.RDD
  */
 @Since("1.3.0")
 @Experimental
-class ChiSqSelectorModel (
+class ChiSqSelectorModel @Since("1.3.0") (
     @Since("1.3.0") val selectedFeatures: Array[Int]) extends VectorTransformer {
 
   require(isSorted(selectedFeatures), "Array has to be sorted asc")
@@ -112,7 +112,7 @@ class ChiSqSelectorModel (
  */
 @Since("1.3.0")
 @Experimental
-class ChiSqSelector (
+class ChiSqSelector @Since("1.3.0") (
     @Since("1.3.0") val numTopFeatures: Int) extends Serializable {
 
   /**

mllib/src/main/scala/org/apache/spark/mllib/feature/ElementwiseProduct.scala

Lines changed: 2 additions & 1 deletion
@@ -29,7 +29,8 @@ import org.apache.spark.mllib.linalg._
  */
 @Since("1.4.0")
 @Experimental
-class ElementwiseProduct(val scalingVec: Vector) extends VectorTransformer {
+class ElementwiseProduct @Since("1.4.0") (
+    @Since("1.4.0") val scalingVec: Vector) extends VectorTransformer {
 
   /**
    * Does the hadamard product transformation.

mllib/src/main/scala/org/apache/spark/mllib/feature/IDF.scala

Lines changed: 4 additions & 2 deletions
@@ -39,8 +39,9 @@ import org.apache.spark.rdd.RDD
  */
 @Since("1.1.0")
 @Experimental
-class IDF(val minDocFreq: Int) {
+class IDF @Since("1.2.0") (@Since("1.2.0") val minDocFreq: Int) {
 
+  @Since("1.1.0")
   def this() = this(0)
 
   // TODO: Allow different IDF formulations.
@@ -162,7 +163,8 @@ private object IDF {
  * Represents an IDF model that can transform term frequency vectors.
  */
 @Experimental
-class IDFModel private[spark] (val idf: Vector) extends Serializable {
+@Since("1.1.0")
+class IDFModel private[spark] (@Since("1.1.0") val idf: Vector) extends Serializable {
 
   /**
    * Transforms term frequency (TF) vectors to TF-IDF vectors.

mllib/src/main/scala/org/apache/spark/mllib/feature/Normalizer.scala

Lines changed: 1 addition & 1 deletion
@@ -33,7 +33,7 @@ import org.apache.spark.mllib.linalg.{DenseVector, SparseVector, Vector, Vectors
  */
 @Since("1.1.0")
 @Experimental
-class Normalizer(p: Double) extends VectorTransformer {
+class Normalizer @Since("1.1.0") (p: Double) extends VectorTransformer {
 
   @Since("1.1.0")
   def this() = this(2)

mllib/src/main/scala/org/apache/spark/mllib/feature/PCA.scala

Lines changed: 5 additions & 2 deletions
@@ -29,7 +29,7 @@ import org.apache.spark.rdd.RDD
  * @param k number of principal components
  */
 @Since("1.4.0")
-class PCA(val k: Int) {
+class PCA @Since("1.4.0") (@Since("1.4.0") val k: Int) {
   require(k >= 1, s"PCA requires a number of principal components k >= 1 but was given $k")
 
   /**
@@ -74,7 +74,10 @@ class PCA(val k: Int) {
  * @param k number of principal components.
  * @param pc a principal components Matrix. Each column is one principal component.
  */
-class PCAModel private[spark] (val k: Int, val pc: DenseMatrix) extends VectorTransformer {
+@Since("1.4.0")
+class PCAModel private[spark] (
+    @Since("1.4.0") val k: Int,
+    @Since("1.4.0") val pc: DenseMatrix) extends VectorTransformer {
   /**
    * Transform a vector by computed Principal Components.
    *

mllib/src/main/scala/org/apache/spark/mllib/feature/StandardScaler.scala

Lines changed: 6 additions & 6 deletions
@@ -34,7 +34,7 @@ import org.apache.spark.rdd.RDD
  */
 @Since("1.1.0")
 @Experimental
-class StandardScaler(withMean: Boolean, withStd: Boolean) extends Logging {
+class StandardScaler @Since("1.1.0") (withMean: Boolean, withStd: Boolean) extends Logging {
 
   @Since("1.1.0")
   def this() = this(false, true)
@@ -74,11 +74,11 @@ class StandardScaler(withMean: Boolean, withStd: Boolean) extends Logging {
  */
 @Since("1.1.0")
 @Experimental
-class StandardScalerModel (
-    val std: Vector,
-    val mean: Vector,
-    var withStd: Boolean,
-    var withMean: Boolean) extends VectorTransformer {
+class StandardScalerModel @Since("1.3.0") (
+    @Since("1.3.0") val std: Vector,
+    @Since("1.1.0") val mean: Vector,
+    @Since("1.3.0") var withStd: Boolean,
+    @Since("1.3.0") var withMean: Boolean) extends VectorTransformer {
 
   /**
   */

mllib/src/main/scala/org/apache/spark/mllib/feature/Word2Vec.scala

Lines changed: 1 addition & 0 deletions
@@ -436,6 +436,7 @@ class Word2Vec extends Serializable with Logging {
  * (i * vectorSize, i * vectorSize + vectorSize)
  */
 @Experimental
+@Since("1.1.0")
 class Word2VecModel private[mllib] (
     private val wordIndex: Map[String, Int],
     private val wordVectors: Array[Float]) extends Serializable with Saveable {
