Commit 0e7087c

update since versions in mllib.recommendation

1 parent b37f0cc · commit 0e7087c
2 files changed (+25 −5 lines changed)

mllib/src/main/scala/org/apache/spark/mllib/recommendation/ALS.scala

Lines changed: 21 additions & 1 deletion
@@ -28,7 +28,10 @@ import org.apache.spark.storage.StorageLevel
  * A more compact class to represent a rating than Tuple3[Int, Int, Double].
  */
 @Since("0.8.0")
-case class Rating(user: Int, product: Int, rating: Double)
+case class Rating @Since("0.8.0") (
+    @Since("0.8.0") user: Int,
+    @Since("0.8.0") product: Int,
+    @Since("0.8.0") rating: Double)
 
 /**
  * Alternating Least Squares matrix factorization.
@@ -59,6 +62,7 @@ case class Rating(user: Int, product: Int, rating: Double)
  * indicated user
  * preferences rather than explicit ratings given to items.
  */
+@Since("0.8.0")
 class ALS private (
     private var numUserBlocks: Int,
     private var numProductBlocks: Int,
@@ -74,6 +78,7 @@ class ALS private (
    * Constructs an ALS instance with default parameters: {numBlocks: -1, rank: 10, iterations: 10,
    * lambda: 0.01, implicitPrefs: false, alpha: 1.0}.
    */
+  @Since("0.8.0")
   def this() = this(-1, -1, 10, 10, 0.01, false, 1.0)
 
   /** If true, do alternating nonnegative least squares. */
@@ -90,6 +95,7 @@ class ALS private (
    * Set the number of blocks for both user blocks and product blocks to parallelize the computation
    * into; pass -1 for an auto-configured number of blocks. Default: -1.
    */
+  @Since("0.8.0")
   def setBlocks(numBlocks: Int): this.type = {
     this.numUserBlocks = numBlocks
     this.numProductBlocks = numBlocks
@@ -99,6 +105,7 @@ class ALS private (
   /**
    * Set the number of user blocks to parallelize the computation.
    */
+  @Since("1.1.0")
   def setUserBlocks(numUserBlocks: Int): this.type = {
     this.numUserBlocks = numUserBlocks
     this
@@ -107,30 +114,35 @@ class ALS private (
   /**
    * Set the number of product blocks to parallelize the computation.
    */
+  @Since("1.1.0")
   def setProductBlocks(numProductBlocks: Int): this.type = {
     this.numProductBlocks = numProductBlocks
     this
   }
 
   /** Set the rank of the feature matrices computed (number of features). Default: 10. */
+  @Since("0.8.0")
   def setRank(rank: Int): this.type = {
     this.rank = rank
     this
   }
 
   /** Set the number of iterations to run. Default: 10. */
+  @Since("0.8.0")
   def setIterations(iterations: Int): this.type = {
     this.iterations = iterations
     this
   }
 
   /** Set the regularization parameter, lambda. Default: 0.01. */
+  @Since("0.8.0")
   def setLambda(lambda: Double): this.type = {
     this.lambda = lambda
     this
   }
 
   /** Sets whether to use implicit preference. Default: false. */
+  @Since("0.8.1")
   def setImplicitPrefs(implicitPrefs: Boolean): this.type = {
     this.implicitPrefs = implicitPrefs
     this
@@ -139,12 +151,14 @@ class ALS private (
   /**
    * Sets the constant used in computing confidence in implicit ALS. Default: 1.0.
    */
+  @Since("0.8.1")
   def setAlpha(alpha: Double): this.type = {
     this.alpha = alpha
     this
   }
 
   /** Sets a random seed to have deterministic results. */
+  @Since("1.0.0")
   def setSeed(seed: Long): this.type = {
     this.seed = seed
     this
@@ -154,6 +168,7 @@ class ALS private (
    * Set whether the least-squares problems solved at each iteration should have
    * nonnegativity constraints.
    */
+  @Since("1.1.0")
   def setNonnegative(b: Boolean): this.type = {
     this.nonnegative = b
     this
@@ -166,6 +181,7 @@ class ALS private (
    * set `spark.rdd.compress` to `true` to reduce the space requirement, at the cost of speed.
    */
   @DeveloperApi
+  @Since("1.1.0")
   def setIntermediateRDDStorageLevel(storageLevel: StorageLevel): this.type = {
     require(storageLevel != StorageLevel.NONE,
       "ALS is not designed to run without persisting intermediate RDDs.")
@@ -181,6 +197,7 @@ class ALS private (
    * at the cost of speed.
    */
   @DeveloperApi
+  @Since("1.3.0")
   def setFinalRDDStorageLevel(storageLevel: StorageLevel): this.type = {
     this.finalRDDStorageLevel = storageLevel
     this
@@ -194,6 +211,7 @@ class ALS private (
    * this setting is ignored.
    */
   @DeveloperApi
+  @Since("1.4.0")
   def setCheckpointInterval(checkpointInterval: Int): this.type = {
     this.checkpointInterval = checkpointInterval
     this
@@ -203,6 +221,7 @@ class ALS private (
    * Run ALS with the configured parameters on an input RDD of (user, product, rating) triples.
    * Returns a MatrixFactorizationModel with feature vectors for each user and product.
    */
+  @Since("0.8.0")
   def run(ratings: RDD[Rating]): MatrixFactorizationModel = {
     val sc = ratings.context
 
@@ -250,6 +269,7 @@ class ALS private (
   /**
    * Java-friendly version of [[ALS.run]].
   */
+  @Since("1.3.0")
   def run(ratings: JavaRDD[Rating]): MatrixFactorizationModel = run(ratings.rdd)
 }
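
For context, a minimal sketch of how the API annotated above fits together: build an ALS instance with the no-arg constructor, configure it through the chained setters, and call run on an RDD[Rating]. The constructor, setters, and run signature come from the diff; the SparkContext setup and the toy ratings are illustrative assumptions, not part of this commit.

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.mllib.recommendation.{ALS, Rating}

object ALSSinceExample {
  def main(args: Array[String]): Unit = {
    // Assumed local setup; any SparkContext works.
    val sc = new SparkContext(new SparkConf().setAppName("ALSSinceExample").setMaster("local[*]"))

    // Hypothetical (user, product, rating) triples using the Rating case class from the diff.
    val ratings = sc.parallelize(Seq(
      Rating(1, 10, 4.0),
      Rating(1, 20, 1.0),
      Rating(2, 10, 5.0),
      Rating(2, 30, 3.0)))

    // Configure ALS through the setters annotated in this commit, then train.
    val model = new ALS()          // @Since("0.8.0") no-arg constructor
      .setRank(10)                 // @Since("0.8.0")
      .setIterations(10)           // @Since("0.8.0")
      .setLambda(0.01)             // @Since("0.8.0")
      .setBlocks(-1)               // @Since("0.8.0"); -1 = auto-configure
      .setSeed(42L)                // @Since("1.0.0")
      .run(ratings)                // @Since("0.8.0"), returns a MatrixFactorizationModel

    println(model.predict(1, 30))  // predicted rating for an unseen (user, product) pair
    sc.stop()
  }
}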

mllib/src/main/scala/org/apache/spark/mllib/recommendation/MatrixFactorizationModel.scala

Lines changed: 4 additions & 4 deletions
@@ -52,10 +52,10 @@ import org.apache.spark.storage.StorageLevel
  * and the features computed for this product.
  */
 @Since("0.8.0")
-class MatrixFactorizationModel(
-    val rank: Int,
-    val userFeatures: RDD[(Int, Array[Double])],
-    val productFeatures: RDD[(Int, Array[Double])])
+class MatrixFactorizationModel @Since("0.8.0") (
+    @Since("0.8.0") val rank: Int,
+    @Since("0.8.0") val userFeatures: RDD[(Int, Array[Double])],
+    @Since("0.8.0") val productFeatures: RDD[(Int, Array[Double])])
   extends Saveable with Serializable with Logging {
 
   require(rank > 0)
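
And a short sketch of consuming the returned model. The rank, userFeatures, and productFeatures members are the ones annotated above; save, load, and predict are existing MatrixFactorizationModel methods not touched by this commit, and the path and the `sc`/`model` values carried over from the previous sketch are assumptions.

import org.apache.spark.mllib.recommendation.MatrixFactorizationModel

// The three public members annotated in this commit.
println(s"rank = ${model.rank}")
println(s"users with feature vectors: ${model.userFeatures.count()}")
println(s"products with feature vectors: ${model.productFeatures.count()}")

// Round-trip through Saveable (save/load are existing MLlib APIs, not part of this diff).
val path = "/tmp/als-model"  // hypothetical path
model.save(sc, path)
val reloaded = MatrixFactorizationModel.load(sc, path)
println(reloaded.predict(1, 30))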
