@@ -28,7 +28,10 @@ import org.apache.spark.storage.StorageLevel
  * A more compact class to represent a rating than Tuple3[Int, Int, Double].
  */
 @Since("0.8.0")
-case class Rating(user: Int, product: Int, rating: Double)
+case class Rating @Since("0.8.0") (
+    @Since("0.8.0") user: Int,
+    @Since("0.8.0") product: Int,
+    @Since("0.8.0") rating: Double)
 
 /**
  * Alternating Least Squares matrix factorization.
@@ -59,6 +62,7 @@ case class Rating(user: Int, product: Int, rating: Double)
  * indicated user
  * preferences rather than explicit ratings given to items.
  */
+@Since("0.8.0")
 class ALS private (
     private var numUserBlocks: Int,
     private var numProductBlocks: Int,
@@ -74,6 +78,7 @@ class ALS private (
    * Constructs an ALS instance with default parameters: {numBlocks: -1, rank: 10, iterations: 10,
    * lambda: 0.01, implicitPrefs: false, alpha: 1.0}.
    */
+  @Since("0.8.0")
   def this() = this(-1, -1, 10, 10, 0.01, false, 1.0)
 
   /** If true, do alternating nonnegative least squares. */
@@ -90,6 +95,7 @@ class ALS private (
    * Set the number of blocks for both user blocks and product blocks to parallelize the computation
    * into; pass -1 for an auto-configured number of blocks. Default: -1.
    */
+  @Since("0.8.0")
   def setBlocks(numBlocks: Int): this.type = {
     this.numUserBlocks = numBlocks
     this.numProductBlocks = numBlocks
@@ -99,6 +105,7 @@ class ALS private (
   /**
    * Set the number of user blocks to parallelize the computation.
    */
+  @Since("1.1.0")
   def setUserBlocks(numUserBlocks: Int): this.type = {
     this.numUserBlocks = numUserBlocks
     this
@@ -107,30 +114,35 @@ class ALS private (
   /**
    * Set the number of product blocks to parallelize the computation.
    */
+  @Since("1.1.0")
   def setProductBlocks(numProductBlocks: Int): this.type = {
     this.numProductBlocks = numProductBlocks
     this
   }
 
   /** Set the rank of the feature matrices computed (number of features). Default: 10. */
+  @Since("0.8.0")
   def setRank(rank: Int): this.type = {
     this.rank = rank
     this
   }
 
   /** Set the number of iterations to run. Default: 10. */
+  @Since("0.8.0")
   def setIterations(iterations: Int): this.type = {
     this.iterations = iterations
     this
   }
 
   /** Set the regularization parameter, lambda. Default: 0.01. */
+  @Since("0.8.0")
   def setLambda(lambda: Double): this.type = {
     this.lambda = lambda
     this
   }
 
   /** Sets whether to use implicit preference. Default: false. */
+  @Since("0.8.1")
   def setImplicitPrefs(implicitPrefs: Boolean): this.type = {
     this.implicitPrefs = implicitPrefs
     this
@@ -139,12 +151,14 @@ class ALS private (
   /**
    * Sets the constant used in computing confidence in implicit ALS. Default: 1.0.
    */
+  @Since("0.8.1")
   def setAlpha(alpha: Double): this.type = {
     this.alpha = alpha
     this
   }
 
   /** Sets a random seed to have deterministic results. */
+  @Since("1.0.0")
   def setSeed(seed: Long): this.type = {
     this.seed = seed
     this
@@ -154,6 +168,7 @@ class ALS private (
    * Set whether the least-squares problems solved at each iteration should have
    * nonnegativity constraints.
    */
+  @Since("1.1.0")
   def setNonnegative(b: Boolean): this.type = {
     this.nonnegative = b
     this
@@ -166,6 +181,7 @@ class ALS private (
    * set `spark.rdd.compress` to `true` to reduce the space requirement, at the cost of speed.
    */
   @DeveloperApi
+  @Since("1.1.0")
   def setIntermediateRDDStorageLevel(storageLevel: StorageLevel): this.type = {
     require(storageLevel != StorageLevel.NONE,
       "ALS is not designed to run without persisting intermediate RDDs.")
@@ -181,6 +197,7 @@ class ALS private (
    * at the cost of speed.
    */
   @DeveloperApi
+  @Since("1.3.0")
   def setFinalRDDStorageLevel(storageLevel: StorageLevel): this.type = {
     this.finalRDDStorageLevel = storageLevel
     this
@@ -194,6 +211,7 @@ class ALS private (
    * this setting is ignored.
    */
   @DeveloperApi
+  @Since("1.4.0")
   def setCheckpointInterval(checkpointInterval: Int): this.type = {
     this.checkpointInterval = checkpointInterval
     this
@@ -203,6 +221,7 @@ class ALS private (
    * Run ALS with the configured parameters on an input RDD of (user, product, rating) triples.
    * Returns a MatrixFactorizationModel with feature vectors for each user and product.
    */
+  @Since("0.8.0")
   def run(ratings: RDD[Rating]): MatrixFactorizationModel = {
     val sc = ratings.context
 
@@ -250,6 +269,7 @@ class ALS private (
   /**
    * Java-friendly version of [[ALS.run]].
   */
+  @Since("1.3.0")
   def run(ratings: JavaRDD[Rating]): MatrixFactorizationModel = run(ratings.rdd)
 }
 
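For reference, here is a minimal sketch (not part of the patch) of the spark.mllib ALS API whose public members the @Since annotations above document. It assumes a running SparkContext named sc and a hypothetical input file "ratings.csv" whose lines look like user,product,rating.

// Illustrative usage of the annotated API; sc and "ratings.csv" are assumptions.
import org.apache.spark.mllib.recommendation.{ALS, Rating}

// Parse each CSV line into the Rating case class shown in the first hunk.
val ratings = sc.textFile("ratings.csv").map { line =>
  val Array(user, product, rating) = line.split(',')
  Rating(user.toInt, product.toInt, rating.toDouble)
}

// Configure ALS through the builder-style setters annotated in this diff,
// then call run(), which returns a MatrixFactorizationModel.
val model = new ALS()
  .setRank(10)
  .setIterations(10)
  .setLambda(0.01)
  .setSeed(42L)
  .run(ratings)

// Predict a single (user, product) rating from the learned factors.
val predicted = model.predict(1, 42)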