@@ -50,23 +50,17 @@ class StandardScalerSuite extends FunSuite with LocalSparkContext {
     val standardizer2 = new StandardScaler()
     val standardizer3 = new StandardScaler(withMean = true, withStd = false)
 
-    withClue("Using a standardizer before fitting the model should throw exception.") {
-      intercept[IllegalStateException] {
-        data.map(standardizer1.transform)
-      }
-    }
-
-    standardizer1.fit(dataRDD)
-    standardizer2.fit(dataRDD)
-    standardizer3.fit(dataRDD)
+    val model1 = standardizer1.fit(dataRDD)
+    val model2 = standardizer2.fit(dataRDD)
+    val model3 = standardizer3.fit(dataRDD)
 
-    val data1 = data.map(standardizer1.transform)
-    val data2 = data.map(standardizer2.transform)
-    val data3 = data.map(standardizer3.transform)
+    val data1 = data.map(model1.transform)
+    val data2 = data.map(model2.transform)
+    val data3 = data.map(model3.transform)
 
-    val data1RDD = standardizer1.transform(dataRDD)
-    val data2RDD = standardizer2.transform(dataRDD)
-    val data3RDD = standardizer3.transform(dataRDD)
+    val data1RDD = model1.transform(dataRDD)
+    val data2RDD = model2.transform(dataRDD)
+    val data3RDD = model3.transform(dataRDD)
 
     val summary = computeSummary(dataRDD)
     val summary1 = computeSummary(data1RDD)
@@ -129,25 +123,25 @@ class StandardScalerSuite extends FunSuite with LocalSparkContext {
     val standardizer2 = new StandardScaler()
     val standardizer3 = new StandardScaler(withMean = true, withStd = false)
 
-    standardizer1.fit(dataRDD)
-    standardizer2.fit(dataRDD)
-    standardizer3.fit(dataRDD)
+    val model1 = standardizer1.fit(dataRDD)
+    val model2 = standardizer2.fit(dataRDD)
+    val model3 = standardizer3.fit(dataRDD)
 
-    val data2 = data.map(standardizer2.transform)
+    val data2 = data.map(model2.transform)
 
     withClue("Standardization with mean can not be applied on sparse input.") {
       intercept[IllegalArgumentException] {
-        data.map(standardizer1.transform)
+        data.map(model1.transform)
       }
     }
 
     withClue("Standardization with mean can not be applied on sparse input.") {
       intercept[IllegalArgumentException] {
-        data.map(standardizer3.transform)
+        data.map(model3.transform)
       }
     }
 
-    val data2RDD = standardizer2.transform(dataRDD)
+    val data2RDD = model2.transform(dataRDD)
 
     val summary2 = computeSummary(data2RDD)
 
@@ -181,13 +175,13 @@ class StandardScalerSuite extends FunSuite with LocalSparkContext {
     val standardizer2 = new StandardScaler(withMean = true, withStd = false)
     val standardizer3 = new StandardScaler(withMean = false, withStd = true)
 
-    standardizer1.fit(dataRDD)
-    standardizer2.fit(dataRDD)
-    standardizer3.fit(dataRDD)
+    val model1 = standardizer1.fit(dataRDD)
+    val model2 = standardizer2.fit(dataRDD)
+    val model3 = standardizer3.fit(dataRDD)
 
-    val data1 = data.map(standardizer1.transform)
-    val data2 = data.map(standardizer2.transform)
-    val data3 = data.map(standardizer3.transform)
+    val data1 = data.map(model1.transform)
+    val data2 = data.map(model2.transform)
+    val data3 = data.map(model3.transform)
 
     assert(data1.forall(_.toArray.forall(_ == 0.0)),
       "The variance is zero, so the transformed result should be 0.0")