File tree (expand/collapse): 1 file changed, +13 −9 lines changed
mllib/src/main/scala/org/apache/spark/ml/feature/StandardScaler.scala: 1 file changed, +13 −9 lines changed
Original file line number | Diff line number | Diff line change
@@ -30,14 +30,18 @@ import org.apache.spark.sql.types.{StructField, StructType}
/**
 * Params for [[StandardScaler]] and [[StandardScalerModel]].
 */
private[feature] trait StandardScalerParams extends Params with HasInputCol with HasOutputCol {

  /**
   * Whether to center the data with its mean before scaling.
   * @group param
   */
  val withMean: BooleanParam =
    new BooleanParam(this, "withMean", "Center data with mean before scaling")

  /**
   * Whether to scale the data to unit standard deviation.
   * @group param
   */
  val withStd: BooleanParam =
    new BooleanParam(this, "withStd", "Scale to unit standard deviation")
}
4246
4347
@@ -76,7 +80,7 @@ class StandardScaler extends Estimator[StandardScalerModel] with StandardScalerP
7680 }
7781
7882 override def transformSchema (schema : StructType , paramMap : ParamMap ): StructType = {
79- val map = this . paramMap ++ paramMap
83+ val map = extractParamMap( paramMap)
8084 val inputType = schema(map(inputCol)).dataType
8185 require(inputType.isInstanceOf [VectorUDT ],
8286 s " Input column ${map(inputCol)} must be a vector column " )
You can’t perform that action at this time.
0 commit comments