@@ -66,7 +66,7 @@ object MonotonicityConstraint {
  * @param monotonicityConstraint specifies if the sequence is increasing or decreasing
  */
 class IsotonicRegressionModel(
-    val predictions: Seq[WeightedLabeledPoint],
+    val predictions: Seq[(Double, Double, Double)],
     val monotonicityConstraint: MonotonicityConstraint)
   extends RegressionModel {
@@ -76,7 +76,7 @@ class IsotonicRegressionModel(
   override def predict(testData: Vector): Double = {
     // Take the highest of data points smaller than our feature or data point with lowest feature
     (predictions.head +:
-      predictions.filter(y => y.features.toArray.head <= testData.toArray.head)).last.label
+      predictions.filter(y => y._2 <= testData.toArray.head)).last._1
   }
 }
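The lookup in predict above relies on the tuple layout implied by the accessors: `_1` is the label and `_2` is the feature. It prepends the first prediction so there is always a fallback, keeps the stored points whose feature is at most the query feature, and returns the label of the last (largest-feature) match. A minimal standalone sketch of the same lookup, with made-up data:

// Sketch of the predict lookup, assuming predictions are (label, feature, weight)
// tuples sorted by feature in ascending order.
object PredictSketch {
  def predict(predictions: Seq[(Double, Double, Double)], feature: Double): Double =
    (predictions.head +: predictions.filter(_._2 <= feature)).last._1

  def main(args: Array[String]): Unit = {
    val predictions = Seq((1.0, 0.0, 1.0), (2.0, 1.0, 1.0), (2.0, 2.0, 1.0), (4.0, 3.0, 1.0))
    println(predict(predictions, 2.5))  // 2.0: highest stored point with feature <= 2.5
    println(predict(predictions, -1.0)) // 1.0: no match, falls back to the first prediction
  }
}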
@@ -95,7 +95,7 @@ trait IsotonicRegressionAlgorithm
    * @return isotonic regression model
    */
   protected def createModel(
-      predictions: Seq[WeightedLabeledPoint],
+      predictions: Seq[(Double, Double, Double)],
       monotonicityConstraint: MonotonicityConstraint): IsotonicRegressionModel

   /**
@@ -106,7 +106,7 @@ trait IsotonicRegressionAlgorithm
    * @return isotonic regression model
    */
   def run(
-      input: RDD[WeightedLabeledPoint],
+      input: RDD[(Double, Double, Double)],
       monotonicityConstraint: MonotonicityConstraint): IsotonicRegressionModel
 }
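Throughout the trait, WeightedLabeledPoint is replaced by a plain (Double, Double, Double) triple; judging by how predict and the sortBy below use the fields, the order is (label, feature, weight). A hedged sketch of how an input RDD for run might be built (the SparkContext `sc` and the sample values are assumptions, not part of the diff):

// Hypothetical input: (label, feature, weight) triples, given an existing SparkContext `sc`.
import org.apache.spark.rdd.RDD

val points: Seq[(Double, Double, Double)] =
  Seq((1.0, 1.0, 1.0), (3.0, 2.0, 1.0), (2.0, 3.0, 1.0), (4.0, 4.0, 1.0))
val input: RDD[(Double, Double, Double)] = sc.parallelize(points)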
@@ -117,15 +117,15 @@ class PoolAdjacentViolators private[mllib]
   extends IsotonicRegressionAlgorithm {

   override def run(
-      input: RDD[WeightedLabeledPoint],
+      input: RDD[(Double, Double, Double)],
       monotonicityConstraint: MonotonicityConstraint): IsotonicRegressionModel = {
     createModel(
       parallelPoolAdjacentViolators(input, monotonicityConstraint),
       monotonicityConstraint)
   }

   override protected def createModel(
-      predictions: Seq[WeightedLabeledPoint],
+      predictions: Seq[(Double, Double, Double)],
       monotonicityConstraint: MonotonicityConstraint): IsotonicRegressionModel = {
     new IsotonicRegressionModel(predictions, monotonicityConstraint)
   }
@@ -194,12 +194,12 @@ class PoolAdjacentViolators private[mllib]
    * @return result
    */
   private def parallelPoolAdjacentViolators(
-      testData: RDD[WeightedLabeledPoint],
-      monotonicityConstraint: MonotonicityConstraint): Seq[WeightedLabeledPoint] = {
+      testData: RDD[(Double, Double, Double)],
+      monotonicityConstraint: MonotonicityConstraint): Seq[(Double, Double, Double)] = {

     poolAdjacentViolators(
       testData
-        .sortBy(_.features.toArray.head)
+        .sortBy(_._2)
         .cache()
         .mapPartitions(it => poolAdjacentViolators(it.toArray, monotonicityConstraint).toIterator)
         .collect(), monotonicityConstraint)
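As the method above shows, the parallel step sorts the data by feature, runs the sequential pool-adjacent-violators routine inside each partition, collects the partial results to the driver, and runs the same routine once more over the concatenated output. The sequential poolAdjacentViolators helper is not part of this diff; the following is only an illustrative sketch of a standard weighted PAV pass for the isotonic (non-decreasing) case over (label, feature, weight) tuples, not the PR's implementation:

// Illustrative weighted pool-adjacent-violators pass (isotonic case only).
// Assumes the input is already sorted by feature and tuples are (label, feature, weight).
def pavSketch(points: Array[(Double, Double, Double)]): Array[(Double, Double, Double)] = {
  // A block keeps a pooled (weighted-mean) label for the points it covers.
  case class Block(var label: Double, var weight: Double, var members: List[(Double, Double, Double)])
  val blocks = scala.collection.mutable.ArrayBuffer.empty[Block]
  for (p @ (label, _, weight) <- points) {
    blocks += Block(label, weight, List(p))
    // Merge backwards while adjacent blocks violate the non-decreasing constraint.
    while (blocks.length > 1 && blocks(blocks.length - 2).label > blocks.last.label) {
      val right = blocks.remove(blocks.length - 1)
      val left = blocks.last
      val w = left.weight + right.weight
      left.label = (left.label * left.weight + right.label * right.weight) / w
      left.weight = w
      left.members = left.members ++ right.members
    }
  }
  // Every covered point takes its block's pooled label; features and weights are kept.
  blocks.toArray.flatMap(b => b.members.map { case (_, feature, weight) => (b.label, feature, weight) })
}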
@@ -224,7 +224,7 @@ object IsotonicRegression {
    * @param monotonicityConstraint Isotonic (increasing) or Antitonic (decreasing) sequence
    */
   def train(
-      input: RDD[WeightedLabeledPoint],
+      input: RDD[(Double, Double, Double)],
       monotonicityConstraint: MonotonicityConstraint = Isotonic): IsotonicRegressionModel = {
     new PoolAdjacentViolators().run(input, monotonicityConstraint)
   }
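Putting the pieces together, a hedged end-to-end usage sketch of the tuple-based API after this change (the SparkContext `sc`, the sample values, and the import paths are assumptions, not taken from the diff):

// Hypothetical usage; assumes an existing SparkContext `sc` and that the class
// lives in the usual MLlib regression package.
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.IsotonicRegression

val data = sc.parallelize(Seq(
  (1.0, 1.0, 1.0),  // (label, feature, weight)
  (3.0, 2.0, 1.0),
  (2.0, 3.0, 1.0),
  (5.0, 4.0, 1.0)))

// monotonicityConstraint defaults to Isotonic, per the train signature above.
val model = IsotonicRegression.train(data)
println(model.predict(Vectors.dense(2.5)))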