Commit be9fae2

Remove Seq2D type. Use Seq instead.
1 parent: d40fc12

9 files changed: 40 additions, 138 deletions

array2D/src/main/scala/com/thoughtworks/deeplearning/array2D/layers/ToSeq.scala

Lines changed: 13 additions & 11 deletions
@@ -7,7 +7,6 @@ import com.thoughtworks.deeplearning.Batch._
 import com.thoughtworks.deeplearning.Layer._
 import com.thoughtworks.deeplearning.array2D.utilities._
 import com.thoughtworks.deeplearning.{Batch, BatchId, BufferedLayer, Layer}
-import com.thoughtworks.deeplearning.seq2D.utilities.{Seq2D, Seq2DBatch}
 import org.nd4j.linalg.api.ndarray.INDArray
 import org.nd4j.linalg.factory.Nd4j
 import org.nd4j.linalg.ops.transforms.Transforms
@@ -19,8 +18,10 @@ final case class ToSeq[Input0 <: Batch](operand: Layer.Aux[Input0, Array2D#Batch
   override type Input = Input0

   final class BufferedBatch private[ToSeq] (override val input: BatchId.Aux[Input], upstream: Array2D#Batch)
-      extends ReferenceCount
-      with Seq2DBatch {
+      extends ReferenceCount {
+
+    override type Data = scala.Seq[scala.Seq[Eval[scala.Double]]]
+    override type Delta = (scala.Int, (scala.Int, Eval[scala.Double]))

     private def zeroDelta =
       upstream.value.map { upstreamData =>
@@ -44,19 +45,20 @@ final case class ToSeq[Input0 <: Batch](operand: Layer.Aux[Input0, Array2D#Batch

     override def backward(delta: Delta): Unit = {
       synchronized {
-        val (i, j, value) = delta.value
-        upstreamDelta.value(i, j) = upstreamDelta
-          .value(i, j) + value // Cannot use += because of https://issues.scala-lang.org/browse/SI-10021
+        val (i, (j, value)) = delta
+        // Cannot use += because of https://issues.scala-lang.org/browse/SI-10021
+        upstreamDelta.value(i, j) = upstreamDelta.value(i, j) + value.value
       }
     }

     override val value: Data = {
-      upstream.value.map { ndarray: INDArray =>
-        val doubleArray = ndarray.data.asDouble()
-        for (i <- (0 until ndarray.rows).view) yield {
-          doubleArray.view(i * ndarray.columns, (i + 1) * ndarray.columns)
+      val ndarray = upstream.value.value
+      val doubleArray = ndarray.data.asDouble()
+      for (i <- (0 until ndarray.rows).view) yield {
+        doubleArray.view(i * ndarray.columns, (i + 1) * ndarray.columns).map { doubleValue =>
+          Eval.now(doubleValue)
         }
-      }.memoize
+      }
     }
   }
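
Note on the change above: with Seq2DBatch removed, BufferedBatch now declares its Data and Delta types inline. The forward value is a row-major scala.Seq[scala.Seq[Eval[scala.Double]]], and a delta is the nested pair (rowIndex, (columnIndex, increment)). The following is a minimal sketch of how such a delta is accumulated; it is not the project's code: a plain Array[Array[Double]] stands in for the nd4j INDArray held by upstreamDelta, and cats.Eval is assumed, matching the Eval.now call in the diff.

    import cats.Eval

    object DeltaSketch {
      // Delta shape used by the new ToSeq batch: (rowIndex, (columnIndex, increment)).
      type Delta = (Int, (Int, Eval[Double]))

      // Apply one delta to a mutable 2-D buffer, mirroring the backward step in the diff.
      def accumulate(buffer: Array[Array[Double]], delta: Delta): Unit = {
        val (i, (j, value)) = delta
        // The real code spells out the addition instead of += because of SI-10021.
        buffer(i)(j) = buffer(i)(j) + value.value
      }
    }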

array2D/src/main/scala/com/thoughtworks/deeplearning/array2D/package.scala

Lines changed: 4 additions & 3 deletions
@@ -5,7 +5,6 @@ import com.thoughtworks.deeplearning.dsl._
 import com.thoughtworks.deeplearning.array2D.layers._
 import com.thoughtworks.deeplearning.array2D.optimizers.Optimizer
 import com.thoughtworks.deeplearning.double.utilities.Double
-import com.thoughtworks.deeplearning.seq2D.utilities.Seq2D
 import org.nd4j.linalg.api.ndarray.INDArray

 import scala.language.implicitConversions
@@ -16,7 +15,7 @@ import scala.language.implicitConversions
 package object array2D {

   /** @template */
-  type Array2D = utilities.Array2D
+  type Array2D = com.thoughtworks.deeplearning.array2D.utilities.Array2D

   implicit def `max(Array2D,Double)`[Left, Right, Input <: Batch]
     : max.Case.Aux[Layer.Aux[Input, Array2D#Batch], Layer.Aux[Input, Double#Batch], Layer.Aux[Input, Array2D#Batch]] =
@@ -133,7 +132,9 @@ package object array2D {
       Negative(differentiable)
     }

-    def toSeq: Layer.Aux[Input, Seq2D#Batch] = {
+    def toSeq: Layer.Aux[
+      Input,
+      Batch.Aux[scala.Seq[scala.Seq[Eval[scala.Double]]], (scala.Int, (scala.Int, Eval[scala.Double]))]] = {
       ToSeq(differentiable)
     }
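
Note on the change above: toSeq now spells out the nested-Seq batch type instead of referring to Seq2D#Batch. A REPL-style sketch of a value of that Data type, with made-up numbers and cats.Eval assumed, shows why call sites switch from rowSeq(0, 1) to rowSeq(0)(1):

    import cats.Eval

    // Row-major nested Seq: the Data type that toSeq now exposes.
    val rows: scala.Seq[scala.Seq[Eval[scala.Double]]] =
      scala.Seq(
        scala.Seq(Eval.now(1.0), Eval.now(2.0)),
        scala.Seq(Eval.now(3.0), Eval.now(4.0)))

    // Index the row first, then the column.
    val cell: scala.Double = rows(0)(1).value // 2.0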

array2D/src/main/scala/com/thoughtworks/deeplearning/seq2D/layers/Get.scala

Lines changed: 0 additions & 39 deletions
This file was deleted.

array2D/src/main/scala/com/thoughtworks/deeplearning/seq2D/package.scala

Lines changed: 0 additions & 27 deletions
This file was deleted.

array2D/src/main/scala/com/thoughtworks/deeplearning/seq2D/utilities/Seq2DBatch.scala

Lines changed: 0 additions & 19 deletions
This file was deleted.

array2D/src/main/scala/com/thoughtworks/deeplearning/seq2D/utilities/package.scala

Lines changed: 0 additions & 18 deletions
This file was deleted.

double/src/main/scala/com/thoughtworks/deeplearning/double/package.scala

Lines changed: 1 addition & 1 deletion
@@ -15,7 +15,7 @@ import scala.language.implicitConversions
 package object double {

   /** @template */
-  type Double = utilities.Double
+  type Double = com.thoughtworks.deeplearning.double.utilities.Double

   implicit def liftNativeDoubleToLayer[InputData, InputDelta](implicit inputType: Type[InputData, InputDelta])
     : ToLayer.Aux[scala.Double, Batch.Aux[InputData, InputDelta], Eval[scala.Double], Eval[scala.Double]] =

src/test/scala/com/thoughtworks/deeplearning/FortuneTeller.scala

Lines changed: 16 additions & 14 deletions
@@ -5,8 +5,8 @@ import com.thoughtworks.deeplearning.Layer._
 import com.thoughtworks.deeplearning.Batch._
 import com.thoughtworks.deeplearning.hlist._
 import com.thoughtworks.deeplearning.boolean._
-import com.thoughtworks.deeplearning.seq2D._
 import com.thoughtworks.deeplearning.double._
+import com.thoughtworks.deeplearning.seq._
 import com.thoughtworks.deeplearning.array2D._
 import com.thoughtworks.deeplearning.dsl._
 import com.thoughtworks.deeplearning.dsl.layers.{Identity, Literal}
@@ -26,6 +26,8 @@ import scala.util.Random
  */
 object FortuneTeller {

+  type Seq2D = Seq[Seq[Double]]
+
   type Nullable[A <: Type[_, _]] = HNil :+: A :+: CNil

   type InputField[A <: Type[_, _]] = HNil :+: A :+: CNil
@@ -84,19 +86,19 @@
   } {
     _.head.choice { _ =>
       // probabilityLossNetwork.compose()
-      probabilityLossNetwork.compose(min(exp(-rowSeq(0, 0)), 1.0))
-      // max(1.0 - rowSeq(0, 0), 0.0)
+      probabilityLossNetwork.compose(min(exp(-rowSeq(0)(0)), 1.0))
+      // max(1.0 - rowSeq(0)(0), 0.0)
     } { inr =>
       val expectedValue = inr.head
-      (rowSeq(0, 0) + abs(rowSeq(0, 1) - expectedValue)): rowAndExpectedLabel.To[Double]
+      (rowSeq(0)(0) + abs(rowSeq(0)(1) - expectedValue)): rowAndExpectedLabel.To[Double]
     }
   }

   val loss1 = expectedLabelField1.choice { _ =>
     0.0 // Drop out
   } { expectedEnum =>
-    val score0 = rowSeq(0, 2)
-    val score1 = rowSeq(0, 3)
+    val score0 = rowSeq(0)(2)
+    val score1 = rowSeq(0)(3)
     val sum = score0 + score1 + 0.00000001
     val probability0 = score0 / sum
     val probability1 = score1 / sum
@@ -110,15 +112,15 @@
   val loss2 = expectedLabelField2.choice { _ =>
     0.0 // Drop out
   } { expectedDouble =>
-    abs(expectedDouble.head - rowSeq(0, 4) + 1.0)
+    abs(expectedDouble.head - rowSeq(0)(4) + 1.0)
   }

   val loss3 = expectedLabelField3.choice { _ =>
     0.0 // Drop out
   } { expectedEnum =>
-    val score0 = rowSeq(0, 5)
-    val score1 = rowSeq(0, 6)
-    val score2 = rowSeq(0, 7)
+    val score0 = rowSeq(0)(5)
+    val score1 = rowSeq(0)(6)
+    val score2 = rowSeq(0)(7)
     val sum = score0 + score1 + score2 + 0.00000001
     val probability0 = score0 / sum
     val probability1 = score1 / sum
@@ -142,10 +144,10 @@

   def array2DToRow(implicit input: Array2D): input.To[PredictionResult] = {
     val rowSeq = input.toSeq
-    val field0: input.To[Double :: Double :: HNil] = min(rowSeq(0, 0), 1.0) :: rowSeq(0, 1) :: HNil
-    val field1: input.To[Enum0Prediction] = rowSeq(0, 2) :: rowSeq(0, 3) :: HNil
-    val field2: input.To[Double] = rowSeq(0, 4)
-    val field3 = rowSeq(0, 5) :: rowSeq(0, 6) :: rowSeq(0, 7) :: HNil
+    val field0: input.To[Double :: Double :: HNil] = min(rowSeq(0)(0), 1.0) :: rowSeq(0)(1) :: HNil
+    val field1: input.To[Enum0Prediction] = rowSeq(0)(2) :: rowSeq(0)(3) :: HNil
+    val field2: input.To[Double] = rowSeq(0)(4)
+    val field3 = rowSeq(0)(5) :: rowSeq(0)(6) :: rowSeq(0)(7) :: HNil
     field0 :: field1 :: field2 :: field3 :: HNil
   }
   val array2DToRowNetwork = array2DToRow

src/test/scala/com/thoughtworks/deeplearning/XorSpec.scala

Lines changed: 6 additions & 6 deletions
@@ -11,7 +11,7 @@ import com.thoughtworks.deeplearning.Layer._
 import com.thoughtworks.deeplearning.Batch._
 import com.thoughtworks.deeplearning.hlist._
 import com.thoughtworks.deeplearning.boolean._
-import com.thoughtworks.deeplearning.seq2D._
+import com.thoughtworks.deeplearning.seq._
 import com.thoughtworks.deeplearning.double._
 import com.thoughtworks.deeplearning.array2D._
 import com.thoughtworks.deeplearning.dsl._
@@ -123,7 +123,7 @@ final class XorSpec extends FreeSpec with Matchers {

   def decode(implicit row: Array2D): row.To[XorSpec.Output] = {
     val rowSeq = row.toSeq
-    rowSeq(0, 0) :: rowSeq(0, 1) :: rowSeq(0, 2) :: HNil
+    rowSeq(0)(0) :: rowSeq(0)(1) :: rowSeq(0)(2) :: HNil
   }

   val decodeNetwork = decode
@@ -150,7 +150,7 @@
       0.0
     } {
       _.choice { expectedValue =>
-        val value = predictionResult(0, 0)
+        val value = predictionResult(0)(0)
         -expectedValue * log(value) - (1.0 - expectedValue) * log(1.0 - value)
       } { _ =>
         `throw`(new IllegalArgumentException)
@@ -161,7 +161,7 @@
       0.0
     } {
       _.choice { expectedValue =>
-        val value = predictionResult(0, 1)
+        val value = predictionResult(0)(1)
         -expectedValue * log(value) - (1.0 - expectedValue) * log(1.0 - value)
       } { _ =>
         `throw`(new IllegalArgumentException)
@@ -172,7 +172,7 @@
       0.0
     } {
       _.choice { expectedValue =>
-        val value = predictionResult(0, 2)
+        val value = predictionResult(0)(2)
         -expectedValue * log(value) - (1.0 - expectedValue) * log(1.0 - value)
       } { _ =>
         `throw`(new IllegalArgumentException)
@@ -195,7 +195,7 @@
     val field0 = Random.nextBoolean()
     val field1 = Random.nextBoolean()
     val field2 = field0 ^ field1
-    val Seq(dropout0, dropout1, dropout2) = Seq.fill(3)(false).updated(Random.nextInt(3), true)
+    val scala.Seq(dropout0, dropout1, dropout2) = scala.Seq.fill(3)(false).updated(Random.nextInt(3), true)
     def input(isDropout: scala.Boolean, value: scala.Boolean) = {
       if (isDropout) {
         Inl(HNil)
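
Note on the last hunk: alongside the switch from the seq2D import to the seq package, the test now qualifies the standard-library collection as scala.Seq, presumably because the new import brings Seq-named definitions into scope. The qualified form is otherwise equivalent; a standalone sketch of that dropout-selection line (it sets exactly one of the three flags to true):

    import scala.util.Random

    // Extractor pattern on scala.Seq; exactly one flag ends up true.
    val scala.Seq(dropout0, dropout1, dropout2) =
      scala.Seq.fill(3)(false).updated(Random.nextInt(3), true)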
