Skip to content

Commit

Permalink
adding layers
Browse files Browse the repository at this point in the history
  • Loading branch information
jonahshader committed Apr 1, 2022
1 parent a17dd02 commit fce98a3
Show file tree
Hide file tree
Showing 3 changed files with 64 additions and 5 deletions.
13 changes: 13 additions & 0 deletions core/src/main/java/com/jonahshader/systems/neuralnet/Layer.kt
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
package com.jonahshader.systems.neuralnet

import org.jetbrains.kotlinx.multik.ndarray.data.D1
import org.jetbrains.kotlinx.multik.ndarray.data.NDArray

/**
 * A single neural-network layer with internal state and a flat parameter vector.
 *
 * Parameters are exposed as a 1-D [NDArray] so that generic optimizers/mutators can
 * treat every layer uniformly, regardless of its internal parameter shapes.
 */
interface Layer {
// Processes [input] for one timestep of length [dt] and returns the layer's output
// vector. Stateful implementations may advance internal state here.
fun update(input: NDArray<Float, D1>, dt: Float) : NDArray<Float, D1>
// Randomly perturbs the layer's parameters; [amount] presumably scales the
// perturbation magnitude — TODO confirm against implementations.
fun mutateParameters(amount: Float)
// Returns all trainable parameters flattened into a single 1-D vector.
fun getParameters() : NDArray<Float, D1>
// Overwrites all trainable parameters from a flat vector; expected to be the same
// length/ordering as the vector produced by [getParameters].
fun setParameters(params: NDArray<Float, D1>)
// Returns a copy of this layer. NOTE(review): copy depth (deep vs shallow) is up to
// each implementation — confirm before relying on independence of clones.
fun clone() : Layer
// Clears any internal/recurrent state (not the parameters) so the layer can be
// reused on a fresh sequence.
fun reset()
}
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ open class DenseWashboardCyclic : Network {
private val hiddenToHiddenWeights: NDArray<Float, D2>
private val hiddenToOutputWeights: NDArray<Float, D2>

var useRK4 = true
override val multithreadable = true

constructor(inputSize: Int, hiddenSize: Int, outputSize: Int, rand: Random = Rand.randx) {
Expand Down Expand Up @@ -150,13 +151,18 @@ override fun getParameters(): List<Float> =
}

/**
 * Advances the washboard network by one timestep of length [dt].
 *
 * Defect fixed: this span was diff-extraction residue with the pre- and post-commit
 * bodies interleaved (hiddenBuffer/outputVector/Euler-step/hiddenOut each appeared
 * twice, one copy still using the fixed DT constant). Reconstructed the post-commit
 * body: one copy of each statement, integration gated behind the RK4 flag, and the
 * Euler step driven by the caller-supplied dt instead of the hard-coded DT.
 */
override fun update(dt: Float) {
    if (useRK4) {
        // TODO: RK4 integration not yet implemented — update is currently a no-op in
        // this mode. NOTE(review): useRK4 defaults to true, so the network does
        // nothing until this branch is filled in or the flag is cleared.
    } else {
        // Net drive into each hidden neuron: recurrent term + scaled input term + bias.
        hiddenBuffer = ((hiddenToHiddenWeights dot hiddenOut) + (inputToHiddenWeights dot inputVector) * INPUT_SCALING + globalBias) * WEIGHT_SCALE
        // outputVector = ((hiddenToOutputWeights dot hiddenOut) + outputBias) * (OUTPUT_SCALING) // scale here? might need custom scale?
        outputVector = ((hiddenToOutputWeights dot hiddenOut)) * (OUTPUT_SCALING) // scale here? might need custom scale?
        // Explicit Euler step of the washboard (pendulum-like) dynamics: integrate
        // angular velocity, then angle, using the caller-supplied timestep.
        hiddenAngleVel = hiddenAngleVel + (hiddenBuffer * lSigma.toFloat() - DAMPENING*hiddenAngleVel - (w_e.toFloat()/2) * (hiddenAngle*2f).sin()) * w_ex.toFloat() * dt
        hiddenAngle = hiddenAngle + hiddenAngleVel * dt

        // Hidden output is proportional to angular velocity (scaled by B, in femto units).
        hiddenOut = hiddenAngleVel.map { it * B.toFloat() * FEMTO.toFloat() }
    }
}

override fun clone() = DenseWashboardCyclic(this)
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
package com.jonahshader.systems.neuralnet.washboard

import com.jonahshader.systems.neuralnet.Layer
import com.jonahshader.systems.utils.Rand
import org.jetbrains.kotlinx.multik.api.*
import org.jetbrains.kotlinx.multik.ndarray.data.D1
import org.jetbrains.kotlinx.multik.ndarray.data.D2
import org.jetbrains.kotlinx.multik.ndarray.data.NDArray

/**
 * Washboard-model [Layer] mapping [inputSize] inputs to [outputSize] outputs.
 *
 * Only construction is implemented so far: a dense weight matrix initialized
 * uniformly in [0, 40), plus zeroed output and angle (theta) state vectors.
 * Every [Layer] operation is still an unimplemented stub.
 */
class WashboardLayer(inputSize: Int, outputSize: Int) : Layer {
    // Dense (outputSize x inputSize) weight matrix, randomly initialized in [0, 40).
    private var weights: NDArray<Float, D2> =
        mk.d2array(outputSize, inputSize) { Rand.randx.nextFloat() * 40f }

    // Most recent output vector, zeroed at construction.
    private var output: NDArray<Float, D1> = mk.zeros(outputSize)

    // Per-output-neuron angle state, zeroed at construction.
    private var theta: NDArray<Float, D1> = mk.d1array(outputSize) { 0f }

    override fun update(input: NDArray<Float, D1>, dt: Float): NDArray<Float, D1> =
        TODO("Not yet implemented")

    override fun mutateParameters(amount: Float) {
        TODO("Not yet implemented")
    }

    override fun getParameters(): NDArray<Float, D1> = TODO("Not yet implemented")

    override fun setParameters(params: NDArray<Float, D1>) {
        TODO("Not yet implemented")
    }

    override fun clone(): Layer = TODO("Not yet implemented")

    override fun reset() {
        TODO("Not yet implemented")
    }
}

0 comments on commit fce98a3

Please sign in to comment.