Skip to content

Update NeuralNet.pde #9

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 4 commits into
base: master
Choose a base branch
from
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
78 changes: 77 additions & 1 deletion SmartSnakesCombine/NeuralNet.pde
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,13 @@ class NeuralNet {
int iNodes;//No. of input nodes
int hNodes;//No. of hidden nodes
int oNodes;//No. of output nodes
int hLayers;//No. of hidden layers

Matrix whi;//matrix containing weights between the input nodes and the hidden nodes
Matrix whh;//matrix containing weights between the hidden nodes and the second layer hidden nodes
Matrix woh;//matrix containing weights between the second hidden layer nodes and the output nodes
Matrix weights[];//matrix containing all weights between the layers

//---------------------------------------------------------------------------------------------------------------------------------------------------------

//constructor
Expand Down Expand Up @@ -35,6 +38,41 @@ class NeuralNet {
whh.randomize();
woh.randomize();
}

//Constructor for a deep network with a configurable number of hidden layers.
//inputs: number of input nodes; hiddenNo: nodes per hidden layer;
//outputNo: number of output nodes; hiddenLayers: number of hidden layers (>= 1).
NeuralNet(int inputs, int hiddenNo, int outputNo, int hiddenLayers)
{
  //Set dimensions from parameters
  iNodes = inputs;
  oNodes = outputNo;
  hNodes = hiddenNo;
  hLayers = hiddenLayers;

  //Allocate one weight matrix per layer transition:
  //input->hidden, (hLayers-1) hidden->hidden, hidden->output.
  //The original code indexed into `weights` without ever allocating the array,
  //which would throw a NullPointerException.
  weights = new Matrix[hLayers + 1];

  //create first layer weights (input layer); +1 column for the bias weight
  weights[0] = new Matrix(hNodes, iNodes + 1);

  //create hidden layer weights; bias included for each hidden layer
  for (int i = 1; i < hLayers; i++)
  {
    weights[i] = new Matrix(hNodes, hNodes + 1);
  }

  //create output layer weights; +1 column for the bias weight
  weights[weights.length - 1] = new Matrix(oNodes, hNodes + 1);

  //set every matrix to random values (single loop covers all layers,
  //avoiding the original's double-randomize of weights[0])
  for (int i = 0; i < weights.length; i++)
  {
    weights[i].randomize();
  }
}
//---------------------------------------------------------------------------------------------------------------------------------------------------------

//mutation function for genetic algorithm
Expand Down Expand Up @@ -82,6 +120,44 @@ class NeuralNet {
return outputs.toArray();
}
//---------------------------------------------------------------------------------------------------------------------------------------------------------
//calculate the output values by feeding forward through the deep neural network
//inputsArr: raw input activations (length iNodes); returns the activated output
//layer as a float array (length oNodes).
float[] output_dnn(float[] inputsArr) {

  //convert array to matrix
  //Note: weights[0] has nothing to do with the conversion — singleColumnMatrixFromArray
  //is just a helper method on the Matrix class
  Matrix inputs = weights[0].singleColumnMatrixFromArray(inputsArr);

  //add bias, apply input-layer weights, activate (sigmoid), re-add bias.
  //`current` always holds the bias-augmented output of the last processed layer.
  Matrix current = weights[0].dot(inputs.addBias()).activate().addBias();

  //feed forward through the remaining hidden layers, CHAINING each layer's
  //output into the next. The original declared its result variables inside the
  //loop body (out of scope after the loop — compile error) and always fed
  //every layer from the first layer's output instead of the previous layer's.
  for (int i = 1; i < hLayers; i++) {
    current = weights[i].dot(current).activate().addBias();
  }

  //apply output-layer weights and pass through the activation function (sigmoid)
  Matrix outputs = weights[weights.length - 1].dot(current).activate();

  //convert to an array and return
  return outputs.toArray();
}
//---------------------------------------------------------------------------------------------------------------------------------------------------------

//crossover function for genetic algorithm
NeuralNet crossover(NeuralNet partner) {

Expand Down Expand Up @@ -186,4 +262,4 @@ class NeuralNet {
whh.fromArray(whhArr);
woh.fromArray(wohArr);
}
}
}