Skip to content

Commit 2e2b8df

Browse files
committed
refactor: rewrote feed and FeedForeward to run in the opposite direction for speed.
1 parent a79e540 commit 2e2b8df

File tree

4 files changed

+12
-19
lines changed

4 files changed

+12
-19
lines changed

header/net.h

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -108,7 +108,8 @@ class net //represents the vector of vectors of neurons
108108

109109
void AddToSequence(int iteration, int layer, int first, int last, void(neuron::*func)());
110110
// void AddToSequence(int iteration, int layer, void(neuron::*func)());
111-
111+
//TODO: make this the main way to run any training and prediction.
112+
//(higher-level functions like restricted Boltzmann set up the sequence, then call RunSequence)
112113
void RunSequence();
113114
ofstream& Save(ofstream& file, bool save_neuron_values =false);
114115
ifstream& Open(ifstream& file);

src/data_operations.cpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ using namespace std;
99
double data_operations::random_number_generator::generate(){
1010
mt19937 gen((random_device())());
1111
uniform_real_distribution<> dis(-1.0, 1.0);
12-
return dis(gen);
12+
return dis(gen);
1313
}
1414

1515
//todo:make these use random number generator

src/net.cpp

Lines changed: 5 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -328,15 +328,11 @@ void net::FeedForeward(neuron::ActivationType activation_type)
328328
vector<thread> threads;
329329
for(int i=0; i<layers.size(); i++)
330330
{
331-
// for(int l=0; l<layers[i].size(); l++)
332-
// {
333-
// (((layers[i][l])->_value)=(layers[i][l])->_buffer);//to avoid accessing and activating a neuron at the same time ( like if a neuron is connected to one in the same layer)
334-
// }
335331

336-
if (i==0)_activation_type = neuron::none;//if input layer, dont activate, just multiply by weights
337-
else{
338-
_activation_type = activation_type;
339-
}
332+
// if (i==0)_activation_type = neuron::none;//if input layer, dont activate, just multiply by weights
333+
// else{
334+
_activation_type = activation_type;
335+
// }
340336

341337
for(int j=0; j<layers[i].size(); j++)
342338
{
@@ -743,7 +739,7 @@ net* net::AppendNet(net* net_from, bool pasteOver)
743739
void net:: Connect(int layer_from, int column_from, int layer_to, int column_to, float weight)
744740
{
745741
layers.at(layer_from).at(column_from)->inweights.push_back(new link((*(layers.at(layer_to).at(column_to))), weight));
746-
cout<<"new link from "<<layer_from<<":"<<column_from<<" to "<<layer_to<<":"<<column_to<<endl;
742+
//cout<<"new link from "<<layer_from<<":"<<column_from<<" to "<<layer_to<<":"<<column_to<<endl;
747743
}
748744

749745
void net::Connect(int layer_from, int column_from, int layer_to, int column_to)

src/neuron.cpp

Lines changed: 4 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ using namespace std;
1313
*/
1414
neuron::~neuron()
1515
{
16-
cout<<"deleting: "<<this<<endl;
16+
//cout<<"deleting: "<<this<<endl;
1717
int j=0;
1818
for ( vector <link*>::iterator i=inweights.begin (); i !=inweights.end(); i++)
1919
{
@@ -43,6 +43,7 @@ void neuron::setin(double input)
4343
*/
4444
void neuron::activate(ActivationType activationtype)// activation function using a specific Activation type
4545
{
46+
_buffer+=_bias;
4647
switch(activationtype){
4748
case none:
4849
break;
@@ -193,7 +194,7 @@ void neuron::clear()
193194
_buffer=0;
194195
}
195196

196-
197+
//Todo: switch origin name to destination
197198
void neuron::feed(ActivationType activationtype){
198199

199200
activate(activationtype);
@@ -208,6 +209,7 @@ void neuron::feed(ActivationType activationtype){
208209
}
209210

210211

212+
//TODO rename this
211213
/**
212214
* @brief to get the error and pass it back
213215
*/
@@ -229,12 +231,6 @@ void neuron::throwup()
229231
}
230232
}
231233
}
232-
/*
233-
virtual void throwup(net::LearningType leaning_type)
234-
{
235-
236-
}
237-
*/
238234

239235

240236

0 commit comments

Comments
 (0)