Commit ad9d1ce

use value

This commit replaces the heap-allocated bias pointer FullyConnect::B with a plain float value, dropping the malloc and the pointer dereferences at every use site.

1 parent 563df2a

File tree

2 files changed, 4 insertions(+), 5 deletions(-)

2 files changed

+4
-5
lines changed

cpp/layer.cc

Lines changed: 3 additions & 4 deletions
@@ -38,7 +38,6 @@ int FullyConnect::configure(int batch, float learning_rate, float v_param, Layer
   Layer::configure(batch, learning_rate, v_param, prevLayer, phase);
   this->Y.resize(this->batch*this->units);
   this->W.resize(this->input_shape*this->units);
-  this->B = (float*)malloc(sizeof(float));
   if (this->phase == TRAIN) {
     this->E.resize(this->batch*this->input_shape);
     this->delta_buf = (float*)malloc(sizeof(float)*this->batch*this->units);
@@ -50,7 +49,7 @@ int FullyConnect::configure(int batch, float learning_rate, float v_param, Layer
   for (int iu = 0; iu < this->input_shape*this->units; iu++) {
     this->W[iu] = rand(mt);
   }
-  *this->B = rand(mt);
+  this->B = rand(mt);
   return 1;
 }

@@ -63,7 +62,7 @@ void FullyConnect::forward(vector<float> *x) {
   for (int b = 0; b < this->batch; b++) {
     for (int i = 0; i < this->input_shape; i++) {
       for (int u = 0; u < this->units; u++) {
-        this->Y[b*this->units + u] += x->at(b*this->input_shape + i) * this->W[i*this->units + u] + *this->B;
+        this->Y[b*this->units + u] += x->at(b*this->input_shape + i) * this->W[i*this->units + u] + this->B;
       }
     }
   }
@@ -115,7 +114,7 @@ void FullyConnect::backward(vector<float> *e) {
 #pragma omp parallel for
   for (int b = 0; b < this->batch; b++) {
     for (int u = 0; u < this->units; u++) {
-      *this->B -= this->learning_rate * e->at(b*this->units + u)*this->batch_inv;
+      this->B -= this->learning_rate * e->at(b*this->units + u)*this->batch_inv;
    }
  }
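With B held by value, configure no longer allocates for the bias and every read or write of it drops a dereference; forward and backward are otherwise unchanged. For context, a minimal standalone sketch of the same scalar-bias-by-value pattern (hypothetical names, single sample; here the bias is added once per output unit, which differs slightly from the loop above):

#include <random>
#include <vector>
using std::vector;

// Sketch of a fully connected layer whose bias is a plain value member.
struct DenseSketch {
    int input_shape, units;
    vector<float> W;     // input_shape * units weights
    float B = 0.0f;      // scalar bias by value: no malloc/free pair to manage

    DenseSketch(int in, int out)
        : input_shape(in), units(out), W(static_cast<size_t>(in) * out) {
        std::mt19937 mt{42};
        std::uniform_real_distribution<float> rand(-0.1f, 0.1f);
        for (float &w : W) w = rand(mt);
        B = rand(mt);    // direct assignment, no *B dereference
    }

    // y[u] = sum_i x[i] * W[i*units + u] + B
    vector<float> forward(const vector<float> &x) const {
        vector<float> y(units, 0.0f);
        for (int i = 0; i < input_shape; i++)
            for (int u = 0; u < units; u++)
                y[u] += x[i] * W[i * units + u];
        for (int u = 0; u < units; u++) y[u] += B;
        return y;
    }
};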

cpp/layer.h

Lines changed: 1 addition & 1 deletion
@@ -40,7 +40,7 @@ class Layer {
 class FullyConnect : public Layer {
  public:
   vector<float> W;
-  float* B;
+  float B;
   FullyConnect(int input_shape, int units);
   ~FullyConnect();
   int configure(int batch, float learning_rate, float v_param, Layer* prevLayer, phase_t phase);
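One practical effect of the header change: the bias is now set and read directly on the object, and there is nothing left to free for B in ~FullyConnect (the destructor body is outside this diff; whether it previously freed B is an assumption). A usage sketch with illustrative sizes:

FullyConnect fc(784, 10);   // constructor signature from layer.h above
fc.B = 0.0f;                // plain member access, no dereference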
