Skip to content

Commit 2656721

Browse files
committed
Comments for clarity
1 parent 7335f3e commit 2656721

File tree

2 files changed

+21
-0
lines changed

2 files changed

+21
-0
lines changed

src/net.hpp

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,9 +6,16 @@
66
namespace nn{
77
using autodiff::Var;
88

9+
/**
Net
An abstract neural net object using automatic differentiation.
A forward function should be defined by the end-user within a child class.
*/
914
class Net{
1015
public:
16+
// Backpropagation using the chain rule and the AutoDiff module
1117
void backward(const Var &loss);
18+
// Parameter registration and creation
1219
Var& create_parameter(const Tensor& data) {
1320
parameters.push_front(Var(data));
1421
return parameters.front();

src/optim.hpp

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,10 @@ namespace opt{
99
using autodiff::Var;
1010
typedef std::forward_list<Var> ParameterList;
1111

12+
/**
Opt
Abstract optimiser storing a reference to a list of autodiff::Vars.
*/
1216
class Opt{
1317
public:
1418
Opt(ParameterList&);
@@ -18,18 +22,28 @@ class Opt{
1822
ParameterList& parameters;
1923
};
2024

25+
/**
26+
GD
27+
Gradient descent optimisation of a ParameterList
28+
*/
2129
class GD: public Opt{
2230
public:
2331
GD(ParameterList&, double = 0.1);
32+
// One optimisation step
2433
void step();
2534
~GD(){};
2635
private:
2736
double l_rate;
2837
};
2938

39+
/**
Moment
Gradient descent optimisation of a ParameterList with momentum.
*/
3043
class Moment : public Opt{
3144
public:
3245
Moment(ParameterList&, double = 0.1, double = 0.9);
46+
// One optimisation step
3347
void step();
3448
~Moment(){}
3549
private:

0 commit comments

Comments
 (0)