Skip to content

Commit ac4d894

Browse files
authored
Done!
1 parent 22f12f0 commit ac4d894

29 files changed

+1074
-0
lines changed

Activation.cpp

+97
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,97 @@
1+
#include "Activation.h"
2+
#include <cfloat>
3+
4+
/*
5+
* ReLu function implementation.
6+
* @param mat matrix to calculate ReLu on.
7+
* @return Matrix with ReLu entries.
8+
*/
9+
Matrix relu_impl(const Matrix &mat)
10+
{
11+
int rows = mat.get_rows();
12+
int cols = mat.get_cols();
13+
Matrix res(rows, cols);
14+
for (int i = 0; i < rows; i++)
15+
{
16+
for (int j = 0; j < cols; j++)
17+
{
18+
// Calculate ReLu according to the given formula
19+
float x = mat(i, j);
20+
x >= 0 ? res(i, j) = x : res(i, j) = 0;
21+
}
22+
}
23+
return res;
24+
}
25+
26+
/*
27+
* softmax function for a row implementation.
28+
* @param mat matrix to calculate softmax on.
29+
* @return Matrix with softmax entries.
30+
*/
31+
Matrix softmax_row(const Matrix& mat)
32+
{
33+
int rows = mat.get_rows();
34+
int cols = mat.get_cols();
35+
Matrix res(rows, cols);
36+
for (int i = 0; i < rows; i++)
37+
{
38+
// Find the max value for numerical stability
39+
float max_val = -FLT_MAX;
40+
for (int j = 0; j < cols; j++)
41+
{
42+
if (mat(i, j) > max_val)
43+
{
44+
max_val = mat(i, j);
45+
}
46+
}
47+
// Calculate the sum of exponentials
48+
float exp_sum = 0.0;
49+
for (int j = 0; j < cols; j++)
50+
{
51+
exp_sum += std::exp(mat(i, j) - max_val);
52+
}
53+
// Calculate softmax probabilities
54+
for (int j = 0; j < cols; j++)
55+
{
56+
res(i, j) = std::exp(mat(i, j) - max_val) / exp_sum;
57+
}
58+
}
59+
60+
return res;
61+
}
62+
63+
/*
64+
* softmax function implementation.
65+
* @param mat matrix to calculate softmax on.
66+
* @return Matrix with softmax entries.
67+
*/
68+
Matrix softmax_impl(const Matrix &mat)
69+
{
70+
int rows = mat.get_rows();
71+
int cols = mat.get_cols();
72+
if (rows == 1)
73+
{
74+
return softmax_row(mat);
75+
}
76+
Matrix res(rows, cols);
77+
for (int i = 0; i < cols; i++) // Iterate over columns
78+
{
79+
float exp_coordinate_sum = 0.0;
80+
for (int j = 0; j < rows; j++) // Sum exponentials in a column
81+
{
82+
float x = mat(j, i);
83+
exp_coordinate_sum += std::exp(x);
84+
}
85+
for (int j = 0; j < rows; j++) // Apply softmax formula per column
86+
{
87+
float x = mat(j, i);
88+
res(j, i) = std::exp(x) / exp_coordinate_sum;
89+
}
90+
}
91+
return res;
92+
}
93+
94+
// Public activation-function table: exposes the file-local implementations
// through the function pointers declared extern in Activation.h.
namespace activation {
// Element-wise ReLU.
activation_func relu = relu_impl;
// Softmax (per row for a row vector, otherwise per column).
activation_func softmax = softmax_impl;
}

Activation.h

+16
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
// Activation.h
#ifndef ACTIVATION_H
#define ACTIVATION_H

#include "Matrix.h"
#include <cmath>

// Pointer to an activation function: maps a matrix to a matrix of the
// same dimensions. `using` alias instead of `typedef` (same type, C++11+
// idiom; the project builds with -std=c++14).
using activation_func = Matrix (*)(const Matrix& mat);

// Activation functions available to network layers; definitions live in
// Activation.cpp.
namespace activation {
extern activation_func relu;
extern activation_func softmax;
}

#endif //ACTIVATION_H

Dense.cpp

+49
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,49 @@
1+
#include "Dense.h"
2+
3+
/*
 * Dense constructor.
 * Copies the given matrices into the layer's own members.
 * NOTE(review): the parameters could be const Matrix& — they are only
 * copied from — but the declaration in Dense.h would have to change in
 * the same commit.
 * @param weights - the weight matrix of the layer.
 * @param bias - the bias matrix of the layer.
 * @param act_func - the activation function of the layer.
 * @return a new Dense object.
 */
Dense::Dense(Matrix& weights, Matrix& bias, activation_func act_func) :
_weights(weights), _bias(bias), _act_func(act_func)
{}
13+
14+
/*
15+
* getter function.
16+
* @return the weight matrix of the layer.
17+
*/
18+
Matrix Dense::get_weights () const
19+
{
20+
return _weights;
21+
}
22+
23+
/*
24+
* getter function.
25+
* @return the bias matrix of the layer.
26+
*/
27+
Matrix Dense::get_bias () const
28+
{
29+
return _bias;
30+
}
31+
32+
/*
33+
* getter function.
34+
* @return the activation function of the layer.
35+
*/
36+
activation_func Dense::get_activation () const
37+
{
38+
return _act_func;
39+
}
40+
41+
/*
42+
* operator() overload.
43+
* @param input - the input matrix to the layer.
44+
* @return the output matrix of the layer.
45+
*/
46+
Matrix Dense::operator()(const Matrix &input)
47+
{
48+
return Matrix(_act_func(_weights * input + _bias));
49+
}

Dense.h

+29
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,29 @@
1+
// Dense.h
#ifndef DENSE_H
#define DENSE_H

#include "Activation.h"

// A fully-connected network layer: stores a weight matrix, a bias matrix
// and an activation function, and computes act(weights * input + bias)
// via operator(). Definitions live in Dense.cpp.
class Dense {
 public:
  // Constructs the layer from its weights, bias and activation function
  // (the parameter is now named for documentation; callers are unaffected).
  Dense(Matrix& weights, Matrix& bias, activation_func act_func);
  // Copy of the weight matrix.
  Matrix get_weights() const;
  // Copy of the bias matrix.
  Matrix get_bias() const;
  // The layer's activation function pointer.
  activation_func get_activation() const;
  // Forward pass: act(weights * input + bias).
  Matrix operator()(const Matrix &input);

 private:
  Matrix _weights;
  Matrix _bias;
  activation_func _act_func;
};

#endif //DENSE_H

Makefile

+22
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
CC=g++
CXXFLAGS= -Wall -Wvla -Wextra -Werror -g -std=c++14
LDFLAGS= -lm
HEADERS= Matrix.h Activation.h Dense.h MlpNetwork.h
OBJS= Matrix.o Activation.o Dense.o MlpNetwork.o main.o

# Explicit compile rule for the C++ sources. The original file had an
# empty "%.o : %.c" pattern rule, but every source here is .cpp — that
# rule could never match and the build silently relied on make's builtin
# implicit rule instead.
%.o: %.cpp
	$(CC) $(CXXFLAGS) -c $< -o $@

# Link the final binary. Libraries ($(LDFLAGS), i.e. -lm) go AFTER the
# object files: linkers resolve symbols left to right, and putting
# libraries first fails with --as-needed or static archives.
mlpnetwork: $(OBJS)
	$(CC) -o $@ $^ $(LDFLAGS)

# Every object depends on all headers (coarse but safe).
$(OBJS): $(HEADERS)

.PHONY: clean
clean:
	rm -rf *.o
	rm -rf mlpnetwork
19+
20+
21+
22+

0 commit comments

Comments
 (0)