-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathActivation.cpp
97 lines (92 loc) · 2.46 KB
/
Activation.cpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
#include "Activation.h"

#include <algorithm>
#include <cfloat>
#include <cmath>
/*
 * ReLu function implementation.
 * Applies ReLU(x) = max(0, x) element-wise.
 * @param mat matrix to calculate ReLu on.
 * @return Matrix with ReLu entries (same dimensions as mat).
 */
Matrix relu_impl(const Matrix &mat)
{
    int rows = mat.get_rows();
    int cols = mat.get_cols();
    Matrix res(rows, cols);
    for (int i = 0; i < rows; i++)
    {
        for (int j = 0; j < cols; j++)
        {
            // std::max expresses ReLU directly, replacing the original
            // conditional expression with assignments in both arms.
            res(i, j) = std::max(0.0f, mat(i, j));
        }
    }
    return res;
}
/*
 * softmax function for a row implementation.
 * Uses the max-subtraction trick so the exponentials cannot overflow.
 * @param mat matrix to calculate softmax on.
 * @return Matrix with softmax entries (each row sums to 1).
 */
Matrix softmax_row(const Matrix& mat)
{
    int n_rows = mat.get_rows();
    int n_cols = mat.get_cols();
    Matrix out(n_rows, n_cols);
    for (int r = 0; r < n_rows; r++)
    {
        // Largest entry in the row, subtracted for numerical stability.
        float row_max = -FLT_MAX;
        for (int c = 0; c < n_cols; c++)
        {
            if (mat(r, c) > row_max)
            {
                row_max = mat(r, c);
            }
        }
        // Store each shifted exponential once, accumulating the total.
        float total = 0.0;
        for (int c = 0; c < n_cols; c++)
        {
            out(r, c) = std::exp(mat(r, c) - row_max);
            total += out(r, c);
        }
        // Normalize in place so the row sums to 1.
        for (int c = 0; c < n_cols; c++)
        {
            out(r, c) /= total;
        }
    }
    return out;
}
/*
 * softmax function implementation.
 * Row vectors (rows == 1) are delegated to softmax_row; otherwise the
 * softmax is computed independently over each column.
 * @param mat matrix to calculate softmax on.
 * @return Matrix with softmax entries.
 */
Matrix softmax_impl(const Matrix &mat)
{
    int rows = mat.get_rows();
    int cols = mat.get_cols();
    if (rows == 1)
    {
        return softmax_row(mat);
    }
    Matrix res(rows, cols);
    for (int i = 0; i < cols; i++) // Iterate over columns
    {
        // Find the column max for numerical stability, mirroring
        // softmax_row: without the shift, std::exp overflows to inf for
        // moderately large inputs and the result becomes NaN. Subtracting
        // a constant from every entry leaves the softmax unchanged.
        float max_val = -FLT_MAX;
        for (int j = 0; j < rows; j++)
        {
            if (mat(j, i) > max_val)
            {
                max_val = mat(j, i);
            }
        }
        float exp_coordinate_sum = 0.0;
        for (int j = 0; j < rows; j++) // Sum exponentials in a column
        {
            exp_coordinate_sum += std::exp(mat(j, i) - max_val);
        }
        for (int j = 0; j < rows; j++) // Apply softmax formula per column
        {
            res(j, i) = std::exp(mat(j, i) - max_val) / exp_coordinate_sum;
        }
    }
    return res;
}
namespace activation
{
// Public handles for the activation functions; callers use these
// function pointers rather than the *_impl definitions directly.
activation_func relu = &relu_impl;
activation_func softmax = &softmax_impl;
}