import numpy as np

class ActivationFunction:
    @staticmethod
    def sigmoid(W, X):
        '''
        Parameters
        ----------
        W : numpy array of size (D, M)
        X : numpy array of size (D, N)

        Returns
        -------
        numpy array of size (M, N)
        '''
        return 1 / (1 + np.exp(-W.T.dot(X)))

    @staticmethod
    def tanh(W, X):
        '''
        Parameters
        ----------
        W : numpy array of size (D, M)
        X : numpy array of size (D, N)

        Returns
        -------
        numpy array of size (M, N)
        '''
        x = W.T.dot(X)
        return (np.exp(x) - np.exp(-x)) / (np.exp(x) + np.exp(-x))

    @staticmethod
    def relu(W, X):
        '''
        Parameters
        ----------
        W : numpy array of size (D, M)
        X : numpy array of size (D, N)

        Returns
        -------
        numpy array of size (M, N)
        '''
        x = W.T.dot(X)
        return np.maximum(x, 0)

    @staticmethod
    def softmax(W, X):
        '''
        Parameters
        ----------
        W : numpy array of size (M, K)
        X : numpy array of size (M, N)

        Returns
        -------
        numpy array of size (K, N)
        '''
        x = W.T.dot(X)
        # Subtract the per-column max for numerical stability, then normalize
        # over the class axis (axis 0) so each column sums to 1.
        exp_x = np.exp(x - x.max(axis=0, keepdims=True))
        return exp_x / exp_x.sum(axis=0, keepdims=True)


if __name__ == '__main__':
    # Quick demo: a two-layer forward pass on random data.
    N = 10  # number of samples
    D = 2   # input dimension
    M = 4   # hidden-layer size
    K = 3   # number of output classes
    X = np.random.random((D, N)) - 0.5
    W1 = np.random.random((D, M))
    W2 = np.random.random((M, K))
    z1 = ActivationFunction.sigmoid(W1, X)       # hidden activations, shape (M, N)
    pred_y = ActivationFunction.softmax(W2, z1)  # class probabilities, shape (K, N)
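    # A small sanity check on the forward pass above (a sketch based on the
    # docstring shapes): the outputs should have the documented sizes and each
    # softmax column should be a valid probability distribution.
    assert z1.shape == (M, N)
    assert pred_y.shape == (K, N)
    assert np.allclose(pred_y.sum(axis=0), 1.0)
    print('z1 shape:', z1.shape, 'pred_y shape:', pred_y.shape)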