# Simple multi-layer perceptron / neural network in Python and NumPy
# For the class Data Science: Practical Deep Learning Concepts in Theano and TensorFlow
# https://deeplearningcourses.com/c/data-science-deep-learning-in-theano-tensorflow
# https://www.udemy.com/data-science-deep-learning-in-theano-tensorflow
import numpy as np


def forward(X, W1, b1, W2, b2):
    # hidden layer: affine transform followed by a rectifier (ReLU)
    # sigmoid alternative: Z = 1 / (1 + np.exp(-(X.dot(W1) + b1)))
    Z = X.dot(W1) + b1
    Z[Z < 0] = 0

    # output layer: affine transform followed by softmax
    A = Z.dot(W2) + b2
    expA = np.exp(A)
    Y = expA / expA.sum(axis=1, keepdims=True)
    return Y, Z
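
# A minimal sanity check for forward(): the layer sizes below are
# illustrative assumptions, not part of the original file. It verifies the
# output shapes and that each row of the softmax output Y sums to 1.
def check_forward_shapes():
    X = np.random.randn(4, 2)                    # 4 samples, 2 features
    W1, b1 = np.random.randn(2, 3), np.zeros(3)  # 2 inputs -> 3 hidden units
    W2, b2 = np.random.randn(3, 5), np.zeros(5)  # 3 hidden units -> 5 classes
    Y, Z = forward(X, W1, b1, W2, b2)
    assert Y.shape == (4, 5) and Z.shape == (4, 3)
    assert np.allclose(Y.sum(axis=1), 1)  # rows are probability distributions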

def derivative_w2(Z, T, Y):
    # gradient of the cross-entropy cost w.r.t. the hidden-to-output weights
    return Z.T.dot(Y - T)


def derivative_b2(T, Y):
    # gradient w.r.t. the output bias
    return (Y - T).sum(axis=0)


def derivative_w1(X, Z, T, Y, W2):
    # gradient w.r.t. the input-to-hidden weights; (Z > 0) is the ReLU derivative
    # sigmoid version: return X.T.dot((Y - T).dot(W2.T) * (Z * (1 - Z)))
    return X.T.dot((Y - T).dot(W2.T) * (Z > 0))


def derivative_b1(Z, T, Y, W2):
    # gradient w.r.t. the hidden bias
    # sigmoid version: return ((Y - T).dot(W2.T) * (Z * (1 - Z))).sum(axis=0)
    return ((Y - T).dot(W2.T) * (Z > 0)).sum(axis=0)
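
# A minimal end-to-end training sketch using the functions above. It assumes
# plain full-batch gradient descent on the cross-entropy cost with one-hot
# targets T; the toy Gaussian-cloud data, layer sizes, learning rate, and
# epoch count are illustrative assumptions, not part of the original file.
if __name__ == '__main__':
    np.random.seed(0)
    Nclass = 100       # samples per class
    D, M, K = 2, 3, 3  # input dim, hidden units, classes

    # three Gaussian clouds, one per class
    X = np.vstack([
        np.random.randn(Nclass, D) + np.array([0, -2]),
        np.random.randn(Nclass, D) + np.array([2, 2]),
        np.random.randn(Nclass, D) + np.array([-2, 2]),
    ])
    labels = np.repeat(np.arange(K), Nclass)
    T = np.zeros((Nclass * K, K))
    T[np.arange(Nclass * K), labels] = 1  # one-hot targets

    W1, b1 = np.random.randn(D, M), np.zeros(M)
    W2, b2 = np.random.randn(M, K), np.zeros(K)

    learning_rate = 1e-4
    for epoch in range(1000):
        Y, Z = forward(X, W1, b1, W2, b2)

        # compute all gradients before updating any parameter, since
        # derivative_w1/derivative_b1 read the pre-update W2
        gW2 = derivative_w2(Z, T, Y)
        gb2 = derivative_b2(T, Y)
        gW1 = derivative_w1(X, Z, T, Y, W2)
        gb1 = derivative_b1(Z, T, Y, W2)

        # the derivative_* functions return gradients of the cost
        # (they use Y - T), so descend by subtracting them
        W2 -= learning_rate * gW2
        b2 -= learning_rate * gb2
        W1 -= learning_rate * gW1
        b1 -= learning_rate * gb1

        if epoch % 100 == 0:
            cost = -np.sum(T * np.log(Y + 1e-12))
            acc = np.mean(Y.argmax(axis=1) == labels)
            print("epoch %d: cost %.3f, accuracy %.3f" % (epoch, cost, acc))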