Commit 5c08159

check

User committed
1 parent 0cf1af0 commit 5c08159

1 file changed: 12 additions, 7 deletions

nlp_class2/neural_network2.py

@@ -96,18 +96,23 @@ def softmax(a):
 # # original: W1 = W1 - lr * inputs.T.dot(dhidden) # VxN NxD --> VxD
 
 # fastest way
+W1_copy = W1.copy()
 np.subtract.at(W1, inputs, lr * dhidden)
 
-# test this
-# i = 0
-# for w in inputs: # don't include end token
-#   W1[w] = W1[w] - lr * dhidden[i]
-#   i += 1
-
 # vs this
+# W1_test = W1_copy.copy()
 # oh_inputs = np.zeros((n - 1, V))
 # oh_inputs[np.arange(n - 1), sentence[:n-1]] = 1
-# W1 = W1 - lr * oh_inputs.T.dot(dhidden)
+# W1_test = W1_test - lr * oh_inputs.T.dot(dhidden)
+# assert(np.allclose(W1_test, W1))
+
+# vs this
+# W1_test = W1_copy.copy()
+# i = 0
+# for w in inputs: # don't include end token
+#   W1_test[w] = W1_test[w] - lr * dhidden[i]
+#   i += 1
+# assert(np.allclose(W1_test, W1))
 
 # keep track of the bigram loss
 # only do it for the first epoch to avoid redundancy
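
The asserts added here check that all three updates are equivalent, including when the same word index appears more than once in inputs: np.subtract.at applies its updates unbuffered, so repeated indices accumulate, just like the one-hot matrix multiply and the explicit loop. Below is a minimal standalone sketch of that check (not part of the commit; the shapes, seed, and sample inputs are made up for illustration, and sentence[:n-1] is taken to equal inputs):

import numpy as np

# Hypothetical small shapes for illustration (V = vocab size, D = hidden size).
V, D = 10, 4
n = 6                               # sentence length; n - 1 input positions
rng = np.random.default_rng(0)

W1 = rng.standard_normal((V, D))
dhidden = rng.standard_normal((n - 1, D))
inputs = np.array([3, 7, 3, 1, 7])  # deliberately repeats indices 3 and 7
lr = 0.1

W1_copy = W1.copy()

# fastest way: unbuffered in-place subtraction; repeated indices accumulate
np.subtract.at(W1, inputs, lr * dhidden)

# vs this: one-hot matrix multiply, VxN . NxD --> VxD
W1_test = W1_copy.copy()
oh_inputs = np.zeros((n - 1, V))
oh_inputs[np.arange(n - 1), inputs] = 1
W1_test = W1_test - lr * oh_inputs.T.dot(dhidden)
assert np.allclose(W1_test, W1)

# vs this: explicit loop over the input words
W1_test = W1_copy.copy()
for i, w in enumerate(inputs):
    W1_test[w] = W1_test[w] - lr * dhidden[i]
assert np.allclose(W1_test, W1)

print("all three updates agree")

For comparison, plain fancy indexing (W1[inputs] -= lr * dhidden) would fail these asserts whenever an index repeats, because NumPy buffers that operation and only the last update per index takes effect; np.subtract.at avoids that pitfall.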
