Commit: change title
DorsaRoh committed Aug 10, 2024
1 parent 2689ce8 commit c492f7d
Showing 1 changed file with 9 additions and 9 deletions.
Neural-Networks/2-MNIST.ipynb — 18 changes: 9 additions & 9 deletions
@@ -551,17 +551,17 @@
"\n",
" # learning_rate: how big of a step is taken while updating weights and biases\n",
"\n",
" def backward(self, output_gradient, learning_rate):\n",
" # gradient: dC_dw = dC_da x da_dz x dz_dw\n",
" da_dz = self.relu_derivative(self.last_output) \n",
" dC_dz = output_gradient * da_dz \n",
" def backward(self, dC_da, learning_rate):\n",
" # dC_da is the gradient of the cost with respect to the activation of this layer\n",
" da_dz = self.relu_derivative(self.last_output)\n",
" dC_dz = dC_da * da_dz\n",
" dC_dw = np.outer(dC_dz, self.last_input)\n",
" dC_db = dC_dz \n",
" dC_db = dC_dz\n",
"\n",
" self.weights -= dC_dw * learning_rate\n",
" self.bias -= np.sum(dC_db, axis=0) * learning_rate\n",
"\n",
" self.weights -= learning_rate * dC_dw.reshape(self.weights.shape) # updates weights. gradient of the cost w.r.t. weight\n",
" self.bias -= learning_rate * np.sum(dC_db) # bias is a scalar & gradient could be a vector, so summed.\n",
" \n",
" dC_dx = np.dot(self.weights.T, dC_dz) # gradient of the cost w.r.t. input of neuron\n",
" dC_dx = np.dot(self.weights.T, dC_dz)\n",
" return dC_dx\n",
"\n",
"class Layer:\n",
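For readers following along, below is a minimal, runnable sketch of how the updated backward method might slot into the surrounding class. Only backward appears in the hunk above; the class name, __init__, relu, relu_derivative, and forward shown here are assumptions filled in so the example is self-contained, not code taken from the notebook.

import numpy as np

# Sketch of a single-neuron class around the committed backward().
# Everything except backward() is an assumption made for illustration.
class Neuron:
    def __init__(self, num_inputs):
        self.weights = np.random.randn(num_inputs) * 0.01  # one weight per input
        self.bias = 0.0                                     # scalar bias

    def relu(self, z):
        return np.maximum(0, z)

    def relu_derivative(self, a):
        # 1 where the activation is positive, 0 elsewhere
        return (a > 0).astype(float)

    def forward(self, inputs):
        self.last_input = inputs                            # cached for backward
        z = np.dot(self.weights, inputs) + self.bias
        self.last_output = self.relu(z)                     # cached for backward
        return self.last_output

    # backward() as it reads after this commit
    def backward(self, dC_da, learning_rate):
        # dC_da: gradient of the cost w.r.t. this neuron's activation
        da_dz = self.relu_derivative(self.last_output)
        dC_dz = dC_da * da_dz
        dC_dw = np.outer(dC_dz, self.last_input)            # chain rule: dC/dw = dC/dz * dz/dw
        dC_db = dC_dz
        self.weights -= learning_rate * dC_dw.reshape(self.weights.shape)
        self.bias -= learning_rate * np.sum(dC_db)          # bias is a scalar, so the gradient is summed
        dC_dx = np.dot(self.weights.T, dC_dz)               # gradient passed back toward the previous layer
        return dC_dx

# Tiny usage check: one forward/backward step on random data
neuron = Neuron(num_inputs=4)
x = np.random.randn(4)
a = neuron.forward(x)
grad_in = neuron.backward(dC_da=1.0, learning_rate=0.01)
print(a, grad_in.shape)

The reshape and np.sum calls in the committed version assume a single neuron whose weights form a 1-D vector and whose bias is a scalar, and the sketch mirrors those shapes.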
