
Commit ab15e28

Add runtime output to notebooks.
Signed-off-by: Norman Heckscher <norman.heckscher@gmail.com>
1 parent 9650209 commit ab15e28

3 files changed: +345 -6 lines changed

notebooks/3_NeuralNetworks/bidirectional_rnn.ipynb

Lines changed: 89 additions & 2 deletions
@@ -134,11 +134,98 @@
    },
    {
     "cell_type": "code",
-    "execution_count": null,
+    "execution_count": 4,
     "metadata": {
      "collapsed": false
     },
-    "outputs": [],
+    "outputs": [
+     {
+      "name": "stdout",
+      "output_type": "stream",
+      "text": [
+       "Iter 1280, Minibatch Loss= 1.557283, Training Accuracy= 0.49219\n",
+       "Iter 2560, Minibatch Loss= 1.358445, Training Accuracy= 0.56250\n",
+       "Iter 3840, Minibatch Loss= 1.043732, Training Accuracy= 0.64062\n",
+       "Iter 5120, Minibatch Loss= 0.796770, Training Accuracy= 0.72656\n",
+       "Iter 6400, Minibatch Loss= 0.626206, Training Accuracy= 0.72656\n",
+       "Iter 7680, Minibatch Loss= 1.025919, Training Accuracy= 0.65625\n",
+       "Iter 8960, Minibatch Loss= 0.744850, Training Accuracy= 0.76562\n",
+       "Iter 10240, Minibatch Loss= 0.530111, Training Accuracy= 0.84375\n",
+       "Iter 11520, Minibatch Loss= 0.383806, Training Accuracy= 0.86719\n",
+       "Iter 12800, Minibatch Loss= 0.607816, Training Accuracy= 0.82812\n",
+       "Iter 14080, Minibatch Loss= 0.410879, Training Accuracy= 0.89062\n",
+       "Iter 15360, Minibatch Loss= 0.335351, Training Accuracy= 0.89844\n",
+       "Iter 16640, Minibatch Loss= 0.428004, Training Accuracy= 0.91406\n",
+       "Iter 17920, Minibatch Loss= 0.307468, Training Accuracy= 0.91406\n",
+       "Iter 19200, Minibatch Loss= 0.249527, Training Accuracy= 0.92188\n",
+       "Iter 20480, Minibatch Loss= 0.148163, Training Accuracy= 0.96094\n",
+       "Iter 21760, Minibatch Loss= 0.445275, Training Accuracy= 0.83594\n",
+       "Iter 23040, Minibatch Loss= 0.173083, Training Accuracy= 0.93750\n",
+       "Iter 24320, Minibatch Loss= 0.373696, Training Accuracy= 0.87500\n",
+       "Iter 25600, Minibatch Loss= 0.509869, Training Accuracy= 0.85938\n",
+       "Iter 26880, Minibatch Loss= 0.198096, Training Accuracy= 0.92969\n",
+       "Iter 28160, Minibatch Loss= 0.228221, Training Accuracy= 0.92188\n",
+       "Iter 29440, Minibatch Loss= 0.280088, Training Accuracy= 0.89844\n",
+       "Iter 30720, Minibatch Loss= 0.300495, Training Accuracy= 0.91406\n",
+       "Iter 32000, Minibatch Loss= 0.171746, Training Accuracy= 0.95312\n",
+       "Iter 33280, Minibatch Loss= 0.263745, Training Accuracy= 0.89844\n",
+       "Iter 34560, Minibatch Loss= 0.177300, Training Accuracy= 0.93750\n",
+       "Iter 35840, Minibatch Loss= 0.160621, Training Accuracy= 0.95312\n",
+       "Iter 37120, Minibatch Loss= 0.321745, Training Accuracy= 0.91406\n",
+       "Iter 38400, Minibatch Loss= 0.188322, Training Accuracy= 0.93750\n",
+       "Iter 39680, Minibatch Loss= 0.104025, Training Accuracy= 0.96875\n",
+       "Iter 40960, Minibatch Loss= 0.291053, Training Accuracy= 0.89062\n",
+       "Iter 42240, Minibatch Loss= 0.131189, Training Accuracy= 0.95312\n",
+       "Iter 43520, Minibatch Loss= 0.154949, Training Accuracy= 0.92969\n",
+       "Iter 44800, Minibatch Loss= 0.150411, Training Accuracy= 0.93750\n",
+       "Iter 46080, Minibatch Loss= 0.117008, Training Accuracy= 0.96094\n",
+       "Iter 47360, Minibatch Loss= 0.181344, Training Accuracy= 0.96094\n",
+       "Iter 48640, Minibatch Loss= 0.209197, Training Accuracy= 0.94531\n",
+       "Iter 49920, Minibatch Loss= 0.159350, Training Accuracy= 0.96094\n",
+       "Iter 51200, Minibatch Loss= 0.124001, Training Accuracy= 0.95312\n",
+       "Iter 52480, Minibatch Loss= 0.165183, Training Accuracy= 0.94531\n",
+       "Iter 53760, Minibatch Loss= 0.046438, Training Accuracy= 0.97656\n",
+       "Iter 55040, Minibatch Loss= 0.199995, Training Accuracy= 0.91406\n",
+       "Iter 56320, Minibatch Loss= 0.057071, Training Accuracy= 0.97656\n",
+       "Iter 57600, Minibatch Loss= 0.177065, Training Accuracy= 0.92188\n",
+       "Iter 58880, Minibatch Loss= 0.091666, Training Accuracy= 0.96094\n",
+       "Iter 60160, Minibatch Loss= 0.069232, Training Accuracy= 0.96875\n",
+       "Iter 61440, Minibatch Loss= 0.127353, Training Accuracy= 0.94531\n",
+       "Iter 62720, Minibatch Loss= 0.095795, Training Accuracy= 0.96094\n",
+       "Iter 64000, Minibatch Loss= 0.202651, Training Accuracy= 0.96875\n",
+       "Iter 65280, Minibatch Loss= 0.118779, Training Accuracy= 0.95312\n",
+       "Iter 66560, Minibatch Loss= 0.043173, Training Accuracy= 0.98438\n",
+       "Iter 67840, Minibatch Loss= 0.152280, Training Accuracy= 0.95312\n",
+       "Iter 69120, Minibatch Loss= 0.085301, Training Accuracy= 0.96875\n",
+       "Iter 70400, Minibatch Loss= 0.093421, Training Accuracy= 0.96094\n",
+       "Iter 71680, Minibatch Loss= 0.096358, Training Accuracy= 0.96875\n",
+       "Iter 72960, Minibatch Loss= 0.053386, Training Accuracy= 0.98438\n",
+       "Iter 74240, Minibatch Loss= 0.065237, Training Accuracy= 0.97656\n",
+       "Iter 75520, Minibatch Loss= 0.228090, Training Accuracy= 0.92188\n",
+       "Iter 76800, Minibatch Loss= 0.106751, Training Accuracy= 0.95312\n",
+       "Iter 78080, Minibatch Loss= 0.187795, Training Accuracy= 0.94531\n",
+       "Iter 79360, Minibatch Loss= 0.092611, Training Accuracy= 0.96094\n",
+       "Iter 80640, Minibatch Loss= 0.137386, Training Accuracy= 0.96875\n",
+       "Iter 81920, Minibatch Loss= 0.106634, Training Accuracy= 0.98438\n",
+       "Iter 83200, Minibatch Loss= 0.111749, Training Accuracy= 0.94531\n",
+       "Iter 84480, Minibatch Loss= 0.191184, Training Accuracy= 0.94531\n",
+       "Iter 85760, Minibatch Loss= 0.063982, Training Accuracy= 0.96094\n",
+       "Iter 87040, Minibatch Loss= 0.092380, Training Accuracy= 0.96875\n",
+       "Iter 88320, Minibatch Loss= 0.089899, Training Accuracy= 0.97656\n",
+       "Iter 89600, Minibatch Loss= 0.141107, Training Accuracy= 0.94531\n",
+       "Iter 90880, Minibatch Loss= 0.075549, Training Accuracy= 0.96094\n",
+       "Iter 92160, Minibatch Loss= 0.186539, Training Accuracy= 0.94531\n",
+       "Iter 93440, Minibatch Loss= 0.079639, Training Accuracy= 0.97656\n",
+       "Iter 94720, Minibatch Loss= 0.156895, Training Accuracy= 0.95312\n",
+       "Iter 96000, Minibatch Loss= 0.088042, Training Accuracy= 0.97656\n",
+       "Iter 97280, Minibatch Loss= 0.076670, Training Accuracy= 0.96875\n",
+       "Iter 98560, Minibatch Loss= 0.051336, Training Accuracy= 0.97656\n",
+       "Iter 99840, Minibatch Loss= 0.086923, Training Accuracy= 0.98438\n",
+       "Optimization Finished!\n",
+       "Testing Accuracy: 0.960938\n"
+      ]
+     }
+    ],
     "source": [
      "# Launch the graph\n",
      "with tf.Session() as sess:\n",

notebooks/3_NeuralNetworks/convolutional_network.ipynb

Lines changed: 167 additions & 2 deletions
@@ -152,11 +152,176 @@
    },
    {
     "cell_type": "code",
-    "execution_count": null,
+    "execution_count": 5,
     "metadata": {
      "collapsed": false
     },
-    "outputs": [],
+    "outputs": [
+     {
+      "name": "stdout",
+      "output_type": "stream",
+      "text": [
+       "Iter 1280, Minibatch Loss= 26574.855469, Training Accuracy= 0.25781\n",
+       "Iter 2560, Minibatch Loss= 11454.494141, Training Accuracy= 0.49219\n",
+       "Iter 3840, Minibatch Loss= 10070.515625, Training Accuracy= 0.55469\n",
+       "Iter 5120, Minibatch Loss= 4008.586426, Training Accuracy= 0.78125\n",
+       "Iter 6400, Minibatch Loss= 3148.004639, Training Accuracy= 0.80469\n",
+       "Iter 7680, Minibatch Loss= 6740.440430, Training Accuracy= 0.71875\n",
+       "Iter 8960, Minibatch Loss= 4103.991699, Training Accuracy= 0.80469\n",
+       "Iter 10240, Minibatch Loss= 2631.275391, Training Accuracy= 0.85938\n",
+       "Iter 11520, Minibatch Loss= 1428.798828, Training Accuracy= 0.91406\n",
+       "Iter 12800, Minibatch Loss= 3909.772705, Training Accuracy= 0.78906\n",
+       "Iter 14080, Minibatch Loss= 1423.095947, Training Accuracy= 0.88281\n",
+       "Iter 15360, Minibatch Loss= 1524.569824, Training Accuracy= 0.89062\n",
+       "Iter 16640, Minibatch Loss= 2234.539795, Training Accuracy= 0.86719\n",
+       "Iter 17920, Minibatch Loss= 933.932800, Training Accuracy= 0.90625\n",
+       "Iter 19200, Minibatch Loss= 2039.046021, Training Accuracy= 0.89062\n",
+       "Iter 20480, Minibatch Loss= 674.179932, Training Accuracy= 0.95312\n",
+       "Iter 21760, Minibatch Loss= 3778.958984, Training Accuracy= 0.82812\n",
+       "Iter 23040, Minibatch Loss= 1038.217773, Training Accuracy= 0.91406\n",
+       "Iter 24320, Minibatch Loss= 1689.513672, Training Accuracy= 0.89062\n",
+       "Iter 25600, Minibatch Loss= 1800.954956, Training Accuracy= 0.85938\n",
+       "Iter 26880, Minibatch Loss= 1086.292847, Training Accuracy= 0.90625\n",
+       "Iter 28160, Minibatch Loss= 656.042847, Training Accuracy= 0.94531\n",
+       "Iter 29440, Minibatch Loss= 1210.589844, Training Accuracy= 0.91406\n",
+       "Iter 30720, Minibatch Loss= 1099.606323, Training Accuracy= 0.90625\n",
+       "Iter 32000, Minibatch Loss= 1073.128174, Training Accuracy= 0.92969\n",
+       "Iter 33280, Minibatch Loss= 518.844543, Training Accuracy= 0.95312\n",
+       "Iter 34560, Minibatch Loss= 540.856689, Training Accuracy= 0.92188\n",
+       "Iter 35840, Minibatch Loss= 353.990906, Training Accuracy= 0.97656\n",
+       "Iter 37120, Minibatch Loss= 1488.962891, Training Accuracy= 0.91406\n",
+       "Iter 38400, Minibatch Loss= 231.191864, Training Accuracy= 0.98438\n",
+       "Iter 39680, Minibatch Loss= 171.154480, Training Accuracy= 0.98438\n",
+       "Iter 40960, Minibatch Loss= 2092.023682, Training Accuracy= 0.90625\n",
+       "Iter 42240, Minibatch Loss= 480.594299, Training Accuracy= 0.95312\n",
+       "Iter 43520, Minibatch Loss= 504.128143, Training Accuracy= 0.96875\n",
+       "Iter 44800, Minibatch Loss= 143.534485, Training Accuracy= 0.97656\n",
+       "Iter 46080, Minibatch Loss= 325.875580, Training Accuracy= 0.96094\n",
+       "Iter 47360, Minibatch Loss= 602.813049, Training Accuracy= 0.91406\n",
+       "Iter 48640, Minibatch Loss= 794.595093, Training Accuracy= 0.94531\n",
+       "Iter 49920, Minibatch Loss= 415.539032, Training Accuracy= 0.95312\n",
+       "Iter 51200, Minibatch Loss= 146.016022, Training Accuracy= 0.96094\n",
+       "Iter 52480, Minibatch Loss= 294.180786, Training Accuracy= 0.94531\n",
+       "Iter 53760, Minibatch Loss= 50.955730, Training Accuracy= 0.99219\n",
+       "Iter 55040, Minibatch Loss= 1026.607056, Training Accuracy= 0.92188\n",
+       "Iter 56320, Minibatch Loss= 283.756134, Training Accuracy= 0.96875\n",
+       "Iter 57600, Minibatch Loss= 691.538208, Training Accuracy= 0.95312\n",
+       "Iter 58880, Minibatch Loss= 491.075073, Training Accuracy= 0.96094\n",
+       "Iter 60160, Minibatch Loss= 571.951660, Training Accuracy= 0.95312\n",
+       "Iter 61440, Minibatch Loss= 284.041168, Training Accuracy= 0.97656\n",
+       "Iter 62720, Minibatch Loss= 1041.941528, Training Accuracy= 0.92969\n",
+       "Iter 64000, Minibatch Loss= 664.833923, Training Accuracy= 0.93750\n",
+       "Iter 65280, Minibatch Loss= 1582.112793, Training Accuracy= 0.88281\n",
+       "Iter 66560, Minibatch Loss= 783.135376, Training Accuracy= 0.94531\n",
+       "Iter 67840, Minibatch Loss= 245.942398, Training Accuracy= 0.96094\n",
+       "Iter 69120, Minibatch Loss= 752.858948, Training Accuracy= 0.96875\n",
+       "Iter 70400, Minibatch Loss= 623.243286, Training Accuracy= 0.94531\n",
+       "Iter 71680, Minibatch Loss= 846.498230, Training Accuracy= 0.93750\n",
+       "Iter 72960, Minibatch Loss= 586.516479, Training Accuracy= 0.95312\n",
+       "Iter 74240, Minibatch Loss= 92.774963, Training Accuracy= 0.98438\n",
+       "Iter 75520, Minibatch Loss= 644.039612, Training Accuracy= 0.95312\n",
+       "Iter 76800, Minibatch Loss= 693.247681, Training Accuracy= 0.96094\n",
+       "Iter 78080, Minibatch Loss= 466.491882, Training Accuracy= 0.96094\n",
+       "Iter 79360, Minibatch Loss= 964.212341, Training Accuracy= 0.93750\n",
+       "Iter 80640, Minibatch Loss= 230.451904, Training Accuracy= 0.97656\n",
+       "Iter 81920, Minibatch Loss= 280.434570, Training Accuracy= 0.95312\n",
+       "Iter 83200, Minibatch Loss= 213.208252, Training Accuracy= 0.97656\n",
+       "Iter 84480, Minibatch Loss= 774.836060, Training Accuracy= 0.94531\n",
+       "Iter 85760, Minibatch Loss= 164.687729, Training Accuracy= 0.96094\n",
+       "Iter 87040, Minibatch Loss= 419.967407, Training Accuracy= 0.96875\n",
+       "Iter 88320, Minibatch Loss= 160.920151, Training Accuracy= 0.96875\n",
+       "Iter 89600, Minibatch Loss= 586.063599, Training Accuracy= 0.96094\n",
+       "Iter 90880, Minibatch Loss= 345.598145, Training Accuracy= 0.96875\n",
+       "Iter 92160, Minibatch Loss= 931.361145, Training Accuracy= 0.92188\n",
+       "Iter 93440, Minibatch Loss= 170.107117, Training Accuracy= 0.97656\n",
+       "Iter 94720, Minibatch Loss= 497.162750, Training Accuracy= 0.93750\n",
+       "Iter 96000, Minibatch Loss= 906.600464, Training Accuracy= 0.94531\n",
+       "Iter 97280, Minibatch Loss= 303.382202, Training Accuracy= 0.92969\n",
+       "Iter 98560, Minibatch Loss= 509.161652, Training Accuracy= 0.97656\n",
+       "Iter 99840, Minibatch Loss= 359.561981, Training Accuracy= 0.97656\n",
+       "Iter 101120, Minibatch Loss= 136.516541, Training Accuracy= 0.97656\n",
+       "Iter 102400, Minibatch Loss= 517.199341, Training Accuracy= 0.96875\n",
+       "Iter 103680, Minibatch Loss= 487.793335, Training Accuracy= 0.95312\n",
+       "Iter 104960, Minibatch Loss= 407.351929, Training Accuracy= 0.96094\n",
+       "Iter 106240, Minibatch Loss= 70.495193, Training Accuracy= 0.98438\n",
+       "Iter 107520, Minibatch Loss= 344.783508, Training Accuracy= 0.96094\n",
+       "Iter 108800, Minibatch Loss= 242.682465, Training Accuracy= 0.95312\n",
+       "Iter 110080, Minibatch Loss= 169.181458, Training Accuracy= 0.96094\n",
+       "Iter 111360, Minibatch Loss= 152.638245, Training Accuracy= 0.98438\n",
+       "Iter 112640, Minibatch Loss= 170.795868, Training Accuracy= 0.96875\n",
+       "Iter 113920, Minibatch Loss= 133.262726, Training Accuracy= 0.98438\n",
+       "Iter 115200, Minibatch Loss= 296.063293, Training Accuracy= 0.95312\n",
+       "Iter 116480, Minibatch Loss= 254.247543, Training Accuracy= 0.96094\n",
+       "Iter 117760, Minibatch Loss= 506.795715, Training Accuracy= 0.94531\n",
+       "Iter 119040, Minibatch Loss= 446.006897, Training Accuracy= 0.96094\n",
+       "Iter 120320, Minibatch Loss= 149.467377, Training Accuracy= 0.97656\n",
+       "Iter 121600, Minibatch Loss= 52.783600, Training Accuracy= 0.98438\n",
+       "Iter 122880, Minibatch Loss= 49.041794, Training Accuracy= 0.98438\n",
+       "Iter 124160, Minibatch Loss= 184.371246, Training Accuracy= 0.97656\n",
+       "Iter 125440, Minibatch Loss= 129.838501, Training Accuracy= 0.97656\n",
+       "Iter 126720, Minibatch Loss= 288.006531, Training Accuracy= 0.96875\n",
+       "Iter 128000, Minibatch Loss= 187.284653, Training Accuracy= 0.97656\n",
+       "Iter 129280, Minibatch Loss= 197.969955, Training Accuracy= 0.96875\n",
+       "Iter 130560, Minibatch Loss= 299.969818, Training Accuracy= 0.96875\n",
+       "Iter 131840, Minibatch Loss= 537.602173, Training Accuracy= 0.96094\n",
+       "Iter 133120, Minibatch Loss= 4.519302, Training Accuracy= 0.99219\n",
+       "Iter 134400, Minibatch Loss= 133.264191, Training Accuracy= 0.97656\n",
+       "Iter 135680, Minibatch Loss= 89.662292, Training Accuracy= 0.97656\n",
+       "Iter 136960, Minibatch Loss= 107.774078, Training Accuracy= 0.96875\n",
+       "Iter 138240, Minibatch Loss= 335.904572, Training Accuracy= 0.96094\n",
+       "Iter 139520, Minibatch Loss= 457.494568, Training Accuracy= 0.96094\n",
+       "Iter 140800, Minibatch Loss= 259.131531, Training Accuracy= 0.95312\n",
+       "Iter 142080, Minibatch Loss= 152.205383, Training Accuracy= 0.96094\n",
+       "Iter 143360, Minibatch Loss= 252.535828, Training Accuracy= 0.95312\n",
+       "Iter 144640, Minibatch Loss= 109.477585, Training Accuracy= 0.96875\n",
+       "Iter 145920, Minibatch Loss= 24.468613, Training Accuracy= 0.99219\n",
+       "Iter 147200, Minibatch Loss= 51.722107, Training Accuracy= 0.97656\n",
+       "Iter 148480, Minibatch Loss= 69.715233, Training Accuracy= 0.97656\n",
+       "Iter 149760, Minibatch Loss= 405.289246, Training Accuracy= 0.92969\n",
+       "Iter 151040, Minibatch Loss= 282.976379, Training Accuracy= 0.95312\n",
+       "Iter 152320, Minibatch Loss= 134.991119, Training Accuracy= 0.97656\n",
+       "Iter 153600, Minibatch Loss= 491.618103, Training Accuracy= 0.92188\n",
+       "Iter 154880, Minibatch Loss= 154.299988, Training Accuracy= 0.99219\n",
+       "Iter 156160, Minibatch Loss= 79.480019, Training Accuracy= 0.96875\n",
+       "Iter 157440, Minibatch Loss= 68.093750, Training Accuracy= 0.99219\n",
+       "Iter 158720, Minibatch Loss= 459.739685, Training Accuracy= 0.92188\n",
+       "Iter 160000, Minibatch Loss= 168.076843, Training Accuracy= 0.94531\n",
+       "Iter 161280, Minibatch Loss= 256.141846, Training Accuracy= 0.97656\n",
+       "Iter 162560, Minibatch Loss= 236.400391, Training Accuracy= 0.94531\n",
+       "Iter 163840, Minibatch Loss= 177.011261, Training Accuracy= 0.96875\n",
+       "Iter 165120, Minibatch Loss= 48.583298, Training Accuracy= 0.97656\n",
+       "Iter 166400, Minibatch Loss= 413.800293, Training Accuracy= 0.96094\n",
+       "Iter 167680, Minibatch Loss= 209.587387, Training Accuracy= 0.96875\n",
+       "Iter 168960, Minibatch Loss= 239.407318, Training Accuracy= 0.98438\n",
+       "Iter 170240, Minibatch Loss= 183.567017, Training Accuracy= 0.96875\n",
+       "Iter 171520, Minibatch Loss= 87.937515, Training Accuracy= 0.96875\n",
+       "Iter 172800, Minibatch Loss= 203.777039, Training Accuracy= 0.98438\n",
+       "Iter 174080, Minibatch Loss= 566.378052, Training Accuracy= 0.94531\n",
+       "Iter 175360, Minibatch Loss= 325.170898, Training Accuracy= 0.95312\n",
+       "Iter 176640, Minibatch Loss= 300.142212, Training Accuracy= 0.97656\n",
+       "Iter 177920, Minibatch Loss= 205.370193, Training Accuracy= 0.95312\n",
+       "Iter 179200, Minibatch Loss= 5.594437, Training Accuracy= 0.99219\n",
+       "Iter 180480, Minibatch Loss= 110.732109, Training Accuracy= 0.98438\n",
+       "Iter 181760, Minibatch Loss= 33.320297, Training Accuracy= 0.99219\n",
+       "Iter 183040, Minibatch Loss= 6.885544, Training Accuracy= 0.99219\n",
+       "Iter 184320, Minibatch Loss= 221.144806, Training Accuracy= 0.96875\n",
+       "Iter 185600, Minibatch Loss= 365.337372, Training Accuracy= 0.94531\n",
+       "Iter 186880, Minibatch Loss= 186.558258, Training Accuracy= 0.96094\n",
+       "Iter 188160, Minibatch Loss= 149.720322, Training Accuracy= 0.98438\n",
+       "Iter 189440, Minibatch Loss= 105.281998, Training Accuracy= 0.97656\n",
+       "Iter 190720, Minibatch Loss= 289.980011, Training Accuracy= 0.96094\n",
+       "Iter 192000, Minibatch Loss= 214.382278, Training Accuracy= 0.96094\n",
+       "Iter 193280, Minibatch Loss= 461.044312, Training Accuracy= 0.93750\n",
+       "Iter 194560, Minibatch Loss= 138.653076, Training Accuracy= 0.98438\n",
+       "Iter 195840, Minibatch Loss= 112.004883, Training Accuracy= 0.98438\n",
+       "Iter 197120, Minibatch Loss= 212.691467, Training Accuracy= 0.97656\n",
+       "Iter 198400, Minibatch Loss= 57.642502, Training Accuracy= 0.97656\n",
+       "Iter 199680, Minibatch Loss= 80.503563, Training Accuracy= 0.96875\n",
+       "Optimization Finished!\n",
+       "Testing Accuracy: 0.984375\n"
+      ]
+     }
+    ],
     "source": [
      "# Launch the graph\n",
      "with tf.Session() as sess:\n",
