Skip to content

Commit 90f22bc

Browse files
Refactor multilayer_perceptron for TF1.0
Signed-off-by: Norman Heckscher <norman.heckscher@gmail.com>
1 parent 8e03823 commit 90f22bc

File tree

2 files changed

+40
-31
lines changed

2 files changed

+40
-31
lines changed

examples/3_NeuralNetworks/multilayer_perceptron.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -60,11 +60,11 @@ def multilayer_perceptron(x, weights, biases):
6060
pred = multilayer_perceptron(x, weights, biases)
6161

6262
# Define loss and optimizer
63-
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(pred, y))
63+
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels=y))
6464
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)
6565

6666
# Initializing the variables
67-
init = tf.initialize_all_variables()
67+
init = tf.global_variables_initializer()
6868

6969
# Launch the graph
7070
with tf.Session() as sess:

notebooks/3_NeuralNetworks/multilayer_perceptron.ipynb

Lines changed: 38 additions & 29 deletions
Original file line number | Diff line number | Diff line change
@@ -29,17 +29,17 @@
2929
"name": "stdout",
3030
"output_type": "stream",
3131
"text": [
32-
"Extracting /tmp/data/train-images-idx3-ubyte.gz\n",
33-
"Extracting /tmp/data/train-labels-idx1-ubyte.gz\n",
34-
"Extracting /tmp/data/t10k-images-idx3-ubyte.gz\n",
35-
"Extracting /tmp/data/t10k-labels-idx1-ubyte.gz\n"
32+
"Extracting MNIST_data/train-images-idx3-ubyte.gz\n",
33+
"Extracting MNIST_data/train-labels-idx1-ubyte.gz\n",
34+
"Extracting MNIST_data/t10k-images-idx3-ubyte.gz\n",
35+
"Extracting MNIST_data/t10k-labels-idx1-ubyte.gz\n"
3636
]
3737
}
3838
],
3939
"source": [
4040
"# Import MINST data\n",
4141
"from tensorflow.examples.tutorials.mnist import input_data\n",
42-
"mnist = input_data.read_data_sets(\"/tmp/data/\", one_hot=True)\n",
42+
"mnist = input_data.read_data_sets(\"MNIST_data/\", one_hot=True)\n",
4343
"\n",
4444
"import tensorflow as tf"
4545
]
@@ -92,9 +92,9 @@
9292
},
9393
{
9494
"cell_type": "code",
95-
"execution_count": 4,
95+
"execution_count": 5,
9696
"metadata": {
97-
"collapsed": true
97+
"collapsed": false
9898
},
9999
"outputs": [],
100100
"source": [
@@ -114,16 +114,16 @@
114114
"pred = multilayer_perceptron(x, weights, biases)\n",
115115
"\n",
116116
"# Define loss and optimizer\n",
117-
"cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(pred, y))\n",
117+
"cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels=y))\n",
118118
"optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)\n",
119119
"\n",
120120
"# Initializing the variables\n",
121-
"init = tf.initialize_all_variables()"
121+
"init = tf.global_variables_initializer()"
122122
]
123123
},
124124
{
125125
"cell_type": "code",
126-
"execution_count": 5,
126+
"execution_count": 6,
127127
"metadata": {
128128
"collapsed": false
129129
},
@@ -132,23 +132,23 @@
132132
"name": "stdout",
133133
"output_type": "stream",
134134
"text": [
135-
"Epoch: 0001 cost= 185.342230390\n",
136-
"Epoch: 0002 cost= 44.266946572\n",
137-
"Epoch: 0003 cost= 27.999560453\n",
138-
"Epoch: 0004 cost= 19.655567043\n",
139-
"Epoch: 0005 cost= 14.284429696\n",
140-
"Epoch: 0006 cost= 10.640310403\n",
141-
"Epoch: 0007 cost= 7.904047886\n",
142-
"Epoch: 0008 cost= 5.989115090\n",
143-
"Epoch: 0009 cost= 4.689374613\n",
144-
"Epoch: 0010 cost= 3.455884229\n",
145-
"Epoch: 0011 cost= 2.733002625\n",
146-
"Epoch: 0012 cost= 2.101091420\n",
147-
"Epoch: 0013 cost= 1.496508092\n",
148-
"Epoch: 0014 cost= 1.245452015\n",
149-
"Epoch: 0015 cost= 0.912072906\n",
135+
"Epoch: 0001 cost= 173.056566575\n",
136+
"Epoch: 0002 cost= 44.054413928\n",
137+
"Epoch: 0003 cost= 27.455470655\n",
138+
"Epoch: 0004 cost= 19.008652363\n",
139+
"Epoch: 0005 cost= 13.654873594\n",
140+
"Epoch: 0006 cost= 10.059267435\n",
141+
"Epoch: 0007 cost= 7.436018432\n",
142+
"Epoch: 0008 cost= 5.587794416\n",
143+
"Epoch: 0009 cost= 4.209882509\n",
144+
"Epoch: 0010 cost= 3.203879515\n",
145+
"Epoch: 0011 cost= 2.319920681\n",
146+
"Epoch: 0012 cost= 1.676204545\n",
147+
"Epoch: 0013 cost= 1.248805338\n",
148+
"Epoch: 0014 cost= 1.052676844\n",
149+
"Epoch: 0015 cost= 0.890117338\n",
150150
"Optimization Finished!\n",
151-
"Accuracy: 0.9422\n"
151+
"Accuracy: 0.9459\n"
152152
]
153153
}
154154
],
@@ -181,6 +181,15 @@
181181
" accuracy = tf.reduce_mean(tf.cast(correct_prediction, \"float\"))\n",
182182
" print \"Accuracy:\", accuracy.eval({x: mnist.test.images, y: mnist.test.labels})"
183183
]
184+
},
185+
{
186+
"cell_type": "code",
187+
"execution_count": null,
188+
"metadata": {
189+
"collapsed": true
190+
},
191+
"outputs": [],
192+
"source": []
184193
}
185194
],
186195
"metadata": {
@@ -192,16 +201,16 @@
192201
"language_info": {
193202
"codemirror_mode": {
194203
"name": "ipython",
195-
"version": 2.0
204+
"version": 2
196205
},
197206
"file_extension": ".py",
198207
"mimetype": "text/x-python",
199208
"name": "python",
200209
"nbconvert_exporter": "python",
201210
"pygments_lexer": "ipython2",
202-
"version": "2.7.11"
211+
"version": "2.7.13"
203212
}
204213
},
205214
"nbformat": 4,
206215
"nbformat_minor": 0
207-
}
216+
}

0 commit comments

Comments (0)