@@ -41,12 +41,12 @@ def multilayer_perceptron(x, weights, biases):
     layer_1 = tf.add(tf.matmul(x, weights['w1']), biases['b1'])
     layer_1 = tf.nn.relu(layer_1)
     # Create a summary to visualize the first layer ReLU activation
-    tf.histogram_summary("relu1", layer_1)
+    tf.summary.histogram("relu1", layer_1)
     # Hidden layer with RELU activation
     layer_2 = tf.add(tf.matmul(layer_1, weights['w2']), biases['b2'])
     layer_2 = tf.nn.relu(layer_2)
     # Create another summary to visualize the second layer ReLU activation
-    tf.histogram_summary("relu2", layer_2)
+    tf.summary.histogram("relu2", layer_2)
     # Output layer
     out_layer = tf.add(tf.matmul(layer_2, weights['w3']), biases['b3'])
     return out_layer
@@ -91,24 +91,24 @@ def multilayer_perceptron(x, weights, biases):
 init = tf.initialize_all_variables()
 
 # Create a summary to monitor cost tensor
-tf.scalar_summary("loss", loss)
+tf.summary.scalar("loss", loss)
 # Create a summary to monitor accuracy tensor
-tf.scalar_summary("accuracy", acc)
+tf.summary.scalar("accuracy", acc)
 # Create summaries to visualize weights
 for var in tf.trainable_variables():
-    tf.histogram_summary(var.name, var)
+    tf.summary.histogram(var.name, var)
 # Summarize all gradients
 for grad, var in grads:
-    tf.histogram_summary(var.name + '/gradient', grad)
+    tf.summary.histogram(var.name + '/gradient', grad)
 # Merge all summaries into a single op
-merged_summary_op = tf.merge_all_summaries()
+merged_summary_op = tf.summary.merge_all()
 
 # Launch the graph
 with tf.Session() as sess:
     sess.run(init)
 
     # op to write logs to Tensorboard
-    summary_writer = tf.train.SummaryWriter(logs_path,
+    summary_writer = tf.summary.FileWriter(logs_path,
                                             graph=tf.get_default_graph())
 
     # Training cycle
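For context, a minimal sketch of how the training cycle that follows would typically consume the migrated ops, assuming the surrounding example defines x, y, optimizer, mnist, batch_size and training_epochs (none of which appear in these hunks): the merged summary op is evaluated alongside the train step and the serialized result is handed to the FileWriter.

    # Sketch only: names other than loss, merged_summary_op and summary_writer are assumed
    for epoch in range(training_epochs):
        total_batch = int(mnist.train.num_examples / batch_size)
        for i in range(total_batch):
            batch_xs, batch_ys = mnist.train.next_batch(batch_size)
            # Evaluate the train op, the loss, and the merged summary op in one run
            _, c, summary = sess.run([optimizer, loss, merged_summary_op],
                                     feed_dict={x: batch_xs, y: batch_ys})
            # Write the summary for this step so TensorBoard can plot it over time
            summary_writer.add_summary(summary, epoch * total_batch + i)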