Skip to content

Commit

Permalink
TensorBoard Integration with Notebook Example (#653)
Browse files Browse the repository at this point in the history
* WIP multivariate normal betas estimated properly

* WIP

* added logrun parameter to initialize

* Added tensorboard example notebook

* added default logging options for latent variables, scalars and distributions

* Added screen tensorboard screenshots

* Final minor typos

* pep8 fixes

* whitespace

* reverting changes in documents related to this TensorBoard pull request

* incorporated @dustinvtran feedback

* pep8 compliance

* changed logrun to log_timestamp and logvars to log_vars

* updated example notebook with better descriptions of screenshots

* aligned logging naming conventions and simplified logging configuration

* updated tutorial and images to include gradients and parameter graphs

* Update supervised_regression.ipynb

* Update iclr2017.ipynb
  • Loading branch information
closedLoop authored and dustinvtran committed May 30, 2017
1 parent 66116ac commit a17d650
Show file tree
Hide file tree
Showing 8 changed files with 379 additions and 4 deletions.
Binary file added docs/images/tensorboard_distributions.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added docs/images/tensorboard_graph1.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added docs/images/tensorboard_graphs.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added docs/images/tensorboard_histograms.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added docs/images/tensorboard_scalars.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
71 changes: 70 additions & 1 deletion edward/inferences/inference.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,12 @@
import numpy as np
import six
import tensorflow as tf
import os
from datetime import datetime

from edward.models import RandomVariable
from edward.util import check_data, check_latent_vars, get_session, Progbar
from edward.util import get_variables


@six.add_metaclass(abc.ABCMeta)
Expand Down Expand Up @@ -146,7 +149,7 @@ def run(self, variables=None, use_coordinator=True, *args, **kwargs):

@abc.abstractmethod
def initialize(self, n_iter=1000, n_print=None, scale=None, logdir=None,
debug=False):
log_timestamp=None, log_vars=None, debug=False):
"""Initialize inference algorithm. It initializes hyperparameters
and builds ops for the algorithm's computational graph. No ops
should be created outside the call to ``initialize()``.
Expand All @@ -169,6 +172,14 @@ def initialize(self, n_iter=1000, n_print=None, scale=None, logdir=None,
logdir : str, optional
Directory where event file will be written. For details,
see ``tf.summary.FileWriter``. Default is to write nothing.
log_timestamp : bool, optional
If True, creates a subdirectory of logdir named with the current UTC
timestamp in the format 'YYYYMMDD_HHMMSS', and saves the specific run's
results there.
log_vars : list, optional
Specifies the list of variables to log after each n_print steps. If
None, will log all `latent_variables` that have been given custom names.
If log_vars == [], no variables will be logged.
debug : bool, optional
If True, add checks for ``NaN`` and ``Inf`` to all computations
in the graph. May result in substantially slower execution
Expand All @@ -194,6 +205,15 @@ def initialize(self, n_iter=1000, n_print=None, scale=None, logdir=None,

if logdir is not None:
self.logging = True

if log_timestamp:
# Appends the timestamp as a subdirectory
logdir = os.path.join(logdir,
datetime.strftime(datetime.utcnow(),
"%Y%m%d_%H%M%S"))

self.set_log_variables(log_vars=log_vars)

self.train_writer = tf.summary.FileWriter(logdir, tf.get_default_graph())
self.summarize = tf.summary.merge_all()
else:
Expand Down Expand Up @@ -258,3 +278,52 @@ def finalize(self):
"""
if self.logging:
self.train_writer.close()

def set_log_variables(self, log_vars=None):
  """Register TensorFlow summary ops so variables appear in TensorBoard.

  For each variable in ``log_vars``, creates a ``tf.summary.scalar``
  (for 1-element variables) and/or a ``tf.summary.histogram`` (for
  multi-element variables), attached to a summary collection unique to
  this inference instance.

  If ``log_vars`` is None, automatically logs all latent variables that
  have been given non-default names. If ``log_vars`` is ``[]``, no
  summaries are created.

  Parameters
  ----------
  log_vars : list, optional
    A list of variables to be logged.

  Returns
  -------
  None
  """
  # Keyed on id(self) so summaries from distinct inference instances
  # end up in distinct collections and do not mix.
  summary_key = 'summaries_' + str(id(self))
  # NOTE(review): the nesting below was reconstructed from the docstring
  # semantics ("If log_vars == [], no variables will be logged") after
  # indentation was lost in extraction -- confirm against upstream.
  if log_vars is None:
    log_vars = []

    # Add model parameters: variables reachable from each data key
    # (presumably the random variables bound to observed data).
    for k in self.data:
      log_vars += get_variables(k)

    # Add latent variables and model priors: variables reachable from
    # both the prior (key) and its approximating family (value).
    for k in self.latent_vars:
      log_vars += get_variables(k)
      log_vars += get_variables(self.latent_vars[k])

    # Prune variables to only be custom named variables (without 'Variable'
    # substring): TF's auto-generated default names contain 'Variable'.
    log_vars = [var for var in log_vars if 'Variable' not in var.name]

  for var in log_vars:
    var_name = var.name.replace(':', '/')  # colons are an invalid character

    if len(var.shape) == 1 and var.shape[0] == 1:
      # Log all scalars (1-D variables holding a single element).
      tf.summary.scalar("parameter/{}".format(var_name),
                        var[0], collections=[summary_key])

    # If var is multi-dimensional, log the distribution of its values.
    # NOTE(review): rank-0 variables (shape ()) match neither branch and
    # are never logged -- confirm this is intended.
    if len(var.shape) > 0 and np.max(var.shape) > 1:
      tf.summary.histogram("parameter/{}".format(var_name),
                           var, collections=[summary_key])
3 changes: 0 additions & 3 deletions edward/inferences/variational_inference.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,9 +78,6 @@ def initialize(self, optimizer=None, var_list=None, use_prettytensor=False,
tf.summary.scalar("loss", self.loss, collections=[summary_key])
for grad, var in grads_and_vars:
# replace colons which are an invalid character
tf.summary.histogram("parameter/" +
var.name.replace(':', '/'),
var, collections=[summary_key])
tf.summary.histogram("gradient/" +
var.name.replace(':', '/'),
grad, collections=[summary_key])
Expand Down
309 changes: 309 additions & 0 deletions notebooks/tensorboard.ipynb

Large diffs are not rendered by default.

0 comments on commit a17d650

Please sign in to comment.