Skip to content

Commit

Permalink
Switch flags and logging from tf to absl.
Browse files Browse the repository at this point in the history
PiperOrigin-RevId: 233558731
  • Loading branch information
TensorFlow Hub Authors authored and vbardiovskyg committed Feb 26, 2019
1 parent dc53bdc commit d9b489b
Show file tree
Hide file tree
Showing 9 changed files with 41 additions and 30 deletions.
4 changes: 3 additions & 1 deletion tensorflow_hub/compressed_module_resolver_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,14 +32,16 @@
import tempfile
import uuid

from absl import flags
import tensorflow as tf

from tensorflow_hub import compressed_module_resolver
from tensorflow_hub import resolver
from tensorflow_hub import test_utils
from tensorflow_hub import tf_utils

FLAGS = tf.flags.FLAGS

FLAGS = flags.FLAGS


class HttpCompressedFileResolverTest(tf.test.TestCase):
Expand Down
3 changes: 2 additions & 1 deletion tensorflow_hub/e2e_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import os
import tarfile

from absl import logging
import tensorflow as tf
import tensorflow_hub as hub

Expand Down Expand Up @@ -90,7 +91,7 @@ def test_http_locations(self):
self.assertAllClose(sess.run(out), 121)

cache_content = sorted(tf.gfile.ListDirectory(cache_dir))
tf.logging.info("Cache context: %s", str(cache_content))
logging.info("Cache context: %s", str(cache_content))
self.assertEqual(2, len(cache_content))
self.assertTrue(cache_content[1].endswith(".descriptor.txt"))
module_files = sorted(tf.gfile.ListDirectory(
Expand Down
5 changes: 3 additions & 2 deletions tensorflow_hub/estimator.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@

import os

from absl import logging
import tensorflow as tf
from tensorflow_hub import tf_utils

Expand Down Expand Up @@ -160,8 +161,8 @@ def export(self, estimator, export_path, checkpoint_path=None,
tf_utils.garbage_collect_exports(export_path, self._exports_to_keep)
return export_dir
else:
tf.logging.warn("LatestModuleExporter found zero modules to export. "
"Use hub.register_module_for_export() if needed.")
logging.warn("LatestModuleExporter found zero modules to export. "
"Use hub.register_module_for_export() if needed.")
# No export_dir has been created.
return None

Expand Down
3 changes: 2 additions & 1 deletion tensorflow_hub/meta_graph_lib.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@

import re

from absl import logging
import tensorflow as tf


Expand All @@ -36,7 +37,7 @@ def prepend_name_scope(name, import_scope):
tf.compat.as_str_any(name))
except TypeError as e:
# If the name is not of a type we can process, simply return it.
tf.logging.warning(e)
logging.warning(e)
return name
else:
return name
Expand Down
9 changes: 5 additions & 4 deletions tensorflow_hub/native_module.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import os
import re

from absl import logging
import tensorflow as tf

from tensorflow_hub import meta_graph_lib
Expand Down Expand Up @@ -222,7 +223,7 @@ def add_signature(name=None, inputs=None, outputs=None):
if not isinstance(outputs, dict):
outputs = {"default": outputs}
message = find_signature_inputs_from_multivalued_ops(inputs)
if message: tf.logging.error(message)
if message: logging.error(message)
message = find_signature_input_colocation_error(name, inputs)
if message: raise ValueError(message)
saved_model_lib.add_signature(name, inputs, outputs)
Expand Down Expand Up @@ -353,7 +354,7 @@ def _export(self, path, variables_saver):
module_def_filename,
module_def_proto.SerializeToString(),
overwrite=False)
tf.logging.info("Exported TF-Hub module to: %s", path)
logging.info("Exported TF-Hub module to: %s", path)


class _ModuleImpl(module_impl.ModuleImpl):
Expand Down Expand Up @@ -731,12 +732,12 @@ def register_ops_if_needed(graph_ops):
missing_op_list = op_def_pb2.OpList()
for missing_op in missing_ops:
if missing_op not in cpp_registry_ops:
tf.logging.info(
logging.info(
"Op %s is missing from both the python and C++ registry.",
missing_op)
else:
missing_op_list.op.extend([cpp_registry_ops[missing_op]])
tf.logging.info(
logging.info(
"Adding op %s from c++ registry to python registry.",
missing_op)

Expand Down
27 changes: 15 additions & 12 deletions tensorflow_hub/resolver.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,12 +28,15 @@
import time
import uuid

from absl import flags
from absl import logging
import tensorflow as tf
from tensorflow_hub import tf_utils

FLAGS = tf.flags.FLAGS

tf.flags.DEFINE_string(
FLAGS = flags.FLAGS

flags.DEFINE_string(
"tfhub_cache_dir",
None,
"If set, TF-Hub will download and cache Modules into this directory. "
Expand Down Expand Up @@ -71,8 +74,8 @@ def tfhub_cache_dir(default_cache_dir=None, use_temp=False):
# Place all TF-Hub modules under <system's temp>/tfhub_modules.
cache_dir = os.path.join(tempfile.gettempdir(), "tfhub_modules")
if cache_dir:
tf.logging.log_first_n(tf.logging.INFO, "Using %s to cache modules.", 1,
cache_dir)
logging.log_first_n(logging.INFO, "Using %s to cache modules.", 1,
cache_dir)
return cache_dir


Expand Down Expand Up @@ -115,7 +118,7 @@ def _print_download_progress_msg(self, msg, flush=False):
else:
# Interactive progress tracking is disabled. Print progress to the
# standard TF log.
tf.logging.info(msg)
logging.info(msg)

def _log_progress(self, bytes_downloaded):
"""Logs progress information about ongoing module download.
Expand Down Expand Up @@ -307,8 +310,8 @@ def _wait_for_lock_to_disappear(handle, lock_file, lock_file_timeout_sec):
lock_file_content = None
while tf.gfile.Exists(lock_file):
try:
tf.logging.log_every_n(
tf.logging.INFO,
logging.log_every_n(
logging.INFO,
"Module '%s' already being downloaded by '%s'. Waiting.", 10,
handle, tf_utils.read_file_to_string(lock_file))
if (time.time() - locked_tmp_dir_size_check_time >
Expand All @@ -322,8 +325,8 @@ def _wait_for_lock_to_disappear(handle, lock_file, lock_file_timeout_sec):
# There was no data downloaded in the past
# 'lock_file_timeout_sec'. Steal the lock and proceed with the
# local download.
tf.logging.warning("Deleting lock file %s due to inactivity." %
lock_file)
logging.warning("Deleting lock file %s due to inactivity.",
lock_file)
tf.gfile.Remove(lock_file)
break
locked_tmp_dir_size = cur_locked_tmp_dir_size
Expand Down Expand Up @@ -390,7 +393,7 @@ def atomic_download(handle,
# would obtain a lock ourselves, or wait again for the lock to disappear.

# Lock file acquired.
tf.logging.info("Downloading TF-Hub Module '%s'.", handle)
logging.info("Downloading TF-Hub Module '%s'.", handle)
tf.gfile.MakeDirs(tmp_dir)
download_fn(handle, tmp_dir)
# Write module descriptor to capture information about which module was
Expand All @@ -405,9 +408,9 @@ def atomic_download(handle,
_write_module_descriptor_file(handle, module_dir)
try:
tf.gfile.Rename(tmp_dir, module_dir)
tf.logging.info("Downloaded TF-Hub Module '%s'.", handle)
logging.info("Downloaded TF-Hub Module '%s'.", handle)
except tf.errors.AlreadyExistsError:
tf.logging.warning("Module already exists in %s" % module_dir)
logging.warning("Module already exists in %s", module_dir)

finally:
try:
Expand Down
3 changes: 2 additions & 1 deletion tensorflow_hub/resolver_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,12 +26,13 @@
import time
import uuid

from absl import flags
import tensorflow as tf

from tensorflow_hub import resolver
from tensorflow_hub import tf_utils

FLAGS = tf.flags.FLAGS
FLAGS = flags.FLAGS


class PathResolverTest(tf.test.TestCase):
Expand Down
6 changes: 3 additions & 3 deletions tensorflow_hub/test_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
import sys
import threading

import tensorflow as tf
from absl import flags


def _do_redirect(handler, location):
Expand Down Expand Up @@ -132,8 +132,8 @@ def do_GET(self):

def test_srcdir():
"""Returns the path where to look for test data files."""
if "test_srcdir" in tf.app.flags.FLAGS:
return tf.app.flags.FLAGS["test_srcdir"].value
if "test_srcdir" in flags.FLAGS:
return flags.FLAGS["test_srcdir"].value
elif "TEST_SRCDIR" in os.environ:
return os.environ["TEST_SRCDIR"]
else:
Expand Down
11 changes: 6 additions & 5 deletions tensorflow_hub/tf_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import time
import uuid

from absl import logging
import tensorflow as tf


Expand Down Expand Up @@ -100,11 +101,11 @@ def get_timestamped_export_dir(export_dir_base):
return export_dir
time.sleep(1)
attempts += 1
tf.logging.warn(
"Export directory {} already exists; retrying (attempt {}/{})".format(
export_dir, attempts, MAX_DIRECTORY_CREATION_ATTEMPTS))
logging.warn(
"Export directory %s already exists; retrying (attempt %d/%d)",
export_dir, attempts, MAX_DIRECTORY_CREATION_ATTEMPTS)
raise RuntimeError("Failed to obtain a unique export directory name after "
"{} attempts.".format(MAX_DIRECTORY_CREATION_ATTEMPTS))
"{} attempts.".format(MAX_DIRECTORY_CREATION_ATTEMPTS))


def get_temp_export_dir(timestamped_export_dir):
Expand Down Expand Up @@ -156,7 +157,7 @@ def garbage_collect_exports(export_dir_base, exports_to_keep):
try:
tf.gfile.DeleteRecursively(path)
except tf.errors.NotFoundError as e:
tf.logging.warn("Can not delete %s recursively: %s" % (path, e))
logging.warn("Can not delete %s recursively: %s", path, e)


def bytes_to_readable_str(num_bytes, include_b=False):
Expand Down

0 comments on commit d9b489b

Please sign in to comment.