diff --git a/tensorflow_hub/compressed_module_resolver_test.py b/tensorflow_hub/compressed_module_resolver_test.py index 214312861..21ef9135f 100644 --- a/tensorflow_hub/compressed_module_resolver_test.py +++ b/tensorflow_hub/compressed_module_resolver_test.py @@ -32,6 +32,7 @@ import tempfile import uuid +from absl import flags import tensorflow as tf from tensorflow_hub import compressed_module_resolver @@ -39,7 +40,8 @@ from tensorflow_hub import test_utils from tensorflow_hub import tf_utils -FLAGS = tf.flags.FLAGS + +FLAGS = flags.FLAGS class HttpCompressedFileResolverTest(tf.test.TestCase): diff --git a/tensorflow_hub/e2e_test.py b/tensorflow_hub/e2e_test.py index 80a78963d..d706702bd 100644 --- a/tensorflow_hub/e2e_test.py +++ b/tensorflow_hub/e2e_test.py @@ -21,6 +21,7 @@ import os import tarfile +from absl import logging import tensorflow as tf import tensorflow_hub as hub @@ -90,7 +91,7 @@ def test_http_locations(self): self.assertAllClose(sess.run(out), 121) cache_content = sorted(tf.gfile.ListDirectory(cache_dir)) - tf.logging.info("Cache context: %s", str(cache_content)) + logging.info("Cache context: %s", str(cache_content)) self.assertEqual(2, len(cache_content)) self.assertTrue(cache_content[1].endswith(".descriptor.txt")) module_files = sorted(tf.gfile.ListDirectory( diff --git a/tensorflow_hub/estimator.py b/tensorflow_hub/estimator.py index 427f7437f..0344f263f 100644 --- a/tensorflow_hub/estimator.py +++ b/tensorflow_hub/estimator.py @@ -20,6 +20,7 @@ import os +from absl import logging import tensorflow as tf from tensorflow_hub import tf_utils @@ -160,8 +161,8 @@ def export(self, estimator, export_path, checkpoint_path=None, tf_utils.garbage_collect_exports(export_path, self._exports_to_keep) return export_dir else: - tf.logging.warn("LatestModuleExporter found zero modules to export. " - "Use hub.register_module_for_export() if needed.") + logging.warn("LatestModuleExporter found zero modules to export. 
" + "Use hub.register_module_for_export() if needed.") # No export_dir has been created. return None diff --git a/tensorflow_hub/meta_graph_lib.py b/tensorflow_hub/meta_graph_lib.py index 048a724d9..045e4ddd0 100644 --- a/tensorflow_hub/meta_graph_lib.py +++ b/tensorflow_hub/meta_graph_lib.py @@ -23,6 +23,7 @@ import re +from absl import logging import tensorflow as tf @@ -36,7 +37,7 @@ def prepend_name_scope(name, import_scope): tf.compat.as_str_any(name)) except TypeError as e: # If the name is not of a type we can process, simply return it. - tf.logging.warning(e) + logging.warning(e) return name else: return name diff --git a/tensorflow_hub/native_module.py b/tensorflow_hub/native_module.py index e3847f99f..226ab6e56 100644 --- a/tensorflow_hub/native_module.py +++ b/tensorflow_hub/native_module.py @@ -22,6 +22,7 @@ import os import re +from absl import logging import tensorflow as tf from tensorflow_hub import meta_graph_lib @@ -222,7 +223,7 @@ def add_signature(name=None, inputs=None, outputs=None): if not isinstance(outputs, dict): outputs = {"default": outputs} message = find_signature_inputs_from_multivalued_ops(inputs) - if message: tf.logging.error(message) + if message: logging.error(message) message = find_signature_input_colocation_error(name, inputs) if message: raise ValueError(message) saved_model_lib.add_signature(name, inputs, outputs) @@ -353,7 +354,7 @@ def _export(self, path, variables_saver): module_def_filename, module_def_proto.SerializeToString(), overwrite=False) - tf.logging.info("Exported TF-Hub module to: %s", path) + logging.info("Exported TF-Hub module to: %s", path) class _ModuleImpl(module_impl.ModuleImpl): @@ -731,12 +732,12 @@ def register_ops_if_needed(graph_ops): missing_op_list = op_def_pb2.OpList() for missing_op in missing_ops: if missing_op not in cpp_registry_ops: - tf.logging.info( + logging.info( "Op %s is missing from both the python and C++ registry.", missing_op) else: 
missing_op_list.op.extend([cpp_registry_ops[missing_op]]) - tf.logging.info( + logging.info( "Adding op %s from c++ registry to python registry.", missing_op) diff --git a/tensorflow_hub/resolver.py b/tensorflow_hub/resolver.py index d87249ede..2dd6fa6c1 100644 --- a/tensorflow_hub/resolver.py +++ b/tensorflow_hub/resolver.py @@ -28,12 +28,15 @@ import time import uuid +from absl import flags +from absl import logging import tensorflow as tf from tensorflow_hub import tf_utils -FLAGS = tf.flags.FLAGS -tf.flags.DEFINE_string( +FLAGS = flags.FLAGS + +flags.DEFINE_string( "tfhub_cache_dir", None, "If set, TF-Hub will download and cache Modules into this directory. " @@ -71,8 +74,8 @@ def tfhub_cache_dir(default_cache_dir=None, use_temp=False): # Place all TF-Hub modules under /tfhub_modules. cache_dir = os.path.join(tempfile.gettempdir(), "tfhub_modules") if cache_dir: - tf.logging.log_first_n(tf.logging.INFO, "Using %s to cache modules.", 1, - cache_dir) + logging.log_first_n(logging.INFO, "Using %s to cache modules.", 1, + cache_dir) return cache_dir @@ -115,7 +118,7 @@ def _print_download_progress_msg(self, msg, flush=False): else: # Interactive progress tracking is disabled. Print progress to the # standard TF log. - tf.logging.info(msg) + logging.info(msg) def _log_progress(self, bytes_downloaded): """Logs progress information about ongoing module download. @@ -307,8 +310,8 @@ def _wait_for_lock_to_disappear(handle, lock_file, lock_file_timeout_sec): lock_file_content = None while tf.gfile.Exists(lock_file): try: - tf.logging.log_every_n( - tf.logging.INFO, + logging.log_every_n( + logging.INFO, "Module '%s' already being downloaded by '%s'. Waiting.", 10, handle, tf_utils.read_file_to_string(lock_file)) if (time.time() - locked_tmp_dir_size_check_time > @@ -322,8 +325,8 @@ def _wait_for_lock_to_disappear(handle, lock_file, lock_file_timeout_sec): # There is was no data downloaded in the past # 'lock_file_timeout_sec'. 
Steal the lock and proceed with the # local download. - tf.logging.warning("Deleting lock file %s due to inactivity." % - lock_file) + logging.warning("Deleting lock file %s due to inactivity.", + lock_file) tf.gfile.Remove(lock_file) break locked_tmp_dir_size = cur_locked_tmp_dir_size @@ -390,7 +393,7 @@ def atomic_download(handle, # would obtain a lock ourselves, or wait again for the lock to disappear. # Lock file acquired. - tf.logging.info("Downloading TF-Hub Module '%s'.", handle) + logging.info("Downloading TF-Hub Module '%s'.", handle) tf.gfile.MakeDirs(tmp_dir) download_fn(handle, tmp_dir) # Write module descriptor to capture information about which module was @@ -405,9 +408,9 @@ def atomic_download(handle, _write_module_descriptor_file(handle, module_dir) try: tf.gfile.Rename(tmp_dir, module_dir) - tf.logging.info("Downloaded TF-Hub Module '%s'.", handle) + logging.info("Downloaded TF-Hub Module '%s'.", handle) except tf.errors.AlreadyExistsError: - tf.logging.warning("Module already exists in %s" % module_dir) + logging.warning("Module already exists in %s", module_dir) finally: try: diff --git a/tensorflow_hub/resolver_test.py b/tensorflow_hub/resolver_test.py index a8f456051..bc6e68501 100644 --- a/tensorflow_hub/resolver_test.py +++ b/tensorflow_hub/resolver_test.py @@ -26,12 +26,13 @@ import time import uuid +from absl import flags import tensorflow as tf from tensorflow_hub import resolver from tensorflow_hub import tf_utils -FLAGS = tf.flags.FLAGS +FLAGS = flags.FLAGS class PathResolverTest(tf.test.TestCase): diff --git a/tensorflow_hub/test_utils.py b/tensorflow_hub/test_utils.py index aa1955579..3c1ca9c44 100644 --- a/tensorflow_hub/test_utils.py +++ b/tensorflow_hub/test_utils.py @@ -23,7 +23,7 @@ import sys import threading -import tensorflow as tf +from absl import flags def _do_redirect(handler, location): @@ -132,8 +132,8 @@ def do_GET(self): def test_srcdir(): """Returns the path where to look for test data files.""" - if "test_srcdir" in 
tf.app.flags.FLAGS: - return tf.app.flags.FLAGS["test_srcdir"].value + if "test_srcdir" in flags.FLAGS: + return flags.FLAGS["test_srcdir"].value elif "TEST_SRCDIR" in os.environ: return os.environ["TEST_SRCDIR"] else: diff --git a/tensorflow_hub/tf_utils.py b/tensorflow_hub/tf_utils.py index 2cff3b077..b5ed38ae7 100644 --- a/tensorflow_hub/tf_utils.py +++ b/tensorflow_hub/tf_utils.py @@ -22,6 +22,7 @@ import time import uuid +from absl import logging import tensorflow as tf @@ -100,11 +101,11 @@ def get_timestamped_export_dir(export_dir_base): return export_dir time.sleep(1) attempts += 1 - tf.logging.warn( - "Export directory {} already exists; retrying (attempt {}/{})".format( - export_dir, attempts, MAX_DIRECTORY_CREATION_ATTEMPTS)) + logging.warn( + "Export directory %s already exists; retrying (attempt %d/%d)", + export_dir, attempts, MAX_DIRECTORY_CREATION_ATTEMPTS) raise RuntimeError("Failed to obtain a unique export directory name after " - "{} attempts.".format(MAX_DIRECTORY_CREATION_ATTEMPTS)) + "%d attempts." % MAX_DIRECTORY_CREATION_ATTEMPTS) def get_temp_export_dir(timestamped_export_dir): @@ -156,7 +157,7 @@ def garbage_collect_exports(export_dir_base, exports_to_keep): try: tf.gfile.DeleteRecursively(path) except tf.errors.NotFoundError as e: - tf.logging.warn("Can not delete %s recursively: %s" % (path, e)) + logging.warn("Can not delete %s recursively: %s", path, e) def bytes_to_readable_str(num_bytes, include_b=False):