From b6145b072188fd6e03eae8aa986b4c69e0f7b9e5 Mon Sep 17 00:00:00 2001
From: Sam Rabin
Date: Tue, 15 Oct 2024 12:18:07 -0600
Subject: [PATCH] Hillslope scripts now use CTSM logging.

---
 python/ctsm/hillslopes/combine_chunk_files.py | 37 ++++++++++++-------
 .../ctsm/hillslopes/combine_gridcell_files.py | 29 +++++++++------
 python/ctsm/hillslopes/hillslope_utils.py     | 11 +++++-
 3 files changed, 51 insertions(+), 26 deletions(-)

diff --git a/python/ctsm/hillslopes/combine_chunk_files.py b/python/ctsm/hillslopes/combine_chunk_files.py
index 21e992951c..61c401e762 100755
--- a/python/ctsm/hillslopes/combine_chunk_files.py
+++ b/python/ctsm/hillslopes/combine_chunk_files.py
@@ -10,6 +10,7 @@
 # The below "pylint: disable" is because pylint complains that netCDF4 has no member Dataset, even
 # though it does.
 from netCDF4 import Dataset  # pylint: disable=no-name-in-module
+from ctsm import ctsm_logging
 from ctsm.hillslopes.hillslope_utils import (
     add_variable_nc,
     add_longxy_latixy_nc,
@@ -31,6 +32,7 @@ def parse_arguments(argv):
     # positional) arguments (e.g., --input-file) get shown as optional.
     required_named = parser.add_argument_group("Required named arguments")
     optional_named = parser.add_argument_group("Optional named arguments")
+    ctsm_logging.add_logging_args(parser)
 
     # Input and output file settings
     required_named.add_argument(
@@ -79,19 +81,22 @@ def parse_arguments(argv):
         default=default_hillslope_form,
     )
 
-    optional_named.add_argument(
-        "-v", "--verbose", help="print info", action="store_true", default=False
-    )
-
     args = parser.parse_args(argv)
+    ctsm_logging.process_logging_args(args)
 
     # Check arguments
     if not os.path.exists(args.input_file):
-        raise FileNotFoundError(f"Input file not found: {args.input_file}")
+        msg = f"Input file not found: {args.input_file}"
+        ctsm_logging.logger.error(msg)
+        raise FileNotFoundError(msg)
     if not os.path.exists(args.input_dir):
-        raise FileNotFoundError(f"Input directory not found: {args.input_dir}")
+        msg = f"Input directory not found: {args.input_dir}"
+        ctsm_logging.logger.error(msg)
+        raise FileNotFoundError(msg)
     if os.path.exists(args.output_file) and not args.overwrite:
-        raise FileExistsError(f"Output file already exists: {args.output_file}")
+        msg = f"Output file already exists: {args.output_file}"
+        ctsm_logging.logger.error(msg)
+        raise FileExistsError(msg)
 
     return args
 
@@ -133,7 +138,10 @@ def get_mask_var(surface_ds):
         if mask_var_option in surface_ds.variables.keys():
             mask_var = mask_var_option
     if mask_var is None:
-        raise KeyError(f"No variable found in sfcfile that looks like a mask ({mask_var_options})")
+        msg = f"No variable found in sfcfile that looks like a mask ({mask_var_options})"
+        ctsm_logging.logger.error(msg)
+        raise KeyError(msg)
+
     landmask = np.asarray(
         surface_ds.variables[mask_var][
             :,
@@ -146,6 +154,7 @@ def main():
     """
     See module description
     """
+    ctsm_logging.setup_logging_pre_config()
     args = parse_arguments(sys.argv[1:])
 
     # Choose data files to combine and append
@@ -164,7 +173,7 @@ def main():
     arrays_uninitialized = True
     chunks_to_process = get_chunks_to_process(args, "combined_chunk")
     for cndx in chunks_to_process:
-        print(f"Chunk {cndx}...")
+        ctsm_logging.logger.info("Chunk %d...", cndx)
         cstr = "{:02d}".format(cndx)
         chunk_file = cfile0.replace("ChunkIndex", cstr)
         file_exists = os.path.exists(chunk_file)
@@ -186,8 +195,7 @@ def main():
             arrays_uninitialized = False
 
         if not file_exists:
-            if args.verbose:
-                print(f"Skipping; chunk file not found: {chunk_file}")
+            ctsm_logging.logger.info("Skipping; chunk file not found: %s", chunk_file)
             continue
 
         # Read hillslope variables from one chunk file
@@ -198,7 +206,9 @@ def main():
         hillslope_vars.update(i, j, add_bedrock, add_stream, landmask=landmask)
 
     if arrays_uninitialized:
-        raise FileNotFoundError("No files found")
+        msg = f"No files found in '{args.input_dir}'"
+        ctsm_logging.logger.error(msg)
+        raise FileNotFoundError(msg)
 
     # -- Write data to file ------------------
     hillslope_vars.save(
@@ -208,7 +218,8 @@ def main():
         nhillslope,
         add_bedrock,
         add_stream,
+        logger=ctsm_logging.logger,
     )
     finish_saving(args)
 
-    print(args.output_file + " created")
+    ctsm_logging.logger.info("%s created", args.output_file)
diff --git a/python/ctsm/hillslopes/combine_gridcell_files.py b/python/ctsm/hillslopes/combine_gridcell_files.py
index b9d176c128..5cd17a0307 100755
--- a/python/ctsm/hillslopes/combine_gridcell_files.py
+++ b/python/ctsm/hillslopes/combine_gridcell_files.py
@@ -11,6 +11,7 @@
 # member Dataset, even though it does.
 from netCDF4 import Dataset  # pylint: disable=no-name-in-module
 
+from ctsm import ctsm_logging
 from ctsm.hillslopes.hillslope_utils import HillslopeVars, get_chunks_to_process
 
 
@@ -82,17 +83,21 @@ def parse_arguments(argv):
         action="store_true",
         default=False,
     )
-    optional_named.add_argument(
-        "-v", "--verbose", help="print info", action="store_true", default=False
-    )
+
+    ctsm_logging.add_logging_args(parser)
 
     args = parser.parse_args(argv)
+    ctsm_logging.process_logging_args(args)
 
     # Check arguments
     if not os.path.exists(args.input_file):
-        raise FileNotFoundError(f"Input file not found: {args.input_file}")
+        msg = f"Input file not found: {args.input_file}"
+        ctsm_logging.logger.error(msg)
+        raise FileNotFoundError(msg)
     if not os.path.exists(args.input_dir):
-        raise FileNotFoundError(f"Input directory not found: {args.input_dir}")
+        msg = f"Input directory not found: {args.input_dir}"
+        ctsm_logging.logger.error(msg)
+        raise FileNotFoundError(msg)
 
     if not os.path.exists(args.output_dir):
         os.makedirs(args.output_dir)
@@ -104,8 +109,8 @@ def main():
     """
     See module description
     """
 
+    ctsm_logging.setup_logging_pre_config()
     args = parse_arguments(sys.argv[1:])
-    verbose = args.verbose
 
     chunks_to_process = get_chunks_to_process(args, "chunk")
@@ -136,10 +141,9 @@ def main():
         # Check for output file existence
         if os.path.exists(outfile_path):
             if args.overwrite:
-                if verbose:
-                    print(outfile_path, " exists; overwriting")
+                ctsm_logging.logger.warning("%s exists; overwriting", outfile_path)
             else:
-                print(outfile_path, " exists; skipping")
+                ctsm_logging.logger.warning("%s exists; skipping", outfile_path)
                 continue
 
         # Locate gridcell files
@@ -147,9 +151,9 @@ def main():
         gfiles = glob.glob(gfile)
         gfiles.sort()
         if len(gfiles) == 0:
-            print(f"Chunk {cndx}: Skipping; no files found matching {gfile}")
+            ctsm_logging.logger.info("Chunk %d: Skipping; no files found matching %s", cndx, gfile)
             continue
-        print(f"Chunk {cndx}: Combining {len(gfiles)} files...")
+        ctsm_logging.logger.info("Chunk %d: Combining %d files...", cndx, len(gfiles))
 
         # Read hillslope data dimensions/settings, if not done yet
         if nhillslope is None:
@@ -223,9 +227,10 @@ def write_to_file(
         nhillslope,
         add_bedrock,
         do_add_stream_channel_vars,
+        logger=ctsm_logging.logger,
         n_lon=n_lon,
         n_lat=n_lat,
         incl_latlon=True,
         incl_chunkmask=True,
     )
-    print(outfile_path + " created")
+    ctsm_logging.logger.info("%s created", outfile_path)
diff --git a/python/ctsm/hillslopes/hillslope_utils.py b/python/ctsm/hillslopes/hillslope_utils.py
index 525a9a0fa0..a1cd7f236f 100644
--- a/python/ctsm/hillslopes/hillslope_utils.py
+++ b/python/ctsm/hillslopes/hillslope_utils.py
@@ -253,6 +253,7 @@ def save(
         nhillslope,
         add_bedrock,
         add_stream,
+        logger=None,
         n_lon=None,
         n_lat=None,
         incl_latlon=False,
@@ -262,7 +263,11 @@
         """
         Save to netCDF
         """
-        print("saving")
+        msg = f"Saving to {output_file}"
+        if logger is None:
+            print(msg)
+        else:
+            logger.info(msg)
 
         # Create and open file
         if save_fsurdat:
@@ -733,12 +738,16 @@ def get_chunks_to_process(args, prefix):
     if not hasattr(args, "cndx") or args.cndx is None:
         # List of gridcell files
         file_list = glob.glob(os.path.join(args.input_dir, prefix + "_[0-9]*nc"))
+        if not file_list:
+            raise FileNotFoundError(f"No files found in '{args.input_dir}'")
         # Extract the chunk number from the file names
         chunk_list = [re.search(r"chunk_\d+", x).group() for x in file_list]
         chunk_list = [x.replace("chunk_", "") for x in chunk_list]
         # Get the list of unique chunk numbers
         chunks_to_process = [int(x) for x in list(set(chunk_list))]
         chunks_to_process.sort()
+        if not chunks_to_process:
+            raise FileNotFoundError(f"No MATCHING chunk files found in '{args.input_dir}'")
     else:
         chunks_to_process = [int(cndx) for cndx in args.cndx[0].split(",")]
         for cndx in chunks_to_process:
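For anyone reading this patch who has not used ctsm_logging before, the pattern the three scripts now share is: call ctsm_logging.setup_logging_pre_config() before parsing arguments, let ctsm_logging.add_logging_args() register the logging options on the parser, pass the parsed namespace to ctsm_logging.process_logging_args(), and emit messages through ctsm_logging.logger instead of print(). The sketch below is illustrative only and is not part of the patch: the four ctsm_logging calls are exactly the ones used above (and it assumes the ctsm package is importable, as it is for scripts in this repository), while the example parser and its --input-file argument are hypothetical.

#!/usr/bin/env python
"""Minimal sketch of the ctsm_logging pattern adopted in this patch (illustrative only)."""
import argparse
import sys

from ctsm import ctsm_logging


def parse_arguments(argv):
    """Build a parser, register the shared CTSM logging options, then process them."""
    parser = argparse.ArgumentParser(description="Example hillslope-style driver")
    parser.add_argument("--input-file", required=True)  # hypothetical example argument
    ctsm_logging.add_logging_args(parser)  # adds the shared logging flags to the parser

    args = parser.parse_args(argv)
    ctsm_logging.process_logging_args(args)  # configures logging from the parsed flags
    return args


def main():
    """Set up logging before argument parsing, then log through the shared logger."""
    ctsm_logging.setup_logging_pre_config()
    args = parse_arguments(sys.argv[1:])
    ctsm_logging.logger.info("Processing %s", args.input_file)  # replaces print()


if __name__ == "__main__":
    main()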