
Commit 8b62972
change logging to use module specific logger, not root logger
prakaa committed Oct 14, 2022
1 parent 09bd43f commit 8b62972
Showing 4 changed files with 23 additions and 19 deletions.
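
The same one-line pattern is applied at the top of each of the four modules: create a module-level logger with logging.getLogger(__name__) and route every logging.info/logging.warning/logging.error call through it. A minimal sketch of the idea (the helper function and table name below are illustrative, not taken from the diff):

import logging

# Module-specific logger: inside nemosis/data_fetch_methods.py, __name__ is
# "nemosis.data_fetch_methods", so this logger is a child of the "nemosis" logger.
logger = logging.getLogger(__name__)

def compile_table(table_name):  # hypothetical helper for illustration
    # Before: logging.info(...) logged on the root logger, and the module-level
    # logging functions call logging.basicConfig() when the root logger has no
    # handlers, silently configuring logging for the whole application.
    # After: the record propagates to whatever handlers the application has
    # attached to the "nemosis" logger (or the root logger), and the library
    # never touches global logging configuration itself.
    logger.info(f"Compiling data for table {table_name}")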
25 changes: 13 additions & 12 deletions nemosis/data_fetch_methods.py
@@ -10,6 +10,7 @@
from . import custom_tables as _custom_tables
from .custom_errors import UserInputError, NoDataToReturn, DataMismatchError

+logger = logging.getLogger(__name__)

def dynamic_data_compiler(
start_time,
@@ -89,7 +90,7 @@ def dynamic_data_compiler(
)
)

logging.info(f"Compiling data for table {table_name}")
logger.info(f"Compiling data for table {table_name}")

start_time = _datetime.strptime(start_time, "%Y/%m/%d %H:%M:%S")
end_time = _datetime.strptime(end_time, "%Y/%m/%d %H:%M:%S")
@@ -126,7 +127,7 @@ def dynamic_data_compiler(
all_data = _filters.filter_on_column_value(
all_data, filter_cols, filter_values
)
logging.info(f"Returning {table_name}.")
logger.info(f"Returning {table_name}.")
return all_data
else:
raise NoDataToReturn(
@@ -199,7 +200,7 @@ def cache_compiler(
)
)

logging.info(f"Caching data for table {table_name}")
logger.info(f"Caching data for table {table_name}")

(
start_time,
@@ -273,10 +274,10 @@ def static_table(
)
)

logging.info(f"Retrieving static table {table_name}")
logger.info(f"Retrieving static table {table_name}")
path_and_name = _os.path.join(raw_data_location, _defaults.names[table_name])
if not _os.path.isfile(path_and_name) or update_static_file:
logging.info(f"Downloading data for table {table_name}")
logger.info(f"Downloading data for table {table_name}")
try:
static_downloader_map[table_name](
_defaults.static_table_url[table_name], path_and_name
@@ -548,7 +549,7 @@ def _dynamic_data_fetch_loop(
data = _get_read_function(fformat, table_type, day)(full_filename)
else:
data = None
-logging.info(
+logger.info(
f"Cache for {table_name} in date range already compiled in"
+ f" {raw_data_location}."
)
@@ -599,7 +600,7 @@ def _dynamic_data_fetch_loop(

data_tables.append(data)
elif not caching_mode:
logging.warning(f"Loading data from {full_filename} failed.")
logger.warning(f"Loading data from {full_filename} failed.")

return data_tables

@@ -653,7 +654,7 @@ def _validate_select_columns(data, select_columns, full_filename):
return []
else:
if rejected_cols:
-logging.warning(
+logger.warning(
f"{rejected_cols} not in {full_filename}. "
+ f"Loading {available_cols}"
)
@@ -667,7 +668,7 @@ def _log_file_creation_message(fformat, table_name, year, month, day, index):
else:
output = logstr + f" {day}, {index}"

-logging.info(output)
+logger.info(output)


def _determine_columns_and_read_csv(
@@ -739,16 +740,16 @@ def _download_data(
Returns: nothing
"""
if day is None:
-logging.info(
+logger.info(
f"Downloading data for table {table_name}, " + f"year {year}, month {month}"
)
elif index is None:
-logging.info(
+logger.info(
f"Downloading data for table {table_name}, "
+ f"year {year}, month {month}, day {day}"
)
else:
-logging.info(
+logger.info(
f"Downloading data for table {table_name}, "
+ f"year {year}, month {month}, day {day},"
+ f"time {index}."
5 changes: 3 additions & 2 deletions nemosis/date_generators.py
@@ -3,6 +3,7 @@
from calendar import monthrange
from datetime import timedelta

+logger = logging.getLogger(__name__)

def year_and_month_gen(start_time, end_time):

@@ -120,7 +121,7 @@ def bid_table_gen(start_time, end_time):
):
continue
if int(year) == 2021 and int(month) == 4 and int(day) == 1:
-logging.warning(
+logger.warning(
"Offer data for 2021/04/01 is known to be missing from the AEMO public \n"
"archive, explicitly skipping. This file would also contain data for the first 4 hr of \n"
+ "2021/04/02 so that data will also be missing from the returned dataframe."
@@ -130,7 +131,7 @@ def bid_table_gen(start_time, end_time):

else:
if int(year) == 2021 and int(month) == 3:
-logging.warning(
+logger.warning(
"Offer data for March 2021 is known to be missing from the AEMO public \n"
"archive, explicitly skipping. This file would also contain data for the first 4 hr of \n"
+ "2021/04/01 so that data will also be missing from the returned dataframe."
5 changes: 3 additions & 2 deletions nemosis/downloader.py
@@ -8,6 +8,7 @@

from . import defaults, custom_errors

+logger = logging.getLogger(__name__)

# Windows Chrome for User-Agent request headers
USR_AGENT_HEADER = {
@@ -30,7 +31,7 @@ def run(year, month, day, index, filename_stub, down_load_to):
try:
download_unzip_csv(url_formatted, down_load_to)
except Exception:
logging.warning(f"{filename_stub} not downloaded")
logger.warning(f"{filename_stub} not downloaded")


def run_bid_tables(year, month, day, index, filename_stub, down_load_to):
@@ -119,7 +120,7 @@ def run_fcas4s(year, month, day, index, filename_stub, down_load_to):
# Check if the csv exists before warning
file_check = os.path.join(down_load_to, filename_stub + ".csv")
if not os.path.isfile(file_check):
logging.warning(f"{filename_stub} not downloaded")
logger.warning(f"{filename_stub} not downloaded")


def download_unzip_csv(url, down_load_to):
7 changes: 4 additions & 3 deletions nemosis/filters.py
@@ -3,6 +3,7 @@
from datetime import datetime, timedelta
import numpy as np

+logger = logging.getLogger(__name__)

def filter_on_start_and_end_date(data, start_time, end_time):
data["START_DATE"] = pd.to_datetime(data["START_DATE"], format="%Y/%m/%d %H:%M:%S")
@@ -40,16 +41,16 @@ def filter_on_timestamp(data, start_time, end_time):
data["TIMESTAMP"], format="%Y/%m/%d %H:%M:%S"
)
except Exception as e:
-logging.error(e)
+logger.error(e)
# if date format is wrong, str may be too short
med_str_len = np.median(data["TIMESTAMP"].str.len())
not_data = data.loc[data["TIMESTAMP"].str.len() < med_str_len, :]
data = data.loc[data["TIMESTAMP"].str.len() >= med_str_len, :]
data["TIMESTAMP"] = pd.to_datetime(
data["TIMESTAMP"], format="%Y/%m/%d %H:%M:%S"
)
logging.warning("Rows with incorrect data formats omitted")
logging.warning(not_data)
logger.warning("Rows with incorrect data formats omitted")
logger.warning(not_data)
finally:
data = data[(data["TIMESTAMP"] > start_time) & (data["TIMESTAMP"] <= end_time)]
return data
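
With module-specific loggers in place, an application that uses nemosis can tune or silence the package's output without reconfiguring the root logger. A sketch of downstream usage (the handler and level choices are illustrative, not prescribed by this commit):

import logging

# "nemosis.data_fetch_methods", "nemosis.downloader", etc. are all children of
# the "nemosis" logger, so configuring the parent covers every module at once.
nemosis_logger = logging.getLogger("nemosis")
nemosis_logger.addHandler(logging.StreamHandler())  # send records to stderr
nemosis_logger.setLevel(logging.WARNING)  # drop the INFO-level progress messages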
